Initial commit for TF-A CI scripts
Signed-off-by: Fathi Boudra <fathi.boudra@linaro.org>
diff --git a/script/__pycache__/gerrit.cpython-35.pyc b/script/__pycache__/gerrit.cpython-35.pyc
new file mode 100644
index 0000000..dd91d7c
--- /dev/null
+++ b/script/__pycache__/gerrit.cpython-35.pyc
Binary files differ
diff --git a/script/artefacts_receiver.py b/script/artefacts_receiver.py
new file mode 100755
index 0000000..5ee0f8e
--- /dev/null
+++ b/script/artefacts_receiver.py
@@ -0,0 +1,178 @@
+#!/usr/bin/env python3
+#
+# Copyright (c) 2019, Arm Limited. All rights reserved.
+#
+# SPDX-License-Identifier: BSD-3-Clause
+#
+
+# This is a server that accepts PUT requests that's primarily used to receive
+# artefacts from Jenkins. Received files are placed under the output directory
+# under the same path mentioned in the URL.
+#
+# The script takes two arguments: IP address and a port number to listen to.
+# Note that the IP address has to be externally visible.
+
+import argparse
+import calendar
+import heapq
+import http.server
+import itertools
+import json
+import os
+import shutil
+import socketserver
+import threading
+import time
+import traceback
+import urllib
+import urllib.request
+
+
+JENKINS_URL = "http://jenkins.oss.arm.com"
+counter = itertools.count()
+exiting = False
+more_consoles = threading.Event()
+pq = []
+received = set()
+
+
+# Class representing a pending job whose console is yet to be downloaded. The
+# methods help identify when the job is finished (ready to download console),
+# and to download the console along with the received artefacts.
+class PendingJob:
+ def __init__(self, job, build, path):
+ self.job = job
+ self.build = build
+ self.path = path
+ self.url = "/".join([JENKINS_URL, "job", self.job, self.build])
+
+ def download_console(self, more):
+ console_url = "/".join([self.url, "consoleText"])
+ try:
+ with urllib.request.urlopen(console_url) as cfd, \
+ open(os.path.join(self.path, "console.txt"), "wb") as fd:
+ shutil.copyfileobj(cfd, fd)
+
+ print("{}: {}#{}: console (+{})".format(time_now(), self.job,
+ self.build, more))
+ except Exception as e:
+ traceback.print_exception(Exception, e, e.__traceback__)
+
+ def is_ready(self):
+ # Return true if there were errors as otherwise this job won't ever be
+ # completed.
+ ret = True
+
+ json_url = "/".join([self.url, "api", "json"])
+ try:
+ with urllib.request.urlopen(json_url) as fd:
+ job_json = json.loads(fd.read().decode())
+ ret = job_json["building"] == False
+ except Exception as e:
+ traceback.print_exception(Exception, e, e.__traceback__)
+
+ return ret
+
+
+# PUT handler for the receiver. When an artefact with a valid job name and build
+# number is received, we keep a pending job instance to download its console
+# when the job finishes.
+class ArtefactsReceiver(http.server.BaseHTTPRequestHandler):
+ def do_PUT(self):
+ parsed = urllib.parse.urlparse(self.path)
+ path = parsed.path.lstrip("/")
+ relpath = os.path.join(opts.output_dir, os.path.dirname(path))
+
+ os.makedirs(relpath, exist_ok=True)
+ content_length = int(self.headers["Content-Length"])
+
+ with open(os.path.join(opts.output_dir, path), "wb") as fd:
+ fd.write(self.rfile.read(content_length))
+
+ self.send_response(200)
+ self.end_headers()
+
+ qs = urllib.parse.parse_qs(parsed.query)
+ job = qs.get("j", [None])[0]
+ build = qs.get("b", [None])[0]
+
+ print("{}: {}#{}: {}".format(time_now(), job, build, path))
+
+ if job and build and (job, build) not in received:
+ item = (now(), next(counter), PendingJob(job, build, relpath))
+ heapq.heappush(pq, item)
+ more_consoles.set()
+ received.add((job, build))
+
+ # Avoid default logging by overriding with a dummy function
+ def log_message(self, *args):
+ pass
+
+
+class Server(socketserver.ThreadingMixIn, http.server.HTTPServer):
+ pass
+
+
+def now():
+ return calendar.timegm(time.gmtime())
+
+
+def time_now():
+ return time.strftime("%H:%M:%S")
+
+
+def console_monitor():
+ while not exiting:
+ # Wait here for the queue to be non-empty
+ try:
+ ts, count, job = pq[0]
+ except IndexError:
+ more_consoles.wait()
+ continue
+
+ # Short nap before next job is available
+ if ts > now():
+ time.sleep(2)
+ continue
+
+ ts, count, job = heapq.heappop(pq)
+ if not job.is_ready():
+ # Re-queue the job for later
+ heapq.heappush(pq, (ts + 10, count, job))
+ continue
+
+ job.download_console(len(pq))
+ more_consoles.clear()
+
+
+parser = argparse.ArgumentParser()
+
+parser.add_argument("--output-dir", "-o", default="artefacts")
+parser.add_argument("ip", help="IP address to listen to")
+parser.add_argument("port", help="Port number to listen to")
+
+opts = parser.parse_args()
+
+os.makedirs(opts.output_dir, exist_ok=True)
+
+server = Server((opts.ip, int(opts.port)), ArtefactsReceiver)
+print("Trusted Firmware-A artefacts receiver:")
+print()
+print("\tUse artefacts_receiver=http://{}:{}".format(opts.ip, opts.port))
+print("\tArtefacts will be placed under '{}'. Waiting...".format(opts.output_dir))
+print()
+
+try:
+ more_consoles.clear()
+ console_thread = threading.Thread(target=console_monitor)
+ console_thread.start()
+ server.serve_forever()
+except KeyboardInterrupt:
+ pass
+finally:
+ print()
+ print("Exiting...")
+ exiting = True
+ more_consoles.set()
+ console_thread.join()
+ server.server_close()
diff --git a/script/build_package.sh b/script/build_package.sh
new file mode 100755
index 0000000..fa8d321
--- /dev/null
+++ b/script/build_package.sh
@@ -0,0 +1,1035 @@
+#!/bin/bash
+#
+# Copyright (c) 2019, Arm Limited. All rights reserved.
+#
+# SPDX-License-Identifier: BSD-3-Clause
+#
+
+# Builds a package with Trusted Firmware and other payload binaries. The package
+# is meant to be executed by run_package.sh
+
+set -e
+
+ci_root="$(readlink -f "$(dirname "$0")/..")"
+source "$ci_root/utils.sh"
+
+if [ ! -d "$workspace" ]; then
+ die "Directory $workspace doesn't exist"
+fi
+
+# Directory to where the source code e.g. for Trusted Firmware is checked out.
+tf_root="${tf_root:-$workspace/trusted_firmware}"
+tftf_root="${tftf_root:-$workspace/trusted_firmware_tf}"
+scp_root="${scp_root:-$workspace/scp}"
+
+# Refspecs
+tf_refspec="$TF_REFSPEC"
+tftf_refspec="$TFTF_REFSPEC"
+scp_refspec="$SCP_REFSPEC"
+
+test_config="${TEST_CONFIG:?}"
+test_group="${TEST_GROUP:?}"
+build_configs="${BUILD_CONFIG:?}"
+run_config="${RUN_CONFIG:?}"
+
+archive="$artefacts"
+build_log="$artefacts/build.log"
+fiptool="$tf_root/tools/fiptool/fiptool"
+cert_create="$tf_root/tools/cert_create/cert_create"
+
+# Validate $bin_mode
+case "$bin_mode" in
+ "" | debug | release)
+ ;;
+ *)
+ die "Invalid value for bin_mode: $bin_mode"
+ ;;
+esac
+
+# File to save any environment settings made from within hooks
+hook_env_file="$(mktempfile)"
+
+# Check if a config is valid
+config_valid() {
+ local config="${1?}"
+ if [ -z "$config" ] || [ "$(basename "$config")" = "nil" ]; then
+ return 1
+ fi
+
+ return 0
+}
+
+# Echo from a build wrapper. Print to descriptor 3 that's opened by the build
+# function.
+echo_w() {
+ echo $echo_flags "$@" >&3
+}
+
+# Print a separator to the log file. Intended to be used at the tail end of a pipe
+log_separator() {
+ {
+ echo
+ echo "----------"
+ } >> "$build_log"
+
+ tee -a "$build_log"
+
+ {
+ echo "----------"
+ echo
+ } >> "$build_log"
+}
+
+# Call function $1 if it's defined
+call_func() {
+ if type "${1:?}" &>/dev/null; then
+ echo
+ echo "> ${2:?}:$1()"
+ eval "$1"
+ echo "< $2:$1()"
+ fi
+}
+
+# Call hook $1 in all chosen fragments if it's defined. Hooks are invoked from
+# within a subshell, so any variables set within a hook are lost. Should a
+# variable needs to be set from within a hook, the function 'set_hook_var'
+# should be used
+call_hook() {
+ local func="$1"
+ local config_fragment
+
+ [ -z "$func" ] && return 0
+
+ : >"$hook_env_file"
+
+ if [ "$run_config_candiates" ]; then
+ for config_fragment in $run_config_candiates; do
+ (
+ source "$ci_root/run_config/$config_fragment"
+ call_func "$func" "$config_fragment"
+ )
+ done
+ fi
+
+ # Also source test config file
+ (
+ unset "$func"
+ source "$test_config_file"
+ call_func "$func" "$(basename $test_config_file)"
+ )
+
+ # Have any variables set take effect
+ source "$hook_env_file"
+}
+
+# Set a variable from within a hook
+set_hook_var() {
+ echo "export $1=\"${2?}\"" >> "$hook_env_file"
+}
+
+# Append to an array from within a hook
+append_hook_var() {
+ echo "export $1+=\"${2?}\"" >> "$hook_env_file"
+}
+
+# Have the main build script source a file
+source_later() {
+ echo "source ${1?}" >> "$hook_env_file"
+}
+
+# Setup TF build wrapper function by pointing to a script containing a function
+# that will be called with the TF build commands.
+setup_tf_build_wrapper() {
+ source_later "$ci_root/script/${wrapper?}_wrapper.sh"
+ set_hook_var "tf_build_wrapper" "${wrapper}_wrapper"
+ echo "Setup $wrapper build wrapper."
+}
+
+# Collect .bin files for archiving
+collect_build_artefacts() {
+ if [ ! -d "${from:?}" ]; then
+ return
+ fi
+
+ if ! find "$from" \( -name "*.bin" -o -name '*.elf' -o -name '*.dtb' \) -exec cp -t "${to:?}" '{}' +; then
+ echo "You probably are running local CI on local repositories."
+ echo "Did you set 'dont_clean' but forgot to run 'distclean'?"
+ die
+ fi
+}
+
+# SCP and MCP binaries are named firmware.{bin,elf}, and are placed under
+# scp/mcp_ramfw and scp/mcp_romfw directories, so can't be collected by
+# collect_build_artefacts function.
+collect_scp_artefacts() {
+ to="${to:?}" \
+ find "$scp_root" \( -name "*.bin" -o -name '*.elf' \) -exec bash -c '
+ for file; do
+ ext="$(echo $file | awk -F. "{print \$NF}")"
+ case $file in
+ */scp_ramfw/*)
+ cp $file $to/scp_ram.$ext
+ ;;
+ */scp_romfw/*)
+ cp $file $to/scp_rom.$ext
+ ;;
+ */mcp_ramfw/*)
+ cp $file $to/mcp_ram.$ext
+ ;;
+ */mcp_romfw/*)
+ cp $file $to/mcp_rom.$ext
+ ;;
+ *)
+ echo "Unknown SCP binary: $file" >&2
+ ;;
+ esac
+ done
+ ' bash '{}' +
+}
+
+# Arrange environment variables to be set when expect scripts are launched
+set_expect_variable() {
+ local var="${1:?}"
+ local val="${2?}"
+
+ local run_root="${archive:?}/run"
+ local uart_dir="$run_root/uart${uart:?}"
+ mkdir -p "$uart_dir"
+
+ env_file="$uart_dir/env" quote="1" emit_env "$var" "$val"
+ echo "UART$uart: env has $@"
+}
+
+# Place in the binary package a pointer to the expect script, and its parameters
+track_expect() {
+ local file="${file:?}"
+ local timeout="${timeout-600}"
+ local run_root="${archive:?}/run"
+
+ local uart_dir="$run_root/uart${uart:?}"
+ mkdir -p "$uart_dir"
+
+ echo "$file" > "$uart_dir/expect"
+ echo "$timeout" > "$uart_dir/timeout"
+
+ echo "UART$uart to be tracked with $file; timeout ${timeout}s"
+
+ # The run script assumes UART0 to be primary. If we're asked to set any
+ # other UART to be primary, set a run environment variable to signal
+ # that to the run script
+ if upon "$set_primary"; then
+ echo "Primary UART set to UART$uart."
+ set_run_env "primary_uart" "$uart"
+ fi
+}
+
+# Extract a FIP in $1 using fiptool
+extract_fip() {
+ local fip="$1"
+
+ if is_url "$1"; then
+ url="$1" fetch_file
+ fip="$(basename "$1")"
+ fi
+
+ "$fiptool" unpack "$fip"
+ echo "Extracted FIP: $fip"
+}
+
+# Report build failure by printing the tail end of the build log. Archive the
+# build log for later inspection
+fail_build() {
+ local log_path
+
+ if upon "$jenkins_run"; then
+ log_path="$BUILD_URL/artifact/artefacts/build.log"
+ else
+ log_path="$build_log"
+ fi
+
+ echo
+ echo "Build failed! Tail of build log below:"
+ echo "[...]"
+ echo
+ tail -n15 "$build_log"
+ echo
+ echo "See $log_path for full output"
+ echo
+ cp -t "$archive" "$build_log"
+ exit 1;
+}
+
+# Build a FIP with supplied arguments
+build_fip() {
+ (
+ echo "Building FIP with arguments: $@"
+ local tf_env="$workspace/tf.env"
+
+ if [ -f "$tf_env" ]; then
+ set -a
+ source "$tf_env"
+ set +a
+ fi
+
+ make -C "$tf_root" $(cat "$tf_config_file") DEBUG="$DEBUG" V=1 "$@" \
+ ${fip_targets:-fip} &>>"$build_log" || fail_build
+ )
+}
+
+fip_update() {
+ # Before the update process, check if the given image is supported by
+ # the fiptool. It's assumed that both fiptool and cert_create move in
+# tandem, and therefore, if one has support, the other has it too.
+ if ! "$fiptool" update 2>&1 | grep -qe "\s\+--${bin_name:?}"; then
+ return 1
+ fi
+
+ if not_upon "$(get_tf_opt TRUSTED_BOARD_BOOT)"; then
+ echo "Updating FIP image: $bin_name"
+ # Update HW config. Without TBBR, it's only a matter of using
+ # the update sub-command of fiptool
+ "$fiptool" update "--$bin_name" "${src:-}" \
+ "$archive/fip.bin"
+ else
+ echo "Updating FIP image (TBBR): $bin_name"
+ # With TBBR, we need to unpack, re-create certificates, and then
+ # recreate the FIP.
+ local fip_dir="$(mktempdir)"
+ local bin common_args stem
+ local rot_key="$(get_tf_opt ROT_KEY)"
+
+ rot_key="${rot_key:?}"
+ if ! is_abs "$rot_key"; then
+ rot_key="$tf_root/$rot_key"
+ fi
+
+ # Arguments only for cert_create
+ local cert_args="-n"
+ cert_args+=" --tfw-nvctr ${nvctr:-31}"
+ cert_args+=" --ntfw-nvctr ${nvctr:-223}"
+ cert_args+=" --key-alg ${KEY_ALG:-rsa}"
+ cert_args+=" --rot-key $rot_key"
+
+ local dyn_config_opts=(
+ "hw-config"
+ "tb-fw-config"
+ "nt-fw-config"
+ "soc-fw-config"
+ "tos-fw-config"
+ )
+
+ # Binaries without key certificates
+ declare -A has_no_key_cert
+ for bin in "tb-fw" "${dyn_config_opts[@]}"; do
+ has_no_key_cert["$bin"]="1"
+ done
+
+ # Binaries without certificates
+ declare -A has_no_cert
+ for bin in "hw-config" "${dyn_config_opts[@]}"; do
+ has_no_cert["$bin"]="1"
+ done
+
+ pushd "$fip_dir"
+
+ # Unpack FIP
+ "$fiptool" unpack "$archive/fip.bin" &>>"$build_log"
+
+ # Remove all existing certificates
+ rm -f *-cert.bin
+
+ # Copy the binary to be updated
+ cp -f "$src" "${bin_name}.bin"
+
+ # FIP unpack dumps binaries with the same name as the option
+ # used to pack it; likewise for certificates. Reverse-engineer
+ # the command line from the binary output.
+ common_args="--trusted-key-cert trusted_key.crt"
+ for bin in *.bin; do
+ stem="${bin%%.bin}"
+ common_args+=" --$stem $bin"
+ if not_upon "${has_no_cert[$stem]}"; then
+ common_args+=" --$stem-cert $stem.crt"
+ fi
+ if not_upon "${has_no_key_cert[$stem]}"; then
+ common_args+=" --$stem-key-cert $stem-key.crt"
+ fi
+ done
+
+ # Create certificates
+ "$cert_create" $cert_args $common_args &>>"$build_log"
+
+ # Recreate and archive FIP
+ "$fiptool" create $common_args "fip.bin" &>>"$build_log"
+ archive_file "fip.bin"
+
+ popd
+ fi
+}
+
+# Update hw-config in FIP, and remove the original DTB afterwards.
+update_fip_hw_config() {
+ # The DTB needs to be loaded by the model (and not updated in the FIP)
+ # in configs where BL2 isn't present
+ case "1" in
+ "$(get_tf_opt RESET_TO_BL31)" | \
+ "$(get_tf_opt RESET_TO_SP_MIN)" | \
+ "$(get_tf_opt BL2_AT_EL3)")
+ return 0;;
+ esac
+
+ if bin_name="hw-config" src="$archive/dtb.bin" fip_update; then
+ # Remove the DTB so that model won't load it
+ rm -f "$archive/dtb.bin"
+ fi
+}
+
+get_scp_opt() {
+ (
+ name="${1:?}"
+ if config_valid "$scp_config_file"; then
+ source "$scp_config_file"
+ echo "${!name}"
+ fi
+ )
+}
+
+get_tftf_opt() {
+ (
+ name="${1:?}"
+ if config_valid "$tftf_config_file"; then
+ source "$tftf_config_file"
+ echo "${!name}"
+ fi
+ )
+}
+
+get_tf_opt() {
+ (
+ name="${1:?}"
+ if config_valid "$tf_config_file"; then
+ source "$tf_config_file"
+ echo "${!name}"
+ fi
+ )
+}
+
+build_tf() {
+ (
+ env_file="$workspace/tf.env"
+ config_file="${tf_build_config:-$tf_config_file}"
+
+ # Build fiptool and all targets by default
+ build_targets="${tf_build_targets:-fiptool all}"
+
+ source "$config_file"
+
+ # If it is a TBBR build, extract the MBED TLS library from archive
+ if [ "$(get_tf_opt TRUSTED_BOARD_BOOT)" = 1 ]; then
+ mbedtls_dir="$workspace/mbedtls"
+ if [ ! -d "$mbedtls_dir" ]; then
+ mbedtls_ar="$workspace/mbedtls.tar.gz"
+
+ url="$mbedtls_archive" saveas="$mbedtls_ar" fetch_file
+ mkdir "$mbedtls_dir"
+ pushd "$mbedtls_dir"
+ tar -xzf "$mbedtls_ar"
+ popd
+
+ fi
+
+ emit_env "MBEDTLS_DIR" "$mbedtls_dir"
+ fi
+
+ if [ -f "$env_file" ]; then
+ set -a
+ source "$env_file"
+ set +a
+ fi
+
+ cd "$tf_root"
+
+ # Always distclean when running on Jenkins. Skip distclean when running
+ # locally and explicitly requested.
+ if upon "$jenkins_run" || not_upon "$dont_clean"; then
+ make distclean &>>"$build_log" || fail_build
+ fi
+
+ # Log build command line. It is left unfolded on purpose to assist
+ # copying to clipboard.
+ cat <<EOF | log_separator >/dev/null
+
+Build command line:
+ $tf_build_wrapper make $make_j_opts $(cat "$config_file" | tr '\n' ' ') DEBUG=$DEBUG V=1 $build_targets
+
+EOF
+
+ # Build TF. Since build output is being directed to the build log, have
+ # descriptor 3 point to the current terminal for build wrappers to vent.
+ $tf_build_wrapper make $make_j_opts $(cat "$config_file") \
+ DEBUG="$DEBUG" V=1 \
+ $build_targets 3>&1 &>>"$build_log" || fail_build
+ )
+}
+
+build_tftf() {
+ (
+ config_file="${tftf_build_config:-$tftf_config_file}"
+
+ # Build tftf target by default
+ build_targets="${tftf_build_targets:-all}"
+
+ source "$config_file"
+
+ cd "$tftf_root"
+
+ # Always distclean when running on Jenkins. Skip distclean when running
+ # locally and explicitly requested.
+ if upon "$jenkins_run" || not_upon "$dont_clean"; then
+ make distclean &>>"$build_log" || fail_build
+ fi
+
+ # TFTF build system cannot reliably deal with -j option, so we avoid
+ # using that.
+
+ # Log build command line
+ cat <<EOF | log_separator >/dev/null
+
+Build command line:
+ make $(cat "$config_file" | tr '\n' ' ') DEBUG=$DEBUG V=1 $build_targets
+
+EOF
+
+ make $(cat "$config_file") DEBUG="$DEBUG" V=1 \
+ $build_targets &>>"$build_log" || fail_build
+ )
+}
+
+build_scp() {
+ (
+ config_file="${scp_build_config:-$scp_config_file}"
+
+ source "$config_file"
+
+ cd "$scp_root"
+
+ # Always distclean when running on Jenkins. Skip distclean when running
+ # locally and explicitly requested.
+ if upon "$jenkins_run" || not_upon "$dont_clean"; then
+ make clean &>>"$build_log" || fail_build
+ fi
+
+ # Log build command line. It is left unfolded on purpose to assist
+ # copying to clipboard.
+ cat <<EOF | log_separator >/dev/null
+
+SCP build command line:
+ make $(cat "$config_file" | tr '\n' ' ') MODE=$mode V=1
+
+EOF
+
+ # Build SCP
+ make $(cat "$config_file") MODE="$mode" V=1 &>>"$build_log" \
+ || fail_build
+ )
+}
+
+# Set metadata for the whole package so that it can be used by both Jenkins and
+# shell
+set_package_var() {
+ env_file="$artefacts/env" emit_env "$@"
+}
+
+set_tf_build_targets() {
+ echo "Set build target to '${targets:?}'"
+ set_hook_var "tf_build_targets" "$targets"
+}
+
+set_tftf_build_targets() {
+ echo "Set build target to '${targets:?}'"
+ set_hook_var "tftf_build_targets" "$targets"
+}
+
+set_scp_build_targets() {
+ echo "Set build target to '${targets:?}'"
+ set_hook_var "scp_build_targets" "$targets"
+}
+
+# Look under $archive directory for known files such as blX images, kernel, DTB,
+# initrd etc. For each known file foo, if foo.bin exists, then set variable
+# foo_bin to the path of the file. Make the path relative to the workspace so as
+# to remove any @ characters, which Jenkins inserts for parallel runs. If the
+# file doesn't exist, unset its path.
+set_default_bin_paths() {
+ local image image_name image_path path
+ local archive="${archive:?}"
+ local set_vars
+ local var
+
+ pushd "$archive"
+
+ for file in *.bin; do
+ # Get a shell variable from the file's stem
+ var_name="${file%%.*}_bin"
+ var_name="$(echo "$var_name" | sed -r 's/[^[:alnum:]]/_/g')"
+
+ # Skip setting the variable if it's already
+ if [ "${!var_name}" ]; then
+ echo "Note: not setting $var_name; already set to ${!var_name}"
+ continue
+ else
+ set_vars+="$var_name "
+ fi
+
+ eval "$var_name=$file"
+ done
+
+ echo "Binary paths set for: "
+ {
+ for var in $set_vars; do
+ echo -n "\$$var "
+ done
+ } | fmt -80 | sed 's/^/ /'
+ echo
+
+ popd
+}
+
+gen_model_params() {
+ local model_param_file="$archive/model_params"
+
+ set_default_bin_paths
+ echo "Generating model parameter for $model..."
+ source "$ci_root/model/${model:?}.sh"
+ archive_file "$model_param_file"
+}
+
+set_model_path() {
+ set_run_env "model_path" "${1:?}"
+}
+
+set_run_env() {
+ local var="${1:?}"
+ local val="${2?}"
+ local run_root="${archive:?}/run"
+
+ mkdir -p "$run_root"
+ env_file="$run_root/env" quote="1" emit_env "$var" "$val"
+}
+
+show_head() {
+	# Display HEAD description
+ pushd "$1"
+ git show --quiet --no-color | sed 's/^/ > /g'
+ echo
+ popd
+}
+
+# Choose debug binaries to run; by default, release binaries are chosen to run
+use_debug_bins() {
+ local run_root="${archive:?}/run"
+
+ echo "Choosing debug binaries for execution"
+ set_package_var "BIN_MODE" "debug"
+}
+
+assert_can_git_clone() {
+ local name="${1:?}"
+ local dir="${!name}"
+
+ # If it doesn't exist, it can be cloned into
+ if [ ! -e "$dir" ]; then
+ return 0
+ fi
+
+ # If it's a directory, it must be a Git clone already
+ if [ -d "$dir" ] && [ -d "$dir/.git" ]; then
+ # No need to clone again
+ echo "Using existing git clone for $name: $dir"
+ return 1
+ fi
+
+ die "Path $dir exists but is not a git clone"
+}
+
+clone_repo() {
+ if ! is_url "${clone_url?}"; then
+ # For --depth to take effect on local paths, it needs to use the
+ # file:// scheme.
+ clone_url="file://$clone_url"
+ fi
+
+ git clone -q --depth 1 "$clone_url" "${where?}"
+ if [ "$refspec" ]; then
+ pushd "$where"
+ git fetch -q --depth 1 origin "$refspec"
+ git checkout -q FETCH_HEAD
+ popd
+ fi
+}
+
+build_unstable() {
+ echo "--BUILD UNSTABLE--" | tee -a "$build_log"
+}
+
+undo_patch_record() {
+ if [ ! -f "${patch_record:?}" ]; then
+ return
+ fi
+
+ # Undo patches in reverse
+ echo
+ for patch_name in $(tac "$patch_record"); do
+ echo "Undoing $patch_name..."
+ if ! git apply -R "$ci_root/patch/$patch_name"; then
+ if upon "$local_ci"; then
+ echo
+ echo "Your local directory may have been dirtied."
+ echo
+ fi
+ fail_build
+ fi
+ done
+
+ rm -f "$patch_record"
+}
+
+undo_local_patches() {
+ pushd "$tf_root"
+ patch_record="$tf_patch_record" undo_patch_record
+ popd
+
+ if [ -d "$tftf_root" ]; then
+ pushd "$tftf_root"
+ patch_record="$tftf_patch_record" undo_patch_record
+ popd
+ fi
+}
+
+undo_tftf_patches() {
+ pushd "$tftf_root"
+ patch_record="$tftf_patch_record" undo_patch_record
+ popd
+}
+
+undo_tf_patches() {
+ pushd "$tf_root"
+ patch_record="$tf_patch_record" undo_patch_record
+ popd
+}
+
+apply_patch() {
+ # If skip_patches is set, the developer has applied required patches
+ # manually. They probably want to keep them applied for debugging
+ # purposes too. This means we don't have to apply/revert them as part of
+ # build process.
+ if upon "$skip_patches"; then
+ echo "Skipped applying ${1:?}..."
+ return 0
+ else
+ echo "Applying ${1:?}..."
+ fi
+
+ if git apply < "$ci_root/patch/$1"; then
+ echo "$1" >> "${patch_record:?}"
+ else
+ if upon "$local_ci"; then
+ undo_local_patches
+ fi
+ fail_build
+ fi
+}
+
+apply_tftf_patch() {
+ pushd "$tftf_root"
+ patch_record="$tftf_patch_record" apply_patch "$1"
+ popd
+}
+
+apply_tf_patch() {
+ pushd "$tf_root"
+ patch_record="$tf_patch_record" apply_patch "$1"
+ popd
+}
+
+# Clear workspace for a local run
+if not_upon "$jenkins_run"; then
+ rm -rf "$workspace"
+
+ # Clear residue from previous runs
+ rm -rf "$archive"
+fi
+
+mkdir -p "$workspace"
+mkdir -p "$archive"
+set_package_var "TEST_CONFIG" "$test_config"
+
+{
+echo
+echo "CONFIGURATION: $test_group/$test_config"
+echo
+} |& log_separator
+
+tf_config="$(echo "$build_configs" | awk -F, '{print $1}')"
+tftf_config="$(echo "$build_configs" | awk -F, '{print $2}')"
+scp_config="$(echo "$build_configs" | awk -F, '{print $3}')"
+
+test_config_file="$ci_root/group/$test_group/$test_config"
+
+tf_config_file="$ci_root/tf_config/$tf_config"
+tftf_config_file="$ci_root/tftf_config/$tftf_config"
+scp_config_file="$ci_root/scp_config/$scp_config"
+
+# File that keeps track of applied patches
+tf_patch_record="$workspace/tf_patches"
+tftf_patch_record="$workspace/tftf_patches"
+
+pushd "$workspace"
+
+if ! config_valid "$tf_config"; then
+ tf_config=
+else
+ echo "Trusted Firmware config:"
+ echo
+ sort "$tf_config_file" | sed '/^\s*$/d;s/^/\t/'
+ echo
+fi
+
+if ! config_valid "$tftf_config"; then
+ tftf_config=
+else
+ echo "Trusted Firmware TF config:"
+ echo
+ sort "$tftf_config_file" | sed '/^\s*$/d;s/^/\t/'
+ echo
+fi
+
+if ! config_valid "$scp_config"; then
+ scp_config=
+else
+ echo "SCP firmware config:"
+ echo
+ sort "$scp_config_file" | sed '/^\s*$/d;s/^/\t/'
+ echo
+fi
+
+if ! config_valid "$run_config"; then
+ run_config=
+fi
+
+if [ "$tf_config" ] && assert_can_git_clone "tf_root"; then
+ # If the Trusted Firmware repository has already been checked out, use
+ # that location. Otherwise, clone one ourselves.
+ echo "Cloning Trusted Firmware..."
+ clone_url="${TF_CHECKOUT_LOC:-$tf_src_repo_url}" where="$tf_root" \
+ refspec="$TF_REFSPEC" clone_repo &>>"$build_log"
+ show_head "$tf_root"
+fi
+
+if [ "$tftf_config" ] && assert_can_git_clone "tftf_root"; then
+ # If the Trusted Firmware TF repository has already been checked out,
+ # use that location. Otherwise, clone one ourselves.
+ echo "Cloning Trusted Firmware TF..."
+ clone_url="${TFTF_CHECKOUT_LOC:-$tftf_src_repo_url}" where="$tftf_root" \
+ refspec="$TFTF_REFSPEC" clone_repo &>>"$build_log"
+ show_head "$tftf_root"
+fi
+
+if [ "$scp_config" ] && assert_can_git_clone "scp_root"; then
+ # If the SCP firmware repository has already been checked out,
+ # use that location. Otherwise, clone one ourselves.
+ echo "Cloning SCP Firmware..."
+ clone_url="${SCP_CHECKOUT_LOC:-$scp_src_repo_url}" where="$scp_root" \
+ refspec="${SCP_REFSPEC-master-upstream}" clone_repo &>>"$build_log"
+
+ pushd "$scp_root"
+
+ # Use filer submodule as a reference if it exists
+ if [ -d "$SCP_CHECKOUT_LOC/cmsis" ]; then
+ cmsis_reference="--reference $SCP_CHECKOUT_LOC/cmsis"
+ fi
+
+ # If we don't have a reference yet, fall back to $cmsis_root if set, or
+ # then to project filer if accessible.
+ if [ -z "$cmsis_reference" ]; then
+ cmsis_ref_repo="${cmsis_root:-$project_filer/ref-repos/cmsis}"
+ if [ -d "$cmsis_ref_repo" ]; then
+ cmsis_reference="--reference $cmsis_ref_repo"
+ fi
+ fi
+
+ git submodule -q update $cmsis_reference --init
+
+ popd
+
+ show_head "$scp_root"
+fi
+
+if [ "$run_config" ]; then
+ # Get candidates for run config
+ run_config_candiates="$("$ci_root/script/gen_run_config_candidates.py" \
+ "$run_config")"
+ if [ -z "$run_config_candiates" ]; then
+ die "No run config candidates!"
+ else
+ echo
+ echo "Chosen fragments:"
+ echo
+ echo "$run_config_candiates" | sed 's/^\|\n/\t/g'
+ echo
+ fi
+fi
+
+call_hook "test_setup"
+echo
+
+if upon "$local_ci"; then
+ # For local runs, since each config is tried in sequence, it's
+ # advantageous to run jobs in parallel
+ if [ "$make_j" ]; then
+ make_j_opts="-j $make_j"
+ else
+ n_cores="$(getconf _NPROCESSORS_ONLN)" 2>/dev/null || true
+ if [ "$n_cores" ]; then
+ make_j_opts="-j $n_cores"
+ fi
+ fi
+fi
+
+modes="${bin_mode:-debug release}"
+for mode in $modes; do
+ # Build with a temporary archive
+ build_archive="$archive/$mode"
+ mkdir "$build_archive"
+
+ if [ "$mode" = "debug" ]; then
+ DEBUG=1
+ else
+ DEBUG=0
+ fi
+
+ # Perform builds in a subshell so as not to pollute the current and
+ # subsequent builds' environment
+
+ # SCP build
+ if config_valid "$scp_config"; then
+ (
+ echo "##########"
+
+ # Source platform-specific utilities
+ plat="$(get_scp_opt PRODUCT)"
+ plat_utils="$ci_root/${plat}_utils.sh"
+ if [ -f "$plat_utils" ]; then
+ source "$plat_utils"
+ fi
+
+ archive="$build_archive"
+ scp_build_root="$scp_root/build"
+
+ echo "Building SCP Firmware ($mode) ..." |& log_separator
+
+ build_scp
+
+ to="$archive" collect_scp_artefacts
+
+ echo "##########"
+ echo
+ )
+ fi
+
+ # TFTF build
+ if config_valid "$tftf_config"; then
+ (
+ echo "##########"
+
+ # Source platform-specific utilities
+ plat="$(get_tftf_opt PLAT)"
+ plat_utils="$ci_root/${plat}_utils.sh"
+ if [ -f "$plat_utils" ]; then
+ source "$plat_utils"
+ fi
+
+ archive="$build_archive"
+ tftf_build_root="$tftf_root/build"
+
+ echo "Building Trusted Firmware TF ($mode) ..." |& log_separator
+
+ # Call pre-build hook
+ call_hook pre_tftf_build
+
+ build_tftf
+
+ from="$tftf_build_root" to="$archive" collect_build_artefacts
+
+ # Clear any local changes made by applied patches
+ undo_tftf_patches
+
+ echo "##########"
+ echo
+ )
+ fi
+
+ # TF build
+ if config_valid "$tf_config"; then
+ (
+ echo "##########"
+
+ # Source platform-specific utilities
+ plat="$(get_tf_opt PLAT)"
+ plat_utils="$ci_root/${plat}_utils.sh"
+ if [ -f "$plat_utils" ]; then
+ source "$plat_utils"
+ fi
+
+ archive="$build_archive"
+ tf_build_root="$tf_root/build"
+
+ echo "Building Trusted Firmware ($mode) ..." |& log_separator
+
+ # Call pre-build hook
+ call_hook pre_tf_build
+
+ build_tf
+
+ # Call post-build hook
+ call_hook post_tf_build
+
+ # Pre-archive hook
+ call_hook pre_tf_archive
+
+ from="$tf_build_root" to="$archive" collect_build_artefacts
+
+ # Post-archive hook
+ call_hook post_tf_archive
+
+ call_hook fetch_tf_resource
+ call_hook post_fetch_tf_resource
+
+ # Clear any local changes made by applied patches
+ undo_tf_patches
+
+ echo "##########"
+ )
+ fi
+
+ echo
+ echo
+done
+
+call_hook pre_package
+
+call_hook post_package
+
+if upon "$jenkins_run" && upon "$artefacts_receiver" && [ -d "artefacts" ]; then
+ tar -cJf "artefacts.tar.xz" "artefacts"
+ where="$artefacts_receiver/$test_group/$test_config/artefacts.tar.xz"
+ where+="?j=$JOB_NAME&b=$BUILD_NUMBER"
+ if wget -q --method=PUT --body-file="artefacts.tar.xz" "$where"; then
+ echo "Artefacts submitted to $where."
+ else
+ echo "Error submitting artefacts to $where."
+ fi
+fi
+
+echo
+echo "Done"
diff --git a/script/clone_repos.sh b/script/clone_repos.sh
new file mode 100755
index 0000000..53b94bb
--- /dev/null
+++ b/script/clone_repos.sh
@@ -0,0 +1,475 @@
+#!/bin/bash
+#
+# Copyright (c) 2019, Arm Limited. All rights reserved.
+#
+# SPDX-License-Identifier: BSD-3-Clause
+#
+#
+# Clone and sync all Trusted Firmware repositories.
+#
+# The main repository is checked out at the required refspec (GERRIT_REFSPEC).
+# The rest of the repositories are attempted to sync to the topic of that refspec
+# (as pointed to by GERRIT_TOPIC). 'repo_under_test' must be set to a
+# GERRIT_PROJECT for sync to work.
+#
+# For every cloned repository, set its location to a variable so that the
+# checked out location can be passed down to sub-jobs.
+#
+# Generate an environment file that can then be sourced by the caller.
+
+set -e
+
+ci_root="$(readlink -f "$(dirname "$0")/..")"
+source "$ci_root/utils.sh"
+
+clone_log="$workspace/clone_repos.log"
+clone_data="$workspace/clone.data"
+override_data="$workspace/override.data"
+gerrit_data="$workspace/gerrit.data"
+inject_data="$workspace/inject.data"
+
+# File containing parameters for sub jobs
+param_file="$workspace/env.param"
+
+# Emit a parameter to sub jobs
+emit_param() {
+ echo "$1=$2" >> "$param_file"
+}
+
+meta_data() {
+ echo "$1" >> "$clone_data"
+}
+
+# Path into the project filer where various pieces of scripts that override
+# some CI environment variables are stored.
+ci_overrides="$project_filer/ci-overrides"
+
+display_override() {
+ echo
+ echo -n "Override: "
+ # Print the relative path of the override file.
+ echo "$1" | sed "s#$ci_overrides/\?##"
+}
+
+strip_var() {
+ local var="$1"
+ local val="$(echo "${!var}" | sed 's#^\s*\|\s*$##g')"
+ eval "$var=\"$val\""
+}
+
+prefix_tab() {
+ sed 's/^/\t/g' < "${1:?}"
+}
+
+prefix_arrow() {
+ sed 's/^/ > /g' < "${1:?}"
+}
+
+test_source() {
+ local file="${1:?}"
+ if ! bash -c "source $file" &>/dev/null; then
+ return 1
+ fi
+
+ source "$file"
+ return 0
+}
+
+post_gerrit_comment() {
+ local gerrit_url="${gerrit_url:-$GERRIT_HOST}"
+ gerrit_url="${gerrit_url:?}"
+
+ # Posting comments to gerrit.oss.arm.com does not require any special
+ # credentials, review.trustedfirmware.org does. Provide the ci-bot-user
+ # account credentials for the latter.
+ if [ "$gerrit_url" == "review.trustedfirmware.org" ]; then
+ ssh -p 29418 -i "$tforg_key" "$tforg_user@$gerrit_url" gerrit \
+ review "$GERRIT_CHANGE_NUMBER,$GERRIT_PATCHSET_NUMBER" \
+ --message "'$(cat ${msg_file:?})'"
+ else
+ ssh -p 29418 "$gerrit_url" gerrit review \
+ "$GERRIT_CHANGE_NUMBER,$GERRIT_PATCHSET_NUMBER" \
+ --message "'$(cat ${msg_file:?})'"
+ fi
+}
+
+# Whether we've synchronized branches or not
+has_synched=0
+
+# Whether we've overridden some CI environment variables.
+has_overrides=0
+
+# Whether we've injected environment via Jenkins
+has_env_inject=0
+
+# Default Gerrit failure message file
+gerrit_fail_msg_file="$workspace/gerrit-fail"
+
+clone_and_sync() {
+ local stat
+ local topic
+ local refspec="${!ref}"
+ local s_before s_after s_diff
+ local reference_dir="$project_filer/ref-repos/${name?}"
+ local ref_repo
+ local ret
+ local gerrit_server
+ local gerrit_user
+ local gerrit_keyfile
+
+ strip_var refspec
+ strip_var url
+
+ case "$url" in
+ *${arm_gerrit_url}*)
+ gerrit_server="arm"
+ ;;
+
+ *${tforg_gerrit_url}*)
+ # SSH authentication is required on trustedfirmware.org.
+ gerrit_server="tforg"
+ gerrit_user="$tforg_user"
+ gerrit_keyfile="$tforg_key"
+ ;;
+
+ *)
+ # The project to clone might not be hosted on a Gerrit
+ # server at all (e.g. Github).
+ ;;
+ esac
+
+ # Refspec translation is supported for Gerrit patches only.
+ if [ "$gerrit_server" ]; then
+ refspec="$($ci_root/script/translate_refspec.py \
+ -p "$name" -s "$gerrit_server" -u "$gerrit_user" \
+ -k "$gerrit_keyfile" "$refspec")"
+ fi
+
+ # Clone in the filter workspace
+ mkdir -p "$ci_scratch"
+ pushd "$ci_scratch"
+
+ # Seconds before
+ s_before="$(date +%s)"
+
+ # Clone repository to the directory same as its name; HEAD stays at
+ # master.
+ if [ -d "$reference_dir" ]; then
+ ref_repo="--reference $reference_dir"
+ fi
+ git clone -q $ref_repo "$url" "$name" &>"$clone_log"
+ stat="on branch master"
+
+ pushd "$name"
+
+ if [ "$refspec" ] && [ "$refspec" != "master" ]; then
+ # If a specific revision is specified, always use that.
+ git fetch -q origin "$refspec" &>"$clone_log"
+ git checkout -q FETCH_HEAD &>"$clone_log"
+ stat="refspec $refspec"
+
+ # If it's not a commit hash, have the refspec replicated on the
+ # clone so that downstream jobs can clone from this one using
+ # the same refspec.
+ if echo "$refspec" | grep -qv '^[a-f0-9]\+$'; then
+ git branch "$refspec" FETCH_HEAD
+ fi
+ elif [ "$name" = "$repo_under_test" ]; then
+ # Main repository under test
+ if [ "$GERRIT_REFSPEC" ]; then
+ # Fetch and checkout GERRIT_REFSPEC
+ git fetch -q origin "$GERRIT_REFSPEC" \
+ &>"$clone_log"
+ git checkout -q FETCH_HEAD &>"$clone_log"
+ refspec="$GERRIT_REFSPEC"
+ stat="refspec $refspec"
+ git branch "$refspec" FETCH_HEAD
+ fi
+ elif [ "$GERRIT_TOPIC" ]; then
+ # Auxiliary repository: it's already on master when cloned above.
+ topic="$GERRIT_TOPIC"
+
+ # Check first if there's a Gerrit topic matching the topic of
+ # the main repository under test
+ ret=0
+ refspec="$("$ci_root/script/translate_refspec.py" -p "$name" \
+ -u "$gerrit_user" -k "$gerrit_keyfile" \
+ -s "$gerrit_server" "topic:$topic" 2>/dev/null)" \
+ || ret="$?"
+ if [ "$ret" = 0 ]; then
+ {
+ git fetch -q origin "$refspec"
+ git checkout -q FETCH_HEAD
+ } &>"$clone_log"
+ stat="gerrit topic $topic"
+ git branch "$refspec" FETCH_HEAD
+
+ has_synched=1
+ elif git fetch -q origin "topics/$topic" &>"$clone_log"; then
+ # If there's a remote branch matching the Gerrit topic
+ # name, checkout to that; otherwise, stay on master.
+ git checkout -q FETCH_HEAD &>"$clone_log"
+ refspec="topics/$topic"
+ stat="on branch $refspec"
+ git branch "$refspec" FETCH_HEAD
+
+ has_synched=1
+ fi
+ fi
+
+ # Generate meta data. Eliminate any quoting in commit subject as it
+ # might cause problems when reporting back to Gerrit.
+ meta_data "$name: $stat"
+ meta_data " $(git show --quiet --format=%H): $(git show --quiet --format=%s | sed "s/[\"']/ /g")"
+ meta_data " Commit date: $(git show --quiet --format=%cd)"
+ meta_data
+
+ # Calculate elapsed seconds
+ s_after="$(date +%s)"
+ let "s_diff = $s_after - $s_before" || true
+
+ echo
+ echo "Repository: $url ($stat)"
+ prefix_arrow <(git show --quiet)
+ echo "Cloned in $s_diff seconds"
+ echo
+
+ popd
+ popd
+
+ emit_env "$loc" "$ci_scratch/$name"
+ emit_env "$ref" "$refspec"
+
+ # If this repository is being tested under a Gerrit trigger, set the
+ # Gerrit test groups.
+ if [ "$name" = "$repo_under_test" ]; then
+ # For a Gerrit trigger, it's possible that users publish patch
+ # sets in quick succession. If the CI is already busy, this
+ # leads to more and more triggers queuing up. Also, it's likely
+ # that older patch sets are tested before new ones. But because
+ # there are newer patch sets already in queue, we should avoid
+ # running tests on older ones as their results will be discarded
+ # anyway.
+ pushd "$ci_scratch/$name"
+
+ change_id="$(git show -q --format=%b | awk '/Change-Id/{print $2}')"
+ commit_id="$(git show -q --format=%H)"
+ latest_commit_id="$($ci_root/script/translate_refspec.py \
+ -p "$name" -u "$gerrit_user" -k "$gerrit_keyfile" \
+ -s "$gerrit_server" "change:$change_id")"
+
+ if [ "$commit_id" != "$latest_commit_id" ]; then
+ # Overwrite Gerrit failure message
+ cat <<EOF >"$gerrit_fail_msg_file"
+Patch set $GERRIT_PATCHSET_NUMBER is not the latest; not tested.
+Please await results for the latest patch set.
+EOF
+
+ cat "$gerrit_fail_msg_file"
+ echo
+ die
+ fi
+
+ # Run nominations on this repository
+ rules_file="$ci_root/script/$name.nomination.py"
+ if [ -f "$rules_file" ]; then
+ "$ci_root/script/gen_nomination.py" "$rules_file" > "$nom_file"
+ if [ -s "$nom_file" ]; then
+ emit_env "NOMINATION_FILE" "$nom_file"
+ echo "$name has $(wc -l < $nom_file) test nominations."
+ fi
+ fi
+
+ popd
+
+ # Allow for groups to be overridden
+ GERRIT_BUILD_GROUPS="${GERRIT_BUILD_GROUPS-$gerrit_build_groups}"
+ if [ "$GERRIT_BUILD_GROUPS" ]; then
+ emit_env "GERRIT_BUILD_GROUPS" "$GERRIT_BUILD_GROUPS"
+ fi
+
+ GERRIT_TEST_GROUPS="${GERRIT_TEST_GROUPS-$gerrit_test_groups}"
+ if [ "$GERRIT_TEST_GROUPS" ]; then
+ emit_env "GERRIT_TEST_GROUPS" "$GERRIT_TEST_GROUPS"
+ fi
+ fi
+}
+
+# When triggered from Gerrit, the main repository that is under test. Can be
+# either TF, TFTF, SCP or CI.
+if [ "$GERRIT_REFSPEC" ]; then
+ repo_under_test="${repo_under_test:-$REPO_UNDER_TEST}"
+ repo_under_test="${repo_under_test:?}"
+fi
+
+# Environment file in Java property file format, that's sourced in Jenkins job
+env_file="$workspace/env"
+rm -f "$env_file"
+
+# Workspace on external filer where all repositories get cloned so that they're
+# accessible to all Jenkins slaves.
+if upon "$local_ci"; then
+ ci_scratch="$workspace/filer"
+else
+ scratch_owner="${JOB_NAME:?}-${BUILD_NUMBER:?}"
+ ci_scratch="$project_scratch/$scratch_owner"
+ tforg_key="$CI_BOT_KEY"
+ tforg_user="$CI_BOT_USERNAME"
+fi
+
+if [ -d "$ci_scratch" ]; then
+ # This could be because of jobs of same name running from
+ # production/staging/temporary VMs
+ echo "Scratch space $ci_scratch already exists; removing."
+ rm -rf "$ci_scratch"
+fi
+mkdir -p "$ci_scratch"
+
+# Nomination file
+nom_file="$ci_scratch/nominations"
+
+# Set CI_SCRATCH so that it'll be injected when sub-jobs are triggered.
+emit_param "CI_SCRATCH" "$ci_scratch"
+
+# However, on Jenkins v2, injected environment variables won't override current
+# job's parameters. This means that the current job (the scratch owner, the job
+# that's executing this script) would always observe CI_SCRATCH as empty, and
+# therefore won't be able to remove it. Therefore, use a different variable
+# other than CI_SCRATCH parameter for the current job to refer to the scratch
+# space (although they both will have the same value!)
+emit_env "SCRATCH_OWNER" "$scratch_owner"
+emit_env "SCRATCH_OWNER_SPACE" "$ci_scratch"
+
+strip_var CI_ENVIRONMENT
+if [ "$CI_ENVIRONMENT" ]; then
+ {
+ echo
+ echo "Injected environment:"
+ prefix_tab <(echo "$CI_ENVIRONMENT")
+ echo
+ } >> "$inject_data"
+
+ cat "$inject_data"
+
+ tmp_env=$(mktempfile)
+ echo "$CI_ENVIRONMENT" > "$tmp_env"
+ source "$tmp_env"
+ cat "$tmp_env" >> "$env_file"
+
+ has_env_inject=1
+fi
+
+if [ "$GERRIT_BRANCH" ]; then
+ # Overrides targeting a specific Gerrit branch.
+ target_branch_override="$ci_overrides/branch/$GERRIT_BRANCH/env"
+ if [ -f "$target_branch_override" ]; then
+ display_override "$target_branch_override"
+
+ {
+ echo
+ echo "Target branch overrides:"
+ prefix_tab "$target_branch_override"
+ echo
+ } >> "$override_data"
+
+ cat "$override_data"
+
+ source "$target_branch_override"
+ cat "$target_branch_override" >> "$env_file"
+
+ has_overrides=1
+ fi
+fi
+
+TF_REFSPEC="${tf_refspec:-$TF_REFSPEC}"
+if not_upon "$no_tf"; then
+ # Clone Trusted Firmware repository
+ url="$tf_src_repo_url" name="trusted-firmware" ref="TF_REFSPEC" \
+ loc="TF_CHECKOUT_LOC" \
+ gerrit_build_groups="tf-gerrit-build" \
+ gerrit_test_groups="tf-gerrit-tests tf-gerrit-tftf" \
+ clone_and_sync
+fi
+
+TFTF_REFSPEC="${tftf_refspec:-$TFTF_REFSPEC}"
+if not_upon "$no_tftf"; then
+ # Clone Trusted Firmware TF repository
+ url="$tftf_src_repo_url" name="trusted-firmware-tf" ref="TFTF_REFSPEC" \
+ loc="TFTF_CHECKOUT_LOC" \
+ gerrit_test_groups="tftf-master-build tftf-master-fwu tftf-l1" \
+ clone_and_sync
+fi
+
+SCP_REFSPEC="${scp_refspec:-$SCP_REFSPEC}"
+if upon "$clone_scp"; then
+ # Clone SCP Firmware repository
+ # NOTE: currently scp/firmware:master is not tracking the upstream.
+ # Therefore, if the url is gerrit.oss.arm.com/scp/firmware and there is
+ # no ref_spec, then set the ref_spec to master-upstream.
+ scp_src_repo_default="http://gerrit.oss.arm.com/scp/firmware"
+ if [ "$scp_src_repo_url" = "$scp_src_repo_default" ]; then
+ SCP_REFSPEC="${SCP_REFSPEC:-master-upstream}"
+ fi
+
+ url="$scp_src_repo_url" name="scp" ref="SCP_REFSPEC" \
+ loc="SCP_CHECKOUT_LOC" clone_and_sync
+
+ pushd "$ci_scratch/scp"
+
+ # Edit the submodule URL to point to the reference repository so that
+ # all submodule updates pick from the reference repository instead of
+ # Github.
+ cmsis_ref_repo="${cmsis_root:-$project_filer/ref-repos/cmsis}"
+ if [ -d "$cmsis_ref_repo" ]; then
+ cmsis_reference="--reference $cmsis_ref_repo"
+ fi
+
+ git submodule -q update $cmsis_reference --init
+
+ popd
+fi
+
+CI_REFSPEC="${ci_refspec:-$CI_REFSPEC}"
+if not_upon "$no_ci"; then
+ # Clone Trusted Firmware CI repository
+ url="$tf_ci_repo_url" name="trusted-firmware-ci" ref="CI_REFSPEC" \
+ loc="CI_ROOT" gerrit_test_groups="ci-l1" \
+ clone_and_sync
+fi
+
+if [ "$GERRIT_BRANCH" ]; then
+ # If this CI run was in response to a Gerrit commit, post a comment back
+ # to the patch set calling out everything that we've done so far. This
+ # reassures both the developer and the reviewer about CI refspecs used
+ # for CI testing.
+ #
+ # Note the extra quoting for the message, which Gerrit requires.
+ if upon "$has_synched"; then
+ echo "Branches synchronized:" >> "$gerrit_data"
+ echo >> "$gerrit_data"
+ cat "$clone_data" >> "$gerrit_data"
+ fi
+
+ if upon "$has_overrides"; then
+ cat "$override_data" >> "$gerrit_data"
+ fi
+
+ if upon "$has_env_inject"; then
+ cat "$inject_data" >> "$gerrit_data"
+ fi
+
+ if [ -s "$gerrit_data" ]; then
+ msg_file="$gerrit_data" post_gerrit_comment
+ fi
+fi
+
+# Copy environment file to ci_scratch for sub-jobs' access
+cp "$env_file" "$ci_scratch"
+cp "$param_file" "$ci_scratch"
+
+# Copy clone data so that it's available for sub-jobs' HTML reporting
+if [ -f "$clone_data" ]; then
+ cp "$clone_data" "$ci_scratch"
+fi
+
+# vim: set tw=80 sw=8 noet:
diff --git a/script/coverity_parser.py b/script/coverity_parser.py
new file mode 100644
index 0000000..5cb31aa
--- /dev/null
+++ b/script/coverity_parser.py
@@ -0,0 +1,177 @@
+#!/usr/bin/env python3
+#
+# Copyright (c) 2019, Arm Limited. All rights reserved.
+#
+# SPDX-License-Identifier: BSD-3-Clause
+#
+
+import argparse
+import json
+import re
+import shutil
+import sys
+
+
+_rule_exclusions = [
+ "MISRA C-2012 Rule 8.6",
+ "MISRA C-2012 Rule 5.1"
+]
+
+# The following classification of rules and directives include 'MISRA C:2012
+# Amendment 1'
+
+# Directives
+_dir_required = set(["1.1", "2.1", "3.1", "4.1", "4.3", "4.7", "4.10", "4.11",
+ "4.12", "4.14"])
+
+_dir_advisory = set(["4.2", "4.4", "4.5", "4.6", "4.8", "4.9", "4.13"])
+
+# Rules
+_rule_mandatory = set(["9.1", "9.2", "9.3", "12.5", "13.6", "17.3", "17.4",
+ "17.6", "19.1", "21.13", "21.17", "21.18", "21.19", "21.20", "22.2", "22.5",
+ "22.6"])
+
+_rule_required = set(["1.1", "1.3", "2.1", "2.2", "3.1", "3.2", "4.1", "5.1",
+ "5.2", "5.3", "5.4", "5.5", "5.6", "5.7", "5.8", "6.1", "6.2", "7.1", "7.2",
+ "7.3", "7.4", "8.1", "8.2", "8.3", "8.4", "8.5", "8.6", "8.7", "8.8",
+ "8.10", "8.12", "8.14", "9.2", "9.3", "9.4", "9.5", "10.1", "10.2", "10.3",
+ "10.4", "10.6", "10.7", "10.8", "11.1", "11.2", "11.3", "11.6", "11.7",
+ "11.8", "11.9", "12.2", "13.1", "13.2", "13.5", "14.1", "14.2", "14.3",
+ "14.4", "15.2", "15.3", "15.6", "15.7", "16.1", "16.2", "16.3", "16.4",
+ "16.5", "16.6", "16.7", "17.1", "17.2", "17.7", "18.1", "18.2", "18.3",
+ "18.6", "18.7", "18.8", "20.3", "20.4", "20.6", "20.7", "20.8", "20.9",
+ "20.11", "20.12", "20.13", "20.14", "21.1", "21.2", "21.3", "21.4", "21.5",
+ "21.6", "21.7", "21.8", "21.9", "21.10", "21.11", "21.14", "21.15", "21.16",
+ "22.1", "22.3", "22.4", "22.7", "22.8", "22.9", "22.10"])
+
+_rule_advisory = set(["1.2", "2.3", "2.4", "2.5", "2.6", "2.7", "4.2", "5.9",
+ "8.9", "8.11", "8.13", "10.5", "11.4", "11.5", "12.1", "12.3", "12.4",
+ "13.3", "13.4", "15.1", "15.4", "15.5", "17.5", "17.8", "18.4", "18.5",
+ "19.2", "20.1", "20.2", "20.5", "20.10", "21.12"])
+
+
+_checker_lookup = {
+ "Directive": {
+ "required": _dir_required,
+ "advisory": _dir_advisory
+ },
+ "Rule": {
+ "mandatory": _rule_mandatory,
+ "required": _rule_required,
+ "advisory": _rule_advisory
+ }
+ }
+
+_checker_re = re.compile(r"""(?P<kind>\w+) (?P<number>[\d\.]+)$""")
+
+
+def _classify_checker(checker):
+ match = _checker_re.search(checker)
+ if match:
+ kind, number = match.group("kind"), match.group("number")
+ for classification, class_set in _checker_lookup[kind].items():
+ if number in class_set:
+ return classification
+
+ return "unknown"
+
+
+# Return a copy of the original issue description. Update file path to strip
+# heading '/', and also insert CID.
+def _new_issue(cid, orig_issue):
+ checker = orig_issue["checker"]
+ classification = _classify_checker(checker)
+
+ return {
+ "cid": cid,
+ "file": orig_issue["file"].lstrip("/"),
+ "line": orig_issue["mainEventLineNumber"],
+ "checker": checker,
+ "classification": classification,
+ "description": orig_issue["mainEventDescription"]
+ }
+
+
+def _cls_string(issue):
+ cls = issue["classification"]
+
+ return " (" + cls + ")" if cls != "unknown" else ""
+
+
+# Given an issue, make a string formed of file name, line number, checker, and
+# the CID. This could be used as a dictionary key to identify unique defects
+# across the scan. Convert integers to zero-padded strings for proper sorting.
+def make_key(i):
+ return (i["file"] + str(i["line"]).zfill(5) + i["checker"] +
+ str(i["cid"]).zfill(5))
+
+
+# Iterate through all issues that are not ignored. If show_all is set, only
+# issues that are not in the comparison snapshot are returned.
+def iter_issues(path, show_all=False):
+ with open(path, encoding="utf-8") as fd:
+ report = json.load(fd)
+
+ # Unconditional filter
+ filters = [lambda i: ((i["triage"]["action"] != "Ignore") and
+ (i["occurrences"][0]["checker"] not in _rule_exclusions))]
+
+ # Whether we need diffs only
+ if not show_all:
+ # Pick only issues that are not present in comparison snapshot
+ filters.append(lambda i: not i["presentInComparisonSnapshot"])
+
+ # Pick issue when all filters are true
+ filter_func = lambda i: all([f(i) for f in filters])
+
+ # Top-level is a group of issues, all sharing a common CID
+ for issue_group in filter(filter_func, report["issueInfo"]):
+ # Pick up individual occurrence of the CID
+ for occurrence in issue_group["occurrences"]:
+ yield _new_issue(issue_group["cid"], occurrence)
+
+
+# Format issue (returned from iter_issues()) as text.
+def format_issue(issue):
+ return ("{file}:{line}:[{checker}{cls}]<{cid}> {description}").format_map(
+ dict(issue, cls=_cls_string(issue)))
+
+
+# Format issue (returned from iter_issues()) as HTML table row.
+def format_issue_html(issue):
+ cls = _cls_string(issue)
+ cov_class = "cov-" + issue["classification"]
+
+ return """\
+<tr class="{cov_class}">
+ <td class="cov-file">{file}</td>
+ <td class="cov-line">{line}</td>
+ <td class="cov-checker">{checker}{cls}</td>
+ <td class="cov-cid">{cid}</td>
+ <td class="cov-description">{description}</td>
+</tr>""".format_map(dict(issue, cls=cls, cov_class=cov_class))
+
+
+if __name__ == "__main__":
+ parser = argparse.ArgumentParser()
+
+ parser.add_argument("--all", default=False, dest="show_all",
+ action="store_const", const=True, help="List all issues")
+ parser.add_argument("--output",
+ help="File to output filtered defects to in JSON")
+ parser.add_argument("json_report")
+
+ opts = parser.parse_args()
+
+ issues = []
+ for issue in sorted(iter_issues(opts.json_report, opts.show_all),
+ key=lambda i: make_key(i)):
+ print(format_issue(issue))
+ issues.append(issue)
+
+ if opts.output:
+ # Dump selected issues
+ with open(opts.output, "wt") as fd:
+ fd.write(json.dumps(issues))
+
+ sys.exit(int(len(issues) > 0))
diff --git a/script/coverity_summary.py b/script/coverity_summary.py
new file mode 100755
index 0000000..f6a2087
--- /dev/null
+++ b/script/coverity_summary.py
@@ -0,0 +1,91 @@
+#!/usr/bin/env python3
+#
+# Copyright (c) 2019, Arm Limited. All rights reserved.
+#
+# SPDX-License-Identifier: BSD-3-Clause
+#
+#
+# Given URL to a job instance, this script walks the job hierarchy, and inspects
+# for Coverity scan report. CIDs from individual reports are collected and
+# printed as a summary of defects for the entire scan.
+
+import argparse
+import coverity_parser
+import job_walker
+import json
+import sys
+import urllib.request
+
+parser = argparse.ArgumentParser()
+parser.add_argument("build_url",
+ help="URL to specific build number to walk")
+opts = parser.parse_args()
+
+# Parse the given job
+top = job_walker.JobInstance(opts.build_url)
+top.parse()
+
+# Iterate through terminal jobs, i.e., those with a config, viz. tf-worker
+merged_issues = {}
+for job in filter(lambda j: j.config, top.walk()):
+ # Collect CIDs from archived defects.json
+ try:
+ # Open json as text, not bytes
+ with job.open_artefact("defects.json", text=True) as fd:
+ issues = json.load(fd)
+ except urllib.error.HTTPError:
+ print("warning: unable to read defects.json from " + job.url,
+ file=sys.stderr)
+ continue
+
+ merged_issues.update({coverity_parser.make_key(i): i for i in issues})
+
+# Sort merged issues by file name, line number, checker, and then CID.
+sorted_issue_keys = sorted(merged_issues.keys())
+
+if sorted_issue_keys:
+ # Generate HTML table with issue description
+ print("""
+<style>
+#coverity-table {
+ display: block;
+ max-height: 600px;
+ overflow-y: auto;
+}
+#coverity-table thead td {
+ font-weight: bold;
+}
+#coverity-table td {
+ font-size: 0.9em;
+}
+#coverity-table .cov-file {
+ color: brown;
+}
+#coverity-table .cov-line {
+ color: darkviolet;
+}
+#coverity-table .cov-cid {
+ color: orangered;
+}
+#coverity-table .cov-mandatory {
+ background-color: #ff4d4d;
+}
+#coverity-table .cov-required {
+ background-color: #ffcccc;
+}
+</style>
+<table id="coverity-table" cellpadding="2">
+<thead>
+<tr>
+ <td>File</td>
+ <td>Line</td>
+ <td>Checker</td>
+ <td>CID</td>
+ <td>Description</td>
+</tr>
+</thead><tbody>""")
+ for key in sorted_issue_keys:
+ print(coverity_parser.format_issue_html(merged_issues[key]))
+ print("</tbody></table>")
+ print('<div style="line-height: 3em; font-weight: bold;">{} defects reported.</div>'.format(
+ len(sorted_issue_keys)))
diff --git a/script/coverity_wrapper.sh b/script/coverity_wrapper.sh
new file mode 100644
index 0000000..fed41e4
--- /dev/null
+++ b/script/coverity_wrapper.sh
@@ -0,0 +1,368 @@
+#!/bin/bash
+#
+# Copyright (c) 2019, Arm Limited. All rights reserved.
+#
+# SPDX-License-Identifier: BSD-3-Clause
+#
+
+# This file is sourced from the build_package.sh script to use
+# coverity_wrapper() function as a build wrapper.
+#
+# This wrapper supports two work flows:
+#
+# - Compare the branch under test with that of master, and print defects. If
+# there are defects, we arrange the build to be marked as unstable. Set
+# $cov_run_type to 'branch-report-compare' to use this.
+#
+# - Commit and create snapshot for the entire branch. Set $cov_run_type to
+# 'branch-report-full' to use this.
+#
+# Coverity analysis involves contacting the server, which has shown to be very
+# slow. Depending on the type of analysis performed, we might have to do
+# analysis more than once, and doing that in series would only increase the turn
+# around time. To mitigate this, all Coverity commands are saved as small
+# snippets, and are then called from a Makefile. Make takes care of running
+# commands in parallel (all this at the expense of readability).
+
+coverity_wrapper() {
+ local cov_dir="$workspace/coverity"
+ local cov_config="$cov_dir/config"
+ local cov_compiler="${cov_compiler:-${CROSS_COMPILE}gcc}"
+
+ local golden_repo="$cov_dir/golden-repo"
+ local golden_snapshot="$cov_dir/golden-snapshot"
+
+ local branch_repo="$cov_dir/branch-repo"
+ local branch_snapshot="$cov_dir/branch-snapshot"
+
+ local auth_file="${cov_auth_file:-$ci_root/coverity/tfcibot@$coverity_host}"
+ local makefile="$workspace/makefile.cov"
+ local snippets_dir="$cov_dir/snippets"
+ local stream_name="${BUILD_CONFIG:?}"
+
+ local ref_arg
+ local description
+ local need_compare
+
+ echo_w
+ mkdir -p "$cov_dir"
+
+ if echo "${cov_run_type:?}" | grep -iq "branch-report-compare"; then
+ need_compare=1
+ local golden_url="${cov_golden_url:-$tf_src_repo_url}"
+ local golden_ref="${cov_golden_ref:-master}"
+ fi
+
+ if upon "$local_ci"; then
+ description="$USER-local ${cov_checker:?}"
+ # Reference repository can't be shallow
+ if [ ! -f "$tf_root/.git/shallow" ]; then
+ ref_arg="--reference $tf_root"
+ fi
+ else
+ description="$JOB_NAME#$BUILD_NUMBER ${cov_checker:?}"
+ ref_arg="--reference $project_filer/ref-repos/trusted-firmware"
+ fi
+
+ # Create a stream and assign to Trusted Firmware project
+ chmod 400 "$auth_file"
+
+ mkdir -p "$snippets_dir"
+ cat <<EOF >"$makefile"
+SHELL := /bin/bash
+
+define run-snippet
+echo ":\$@" >&3
+echo ">\$@: \$\$(date)"
+if ! bash -ex $snippets_dir/\$@; then \\
+ echo " :\$@ failed! See build log" >&3; \\
+ exit 1; \\
+fi
+echo "<\$@: \$\$(date)"
+endef
+
+EOF
+
+ create_snippet() {
+ # Create a script snippet
+ cat >"$snippets_dir/${name?}"
+
+ # Add a rule to the makefile
+ cat <<EOF >>"$makefile"
+$name:${deps:+ $deps}
+ @\$(run-snippet)
+
+EOF
+ }
+
+ # golden-setup. Additionally query for a snapshot ID corresponding to
+ # this version in the stream. If a snapshot ID exists, the comparison
+ # file is generated containing the snapshot ID.
+ #
+ # We need to make a shallow clone of the repository first in order to
+ # get the reference, however. And, if we later find we need a fresh
+ # snapshot, we unshallow it.
+ cat <<EOF | name="golden-setup" create_snippet
+git clone --depth 1 -q $ref_arg "$golden_url" "$golden_repo"
+cd -P "$golden_repo"
+git fetch --depth 1 -q origin "$golden_ref"
+git checkout -q FETCH_HEAD
+
+if [ -z "$cov_force_commit" ]; then
+ "$ci_root/script/get_latest_snapshot.py" \\
+ --host "$coverity_host" \\
+ --file "$golden_snapshot" \\
+ --description "*$cov_checker*" \\
+ --version "\$(git show -q --format=%H)" \\
+ "$stream_name" 2>&3 || true
+fi
+
+{
+echo " golden: $golden_url $golden_ref"
+echo " golden: \$(git show -q --format=%H)"
+} >&3
+
+if [ -f "$golden_snapshot" ]; then
+ echo " golden: snapshot ID \$(cat $golden_snapshot) exists" >&3
+else
+ git fetch -q --unshallow origin
+fi
+EOF
+
+
+ # Setup branch
+ if upon "$local_ci"; then
+ if not_upon "$need_compare"; then
+ ln -s "$tf_root" "$branch_repo"
+
+ # Run scanning as-is since we don't need a comparison.
+ cat <<EOF | name="branch-setup" create_snippet
+if [ "$dont_clean" != 1 ]; then
+ cd -P "$branch_repo"
+ MAKEFLAGS= make distclean
+fi
+EOF
+ else
+ # Running comparison means that we need to make a merge
+ # commit. It's undesirable to do that on the user's
+ # working copy, so do it on a separate one.
+ cat <<EOF | name="branch-setup" create_snippet
+git clone -q $ref_arg "$tf_src_repo_url" "$branch_repo"
+cd -P "$branch_repo"
+git checkout -b cov-branch origin/master
+rsync -a --exclude=".git" --exclude "**.o" --exclude "**.d" "$tf_root/" .
+git add .
+git -c user.useconfigonly=false commit --allow-empty -q -m "Test branch"
+git checkout master
+git -c user.useconfigonly=false merge --no-ff -q cov-branch
+
+git remote add golden "$golden_url"
+git fetch -q golden "$golden_ref"
+git checkout -q -b cov-golden FETCH_HEAD
+git -c user.useconfigonly=false merge --no-edit --no-ff -q cov-branch
+EOF
+ fi
+ else
+ # Use the local checkout at $tf_root for analysing branch and
+ # golden together
+ ln -s "$tf_root" "$branch_repo"
+
+ cat <<EOF | name="branch-setup" create_snippet
+if [ "$need_compare" ]; then
+ cd -P "$branch_repo"
+ if [ -f ".git/shallow" ]; then
+ git fetch -q --unshallow origin
+ fi
+ git remote add golden "$golden_url"
+ git fetch -q golden $golden_ref
+ git branch cov-branch HEAD
+ git checkout -q -b cov-golden FETCH_HEAD
+ echo " branch: \$(git show -q --format=%H cov-branch)" >&3
+ git -c user.useconfigonly=false merge --no-edit --no-ff -q cov-branch
+fi
+EOF
+ fi
+
+
+ # Setup stream
+ cat <<EOF | name="stream-setup" create_snippet
+if cov-manage-im --mode streams --add --set "name:$stream_name" \\
+ --auth-key-file "$auth_file" \\
+ --host "$coverity_host"; then
+ cov-manage-im --mode projects --name "Arm Trusted Firmware" --update \\
+ --insert "stream:$stream_name" --auth-key-file "$auth_file" \\
+ --host "$coverity_host"
+fi
+EOF
+
+
+ # Coverity configuration
+ cat <<EOF | name="cov-config" create_snippet
+cov-configure --comptype gcc --template --compiler "$cov_compiler" \\
+ --config "$cov_config/config.xml"
+EOF
+
+
+ # cov-build on golden; only performed if a comparison file doesn't
+ # exist.
+ cat <<EOF | name="golden-cov-build" deps="cov-config golden-setup" \
+ create_snippet
+if [ ! -f "$golden_snapshot" -o -n "$cov_force_commit" ]; then
+ cd -P "$golden_repo"
+ MAKEFLAGS= cov-build --config "$cov_config/config.xml" \\
+ --dir "$cov_dir/golden" $@
+else
+ echo " golden: cov-build skipped" >&3
+fi
+EOF
+
+
+ # cov-analyze on golden; only performed if a comparison file doesn't
+ # exist.
+ cat <<EOF | name="golden-cov-analyze" deps="golden-cov-build" \
+ create_snippet
+if [ ! -f "$golden_snapshot" -o -n "$cov_force_commit" ]; then
+ cd -P "$golden_repo"
+ cov-analyze --dir "$cov_dir/golden" $cov_options --verbose 0 \\
+ --strip-path "\$(pwd -P)" \\
+ --redirect "stdout,$cov_dir/golden.txt"
+else
+ echo " golden: cov-analyze skipped" >&3
+fi
+EOF
+
+
+ # cov-commit-defects on golden. Since more than one job could have
+ # started analyzing golden after finding the snapshot missing, we check
+ # for a snapshot again, and a commit only performed if a comparison file
+ # doesn't exist.
+ cat <<EOF | name="golden-cov-commit-defects" \
+ deps="stream-setup golden-cov-analyze" create_snippet
+if [ ! -f "$golden_snapshot" -a -z "$cov_force_commit" ]; then
+ "$ci_root/script/get_latest_snapshot.py" \\
+ --host "$coverity_host" \\
+ --file "$golden_snapshot" \\
+ --description "*$cov_checker*" \\
+ --version "\$(git show -q --format=%H)" \\
+ "$stream_name" 2>&3 || true
+ retried=1
+fi
+
+if [ ! -f "$golden_snapshot" -o -n "$cov_force_commit" ]; then
+ cd -P "$golden_repo"
+ cov-commit-defects --dir "$cov_dir/golden" --host "$coverity_host" \\
+ --stream "$stream_name" --auth-key-file "$auth_file" \\
+ --version "\$(git show -q --format=%H)" \\
+ --description "$description" \\
+ --snapshot-id-file "$golden_snapshot"
+ echo " golden: new snapshot ID: \$(cat $golden_snapshot)" >&3
+elif [ "\$retried" ]; then
+ {
+ echo " golden: snapshot ID \$(cat $golden_snapshot) now exists"
+ echo " golden: cov-commit-defects skipped"
+ } >&3
+else
+ echo " golden: cov-commit-defects skipped" >&3
+fi
+EOF
+
+
+ # cov-build on branch
+ cat <<EOF | name="branch-cov-build" deps="cov-config branch-setup" \
+ create_snippet
+cd -P "$branch_repo"
+MAKEFLAGS= cov-build --config "$cov_config/config.xml" --dir "$cov_dir/branch" $@
+EOF
+
+
+ # cov-analyze on branch
+ cat <<EOF | name="branch-cov-analyze" deps="branch-cov-build" \
+ create_snippet
+cd -P "$branch_repo"
+cov-analyze --dir "$cov_dir/branch" $cov_options --verbose 0 \\
+ --strip-path "\$(pwd -P)" \\
+ --redirect "stdout,$cov_dir/branch.txt"
+EOF
+
+
+ # cov-commit-defects on branch
+ cat <<EOF | name="branch-cov-commit-defects" \
+ deps="stream-setup branch-cov-analyze" create_snippet
+if [ "$cov_force_commit" ]; then
+ cd -P "$branch_repo"
+ cov-commit-defects --dir "$cov_dir/branch" --host "$coverity_host" \\
+ --stream "$stream_name" --description "$description" \\
+ --version "\$(git show -q --format=%H)" \\
+ --auth-key-file "$auth_file" \\
+ --snapshot-id-file "$branch_snapshot"
+ echo " branch: new snapshot ID: \$(cat $branch_snapshot)" >&3
+else
+ echo " branch: cov-commit-defects skipped" >&3
+fi
+EOF
+
+
+ # cov-commit-defects on branch, but compare with golden
+ cat <<EOF | name="branch-report-compare" \
+ deps="golden-cov-commit-defects branch-cov-analyze" create_snippet
+cov-commit-defects --dir "$cov_dir/branch" --host "$coverity_host" \\
+ --stream "$stream_name" --auth-key-file "$auth_file" \\
+ --preview-report-v2 "$cov_dir/report.json" \\
+ --comparison-snapshot-id "\$(cat $golden_snapshot)"
+EOF
+
+
+ # cov-commit-defects on branch to report branch report
+ cat <<EOF | name="branch-report-full" \
+ deps="branch-cov-commit-defects stream-setup branch-cov-analyze" \
+ create_snippet
+cov-commit-defects --dir "$cov_dir/branch" --host "$coverity_host" \\
+ --stream "$stream_name" --auth-key-file "$auth_file" \\
+ --preview-report-v2 "$cov_dir/report.json"
+EOF
+
+ local minus_j="-j"
+ if upon "$cov_serial_build"; then
+ minus_j=
+ fi
+
+ # Call Coverity targets
+ echo "Coverity run type: ${cov_run_type:?}"
+ if ! eval MAKEFLAGS= make -r $minus_j -f "$makefile" $cov_run_type; then
+ return 1
+ fi
+
+ # Generate a text report
+ local defects_file="$workspace/coverity_report.txt"
+
+ if [ -f "$cov_dir/report.json" ]; then
+ python3 "$ci_root/script/coverity_parser.py" \
+ --output "$workspace/defects.json" \
+ $cov_report_options \
+ "$cov_dir/report.json" >"$defects_file" 2>&3 || true
+ fi
+
+ # If there were defects, print them out to the console. For local CI,
+ # print them in yellow--the same color we'd use for UNSTABLE builds.
+ if [ -s "$defects_file" ]; then
+ echo_w
+ echo_w "Coverity defects found:"
+ echo_w
+ if upon "$local_ci"; then
+ echo_w "$(tput setaf 3)"
+ fi
+ cat "$defects_file" >&3
+ if upon "$local_ci"; then
+ echo_w "$(tput sgr0)"
+ fi
+ echo_w
+ echo_w "$(wc -l < "$defects_file") defects reported."
+ echo_w
+ build_unstable >&3
+ echo_w
+ else
+ echo_w
+ echo_w "No coverity defects found."
+ echo_w
+ fi
+}
diff --git a/script/download_linaro_release.sh b/script/download_linaro_release.sh
new file mode 100755
index 0000000..6f43307
--- /dev/null
+++ b/script/download_linaro_release.sh
@@ -0,0 +1,25 @@
+#!/bin/bash
+#
+# Copyright (c) 2019, Arm Limited. All rights reserved.
+#
+# SPDX-License-Identifier: BSD-3-Clause
+#
+
+# Given the name of the release (e.g., 18.04), this script downloads all
+# Linaro release archives to the current directory, verifies, extracts, and
+# finally removes the archive files.
+
+set -e
+
+# Download all ZIP files from the chosen Linaro release
+time wget -q -c -m -A .zip -np -nd "https://releases.linaro.org/members/arm/platforms/${1:?}/"
+
+# Uncompress each ZIP file in its own directory (named after the ZIP file)
+for zipfile in $(echo *.zip); do
+ echo
+ echo "Uncompressing file $zipfile"
+
+ unzip -d "${zipfile%.zip}" "$zipfile"
+done
+
+rm -f *.zip
diff --git a/script/find_fragment_users.sh b/script/find_fragment_users.sh
new file mode 100755
index 0000000..8a35b93
--- /dev/null
+++ b/script/find_fragment_users.sh
@@ -0,0 +1,29 @@
+#!/bin/bash
+#
+# Copyright (c) 2019, Arm Limited. All rights reserved.
+#
+# SPDX-License-Identifier: BSD-3-Clause
+#
+
+ci_root="$(readlink -f "$(dirname "$0")/..")"
+run_config_dir="$ci_root/run_config"
+
+run_config="$1"
+if [ -z "$run_config" ]; then
+	echo "Run config expected as parameter"
+ exit 1
+elif [ ! -f "$run_config_dir/$run_config" ]; then
+ echo "Run config $run_config not found"
+ exit 1
+fi
+
+for test_config in $(cd "$ci_root/group" && find -type f -printf "%P\n"); do
+	if echo "$test_config" | grep -q ":nil$"; then
+ continue;
+ fi
+
+ if "$ci_root/script/gen_run_config_candidates.py" "$test_config" | \
+ grep -q "^$run_config$"; then
+ echo "$test_config"
+ fi
+done
diff --git a/script/gen_juno_linux_yaml.sh b/script/gen_juno_linux_yaml.sh
new file mode 100755
index 0000000..d7946de
--- /dev/null
+++ b/script/gen_juno_linux_yaml.sh
@@ -0,0 +1,88 @@
+#!/bin/bash
+#
+# Copyright (c) 2019, Arm Limited. All rights reserved.
+#
+# SPDX-License-Identifier: BSD-3-Clause
+#
+
+# Generate a YAML file in order to dispatch Juno runs on LAVA. Note that this
+# script would produce a meaningful output when run via Jenkins
+#
+# $bin_mode must be set. This script outputs to STDOUT
+
+ci_root="$(readlink -f "$(dirname "$0")/..")"
+source "$ci_root/utils.sh"
+source "$ci_root/juno_utils.sh"
+
+get_recovery_image_url() {
+ local build_job="tf-build"
+ local bin_mode="${bin_mode:?}"
+
+ if upon "$jenkins_run"; then
+ echo "$jenkins_url/job/$JOB_NAME/$BUILD_NUMBER/artifact/artefacts/$bin_mode/juno_recovery.zip"
+ else
+ echo "file://$workspace/artefacts/$bin_mode/juno_recovery.zip"
+ fi
+}
+
+bootloader_prompt="${bootloader_prompt:-juno#}"
+juno_revision="${juno_revision:-juno-r0}"
+recovery_img_url="${recovery_img_url:-$(get_recovery_image_url)}"
+nfs_rootfs="${nfs_rootfs:-$juno_rootfs_url}"
+linux_prompt="${linux_prompt:-root@(.*):~#}"
+
+cat <<EOF
+device_type: juno
+job_name: tf-juno
+
+context:
+ bootloader_prompt: $bootloader_prompt
+
+tags:
+- $juno_revision
+
+timeouts:
+ # Global timeout value for the whole job.
+ job:
+ minutes: 30
+ # Unless explicitly overwritten, no single action should take more than
+ # 10 minutes to complete.
+ action:
+ minutes: 10
+
+priority: medium
+visibility: public
+
+actions:
+
+- deploy:
+ namespace: recovery
+ to: vemsd
+ recovery_image:
+ url: $recovery_img_url
+ compression: zip
+
+- deploy:
+ namespace: target
+ to: nfs
+ os: debian
+ nfsrootfs:
+ url: $nfs_rootfs
+ compression: gz
+
+- boot:
+ # Drastically increase the timeout for the boot action because of the udev
+ # issues when using TF build config "juno-all-cpu-reset-ops".
+ # TODO: Should increase the timeout only for this TF build config, not all!
+ timeout:
+ minutes: 15
+ namespace: target
+ connection-namespace: recovery
+ method: u-boot
+ commands: norflash
+ auto-login:
+ login_prompt: 'login:'
+ username: root
+ prompts:
+ - $linux_prompt
+EOF
diff --git a/script/gen_juno_tftf_yaml.sh b/script/gen_juno_tftf_yaml.sh
new file mode 100755
index 0000000..4229a47
--- /dev/null
+++ b/script/gen_juno_tftf_yaml.sh
@@ -0,0 +1,86 @@
+#!/bin/bash
+#
+# Copyright (c) 2019, Arm Limited. All rights reserved.
+#
+# SPDX-License-Identifier: BSD-3-Clause
+#
+
+# Generate a YAML file in order to dispatch Juno TFTF runs on LAVA. Note that
+# this script would produce a meaningful output when run via Jenkins.
+#
+# $bin_mode must be set. This script outputs to STDOUT
+
+ci_root="$(readlink -f "$(dirname "$0")/..")"
+source "$ci_root/utils.sh"
+
+get_recovery_image_url() {
+ local build_job="tf-build"
+ local bin_mode="${bin_mode:?}"
+
+ if upon "$jenkins_run"; then
+ echo "$jenkins_url/job/$JOB_NAME/$BUILD_NUMBER/artifact/artefacts/$bin_mode/juno_recovery.zip"
+ else
+ echo "file://$workspace/artefacts/$bin_mode/juno_recovery.zip"
+ fi
+}
+
+juno_revision="${juno_revision:-juno-r0}"
+recovery_img_url="${recovery_img_url:-$(get_recovery_image_url)}"
+
+cat <<EOF
+device_type: juno
+job_name: tf-juno
+
+tags:
+- $juno_revision
+
+timeouts:
+ # Global timeout value for the whole job.
+ job:
+ minutes: 45
+ actions:
+ lava-test-monitor:
+ seconds: 120
+ connections:
+ lava-test-monitor:
+ seconds: 120
+
+priority: medium
+visibility: public
+
+actions:
+
+- deploy:
+ timeout:
+ minutes: 10
+ to: vemsd
+ recovery_image:
+ url: $recovery_img_url
+ compression: zip
+
+- boot:
+ method: minimal
+
+- test:
+ # Timeout for all the TFTF tests to complete.
+ timeout:
+ minutes: 30
+
+ monitors:
+ - name: TFTF
+ # LAVA looks for a testsuite start string...
+ start: 'Booting trusted firmware test framework'
+ # ...and a testsuite end string.
+ end: 'Exiting tests.'
+
+ # For each test case, LAVA looks for a string which includes the testcase
+ # name and result.
+ pattern: "(?s)> Executing '(?P<test_case_id>.+)'(.*) TEST COMPLETE\\\s+(?P<result>(Skipped|Passed|Failed|Crashed))"
+
+ # Teach to LAVA how to interpret the TFTF Tests results.
+ fixupdict:
+ Passed: pass
+ Failed: fail
+ Crashed: fail
+ Skipped: skip
+EOF
diff --git a/script/gen_nomination.py b/script/gen_nomination.py
new file mode 100755
index 0000000..fb930af
--- /dev/null
+++ b/script/gen_nomination.py
@@ -0,0 +1,122 @@
+#!/usr/bin/env python3
+#
+# Copyright (c) 2019, Arm Limited. All rights reserved.
+#
+# SPDX-License-Identifier: BSD-3-Clause
+#
+
+# This script examines the checked out copy of a Git repository, inspects the
+# touched files in a commit, and then determines what test configs are suited to
+# be executed when testing the repository.
+#
+# The test nominations are based on the paths touched in a commit: for example,
+# when foo/bar is touched, run test blah:baz. All nominations are grouped under
+# NOMINATED directory.
+#
+# The script must be invoked from within a Git clone.
+
+import argparse
+import functools
+import os
+import re
+import subprocess
+import sys
+
+
+class Commit:
+    # REs to identify diff headers
+ diff_re = re.compile(r"[+-]")
+ hunk_re = re.compile(r"(\+{3}|-{3}) [ab]/")
+
+ # A diff line looks like a diff, of course, but is not a hunk header
+ is_diff = lambda l: Commit.diff_re.match(l) and not Commit.hunk_re.match(l)
+
+ def __init__(self, refspec):
+ self.refspec = refspec
+
+ @functools.lru_cache()
+ def touched_files(self, parent):
+ git_cmd = ("git diff-tree --no-commit-id --name-only -r " +
+ self.refspec).split()
+ if parent:
+ git_cmd.append(parent)
+
+ return subprocess.check_output(git_cmd).decode(encoding='UTF-8').split(
+ "\n")
+
+ @functools.lru_cache()
+ def diff_lines(self, parent):
+ against = parent if parent else (self.refspec + "^")
+ git_cmd = "git diff {} {}".format(against, self.refspec).split()
+
+ # Filter valid diff lines from the git diff output
+ return list(filter(Commit.is_diff, subprocess.check_output(
+ git_cmd).decode(encoding="UTF-8").split("\n")))
+
+ def matches(self, rule, parent):
+ if type(rule) is str:
+ scheme, colon, rest = rule.partition(":")
+ if colon != ":":
+ raise Exception("rule {} doesn't have a scheme".format(rule))
+
+ if scheme == "path":
+ # Rule is path in plain string
+ return any(f.startswith(rest) for f in self.touched_files(parent))
+ elif scheme == "pathre":
+ # Rule is a regular expression matched against path
+ regex = re.compile(rest)
+ return any(regex.search(f) for f in self.touched_files(parent))
+ elif scheme == "has":
+ # Rule is a regular expression matched against the commit diff
+ has_upper = any(c.isupper() for c in rule)
+ pat_re = re.compile(rest, re.IGNORECASE if not has_upper else 0)
+
+ return any(pat_re.search(l) for l in self.diff_lines(parent))
+ elif scheme == "op":
+ pass
+ else:
+ raise Exception("unsupported scheme: " + scheme)
+ elif type(rule) is tuple:
+ # If op:match-all is found in the tuple, the tuple must match all
+ # rules (AND).
+ test = all if "op:match-all" in rule else any
+
+ # If the rule is a tuple, we match them individually
+ return test(self.matches(r, parent) for r in rule)
+ else:
+ raise Exception("unsupported rule type: {}".format(type(rule)))
+
+
+ci_root = os.path.abspath(os.path.join(__file__, os.pardir, os.pardir))
+group_dir = os.path.join(ci_root, "group")
+
+parser = argparse.ArgumentParser()
+
+# Argument setup
+parser.add_argument("--parent", help="Parent commit to compare against")
+parser.add_argument("--refspec", default="@", help="refspec")
+parser.add_argument("rules_file", help="Rules file")
+
+opts = parser.parse_args()
+
+# Import project-specific nomination_rules dictionary
+script_dir = os.path.dirname(os.path.abspath(__file__))
+with open(os.path.join(opts.rules_file)) as fd:
+ exec(fd.read())
+
+commit = Commit(opts.refspec)
+nominations = set()
+for rule, test_list in nomination_rules.items():
+ # Rule must be either string or tuple. Test list must be list
+ assert type(rule) is str or type(rule) is tuple
+ assert type(test_list) is list
+
+ if commit.matches(rule, opts.parent):
+ nominations |= set(test_list)
+
+for nom in nominations:
+ # Each test nomination must exist in the repository
+ if not os.path.isfile(os.path.join(group_dir, nom)):
+ raise Exception("nomination {} doesn't exist".format(nom))
+
+ print(nom)
diff --git a/script/gen_run_config_candidates.py b/script/gen_run_config_candidates.py
new file mode 100755
index 0000000..d9e3f9b
--- /dev/null
+++ b/script/gen_run_config_candidates.py
@@ -0,0 +1,67 @@
+#!/usr/bin/env python3
+#
+# Copyright (c) 2019, Arm Limited. All rights reserved.
+#
+# SPDX-License-Identifier: BSD-3-Clause
+#
+
+# Output to stdout the chosen run configuration fragments for a given run
+# configuration. With -p, the script prints all fragments considered without
+# validating whether it exists.
+
+import argparse
+import os
+import sys
+
+parser = argparse.ArgumentParser(description="Choose run configurations")
+parser.add_argument("--print-only", "-p", action="store_true", default=False,
+ help="Print only; don't check for matching run configs.")
+parser.add_argument("args", nargs=argparse.REMAINDER, help="Run configuration")
+opts = parser.parse_args()
+
+if len(opts.args) != 1:
+ raise Exception("Exactly one argument expected")
+
+# Obtain path to run_config directory
+script_root = os.path.dirname(os.path.abspath(sys.argv[0]))
+run_config_dir = os.path.join(script_root, os.pardir, "run_config")
+
+arg = opts.args[0]
+run_config = arg.split(":")[-1]
+if not run_config:
+ raise Exception("Couldn't extract run config from " + arg)
+
+if run_config == "nil":
+ sys.exit(0)
+
+fragments = run_config.split("-")
+exit_code = 0
+
+# Stems are fragments, except with everything after dot removed.
+stems = list(map(lambda f: f.split(".")[0], fragments))
+
+# Consider each fragment in turn
+for frag_idx, chosen_fragment in enumerate(fragments):
+    # Choose all stems up to the current fragment
+ chosen = ["-".join(stems[0:i] + [chosen_fragment])
+ for i in range(frag_idx + 1)]
+
+ for i, fragment in enumerate(reversed(chosen)):
+ if opts.print_only:
+ print(fragment)
+ else:
+ # Output only if a matching run config exists
+ if os.path.isfile(os.path.join(run_config_dir, fragment)):
+ # Stop looking for generic once a specific fragment is found
+ print(fragment)
+ break
+ else:
+ # Ignore if the first fragment doesn't exist, which is usually the
+ # platform name. Otherwise, print a warning for not finding matches for
+ # the fragment.
+ if (not opts.print_only) and (i > 0):
+ print("warning: {}: no matches for fragment '{}'".format(
+ arg, fragment), file=sys.stderr)
+ exit_code = 1
+
+sys.exit(exit_code)
diff --git a/script/gen_test_desc.py b/script/gen_test_desc.py
new file mode 100755
index 0000000..5913c32
--- /dev/null
+++ b/script/gen_test_desc.py
@@ -0,0 +1,131 @@
+#!/usr/bin/env python3
+#
+# Copyright (c) 2019, Arm Limited. All rights reserved.
+#
+# SPDX-License-Identifier: BSD-3-Clause
+#
+#
+# Generate .test files in $workspace based on the $TEST_GROUPS parameter. Test
+# files are prefixed with a zero-padded number for a predictable ordering
+# amongst them.
+
+import os
+
+TEST_SUFFIX = ".test"
+
+
+def touch(a_file):
+ with open(a_file, "w"):
+ pass
+
+
+# Obtain the value of either $variable or $VARIABLE.
+def get_env(variable):
+ var_list = [variable, variable.upper()]
+ for v in var_list:
+ value = os.environ.get(v)
+ if value:
+ return value
+ else:
+ raise Exception("couldn't find {} in env".format(" or ".join(var_list)))
+
+
+# Perform group-specific translation on the build config
+def translate_build_config(group, config_list):
+ # config_list contains build configs as read from the test config
+ if group.startswith("scp-"):
+ # SCP configs would be specified in the following format:
+ # scp_config, tf_config, tftf_config
+ # Reshuffle them into the canonical format
+ config_list = [config_list[1], config_list[2], config_list[0]]
+
+ return config_list
+
+
+def gen_desc(group, test):
+ global num_spawn
+
+ build_config, run_config = test.split(":")
+
+ # Test descriptors are always generated in the following order:
+ # tf_config, tftf_config, scp_config
+ # Fill missing configs to the right with "nil".
+ config_list = (build_config.split(",") + ["nil"] * 3)[:3]
+
+ # Perform any group-specific translation on the config
+ config_list = translate_build_config(group, config_list)
+
+ test_config = ",".join(config_list) + ":" + run_config
+
+ # Create descriptor. Write the name of the original test config as its
+ # content.
+ desc = os.path.join(workspace, "%".join([str(num_spawn).zfill(3), group,
+ test_config + TEST_SUFFIX]))
+ with open(desc, "wt") as fd:
+ print(test, file=fd)
+
+ num_spawn += 1
+
+
+def process_item(item):
+ # If an item starts with @, then it's deemed to be an indirection--a file
+ # from which test groups are to be read.
+ if item.startswith("@"):
+ with open(item[1:]) as fd:
+ for line in fd:
+ line = line.strip()
+ if not line:
+ continue
+ process_item(line)
+
+ return
+
+ item_loc = os.path.join(group_dir, item)
+
+ if os.path.isfile(item_loc):
+ gen_desc(*item_loc.split(os.sep)[-2:])
+ elif os.path.isdir(item_loc):
+ # If it's a directory, select all files inside it
+ for a_file in next(os.walk(item_loc))[2]:
+ gen_desc(item, a_file)
+ else:
+ # The item doesn't exist
+ if ":" in item:
+ # A non-existent test config is specified
+ if "/" in item:
+ # The test config doesn't exist, and a group is also specified.
+ # This is not allowed.
+ raise Exception("'{}' doesn't exist.".format(item))
+ else:
+ # The user probably intended to create one on the fly; so create
+ # one in the superficial 'GENERATED' group.
+ print("note: '{}' doesn't exist; generated.".format(item))
+ touch(os.path.join(generated_dir, item))
+ gen_desc(os.path.basename(generated_dir), item)
+ else:
+ raise Exception("'{}' is not valid for test generation!".format(item))
+
+
+ci_root = os.path.abspath(os.path.join(__file__, os.pardir, os.pardir))
+group_dir = os.path.join(ci_root, "group")
+num_spawn = 0
+
+# Obtain variables from environment
+test_groups = get_env("test_groups")
+workspace = get_env("workspace")
+
+# Remove all test files, if any
+_, _, files = next(os.walk(workspace))
+for test_file in files:
+ if test_file.endswith(TEST_SUFFIX):
+ os.remove(os.path.join(workspace, test_file))
+
+generated_dir = os.path.join(group_dir, "GENERATED")
+os.makedirs(generated_dir, exist_ok=True)
+
+for item in test_groups.split():
+ process_item(item)
+
+print()
+print("{} children to be spawned...".format(num_spawn))
+print()
diff --git a/script/gen_test_report.css b/script/gen_test_report.css
new file mode 100644
index 0000000..e7d6df3
--- /dev/null
+++ b/script/gen_test_report.css
@@ -0,0 +1,132 @@
+/*
+ * Copyright (c) 2019, Arm Limited. All rights reserved.
+ *
+ * SPDX-License-Identifier: BSD-3-Clause
+ */
+
+/* Result colors */
+.success {
+ background-color: #b4fd98;
+}
+.failure {
+ background-color: #ffb8b8;
+}
+.unstable {
+ background-color: #ffe133;
+}
+
+/* TF report */
+#tf-report-main {
+ margin-bottom: 10px;
+ margin-top: 10px;
+}
+#tf-report-main table {
+ border: 1px dotted #aaa;
+ border-collapse: collapse;
+ font: 10px monospace;
+}
+#tf-report-main td, #tf-report-main th {
+ border: 1px solid #c8c8c8;
+ padding: 5px 10px;
+ font: 10px monospace;
+ vertical-align: middle;
+}
+#tf-report-main th {
+ background-color: #e0f7ff;
+ font-weight: bold;
+}
+#tf-report-main .buildlink {
+ display: inline-block;
+ height: 16px;
+ margin-left: 10px;
+ vertical-align: middle;
+ /* Console icon 16x16 */
+ background: url('data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAYAAAAf8/9hAAACJUlEQVR42qVTO4saYRS9n/EtCoKvQgZBFCyUyDba2BgRJF2a7QQDYmOhhSJCViw0oI2NU7gsFrEQZKsUEVKIGMXB/AQ3jJjOahVf6yNzvzAQAm4WcuEyMPPdc8853xkC/1mk1WppdTrdW71eLyeEwPl8vngYv2Efj0eYzWY/o9HoV9Ltdr+EQqEwDj9XIrAIslwuIZ/Pvye9Xo+32WxMo9EAr9f7T5DD4QCLxQLi8Tik02mW9Pt93mAwMN84DqxWK3wfj2H5+PgskFarhVQqBdlsliWDwYA3Go3Mh5sbeHd9DaFIBD4WCsA/PFwEcDqdEIvFoFqtsmQ4HFKAu1YL3oTD8Pn+HrjhkBp1SYbdbodisQi1Wo0lo9GIN5lMjNvtBolEAsKNgD8QgNdXV4DGfrq9hfV6TQFRP5oXDAahUqlAvV5nyXg8pgAOhwOUSiUoFApQq9X0iQDb7ZYO7vd72Gw2sNvtICLILJfLIBjPEo7jeIvFwng8HlCpVLTlcjlIpVIQc/H09EQHEQDZIIOC4FOz2WTJZDLhzWYz4/P5QKPR0O3IRCaTUYDT6US3IxMcXq1WEBAkZjIZEEL4GwAlICpejwiCLNAT1I7bxWH0wO/3QzKZhHa7TT344XK5bNPpFMQ0imkTY4seoAyxkSFK7HQ6dZLL5UqJRCLzSig8/Gcj/b/fiS2kcVkqlaJEiK9kPp/bhO2Kl/6ByEwAXwtSZ+SlQ5fqF5qGN9ApekVGAAAAAElFTkSuQmCC') no-repeat left top;
+ width: 16px;
+}
+#tf-report-main .emptycell {
+ background-color: #eee;
+}
+#tf-report-main .failure {
+ vertical-align: middle;
+}
+#tf-report-main .success {
+ vertical-align: middle;
+}
+#tf-report-main .unstable {
+ vertical-align: middle;
+}
+#tf-report-main .result {
+ display: inline-block;
+ min-width: 50px;
+}
+#tf-report-main .dim {
+ opacity: 0.2;
+}
+#tf-report-main input[type="checkbox"] {
+ margin-left: 10px;
+ vertical-align: middle;
+}
+
+/* Label container */
+.tf-label-container {
+ margin-bottom: 10px;
+ margin-top: 10px;
+ position: relative;
+}
+.tf-label-container .tf-label-label {
+ background-color: white;
+ left: 10px;
+ position: absolute;
+ top: -0.65em;
+}
+.tf-label-container pre {
+ white-space: pre;
+}
+.tf-label-container .tf-label-content,
+.tf-label-container pre {
+ border: 1px dotted #ffb8b8;
+ padding: 1em;
+}
+
+/* Rebuild table */
+#tf-rebuild-button, #tf-rebuild-all-button {
+ background-color: #4b758b; /* Same color as Jenkins buttons */
+ border: 1px solid #aaa;
+ color: #eee;
+ display: block;
+ font-weight: bold;
+ margin-bottom: 10px;
+ padding: 5px 15px;
+ width: 250px;
+}
+#tf-rebuild-button[disabled] {
+ background-color: #ccc;
+ color: #ddd;
+}
+#tf-rebuild-table {
+ margin-bottom: 20px;
+ margin-top: 20px;
+}
+#tf-rebuild-table .desc-col {
+ padding-right: 10px;
+ vertical-align: middle;
+}
+#tf-rebuild-table .button-col {
+ border-left: 1px solid #c8c8c8;
+ padding-left: 10px;
+ vertical-align: middle;
+}
+#tf-rebuild-table .select-row {
+ font-size: 0.8em;
+ padding-bottom: 15px;
+ padding-top: 10px;
+ text-align: center;
+}
+#tf-rebuild-table .select-all {
+ border: 1px solid #ccc;
+ border-radius: 3px;
+ cursor: pointer;
+ display: inline-block;
+ padding: 2px 3px 0px 3px;
+}
diff --git a/script/gen_test_report.js b/script/gen_test_report.js
new file mode 100644
index 0000000..9ce703d
--- /dev/null
+++ b/script/gen_test_report.js
@@ -0,0 +1,277 @@
+//<![CDATA[
+//
+// Copyright (c) 2019, Arm Limited. All rights reserved.
+//
+// SPDX-License-Identifier: BSD-3-Clause
+//
+// Get rid of all unhelpful and annoying orbs that Jenkins barfs to indicate sub
+// job status. We'd have that in the HTML report anyway. Unhelpfully, Jenkins
+// doesn't ID the element, nor does it assign a class to them. So, we:
+//
+// - Look for a h2 element with text "Subproject Builds" or "Subprojects";
+//
+// - The orbs are placed in a <ul> immediately following the h2 element; so we
+// remove it altogether.
+//
+document.querySelectorAll("h2").forEach(function(el) {
+ if ((el.innerText !== "Subproject Builds") && (el.innerText !== "Subprojects"))
+ return;
+ if (el.nextSibling.tagName !== "UL")
+ return;
+ el.nextSibling.remove();
+ el.remove();
+});
+
+// For failed jobs, there's this large "Identified problems" table that has no
+// value. Get rid of that as well.
+document.querySelectorAll("h2").forEach(function(el) {
+ if (el.innerText !== "Identified problems")
+ return;
+ el.closest("table").remove();
+});
+
+function onResultHover(e) {
+ var title = this.getAttribute("title");
+ var commandPre = document.querySelector("#tf-selected-commands");
+ var localCmd = "";
+
+ if (!title || title === "") {
+ localCmd = "<i>No local command available!</i>";
+ } else {
+ var titleElement = '<span style="color: red;">' + title + '</span>';
+
+ localCmd = "workspace=/tmp/workspace test_run=1 test_groups=" + titleElement +
+ " script/run_local_ci.sh";
+ }
+
+ commandPre.innerHTML = localCmd;
+}
+
+// Disable re-trigger button
+function retriggerDisable() {
+ var button = document.getElementById("tf-rebuild-button");
+ button.setAttribute("disabled", "");
+}
+
+var checkedCount = 0;
+
+// Enable or disable retrigger button according to its count attribute
+function retriggerEffectCount() {
+ var button = document.getElementById("tf-rebuild-button");
+
+ if (checkedCount === 0)
+ button.setAttribute("disabled", "");
+ else
+ button.removeAttribute("disabled");
+}
+
+function resultCheckboxes() {
+ return document.querySelectorAll("#tf-report-main input[type=checkbox]");
+}
+
+function computeCheckCount() {
+ checkedCount = 0;
+
+ resultCheckboxes().forEach(function(el) {
+ if (el.checked)
+ checkedCount++;
+ });
+
+ retriggerEffectCount();
+}
+
+function onConfigChange(e) {
+ var button = document.getElementById("tf-rebuild-button");
+
+ computeCheckCount();
+
+ // Collapse the re-build frame upon changing config selection
+ document.getElementById("tf-rebuild-frame").style.display = "none";
+}
+
+var retryCount = 0;
+
+function retryRebuild(frame, selectedConfigs, embed) {
+ var doc = frame.contentDocument;
+ var form = doc.querySelector("form[action=configSubmit]");
+ var errMsg = "Error re-triggering. Are you logged in?" +
+ " If this happens repeatedly, please check the browser console for errors.";
+
+ if (!form || !form.querySelector("button")) {
+ retryCount++;
+ if (retryCount > 50)
+ alert(errMsg);
+ else
+ setTimeout(retryRebuild, 100, frame, selectedConfigs, embed);
+ return;
+ }
+
+ try {
+ var groups = form.querySelector("input[value=TEST_GROUPS]");
+ groups = groups.nextElementSibling;
+
+ // Set groups only if there were selections, or leave unchanged.
+ if (selectedConfigs)
+ groups.value = selectedConfigs.join(" ");
+
+ // Clear the parameters derived from clone_repos.sh that had been passed
+ // over to the present job, which have now become stale. They are no more
+ // valid for a re-trigger, and have to be freshly set.
+ const paramsToClear = ["CI_SCRATCH"];
+ paramsToClear.forEach(function(item) {
+ var el = form.querySelector("input[value=" + item + "]");
+ if (!el)
+ return;
+
+ // The value for this parameter is the next sibling, with name=value
+ // property attached.
+ el = el.nextElementSibling;
+ if (el.getAttribute("name") != "value")
+ throw "Unable to clear parameter '" + item + "'";
+
+ // Clear the parameter's value
+ el.value = "";
+ });
+
+ if (embed) {
+ // Leave only the parameter form
+ try {
+ doc.querySelector("#side-panel").remove();
+ doc.querySelector("#page-head").remove();
+ doc.querySelector("footer").remove();
+
+ var mainPanel = doc.querySelector("#main-panel");
+ mainPanel.style.marginLeft = "0px";
+ mainPanel.style.padding = "10px";
+
+ doc.body.style.padding = "0px";
+ } catch (e) {
+ }
+
+ // Have the frame disappear after clicking, and remove event listener
+ var closer = form.querySelector("button").addEventListener("click", function(e) {
+ setTimeout(function() {
+ frame.style.display = "none";
+
+ // We had disabled the retrigger button when we opened the frame. Now
+ // that we're closing the frame, leave the button in the appropriate
+ // state.
+ retriggerEffectCount();
+
+ e.target.removeEventListener(e.type, closer);
+ alert("Build re-triggered for selected configurations.");
+ });
+ });
+
+ frame.style.height = "700px";
+ frame.style.width = "100%";
+ frame.style.display = "block";
+
+ // Disable re-trigger until this frame is closed
+ retriggerDisable();
+
+ window.scrollTo(0, frame.getBoundingClientRect().top);
+ } else {
+ // Trigger rebuild
+ form.querySelector("button").click();
+ if (selectedConfigs)
+ alert("Build re-triggered for selected configurations.");
+ else
+ alert("Job re-triggered.");
+ }
+ } catch (e) {
+ alert("Error triggering job: " + e);
+ }
+}
+
+function onRebuild(e) {
+ var selectedConfigs = [];
+ var parent;
+ var embed;
+ var configs;
+
+ var loc = window.location.href.replace(/\/*$/, "").split("/");
+ var buildNo = loc[loc.length - 1];
+ if (!parseInt(buildNo)) {
+    alert("Please visit the page of a specific build, and try again.");
+ return;
+ }
+
+ resultCheckboxes().forEach(function(el) {
+ if (el.checked === true) {
+ parent = el.closest("td");
+ selectedConfigs.push(parent.getAttribute("title"));
+ }
+ });
+
+ loc.push("rebuild");
+ loc.push("parameterized");
+
+ // If shift key was pressed when clicking, just open a retrigger window
+ retryCount = 0;
+ if (e.shiftKey)
+ embed = true;
+
+ var frame = document.getElementById("tf-rebuild-frame");
+ frame.style.display = "none";
+ frame.src = loc.join("/");
+
+ configs = (e.target.id === "tf-rebuild-button")? selectedConfigs: null;
+ setTimeout(retryRebuild, 250, frame, configs, embed);
+}
+
+function onSelectAll(e) {
+ var selectClass = e.target.innerHTML.toLowerCase();
+
+ if (selectClass === "none") {
+ resultCheckboxes().forEach(function(checkbox) {
+ checkbox.checked = false;
+ });
+ } else {
+ document.querySelectorAll("." + selectClass).forEach(function(result) {
+ var input = result.querySelector("input");
+ if (input)
+ input.checked = true;
+ });
+ }
+
+ computeCheckCount();
+}
+
+function init() {
+ // The whole of Jenkins job result page is rendered in an HTML table. This
+ // means that anything that alters the size of content elements will cause a
+ // disruptive page layout reflow. That's exactly what happens with local
+ // commands when job results are hovered over. To avoid jitter when result
+ // hovering, fix the width of the element to its initial value.
+ var localCommands = document.querySelector("#tf-selected-commands");
+ localCommands.style.width = window.getComputedStyle(localCommands).width;
+
+ // Add result hover listeners
+ [".success", ".failure", ".unstable"].map(function(sel) {
+ return "#tf-report-main " + sel;
+ }).forEach(function(sel) {
+ document.querySelectorAll(sel).forEach(function(result) {
+ result.addEventListener("mouseover", onResultHover);
+ });
+ });
+
+ // Add checkbox click listeners
+ resultCheckboxes().forEach(function(el) {
+ el.addEventListener("change", onConfigChange);
+ });
+
+ // Add re-trigger button listener
+ document.getElementById("tf-rebuild-button").addEventListener("click", onRebuild);
+ document.getElementById("tf-rebuild-all-button").addEventListener("click", onRebuild);
+
+ // Add listener for select all widgets
+ document.querySelectorAll(".select-all").forEach(function(widget) {
+ widget.addEventListener("click", onSelectAll);
+ });
+
+ computeCheckCount();
+}
+
+document.addEventListener("DOMContentLoaded", init);
+//]]>
diff --git a/script/gen_test_report.py b/script/gen_test_report.py
new file mode 100755
index 0000000..e3edcac
--- /dev/null
+++ b/script/gen_test_report.py
@@ -0,0 +1,530 @@
+#!/usr/bin/env python3
+#
+# Copyright (c) 2019, Arm Limited. All rights reserved.
+#
+# SPDX-License-Identifier: BSD-3-Clause
+#
+
+# Generate a test report from data inferred from Jenkins environment. The
+# generated HTML file is meant for inclusion in the report status page,
+# therefore isn't standalone, fully-formed, HTML.
+
+import argparse
+import collections
+import json
+import io
+import os
+import re
+import shutil
+import sys
+import urllib.request
+
+PAGE_HEADER = """\
+<div id="tf-report-main">
+<table>
+"""
+
+PAGE_FOOTER = """\
+</tbody>
+</table>
+</div> <!-- tf-report-main -->
+
+<table id="tf-rebuild-table"><tbody>
+<tr><td colspan="2" class="select-row">
+ Select tests by result:
+ <span class="select-all">None</span>
+ |
+ <span class="select-all success">SUCCESS</span>
+ |
+ <span class="select-all unstable">UNSTABLE</span>
+ |
+ <span class="select-all failure">FAILURE</span>
+</td></tr>
+<tr>
+ <td class="desc-col">
+ Select build configurations, and click the button to re-trigger builds.
+ <br />
+ Use <b>Shift+Click</b> to alter parameters when re-triggering.
+ </td>
+ <td class="button-col">
+ <input id="tf-rebuild-button" type="button" value="Rebuild selected configs"
+ disabled count="0"/>
+ <input id="tf-rebuild-all-button" type="button" value="Rebuild this job"/>
+ </td>
+</tr>
+</tbody></table>
+
+<div class="tf-label-container">
+<div class="tf-label-label"> Local commands </div>
+<pre class="tf-label-cotent" id="tf-selected-commands">
+<i>Hover over test results to display equivalent local commands.</i>
+</pre>
+</div> <!-- tf-label-container -->
+
+<iframe id="tf-rebuild-frame" style="display: none"></iframe>
+"""
+
+TEST_SUFFIX = ".test"
+REPORT = "report.html"
+REPORT_JSON = "report.json"
+
+# Maximum depth for the tree of results, excluding status
+MAX_RESULTS_DEPTH = 5
+
+# We'd have a minimum of 3: group, a build config, a run config.
+MIN_RESULTS_DEPTH = 3
+
+# Table header corresponding to each level, starting from group. Note that
+# the result is held in the leaf node itself, and has to appear in a column of
+# its own.
+LEVEL_HEADERS = [
+ "Test Group",
+ "TF Build Config",
+ "TFTF Build Config",
+ "SCP Build Config",
+ "Run Config",
+ "Status"
+]
+
+Jenkins = None
+Dimmed_hypen = None
+Build_job = None
+Job = None
+
+# Indicates whether a level of table has no entries. Assume all levels are empty
+# to start; and flip that around as and when we see otherwise.
+Level_empty = [True] * MAX_RESULTS_DEPTH
+assert len(LEVEL_HEADERS) == (MAX_RESULTS_DEPTH + 1)
+
+# A column is deemed empty if it's content is empty or is the string "nil"
+is_empty = lambda key: key in ("", "nil")
+
+# A tree of ResultNodes are used to group test results by config. The tree is
+# MAX_RESULTS_DEPTH levels deep. Levels above MAX_RESULTS_DEPTH groups results,
+# where as those at MAX_RESULTS_DEPTH (leaves) hold test result and other meta
+# data.
class ResultNode:
    # depth runs from 0 (group) to MAX_RESULTS_DEPTH (leaf). Grouping nodes
    # hold an ordered mapping of key -> child node; leaf nodes hold the test
    # result and its meta data.
    def __init__(self, depth=0):
        self.depth = depth
        # Set once this node's table cell has been emitted (see
        # result_to_html), so cells shared between rows print only once.
        self.printed = False
        if depth == MAX_RESULTS_DEPTH:
            self.result = None
            self.build_number = None
            self.desc = None
        else:
            self.num_children = 0
            self.children = collections.OrderedDict()

    # For a grouping node, set child by key. Note that an empty/"nil" key
    # still creates a child, but doesn't mark this level as non-empty for
    # table-rendering purposes.
    def set_child(self, key):
        assert self.depth < MAX_RESULTS_DEPTH

        self.num_children += 1
        if not is_empty(key):
            Level_empty[self.depth] = False
        return self.children.setdefault(key, ResultNode(self.depth + 1))

    # For a leaf node, set result and other meta data.
    def set_result(self, result, build_number):
        assert self.depth == MAX_RESULTS_DEPTH

        self.result = result
        self.build_number = build_number

    # Record the path to the .test descriptor file backing this leaf.
    def set_desc(self, desc):
        self.desc = desc

    def get_desc(self):
        return self.desc

    # For a grouping node, return dictionary iterator.
    def items(self):
        assert self.depth < MAX_RESULTS_DEPTH

        return self.children.items()

    # Generator function that walks through test results. The output of
    # iteration is reflected in the stack argument, which ought to be a deque:
    # at each yield, 'stack' holds the (key, node) path from this node down to
    # a leaf. The yielded value itself is None; callers read 'stack' instead.
    def iterator(self, key, stack):
        stack.append((key, self))
        if self.depth < MAX_RESULTS_DEPTH:
            for child_key, child in self.items():
                yield from child.iterator(child_key, stack)
        else:
            yield
        stack.pop()

    # Convenient child access during debugging.
    def __getitem__(self, key):
        assert self.depth < MAX_RESULTS_DEPTH

        return self.children[key]

    # Print convenient representation for debugging.
    def __repr__(self):
        if self.depth < MAX_RESULTS_DEPTH:
            return "node(depth={}, nc={}, {})".format(self.depth,
                    self.num_children,
                    ("None" if self.children is None else
                        list(self.children.keys())))
        else:
            return ("result(" +
                    ("None" if self.result is None else str(self.result)) + ")")
+
+
+# Open an HTML element, given its name, content, and a dictionary of attributes:
+# <name foo="bar"...>
# Open an HTML element: <name foo="bar"...>. 'attrs' maps attribute names to
# values; attributes whose value is None are omitted. Since "class" is a
# Python keyword, callers pass it as "class_" and it is emitted as "class".
def open_element(name, attrs=None):
    # With no attributes, the opening tag is just the name
    if attrs is None:
        return "<" + name + ">"

    # Re-key "class_" as "class". Like the original assignment+delete, this
    # appends "class" at the end, keeping the order of all other attributes.
    if "class_" in attrs:
        attrs["class"] = attrs.pop("class_")

    attr_text = "".join(' {}="{}"'.format(attr, value)
                        for attr, value in attrs.items()
                        if value is not None)

    return "<" + name + attr_text + ">"
+
+
+# Close an HTML element
# Close an HTML element: </name>
def close_element(name):
    return "</{}>".format(name)
+
+
+# Make an HTML element, given its name, content, and a dictionary of attributes:
+# <name foo="bar"...>content</name>
# Make a complete HTML element, given its name, content, and attributes
# passed as keyword arguments ("class" spelled "class_"):
#   <name foo="bar"...>content</name>
def make_element(name, content="", **attrs):
    assert type(content) is str

    return open_element(name, attrs) + content + close_element(name)
+
+
+# Wrap link in a hyperlink:
+# <a href="link" foo="bar"... target="_blank">content</a>
# Wrap content in a hyperlink that opens in a new tab:
#   <a href="link" foo="bar"... target="_blank">content</a>
def wrap_link(content, link, **attrs):
    anchor_attrs = dict(href=link, target="_blank", **attrs)
    return make_element("a", content, **anchor_attrs)
+
+
# Compose the URL of a Jenkins build (trailing slash included), based on the
# module-level Jenkins base URL: <jenkins>/job/<job>/<build_number>/
def jenkins_job_link(job, build_number):
    return "{}/job/{}/{}/".format(Jenkins, job, build_number)
+
+
+# Begin table by emitting table headers for all levels that aren't empty, and
+# results column. Finish by opening a tbody element for rest of the table
+# content.
# Begin table by emitting a header row with one <th> per non-empty level,
# followed by the trailing result ("Status") column. Finish by opening the
# tbody element that the result rows will go into.
def begin_table(results, fd):
    # Headers for levels that have at least one non-empty key
    headers = [make_element("th", LEVEL_HEADERS[level])
               for level, empty in enumerate(Level_empty)
               if not empty]

    # The result column header is the entry just past the grouping levels
    # (LEVEL_HEADERS has exactly one more entry than Level_empty).
    headers.append(make_element("th", LEVEL_HEADERS[len(Level_empty)]))

    header_row = make_element("tr", "\n".join(headers))
    print(make_element("thead", header_row), file=fd)
    print(open_element("tbody"), file=fd)
+
+
+# Parse the console output of the sub job to identify the build job number. Once
+# build job number is identified, return the link to job console. Upon error,
+# return None.
# Parse the console output of the sub job to identify the build job number.
# Once the build job number is identified, return the link to the build job
# console. Upon any error, return None.
def get_build_job_console_link(job_link):
    """Scrape *job_link*'s console for the triggering build job's number.

    Returns the URL of the build job's console page, or None if it can't be
    determined. This is a best-effort lookup: a missing link only degrades
    the report, so all errors are absorbed.
    """
    job_console_text_link = job_link + "consoleText"
    try:
        with urllib.request.urlopen(job_console_text_link) as console_fd:
            for line in console_fd:
                # Look for lines like:
                #   tf-build #1356 completed. Result was SUCCESS
                line = line.decode()
                if line.find("completed. Result was") < 0:
                    continue

                build_job, build_job_number, *_ = line.split()
                if build_job != Build_job:
                    continue

                build_job_number = build_job_number.replace("#", "")
                return (jenkins_job_link(Build_job, build_job_number) +
                        "console")
    except Exception:
        # Use 'except Exception' rather than a bare 'except' so that
        # KeyboardInterrupt/SystemExit still propagate to the caller.
        pass

    return None
+
+
+# Given the node stack, reconstruct the original config name
# Given the node stack (deque of (key, node) tuples, from group level down to
# run config), reconstruct the original config name.
def reconstruct_config(node_stack):
    """Return the test config name for the result at the bottom of node_stack.

    The name is read back from the descriptor file recorded on the leaf node.
    Returns "" (after a warning on stderr) if the descriptor can't be found.
    """
    group = node_stack[0][0]
    run_config, run_node = node_stack[-1]

    desc = run_node.get_desc()
    try:
        with open(desc) as fd:
            test_config = fd.read().strip()
    except FileNotFoundError:
        print("warning: descriptor {} couldn't be opened.".format(desc),
                file=sys.stderr)
        return ""

    # Configs in the GENERATED pseudo-group aren't prefixed with the group.
    if group != "GENERATED":
        return os.path.join(group, test_config)
    else:
        return test_config
+
+
+# While iterating results, obtain a trail to the current result. node_stack is
+# iterated to identify the nodes contributing to one result.
def result_to_html(node_stack):
    """Render one result row (a run of <td> cells) from node_stack.

    node_stack holds (key, node) pairs from the group level down to the leaf
    result node. Cells already emitted for an earlier row (shared ancestors,
    rendered once with a rowspan) are skipped via each node's 'printed' flag.
    Returns the row's cells concatenated into a single HTML string.
    """
    global Dimmed_hypen

    crumbs = []
    for key, child_node in node_stack:
        if child_node.printed:
            continue

        child_node.printed = True

        # If the level is empty, skip emitting this column
        if not Level_empty[child_node.depth - 1]:
            # - TF config might be "nil" for TFTF-only build configs;
            # - TFTF config might not be present for non-TFTF runs;
            # - SCP config might not be present for non-SCP builds;
            # - All build-only configs have runconfig as "nil";
            #
            # Make nil cells empty, and grey empty cells out.
            if is_empty(key):
                key = ""
                td_class = "emptycell"
            else:
                td_class = None

            # Span the cell over all of this node's child rows
            rowspan = None
            if (child_node.depth < MAX_RESULTS_DEPTH
                    and child_node.num_children > 1):
                rowspan = child_node.num_children

            # Keys are hyphen-separated strings. For better readability, dim
            # hyphens so that text around the hyphens stand out.
            # (Dimmed_hypen is created lazily on first use.)
            if not Dimmed_hypen:
                Dimmed_hypen = make_element("span", "-", class_="dim")
            dimmed_key = Dimmed_hypen.join(key.split("-"))

            crumbs.append(make_element("td", dimmed_key, rowspan=rowspan,
                    class_=td_class))

        # For the last node, print result as well.
        if child_node.depth == MAX_RESULTS_DEPTH:
            # Make test result as a link to the job console
            result_class = child_node.result.lower()
            job_link = jenkins_job_link(Job, child_node.build_number)
            result_link = wrap_link(child_node.result, job_link,
                    class_="result")
            build_job_console_link = get_build_job_console_link(job_link)

            # Add selection checkbox
            selection = make_element("input", type="checkbox")

            # Add link to build console if applicable
            if build_job_console_link:
                build_console = wrap_link("", build_job_console_link,
                        class_="buildlink", title="Click to visit build job console")
            else:
                build_console = ""

            config_name = reconstruct_config(node_stack)

            crumbs.append(make_element("td", (result_link + selection +
                    build_console), class_=result_class, title=config_name))

    # Return result as string
    return "".join(crumbs)
+
+
def main(fd):
    """Generate the HTML test report into *fd*.

    Result data is gathered either from the Jenkins environment (default),
    in which case it is also dumped to report.json for later re-use, or from
    a previously dumped JSON file (--from-json).
    """
    global Build_job, Jenkins, Job

    parser = argparse.ArgumentParser()

    # Add arguments
    parser.add_argument("--build-job", default=None, help="Name of build job")
    parser.add_argument("--from-json", "-j", default=None,
            help="Generate results from JSON input rather than from Jenkins run")
    parser.add_argument("--job", default=None, help="Name of immediate child job")
    parser.add_argument("--meta-data", action="append", default=[],
            help=("Meta data to read from file and include in report "
                "(file allowed be absent). "
                "Optionally prefix with 'text:' (default) or "
                "'html:' to indicate type."))

    opts = parser.parse_args()

    workspace = os.environ["WORKSPACE"]
    if not opts.from_json:
        json_obj = {}

        if not opts.job:
            raise Exception("Must specify the name of Jenkins job with --job")
        else:
            Job = opts.job
            json_obj["job"] = Job

        if not opts.build_job:
            raise Exception("Must specify the name of Jenkins build job with --build-job")
        else:
            Build_job = opts.build_job
            json_obj["build_job"] = Build_job

        Jenkins = os.environ["JENKINS_URL"].strip().rstrip("/")

        # Replace non-alphabetical characters in the job name with underscores. This is
        # how Jenkins does it too.
        job_var = re.sub(r"[^a-zA-Z0-9]", "_", opts.job)

        # Build numbers are comma-separated list
        child_build_numbers = (os.environ["TRIGGERED_BUILD_NUMBERS_" +
            job_var]).split(",")

        # Walk the $WORKSPACE directory, and fetch file names that ends with
        # TEST_SUFFIX
        _, _, files = next(os.walk(workspace))
        test_files = sorted(filter(lambda f: f.endswith(TEST_SUFFIX), files))

        # Store information in JSON object
        json_obj["job"] = Job
        json_obj["build_job"] = Build_job
        json_obj["jenkins_url"] = Jenkins

        json_obj["child_build_numbers"] = child_build_numbers
        json_obj["test_files"] = test_files
        json_obj["test_results"] = {}
    else:
        # Load JSON
        with open(opts.from_json) as json_fd:
            json_obj = json.load(json_fd)

        Job = json_obj["job"]
        Build_job = json_obj["build_job"]
        Jenkins = json_obj["jenkins_url"]

        child_build_numbers = json_obj["child_build_numbers"]
        test_files = json_obj["test_files"]

    # This iteration is in the assumption that Jenkins visits the files in the same
    # order and spawns children, which is true as of this writing. The test files
    # are named in sequence, so it's reasonable to expect that'll remain the case.
    results = ResultNode(0)
    for i, f in enumerate(test_files):
        # Test description is generated in the following format:
        #   seq%group%build_config:run_config.test
        _, group, desc = f.split("%")
        test_config = desc[:-len(TEST_SUFFIX)]
        build_config, run_config = test_config.split(":")
        # Pad with empty fields so that the unpacking below always succeeds,
        # even when optional configs (TFTF, SCP) are absent.
        spare_commas = "," * (MAX_RESULTS_DEPTH - MIN_RESULTS_DEPTH)
        tf_config, tftf_config, scp_config, *_ = (build_config +
                spare_commas).split(",")

        build_number = child_build_numbers[i]
        if not opts.from_json:
            var_name = "TRIGGERED_BUILD_RESULT_" + job_var + "_RUN_" + build_number
            test_result = os.environ[var_name]
            json_obj["test_results"][build_number] = test_result
        else:
            test_result = json_obj["test_results"][build_number]

        # Build result tree
        group_node = results.set_child(group)
        tf_node = group_node.set_child(tf_config)
        tftf_node = tf_node.set_child(tftf_config)
        scp_node = tftf_node.set_child(scp_config)
        run_node = scp_node.set_child(run_config)
        run_node.set_result(test_result, build_number)
        run_node.set_desc(os.path.join(workspace, f))

    # Emit style sheet, script, and page header elements. The .css and .js
    # files live next to this script, sharing its base name.
    stem = os.path.splitext(os.path.abspath(__file__))[0]
    for tag, ext in [("style", "css"), ("script", "js")]:
        print(open_element(tag), file=fd)
        with open(os.extsep.join([stem, ext])) as ext_fd:
            shutil.copyfileobj(ext_fd, fd)
        print(close_element(tag), file=fd)
    print(PAGE_HEADER, file=fd)
    begin_table(results, fd)

    # Generate HTML results for each group
    node_stack = collections.deque()
    for group, group_results in results.items():
        node_stack.clear()

        # For each result, make a table row
        for _ in group_results.iterator(group, node_stack):
            result_html = result_to_html(node_stack)
            row = make_element("tr", result_html)
            print(row, file=fd)

    print(PAGE_FOOTER, file=fd)

    # Insert meta data into report. Since meta data files aren't critical for
    # the test report, and that other scripts may not generate all the time,
    # ignore if the specified file doesn't exist.
    type_to_el = dict(text="pre", html="div")
    for data_file in opts.meta_data:
        *prefix, filename = data_file.split(":")
        file_type = prefix[0] if prefix else "text"
        assert file_type in type_to_el.keys()

        # Ignore if file doesn't exist, or it's empty.
        if not os.path.isfile(filename) or os.stat(filename).st_size == 0:
            continue

        with open(filename) as md_fd:
            md_name = make_element("div", " " + filename + ": ",
                    class_="tf-label-label")
            md_content = make_element(type_to_el[file_type],
                    md_fd.read().strip("\n"), class_="tf-label-content")
            md_container = make_element("div", md_name + md_content,
                    class_="tf-label-container")
            print(md_container, file=fd)

    # Dump JSON file unless we're reading from it.
    if not opts.from_json:
        with open(REPORT_JSON, "wt") as json_fd:
            json.dump(json_obj, json_fd, indent=2)
+
+
# Top-level driver: write the report, replacing it wholesale with a static
# error banner if report generation fails (and then re-raise, so the Jenkins
# job log still shows the original error).
with open(REPORT, "wt") as fd:
    try:
        main(fd)
    except:
        # Upon error, create a static HTML reporting the error, and then raise
        # the latent exception again.
        # NOTE(review): bare 'except' also catches KeyboardInterrupt/SystemExit;
        # presumably intentional, as the banner must be written regardless —
        # confirm before narrowing.
        fd.seek(0, io.SEEK_SET)

        # Provide inline style as there won't be a page header for us.
        err_style = (
                "border: 1px solid red;",
                "color: red;",
                "font-size: 30px;",
                "padding: 15px;"
                )

        print(make_element("div",
            "HTML report couldn't be prepared! Check job console.",
            style=" ".join(err_style)), file=fd)

        # Truncate file as we're discarding whatever was generated before.
        fd.truncate()
        raise
diff --git a/script/gerrit.py b/script/gerrit.py
new file mode 100644
index 0000000..26787cd
--- /dev/null
+++ b/script/gerrit.py
@@ -0,0 +1,44 @@
+#!/usr/bin/env python3
+#
+# Copyright (c) 2019, Arm Limited. All rights reserved.
+#
+# SPDX-License-Identifier: BSD-3-Clause
+#
+
+import json
+import subprocess
+
class GerritServer:
    """A Gerrit server reachable through its SSH command-line API."""

    def __init__(self, url, port=29418):
        self.url = url
        self.port = port

    def query(self, project, q, username=None, keyfile=None):
        """Run a Gerrit query scoped to *project* and return parsed results.

        q is a list of extra query terms appended to the command line.
        Optional username/keyfile configure the SSH connection. Returns a
        list of dicts, one per response line. Raises Exception if the ssh
        command fails or produces no output.
        """
        cmd = ["ssh", "-p", str(self.port)]

        if keyfile:
            cmd += ["-i", keyfile]
        if username:
            cmd += ["{}@{}".format(username, self.url)]
        else:
            cmd += [self.url]

        cmd += ["gerrit", "query", "--format=json", "--patch-sets",
                "--comments", "--current-patch-set",
                "project:{}".format(project)] + q

        proc = subprocess.run(cmd, stdout=subprocess.PIPE)

        changes = [json.loads(resp_line.decode()) for resp_line
                in proc.stdout.splitlines()]

        # Fail loudly: a non-zero ssh exit status (previously ignored) and an
        # empty response both mean the query produced no usable results.
        if proc.returncode != 0 or not changes:
            raise Exception("Error while querying Gerrit server {}.".format(
                self.url))
        return changes
+
class GerritProject:
    # A named project hosted on a GerritServer.
    def __init__(self, name, server):
        self.name = name
        self.server = server

    # Delegate the query to the server, scoped to this project.
    def query(self, q, username=None, keyfile=None):
        return self.server.query(self.name, q, username=username,
                keyfile=keyfile)
diff --git a/script/get_latest_snapshot.py b/script/get_latest_snapshot.py
new file mode 100755
index 0000000..3ddd377
--- /dev/null
+++ b/script/get_latest_snapshot.py
@@ -0,0 +1,85 @@
+#!/usr/bin/env python3
+#
+# Copyright (c) 2019, Arm Limited. All rights reserved.
+#
+# SPDX-License-Identifier: BSD-3-Clause
+#
+
import argparse
import datetime
import os
import sys

# suds is not a standard library package. Although it's installed in the Jenkins
# slaves, it might not be so in the user's machine (when running Coverity scan
# on there).
try:
    import suds
except ImportError:
    print(" You need to have suds Python3 package to query Coverity server")
    print(" pip3 install suds-py3")
    sys.exit(0)

# Get coverity host from environment, or fall back to the default one.
coverity_host = os.environ.get("coverity_host", "coverity.cambridge.arm.com")

parser = argparse.ArgumentParser()

parser.add_argument("--description", help="Snapshot description filter")
parser.add_argument("--file", dest="output_file", help="Output file. Mandatory")
# type=int so that a user-supplied value works with the timedelta computation
# below (without it, "--old 5" arrives as the string "5" and raises TypeError)
parser.add_argument("--old", default=10, type=int, help="Max snapshot age in days")
parser.add_argument("--host", default=coverity_host, help="Coverity server")
parser.add_argument("--version", help="Snapshot version filter")
parser.add_argument("stream_name")

opts = parser.parse_args()

if not opts.output_file:
    raise Exception("Must specify an output file")

# We output the snapshot ID to the specified file. In case of any errors, we
# remove the file, and Coverity wrapper can test for its existence.
try:
    user = os.environ["TFCIBOT_USER"]
    password = os.environ["TFCIBOT_PASSWORD"]
except KeyError:
    print(" Unable to get credentials for user tfcibot")
    print(" For potentially faster analysis, suggest set "
            "TFCIBOT_USER and TFCIBOT_PASSWORD in the environment")
    sys.exit(0)

# SOAP magic stuff
client = suds.client.Client("http://{}/ws/v9/configurationservice?wsdl".format(opts.host))
security = suds.wsse.Security()
token = suds.wsse.UsernameToken(user, password)
security.tokens.append(token)
client.set_options(wsse=security)

# Construct stream ID data object
streamid_obj = client.factory.create("streamIdDataObj")
streamid_obj.name = opts.stream_name

# Snapshot filter
filter_obj = client.factory.create("snapshotFilterSpecDataObj")

# Filter snapshots for age
past = datetime.date.today() - datetime.timedelta(days=opts.old)
filter_obj.startDate = past.strftime("%Y-%m-%d")

if opts.version:
    filter_obj.versionPattern = opts.version

if opts.description:
    filter_obj.descriptionPattern = opts.description

# Query server
results = client.service.getSnapshotsForStream(streamid_obj, filter_obj)

# Print ID of the last snapshot if results were returned
if results:
    try:
        with open(opts.output_file, "w") as fd:
            print(results[-1].id, file=fd)
    except:
        # Clean up the (possibly partial) output file on any failure, then
        # re-raise so callers still see the original error.
        os.remove(opts.output_file)
        raise
diff --git a/script/id_tf_processes.sh b/script/id_tf_processes.sh
new file mode 100755
index 0000000..9c2b4f6
--- /dev/null
+++ b/script/id_tf_processes.sh
@@ -0,0 +1,33 @@
+#!/bin/bash
+#
+# Copyright (c) 2019, Arm Limited. All rights reserved.
+#
+# SPDX-License-Identifier: BSD-3-Clause
+#
+
# Print the "VAR=value" line for environment variable $1 from $proc_file and,
# if present, evaluate the assignment into the current shell.
lookup() {
	local string

	string="$(grep "\\<${1:?}=" < "$proc_file")"
	if [ "$string" ]; then
		echo "$string"
		eval "$string"
	fi
}

for p in $(pgrep FVP); do
	proc_file="$WORKSPACE/proc_file"
	tr '\000' '\n' < "/proc/$p/environ" > "$proc_file"

	# Clear values carried over from the previous iteration: lookup() only
	# sets variables it finds, so a stale TRUSTED_FIRMWARE_CI from an
	# earlier process could otherwise cause a non-CI process to be killed.
	unset TRUSTED_FIRMWARE_CI BUILD_NUMBER JOB_NAME

	echo "PID: $p"
	lookup "TRUSTED_FIRMWARE_CI"
	lookup "BUILD_NUMBER"
	lookup "JOB_NAME"

	# Only kill processes spawned by the CI, and only upon request.
	# (Two [ ] tests joined with && instead of the obsolescent -a operator;
	# kill -TERM is the portable spelling of -SIGTERM.)
	if [ "$KILL_PROCESS" = "true" ] && [ "$TRUSTED_FIRMWARE_CI" = "1" ]; then
		kill -TERM "$p"
		echo "Killed $p"
	fi

	echo
done
diff --git a/script/job_walker.py b/script/job_walker.py
new file mode 100755
index 0000000..47aa084
--- /dev/null
+++ b/script/job_walker.py
@@ -0,0 +1,171 @@
+#!/usr/bin/env python3
+#
+# Copyright (c) 2019, Arm Limited. All rights reserved.
+#
+# SPDX-License-Identifier: BSD-3-Clause
+#
+#
+# This script is used to walk a job tree, primarily to identify sub-jobs
+# triggered by a top-level job.
+#
+# The script works by scraping console output of jobs, starting from the
+# top-level one, sniffing for patterns indicative of sub-jobs, and following the
+# trail.
+
+import argparse
+import contextlib
+import re
+import sys
+import urllib.request
+
+# Sub-job patters. All of them capture job name (j) and build number (b).
+_SUBJOB_PATTERNS = (
+ # Usualy seen on freestyle jobs
+ re.compile(r"(?P<j>[-a-z_]+) #(?P<b>[0-9]+) completed. Result was (?P<s>[A-Z]+)",
+ re.IGNORECASE),
+
+ # Usualy seen on multi-phase jobs
+ re.compile(r"Finished Build : #(?P<b>[0-9]+) of Job : (?P<j>[-a-z_]+) with status : (?P<s>[A-Z]+)",
+ re.IGNORECASE)
+)
+
+
+# Generator that yields lines on a job console as strings
+def _console_lines(console_url):
+ with urllib.request.urlopen(console_url) as console_fd:
+ for line in filter(None, console_fd):
+ # Console might have special characters. Yield an empty line in that case.
+ try:
+ yield line.decode().rstrip("\n")
+ except UnicodeDecodeError as e:
+ # In case of decode error, return up until the character that
+ # caused the error
+ yield line[:e.start].decode().rstrip("\n")
+
+
+# Class representing Jenkins job
# Class representing Jenkins job
class JobInstance:
    def __init__(self, url, status=None):
        # url: link to one specific build of a Jenkins job
        # status: result string (e.g. "SUCCESS") when already known
        self.sub_jobs = []
        self.url = url
        self.name = None
        self.build_number = None
        self.config = None
        self.status = status
        self.depth = 0

    # Representation for debugging
    def __repr__(self):
        return "{}#{}".format(self.name, self.build_number)

    # Scrape job's console to identify sub jobs, and recurseively parse them.
    # A job printing "CONFIGURATION: <name>" is a leaf holding build
    # artefacts; its config is recorded and recursion stops there.
    def parse(self, *, depth=0):
        url_fields = self.url.rstrip("/").split("/")

        # Identify job name and number from the URL
        try:
            stem_url_list = url_fields[:-3]
            self.name, self.build_number = url_fields[-2:]
            # Build "number" may also be one of Jenkins' symbolic names
            if self.build_number not in ("lastBuild", "lastSuccessfulBuild"):
                int(self.build_number)
        except:
            raise Exception(self.url + " is not a valid Jenkins build URL.")

        self.depth = depth

        # Scrape the job's console
        console_url = "/".join(url_fields + ["consoleText"])
        try:
            for line in _console_lines(console_url):
                # A job that prints CONFIGURATION is where we'd find the build
                # artefacts
                fields = line.split()
                if len(fields) == 2 and fields[0] == "CONFIGURATION:":
                    self.config = fields[1]
                    return

                # Look for sub job pattern, and recurse into the sub-job
                child_matches = filter(None, map(lambda p: p.match(line),
                        _SUBJOB_PATTERNS))
                for match in child_matches:
                    child = JobInstance("/".join(stem_url_list +
                            ["job", match.group("j"), match.group("b")]),
                            match.group("s"))
                    child.parse(depth=depth+1)
                    self.sub_jobs.append(child)
        except urllib.error.HTTPError:
            print(console_url + " is not accessible.", file=sys.stderr)

    # Generator that yields individual jobs in the hierarchy (leaves only;
    # a job with sub-jobs yields its descendants, not itself)
    def walk(self, *, sort=False):
        if not self.sub_jobs:
            yield self
        else:
            descendants = self.sub_jobs
            if sort:
                # NOTE(review): build_number is a string, so this sort is
                # lexicographic ("10" < "9") — confirm whether numeric
                # ordering was intended.
                descendants = sorted(self.sub_jobs, key=lambda j: j.build_number)
            for child in descendants:
                yield from child.walk(sort=sort)

    # Print one job
    def print(self):
        config_str = "[" + self.config + "]" if self.config else ""
        status = self.status if self.status else ""

        print("{}{} #{} {} {}".format(" " * 2 * self.depth, self.name,
                self.build_number, status, config_str))

    # Print the whole hierarchy
    def print_tree(self, *, sort=False):
        self.print()
        if not self.sub_jobs:
            return

        descendants = self.sub_jobs
        if sort:
            # NOTE(review): lexicographic string sort — see walk() above.
            descendants = sorted(self.sub_jobs, key=lambda j: j.build_number)
        for child in descendants:
            child.print_tree(sort=sort)

    # Context manager yielding a readable stream for one of this build's
    # archived artefacts; text=True wraps the byte stream so that read()
    # returns str instead of bytes.
    @contextlib.contextmanager
    def open_artefact(self, path, *, text=False):
        # Wrapper class that offer string reads from a byte descriptor
        class TextStream:
            def __init__(self, byte_fd):
                self.byte_fd = byte_fd

            def read(self, sz=None):
                return self.byte_fd.read(sz).decode("utf-8")

        art_url = "/".join([self.url, "artifact", path])
        with urllib.request.urlopen(art_url) as fd:
            yield TextStream(fd) if text else fd
+
+
+
+# When invoked from command line, print the whole tree
# When invoked from command line, print the whole tree
if __name__ == "__main__":
    parser = argparse.ArgumentParser()

    parser.add_argument("build_url",
            help="URL to specific build number to walk")
    # store_true is the idiomatic spelling of
    # action="store_const", const=True, default=False
    parser.add_argument("--unique-tf-configs", action="store_true",
            help="Print unique TF configs")

    opts = parser.parse_args()

    top = JobInstance(opts.build_url)
    top.parse()

    if opts.unique_tf_configs:
        # Extract the base TF config name from each leaf job's config, which
        # looks like "group/tf-config,tftf-config:run-config".
        unique_configs = set()
        for job in filter(lambda j: j.config, top.walk()):
            unique_configs.add(job.config.split("/")[1].split(":")[0].split(",")[0])

        for config in sorted(unique_configs):
            print(config)
    else:
        top.print_tree()
diff --git a/script/make_stress_test_image.sh b/script/make_stress_test_image.sh
new file mode 100755
index 0000000..30c6dfb
--- /dev/null
+++ b/script/make_stress_test_image.sh
@@ -0,0 +1,248 @@
+#!/bin/bash
+#
+# Copyright (c) 2019, Arm Limited. All rights reserved.
+#
+# SPDX-License-Identifier: BSD-3-Clause
+#
+#
+# When pointed to a root file system archive ($root_fs) this script creates a
+# disk image file ($img_file of size $size_gb, or 5GB by default) with 2
+# partitions. Linaro OE ramdisk specifies the second partition as root device;
+# the first partition is unused. The second partition is formatted as ext2, and
+# the root file system extracted into it.
+#
+# Test suites for stress testing are created under /opt/tests.
+
# Abort on the first failing command: we're partitioning, formatting and
# mounting devices, so carrying on after an error is unsafe.
set -e

# Extract the script embedded in this file's trailing comments between the
# "#BEGIN <n>" / "#END <n>" markers (<n> taken from $name) into an
# executable file "<n>.sh" in the current directory.
extract_script() {
	local to="${name:?}.sh"

	sed -n "/BEGIN $name/,/END $name/ {
		/^#\\(BEGIN\\|END\\)/d
		s/^#//
		p
	}" < "${progname:?}" > "$to"

	chmod +x "$to"
}

# Resolve all paths up front; root_fs and img_file are required parameters.
progname="$(readlink -f $0)"
root_fs="$(readlink -f ${root_fs:?})"
img_file="$(readlink -f ${img_file:?})"

mount_dir="${mount_dir:-/mnt}"
mount_dir="$(readlink -f $mount_dir)"

# Create an image file. We assume 5G is enough
size_gb="${size_gb:-5}"
echo "Creating image file $img_file (${size_gb}GB)..."
dd if=/dev/zero of="$img_file" bs=1M count="${size_gb}000" &>/dev/null

# Create a partition table, and then create 2 partitions. The boot expects the
# root file system to be present in the second partition.
# (The here-doc feeds fdisk its interactive answers; trailing comments on each
# line are stripped by the sed above the pipe.)
echo "Creating partitions in $img_file..."
sed 's/ *#.*$//' <<EOF | fdisk "$img_file" &>/dev/null
o # Create new partition table
n # New partition
p # Primary partition
  # Default partition number
  # Default start sector
+1M # Dummy partition of 1MB
n # New partition
p # Primary partition
  # Default partition number
  # Default start sector
  # Default end sector
w
q
EOF

# Get the offset of partition (last line of fdisk -l describes the second,
# final partition)
fdisk_out="$(fdisk -l "$img_file" | sed -n '$p')"

offset="$(echo "$fdisk_out" | awk '{print $2 * 512}')"
size="$(echo "$fdisk_out" | awk '{print (($3 - $2) * 512)}')"

# Setup and identify loop device
loop_dev="$(losetup --offset "$offset" --sizelimit "$size" --show --find \
	"$img_file")"

# Create ext2 file system on the mount
echo "Formatting partition as ext2 in $img_file..."
mkfs.ext2 "$loop_dev" &>/dev/null

# Mount loop device
mount "$loop_dev" "$mount_dir"

# Extract the root file system into the mount
cd "$mount_dir"
echo "Extracting $root_fs to $img_file..."
tar -xzf "$root_fs"

tests_dir="$mount_dir/opt/tests"
mkdir -p "$tests_dir"
cd "$tests_dir"

# Extract embedded scripts into the disk image
name="hotplug" extract_script
name="execute_pmqa" extract_script

echo
rm -rf "test_assets"
echo "Cloning test assets..."
git clone -q --depth 1 http://ssg-sw.cambridge.arm.com/gerrit/tests/test_assets.git
echo "Cloned test assets."

cd test_assets
rm -rf "pm-qa"
echo "Cloning pm-qa..."
git clone -q --depth 1 git://git.linaro.org/tools/pm-qa.git
echo "Cloned pm-qa."

# Leave the mount point before unmounting and detaching the loop device
cd
umount "$mount_dir"

losetup -d "$loop_dev"

# When run under sudo, hand the image back to the invoking user
if [ "$SUDO_USER" ]; then
	chown "$SUDO_USER:$SUDO_USER" "$img_file"
fi

echo "Updated $img_file with stress tests."
+
+#BEGIN hotplug
+##!/bin/sh
+#
+#if [ -n "$1" ]
+#then
+# min_cpu=$1
+# shift
+#fi
+#
+#if [ -n "$1" ]
+#then
+# max_cpu=$1
+# shift
+#fi
+#
+#f_kconfig="/proc/config.gz"
+#f_max_cpus="/sys/devices/system/cpu/present"
+#hp_support=0
+#hp="`gunzip -c /proc/config.gz | sed -n '/HOTPLUG.*=/p' 2>/dev/null`"
+#
+#if [ ! -f "$f_kconfig" ]
+#then
+# if [ ! -f "$f_max_cpus" ]
+# then
+# echo "Unable to detect hotplug support. Exiting..."
+# exit -1
+# else
+# hp_support=1
+# fi
+#else
+# if [ -n "$hp" ]
+# then
+# hp_support=1
+# else
+# echo "Unable to detect hotplug support. Exiting..."
+# exit -1
+# fi
+#fi
+#
+#if [ -z "$max_cpu" ]
+#then
+# max_cpu=`sed -E -n 's/([0-9]+)-([0-9]+)/\2/gpI' < $f_max_cpus`
+#fi
+#if [ -z "$min_cpu" ]
+#then
+# min_cpu=`sed -E -n 's/([0-9]+)-([0-9]+)/\1/gpI' < $f_max_cpus`
+#fi
+#
+#max_cpu=$(($max_cpu + 1))
+#min_cpu=$(($min_cpu + 1))
+#max_op=2
+#
+#while :
+#do
+# cpu=$((RANDOM % max_cpu))
+# op=$((RANDOM % max_op))
+#
+# if [ $op -eq 0 ]
+# then
+## echo "Hotpluging out cpu$cpu..."
+## echo $op > /sys/devices/system/cpu/cpu$cpu/online >/dev/null
+## echo $op > /sys/devices/system/cpu/cpu$cpu/online | grep -i "err"
+# echo $op > /sys/devices/system/cpu/cpu$cpu/online
+# else
+## echo "Hotpluging in cpu$cpu..."
+## echo $op > /sys/devices/system/cpu/cpu$cpu/online >/dev/null
+## echo $op > /sys/devices/system/cpu/cpu$cpu/online | grep -i "err"
+# echo $op > /sys/devices/system/cpu/cpu$cpu/online
+#
+# fi
+#done
+#
+#exit 0
+#
+#MAXCOUNT=10
+#count=1
+#
+#echo
+#echo "$MAXCOUNT random numbers:"
+#echo "-----------------"
+#while [ "$count" -le $MAXCOUNT ] # Generate 10 ($MAXCOUNT) random integers.
+#do
+# number=$RANDOM
+# echo $number
+# count=$(($count + 1))
+#done
+#echo "-----------------"
+#END hotplug
+
+
+#BEGIN execute_pmqa
+##!/bin/sh
+#
+#usage ()
+#{
+# printf "\n*************** Usage *******************\n"
+# printf "sh execute_pmqa.sh args\n"
+# printf "args:\n"
+# printf "t -> -t|--targets=Folders (tests) within PM QA folder to be executed by make, i.e. cpufreq, cpuidle, etc. Defaults to . (all)\n"
+# printf "\t -> -a|--assets=Test assets folder (within the FS) where resides the PM QA folder. Required.\n"
+#}
+#
+#for i in "$@"
+#do
+# case $i in
+# -t=*|--targets=*)
+# TARGETS="${i#*=}"
+# ;;
+# -a=*|--assets=*)
+# TEST_ASSETS_FOLDER="${i#*=}"
+# ;;
+# *)
+# # unknown option
+# printf "Unknown argument $i in arguments $@\n"
+# usage
+# exit 1
+# ;;
+# esac
+#done
+#
+#if [ -z "$TEST_ASSETS_FOLDER" ]; then
+# usage
+# exit 1
+#fi
+#
+#TARGETS=${TARGETS:-'.'}
+#cd $TEST_ASSETS_FOLDER/pm-qa && make -C utils
+#for j in $TARGETS
+#do
+# make -k -C "$j" check
+#done
+#make clean
+#rm -f ./utils/cpuidle_killer
+#tar -zcvf ../pm-qa.tar.gz ./
+#END execute_pmqa
diff --git a/script/parse_lava_job.py b/script/parse_lava_job.py
new file mode 100755
index 0000000..9e331e3
--- /dev/null
+++ b/script/parse_lava_job.py
@@ -0,0 +1,104 @@
+#!/usr/bin/env python3
+#
+# Copyright (c) 2019, Arm Limited. All rights reserved.
+#
+# SPDX-License-Identifier: BSD-3-Clause
+#
+
+# After lava job is dispatched, its results will be collected in
+# $WORKSPACE/job_results.yaml file. Parse that file, and exit from this script
+# with the respective exit status
+
+import argparse
+import os
+import sys
+import yaml
+
+
+def report_job_failure():
+    job_url = os.environ["JOB_URL"]
+    build_number = os.environ["BUILD_NUMBER"]
+    print()
+    print("Job failed!")
+    print("See " + "/".join([job_url.rstrip("/"), build_number, "artifact",
+                             "job_output.log"]))
+    print()
+    sys.exit(1)
+
+
+def report_job_success():
+    print()
+    print("Job success.")
+    print()
+    sys.exit(0)
+
+
+def parse_cmd_line():
+    parser = argparse.ArgumentParser(description="Parse results from LAVA. "
+            "The results must be provided as a YAML file.")
+    parser.add_argument("--payload-type", default="linux", type=str,
+            help="Type of payload that was used in the test (default: %(default)s)")
+    parser.add_argument("--file",
+            default=os.path.join(os.environ["WORKSPACE"], "job_results.yaml"),
+            type=str, help="YAML file to parse (default: %(default)s)")
+    args = parser.parse_args()
+    return args
+
+
+args = parse_cmd_line()
+
+with open(args.file) as fd:
+    results = yaml.safe_load(fd)  # safe_load: yaml.load w/o a Loader is unsafe
+
+    # Iterate through results. Find the element whose name is "job" in the
+    # "lava" suite. It contains the result of the overall LAVA run.
+    for phase in results:
+        if phase["name"] == "job" and phase["suite"] == "lava":
+            break
+    else:
+        raise Exception("Couldn't find 'job' phase in 'lava' suite in results")
+
+    if phase["result"] != "pass":
+        report_job_failure()
+
+    # If we've simply booted to the Linux shell prompt then we don't need to
+    # further analyze the results from LAVA.
+    if args.payload_type == "linux":
+        report_job_success()
+
+    # If we've run TFTF tests instead, then do some further parsing.
+    # First make sure the test session finished.
+    for phase in filter(lambda p: p["name"] == "lava-test-monitor", results):
+        if phase["result"] != "pass":
+            print("TFTF test session failed. Did it time out?")
+            report_job_failure()
+        break
+    else:
+        raise Exception("Couldn't find 'lava-test-monitor' phase results")
+
+    # Then count the number of tests that failed/skipped.
+    test_failures = 0
+    test_skips = 0
+    for phase in filter(lambda p: p["suite"] == "tftf", results):
+        metadata = phase["metadata"]
+        testcase_name = metadata["case"]
+        testcase_result = metadata["result"]
+        if testcase_result == "fail":
+            test_failures += 1
+            print("=> FAILED: " + testcase_name)
+        elif testcase_result == "skip":
+            test_skips += 1
+            print("   SKIPPED: " + testcase_name)
+
+    # Print a test summary
+    print()
+    if test_failures == 0 and test_skips == 0:
+        print("All tests passed.")
+    else:
+        print("{} tests failed; {} skipped. All other tests passed.".format(
+            test_failures, test_skips))
+
+    if test_failures == 0:
+        report_job_success()
+    else:
+        report_job_failure()
diff --git a/script/parse_test.sh b/script/parse_test.sh
new file mode 100755
index 0000000..4086ace
--- /dev/null
+++ b/script/parse_test.sh
@@ -0,0 +1,42 @@
+#!/bin/bash
+#
+# Copyright (c) 2019, Arm Limited. All rights reserved.
+#
+# SPDX-License-Identifier: BSD-3-Clause
+#
+
+# Decode test description and extract TF build configuration, run configuration,
+# test group etc.
+#
+# See gen_test_desc.py
+
+set -e
+
+ci_root="$(readlink -f "$(dirname "$0")/..")"
+source "$ci_root/utils.sh"
+
+test_desc="${test_desc:-$TEST_DESC}"
+test_desc="${test_desc:?}"
+
+# Strip test suffix
+test_desc="${test_desc%%.test}"
+
+lhs="$(echo "$test_desc" | awk -F: '{print $1}')"
+rhs="$(echo "$test_desc" | awk -F: '{print $2}')"
+
+test_group="$(echo "$lhs" | awk -F% '{print $2}')"
+build_config="$(echo "$lhs" | awk -F% '{print $3}')"
+run_config="${rhs%.test}"
+test_config="$(cat "$workspace/TEST_DESC")"
+
+env_file="$workspace/env"
+rm -f "$env_file"
+
+emit_env "BUILD_CONFIG" "$build_config"
+emit_env "RUN_CONFIG" "$run_config"
+emit_env "TEST_CONFIG" "$test_config"
+emit_env "TEST_GROUP" "$test_group"
+
+# Default binary mode. This would usually come from the build package for FVP
+# runs, but is provided for LAVA jobs.
+emit_env "BIN_MODE" "release"
diff --git a/script/run_local_ci.sh b/script/run_local_ci.sh
new file mode 100755
index 0000000..9376305
--- /dev/null
+++ b/script/run_local_ci.sh
@@ -0,0 +1,309 @@
+#!/bin/bash
+#
+# Copyright (c) 2019, Arm Limited. All rights reserved.
+#
+# SPDX-License-Identifier: BSD-3-Clause
+#
+
+set -e
+
+in_red() {
+ echo "$(tput setaf 1)${1:?}$(tput sgr0)"
+}
+export -f in_red
+
+in_green() {
+ echo "$(tput setaf 2)${1:?}$(tput sgr0)"
+}
+export -f in_green
+
+in_yellow() {
+ echo "$(tput setaf 3)${1:?}$(tput sgr0)"
+}
+export -f in_yellow
+
+print_success() {
+ in_green "$1: SUCCESS"
+}
+export -f print_success
+
+print_failure() {
+ in_red "$1: FAILURE"
+}
+export -f print_failure
+
+print_unstable() {
+ in_yellow "$1: UNSTABLE"
+}
+export -f print_unstable
+
+gen_makefile() {
+ local num="$(find -name "*.test" -type f | wc -l)"
+ local i=0
+
+ cat <<EOF >Makefile
+SHELL=/bin/bash
+
+all:
+
+EOF
+
+ # If we're using local checkouts for either TF or TFTF, we must
+ # serialise builds
+ while [ "$i" -lt "$num" ]; do
+ {
+ printf "all: %03d_run %03d_build\n" "$i" "$i"
+ if upon "$serialize_builds" && [ "$i" -gt 0 ]; then
+ printf "%03d_build: %03d_build\n" "$i" "$((i - 1))"
+ fi
+ echo
+ } >>Makefile
+ let "++i"
+ done
+
+ cat <<EOF >>Makefile
+
+%_run: %_build
+ @run_one_test "\$@"
+
+%_build:
+ @run_one_test "\$@"
+EOF
+}
+
+# This function is invoked from the Makefile. Descriptor 5 points to the active
+# terminal.
+run_one_test() {
+ id="${1%%_*}"
+ action="${1##*_}"
+ test_file="$(find -name "$id*.test" -printf "%f\n")"
+
+ mkdir -p "$id"
+
+ # Copy the test_file into the workspace directory with the name
+ # TEST_DESC, just like Jenkins would.
+ export TEST_DESC="$(basename "$test_file")"
+ cp "$test_file" "$id/TEST_DESC"
+
+ workspace="$id" test_desc="$test_file" "$ci_root/script/parse_test.sh"
+
+ set -a
+ source "$id/env"
+ set +a
+
+ # Makefiles don't like commas and colons in file names. We therefore
+ # replace them with _
+ config_subst="$(echo "$TEST_CONFIG" | tr ',:' '_')"
+ config_string="$id: $TEST_GROUP/$TEST_CONFIG"
+ workspace="$workspace/$TEST_GROUP/$config_subst"
+ mkdir -p "$workspace"
+
+ log_file="$workspace/artefacts/build.log"
+ if [ "$parallel" -gt 1 ]; then
+ console_file="$workspace/console.log"
+ exec 6>>"$console_file"
+ else
+ exec 6>&5
+ fi
+
+ # Unset make flags for build script
+ MAKEFLAGS=
+
+ case "$action" in
+ "build")
+ echo "building: $config_string" >&5
+ if ! bash $minus_x "$ci_root/script/build_package.sh" \
+ >&6 2>&1; then
+ {
+ print_failure "$config_string (build)"
+ if [ "$console_file" ]; then
+ echo " see $console_file"
+ fi
+ } >&5
+ exit 1
+ fi
+ ;;
+
+ "run")
+ # Local runs only for FVP unless asked not to
+ if echo "$RUN_CONFIG" | grep -q "^fvp" && \
+ not_upon "$skip_runs"; then
+ echo "running: $config_string" >&5
+ if bash $minus_x "$ci_root/script/run_package.sh" \
+ >&6 2>&1; then
+ if grep -q -e "--BUILD UNSTABLE--" \
+ "$log_file"; then
+ print_unstable "$config_string" >&5
+ else
+ print_success "$config_string" >&5
+ fi
+ exit 0
+ else
+ {
+ print_failure "$config_string (run)"
+ if [ "$console_file" ]; then
+ echo " see $console_file"
+ fi
+ } >&5
+ exit 1
+ fi
+ else
+ if grep -q -e "--BUILD UNSTABLE--" \
+ "$log_file"; then
+ print_unstable "$config_string (not run)" >&5
+ else
+ print_success "$config_string (not run)" >&5
+ fi
+ exit 0
+ fi
+ ;;
+
+ *)
+ in_red "Invalid action: $action!" >&5
+ exit 1
+ ;;
+ esac
+}
+export -f run_one_test
+
+workspace="${workspace:?}"
+ci_root="$(readlink -f "$(dirname "$0")/..")"
+
+# If this script was invoked with bash -x, have subsequent build/run invocations
+# to use -x as well.
+if echo "$-" | grep -q "x"; then
+ export minus_x="-x"
+fi
+
+# For a local run, when some variables are specified as "?", launch zenity to
+# prompt for the test config via a GUI. If it's "??", then choose a directory.
+if [ "$test_groups" = "?" -o "$test_groups" = "??" ]; then
+	zenity_opts=(
+	--file-selection
+	--filename="$ci_root/group/README"
+	--multiple
+	--title "Choose test config"
+	)
+
+	if [ "$test_groups" = "??" ]; then
+		zenity_opts+=("--directory")
+	fi
+
+	# In case of multiple selections, zenity returns absolute paths of files
+	# separated by '|'. We remove the pipe characters, and make the paths
+	# relative to the group directory.
+	selections="$(cd "$ci_root"; zenity "${zenity_opts[@]}")"
+	test_groups="$(echo "$selections" | tr '|' ' ')"
+	test_groups="$(echo "$test_groups" | sed "s#$ci_root/group/##g")"
+fi
+
+test_groups="${test_groups:?}"
+local_count=0
+
+if [ -z "$tf_root" ]; then
+ in_red "NOTE: NOT using local work tree for TF"
+else
+ tf_root="$(readlink -f $tf_root)"
+ tf_refspec=
+ in_green "Using local work tree for TF"
+ let "++local_count"
+fi
+
+if [ -z "$tftf_root" ]; then
+ in_red "NOTE: NOT using local work tree for TFTF"
+ tforg_user="${tforg_user:?}"
+else
+ tftf_root="$(readlink -f $tftf_root)"
+ tf_refspec=
+ in_green "Using local work tree for TFTF"
+ let "++local_count"
+fi
+
+if [ -z "$scp_root" ]; then
+ in_red "NOTE: NOT using local work tree for SCP"
+else
+ scp_root="$(readlink -f $scp_root)"
+ scp_refspec=
+ in_green "Using local work tree for SCP"
+ let "++local_count"
+fi
+
+# User preferences
+user_test_run="$test_run"
+user_dont_clean="$dont_clean"
+user_keep_going="$keep_going"
+user_primary_live="$primary_live"
+
+export ci_root
+export dont_clean=0
+export local_ci=1
+export parallel
+export test_run=0
+export primary_live=0
+
+rm -rf "$workspace"
+mkdir -p "$workspace"
+
+source "$ci_root/utils.sh"
+
+# SCP is not cloned by default
+export clone_scp
+export scp_root
+if not_upon "$scp_root" && upon "$clone_scp"; then
+ clone_scp=1
+else
+ clone_scp=0
+fi
+
+# Use clone_repos.sh to clone and share repositories that aren't local.
+no_tf="$tf_root" no_tftf="$tftf_root" no_ci="$ci_root" \
+ bash $minus_x "$ci_root/script/clone_repos.sh"
+
+set -a
+source "$workspace/env"
+set +a
+
+if [ "$local_count" -gt 0 ]; then
+ # At least one repository is local
+ serialize_builds=1
+else
+ dont_clean=0
+fi
+
+export -f upon not_upon
+
+# Generate test descriptions
+"$ci_root/script/gen_test_desc.py"
+
+# Iterate through test files in workspace
+pushd "$workspace"
+
+if not_upon "$parallel" || echo "$parallel" | grep -vq "[0-9]"; then
+ parallel=1
+ test_run="$user_test_run"
+ dont_clean="$user_dont_clean"
+ primary_live="$user_primary_live"
+fi
+
+if [ "$parallel" -gt 1 ]; then
+ msg="Running at most $parallel jobs in parallel"
+ if upon "$serialize_builds"; then
+ msg+=" (builds serialized)"
+ fi
+ msg+="..."
+fi
+
+# Generate Makefile
+gen_makefile
+
+if upon "$msg"; then
+ echo "$msg"
+ echo
+fi
+
+keep_going="${user_keep_going:-1}"
+if not_upon "$keep_going"; then
+ keep_going=
+fi
+
+MAKEFLAGS= make -r -j "$parallel" ${keep_going+-k} 5>&1 &>"make.log"
diff --git a/script/run_package.sh b/script/run_package.sh
new file mode 100755
index 0000000..382c336
--- /dev/null
+++ b/script/run_package.sh
@@ -0,0 +1,349 @@
+#!/bin/bash
+#
+# Copyright (c) 2019, Arm Limited. All rights reserved.
+#
+# SPDX-License-Identifier: BSD-3-Clause
+#
+
+set -e
+
+# Enable job control to have background processes run in their own process
+# group. That way, we can kill a background process group in one go.
+set -m
+
+ci_root="$(readlink -f "$(dirname "$0")/..")"
+source "$ci_root/utils.sh"
+
+artefacts="${artefacts-$workspace/artefacts}"
+
+run_root="$workspace/run"
+pid_dir="$workspace/pids"
+
+mkdir -p "$pid_dir"
+mkdir -p "$run_root"
+
+kill_and_reap() {
+ local gid
+
+ # Kill an active process. Ignore errors
+ [ "$1" ] || return 0
+ kill -0 "$1" &>/dev/null || return 0
+
+ # Kill the group
+ gid="$(awk '{print $5}' < /proc/$1/stat)"
+ kill -SIGKILL -- "-$gid" &>/dev/null || true
+ wait "$gid" &>/dev/null || true
+}
+
+# Perform clean up and ignore errors
+cleanup() {
+ local pid
+
+ # Test success. Kill all background processes so far and wait for them
+ pushd "$pid_dir"
+ set +e
+ while read pid; do
+ pid="$(cat $pid)"
+ kill_and_reap "$pid"
+ done < <(find -name '*.pid')
+ popd
+}
+
+# Launch a program. Have its PID saved in a file with given name with .pid
+# suffix. When the program exits, create a file with .success suffix, or one
+# with .fail if it fails. This function blocks, so the caller must '&' this if
+# they want to continue. The caller must wait for $pid_dir/$name.pid to be
+# created before reading it.
+launch() {
+ local pid
+
+ "$@" &
+ pid="$!"
+ echo "$pid" > "$pid_dir/${name:?}.pid"
+ if wait "$pid"; then
+ touch "$pid_dir/$name.success"
+ else
+ touch "$pid_dir/$name.fail"
+ fi
+}
+
+# Cleanup actions
+trap cleanup SIGINT SIGHUP SIGTERM EXIT
+
+# Prevent xterm windows from untracked terminals from popping up, especially
+# when running locally
+not_upon "$test_run" && export DISPLAY=
+
+# Source variables required for run
+source "$artefacts/env"
+
+echo
+echo "RUNNING: $TEST_CONFIG"
+echo
+
+# Accept BIN_MODE from environment, or default to release. If bin_mode is set
+# and non-empty (intended to be set from command line), that takes precedence.
+pkg_bin_mode="${BIN_MODE:-release}"
+bin_mode="${bin_mode:-$pkg_bin_mode}"
+
+# Assume 0 is the primary UART to track
+primary_uart=0
+
+# Assume 4 UARTs by default
+num_uarts="${num_uarts:-4}"
+
+# Whether to display primary UART progress live on the console
+primary_live="${primary_live-$PRIMARY_LIVE}"
+
+# Change directory so that all binaries can be accessed relative to where they
+# lie
+run_cwd="$artefacts/$bin_mode"
+cd "$run_cwd"
+
+# Source environment for run
+if [ -f "run/env" ]; then
+ source "run/env"
+fi
+
+# Fail if there was no model path set
+if [ -z "$model_path" ]; then
+ die "No model path set by package!"
+fi
+
+# Launch model with parameters
+model_out="$run_root/model_log.txt"
+run_sh="$run_root/run.sh"
+
+# Generate run.sh
+echo "$model_path \\" > "$run_sh"
+sed '/^\s*$/d' < model_params | sort | sed 's/^/\t/;s/$/ \\/' >> "$run_sh"
+echo -e "\t\"\$@\"" >> "$run_sh"
+
+echo "Model command line:"
+echo
+cat "$run_sh"
+chmod +x "$run_sh"
+echo
+
+# If it's a test run, skip all the hoops and launch model directly.
+if upon "$test_run"; then
+ "$run_sh" "$@"
+ exit 0
+fi
+
+# For an automated run, export a known variable so that we can identify stale
+# processes spawned by Trusted Firmware CI by inspecting its environment.
+export TRUSTED_FIRMWARE_CI="1"
+
+# Change directory to workspace, as all artifacts paths are relative to
+# that, and launch the model. Have model use no buffering on stdout
+: >"$model_out"
+name="model" launch stdbuf -o0 -e0 "$run_sh" &>"$model_out" &
+wait_count=0
+while :; do
+ if [ -f "$pid_dir/model.pid" ]; then
+ break
+ fi
+ sleep 0.1
+
+ let "wait_count += 1"
+ if [ "$wait_count" -gt 100 ]; then
+ die "Failed to launch model!"
+ fi
+done
+model_pid="$(cat "$pid_dir/model.pid")"
+
+ports_output="$(mktempfile)"
+if not_upon "$ports_script"; then
+ # Default AWK script to parse model ports
+ ports_script="$(mktempfile)"
+ cat <<'EOF' >"$ports_script"
+/terminal_0/ { ports[0] = $NF }
+/terminal_1/ { ports[1] = $NF }
+/terminal_2/ { ports[2] = $NF }
+/terminal_3/ { ports[3] = $NF }
+END {
+ for (i = 0; i < num_uarts; i++) {
+ if (ports[i] != "")
+ print "ports[" i "]=" ports[i]
+ }
+}
+EOF
+fi
+
+# Start a watchdog to kill ourselves if we wait too long for the model
+# response. Note that this is not the timeout for the whole test, but only for
+# the Model to output port numbers.
+(
+if upon "$jenkins_run"; then
+ # Increase this timeout for a cluster run, as it could take longer if
+ # the load on the Jenkins server is high.
+ model_wait_timeout=120
+else
+ model_wait_timeout=30
+fi
+sleep $model_wait_timeout
+echo "Model wait timeout!"
+kill "$$"
+) &
+watchdog="$!"
+
+# Parse UARTs ports from early model output. Send a SIGSTOP to the model
+# as soon as it outputs all UART ports. This is to prevent the model
+# executing before the expect scripts get a chance to connect to the
+# UART thereby losing messages.
+model_fail=1
+while :; do
+ awk -v "num_uarts=$num_uarts" -f "$ports_script" "$model_out" \
+ > "$ports_output"
+ if [ $(wc -l < "$ports_output") -eq "$num_uarts" ]; then
+ kill -SIGSTOP "$model_pid"
+ model_fail=0
+ break
+ fi
+
+ # Bail out if model exited meanwhile
+ if ! kill -0 "$model_pid" &>/dev/null; then
+ echo "Model terminated unexpectedly!"
+ break
+ fi
+done
+
+# Kill the watch dog
+kill_and_reap "$watchdog" || true
+
+# Check the model had failed meanwhile, for some reason
+if [ "$model_fail" -ne 0 ]; then
+ exit 1
+fi
+
+# The wait loop above exited after model port numbers have been parsed. The
+# script's output is ready to be sourced now.
+declare -a ports
+source "$ports_output"
+rm -f "$ports_output"
+if [ "${#ports[@]}" -ne "$num_uarts" ]; then
+ echo "Failed to get UART port numbers"
+ kill_and_reap "$model_pid"
+ unset model_pid
+fi
+
+# Launch expect scripts for all UARTs
+uarts=0
+for u in $(seq 0 $num_uarts | tac); do
+ script="run/uart$u/expect"
+ if [ -f "$script" ]; then
+ script="$(cat "$script")"
+ else
+ script=
+ fi
+
+ # Primary UART must have a script
+ if [ -z "$script" ]; then
+ if [ "$u" = "$primary_uart" ]; then
+ die "No primary UART script!"
+ else
+ continue
+ fi
+ fi
+
+ timeout="run/uart$u/timeout"
+ if [ -f "$timeout" ]; then
+ timeout="$(cat "$timeout")"
+ else
+ timeout=
+ fi
+ timeout="${timeout-600}"
+
+ full_log="$run_root/uart${u}_full.txt"
+
+ if [ "$u" = "$primary_uart" ]; then
+ star="*"
+ uart_name="primary_uart"
+ else
+ star=" "
+ uart_name="uart$u"
+ fi
+
+ # Launch expect after exporting required variables
+ (
+ if [ -f "run/uart$u/env" ]; then
+ set -a
+ source "run/uart$u/env"
+ set +a
+ fi
+
+ if [ "$u" = "$primary_uart" ] && upon "$primary_live"; then
+ uart_port="${ports[$u]}" timeout="$timeout" \
+ name="$uart_name" launch expect -f "$ci_root/expect/$script" | \
+ tee "$full_log"
+ echo
+ else
+ uart_port="${ports[$u]}" timeout="$timeout" \
+ name="$uart_name" launch expect -f "$ci_root/expect/$script" \
+ &>"$full_log"
+ fi
+
+ ) &
+
+ let "uarts += 1"
+ echo "Tracking UART$u$star with $script; timeout $timeout."
+done
+
+# Wait here long 'enough' for expect scripts to connect to ports; then
+# let the model proceed
+sleep 2
+kill -SIGCONT "$model_pid"
+
+# Wait for all children. Note that the wait below is *not* a timed wait.
+result=0
+
+set +e
+pushd "$pid_dir"
+while :; do
+ wait -n
+
+ # Exit failure if we've any failures
+ if [ "$(wc -l < <(find -name '*.fail'))" -ne 0 ]; then
+ result=1
+ break
+ fi
+
+ # We're done if the primary UART exits success
+ if [ -f "$pid_dir/primary_uart.success" ]; then
+ break
+ fi
+done
+popd
+
+cleanup
+
+if [ "$result" -eq 0 ]; then
+ echo "Test success!"
+else
+ echo "Test failed!"
+fi
+
+if upon "$jenkins_run"; then
+ echo
+ echo "Artefacts location: $BUILD_URL."
+ echo
+fi
+
+if upon "$jenkins_run" && upon "$artefacts_receiver" && [ -d "$workspace/run" ]; then
+ pushd "$workspace"
+ run_archive="run.tar.xz"
+ tar -cJf "$run_archive" "run"
+ where="$artefacts_receiver/${TEST_GROUP:?}/${TEST_CONFIG:?}/$run_archive"
+ where+="?j=$JOB_NAME&b=$BUILD_NUMBER"
+ if wget -q --method=PUT --body-file="$run_archive" "$where"; then
+ echo "Run logs submitted to $where."
+ else
+ echo "Error submitting run logs to $where."
+ fi
+ popd
+fi
+
+exit "$result"
+
+# vim: set tw=80 sw=8 noet:
diff --git a/script/run_tools_through_valgrind.sh b/script/run_tools_through_valgrind.sh
new file mode 100755
index 0000000..c437868
--- /dev/null
+++ b/script/run_tools_through_valgrind.sh
@@ -0,0 +1,67 @@
+#!/bin/bash
+#
+# Copyright (c) 2019, Arm Limited. All rights reserved.
+#
+# SPDX-License-Identifier: BSD-3-Clause
+#
+
+set -e
+
+ci_root="$(readlink -f "$(dirname "$0")/..")"
+source "$ci_root/utils.sh"
+
+# Change directory to the TF-A checkout ready to build
+cd "$TF_CHECKOUT_LOC"
+
+# Build TF-A to get blx.bin images and the tools (fiptool and cert_create)
+# Debug build enabled so that valgrind has access to source file line numbers
+if ! make CROSS_COMPILE="aarch64-linux-gnu-" all fiptool certtool DEBUG=1 V=1 \
+ &>"$workspace/build.log"; then
+ echo "Error building tools; see archived build.log"
+ exit 1
+fi
+
+run_valgrind() {
+ valgrind --leak-check=full -v --log-file="$log_file" $*
+ echo
+ if ! grep -iq "All heap blocks were freed -- no leaks are possible" \
+ "$log_file"; then
+ echo "Memory leak reported in $log_file"
+ return 1
+ fi
+ return 0
+}
+
+has_leak=0
+
+fiptool_cmd="./tools/fiptool/fiptool \
+ create \
+ --tb-fw build/fvp/debug/bl2.bin \
+ --soc-fw build/fvp/debug/bl31.bin \
+ fip.bin"
+
+# Build the FIP under Valgrind
+if ! log_file="$workspace/fiptool.log" run_valgrind "$fiptool_cmd"; then
+ echo "fiptool has memory leaks."
+ has_leak=1
+else
+ echo "fiptool does not have memory leaks."
+fi
+
+echo
+
+cert_create_cmd="./tools/cert_create/cert_create \
+ -n \
+ --tb-fw build/fvp/debug/bl2.bin"
+
+# Run cert_create under Valgrind
+if ! log_file="$workspace/cert_create.log" run_valgrind "$cert_create_cmd"; then
+ echo "cert_create has memory leaks."
+ has_leak=1
+else
+ echo "cert_create does not have memory leaks."
+fi
+
+echo
+
+exit "$has_leak"
diff --git a/script/scratch_scripts/README b/script/scratch_scripts/README
new file mode 100644
index 0000000..590071b
--- /dev/null
+++ b/script/scratch_scripts/README
@@ -0,0 +1,9 @@
+#
+# Copyright (c) 2019, Arm Limited. All rights reserved.
+#
+# SPDX-License-Identifier: BSD-3-Clause
+#
+
+The scripts in this folder are a copy of the scripts in the projectscratch
+folder. The two copies should be kept in sync; deploy_scratch.sh is a helper
+script that copies them over.
diff --git a/script/scratch_scripts/delete_old_workspaces.sh b/script/scratch_scripts/delete_old_workspaces.sh
new file mode 100755
index 0000000..d34873e
--- /dev/null
+++ b/script/scratch_scripts/delete_old_workspaces.sh
@@ -0,0 +1,10 @@
+#!/bin/bash
+#
+# Copyright (c) 2019, Arm Limited. All rights reserved.
+#
+# SPDX-License-Identifier: BSD-3-Clause
+#
+
+# Cleanup work spaces older than a day
+cd /arm/projectscratch/ssg/trusted-fw/ci-workspace
+find -maxdepth 1 \( -not -name . -a -mtime +1 \) -exec rm -rf '{}' +
diff --git a/script/scratch_scripts/deploy_scratch.sh b/script/scratch_scripts/deploy_scratch.sh
new file mode 100755
index 0000000..0c39ab8
--- /dev/null
+++ b/script/scratch_scripts/deploy_scratch.sh
@@ -0,0 +1,31 @@
+#!/bin/bash
+#
+# Copyright (c) 2019, Arm Limited. All rights reserved.
+#
+# SPDX-License-Identifier: BSD-3-Clause
+#
+ci_root="$(readlink -f "$(dirname "$0")/../..")"
+source "$ci_root/utils.sh"
+
+if ! ls $project_filer/ci-scripts/
+then
+ echo "make sure /arm is mounted, if it is not, it can be mounted with the following command:" >&2
+ echo "sudo sshfs [USER]@login1.euhpc2.arm.com:/arm /arm -o allow_other,reconnect" >&2
+ echo "note that the euhpc and euhpc2 have different /arm mounts" >&2
+ exit 1
+fi
+
+COMMAND="cp $ci_root/script/scratch_scripts/* $project_filer/ci-scripts/"
+FILES=`ls -al "$ci_root"/script/scratch_scripts/*`
+
+echo "files to be copied:"
+echo "$FILES"
+echo ""
+echo "####DANGER### POTENTIAL FOR DAMAGE, CHECK THIS COMMAND"
+echo "command to be run: \"$COMMAND\""
+read -p "Run this command [Y/n]: "
+echo
+if [[ $REPLY =~ ^[Yy]$ ]]
+then
+ eval "$COMMAND"
+fi
diff --git a/script/scratch_scripts/initial_clone.sh b/script/scratch_scripts/initial_clone.sh
new file mode 100755
index 0000000..94c9de5
--- /dev/null
+++ b/script/scratch_scripts/initial_clone.sh
@@ -0,0 +1,87 @@
+#!/bin/bash
+#
+# Copyright (c) 2019, Arm Limited. All rights reserved.
+#
+# SPDX-License-Identifier: BSD-3-Clause
+#
+
+# This script is meant to be run from Jenkins to make an initial clone of the
+# CI repository.
+#
+# - If CI_ROOT is set, we assume that a parent job has already cloned required
+# repositories; so we skip further cloning. However, in order to prevent this
+# job from potentially cleaning up the filer workspace (which is the
+# responsibility of the parent job which did the original clone), we unset
+# the FILER_WS variable in the env file.
+#
+# - Otherwise, we call clone_repos.sh to have all required repositories to be
+# cloned.
+#
+# Note that, since this file resides in the repository itself, a copy of this
+# file must be fetched with 'wget'. I.e., any changes to this file must be
+# committed first to the CI repository master for them to take effect!
+
+strip_var() {
+ local var="$1"
+ local val="$(echo "${!var}" | sed 's#^\s*\|\s*$##g')"
+ eval "$var=$val"
+}
+
+strip_var CI_REFSPEC
+
+if [ "$CI_ENVIRONMENT" ]; then
+ tmpfile="$(mktemp --tmpdir="$WORKSPACE")"
+ echo "$CI_ENVIRONMENT" > "$tmpfile"
+ set -a
+ source "$tmpfile"
+ set +a
+fi
+
+if [ "$CI_ROOT" ]; then
+ # We're not going to clone repos; so prevent this job from cleaning up
+ # filer workspace.
+ echo "FILER_WS=" > env
+
+	# Resetting a variable doesn't seem to work on the new Jenkins
+	# instance, so use a different variable altogether instead.
+ echo "DONT_CLEAN_WS=1" >> env
+
+ exit 0
+fi
+
+# If no CI ref specs were explicitly specified, but was triggered from a CI
+# Gerrit trigger, move to the Gerrit refspec instead so that we use the expected
+# version of clone_repos.sh.
+if [ -z "$CI_REFSPEC" ] && [ "$REPO_UNDER_TEST" = "trusted-firmware-ci" ] && \
+ [ "$GERRIT_REFSPEC" ]; then
+ CI_REFSPEC="$GERRIT_REFSPEC"
+fi
+
+# Clone CI repository and move to the refspec
+git clone -q --depth 1 \
+ http://ssg-sw.cambridge.arm.com/gerrit/pdswinf/ci/pdcs-platforms/platform-ci
+
+if [ "$CI_REFSPEC" ]; then
+	# Only recent Git versions support fetching refs via commit IDs.
+ # However, platform slaves have been updated to a version that can do
+ # this (https://jira.arm.com/browse/SSGSWINF-1426). The module load
+ # commands have been commented out since.
+ #
+ # source /arm/tools/setup/init/bash
+ # module load swdev
+ # module load git/git/2.14.3
+
+ pushd platform-ci &>/dev/null
+ git fetch -q --depth 1 origin "$CI_REFSPEC"
+ git checkout -q FETCH_HEAD
+ echo "CI repo checked out to $CI_REFSPEC"
+ popd &>/dev/null
+fi
+
+if ! platform-ci/trusted-fw/new-ci/script/clone_repos.sh; then
+ echo "clone_repos.sh failed!"
+ cat clone_repos.log
+ exit 1
+fi
+
+# vim:set tw=80 sw=8 sts=8 noet:
diff --git a/script/scratch_scripts/initial_clone_temp.sh b/script/scratch_scripts/initial_clone_temp.sh
new file mode 100755
index 0000000..ef762bf
--- /dev/null
+++ b/script/scratch_scripts/initial_clone_temp.sh
@@ -0,0 +1,123 @@
+#!/bin/bash
+#
+# Copyright (c) 2019, Arm Limited. All rights reserved.
+#
+# SPDX-License-Identifier: BSD-3-Clause
+#
+
+# This script is meant to be run from Jenkins to make an initial clone of the
+# CI repository.
+#
+# - If CI_SCRATCH is set, we assume that a parent job has already cloned
+# required repositories; so we skip further cloning.
+#
+# - Otherwise, we call clone_repos.sh to have all required repositories to be
+# cloned.
+#
+# Note that, since this file resides in the repository itself, a copy of this
+# file must be fetched with 'wget'. I.e., any changes to this file must be
+# committed first to the CI repository master for them to take effect!
+
+set -e
+set -x
+
+strip_var() {
+ local var="$1"
+ local val="$(echo "${!var}" | sed 's#^\s*\|\s*$##g')"
+ eval "$var=$val"
+}
+
+set_ci_root() {
+ export ci_root=`pwd`/"platform-ci"
+ export CI_ROOT=$ci_root
+}
+
+strip_var CI_REFSPEC
+
+if [ "$CI_ENVIRONMENT" ]; then
+ tmpfile="$(mktemp --tmpdir="$WORKSPACE")"
+ echo "$CI_ENVIRONMENT" | tr ' ' '\n' > "$tmpfile"
+ set -a
+ source "$tmpfile"
+ set +a
+fi
+
+if [ "$CI_SCRATCH" ]; then
+ if [ ! -d "$CI_SCRATCH" ]; then
+ echo "\$CI_SCRATCH is stale; ignored."
+ else
+ # Copy environment and parameter file from scratch to this job's
+ # workspace
+ cp "$CI_SCRATCH/env" .
+ cp "$CI_SCRATCH/env.param" .
+ find "$CI_SCRATCH" -name "*.data" -exec cp -t . '{}' +
+
+ exit 0
+ fi
+fi
+
+# If no CI ref specs were explicitly specified, but was triggered from a CI
+# Gerrit trigger, move to the Gerrit refspec instead so that we use the expected
+# version of clone_repos.sh.
+if [ -z "$CI_REFSPEC" ] && [ "$REPO_UNDER_TEST" = "trusted-firmware-ci" ] && \
+ [ "$GERRIT_REFSPEC" ]; then
+ export CI_REFSPEC="$GERRIT_REFSPEC"
+fi
+
+# Clone CI repository and move to the refspec
+if [ ! -d "platform-ci" ]
+then
+ git clone -q --depth 1 \
+ --reference /arm/projectscratch/ssg/trusted-fw/ref-repos/trusted-firmware-ci \
+ http://ssg-sw.cambridge.arm.com/gerrit/pdswinf/ci/pdcs-platforms/platform-ci
+else
+ pushd platform-ci
+ git fetch
+ popd
+fi
+
+# Set CI_ROOT as a fallback
+set_ci_root
+echo "CI_ROOT=$ci_root" >> env
+
+if [ "$CI_REFSPEC" ]; then
+	# Only recent Git versions support fetching refs via commit IDs.
+ # However, platform slaves have been updated to a version that can do
+ # this (https://jira.arm.com/browse/SSGSWINF-1426). The module load
+ # commands have been commented out since.
+ #
+ # source /arm/tools/setup/init/bash
+ # module load swdev
+ # module load git/git/2.14.3
+
+ # Translate refspec if supported
+ if [ -x "$ci_root/script/translate_refspec.py" ]; then
+ CI_REFSPEC="$("$ci_root/script/translate_refspec.py" \
+ -p trusted-firmware-ci "$CI_REFSPEC")"
+ fi
+
+ pushd platform-ci &>/dev/null
+ git fetch -q --depth 1 origin "$CI_REFSPEC"
+ git checkout -q FETCH_HEAD
+ echo
+ echo "Initial CI repo checked out to '$CI_REFSPEC'."
+ popd &>/dev/null
+fi
+
+if [ "$ci_only" ]; then
+ exit 0
+fi
+
+if echo "$-" | grep -q "x"; then
+ minus_x="-x"
+fi
+
+if ! bash $minus_x "$ci_root/script/clone_repos.sh"; then
+ echo "clone_repos.sh failed!"
+ cat clone_repos.log
+ exit 1
+fi
+
+set_ci_root
+
+# vim:set tw=80 sw=8 sts=8 noet:
diff --git a/script/scratch_scripts/initial_clone_v2.5.sh b/script/scratch_scripts/initial_clone_v2.5.sh
new file mode 100755
index 0000000..5630831
--- /dev/null
+++ b/script/scratch_scripts/initial_clone_v2.5.sh
@@ -0,0 +1,128 @@
+#!/bin/bash
+#
+# Copyright (c) 2019, Arm Limited. All rights reserved.
+#
+# SPDX-License-Identifier: BSD-3-Clause
+#
+
+# This script is meant to be run from Jenkins to make an initial clone of the
+# CI repository.
+#
+# - If CI_SCRATCH is set, we assume that a parent job has already cloned
+# required repositories; so we skip further cloning.
+#
+# - Otherwise, we call clone_repos.sh to have all required repositories to be
+# cloned.
+#
+# Note that, since this file resides in the repository itself, a copy of this
+# file must be fetched with 'wget'. I.e., any changes to this file must be
+# committed first to the CI repository master for them to take effect!
+
+set -e
+
+strip_var() {
+ local var="$1"
+ local val="$(echo "${!var}" | sed 's#^\s*\|\s*$##g')"
+ eval "$var=$val"
+}
+
+set_ci_root() {
+ ci_root=`pwd`/"platform-ci"
+ CI_ROOT=$ci_root
+}
+
+strip_var CI_REFSPEC
+
+if [ ! -z $PROJECT ]; then
+ export REPO_UNDER_TEST=`basename $PROJECT`
+ echo "REPO_UNDER_TEST is blank, but PROJECT is set, setting REPO_UNDER_TEST based on PROJECT"
+ echo "REPO_UNDER_TEST=$REPO_UNDER_TEST"
+ echo "REPO_UNDER_TEST=$REPO_UNDER_TEST" >> env
+fi
+
+if [ "$CI_ENVIRONMENT" ]; then
+ tmpfile="$(mktemp --tmpdir="$WORKSPACE")"
+ echo "$CI_ENVIRONMENT" | tr ' ' '\n' > "$tmpfile"
+ set -a
+ source "$tmpfile"
+ set +a
+fi
+
+if [ "$CI_SCRATCH" ]; then
+ if [ ! -d "$CI_SCRATCH" ]; then
+ echo "\$CI_SCRATCH is stale; ignored."
+ else
+ # Copy environment and parameter file from scratch to this job's
+ # workspace
+ cp "$CI_SCRATCH/env" .
+ cp "$CI_SCRATCH/env.param" .
+ find "$CI_SCRATCH" -name "*.data" -exec cp -t . '{}' +
+
+ exit 0
+ fi
+fi
+
+# If no CI ref specs were explicitly specified, but was triggered from a CI
+# Gerrit trigger, move to the Gerrit refspec instead so that we use the expected
+# version of clone_repos.sh.
+if [ -z "$CI_REFSPEC" ] && [ "$REPO_UNDER_TEST" = "trusted-firmware-ci" ] && \
+ [ "$GERRIT_REFSPEC" ]; then
+ export CI_REFSPEC="$GERRIT_REFSPEC"
+fi
+
+# Clone CI repository and move to the refspec
+if [ ! -d "platform-ci" ]
+then
+git clone -q --depth 1 \
+ --reference /arm/projectscratch/ssg/trusted-fw/ref-repos/trusted-firmware-ci \
+ http://ssg-sw.cambridge.arm.com/gerrit/pdswinf/ci/pdcs-platforms/platform-ci
+else
+ pushd platform-ci
+ git fetch
+ git checkout origin/master
+ popd
+fi
+
+set_ci_root
+# Set CI_ROOT as a fallback
+echo "CI_ROOT=$ci_root" >> env
+
+if [ "$CI_REFSPEC" ]; then
+	# Only recent Git versions support fetching refs via commit IDs.
+ # However, platform slaves have been updated to a version that can do
+ # this (https://jira.arm.com/browse/SSGSWINF-1426). The module load
+ # commands have been commented out since.
+ #
+ # source /arm/tools/setup/init/bash
+ # module load swdev
+ # module load git/git/2.14.3
+
+ # Translate refspec if supported
+ if [ -x "$ci_root/script/translate_refspec.py" ]; then
+ CI_REFSPEC="$("$ci_root/script/translate_refspec.py" \
+ -p trusted-firmware-ci -s arm "$CI_REFSPEC")"
+ fi
+
+ pushd platform-ci &>/dev/null
+ git fetch -q --depth 1 origin "$CI_REFSPEC"
+ git checkout -q FETCH_HEAD
+ echo
+ echo "Initial CI repo checked out to '$CI_REFSPEC'."
+ popd &>/dev/null
+fi
+
+if [ "$ci_only" ]; then
+ exit 0
+fi
+
+if echo "$-" | grep -q "x"; then
+ minus_x="-x"
+fi
+
+if ! bash $minus_x "$ci_root/script/clone_repos.sh"; then
+ echo "clone_repos.sh failed!"
+ cat clone_repos.log
+ exit 1
+fi
+
+# vim:set tw=80 sw=8 sts=8 noet:
diff --git a/script/scratch_scripts/initial_clone_v2.sh b/script/scratch_scripts/initial_clone_v2.sh
new file mode 100755
index 0000000..0836fe1
--- /dev/null
+++ b/script/scratch_scripts/initial_clone_v2.sh
@@ -0,0 +1,108 @@
+#!/bin/bash
+#
+# Copyright (c) 2019, Arm Limited. All rights reserved.
+#
+# SPDX-License-Identifier: BSD-3-Clause
+#
+
+# This script is meant to be run from Jenkins to make an initial clone of the
+# CI repository.
+#
+# - If CI_SCRATCH is set, we assume that a parent job has already cloned
+# required repositories; so we skip further cloning.
+#
+# - Otherwise, we call clone_repos.sh to have all required repositories to be
+# cloned.
+#
+# Note that, since this file resides in the repository itself, a copy of this
+# file must be fetched with 'wget'. I.e., any changes to this file must be committed first
+# to the CI repository master for it to take effect!
+
+set -e
+
+strip_var() {
+ local var="$1"
+ local val="$(echo "${!var}" | sed 's#^\s*\|\s*$##g')"
+ eval "$var=$val"
+}
+
+strip_var CI_REFSPEC
+
+if [ "$CI_ENVIRONMENT" ]; then
+ tmpfile="$(mktemp --tmpdir="$WORKSPACE")"
+ echo "$CI_ENVIRONMENT" | tr ' ' '\n' > "$tmpfile"
+ set -a
+ source "$tmpfile"
+ set +a
+fi
+
+if [ "$CI_SCRATCH" ]; then
+ if [ ! -d "$CI_SCRATCH" ]; then
+ echo "\$CI_SCRATCH is stale; ignored."
+ else
+ # Copy environment and parameter file from scratch to this job's
+ # workspace
+ cp "$CI_SCRATCH/env" .
+ cp "$CI_SCRATCH/env.param" .
+ find "$CI_SCRATCH" -name "*.data" -exec cp -t . '{}' +
+
+ exit 0
+ fi
+fi
+
+# If no CI ref specs were explicitly specified, but was triggered from a CI
+# Gerrit trigger, move to the Gerrit refspec instead so that we use the expected
+# version of clone_repos.sh.
+if [ -z "$CI_REFSPEC" ] && [ "$REPO_UNDER_TEST" = "trusted-firmware-ci" ] && \
+ [ "$GERRIT_REFSPEC" ]; then
+ export CI_REFSPEC="$GERRIT_REFSPEC"
+fi
+
+# Clone CI repository and move to the refspec
+git clone -q --depth 1 \
+ --reference /arm/projectscratch/ssg/trusted-fw/ref-repos/trusted-firmware-ci \
+ http://ssg-sw.cambridge.arm.com/gerrit/pdswinf/ci/pdcs-platforms/platform-ci
+
+# Set CI_ROOT as a fallback
+ci_root="platform-ci/trusted-fw/new-ci"
+echo "CI_ROOT=$ci_root" >> env
+
+if [ "$CI_REFSPEC" ]; then
+ # Only recent Git versions support fetching refs via. commit IDs.
+ # However, platform slaves have been updated to a version that can do
+ # this (https://jira.arm.com/browse/SSGSWINF-1426). The module load
+ # commands have been commented out since.
+ #
+ # source /arm/tools/setup/init/bash
+ # module load swdev
+ # module load git/git/2.14.3
+
+	# Translate refspec if supported
+ if [ -x "$ci_root/script/translate_refspec.py" ]; then
+ CI_REFSPEC="$("$ci_root/script/translate_refspec.py" \
+ -p trusted-firmware-ci "$CI_REFSPEC")"
+ fi
+
+ pushd platform-ci &>/dev/null
+ git fetch -q --depth 1 origin "$CI_REFSPEC"
+ git checkout -q FETCH_HEAD
+ echo
+ echo "Initial CI repo checked out to '$CI_REFSPEC'."
+ popd &>/dev/null
+fi
+
+if [ "$ci_only" ]; then
+ exit 0
+fi
+
+if echo "$-" | grep -q "x"; then
+ minus_x="-x"
+fi
+
+if ! bash $minus_x "$ci_root/script/clone_repos.sh"; then
+ echo "clone_repos.sh failed!"
+ cat clone_repos.log
+ exit 1
+fi
+
+# vim:set tw=80 sw=8 sts=8 noet:
diff --git a/script/scratch_scripts/initial_clone_v3.sh b/script/scratch_scripts/initial_clone_v3.sh
new file mode 100755
index 0000000..5f4e3fc
--- /dev/null
+++ b/script/scratch_scripts/initial_clone_v3.sh
@@ -0,0 +1,106 @@
+#!/bin/bash
+#
+# Copyright (c) 2019, Arm Limited. All rights reserved.
+#
+# SPDX-License-Identifier: BSD-3-Clause
+#
+
+# This script is meant to be run from Jenkins to make an initial clone of the
+# CI repository.
+#
+# - If CI_SCRATCH is set, we assume that a parent job has already cloned
+# required repositories; so we skip further cloning.
+#
+# - Otherwise, we call clone_repos.sh to have all required repositories to be
+# cloned.
+#
+# Note that, since this file resides in the repository itself, a copy of this
+# file must be fetched with 'wget'. I.e., any changes to this file must be committed first
+# to the CI repository master for it to take effect!
+
+set -e
+
+strip_var() {
+ local var="$1"
+ local val="$(echo "${!var}" | sed 's#^\s*\|\s*$##g')"
+ eval "$var=$val"
+}
+
+strip_var CI_REFSPEC
+
+if [ "$CI_ENVIRONMENT" ]; then
+ tmpfile="$(mktemp --tmpdir="$WORKSPACE")"
+ echo "$CI_ENVIRONMENT" | tr ' ' '\n' > "$tmpfile"
+ set -a
+ source "$tmpfile"
+ set +a
+fi
+
+if [ "$CI_SCRATCH" ]; then
+ if [ ! -d "$CI_SCRATCH" ]; then
+ echo "\$CI_SCRATCH is stale; ignored."
+ else
+ # Copy environment and parameter file from scratch to this job's
+ # workspace
+ cp "$CI_SCRATCH/env" .
+ cp "$CI_SCRATCH/env.param" .
+ find "$CI_SCRATCH" -name "*.data" -exec cp -t . '{}' +
+
+ exit 0
+ fi
+fi
+
+# If no CI ref specs were explicitly specified, but was triggered from a CI
+# Gerrit trigger, move to the Gerrit refspec instead so that we use the expected
+# version of clone_repos.sh.
+if [ -z "$CI_REFSPEC" ] && [ "$REPO_UNDER_TEST" = "tf-a-ci" ] && \
+ [ "$GERRIT_REFSPEC" ]; then
+ export CI_REFSPEC="$GERRIT_REFSPEC"
+fi
+
+ci_root=`pwd`/"tf-a-ci"
+# Clone CI repository and move to the refspec
+git clone -q --depth 1 \
+ --reference /arm/projectscratch/ssg/trusted-fw/ref-repos/tf-a-ci \
+ https://gerrit.oss.arm.com/trusted-firmware/tf-a-ci $ci_root
+
+# Set CI_ROOT as a fallback
+echo "CI_ROOT=$ci_root" >> env
+export CI_ROOT=$ci_root
+echo "CI_ROOT:"$CI_ROOT
+
+if [ "$CI_REFSPEC" ]; then
+ # Only recent Git versions support fetching refs via. commit IDs.
+ # However, platform slaves have been updated to a version that can do
+ # this (https://jira.arm.com/browse/SSGSWINF-1426). The module load
+ # commands have been commented out since.
+ #
+ # source /arm/tools/setup/init/bash
+ # module load swdev
+ # module load git/git/2.14.3
+
+	# Translate refspec if supported
+ if [ -x "$ci_root/script/translate_refspec.py" ]; then
+ CI_REFSPEC="$("$ci_root/script/translate_refspec.py" \
+ -p tf-a-ci "$CI_REFSPEC")"
+ fi
+
+ pushd $ci_root &>/dev/null
+ git fetch -q --depth 1 origin "$CI_REFSPEC"
+ git checkout -q FETCH_HEAD
+ echo
+ echo "Initial CI repo checked out to '$CI_REFSPEC'."
+ popd &>/dev/null
+fi
+
+if [ "$ci_only" ]; then
+ exit 0
+fi
+
+if ! "$ci_root/script/clone_repos.sh"; then
+ echo "clone_repos.sh failed!"
+ cat clone_repos.log
+ exit 1
+fi
+
+# vim:set tw=80 sw=8 sts=8 noet:
diff --git a/script/scratch_scripts/post_build_setup.sh b/script/scratch_scripts/post_build_setup.sh
new file mode 100644
index 0000000..02741b3
--- /dev/null
+++ b/script/scratch_scripts/post_build_setup.sh
@@ -0,0 +1,36 @@
+#!/bin/bash
+#
+# Copyright (c) 2019, Arm Limited. All rights reserved.
+#
+# SPDX-License-Identifier: BSD-3-Clause
+#
+
+# THIS SCRIPT IS SOURCED!
+#
+# This script exists only to obtain a meaningful value for $CI_ROOT, the root
+# directory for CI scripts, from which other post-build scripts are executed.
+# Normally, $CI_ROOT *would* be available via. environment injection, but if a job
+# failed in its early stages, it wouldn't.
+
+# Although env file is meant to be sourced, RHS might have white spaces in it,
+# so sourcing will fail.
+set_ci_root() {
+ if [ -d "platform-ci/trusted-fw/new-ci" ]
+ then
+ ci_root="platform-ci/trusted-fw/new-ci"
+ else
+ ci_root="platform-ci"
+ fi
+}
+if [ -f "$WORKSPACE/env" ]; then
+ source "$WORKSPACE/env" 2>/dev/null || true
+fi
+
+if [ -z "$CI_ROOT" ] && [ -d "$WORKSPACE/platform-ci" ]; then
+ set_ci_root
+ CI_ROOT=$ci_root
+fi
+
+if [ -z "$CI_ROOT" ]; then
+	echo "warning: couldn't determine value for \$CI_ROOT"
+fi
diff --git a/script/scratch_scripts/post_build_setup_v2.sh b/script/scratch_scripts/post_build_setup_v2.sh
new file mode 100644
index 0000000..9bf719a
--- /dev/null
+++ b/script/scratch_scripts/post_build_setup_v2.sh
@@ -0,0 +1,27 @@
+#!/bin/bash
+#
+# Copyright (c) 2019, Arm Limited. All rights reserved.
+#
+# SPDX-License-Identifier: BSD-3-Clause
+#
+
+# THIS SCRIPT IS SOURCED!
+#
+# This script exists only to obtain a meaningful value for $CI_ROOT, the root
+# directory for CI scripts, from which other post-build scripts are executed.
+# Normally, $CI_ROOT *would* be available via. environment injection, but if a job
+# failed in its early stages, it wouldn't.
+
+# Although env file is meant to be sourced, RHS might have white spaces in it,
+# so sourcing will fail.
+if [ -f "$WORKSPACE/env" ]; then
+ source "$WORKSPACE/env" 2>/dev/null || true
+fi
+
+if [ -z "$CI_ROOT" ] && [ -d "$WORKSPACE/tf-a-ci" ]; then
+ CI_ROOT="$WORKSPACE/tf-a-ci"
+fi
+
+if [ -z "$CI_ROOT" ]; then
+	echo "warning: couldn't determine value for \$CI_ROOT"
+fi
diff --git a/script/static-checks/check-copyright.py b/script/static-checks/check-copyright.py
new file mode 100755
index 0000000..350381b
--- /dev/null
+++ b/script/static-checks/check-copyright.py
@@ -0,0 +1,187 @@
+#!/usr/bin/env python3
+#
+# Copyright (c) 2019, Arm Limited. All rights reserved.
+#
+# SPDX-License-Identifier: BSD-3-Clause
+#
+
+"""
+Check if a given file includes the copyright boiler plate.
+This checker supports the following comment styles:
+ * Used by .c, .h, .S, .dts and .dtsi files
+ # Used by Makefile (including .mk)
+"""
+
+import argparse
+import datetime
+import collections
+import fnmatch
+import shlex
+import os
+import re
+import sys
+import utils
+from itertools import islice
+
+# File extensions to check
+VALID_FILE_EXTENSIONS = ('.c', '.S', '.h', 'Makefile', '.mk', '.dts', '.dtsi', '.ld')
+
+# Paths inside the tree to ignore. Hidden folders and files are always ignored.
+# They mustn't end in '/'.
+IGNORED_FOLDERS = (
+ 'include/lib/libfdt',
+ 'lib/compiler-rt',
+ 'lib/libfdt',
+ 'lib/zlib'
+)
+
+# List of ignored files in folders that aren't ignored.
+# NOTE: the trailing comma is required — without it a one-element
+# parenthesized string is a str, not a tuple, and `in` tests would
+# silently do substring matching instead of exact membership.
+IGNORED_FILES = (
+    'include/tools_share/uuid.h',
+)
+
+# Supported comment styles (Python regex)
+COMMENT_PATTERN = '^(( \* ?)|(\# ?))'
+
+# License pattern to match
+LICENSE_PATTERN = '''(?P<copyright_prologue>
+{0}Copyright \(c\) (?P<years>[0-9]{{4}}(-[0-9]{{4}})?), (Arm Limited|ARM Limited and Contributors)\. All rights reserved\.$
+{0}$
+{0}SPDX-License-Identifier: BSD-3-Clause$
+)'''.format(
+ COMMENT_PATTERN
+)
+
+# Compiled license pattern
+RE_PATTERN = re.compile(LICENSE_PATTERN, re.MULTILINE)
+
+COPYRIGHT_OK = 0
+COPYRIGHT_ERROR = 1
+COPYRIGHT_WARNING = 2
+
+def check_copyright(path):
+ '''Checks a file for a correct copyright header.'''
+
+ with open(path) as file_:
+ file_content = file_.read()
+
+ if RE_PATTERN.search(file_content):
+ return COPYRIGHT_OK
+
+ for line in file_content.split('\n'):
+ if 'SPDX-License-Identifier' in line:
+ if ('BSD-3-Clause' in line or
+ 'BSD-2-Clause-FreeBSD' in line):
+ return COPYRIGHT_WARNING
+ break
+
+ return COPYRIGHT_ERROR
+
+
+def main(args):
+    '''Check copyright headers of the selected files and print a summary.
+    Return 0 when every file is clean, 1 otherwise.'''
+    print("Checking the copyrights in the code...")
+
+    if args.patch:
+        print("Checking files modified between patches " + args.from_ref
+              + " and " + args.to_ref + "...")
+
+        (rc, stdout, stderr) = utils.shell_command(['git', 'diff',
+            '--diff-filter=ACMRT', '--name-only', args.from_ref, args.to_ref ])
+        if rc:
+            return 1
+
+        files = stdout.splitlines()
+
+    else:
+        print("Checking all files tracked by git...")
+
+        (rc, stdout, stderr) = utils.shell_command([ 'git', 'ls-files' ])
+        if rc:
+            return 1
+
+        files = stdout.splitlines()
+
+    count_ok = 0
+    count_warning = 0
+    count_error = 0
+
+    for f in files:
+        if utils.file_is_ignored(f, VALID_FILE_EXTENSIONS, IGNORED_FILES, IGNORED_FOLDERS):
+            if args.verbose:
+                print("Ignoring file " + f)
+            continue
+
+        if args.verbose:
+            print("Checking file " + f)
+
+        rc = check_copyright(f)
+
+        if rc == COPYRIGHT_OK:
+            count_ok += 1
+        elif rc == COPYRIGHT_WARNING:
+            count_warning += 1
+            print("WARNING: " + f)
+        elif rc == COPYRIGHT_ERROR:
+            count_error += 1
+            print("ERROR: " + f)
+
+    print("\nSummary:")
+    print("\t{} files analyzed".format(count_ok + count_warning + count_error))
+
+    if count_warning == 0 and count_error == 0:
+        print("\tNo errors found")
+        return 0
+
+    if count_error > 0:
+        print("\t{} errors found".format(count_error))
+    if count_warning > 0:
+        print("\t{} warnings found".format(count_warning))
+
+    # Previously the function fell off the end here and returned None,
+    # which sys.exit() treats as success — errors never failed the check.
+    return 1
+
+
+def parse_cmd_line(argv, prog_name):
+ parser = argparse.ArgumentParser(
+ prog=prog_name,
+ formatter_class=argparse.RawTextHelpFormatter,
+ description="Check copyright of all files of codebase",
+ epilog="""
+For each source file in the tree, checks that the copyright header
+has the correct format.
+""")
+
+ parser.add_argument("--tree", "-t",
+ help="Path to the source tree to check (default: %(default)s)",
+ default=os.curdir)
+
+ parser.add_argument("--verbose", "-v",
+ help="Increase verbosity to the source tree to check (default: %(default)s)",
+ action='store_true', default=False)
+
+ parser.add_argument("--patch", "-p",
+ help="""
+Patch mode.
+Instead of checking all files in the source tree, the script will consider
+only files that are modified by the latest patch(es).""",
+ action="store_true")
+ parser.add_argument("--from-ref",
+ help="Base commit in patch mode (default: %(default)s)",
+ default="master")
+ parser.add_argument("--to-ref",
+ help="Final commit in patch mode (default: %(default)s)",
+ default="HEAD")
+
+ args = parser.parse_args(argv)
+ return args
+
+
+if __name__ == "__main__":
+ args = parse_cmd_line(sys.argv[1:], sys.argv[0])
+
+ os.chdir(args.tree)
+
+ rc = main(args)
+
+ sys.exit(rc)
diff --git a/script/static-checks/check-include-order.py b/script/static-checks/check-include-order.py
new file mode 100755
index 0000000..481ca42
--- /dev/null
+++ b/script/static-checks/check-include-order.py
@@ -0,0 +1,349 @@
+#!/usr/bin/env python3
+#
+# Copyright (c) 2019, Arm Limited. All rights reserved.
+#
+# SPDX-License-Identifier: BSD-3-Clause
+#
+
+import argparse
+import codecs
+import os
+import re
+import sys
+import utils
+
+
+# File extensions to check
+VALID_FILE_EXTENSIONS = ('.c', '.S', '.h')
+
+
+# Paths inside the tree to ignore. Hidden folders and files are always ignored.
+# They mustn't end in '/'.
+IGNORED_FOLDERS = ("include/lib/stdlib",
+ "include/lib/libc",
+ "include/lib/libfdt",
+ "lib/libfdt",
+ "lib/libc",
+ "lib/stdlib")
+
+# List of ignored files in folders that aren't ignored
+IGNORED_FILES = (
+)
+
+def line_remove_comments(line):
+ '''Remove C comments within a line. This code doesn't know if the line is
+ commented in a multi line comment that involves more lines than itself.'''
+
+ # Multi line comments
+ while line.find("/*") != -1:
+ start_comment = line.find("/*")
+ end_comment = line.find("*/")
+ if end_comment != -1:
+ end_comment = end_comment + 2 # Skip the "*/"
+ line = line[ : start_comment ] + line[ end_comment : ]
+ else: # The comment doesn't end this line.
+ line = line[ : start_comment ]
+
+ # Single line comments
+ comment = line.find("//")
+ if comment != -1:
+ line = line[ : comment ]
+
+ return line
+
+
+def line_get_include_path(line):
+ '''It takes a line of code with an include directive and returns the file
+ path with < or the first " included to tell them apart.'''
+ if line.find('<') != -1:
+ if line.find('.h>') == -1:
+ return None
+ inc = line[ line.find('<') : line.find('.h>') ]
+ elif line.find('"') != -1:
+ if line.find('.h"') == -1:
+ return None
+ inc = line[ line.find('"') : line.find('.h"') ]
+ else:
+ inc = None
+
+ return inc
+
+
+def file_get_include_list(path, _encoding='ascii'):
+    '''Reads all lines from a file and returns a list of include paths. It
+    tries to read the file in ASCII mode and UTF-8 if it fails. If it succeeds
+    it will return a list of include paths. If it fails it will return None.'''
+
+    inc_list = []
+
+    try:
+        f = codecs.open(path, encoding=_encoding)
+    except:
+        print("ERROR:" + path + ":open() error!")
+        utils.print_exception_info()
+        return None
+
+    # Allow spaces in between, but not comments.
+    pattern = re.compile(r"^\s*#\s*include\s\s*[\"<]")
+
+    fatal_error = False
+
+    try:
+        for line in f:
+            if pattern.match(line):
+                # Strip comments before extracting the path; the return
+                # value was previously discarded, making the call a no-op.
+                line = line_remove_comments(line)
+                inc = line_get_include_path(line)
+                if inc != None:
+                    inc_list.append(inc)
+
+    except UnicodeDecodeError:
+        # Capture exceptions caused by non-ASCII encoded files.
+        if _encoding == 'ascii':
+            # Reopen the file in UTF-8 mode. Python allows a file to be opened
+            # more than once at a time. Exceptions for the recursively called
+            # function will be handled inside it.
+            # Output a warning.
+            print("ERROR:" + path + ":Non-ASCII encoded file!")
+            inc_list = file_get_include_list(path,'utf-8')
+        else:
+            # Already tried to decode in UTF-8 mode. Don't try again.
+            print("ERROR:" + path + ":Failed to decode UTF-8!")
+            fatal_error = True # Can't return while file is still open.
+            utils.print_exception_info()
+    except:
+        print("ERROR:" + path + ":error while parsing!")
+        utils.print_exception_info()
+
+    f.close()
+
+    if fatal_error:
+        return None
+
+    return inc_list
+
+
+def inc_order_is_correct(inc_list, path, commit_hash=""):
+ '''Returns true if the provided list is in order. If not, output error
+ messages to stdout.'''
+
+ # If there are less than 2 includes there's no need to check.
+ if len(inc_list) < 2:
+ return True
+
+ if commit_hash != "":
+ commit_hash = commit_hash + ":" # For formatting
+
+ sys_after_user = False
+ sys_order_wrong = False
+ user_order_wrong = False
+
+ # First, check if all system includes are before the user includes.
+ previous_delimiter = '<' # Begin with system includes.
+
+ for inc in inc_list:
+ delimiter = inc[0]
+ if previous_delimiter == '<' and delimiter == '"':
+ previous_delimiter = '"' # Started user includes.
+ elif previous_delimiter == '"' and delimiter == '<':
+ sys_after_user = True
+
+ # Then, check alphabetic order (system and user separately).
+ usr_incs = []
+ sys_incs = []
+
+ for inc in inc_list:
+ if inc.startswith('<'):
+ sys_incs.append(inc)
+ elif inc.startswith('"'):
+ usr_incs.append(inc)
+
+ if sorted(sys_incs) != sys_incs:
+ sys_order_wrong = True
+ if sorted(usr_incs) != usr_incs:
+ user_order_wrong = True
+
+ # Output error messages.
+ if sys_after_user:
+ print("ERROR:" + commit_hash + path +
+ ":System include after user include.")
+ if sys_order_wrong:
+ print("ERROR:" + commit_hash + path +
+ ":System includes not in order.")
+ if user_order_wrong:
+ print("ERROR:" + commit_hash + path +
+ ":User includes not in order.")
+
+ return not ( sys_after_user or sys_order_wrong or user_order_wrong )
+
+
+def file_is_correct(path):
+ '''Checks whether the order of includes in the file specified in the path
+ is correct or not.'''
+
+ inc_list = file_get_include_list(path)
+
+ if inc_list == None: # Failed to decode - Flag as incorrect.
+ return False
+
+ return inc_order_is_correct(inc_list, path)
+
+
+def directory_tree_is_correct():
+ '''Checks all tracked files in the current git repository, except the ones
+ explicitly ignored by this script.
+ Returns True if all files are correct.'''
+
+ # Get list of files tracked by git
+ (rc, stdout, stderr) = utils.shell_command([ 'git', 'ls-files' ])
+ if rc != 0:
+ return False
+
+ all_files_correct = True
+
+ files = stdout.splitlines()
+
+ for f in files:
+ if not utils.file_is_ignored(f, VALID_FILE_EXTENSIONS, IGNORED_FILES, IGNORED_FOLDERS):
+ if not file_is_correct(f):
+ # Make the script end with an error code, but continue
+ # checking files even if one of them is incorrect.
+ all_files_correct = False
+
+ return all_files_correct
+
+
+def patch_is_correct(base_commit, end_commit):
+    '''Get the output of a git diff and analyse each modified file.'''
+
+    # Get patches of the affected commits with one line of context.
+    (rc, stdout, stderr) = utils.shell_command([ 'git', 'log', '--unified=1',
+                                                 '--pretty="commit %h"',
+                                                 base_commit + '..' + end_commit ])
+
+    if rc != 0:
+        return False
+
+    # Parse stdout to get all renamed, modified and added file paths.
+    # Then, check order of new includes. The log output begins with each commit
+    # comment and then a list of files and differences.
+    lines = stdout.splitlines()
+
+    all_files_correct = True
+
+    # All files without a valid extension are ignored. /dev/null is also used by
+    # git patch to tell that a file has been deleted, and it doesn't have a
+    # valid extension, so it will be used as a reset value.
+    path = "/dev/null"
+    commit_hash = "0"
+    # There are only 2 states: commit msg or file. Start inside commit message
+    # because the include list is not checked when changing from this state.
+    inside_commit_message = True
+    inc_list = []
+
+    # Allow spaces in between, but not comments.
+    # Check for lines with "+" or " " at the beginning (added or not modified)
+    pattern = re.compile(r"^[+ ]\s*#\s*include\s\s*[\"<]")
+
+    total_line_num = len(lines)
+    # By iterating this way the loop can detect if it's the last iteration and
+    # check the last file (the log doesn't have any indicator of the end)
+    for i, line in enumerate(lines): # Save line number in i
+
+        new_commit = False
+        new_file = False
+        log_last_line = i == total_line_num-1
+
+        # 1. Check which kind of line this is. If this line means that the file
+        # being analysed is finished, don't update the path or hash until after
+        # checking the order of includes, they are used in error messages. Check
+        # for any includes in case this is the last line of the log.
+
+        # Line format: <"commit 0000000"> (quotes present in stdout)
+        if line.startswith('"commit '): # New commit
+            new_commit = True
+        # Line format: <+++ b/path>
+        elif line.startswith("+++ b/"): # New file.
+            new_file = True
+        # Any other line
+        else: # Check for includes inside files, not in the commit message.
+            if not inside_commit_message:
+                if pattern.match(line):
+                    # Keep the stripped line (return value was discarded before).
+                    line = line_remove_comments(line)
+                    inc = line_get_include_path(line)
+                    if inc != None:
+                        inc_list.append(inc)
+
+        # 2. Check order of includes if the file that was being analysed has
+        # finished. Print hash and path of the analised file in the error
+        # messages.
+
+        if new_commit or new_file or log_last_line:
+            if not inside_commit_message: # If a file is being analysed
+                if not utils.file_is_ignored(path, VALID_FILE_EXTENSIONS,
+                                             IGNORED_FILES, IGNORED_FOLDERS):
+                    if not inc_order_is_correct(inc_list, path, commit_hash):
+                        all_files_correct = False
+            inc_list = [] # Reset the include list for the next file (if any)
+
+        # 3. Update path or hash for the new file or commit. Update state.
+
+        if new_commit: # New commit, save hash
+            inside_commit_message = True # Enter commit message state
+            commit_hash = line[ 8 : -1 ] # Discard last "
+        elif new_file: # New file, save path.
+            inside_commit_message = False # Save path, exit commit message state
+            # A deleted file will appear as /dev/null so it will be ignored.
+            path = line[ 6 : ]
+
+    return all_files_correct
+
+
+
+def parse_cmd_line(argv, prog_name):
+ parser = argparse.ArgumentParser(
+ prog=prog_name,
+ formatter_class=argparse.RawTextHelpFormatter,
+ description="Check alphabetical order of #includes",
+ epilog="""
+For each source file in the tree, checks that #include's C preprocessor
+directives are ordered alphabetically (as mandated by the Trusted
+Firmware coding style). System header includes must come before user
+header includes.
+""")
+
+ parser.add_argument("--tree", "-t",
+ help="Path to the source tree to check (default: %(default)s)",
+ default=os.curdir)
+ parser.add_argument("--patch", "-p",
+ help="""
+Patch mode.
+Instead of checking all files in the source tree, the script will consider
+only files that are modified by the latest patch(es).""",
+ action="store_true")
+ parser.add_argument("--from-ref",
+ help="Base commit in patch mode (default: %(default)s)",
+ default="master")
+ parser.add_argument("--to-ref",
+ help="Final commit in patch mode (default: %(default)s)",
+ default="HEAD")
+ args = parser.parse_args(argv)
+ return args
+
+
+if __name__ == "__main__":
+ args = parse_cmd_line(sys.argv[1:], sys.argv[0])
+
+ os.chdir(args.tree)
+
+ if args.patch:
+ print("Checking files modified between patches " + args.from_ref
+ + " and " + args.to_ref + "...")
+ if not patch_is_correct(args.from_ref, args.to_ref):
+ sys.exit(1)
+ else:
+ print("Checking all files in directory '%s'..." % os.path.abspath(args.tree))
+ if not directory_tree_is_correct():
+ sys.exit(1)
+
+ # All source code files are correct.
+ sys.exit(0)
diff --git a/script/static-checks/checkpatch.pl b/script/static-checks/checkpatch.pl
new file mode 100755
index 0000000..e16d671
--- /dev/null
+++ b/script/static-checks/checkpatch.pl
@@ -0,0 +1,6577 @@
+#!/usr/bin/env perl
+# (c) 2001, Dave Jones. (the file handling bit)
+# (c) 2005, Joel Schopp <jschopp@austin.ibm.com> (the ugly bit)
+# (c) 2007,2008, Andy Whitcroft <apw@uk.ibm.com> (new conditions, test suite)
+# (c) 2008-2010 Andy Whitcroft <apw@canonical.com>
+# Licensed under the terms of the GNU GPL License version 2
+
+use strict;
+use warnings;
+use POSIX;
+use File::Basename;
+use Cwd 'abs_path';
+use Term::ANSIColor qw(:constants);
+
+my $P = $0;
+my $D = dirname(abs_path($P));
+
+my $V = '0.32';
+
+use Getopt::Long qw(:config no_auto_abbrev);
+
+my $quiet = 0;
+my $tree = 1;
+my $chk_signoff = 1;
+my $chk_patch = 1;
+my $tst_only;
+my $emacs = 0;
+my $terse = 0;
+my $showfile = 0;
+my $file = 0;
+my $git = 0;
+my %git_commits = ();
+my $check = 0;
+my $check_orig = 0;
+my $summary = 1;
+my $mailback = 0;
+my $summary_file = 0;
+my $show_types = 0;
+my $list_types = 0;
+my $fix = 0;
+my $fix_inplace = 0;
+my $root;
+my %debug;
+my %camelcase = ();
+my %use_type = ();
+my @use = ();
+my %ignore_type = ();
+my @ignore = ();
+my $help = 0;
+my $configuration_file = ".checkpatch.conf";
+my $max_line_length = 80;
+my $ignore_perl_version = 0;
+my $minimum_perl_version = 5.10.0;
+my $min_conf_desc_length = 4;
+my $spelling_file = "$D/spelling.txt";
+my $codespell = 0;
+my $codespellfile = "/usr/share/codespell/dictionary.txt";
+my $conststructsfile = "$D/const_structs.checkpatch";
+my $typedefsfile = "";
+my $color = "auto";
+my $allow_c99_comments = 1;
+
+sub help {
+ my ($exitcode) = @_;
+
+ print << "EOM";
+Usage: $P [OPTION]... [FILE]...
+Version: $V
+
+Options:
+ -q, --quiet quiet
+ --no-tree run without a kernel tree
+ --no-signoff do not check for 'Signed-off-by' line
+ --patch treat FILE as patchfile (default)
+ --emacs emacs compile window format
+ --terse one line per report
+ --showfile emit diffed file position, not input file position
+ -g, --git treat FILE as a single commit or git revision range
+ single git commit with:
+ <rev>
+ <rev>^
+ <rev>~n
+ multiple git commits with:
+ <rev1>..<rev2>
+ <rev1>...<rev2>
+ <rev>-<count>
+ git merges are ignored
+ -f, --file treat FILE as regular source file
+ --subjective, --strict enable more subjective tests
+ --list-types list the possible message types
+ --types TYPE(,TYPE2...) show only these comma separated message types
+ --ignore TYPE(,TYPE2...) ignore various comma separated message types
+ --show-types show the specific message type in the output
+ --max-line-length=n set the maximum line length, if exceeded, warn
+ --min-conf-desc-length=n set the min description length, if shorter, warn
+ --root=PATH PATH to the kernel tree root
+ --no-summary suppress the per-file summary
+ --mailback only produce a report in case of warnings/errors
+ --summary-file include the filename in summary
+ --debug KEY=[0|1] turn on/off debugging of KEY, where KEY is one of
+ 'values', 'possible', 'type', and 'attr' (default
+ is all off)
+ --test-only=WORD report only warnings/errors containing WORD
+ literally
+ --fix EXPERIMENTAL - may create horrible results
+ If correctable single-line errors exist, create
+ "<inputfile>.EXPERIMENTAL-checkpatch-fixes"
+ with potential errors corrected to the preferred
+ checkpatch style
+ --fix-inplace EXPERIMENTAL - may create horrible results
+ Is the same as --fix, but overwrites the input
+ file. It's your fault if there's no backup or git
+ --ignore-perl-version override checking of perl version. expect
+ runtime errors.
+ --codespell Use the codespell dictionary for spelling/typos
+ (default:/usr/share/codespell/dictionary.txt)
+ --codespellfile Use this codespell dictionary
+ --typedefsfile Read additional types from this file
+ --color[=WHEN] Use colors 'always', 'never', or only when output
+ is a terminal ('auto'). Default is 'auto'.
+ -h, --help, --version display this help and exit
+
+When FILE is - read standard input.
+EOM
+
+ exit($exitcode);
+}
+
+sub uniq {
+ my %seen;
+ return grep { !$seen{$_}++ } @_;
+}
+
+sub list_types {
+ my ($exitcode) = @_;
+
+ my $count = 0;
+
+ local $/ = undef;
+
+ open(my $script, '<', abs_path($P)) or
+ die "$P: Can't read '$P' $!\n";
+
+ my $text = <$script>;
+ close($script);
+
+ my @types = ();
+ # Also catch when type or level is passed through a variable
+ for ($text =~ /(?:(?:\bCHK|\bWARN|\bERROR|&\{\$msg_level})\s*\(|\$msg_type\s*=)\s*"([^"]+)"/g) {
+ push (@types, $_);
+ }
+ @types = sort(uniq(@types));
+ print("#\tMessage type\n\n");
+ foreach my $type (@types) {
+ print(++$count . "\t" . $type . "\n");
+ }
+
+ exit($exitcode);
+}
+
+my $conf = which_conf($configuration_file);
+if (-f $conf) {
+ my @conf_args;
+ open(my $conffile, '<', "$conf")
+ or warn "$P: Can't find a readable $configuration_file file $!\n";
+
+ while (<$conffile>) {
+ my $line = $_;
+
+ $line =~ s/\s*\n?$//g;
+ $line =~ s/^\s*//g;
+ $line =~ s/\s+/ /g;
+
+ next if ($line =~ m/^\s*#/);
+ next if ($line =~ m/^\s*$/);
+
+ my @words = split(" ", $line);
+ foreach my $word (@words) {
+ last if ($word =~ m/^#/);
+ push (@conf_args, $word);
+ }
+ }
+ close($conffile);
+ unshift(@ARGV, @conf_args) if @conf_args;
+}
+
+# Perl's Getopt::Long allows options to take optional arguments after a space.
+# Prevent --color by itself from consuming other arguments
+foreach (@ARGV) {
+ if ($_ eq "--color" || $_ eq "-color") {
+ $_ = "--color=$color";
+ }
+}
+
+GetOptions(
+ 'q|quiet+' => \$quiet,
+ 'tree!' => \$tree,
+ 'signoff!' => \$chk_signoff,
+ 'patch!' => \$chk_patch,
+ 'emacs!' => \$emacs,
+ 'terse!' => \$terse,
+ 'showfile!' => \$showfile,
+ 'f|file!' => \$file,
+ 'g|git!' => \$git,
+ 'subjective!' => \$check,
+ 'strict!' => \$check,
+ 'ignore=s' => \@ignore,
+ 'types=s' => \@use,
+ 'show-types!' => \$show_types,
+ 'list-types!' => \$list_types,
+ 'max-line-length=i' => \$max_line_length,
+ 'min-conf-desc-length=i' => \$min_conf_desc_length,
+ 'root=s' => \$root,
+ 'summary!' => \$summary,
+ 'mailback!' => \$mailback,
+ 'summary-file!' => \$summary_file,
+ 'fix!' => \$fix,
+ 'fix-inplace!' => \$fix_inplace,
+ 'ignore-perl-version!' => \$ignore_perl_version,
+ 'debug=s' => \%debug,
+ 'test-only=s' => \$tst_only,
+ 'codespell!' => \$codespell,
+ 'codespellfile=s' => \$codespellfile,
+ 'typedefsfile=s' => \$typedefsfile,
+ 'color=s' => \$color,
+ 'no-color' => \$color, #keep old behaviors of -nocolor
+ 'nocolor' => \$color, #keep old behaviors of -nocolor
+ 'h|help' => \$help,
+ 'version' => \$help
+) or help(1);
+
+help(0) if ($help);
+
+list_types(0) if ($list_types);
+
+$fix = 1 if ($fix_inplace);
+$check_orig = $check;
+
+my $exit = 0;
+
+if ($^V && $^V lt $minimum_perl_version) {
+ printf "$P: requires at least perl version %vd\n", $minimum_perl_version;
+ if (!$ignore_perl_version) {
+ exit(1);
+ }
+}
+
+#if no filenames are given, push '-' to read patch from stdin
+if ($#ARGV < 0) {
+ push(@ARGV, '-');
+}
+
+if ($color =~ /^[01]$/) {
+ $color = !$color;
+} elsif ($color =~ /^always$/i) {
+ $color = 1;
+} elsif ($color =~ /^never$/i) {
+ $color = 0;
+} elsif ($color =~ /^auto$/i) {
+ $color = (-t STDOUT);
+} else {
+ die "Invalid color mode: $color\n";
+}
+
+sub hash_save_array_words {
+ my ($hashRef, $arrayRef) = @_;
+
+ my @array = split(/,/, join(',', @$arrayRef));
+ foreach my $word (@array) {
+ $word =~ s/\s*\n?$//g;
+ $word =~ s/^\s*//g;
+ $word =~ s/\s+/ /g;
+ $word =~ tr/[a-z]/[A-Z]/;
+
+ next if ($word =~ m/^\s*#/);
+ next if ($word =~ m/^\s*$/);
+
+ $hashRef->{$word}++;
+ }
+}
+
+sub hash_show_words {
+ my ($hashRef, $prefix) = @_;
+
+ if (keys %$hashRef) {
+ print "\nNOTE: $prefix message types:";
+ foreach my $word (sort keys %$hashRef) {
+ print " $word";
+ }
+ print "\n";
+ }
+}
+
# Record which message types to suppress (--ignore) or exclusively
# report (--types).
hash_save_array_words(\%ignore_type, \@ignore);
hash_save_array_words(\%use_type, \@use);

my $dbg_values = 0;
my $dbg_possible = 0;
my $dbg_type = 0;
my $dbg_attr = 0;
# Map --debug key=value pairs onto the matching $dbg_* scalar by name.
for my $key (keys %debug) {
	## no critic
	eval "\${dbg_$key} = '$debug{$key}';";
	die "$@" if ($@);
}

my $rpt_cleaners = 0;

# --terse implies emacs-style one-line reports and a quieter run.
if ($terse) {
	$emacs = 1;
	$quiet++;
}

# With --tree, locate (or validate) the top of the kernel tree.
if ($tree) {
	if (defined $root) {
		if (!top_of_kernel_tree($root)) {
			die "$P: $root: --root does not point at a valid tree\n";
		}
	} else {
		if (top_of_kernel_tree('.')) {
			$root = '.';
		} elsif ($0 =~ m@(.*)/scripts/[^/]*$@ &&
						top_of_kernel_tree($1)) {
			$root = $1;
		}
	}

	if (!defined $root) {
		print "Must be run from the top-level dir. of a kernel tree\n";
		exit(2);
	}
}
+
my $emitted_corrupt = 0;

# Core lexical regexes: identifiers (including '##' token pasting),
# storage classes and sparse annotations.
our $Ident = qr{
		[A-Za-z_][A-Za-z\d_]*
		(?:\s*\#\#\s*[A-Za-z_][A-Za-z\d_]*)*
	}x;
our $Storage = qr{extern|static|asmlinkage};
our $Sparse = qr{
		__user|
		__kernel|
		__force|
		__iomem|
		__must_check|
		__init_refok|
		__kprobes|
		__ref|
		__rcu|
		__private
	}x;
our $InitAttributePrefix = qr{__(?:mem|cpu|dev|net_|)};
our $InitAttributeData = qr{$InitAttributePrefix(?:initdata\b)};
our $InitAttributeConst = qr{$InitAttributePrefix(?:initconst\b)};
our $InitAttributeInit = qr{$InitAttributePrefix(?:init\b)};
our $InitAttribute = qr{$InitAttributeData|$InitAttributeConst|$InitAttributeInit};

# Notes to $Attribute:
# We need \b after 'init' otherwise 'initconst' will cause a false positive in a check
our $Attribute = qr{
		const|
		__percpu|
		__nocast|
		__safe|
		__bitwise|
		__packed__|
		__packed2__|
		__naked|
		__maybe_unused|
		__always_unused|
		__noreturn|
		__used|
		__cold|
		__pure|
		__noclone|
		__deprecated|
		__read_mostly|
		__kprobes|
		$InitAttribute|
		____cacheline_aligned|
		____cacheline_aligned_in_smp|
		____cacheline_internodealigned_in_smp|
		__weak
	}x;
our $Modifier;
our $Inline = qr{inline|__always_inline|noinline|__inline|__inline__};
our $Member = qr{->$Ident|\.$Ident|\[[^]]*\]};
our $Lval = qr{$Ident(?:$Member)*};

# Numeric and string literal forms. $String matches the sanitised form
# produced by sanitise_line() (contents replaced by 'X'/tab).
our $Int_type = qr{(?i)llu|ull|ll|lu|ul|l|u};
our $Binary = qr{(?i)0b[01]+$Int_type?};
our $Hex = qr{(?i)0x[0-9a-f]+$Int_type?};
our $Int = qr{[0-9]+$Int_type?};
our $Octal = qr{0[0-7]+$Int_type?};
our $String = qr{"[X\t]*"};
our $Float_hex = qr{(?i)0x[0-9a-f]+p-?[0-9]+[fl]?};
our $Float_dec = qr{(?i)(?:[0-9]+\.[0-9]*|[0-9]*\.[0-9]+)(?:e-?[0-9]+)?[fl]?};
our $Float_int = qr{(?i)[0-9]+e-?[0-9]+[fl]?};
our $Float = qr{$Float_hex|$Float_dec|$Float_int};
our $Constant = qr{$Float|$Binary|$Octal|$Hex|$Int};
our $Assignment = qr{\*\=|/=|%=|\+=|-=|<<=|>>=|&=|\^=|\|=|=};
our $Compare = qr{<=|>=|==|!=|<|(?<!-)>};
our $Arithmetic = qr{\+|-|\*|\/|%};
our $Operators = qr{
		<=|>=|==|!=|
		=>|->|<<|>>|<|>|!|~|
		&&|\|\||,|\^|\+\+|--|&|\||$Arithmetic
	}x;

our $c90_Keywords = qr{do|for|while|if|else|return|goto|continue|switch|default|case|break}x;

# Filled in later by build_types() once the per-file type/modifier lists
# are known.
our $BasicType;
our $NonptrType;
our $NonptrTypeMisordered;
our $NonptrTypeWithAttr;
our $Type;
our $TypeMisordered;
our $Declare;
our $DeclareMisordered;

our $NON_ASCII_UTF8 = qr{
	[\xC2-\xDF][\x80-\xBF]               # non-overlong 2-byte
	|  \xE0[\xA0-\xBF][\x80-\xBF]        # excluding overlongs
	| [\xE1-\xEC\xEE\xEF][\x80-\xBF]{2}  # straight 3-byte
	|  \xED[\x80-\x9F][\x80-\xBF]        # excluding surrogates
	|  \xF0[\x90-\xBF][\x80-\xBF]{2}     # planes 1-3
	| [\xF1-\xF3][\x80-\xBF]{3}          # planes 4-15
	|  \xF4[\x80-\x8F][\x80-\xBF]{2}     # plane 16
}x;

our $UTF8 = qr{
	[\x09\x0A\x0D\x20-\x7E]              # ASCII
	| $NON_ASCII_UTF8
}x;

# Recognised typedef name shapes: C99 fixed-width, other-OS and kernel
# integer typedefs.
our $typeC99Typedefs = qr{(?:__)?(?:[us]_?)?int_?(?:8|16|32|64)_t};
our $typeOtherOSTypedefs = qr{(?x:
	u_(?:char|short|int|long) |          # bsd
	u(?:nchar|short|int|long)            # sysv
)};
our $typeKernelTypedefs = qr{(?x:
	(?:__)?(?:u|s|be|le)(?:8|16|32|64)|
	atomic_t
)};
our $typeTypedefs = qr{(?x:
	$typeC99Typedefs\b|
	$typeOtherOSTypedefs\b|
	$typeKernelTypedefs\b
)};

our $zero_initializer = qr{(?:(?:0[xX])?0+$Int_type?|NULL|false)\b};

our $logFunctions = qr{(?x:
	printk(?:_ratelimited|_once|_deferred_once|_deferred|)|
	(?:[a-z0-9]+_){1,2}(?:printk|emerg|alert|crit|err|warning|warn|notice|info|debug|dbg|vdbg|devel|cont|WARN)(?:_ratelimited|_once|)|
	TP_printk|
	WARN(?:_RATELIMIT|_ONCE|)|
	panic|
	MODULE_[A-Z_]+|
	seq_vprintf|seq_printf|seq_puts
)};
+
# Commit-message tags that carry an email address.
our $signature_tags = qr{(?xi:
	Signed-off-by:|
	Acked-by:|
	Tested-by:|
	Reviewed-by:|
	Reported-by:|
	Suggested-by:|
	To:|
	Cc:
)};

# Integer type spellings in non-canonical keyword order (e.g.
# "int unsigned"); used to suggest the canonical form.
our @typeListMisordered = (
	qr{char\s+(?:un)?signed},
	qr{int\s+(?:(?:un)?signed\s+)?short\s},
	qr{int\s+short(?:\s+(?:un)?signed)},
	qr{short\s+int(?:\s+(?:un)?signed)},
	qr{(?:un)?signed\s+int\s+short},
	qr{short\s+(?:un)?signed},
	qr{long\s+int\s+(?:un)?signed},
	qr{int\s+long\s+(?:un)?signed},
	qr{long\s+(?:un)?signed\s+int},
	qr{int\s+(?:un)?signed\s+long},
	qr{int\s+(?:un)?signed},
	qr{int\s+long\s+long\s+(?:un)?signed},
	qr{long\s+long\s+int\s+(?:un)?signed},
	qr{long\s+long\s+(?:un)?signed\s+int},
	qr{long\s+long\s+(?:un)?signed},
	qr{long\s+(?:un)?signed},
);

# All recognised base type spellings; order matters (longest first).
our @typeList = (
	qr{void},
	qr{(?:(?:un)?signed\s+)?char},
	qr{(?:(?:un)?signed\s+)?short\s+int},
	qr{(?:(?:un)?signed\s+)?short},
	qr{(?:(?:un)?signed\s+)?int},
	qr{(?:(?:un)?signed\s+)?long\s+int},
	qr{(?:(?:un)?signed\s+)?long\s+long\s+int},
	qr{(?:(?:un)?signed\s+)?long\s+long},
	qr{(?:(?:un)?signed\s+)?long},
	qr{(?:un)?signed},
	qr{float},
	qr{double},
	qr{bool},
	qr{struct\s+$Ident},
	qr{union\s+$Ident},
	qr{enum\s+$Ident},
	qr{${Ident}_t},
	qr{${Ident}_handler},
	qr{${Ident}_handler_fn},
	@typeListMisordered,
);

our $C90_int_types = qr{(?x:
	long\s+long\s+int\s+(?:un)?signed|
	long\s+long\s+(?:un)?signed\s+int|
	long\s+long\s+(?:un)?signed|
	(?:(?:un)?signed\s+)?long\s+long\s+int|
	(?:(?:un)?signed\s+)?long\s+long|
	int\s+long\s+long\s+(?:un)?signed|
	int\s+(?:(?:un)?signed\s+)?long\s+long|

	long\s+int\s+(?:un)?signed|
	long\s+(?:un)?signed\s+int|
	long\s+(?:un)?signed|
	(?:(?:un)?signed\s+)?long\s+int|
	(?:(?:un)?signed\s+)?long|
	int\s+long\s+(?:un)?signed|
	int\s+(?:(?:un)?signed\s+)?long|

	int\s+(?:un)?signed|
	(?:(?:un)?signed\s+)?int
)};

# Per-file additions (reset between files by the main loop).
our @typeListFile = ();
our @typeListWithAttr = (
	@typeList,
	qr{struct\s+$InitAttribute\s+$Ident},
	qr{union\s+$InitAttribute\s+$Ident},
);

our @modifierList = (
	qr{fastcall},
);
our @modifierListFile = ();

# Functions taking a file-mode argument: [name-pattern, 1-based arg index].
our @mode_permission_funcs = (
	["module_param", 3],
	["module_param_(?:array|named|string)", 4],
	["module_param_array_named", 5],
	["debugfs_create_(?:file|u8|u16|u32|u64|x8|x16|x32|x64|size_t|atomic_t|bool|blob|regset32|u32_array)", 2],
	["proc_create(?:_data|)", 2],
	["(?:CLASS|DEVICE|SENSOR|SENSOR_DEVICE|IIO_DEVICE)_ATTR", 2],
	["IIO_DEV_ATTR_[A-Z_]+", 1],
	["SENSOR_(?:DEVICE_|)ATTR_2", 2],
	["SENSOR_TEMPLATE(?:_2|)", 3],
	["__ATTR", 2],
);
+
#Create a search pattern for all these functions to speed up a loop below
our $mode_perms_search = "";
foreach my $entry (@mode_permission_funcs) {
	$mode_perms_search .= '|' if ($mode_perms_search ne "");
	$mode_perms_search .= $entry->[0];
}
$mode_perms_search = "(?:${mode_perms_search})";

# Modes that grant write access to group/other (world-writable).
our $mode_perms_world_writable = qr{
	S_IWUGO		|
	S_IWOTH		|
	S_IRWXUGO	|
	S_IALLUGO	|
	0[0-7][0-7][2367]
}x;

# S_I* macro name -> octal value, used by perms_to_octal().
our %mode_permission_string_types = (
	"S_IRWXU" => 0700,
	"S_IRUSR" => 0400,
	"S_IWUSR" => 0200,
	"S_IXUSR" => 0100,
	"S_IRWXG" => 0070,
	"S_IRGRP" => 0040,
	"S_IWGRP" => 0020,
	"S_IXGRP" => 0010,
	"S_IRWXO" => 0007,
	"S_IROTH" => 0004,
	"S_IWOTH" => 0002,
	"S_IXOTH" => 0001,
	"S_IRWXUGO" => 0777,
	"S_IRUGO" => 0444,
	"S_IWUGO" => 0222,
	"S_IXUGO" => 0111,
);

#Create a search pattern for all these strings to speed up a loop below
our $mode_perms_string_search = "";
foreach my $entry (keys %mode_permission_string_types) {
	$mode_perms_string_search .= '|' if ($mode_perms_string_search ne "");
	$mode_perms_string_search .= $entry;
}
our $single_mode_perms_string_search = "(?:${mode_perms_string_search})";
our $multi_mode_perms_string_search = qr{
	${single_mode_perms_string_search}
	(?:\s*\|\s*${single_mode_perms_string_search})*
}x;
+
# Convert a mode expression - either a raw octal literal or a
# contiguous "S_IRUSR | S_IWUSR | ..." chain - into a 4-digit octal
# string. Scanning stops at the first gap in the OR-chain, so only the
# leading contiguous run of S_I* macros is converted.
sub perms_to_octal {
	my ($string) = @_;

	# Already a plain octal mode like 0644: return it trimmed.
	return trim($string) if ($string =~ /^\s*0[0-7]{3,3}\s*$/);

	my $to = 0;
	my $curpos = 0;
	my $lastpos = 0;
	while ($string =~ /\b(($single_mode_perms_string_search)\b(?:\s*\|\s*)?\s*)/g) {
		$curpos = pos($string);
		my $match = $2;
		my $omatch = $1;
		# Require each match to start exactly where the previous
		# one ended - i.e. a contiguous OR-chain.
		last if ($lastpos > 0 && ($curpos - length($omatch) != $lastpos));
		$lastpos = $curpos;
		$to |= $mode_permission_string_types{$match};
	}
	return sprintf("%04o", $to);
}
+
# asm/ headers that are legitimately included directly.
our $allowed_asm_includes = qr{(?x:
	irq|
	memory|
	time|
	reboot
)};
# memory.h: ARM has a custom one

# Load common spelling mistakes and build regular expression list.
my $misspellings;
my %spelling_fix;

# Each line of $spelling_file is "suspect||fix".
if (open(my $spelling, '<', $spelling_file)) {
	while (<$spelling>) {
		my $line = $_;

		$line =~ s/\s*\n?$//g;
		$line =~ s/^\s*//g;

		next if ($line =~ m/^\s*#/);
		next if ($line =~ m/^\s*$/);

		my ($suspect, $fix) = split(/\|\|/, $line);

		$spelling_fix{$suspect} = $fix;
	}
	close($spelling);
} else {
	warn "No typos will be found - file '$spelling_file': $!\n";
}

# Optionally merge in codespell's dictionary ("suspect->fix" lines,
# skipping entries marked ", disabled").
if ($codespell) {
	if (open(my $spelling, '<', $codespellfile)) {
		while (<$spelling>) {
			my $line = $_;

			$line =~ s/\s*\n?$//g;
			$line =~ s/^\s*//g;

			next if ($line =~ m/^\s*#/);
			next if ($line =~ m/^\s*$/);
			next if ($line =~ m/, disabled/i);

			$line =~ s/,.*$//;

			my ($suspect, $fix) = split(/->/, $line);

			$spelling_fix{$suspect} = $fix;
		}
		close($spelling);
	} else {
		warn "No codespell typos will be found - file '$codespellfile': $!\n";
	}
}

# One alternation regex matching every known misspelling.
$misspellings = join("|", sort keys %spelling_fix) if keys %spelling_fix;
+
# Read one word per line from $file and append each to the
# '|'-separated alternation string referenced by $wordsRef.
# Comment ('#') lines, blank lines and lines containing whitespace are
# skipped (the latter with a diagnostic). Returns 1 on success, 0 if
# the file could not be opened.
sub read_words {
	my ($wordsRef, $file) = @_;

	if (open(my $words, '<', $file)) {
		while (<$words>) {
			my $line = $_;

			$line =~ s/\s*\n?$//g;
			$line =~ s/^\s*//g;

			next if ($line =~ m/^\s*#/);
			next if ($line =~ m/^\s*$/);
			if ($line =~ /\s/) {
				print("$file: '$line' invalid - ignored\n");
				next;
			}

			$$wordsRef .= '|' if ($$wordsRef ne "");
			$$wordsRef .= $line;
		}
		# Bug fix: close the filehandle, not the filename string.
		close($words);
		return 1;
	}

	return 0;
}
+
# Structs that should normally be declared const.
my $const_structs = "";
read_words(\$const_structs, $conststructsfile)
    or warn "No structs that should be const will be found - file '$conststructsfile': $!\n";

# Extra typedef names supplied via --typedefsfile, merged into the
# typedef alternation used by build_types().
my $typeOtherTypedefs = "";
if (length($typedefsfile)) {
	read_words(\$typeOtherTypedefs, $typedefsfile)
	    or warn "No additional types will be considered - file '$typedefsfile': $!\n";
}
$typeTypedefs .= '|' . $typeOtherTypedefs if ($typeOtherTypedefs ne "");
+
# (Re)build the composite type-matching regexes ($Type, $Declare, ...)
# from the global + per-file type and modifier lists. Called once at
# startup and again after each file resets @typeListFile/@modifierListFile.
sub build_types {
	my $mods = "(?x:  \n" . join("|\n  ", (@modifierList, @modifierListFile)) . "\n)";
	my $all = "(?x:  \n" . join("|\n  ", (@typeList, @typeListFile)) . "\n)";
	my $Misordered = "(?x:  \n" . join("|\n  ", @typeListMisordered) . "\n)";
	my $allWithAttr = "(?x:  \n" . join("|\n  ", @typeListWithAttr) . "\n)";
	$Modifier = qr{(?:$Attribute|$Sparse|$mods)};
	$BasicType = qr{
				(?:$typeTypedefs\b)|
				(?:${all}\b)
		}x;
	$NonptrType = qr{
			(?:$Modifier\s+|const\s+)*
			(?:
				(?:typeof|__typeof__)\s*\([^\)]*\)|
				(?:$typeTypedefs\b)|
				(?:${all}\b)
			)
			(?:\s+$Modifier|\s+const)*
		}x;
	$NonptrTypeMisordered = qr{
			(?:$Modifier\s+|const\s+)*
			(?:
				(?:${Misordered}\b)
			)
			(?:\s+$Modifier|\s+const)*
		}x;
	$NonptrTypeWithAttr = qr{
			(?:$Modifier\s+|const\s+)*
			(?:
				(?:typeof|__typeof__)\s*\([^\)]*\)|
				(?:$typeTypedefs\b)|
				(?:${allWithAttr}\b)
			)
			(?:\s+$Modifier|\s+const)*
		}x;
	$Type = qr{
			$NonptrType
			(?:(?:\s|\*|\[\])+\s*const|(?:\s|\*\s*(?:const\s*)?|\[\])+|(?:\s*\[\s*\])+)?
			(?:\s+$Inline|\s+$Modifier)*
		}x;
	$TypeMisordered = qr{
			$NonptrTypeMisordered
			(?:(?:\s|\*|\[\])+\s*const|(?:\s|\*\s*(?:const\s*)?|\[\])+|(?:\s*\[\s*\])+)?
			(?:\s+$Inline|\s+$Modifier)*
		}x;
	$Declare = qr{(?:$Storage\s+(?:$Inline\s+)?)?$Type};
	$DeclareMisordered = qr{(?:$Storage\s+(?:$Inline\s+)?)?$TypeMisordered};
}
build_types();

our $Typecast = qr{\s*(\(\s*$NonptrType\s*\)){0,1}\s*};

# Using $balanced_parens, $LvalOrFunc, or $FuncArg
# requires at least perl version v5.10.0
# Any use must be runtime checked with $^V

our $balanced_parens = qr/(\((?:[^\(\)]++|(?-1))*\))/;
our $LvalOrFunc = qr{((?:[\&\*]\s*)?$Lval)\s*($balanced_parens{0,1})\s*};
our $FuncArg = qr{$Typecast{0,1}($LvalOrFunc|$Constant|$String)};

# Macro/function shapes that declare things (DEFINE_*, LIST_HEAD, ...).
our $declaration_macros = qr{(?x:
	(?:$Storage\s+)?(?:[A-Z_][A-Z0-9]*_){0,2}(?:DEFINE|DECLARE)(?:_[A-Z0-9]+){1,6}\s*\(|
	(?:$Storage\s+)?[HLP]?LIST_HEAD\s*\(|
	(?:$Storage\s+)?${Type}\s+uninitialized_var\s*\(|
	(?:SKCIPHER_REQUEST|SHASH_DESC|AHASH_REQUEST)_ON_STACK\s*\(
)};
+
# Strip any number of balanced outer parentheses from $str and collapse
# internal whitespace runs to single spaces. undef yields "".
sub deparenthesize {
	my ($str) = @_;

	return "" unless (defined($str));

	# Peel one layer of surrounding parentheses per pass.
	while ($str =~ /^\s*\(.*\)\s*$/) {
		$str =~ s/^\s*\(\s*//;
		$str =~ s/\s*\)\s*$//;
	}

	$str =~ s/\s+/ /g;

	return $str;
}
+
# Scan a header file for CamelCase identifiers introduced by #define,
# typedef, declarations or struct/union/enum tags, and record them in
# %camelcase so they are not reported as violations.
sub seed_camelcase_file {
	my ($file) = @_;

	return if (!(-f $file));

	local $/;	# slurp the whole file in one read

	open(my $include_file, '<', "$file")
	    or warn "$P: Can't read '$file' $!\n";
	my $text = <$include_file>;
	close($include_file);

	my @lines = split('\n', $text);

	foreach my $line (@lines) {
		# Cheap pre-filter: line must contain a case transition.
		next if ($line !~ /(?:[A-Z][a-z]|[a-z][A-Z])/);
		if ($line =~ /^[ \t]*(?:#[ \t]*define|typedef\s+$Type)\s+(\w*(?:[A-Z][a-z]|[a-z][A-Z])\w*)/) {
			$camelcase{$1} = 1;
		} elsif ($line =~ /^\s*$Declare\s+(\w*(?:[A-Z][a-z]|[a-z][A-Z])\w*)\s*[\(\[,;]/) {
			$camelcase{$1} = 1;
		} elsif ($line =~ /^\s*(?:union|struct|enum)\s+(\w*(?:[A-Z][a-z]|[a-z][A-Z])\w*)\s*[;\{]/) {
			$camelcase{$1} = 1;
		}
	}
}
+
# Ask get_maintainer.pl whether $filename belongs to a subsystem whose
# MAINTAINERS status is "obsolete". Only meaningful with --tree and a
# usable scripts/get_maintainer.pl.
sub is_maintained_obsolete {
	my ($filename) = @_;

	return 0 if (!$tree || !(-e "$root/scripts/get_maintainer.pl"));

	my $status = `perl $root/scripts/get_maintainer.pl --status --nom --nol --nogit --nogit-fallback -f $filename 2>&1`;

	return $status =~ /obsolete/i;
}
+
my $camelcase_seeded = 0;
# Populate %camelcase from the tree's include/ headers, caching the
# result in a .checkpatch-camelcase.* file keyed either on the last git
# commit touching include/ or on the newest header mtime.
sub seed_camelcase_includes {
	return if ($camelcase_seeded);

	my $files;
	my $camelcase_cache = "";
	my @include_files = ();

	$camelcase_seeded = 1;

	# Choose a cache key: git commit hash if in a git tree, otherwise
	# the most recent modification date of any header.
	if (-e ".git") {
		my $git_last_include_commit = `git log --no-merges --pretty=format:"%h%n" -1 -- include`;
		chomp $git_last_include_commit;
		$camelcase_cache = ".checkpatch-camelcase.git.$git_last_include_commit";
	} else {
		my $last_mod_date = 0;
		$files = `find $root/include -name "*.h"`;
		@include_files = split('\n', $files);
		foreach my $file (@include_files) {
			my $date = POSIX::strftime("%Y%m%d%H%M",
						   localtime((stat $file)[9]));
			$last_mod_date = $date if ($last_mod_date < $date);
		}
		$camelcase_cache = ".checkpatch-camelcase.date.$last_mod_date";
	}

	# Cache hit: load one identifier per line and return.
	if ($camelcase_cache ne "" && -f $camelcase_cache) {
		open(my $camelcase_file, '<', "$camelcase_cache")
		    or warn "$P: Can't read '$camelcase_cache' $!\n";
		while (<$camelcase_file>) {
			chomp;
			$camelcase{$_} = 1;
		}
		close($camelcase_file);

		return;
	}

	if (-e ".git") {
		$files = `git ls-files "include/*.h"`;
		@include_files = split('\n', $files);
	}

	foreach my $file (@include_files) {
		seed_camelcase_file($file);
	}

	# Rewrite the cache (dropping any stale ones) for the next run.
	if ($camelcase_cache ne "") {
		unlink glob ".checkpatch-camelcase.*";
		open(my $camelcase_file, '>', "$camelcase_cache")
		    or warn "$P: Can't write '$camelcase_cache' $!\n";
		foreach (sort { lc($a) cmp lc($b) } keys(%camelcase)) {
			print $camelcase_file ("$_\n");
		}
		close($camelcase_file);
	}
}
+
# Resolve $commit via git; returns (12-char id, subject). On ambiguous
# short SHA1 the inputs are returned unchanged; on an unknown revision
# $id is set to undef.
sub git_commit_info {
	my ($commit, $id, $desc) = @_;

	return ($id, $desc) if ((which("git") eq "") || !(-e ".git"));

	my $output = `git log --no-color --format='%H %s' -1 $commit 2>&1`;
	$output =~ s/^\s*//gm;
	my @lines = split("\n", $output);

	return ($id, $desc) if ($#lines < 0);

	if ($lines[0] =~ /^error: short SHA1 $commit is ambiguous\./) {
# Maybe one day convert this block of bash into something that returns
# all matching commit ids, but it's very slow...
#
# echo "checking commits $1..."
# git rev-list --remotes | grep -i "^$1" |
# while read line ; do
#     git log --format='%H %s' -1 $line |
#     echo "commit $(cut -c 1-12,41-)"
# done
	} elsif ($lines[0] =~ /^fatal: ambiguous argument '$commit': unknown revision or path not in the working tree\./) {
		$id = undef;
	} else {
		# "%H %s": chars 0-11 are the abbreviated hash, the
		# subject starts after the full 40-char hash + space.
		$id = substr($lines[0], 0, 12);
		$desc = substr($lines[0], 41);
	}

	return ($id, $desc);
}
+
# Signed-off-by checks make no sense when checking whole files.
$chk_signoff = 0 if ($file);

# Per-file working state, reset after each file in the main loop.
my @rawlines = ();
my @lines = ();
my @fixed = ();
my @fixed_inserted = ();
my @fixed_deleted = ();
my $fixlinenr = -1;

# If input is git commits, extract all commits from the commit expressions.
# For example, HEAD-3 means we need check 'HEAD, HEAD~1, HEAD~2'.
die "$P: No git repository found\n" if ($git && !-e ".git");

if ($git) {
	my @commits = ();
	foreach my $commit_expr (@ARGV) {
		my $git_range;
		if ($commit_expr =~ m/^(.*)-(\d+)$/) {
			# "<rev>-<n>" means the n commits ending at <rev>.
			$git_range = "-$2 $1";
		} elsif ($commit_expr =~ m/\.\./) {
			$git_range = "$commit_expr";
		} else {
			$git_range = "-1 $commit_expr";
		}
		my $lines = `git log --no-color --no-merges --pretty=format:'%H %s' $git_range`;
		foreach my $line (split(/\n/, $lines)) {
			$line =~ /^([0-9a-fA-F]{40,40}) (.*)$/;
			next if (!defined($1) || !defined($2));
			my $sha1 = $1;
			my $subject = $2;
			# unshift: process commits oldest-first.
			unshift(@commits, $sha1);
			$git_commits{$sha1} = $subject;
		}
	}
	die "$P: no git commits after extraction!\n" if (@commits == 0);
	@ARGV = @commits;
}
+
my $vname;
# Main loop: for each argument (patch file, '-' for stdin, file with
# --file, or commit with --git) obtain a unified diff, slurp it into
# @rawlines and run process() on it.
for my $filename (@ARGV) {
	my $FILE;
	if ($git) {
		open($FILE, '-|', "git format-patch -M --stdout -1 $filename") ||
			die "$P: $filename: git format-patch failed - $!\n";
	} elsif ($file) {
		# Whole-file mode: fabricate a diff against /dev/null.
		open($FILE, '-|', "diff -u /dev/null $filename") ||
			die "$P: $filename: diff failed - $!\n";
	} elsif ($filename eq '-') {
		open($FILE, '<&STDIN');
	} else {
		open($FILE, '<', "$filename") ||
			die "$P: $filename: open failed - $!\n";
	}
	# $vname is the human-readable name used in report headers.
	if ($filename eq '-') {
		$vname = 'Your patch';
	} elsif ($git) {
		$vname = "Commit " . substr($filename, 0, 12) . ' ("' . $git_commits{$filename} . '")';
	} else {
		$vname = $filename;
	}
	while (<$FILE>) {
		chomp;
		push(@rawlines, $_);
	}
	close($FILE);

	if ($#ARGV > 0 && $quiet == 0) {
		print '-' x length($vname) . "\n";
		print "$vname\n";
		print '-' x length($vname) . "\n";
	}

	if (!process($filename)) {
		$exit = 1;
	}
	# Reset all per-file state and rebuild the type regexes.
	@rawlines = ();
	@lines = ();
	@fixed = ();
	@fixed_inserted = ();
	@fixed_deleted = ();
	$fixlinenr = -1;
	@modifierListFile = ();
	@typeListFile = ();
	build_types();
}

# Final summary: used/ignored message types and advisory notes.
if (!$quiet) {
	hash_show_words(\%use_type, "Used");
	hash_show_words(\%ignore_type, "Ignored");

	if ($^V lt 5.10.0) {
		print << "EOM"

NOTE: perl $^V is not modern enough to detect all possible issues.
      An upgrade to at least perl v5.10.0 is suggested.
EOM
	}
	if ($exit) {
		print << "EOM"

NOTE: If any of the errors are false positives, please report
      them to the maintainer, see CHECKPATCH in MAINTAINERS.
EOM
	}
}

exit($exit);
+
# Heuristic: $dir is the top of a kernel tree iff all of these
# landmark files and directories exist there.
sub top_of_kernel_tree {
	my ($dir) = @_;

	my @landmarks = (
		"COPYING", "CREDITS", "Kbuild", "MAINTAINERS", "Makefile",
		"README", "Documentation", "arch", "include", "drivers",
		"fs", "init", "ipc", "kernel", "lib", "scripts",
	);

	for my $entry (@landmarks) {
		return 0 unless (-e $dir . '/' . $entry);
	}

	return 1;
}
+
# Split a formatted email ("Name <addr> (comment)") into its parts.
# Returns ($name, $address, $comment) with $name quoted if it contains
# characters that require quoting. Malformed bare-address forms yield
# empty name/address/comment.
sub parse_email {
	my ($formatted_email) = @_;

	my $name = "";
	my $address = "";
	my $comment = "";

	if ($formatted_email =~ /^(.*)<(\S+\@\S+)>(.*)$/) {
		$name = $1;
		$address = $2;
		$comment = $3 if defined $3;
	} elsif ($formatted_email =~ /^\s*<(\S+\@\S+)>(.*)$/) {
		$address = $1;
		$comment = $2 if defined $2;
	} elsif ($formatted_email =~ /(\S+\@\S+)(.*)$/) {
		$address = $1;
		$comment = $2 if defined $2;
		$formatted_email =~ s/\Q$address\E.*$//;
		$name = $formatted_email;
		$name = trim($name);
		$name =~ s/^\"|\"$//g;
		# If there's a name left after stripping spaces and
		# leading quotes, and the address doesn't have both
		# leading and trailing angle brackets, the address
		# is invalid. ie:
		#   "joe smith joe@smith.com" bad
		#   "joe smith <joe@smith.com" bad
		if ($name ne "" && $address !~ /^<[^>]+>$/) {
			$name = "";
			$address = "";
			$comment = "";
		}
	}

	$name = trim($name);
	$name =~ s/^\"|\"$//g;
	$address = trim($address);
	$address =~ s/^\<|\>$//g;

	if ($name =~ /[^\w \-]/i) { ##has "must quote" chars
		$name =~ s/(?<!\\)"/\\"/g; ##escape quotes
		$name = "\"$name\"";
	}

	return ($name, $address, $comment);
}
+
# Re-assemble a "Name <address>" string from its parts, quoting the
# display name when it contains characters that require quoting.
sub format_email {
	my ($name, $address) = @_;

	$name = trim($name);
	$name =~ s/^\"|\"$//g;
	$address = trim($address);

	if ($name =~ /[^\w \-]/i) {	##has "must quote" chars
		$name =~ s/(?<!\\)"/\\"/g;	##escape quotes
		$name = "\"$name\"";
	}

	# A bare address when there is no display name.
	return "$name" eq "" ? "$address" : "$name <$address>";
}
+
# Search $PATH for an executable named $bin; return its full path or
# "" when not found.
sub which {
	my ($bin) = @_;

	for my $dir (split(/:/, $ENV{PATH})) {
		my $candidate = "$dir/$bin";
		return $candidate if (-e $candidate);
	}

	return "";
}
+
# Look for a config file in the current directory, then $HOME, then
# .scripts; return its path or "" when absent.
sub which_conf {
	my ($conf) = @_;

	for my $dir (split(/:/, ".:$ENV{HOME}:.scripts")) {
		my $candidate = "$dir/$conf";
		return $candidate if (-e $candidate);
	}

	return "";
}
+
# Expand tabs in $str to spaces using 8-column tab stops.
sub expand_tabs {
	my ($str) = @_;

	my $out = '';
	my $col = 0;

	for my $ch (split(//, $str)) {
		if ($ch eq "\t") {
			# Pad to the next multiple-of-8 column
			# (always at least one space).
			do {
				$out .= ' ';
				$col++;
			} while (($col % 8) != 0);
			next;
		}
		$out .= $ch;
		$col++;
	}

	return $out;
}
# Return a whitespace mask of the input: tabs are kept, every other
# character becomes a single space.
sub copy_spacing {
	my ($line) = @_;

	(my $mask = $line) =~ s/[^\t]/ /g;

	return $mask;
}
+
# Return (total length, indent length) of a diff line after dropping
# its leading +/-/space marker and expanding tabs.
sub line_stats {
	my ($line) = @_;

	$line =~ s/^.//;
	$line = expand_tabs($line);

	# Leading whitespace is the indentation.
	my ($indent) = ($line =~ /^(\s*)/);

	return (length($line), length($indent));
}
+
# Quote/comment state carried across lines by sanitise_line():
# '' (none), "'" or '"' (inside a string), '*/' (inside a block
# comment) or '//' (inside a line comment).
my $sanitise_quote = '';

# Reset the sanitiser state at a hunk boundary; $in_comment says
# whether the hunk starts inside a block comment.
sub sanitise_line_reset {
	my ($in_comment) = @_;

	if ($in_comment) {
		$sanitise_quote = '*/';
	} else {
		$sanitise_quote = '';
	}
}
# Produce a copy of a diff line in which the contents of comments and
# string/char literals are blanked out ($; for comments, 'X' for
# strings) so that later pattern matching cannot be confused by code
# that appears inside them. Quote state persists across lines via
# $sanitise_quote (see sanitise_line_reset).
sub sanitise_line {
	my ($line) = @_;

	my $res = '';
	my $l = '';

	my $qlen = 0;
	my $off = 0;
	my $c;

	# Always copy over the diff marker.
	$res = substr($line, 0, 1);

	for ($off = 1; $off < length($line); $off++) {
		$c = substr($line, $off, 1);

		# Comments we are whacking completely including the begin
		# and end, all to $;.
		if ($sanitise_quote eq '' && substr($line, $off, 2) eq '/*') {
			$sanitise_quote = '*/';

			substr($res, $off, 2, "$;$;");
			$off++;
			next;
		}
		if ($sanitise_quote eq '*/' && substr($line, $off, 2) eq '*/') {
			$sanitise_quote = '';
			substr($res, $off, 2, "$;$;");
			$off++;
			next;
		}
		if ($sanitise_quote eq '' && substr($line, $off, 2) eq '//') {
			$sanitise_quote = '//';

			substr($res, $off, 2, $sanitise_quote);
			$off++;
			next;
		}

		# A \ in a string means ignore the next character.
		if (($sanitise_quote eq "'" || $sanitise_quote eq '"') &&
		    $c eq "\\") {
			substr($res, $off, 2, 'XX');
			$off++;
			next;
		}
		# Regular quotes.
		if ($c eq "'" || $c eq '"') {
			if ($sanitise_quote eq '') {
				$sanitise_quote = $c;

				substr($res, $off, 1, $c);
				next;
			} elsif ($sanitise_quote eq $c) {
				$sanitise_quote = '';
			}
		}

		#print "c<$c> SQ<$sanitise_quote>\n";
		if ($off != 0 && $sanitise_quote eq '*/' && $c ne "\t") {
			substr($res, $off, 1, $;);
		} elsif ($off != 0 && $sanitise_quote eq '//' && $c ne "\t") {
			substr($res, $off, 1, $;);
		} elsif ($off != 0 && $sanitise_quote && $c ne "\t") {
			substr($res, $off, 1, 'X');
		} else {
			substr($res, $off, 1, $c);
		}
	}

	# Line comments never span lines.
	if ($sanitise_quote eq '//') {
		$sanitise_quote = '';
	}

	# The pathname on a #include may be surrounded by '<' and '>'.
	if ($res =~ /^.\s*\#\s*include\s+\<(.*)\>/) {
		my $clean = 'X' x length($1);
		$res =~ s@\<.*\>@<$clean>@;

	# The whole of a #error is a string.
	} elsif ($res =~ /^.\s*\#\s*(?:error|warning)\s+(.*)\b/) {
		my $clean = 'X' x length($1);
		$res =~ s@(\#\s*(?:error|warning)\s+).*@$1$clean@;
	}

	if ($allow_c99_comments && $res =~ m@(//.*$)@) {
		my $match = $1;
		$res =~ s/\Q$match\E/"$;" x length($match)/e;
	}

	return $res;
}
+
# Find the first sanitised string literal in $line and return the
# corresponding original text from $rawline (using the match offsets
# @-/@+ from the sanitised match). Returns "" when there is none.
sub get_quoted_string {
	my ($line, $rawline) = @_;

	return "" if (!defined($line) || !defined($rawline));
	return "" if ($line !~ m/($String)/g);
	return substr($rawline, $-[0], $+[0] - $-[0]);
}
+
# Starting at ($linenr, $off) with $remain lines of hunk left, pull in
# lines until one complete statement (terminated by ';' or a balanced
# '{...}' block, or the end of a preprocessor directive) has been seen.
# Returns (statement text, condition text, next line, remaining lines,
# next offset, nesting level). This is a character-level scanner; the
# exact statement order below is load-bearing.
sub ctx_statement_block {
	my ($linenr, $remain, $off) = @_;
	my $line = $linenr - 1;
	my $blk = '';
	my $soff = $off;
	my $coff = $off - 1;
	my $coff_set = 0;

	my $loff = 0;

	my $type = '';
	my $level = 0;
	my @stack = ();
	my $p;
	my $c;
	my $len = 0;

	my $remainder;
	while (1) {
		@stack = (['', 0]) if ($#stack == -1);

		#warn "CSB: blk<$blk> remain<$remain>\n";
		# If we are about to drop off the end, pull in more
		# context.
		if ($off >= $len) {
			for (; $remain > 0; $line++) {
				last if (!defined $lines[$line]);
				# Skip deleted lines from the diff.
				next if ($lines[$line] =~ /^-/);
				$remain--;
				$loff = $len;
				$blk .= $lines[$line] . "\n";
				$len = length($blk);
				$line++;
				last;
			}
			# Bail if there is no further context.
			#warn "CSB: blk<$blk> off<$off> len<$len>\n";
			if ($off >= $len) {
				last;
			}
			if ($level == 0 && substr($blk, $off) =~ /^.\s*#\s*define/) {
				$level++;
				$type = '#';
			}
		}
		$p = $c;
		$c = substr($blk, $off, 1);
		$remainder = substr($blk, $off);

		#warn "CSB: c<$c> type<$type> level<$level> remainder<$remainder> coff_set<$coff_set>\n";

		# Handle nested #if/#else.
		if ($remainder =~ /^#\s*(?:ifndef|ifdef|if)\s/) {
			push(@stack, [ $type, $level ]);
		} elsif ($remainder =~ /^#\s*(?:else|elif)\b/) {
			($type, $level) = @{$stack[$#stack - 1]};
		} elsif ($remainder =~ /^#\s*endif\b/) {
			($type, $level) = @{pop(@stack)};
		}

		# Statement ends at the ';' or a close '}' at the
		# outermost level.
		if ($level == 0 && $c eq ';') {
			last;
		}

		# An else is really a conditional as long as its not else if
		if ($level == 0 && $coff_set == 0 &&
				(!defined($p) || $p =~ /(?:\s|\}|\+)/) &&
				$remainder =~ /^(else)(?:\s|{)/ &&
				$remainder !~ /^else\s+if\b/) {
			$coff = $off + length($1) - 1;
			$coff_set = 1;
			#warn "CSB: mark coff<$coff> soff<$soff> 1<$1>\n";
			#warn "[" . substr($blk, $soff, $coff - $soff + 1) . "]\n";
		}

		if (($type eq '' || $type eq '(') && $c eq '(') {
			$level++;
			$type = '(';
		}
		if ($type eq '(' && $c eq ')') {
			$level--;
			$type = ($level != 0)? '(' : '';

			if ($level == 0 && $coff < $soff) {
				$coff = $off;
				$coff_set = 1;
				#warn "CSB: mark coff<$coff>\n";
			}
		}
		if (($type eq '' || $type eq '{') && $c eq '{') {
			$level++;
			$type = '{';
		}
		if ($type eq '{' && $c eq '}') {
			$level--;
			$type = ($level != 0)? '{' : '';

			if ($level == 0) {
				if (substr($blk, $off + 1, 1) eq ';') {
					$off++;
				}
				last;
			}
		}
		# Preprocessor commands end at the newline unless escaped.
		if ($type eq '#' && $c eq "\n" && $p ne "\\") {
			$level--;
			$type = '';
			$off++;
			last;
		}
		$off++;
	}
	# We are truly at the end, so shuffle to the next line.
	if ($off == $len) {
		$loff = $len + 1;
		$line++;
		$remain--;
	}

	my $statement = substr($blk, $soff, $off - $soff + 1);
	my $condition = substr($blk, $soff, $coff - $soff + 1);

	#warn "STATEMENT<$statement>\n";
	#warn "CONDITION<$condition>\n";

	#print "coff<$coff> soff<$off> loff<$loff>\n";

	return ($statement, $condition,
			$line, $remain + 1, $off - $loff + 1, $level);
}
+
# Count the lines of a statement after stripping the diff markers and
# blank lines at either end.
sub statement_lines {
	my ($stmt) = @_;

	# Drop the leading diff-marker character of every line.
	$stmt =~ s/(^|\n)./$1/g;
	$stmt =~ s/^\s*//;
	$stmt =~ s/\s*$//;

	# One more line than there are embedded newlines.
	my $newlines = () = $stmt =~ /\n/g;

	return $newlines + 1;
}
+
# Raw line count of a statement: one more than its newline count.
sub statement_rawlines {
	my ($stmt) = @_;

	my $newlines = () = $stmt =~ /\n/g;

	return $newlines + 1;
}
+
# Estimate the size of a brace block as the larger of its line count
# and its ';'-terminated statement count, after stripping diff markers,
# the outer braces and surrounding blanks.
sub statement_block_size {
	my ($stmt) = @_;

	$stmt =~ s/(^|\n)./$1/g;
	$stmt =~ s/^\s*{//;
	$stmt =~ s/}\s*$//;
	$stmt =~ s/^\s*//;
	$stmt =~ s/\s*$//;

	my $nr_lines = () = $stmt =~ /\n/g;
	$nr_lines += 1;
	my $nr_statements = () = $stmt =~ /;/g;

	# Whichever count is larger is the better estimate.
	return $nr_lines > $nr_statements ? $nr_lines : $nr_statements;
}
+
# Collect a full compound statement: the first condition/block pair
# plus any trailing else/do continuations. Returns ($level, $linenr,
# list of [condition, statement] chunks).
sub ctx_statement_full {
	my ($linenr, $remain, $off) = @_;
	my ($statement, $condition, $level);

	my (@chunks);

	# Grab the first conditional/block pair.
	($statement, $condition, $linenr, $remain, $off, $level) =
				ctx_statement_block($linenr, $remain, $off);
	#print "F: c<$condition> s<$statement> remain<$remain>\n";
	push(@chunks, [ $condition, $statement ]);
	if (!($remain > 0 && $condition =~ /^\s*(?:\n[+-])?\s*(?:if|else|do)\b/s)) {
		return ($level, $linenr, @chunks);
	}

	# Pull in the following conditional/block pairs and see if they
	# could continue the statement.
	for (;;) {
		($statement, $condition, $linenr, $remain, $off, $level) =
				ctx_statement_block($linenr, $remain, $off);
		#print "C: c<$condition> s<$statement> remain<$remain>\n";
		last if (!($remain > 0 && $condition =~ /^(?:\s*\n[+-])*\s*(?:else|do)\b/s));
		#print "C: push\n";
		push(@chunks, [ $condition, $statement ]);
	}

	return ($level, $linenr, @chunks);
}
+
# Collect the raw lines of a bracketed region starting at $linenr,
# tracking nesting of $open/$close (e.g. '{'/'}' or '('/')'). With
# $outer set, only lines at nesting depth <= 1 are returned. $off
# characters at the start are skipped. Returns ($level, @lines).
sub ctx_block_get {
	my ($linenr, $remain, $outer, $open, $close, $off) = @_;
	my $line;
	my $start = $linenr - 1;
	my $blk = '';
	my @o;
	my @c;
	my @res = ();

	my $level = 0;
	my @stack = ($level);
	for ($line = $start; $remain > 0; $line++) {
		next if ($rawlines[$line] =~ /^-/);
		$remain--;

		$blk .= $rawlines[$line];

		# Handle nested #if/#else.
		if ($lines[$line] =~ /^.\s*#\s*(?:ifndef|ifdef|if)\s/) {
			push(@stack, $level);
		} elsif ($lines[$line] =~ /^.\s*#\s*(?:else|elif)\b/) {
			$level = $stack[$#stack - 1];
		} elsif ($lines[$line] =~ /^.\s*#\s*endif\b/) {
			$level = pop(@stack);
		}

		foreach my $c (split(//, $lines[$line])) {
			##print "C<$c>L<$level><$open$close>O<$off>\n";
			if ($off > 0) {
				$off--;
				next;
			}

			if ($c eq $close && $level > 0) {
				$level--;
				last if ($level == 0);
			} elsif ($c eq $open) {
				$level++;
			}
		}

		if (!$outer || $level <= 1) {
			push(@res, $rawlines[$line]);
		}

		last if ($level == 0);
	}

	return ($level, @res);
}
# Convenience wrappers around ctx_block_get() for the common cases.

# Outer-level lines of the '{...}' block at $linenr.
sub ctx_block_outer {
	my ($linenr, $remain) = @_;

	my ($level, @r) = ctx_block_get($linenr, $remain, 1, '{', '}', 0);
	return @r;
}
# All lines of the '{...}' block at $linenr.
sub ctx_block {
	my ($linenr, $remain) = @_;

	my ($level, @r) = ctx_block_get($linenr, $remain, 0, '{', '}', 0);
	return @r;
}
# All lines of the '(...)' expression starting $off chars in.
sub ctx_statement {
	my ($linenr, $remain, $off) = @_;

	my ($level, @r) = ctx_block_get($linenr, $remain, 0, '(', ')', $off);
	return @r;
}
# As above but returning ($level, @lines).
sub ctx_block_level {
	my ($linenr, $remain) = @_;

	return ctx_block_get($linenr, $remain, 0, '{', '}', 0);
}
sub ctx_statement_level {
	my ($linenr, $remain, $off) = @_;

	return ctx_block_get($linenr, $remain, 0, '(', ')', $off);
}
+
# Find a comment in rawlines ($first_line .. $end_line): either one on
# the end line itself or a block comment within the range. Returns the
# comment text or ''.
sub ctx_locate_comment {
	my ($first_line, $end_line) = @_;

	# Catch a comment on the end of the line itself.
	my ($current_comment) = ($rawlines[$end_line - 1] =~ m@.*(/\*.*\*/)\s*(?:\\\s*)?$@);
	return $current_comment if (defined $current_comment);

	# Look through the context and try and figure out if there is a
	# comment.
	my $in_comment = 0;
	$current_comment = '';
	for (my $linenr = $first_line; $linenr < $end_line; $linenr++) {
		my $line = $rawlines[$linenr - 1];
		#warn "           $line\n";
		# A first line that is a '*' continuation means we
		# started inside a comment.
		if ($linenr == $first_line and $line =~ m@^.\s*\*@) {
			$in_comment = 1;
		}
		if ($line =~ m@/\*@) {
			$in_comment = 1;
		}
		if (!$in_comment && $current_comment ne '') {
			$current_comment = '';
		}
		$current_comment .= $line . "\n" if ($in_comment);
		if ($line =~ m@\*/@) {
			$in_comment = 0;
		}
	}

	chomp($current_comment);
	return($current_comment);
}
# True when the range $first_line..$end_line contains a comment.
sub ctx_has_comment {
	my ($first_line, $end_line) = @_;

	return (ctx_locate_comment($first_line, $end_line) ne '');
}
+
+# Return the ($cnt+1)-th raw patch line at or after line $linenr, skipping
+# removed ('-') lines.  May return undef when the input runs out.
+sub raw_line {
+	my ($linenr, $cnt) = @_;
+
+	my $idx = $linenr - 1;
+	my $want = $cnt + 1;
+
+	my $line;
+	while ($want > 0) {
+		$line = $rawlines[$idx++];
+		next if (defined($line) && $line =~ /^-/);
+		$want--;
+	}
+
+	return $line;
+}
+
+# Reassemble the statement spanning raw lines $linenr..$lc into a single
+# newline-joined string.
+sub get_stat_real {
+	my ($linenr, $lc) = @_;
+
+	return join("\n", map { raw_line($_, 0) } ($linenr, $linenr + 1 .. $lc));
+}
+
+# Build a "here" context string: the caller-supplied header $here followed
+# by $cnt raw lines starting at $linenr, one per output line.
+sub get_stat_here {
+	my ($linenr, $cnt, $here) = @_;
+
+	my $ctx = $here . "\n";
+	foreach my $n (0 .. $cnt - 1) {
+		$ctx .= raw_line($linenr, $n) . "\n";
+	}
+
+	return $ctx;
+}
+
+# Make control characters visible (like "cat -vet"): each control
+# character is rendered in caret notation (^X) and a '$' marks the end of
+# the string.
+sub cat_vet {
+	my ($vet) = @_;
+
+	my $out = join('', map {
+		/[[:cntrl:]]/ ? sprintf("^%c", unpack('C', $_) + 64) : $_
+	} split(//, $vet));
+	$out =~ s/$/\$/;
+
+	return $out;
+}
+
+# Scanner state shared by annotate_reset() and annotate_values() below, so
+# annotation can continue across successive calls within one statement.
+my $av_preprocessor = 0;	# currently inside a preprocessor directive
+my $av_pending;			# type to assign when the pending '(' opens
+my @av_paren_type;		# stack of types saved at each open paren
+my $av_pend_colon;		# classification for the next ':' seen
+
+# Reset the annotate_values() state machine for a fresh statement:
+# '_' = nothing pending, 'E' = expression edge, 'O' = no pending colon.
+sub annotate_reset {
+	$av_preprocessor = 0;
+	$av_pending = '_';
+	@av_paren_type = ('E');
+	$av_pend_colon = 'O';
+}
+
+# Scan the sanitised statement $stream and classify every character
+# position.  Returns two strings the same length as $stream: $res, where
+# each character is the coarse "value type" assigned at that position
+# ('E' expression edge, 'N' value expected, 'V' value, 'T' type,
+# 'c' cast, 'C' comma), and $var, which marks special positions ('B'/'U'
+# for binary vs unary '-'/'+'/'*'/'&', plus the pending-colon class at
+# each ':').  $type is the type state carried in by the caller; scanner
+# state persists in the file-scoped av_* variables, so annotate_reset()
+# must be called between independent statements.
+sub annotate_values {
+	my ($stream, $type) = @_;
+
+	my $res;
+	my $var = '_' x length($stream);
+	my $cur = $stream;
+
+	print "$stream\n" if ($dbg_values > 1);
+
+	while (length($cur)) {
+		@av_paren_type = ('E') if ($#av_paren_type < 0);
+		print " <" . join('', @av_paren_type) .
+					"> <$type> <$av_pending>" if ($dbg_values > 1);
+		if ($cur =~ /^(\s+)/o) {
+			print "WS($1)\n" if ($dbg_values > 1);
+			if ($1 =~ /\n/ && $av_preprocessor) {
+				$type = pop(@av_paren_type);
+				$av_preprocessor = 0;
+			}
+
+		} elsif ($cur =~ /^(\(\s*$Type\s*)\)/ && $av_pending eq '_') {
+			print "CAST($1)\n" if ($dbg_values > 1);
+			push(@av_paren_type, $type);
+			$type = 'c';
+
+		} elsif ($cur =~ /^($Type)\s*(?:$Ident|,|\)|\(|\s*$)/) {
+			print "DECLARE($1)\n" if ($dbg_values > 1);
+			$type = 'T';
+
+		} elsif ($cur =~ /^($Modifier)\s*/) {
+			print "MODIFIER($1)\n" if ($dbg_values > 1);
+			$type = 'T';
+
+		} elsif ($cur =~ /^(\#\s*define\s*$Ident)(\(?)/o) {
+			print "DEFINE($1,$2)\n" if ($dbg_values > 1);
+			$av_preprocessor = 1;
+			push(@av_paren_type, $type);
+			if ($2 ne '') {
+				$av_pending = 'N';
+			}
+			$type = 'E';
+
+		} elsif ($cur =~ /^(\#\s*(?:undef\s*$Ident|include\b))/o) {
+			print "UNDEF($1)\n" if ($dbg_values > 1);
+			$av_preprocessor = 1;
+			push(@av_paren_type, $type);
+
+		} elsif ($cur =~ /^(\#\s*(?:ifdef|ifndef|if))/o) {
+			print "PRE_START($1)\n" if ($dbg_values > 1);
+			$av_preprocessor = 1;
+
+			# Push twice: one entry per conditional arm, one for
+			# the state to restore at the matching #endif.
+			push(@av_paren_type, $type);
+			push(@av_paren_type, $type);
+			$type = 'E';
+
+		} elsif ($cur =~ /^(\#\s*(?:else|elif))/o) {
+			print "PRE_RESTART($1)\n" if ($dbg_values > 1);
+			$av_preprocessor = 1;
+
+			push(@av_paren_type, $av_paren_type[$#av_paren_type]);
+
+			$type = 'E';
+
+		} elsif ($cur =~ /^(\#\s*(?:endif))/o) {
+			print "PRE_END($1)\n" if ($dbg_values > 1);
+
+			$av_preprocessor = 1;
+
+			# Assume all arms of the conditional end as this
+			# one does, and continue as if the #endif was not here.
+			pop(@av_paren_type);
+			push(@av_paren_type, $type);
+			$type = 'E';
+
+		} elsif ($cur =~ /^(\\\n)/o) {
+			print "PRECONT($1)\n" if ($dbg_values > 1);
+
+		} elsif ($cur =~ /^(__attribute__)\s*\(?/o) {
+			print "ATTR($1)\n" if ($dbg_values > 1);
+			$av_pending = $type;
+			$type = 'N';
+
+		} elsif ($cur =~ /^(sizeof)\s*(\()?/o) {
+			print "SIZEOF($1)\n" if ($dbg_values > 1);
+			if (defined $2) {
+				$av_pending = 'V';
+			}
+			$type = 'N';
+
+		} elsif ($cur =~ /^(if|while|for)\b/o) {
+			print "COND($1)\n" if ($dbg_values > 1);
+			$av_pending = 'E';
+			$type = 'N';
+
+		} elsif ($cur =~/^(case)/o) {
+			print "CASE($1)\n" if ($dbg_values > 1);
+			$av_pend_colon = 'C';
+			$type = 'N';
+
+		} elsif ($cur =~/^(return|else|goto|typeof|__typeof__)\b/o) {
+			print "KEYWORD($1)\n" if ($dbg_values > 1);
+			$type = 'N';
+
+		} elsif ($cur =~ /^(\()/o) {
+			print "PAREN('$1')\n" if ($dbg_values > 1);
+			push(@av_paren_type, $av_pending);
+			$av_pending = '_';
+			$type = 'N';
+
+		} elsif ($cur =~ /^(\))/o) {
+			my $new_type = pop(@av_paren_type);
+			if ($new_type ne '_') {
+				$type = $new_type;
+				print "PAREN('$1') -> $type\n"
+							if ($dbg_values > 1);
+			} else {
+				print "PAREN('$1')\n" if ($dbg_values > 1);
+			}
+
+		} elsif ($cur =~ /^($Ident)\s*\(/o) {
+			print "FUNC($1)\n" if ($dbg_values > 1);
+			$type = 'V';
+			$av_pending = 'V';
+
+		} elsif ($cur =~ /^($Ident\s*):(?:\s*\d+\s*(,|=|;))?/) {
+			if (defined $2 && $type eq 'C' || $type eq 'T') {
+				$av_pend_colon = 'B';
+			} elsif ($type eq 'E') {
+				$av_pend_colon = 'L';
+			}
+			print "IDENT_COLON($1,$type>$av_pend_colon)\n" if ($dbg_values > 1);
+			$type = 'V';
+
+		} elsif ($cur =~ /^($Ident|$Constant)/o) {
+			print "IDENT($1)\n" if ($dbg_values > 1);
+			$type = 'V';
+
+		} elsif ($cur =~ /^($Assignment)/o) {
+			print "ASSIGN($1)\n" if ($dbg_values > 1);
+			$type = 'N';
+
+		} elsif ($cur =~/^(;|{|})/) {
+			print "END($1)\n" if ($dbg_values > 1);
+			$type = 'E';
+			$av_pend_colon = 'O';
+
+		} elsif ($cur =~/^(,)/) {
+			print "COMMA($1)\n" if ($dbg_values > 1);
+			$type = 'C';
+
+		} elsif ($cur =~ /^(\?)/o) {
+			print "QUESTION($1)\n" if ($dbg_values > 1);
+			$type = 'N';
+
+		} elsif ($cur =~ /^(:)/o) {
+			print "COLON($1,$av_pend_colon)\n" if ($dbg_values > 1);
+
+			substr($var, length($res), 1, $av_pend_colon);
+			if ($av_pend_colon eq 'C' || $av_pend_colon eq 'L') {
+				$type = 'E';
+			} else {
+				$type = 'N';
+			}
+			$av_pend_colon = 'O';
+
+		} elsif ($cur =~ /^(\[)/o) {
+			print "CLOSE($1)\n" if ($dbg_values > 1);
+			$type = 'N';
+
+		} elsif ($cur =~ /^(-(?![->])|\+(?!\+)|\*|\&\&|\&)/o) {
+			my $variant;
+
+			print "OPV($1)\n" if ($dbg_values > 1);
+			if ($type eq 'V') {
+				$variant = 'B';
+			} else {
+				$variant = 'U';
+			}
+
+			substr($var, length($res), 1, $variant);
+			$type = 'N';
+
+		} elsif ($cur =~ /^($Operators)/o) {
+			print "OP($1)\n" if ($dbg_values > 1);
+			if ($1 ne '++' && $1 ne '--') {
+				$type = 'N';
+			}
+
+		} elsif ($cur =~ /(^.)/o) {
+			print "C($1)\n" if ($dbg_values > 1);
+		}
+		# Consume the token matched above and annotate its whole span
+		# with the current type.
+		if (defined $1) {
+			$cur = substr($cur, length($1));
+			$res .= $type x length($1);
+		}
+	}
+
+	return ($res, $var);
+}
+
+# Record a candidate type name (or modifier) seen in the patch so the
+# dynamically-built $Type/$Modifier matchers can learn it.  $possible is
+# the candidate string, $line the originating line (debug output only).
+# Candidates matching $notPermitted (known keywords, existing types and
+# modifiers) are rejected; accepted tokens are pushed onto @typeListFile
+# or @modifierListFile and build_types() regenerates the regexes.
+sub possible {
+	my ($possible, $line) = @_;
+	my $notPermitted = qr{(?:
+		^(?:
+			$Modifier|
+			$Storage|
+			$Type|
+			DEFINE_\S+
+		)$|
+		^(?:
+			goto|
+			return|
+			case|
+			else|
+			asm|__asm__|
+			do|
+			\#|
+			\#\#|
+		)(?:\s|$)|
+		^(?:typedef|struct|enum)\b
+	)}x;
+	warn "CHECK<$possible> ($line)\n" if ($dbg_possible > 2);
+	if ($possible !~ $notPermitted) {
+		# Check for modifiers.
+		$possible =~ s/\s*$Storage\s*//g;
+		$possible =~ s/\s*$Sparse\s*//g;
+		if ($possible =~ /^\s*$/) {
+			# Nothing left after stripping storage/sparse
+			# annotations -- not a new type.
+
+		} elsif ($possible =~ /\s/) {
+			# Multiple words: strip the known type and treat the
+			# remaining words as candidate modifiers.
+			$possible =~ s/\s*$Type\s*//g;
+			for my $modifier (split(' ', $possible)) {
+				if ($modifier !~ $notPermitted) {
+					warn "MODIFIER: $modifier ($possible) ($line)\n" if ($dbg_possible);
+					push(@modifierListFile, $modifier);
+				}
+			}
+
+		} else {
+			warn "POSSIBLE: $possible ($line)\n" if ($dbg_possible);
+			push(@typeListFile, $possible);
+		}
+		build_types();
+	} else {
+		warn "NOTPOSS: $possible ($line)\n" if ($dbg_possible > 1);
+	}
+}
+
+# Per-message prefix prepended by report(); set while each line is processed.
+my $prefix = '';
+
+# Decide whether messages of this $type should be emitted, honouring the
+# file-scoped %use_type allow-list and %ignore_type deny-list.
+sub show_type {
+	my ($type) = @_;
+
+	$type =~ tr/a-z/A-Z/;
+
+	# An explicit allow-list, when present, is authoritative.
+	if (scalar keys %use_type > 0) {
+		return defined $use_type{$type};
+	}
+
+	return !defined $ignore_type{$type};
+}
+
+# Format and record one message of the given $level ('ERROR', 'WARNING'
+# or anything else, coloured green) and $type tag.  Filtered by
+# show_type() and the $tst_only message filter; honours the $color,
+# $show_types, $showfile and $terse output options.  The formatted text
+# is appended to the package-level @report list.
+# Returns 1 if the message was recorded, 0 if it was filtered out.
+sub report {
+	my ($level, $type, $msg) = @_;
+
+	if (!show_type($type) ||
+	    (defined $tst_only && $msg !~ /\Q$tst_only\E/)) {
+		return 0;
+	}
+	my $output = '';
+	if ($color) {
+		if ($level eq 'ERROR') {
+			$output .= RED;
+		} elsif ($level eq 'WARNING') {
+			$output .= YELLOW;
+		} else {
+			$output .= GREEN;
+		}
+	}
+	$output .= $prefix . $level . ':';
+	if ($show_types) {
+		$output .= BLUE if ($color);
+		$output .= "$type:";
+	}
+	$output .= RESET if ($color);
+	$output .= ' ' . $msg . "\n";
+
+	if ($showfile) {
+		# Drop the second output line; with $showfile the location is
+		# already carried in $prefix (see where $prefix is assigned).
+		my @lines = split("\n", $output, -1);
+		splice(@lines, 1, 1);
+		$output = join("\n", @lines);
+	}
+	# Terse mode keeps only the first line of the message.
+	$output = (split('\n', $output))[0] . "\n" if ($terse);
+
+	push(our @report, $output);
+
+	return 1;
+}
+
+# Accessor for the messages accumulated by report().
+sub report_dump {
+	our @report;
+	return @report;
+}
+
+# Adjust the "+start,len" half of a unified-diff hunk header in place:
+# shift the start by $offset and grow/shrink the length by $length.
+# $lineRef is a reference to the header line; non-header lines are left
+# untouched.
+sub fixup_current_range {
+	my ($lineRef, $offset, $length) = @_;
+
+	return unless ($$lineRef =~ /^\@\@ -\d+,\d+ \+(\d+),(\d+) \@\@/);
+
+	my ($start, $len) = ($1, $2);
+	my $new_start = $start + $offset;
+	my $new_len = $len + $length;
+	$$lineRef =~ s/\+$start,$len \@\@/\+$new_start,$new_len \@\@/;
+}
+
+# Produce the fixed version of the patch: walk the original lines in
+# $linesRef, dropping lines queued in $deletedRef and splicing in lines
+# queued in $insertedRef (both lists of {LINENR, LINE} records keyed by
+# original line number), while re-counting the enclosing "@@ ... @@" hunk
+# header through fixup_current_range() for every line added or removed.
+# Returns the resulting line list.
+sub fix_inserted_deleted_lines {
+	my ($linesRef, $insertedRef, $deletedRef) = @_;
+
+	# Index (in the output) of the current hunk header, and the running
+	# line-count delta applied to it.
+	my $range_last_linenr = 0;
+	my $delta_offset = 0;
+
+	my $old_linenr = 0;
+	my $new_linenr = 0;
+
+	my $next_insert = 0;
+	my $next_delete = 0;
+
+	my @lines = ();
+
+	my $inserted = @{$insertedRef}[$next_insert++];
+	my $deleted = @{$deletedRef}[$next_delete++];
+
+	foreach my $old_line (@{$linesRef}) {
+		my $save_line = 1;
+		my $line = $old_line; #don't modify the array
+		if ($line =~ /^(?:\+\+\+|\-\-\-)\s+\S+/) { #new filename
+			$delta_offset = 0;
+		} elsif ($line =~ /^\@\@ -\d+,\d+ \+\d+,\d+ \@\@/) { #new hunk
+			$range_last_linenr = $new_linenr;
+			fixup_current_range(\$line, $delta_offset, 0);
+		}
+
+		# Drop every queued deletion recorded against this line.
+		while (defined($deleted) && ${$deleted}{'LINENR'} == $old_linenr) {
+			$deleted = @{$deletedRef}[$next_delete++];
+			$save_line = 0;
+			fixup_current_range(\$lines[$range_last_linenr], $delta_offset--, -1);
+		}
+
+		# Splice in every queued insertion recorded against this line.
+		while (defined($inserted) && ${$inserted}{'LINENR'} == $old_linenr) {
+			push(@lines, ${$inserted}{'LINE'});
+			$inserted = @{$insertedRef}[$next_insert++];
+			$new_linenr++;
+			fixup_current_range(\$lines[$range_last_linenr], $delta_offset++, 1);
+		}
+
+		if ($save_line) {
+			push(@lines, $line);
+			$new_linenr++;
+		}
+
+		$old_linenr++;
+	}
+
+	return @lines;
+}
+
+# Queue $line for insertion at original line $linenr; the queue is later
+# consumed by fix_inserted_deleted_lines().
+sub fix_insert_line {
+	my ($linenr, $line) = @_;
+
+	push(@fixed_inserted, { LINENR => $linenr, LINE => $line });
+}
+
+# Queue $line (at original line $linenr) for deletion; the queue is later
+# consumed by fix_inserted_deleted_lines().
+sub fix_delete_line {
+	my ($linenr, $line) = @_;
+
+	push(@fixed_deleted, { LINENR => $linenr, LINE => $line });
+}
+
+# Report an ERROR-level message.  On success clears the overall "clean"
+# flag, bumps the error counter and returns 1; returns 0 when filtered.
+sub ERROR {
+	my ($type, $msg) = @_;
+
+	return 0 unless (report("ERROR", $type, $msg));
+
+	our $clean = 0;
+	our $cnt_error++;
+	return 1;
+}
+# Report a WARNING-level message.  On success clears the overall "clean"
+# flag, bumps the warning counter and returns 1; returns 0 when filtered.
+sub WARN {
+	my ($type, $msg) = @_;
+
+	return 0 unless (report("WARNING", $type, $msg));
+
+	our $clean = 0;
+	our $cnt_warn++;
+	return 1;
+}
+# Report a CHECK-level message, emitted only when $check mode is enabled.
+# On success clears the "clean" flag, bumps the check counter and returns
+# 1; returns 0 otherwise.
+sub CHK {
+	my ($type, $msg) = @_;
+
+	return 0 unless ($check && report("CHECK", $type, $msg));
+
+	our $clean = 0;
+	our $cnt_chk++;
+	return 1;
+}
+
+# Warn when an absolute path mentioned in changelog text resolves to a
+# file inside the source tree ($root): relative paths should be used.
+# Returns 0 early when no suffix of the path exists under $root.
+sub check_absolute_file {
+	my ($absolute, $herecurr) = @_;
+	my $file = $absolute;
+
+	##print "absolute<$absolute>\n";
+
+	# See if any suffix of this path is a path within the tree.
+	while ($file =~ s@^[^/]*/@@) {
+		if (-f "$root/$file") {
+			##print "file<$file>\n";
+			last;
+		}
+	}
+	# "-f _" re-tests the stat buffer cached by the last -f above, so
+	# this is false when the loop exhausted the path without a match.
+	if (! -f _) {
+		return 0;
+	}
+
+	# It is, so see if the prefix is acceptable.
+	my $prefix = $absolute;
+	substr($prefix, -length($file)) = '';
+
+	##print "prefix<$prefix>\n";
+	if ($prefix ne ".../") {
+		WARN("USE_RELATIVE_PATH",
+		     "use relative pathname instead of absolute in changelog text\n" . $herecurr);
+	}
+}
+
+# Strip leading and trailing whitespace from $string.
+sub trim {
+	my ($string) = @_;
+
+	$string =~ s/^\s+//;
+	$string =~ s/\s+$//;
+
+	return $string;
+}
+
+# Strip leading whitespace only.
+sub ltrim {
+	my ($str) = @_;
+
+	$str =~ s/\A\s+//;
+	return $str;
+}
+
+# Strip trailing whitespace only.
+sub rtrim {
+	my ($str) = @_;
+
+	$str =~ s/\s+\z//;
+	return $str;
+}
+
+# Return a copy of $haystack with every match of the pattern $find
+# replaced by $replace; the caller's string is left unchanged.
+sub string_find_replace {
+	my ($haystack, $find, $replace) = @_;
+
+	(my $result = $haystack) =~ s/$find/$replace/g;
+	return $result;
+}
+
+# Canonicalise leading whitespace to tabs, assuming 8-column tab stops.
+sub tabify {
+	my ($leading) = @_;
+
+	my $indent_width = 8;
+	my $max_spaces = $indent_width - 1;
+	my $tab_spaces = " " x $indent_width;
+
+	# Fold each full run of 8 leading spaces into a tab.
+	1 while $leading =~ s@^([\t]*)$tab_spaces@$1\t@g;
+	# Drop shorter space runs preceding a tab (the tab covers them).
+	1 while $leading =~ s@^([\t]*)( {1,$max_spaces})\t@$1\t@g;
+
+	return "$leading";
+}
+
+# Find the last unmatched '(' on $line, for continuation-line alignment
+# checks.  Returns its 1-based, tab-expanded visual column, or -1 when
+# the line has no '(' or closes at least as many parens as it opens.
+sub pos_last_openparen {
+	my ($line) = @_;
+
+	my $pos = 0;
+
+	# tr in scalar context counts occurrences.
+	my $opens = $line =~ tr/\(/\(/;
+	my $closes = $line =~ tr/\)/\)/;
+
+	my $last_openparen = 0;
+
+	if (($opens == 0) || ($closes >= $opens)) {
+		return -1;
+	}
+
+	my $len = length($line);
+
+	for ($pos = 0; $pos < $len; $pos++) {
+		my $string = substr($line, $pos);
+		# Skip over complete argument/balanced-paren groups so only a
+		# paren left open at end of line is recorded.
+		if ($string =~ /^($FuncArg|$balanced_parens)/) {
+			$pos += length($1) - 1;
+		} elsif (substr($line, $pos, 1) eq '(') {
+			$last_openparen = $pos;
+		} elsif (index($string, '(') == -1) {
+			last;
+		}
+	}
+
+	return length(expand_tabs(substr($line, 0, $last_openparen))) + 1;
+}
+
+sub process {
+ my $filename = shift;
+
+ my $linenr=0;
+ my $prevline="";
+ my $prevrawline="";
+ my $stashline="";
+ my $stashrawline="";
+
+ my $length;
+ my $indent;
+ my $previndent=0;
+ my $stashindent=0;
+
+ our $clean = 1;
+ my $signoff = 0;
+ my $is_patch = 0;
+ my $in_header_lines = $file ? 0 : 1;
+ my $in_commit_log = 0; #Scanning lines before patch
+ my $has_commit_log = 0; #Encountered lines before patch
+ my $commit_log_possible_stack_dump = 0;
+ my $commit_log_long_line = 0;
+ my $commit_log_has_diff = 0;
+ my $reported_maintainer_file = 0;
+ my $non_utf8_charset = 0;
+
+ my $last_blank_line = 0;
+ my $last_coalesced_string_linenr = -1;
+
+ our @report = ();
+ our $cnt_lines = 0;
+ our $cnt_error = 0;
+ our $cnt_warn = 0;
+ our $cnt_chk = 0;
+
+ # Trace the real file/line as we go.
+ my $realfile = '';
+ my $realline = 0;
+ my $realcnt = 0;
+ my $here = '';
+ my $context_function; #undef'd unless there's a known function
+ my $in_comment = 0;
+ my $comment_edge = 0;
+ my $first_line = 0;
+ my $p1_prefix = '';
+
+ my $prev_values = 'E';
+
+ # suppression flags
+ my %suppress_ifbraces;
+ my %suppress_whiletrailers;
+ my %suppress_export;
+ my $suppress_statement = 0;
+
+ my %signatures = ();
+
+ # Pre-scan the patch sanitizing the lines.
+ # Pre-scan the patch looking for any __setup documentation.
+ #
+ my @setup_docs = ();
+ my $setup_docs = 0;
+
+ my $camelcase_file_seeded = 0;
+
+ my $checklicenseline = 1;
+
+ sanitise_line_reset();
+ my $line;
+ foreach my $rawline (@rawlines) {
+ $linenr++;
+ $line = $rawline;
+
+ push(@fixed, $rawline) if ($fix);
+
+ if ($rawline=~/^\+\+\+\s+(\S+)/) {
+ $setup_docs = 0;
+ if ($1 =~ m@Documentation/admin-guide/kernel-parameters.rst$@) {
+ $setup_docs = 1;
+ }
+ #next;
+ }
+ if ($rawline =~ /^\@\@ -\d+(?:,\d+)? \+(\d+)(,(\d+))? \@\@/) {
+ $realline=$1-1;
+ if (defined $2) {
+ $realcnt=$3+1;
+ } else {
+ $realcnt=1+1;
+ }
+ $in_comment = 0;
+
+ # Guestimate if this is a continuing comment. Run
+ # the context looking for a comment "edge". If this
+ # edge is a close comment then we must be in a comment
+ # at context start.
+ my $edge;
+ my $cnt = $realcnt;
+ for (my $ln = $linenr + 1; $cnt > 0; $ln++) {
+ next if (defined $rawlines[$ln - 1] &&
+ $rawlines[$ln - 1] =~ /^-/);
+ $cnt--;
+ #print "RAW<$rawlines[$ln - 1]>\n";
+ last if (!defined $rawlines[$ln - 1]);
+ if ($rawlines[$ln - 1] =~ m@(/\*|\*/)@ &&
+ $rawlines[$ln - 1] !~ m@"[^"]*(?:/\*|\*/)[^"]*"@) {
+ ($edge) = $1;
+ last;
+ }
+ }
+ if (defined $edge && $edge eq '*/') {
+ $in_comment = 1;
+ }
+
+ # Guestimate if this is a continuing comment. If this
+ # is the start of a diff block and this line starts
+ # ' *' then it is very likely a comment.
+ if (!defined $edge &&
+ $rawlines[$linenr] =~ m@^.\s*(?:\*\*+| \*)(?:\s|$)@)
+ {
+ $in_comment = 1;
+ }
+
+ ##print "COMMENT:$in_comment edge<$edge> $rawline\n";
+ sanitise_line_reset($in_comment);
+
+ } elsif ($realcnt && $rawline =~ /^(?:\+| |$)/) {
+ # Standardise the strings and chars within the input to
+ # simplify matching -- only bother with positive lines.
+ $line = sanitise_line($rawline);
+ }
+ push(@lines, $line);
+
+ if ($realcnt > 1) {
+ $realcnt-- if ($line =~ /^(?:\+| |$)/);
+ } else {
+ $realcnt = 0;
+ }
+
+ #print "==>$rawline\n";
+ #print "-->$line\n";
+
+ if ($setup_docs && $line =~ /^\+/) {
+ push(@setup_docs, $line);
+ }
+ }
+
+ $prefix = '';
+
+ $realcnt = 0;
+ $linenr = 0;
+ $fixlinenr = -1;
+ foreach my $line (@lines) {
+ $linenr++;
+ $fixlinenr++;
+ my $sline = $line; #copy of $line
+ $sline =~ s/$;/ /g; #with comments as spaces
+
+ my $rawline = $rawlines[$linenr - 1];
+
+#extract the line range in the file after the patch is applied
+ if (!$in_commit_log &&
+ $line =~ /^\@\@ -\d+(?:,\d+)? \+(\d+)(,(\d+))? \@\@(.*)/) {
+ my $context = $4;
+ $is_patch = 1;
+ $first_line = $linenr + 1;
+ $realline=$1-1;
+ if (defined $2) {
+ $realcnt=$3+1;
+ } else {
+ $realcnt=1+1;
+ }
+ annotate_reset();
+ $prev_values = 'E';
+
+ %suppress_ifbraces = ();
+ %suppress_whiletrailers = ();
+ %suppress_export = ();
+ $suppress_statement = 0;
+ if ($context =~ /\b(\w+)\s*\(/) {
+ $context_function = $1;
+ } else {
+ undef $context_function;
+ }
+ next;
+
+# track the line number as we move through the hunk, note that
+# new versions of GNU diff omit the leading space on completely
+# blank context lines so we need to count that too.
+ } elsif ($line =~ /^( |\+|$)/) {
+ $realline++;
+ $realcnt-- if ($realcnt != 0);
+
+ # Measure the line length and indent.
+ ($length, $indent) = line_stats($rawline);
+
+ # Track the previous line.
+ ($prevline, $stashline) = ($stashline, $line);
+ ($previndent, $stashindent) = ($stashindent, $indent);
+ ($prevrawline, $stashrawline) = ($stashrawline, $rawline);
+
+ #warn "line<$line>\n";
+
+ } elsif ($realcnt == 1) {
+ $realcnt--;
+ }
+
+ my $hunk_line = ($realcnt != 0);
+
+ $here = "#$linenr: " if (!$file);
+ $here = "#$realline: " if ($file);
+
+ my $found_file = 0;
+ # extract the filename as it passes
+ if ($line =~ /^diff --git.*?(\S+)$/) {
+ $realfile = $1;
+ $realfile =~ s@^([^/]*)/@@ if (!$file);
+ $in_commit_log = 0;
+ $found_file = 1;
+ } elsif ($line =~ /^\+\+\+\s+(\S+)/) {
+ $realfile = $1;
+ $realfile =~ s@^([^/]*)/@@ if (!$file);
+ $in_commit_log = 0;
+
+ $p1_prefix = $1;
+ if (!$file && $tree && $p1_prefix ne '' &&
+ -e "$root/$p1_prefix") {
+ WARN("PATCH_PREFIX",
+ "patch prefix '$p1_prefix' exists, appears to be a -p0 patch\n");
+ }
+
+ if ($realfile =~ m@^include/asm/@) {
+ ERROR("MODIFIED_INCLUDE_ASM",
+ "do not modify files in include/asm, change architecture specific files in include/asm-<architecture>\n" . "$here$rawline\n");
+ }
+ $found_file = 1;
+ }
+
+#make up the handle for any error we report on this line
+ if ($showfile) {
+ $prefix = "$realfile:$realline: "
+ } elsif ($emacs) {
+ if ($file) {
+ $prefix = "$filename:$realline: ";
+ } else {
+ $prefix = "$filename:$linenr: ";
+ }
+ }
+
+ if ($found_file) {
+ if (is_maintained_obsolete($realfile)) {
+ WARN("OBSOLETE",
+ "$realfile is marked as 'obsolete' in the MAINTAINERS hierarchy. No unnecessary modifications please.\n");
+ }
+ if ($realfile =~ m@^(?:drivers/net/|net/|drivers/staging/)@) {
+ $check = 1;
+ } else {
+ $check = $check_orig;
+ }
+ $checklicenseline = 1;
+ next;
+ }
+
+ $here .= "FILE: $realfile:$realline:" if ($realcnt != 0);
+
+ my $hereline = "$here\n$rawline\n";
+ my $herecurr = "$here\n$rawline\n";
+ my $hereprev = "$here\n$prevrawline\n$rawline\n";
+
+ $cnt_lines++ if ($realcnt != 0);
+
+# Check if the commit log has what seems like a diff which can confuse patch
+ if ($in_commit_log && !$commit_log_has_diff &&
+ (($line =~ m@^\s+diff\b.*a/[\w/]+@ &&
+ $line =~ m@^\s+diff\b.*a/([\w/]+)\s+b/$1\b@) ||
+ $line =~ m@^\s*(?:\-\-\-\s+a/|\+\+\+\s+b/)@ ||
+ $line =~ m/^\s*\@\@ \-\d+,\d+ \+\d+,\d+ \@\@/)) {
+ ERROR("DIFF_IN_COMMIT_MSG",
+ "Avoid using diff content in the commit message - patch(1) might not work\n" . $herecurr);
+ $commit_log_has_diff = 1;
+ }
+
+# Check for incorrect file permissions
+ if ($line =~ /^new (file )?mode.*[7531]\d{0,2}$/) {
+ my $permhere = $here . "FILE: $realfile\n";
+ if ($realfile !~ m@scripts/@ &&
+ $realfile !~ /\.(py|pl|awk|sh)$/) {
+ ERROR("EXECUTE_PERMISSIONS",
+ "do not set execute permissions for source files\n" . $permhere);
+ }
+ }
+
+# Check the patch for a signoff:
+ if ($line =~ /^\s*signed-off-by:/i) {
+ $signoff++;
+ $in_commit_log = 0;
+ }
+
+# Check if MAINTAINERS is being updated. If so, there's probably no need to
+# emit the "does MAINTAINERS need updating?" message on file add/move/delete
+ if ($line =~ /^\s*MAINTAINERS\s*\|/) {
+ $reported_maintainer_file = 1;
+ }
+
+# Check signature styles
+ if (!$in_header_lines &&
+ $line =~ /^(\s*)([a-z0-9_-]+by:|$signature_tags)(\s*)(.*)/i) {
+ my $space_before = $1;
+ my $sign_off = $2;
+ my $space_after = $3;
+ my $email = $4;
+ my $ucfirst_sign_off = ucfirst(lc($sign_off));
+
+ if ($sign_off !~ /$signature_tags/) {
+ WARN("BAD_SIGN_OFF",
+ "Non-standard signature: $sign_off\n" . $herecurr);
+ }
+ if (defined $space_before && $space_before ne "") {
+ if (WARN("BAD_SIGN_OFF",
+ "Do not use whitespace before $ucfirst_sign_off\n" . $herecurr) &&
+ $fix) {
+ $fixed[$fixlinenr] =
+ "$ucfirst_sign_off $email";
+ }
+ }
+ if ($sign_off =~ /-by:$/i && $sign_off ne $ucfirst_sign_off) {
+ if (WARN("BAD_SIGN_OFF",
+ "'$ucfirst_sign_off' is the preferred signature form\n" . $herecurr) &&
+ $fix) {
+ $fixed[$fixlinenr] =
+ "$ucfirst_sign_off $email";
+ }
+
+ }
+ if (!defined $space_after || $space_after ne " ") {
+ if (WARN("BAD_SIGN_OFF",
+ "Use a single space after $ucfirst_sign_off\n" . $herecurr) &&
+ $fix) {
+ $fixed[$fixlinenr] =
+ "$ucfirst_sign_off $email";
+ }
+ }
+
+ my ($email_name, $email_address, $comment) = parse_email($email);
+ my $suggested_email = format_email(($email_name, $email_address));
+ if ($suggested_email eq "") {
+ ERROR("BAD_SIGN_OFF",
+ "Unrecognized email address: '$email'\n" . $herecurr);
+ } else {
+ my $dequoted = $suggested_email;
+ $dequoted =~ s/^"//;
+ $dequoted =~ s/" </ </;
+ # Don't force email to have quotes
+ # Allow just an angle bracketed address
+ if ("$dequoted$comment" ne $email &&
+ "<$email_address>$comment" ne $email &&
+ "$suggested_email$comment" ne $email) {
+ WARN("BAD_SIGN_OFF",
+ "email address '$email' might be better as '$suggested_email$comment'\n" . $herecurr);
+ }
+ }
+
+# Check for duplicate signatures
+ my $sig_nospace = $line;
+ $sig_nospace =~ s/\s//g;
+ $sig_nospace = lc($sig_nospace);
+ if (defined $signatures{$sig_nospace}) {
+ WARN("BAD_SIGN_OFF",
+ "Duplicate signature\n" . $herecurr);
+ } else {
+ $signatures{$sig_nospace} = 1;
+ }
+ }
+
+# Check email subject for common tools that don't need to be mentioned
+ if ($in_header_lines &&
+ $line =~ /^Subject:.*\b(?:checkpatch|sparse|smatch)\b[^:]/i) {
+ WARN("EMAIL_SUBJECT",
+ "A patch subject line should describe the change not the tool that found it\n" . $herecurr);
+ }
+
+# Check for old stable address
+ if ($line =~ /^\s*cc:\s*.*<?\bstable\@kernel\.org\b>?.*$/i) {
+ ERROR("STABLE_ADDRESS",
+ "The 'stable' address should be 'stable\@vger.kernel.org'\n" . $herecurr);
+ }
+
+# Check for unwanted Gerrit info
+ if ($in_commit_log && $line =~ /^\s*change-id:/i) {
+ ERROR("GERRIT_CHANGE_ID",
+ "Remove Gerrit Change-Id's before submitting upstream.\n" . $herecurr);
+ }
+
+# Check if the commit log is in a possible stack dump
+ if ($in_commit_log && !$commit_log_possible_stack_dump &&
+ ($line =~ /^\s*(?:WARNING:|BUG:)/ ||
+ $line =~ /^\s*\[\s*\d+\.\d{6,6}\s*\]/ ||
+ # timestamp
+ $line =~ /^\s*\[\<[0-9a-fA-F]{8,}\>\]/)) {
+ # stack dump address
+ $commit_log_possible_stack_dump = 1;
+ }
+
+# Check for line lengths > 75 in commit log, warn once
+ if ($in_commit_log && !$commit_log_long_line &&
+ length($line) > 75 &&
+ !($line =~ /^\s*[a-zA-Z0-9_\/\.]+\s+\|\s+\d+/ ||
+ # file delta changes
+ $line =~ /^\s*(?:[\w\.\-]+\/)++[\w\.\-]+:/ ||
+ # filename then :
+ $line =~ /^\s*(?:Fixes:|Link:)/i ||
+ # A Fixes: or Link: line
+ $commit_log_possible_stack_dump)) {
+ WARN("COMMIT_LOG_LONG_LINE",
+ "Possible unwrapped commit description (prefer a maximum 75 chars per line)\n" . $herecurr);
+ $commit_log_long_line = 1;
+ }
+
+# Reset possible stack dump if a blank line is found
+ if ($in_commit_log && $commit_log_possible_stack_dump &&
+ $line =~ /^\s*$/) {
+ $commit_log_possible_stack_dump = 0;
+ }
+
+# Check for git id commit length and improperly formed commit descriptions
+ if ($in_commit_log && !$commit_log_possible_stack_dump &&
+ $line !~ /^\s*(?:Link|Patchwork|http|https|BugLink):/i &&
+ $line !~ /^This reverts commit [0-9a-f]{7,40}/ &&
+ ($line =~ /\bcommit\s+[0-9a-f]{5,}\b/i ||
+ ($line =~ /(?:\s|^)[0-9a-f]{12,40}(?:[\s"'\(\[]|$)/i &&
+ $line !~ /[\<\[][0-9a-f]{12,40}[\>\]]/i &&
+ $line !~ /\bfixes:\s*[0-9a-f]{12,40}/i))) {
+ my $init_char = "c";
+ my $orig_commit = "";
+ my $short = 1;
+ my $long = 0;
+ my $case = 1;
+ my $space = 1;
+ my $hasdesc = 0;
+ my $hasparens = 0;
+ my $id = '0123456789ab';
+ my $orig_desc = "commit description";
+ my $description = "";
+
+ if ($line =~ /\b(c)ommit\s+([0-9a-f]{5,})\b/i) {
+ $init_char = $1;
+ $orig_commit = lc($2);
+ } elsif ($line =~ /\b([0-9a-f]{12,40})\b/i) {
+ $orig_commit = lc($1);
+ }
+
+ $short = 0 if ($line =~ /\bcommit\s+[0-9a-f]{12,40}/i);
+ $long = 1 if ($line =~ /\bcommit\s+[0-9a-f]{41,}/i);
+ $space = 0 if ($line =~ /\bcommit [0-9a-f]/i);
+ $case = 0 if ($line =~ /\b[Cc]ommit\s+[0-9a-f]{5,40}[^A-F]/);
+ if ($line =~ /\bcommit\s+[0-9a-f]{5,}\s+\("([^"]+)"\)/i) {
+ $orig_desc = $1;
+ $hasparens = 1;
+ } elsif ($line =~ /\bcommit\s+[0-9a-f]{5,}\s*$/i &&
+ defined $rawlines[$linenr] &&
+ $rawlines[$linenr] =~ /^\s*\("([^"]+)"\)/) {
+ $orig_desc = $1;
+ $hasparens = 1;
+ } elsif ($line =~ /\bcommit\s+[0-9a-f]{5,}\s+\("[^"]+$/i &&
+ defined $rawlines[$linenr] &&
+ $rawlines[$linenr] =~ /^\s*[^"]+"\)/) {
+ $line =~ /\bcommit\s+[0-9a-f]{5,}\s+\("([^"]+)$/i;
+ $orig_desc = $1;
+ $rawlines[$linenr] =~ /^\s*([^"]+)"\)/;
+ $orig_desc .= " " . $1;
+ $hasparens = 1;
+ }
+
+ ($id, $description) = git_commit_info($orig_commit,
+ $id, $orig_desc);
+
+ if (defined($id) &&
+ ($short || $long || $space || $case || ($orig_desc ne $description) || !$hasparens)) {
+ ERROR("GIT_COMMIT_ID",
+ "Please use git commit description style 'commit <12+ chars of sha1> (\"<title line>\")' - ie: '${init_char}ommit $id (\"$description\")'\n" . $herecurr);
+ }
+ }
+
+# Check for added, moved or deleted files
+ if (!$reported_maintainer_file && !$in_commit_log &&
+ ($line =~ /^(?:new|deleted) file mode\s*\d+\s*$/ ||
+ $line =~ /^rename (?:from|to) [\w\/\.\-]+\s*$/ ||
+ ($line =~ /\{\s*([\w\/\.\-]*)\s*\=\>\s*([\w\/\.\-]*)\s*\}/ &&
+ (defined($1) || defined($2))))) {
+ $is_patch = 1;
+ $reported_maintainer_file = 1;
+ WARN("FILE_PATH_CHANGES",
+ "added, moved or deleted file(s), does MAINTAINERS need updating?\n" . $herecurr);
+ }
+
+# Check for wrappage within a valid hunk of the file
+ if ($realcnt != 0 && $line !~ m{^(?:\+|-| |\\ No newline|$)}) {
+ ERROR("CORRUPTED_PATCH",
+ "patch seems to be corrupt (line wrapped?)\n" .
+ $herecurr) if (!$emitted_corrupt++);
+ }
+
+# UTF-8 regex found at http://www.w3.org/International/questions/qa-forms-utf-8.en.php
+ if (($realfile =~ /^$/ || $line =~ /^\+/) &&
+ $rawline !~ m/^$UTF8*$/) {
+ my ($utf8_prefix) = ($rawline =~ /^($UTF8*)/);
+
+ my $blank = copy_spacing($rawline);
+ my $ptr = substr($blank, 0, length($utf8_prefix)) . "^";
+ my $hereptr = "$hereline$ptr\n";
+
+ CHK("INVALID_UTF8",
+ "Invalid UTF-8, patch and commit message should be encoded in UTF-8\n" . $hereptr);
+ }
+
+# Check if it's the start of a commit log
+# (not a header line and we haven't seen the patch filename)
+ if ($in_header_lines && $realfile =~ /^$/ &&
+ !($rawline =~ /^\s+(?:\S|$)/ ||
+ $rawline =~ /^(?:commit\b|from\b|[\w-]+:)/i)) {
+ $in_header_lines = 0;
+ $in_commit_log = 1;
+ $has_commit_log = 1;
+ }
+
+# Check if there is UTF-8 in a commit log when a mail header has explicitly
+# declined it, i.e defined some charset where it is missing.
+ if ($in_header_lines &&
+ $rawline =~ /^Content-Type:.+charset="(.+)".*$/ &&
+ $1 !~ /utf-8/i) {
+ $non_utf8_charset = 1;
+ }
+
+ if ($in_commit_log && $non_utf8_charset && $realfile =~ /^$/ &&
+ $rawline =~ /$NON_ASCII_UTF8/) {
+ WARN("UTF8_BEFORE_PATCH",
+ "8-bit UTF-8 used in possible commit log\n" . $herecurr);
+ }
+
+# Check for absolute kernel paths in commit message
+ if ($tree && $in_commit_log) {
+ while ($line =~ m{(?:^|\s)(/\S*)}g) {
+ my $file = $1;
+
+ if ($file =~ m{^(.*?)(?::\d+)+:?$} &&
+ check_absolute_file($1, $herecurr)) {
+ #
+ } else {
+ check_absolute_file($file, $herecurr);
+ }
+ }
+ }
+
+# Check for various typo / spelling mistakes
+ if (defined($misspellings) &&
+ ($in_commit_log || $line =~ /^(?:\+|Subject:)/i)) {
+ while ($rawline =~ /(?:^|[^a-z@])($misspellings)(?:\b|$|[^a-z@])/gi) {
+ my $typo = $1;
+ my $typo_fix = $spelling_fix{lc($typo)};
+ $typo_fix = ucfirst($typo_fix) if ($typo =~ /^[A-Z]/);
+ $typo_fix = uc($typo_fix) if ($typo =~ /^[A-Z]+$/);
+ my $msg_level = \&WARN;
+ $msg_level = \&CHK if ($file);
+ if (&{$msg_level}("TYPO_SPELLING",
+ "'$typo' may be misspelled - perhaps '$typo_fix'?\n" . $herecurr) &&
+ $fix) {
+ $fixed[$fixlinenr] =~ s/(^|[^A-Za-z@])($typo)($|[^A-Za-z@])/$1$typo_fix$3/;
+ }
+ }
+ }
+
+# ignore non-hunk lines and lines being removed
+ next if (!$hunk_line || $line =~ /^-/);
+
+#trailing whitespace
+ if ($line =~ /^\+.*\015/) {
+ my $herevet = "$here\n" . cat_vet($rawline) . "\n";
+ if (ERROR("DOS_LINE_ENDINGS",
+ "DOS line endings\n" . $herevet) &&
+ $fix) {
+ $fixed[$fixlinenr] =~ s/[\s\015]+$//;
+ }
+ } elsif ($rawline =~ /^\+.*\S\s+$/ || $rawline =~ /^\+\s+$/) {
+ my $herevet = "$here\n" . cat_vet($rawline) . "\n";
+ if (ERROR("TRAILING_WHITESPACE",
+ "trailing whitespace\n" . $herevet) &&
+ $fix) {
+ $fixed[$fixlinenr] =~ s/\s+$//;
+ }
+
+ $rpt_cleaners = 1;
+ }
+
+# Check for FSF mailing addresses.
+ if ($rawline =~ /\bwrite to the Free/i ||
+ $rawline =~ /\b675\s+Mass\s+Ave/i ||
+ $rawline =~ /\b59\s+Temple\s+Pl/i ||
+ $rawline =~ /\b51\s+Franklin\s+St/i) {
+ my $herevet = "$here\n" . cat_vet($rawline) . "\n";
+ my $msg_level = \&ERROR;
+ $msg_level = \&CHK if ($file);
+ &{$msg_level}("FSF_MAILING_ADDRESS",
+ "Do not include the paragraph about writing to the Free Software Foundation's mailing address from the sample GPL notice. The FSF has changed addresses in the past, and may do so again. Linux already includes a copy of the GPL.\n" . $herevet)
+ }
+
+# check for Kconfig help text having a real description
+# Only applies when adding the entry originally, after that we do not have
+# sufficient context to determine whether it is indeed long enough.
+ if ($realfile =~ /Kconfig/ &&
+ # 'choice' is usually the last thing on the line (though
+ # Kconfig supports named choices), so use a word boundary
+ # (\b) rather than a whitespace character (\s)
+ $line =~ /^\+\s*(?:config|menuconfig|choice)\b/) {
+ my $length = 0;
+ my $cnt = $realcnt;
+ my $ln = $linenr + 1;
+ my $f;
+ my $is_start = 0;
+ my $is_end = 0;
+ for (; $cnt > 0 && defined $lines[$ln - 1]; $ln++) {
+ $f = $lines[$ln - 1];
+ $cnt-- if ($lines[$ln - 1] !~ /^-/);
+ $is_end = $lines[$ln - 1] =~ /^\+/;
+
+ next if ($f =~ /^-/);
+ last if (!$file && $f =~ /^\@\@/);
+
+ if ($lines[$ln - 1] =~ /^\+\s*(?:bool|tristate|prompt)\s*["']/) {
+ $is_start = 1;
+ } elsif ($lines[$ln - 1] =~ /^\+\s*(?:help|---help---)\s*$/) {
+ if ($lines[$ln - 1] =~ "---help---") {
+ WARN("CONFIG_DESCRIPTION",
+ "prefer 'help' over '---help---' for new help texts\n" . $herecurr);
+ }
+ $length = -1;
+ }
+
+ $f =~ s/^.//;
+ $f =~ s/#.*//;
+ $f =~ s/^\s+//;
+ next if ($f =~ /^$/);
+
+ # This only checks context lines in the patch
+ # and so hopefully shouldn't trigger false
+ # positives, even though some of these are
+ # common words in help texts
+ if ($f =~ /^\s*(?:config|menuconfig|choice|endchoice|
+ if|endif|menu|endmenu|source)\b/x) {
+ $is_end = 1;
+ last;
+ }
+ $length++;
+ }
+ if ($is_start && $is_end && $length < $min_conf_desc_length) {
+ WARN("CONFIG_DESCRIPTION",
+ "please write a paragraph that describes the config symbol fully\n" . $herecurr);
+ }
+ #print "is_start<$is_start> is_end<$is_end> length<$length>\n";
+ }
+
+# check for MAINTAINERS entries that don't have the right form
+ if ($realfile =~ /^MAINTAINERS$/ &&
+ $rawline =~ /^\+[A-Z]:/ &&
+ $rawline !~ /^\+[A-Z]:\t\S/) {
+ if (WARN("MAINTAINERS_STYLE",
+ "MAINTAINERS entries use one tab after TYPE:\n" . $herecurr) &&
+ $fix) {
+ $fixed[$fixlinenr] =~ s/^(\+[A-Z]):\s*/$1:\t/;
+ }
+ }
+
+# discourage the use of boolean for type definition attributes of Kconfig options
+ if ($realfile =~ /Kconfig/ &&
+ $line =~ /^\+\s*\bboolean\b/) {
+ WARN("CONFIG_TYPE_BOOLEAN",
+ "Use of boolean is deprecated, please use bool instead.\n" . $herecurr);
+ }
+
+ if (($realfile =~ /Makefile.*/ || $realfile =~ /Kbuild.*/) &&
+ ($line =~ /\+(EXTRA_[A-Z]+FLAGS).*/)) {
+ my $flag = $1;
+ my $replacement = {
+ 'EXTRA_AFLAGS' => 'asflags-y',
+ 'EXTRA_CFLAGS' => 'ccflags-y',
+ 'EXTRA_CPPFLAGS' => 'cppflags-y',
+ 'EXTRA_LDFLAGS' => 'ldflags-y',
+ };
+
+ WARN("DEPRECATED_VARIABLE",
+ "Use of $flag is deprecated, please use \`$replacement->{$flag} instead.\n" . $herecurr) if ($replacement->{$flag});
+ }
+
+# check for DT compatible documentation
+ if (defined $root &&
+ (($realfile =~ /\.dtsi?$/ && $line =~ /^\+\s*compatible\s*=\s*\"/) ||
+ ($realfile =~ /\.[ch]$/ && $line =~ /^\+.*\.compatible\s*=\s*\"/))) {
+
+ my @compats = $rawline =~ /\"([a-zA-Z0-9\-\,\.\+_]+)\"/g;
+
+ my $dt_path = $root . "/Documentation/devicetree/bindings/";
+ my $vp_file = $dt_path . "vendor-prefixes.txt";
+
+ foreach my $compat (@compats) {
+ my $compat2 = $compat;
+ $compat2 =~ s/\,[a-zA-Z0-9]*\-/\,<\.\*>\-/;
+ my $compat3 = $compat;
+ $compat3 =~ s/\,([a-z]*)[0-9]*\-/\,$1<\.\*>\-/;
+ `grep -Erq "$compat|$compat2|$compat3" $dt_path`;
+ if ( $? >> 8 ) {
+ WARN("UNDOCUMENTED_DT_STRING",
+ "DT compatible string \"$compat\" appears un-documented -- check $dt_path\n" . $herecurr);
+ }
+
+ next if $compat !~ /^([a-zA-Z0-9\-]+)\,/;
+ my $vendor = $1;
+ `grep -Eq "^$vendor\\b" $vp_file`;
+ if ( $? >> 8 ) {
+ WARN("UNDOCUMENTED_DT_STRING",
+ "DT compatible string vendor \"$vendor\" appears un-documented -- check $vp_file\n" . $herecurr);
+ }
+ }
+ }
+
+# check for using SPDX license tag at beginning of files
+ if ($realline == $checklicenseline) {
+ if ($rawline =~ /^[ \+]\s*\#\!\s*\//) {
+ $checklicenseline = 2;
+ } elsif ($rawline =~ /^\+/) {
+ my $comment = "";
+ if ($realfile =~ /\.(h|s|S)$/) {
+ $comment = '/*';
+ } elsif ($realfile =~ /\.(c|dts|dtsi)$/) {
+ $comment = '//';
+ } elsif (($checklicenseline == 2) || $realfile =~ /\.(sh|pl|py|awk|tc)$/) {
+ $comment = '#';
+ } elsif ($realfile =~ /\.rst$/) {
+ $comment = '..';
+ }
+
+ if ($comment !~ /^$/ &&
+ $rawline !~ /^\+\Q$comment\E SPDX-License-Identifier: /) {
+ WARN("SPDX_LICENSE_TAG",
+ "Missing or malformed SPDX-License-Identifier tag in line $checklicenseline\n" . $herecurr);
+ }
+ }
+ }
+
+# check we are in a valid source file if not then ignore this hunk
+ next if ($realfile !~ /\.(h|c|s|S|sh|dtsi|dts)$/);
+
+# line length limit (with some exclusions)
+#
+# There are a few types of lines that may extend beyond $max_line_length:
+# logging functions like pr_info that end in a string
+# lines with a single string
+# #defines that are a single string
+# lines with an RFC3986 like URL
+#
+# There are 3 different line length message types:
+# LONG_LINE_COMMENT a comment starts before but extends beyond $max_line_length
+# LONG_LINE_STRING a string starts before but extends beyond $max_line_length
+# LONG_LINE all other lines longer than $max_line_length
+#
+# if LONG_LINE is ignored, the other 2 types are also ignored
+#
+
+ if ($line =~ /^\+/ && $length > $max_line_length) {
+ my $msg_type = "LONG_LINE";
+
+ # Check the allowed long line types first
+
+ # logging functions that end in a string that starts
+ # before $max_line_length
+ if ($line =~ /^\+\s*$logFunctions\s*\(\s*(?:(?:KERN_\S+\s*|[^"]*))?($String\s*(?:|,|\)\s*;)\s*)$/ &&
+ length(expand_tabs(substr($line, 1, length($line) - length($1) - 1))) <= $max_line_length) {
+ $msg_type = "";
+
+ # lines with only strings (w/ possible termination)
+ # #defines with only strings
+ } elsif ($line =~ /^\+\s*$String\s*(?:\s*|,|\)\s*;)\s*$/ ||
+ $line =~ /^\+\s*#\s*define\s+\w+\s+$String$/) {
+ $msg_type = "";
+
+ # More special cases
+ } elsif ($line =~ /^\+.*\bEFI_GUID\s*\(/ ||
+ $line =~ /^\+\s*(?:\w+)?\s*DEFINE_PER_CPU/) {
+ $msg_type = "";
+
+ # URL ($rawline is used in case the URL is in a comment)
+ } elsif ($rawline =~ /^\+.*\b[a-z][\w\.\+\-]*:\/\/\S+/i) {
+ $msg_type = "";
+
+ # Otherwise set the alternate message types
+
+ # a comment starts before $max_line_length
+ } elsif ($line =~ /($;[\s$;]*)$/ &&
+ length(expand_tabs(substr($line, 1, length($line) - length($1) - 1))) <= $max_line_length) {
+ $msg_type = "LONG_LINE_COMMENT"
+
+ # a quoted string starts before $max_line_length
+ } elsif ($sline =~ /\s*($String(?:\s*(?:\\|,\s*|\)\s*;\s*))?)$/ &&
+ length(expand_tabs(substr($line, 1, length($line) - length($1) - 1))) <= $max_line_length) {
+ $msg_type = "LONG_LINE_STRING"
+ }
+
+ if ($msg_type ne "" &&
+ (show_type("LONG_LINE") || show_type($msg_type))) {
+ WARN($msg_type,
+ "line over $max_line_length characters\n" . $herecurr);
+ }
+ }
+
+# check for adding lines without a newline.
+ if ($line =~ /^\+/ && defined $lines[$linenr] && $lines[$linenr] =~ /^\\ No newline at end of file/) {
+ WARN("MISSING_EOF_NEWLINE",
+ "adding a line without newline at end of file\n" . $herecurr);
+ }
+
+# check we are in a valid source file C or perl if not then ignore this hunk
+ next if ($realfile !~ /\.(h|c|pl|dtsi|dts)$/);
+
+# at the beginning of a line any tabs must come first and anything
+# more than 8 must use tabs.
+ if ($rawline =~ /^\+\s* \t\s*\S/ ||
+ $rawline =~ /^\+\s* \s*/) {
+ my $herevet = "$here\n" . cat_vet($rawline) . "\n";
+ $rpt_cleaners = 1;
+ if (ERROR("CODE_INDENT",
+ "code indent should use tabs where possible\n" . $herevet) &&
+ $fix) {
+ $fixed[$fixlinenr] =~ s/^\+([ \t]+)/"\+" . tabify($1)/e;
+ }
+ }
+
+# check for space before tabs.
+ if ($rawline =~ /^\+/ && $rawline =~ / \t/) {
+ my $herevet = "$here\n" . cat_vet($rawline) . "\n";
+ if (WARN("SPACE_BEFORE_TAB",
+ "please, no space before tabs\n" . $herevet) &&
+ $fix) {
+ while ($fixed[$fixlinenr] =~
+ s/(^\+.*) {8,8}\t/$1\t\t/) {}
+ while ($fixed[$fixlinenr] =~
+ s/(^\+.*) +\t/$1\t/) {}
+ }
+ }
+
+# check for assignments on the start of a line
+ if ($sline =~ /^\+\s+($Assignment)[^=]/) {
+ CHK("ASSIGNMENT_CONTINUATIONS",
+ "Assignment operator '$1' should be on the previous line\n" . $hereprev);
+ }
+
+# check for && or || at the start of a line
+ if ($rawline =~ /^\+\s*(&&|\|\|)/) {
+ CHK("LOGICAL_CONTINUATIONS",
+ "Logical continuations should be on the previous line\n" . $hereprev);
+ }
+
+# check indentation starts on a tab stop
+ if ($^V && $^V ge 5.10.0 &&
+ $sline =~ /^\+\t+( +)(?:$c90_Keywords\b|\{\s*$|\}\s*(?:else\b|while\b|\s*$)|$Declare\s*$Ident\s*[;=])/) {
+ my $indent = length($1);
+ if ($indent % 8) {
+ if (WARN("TABSTOP",
+ "Statements should start on a tabstop\n" . $herecurr) &&
+ $fix) {
+ $fixed[$fixlinenr] =~ s@(^\+\t+) +@$1 . "\t" x ($indent/8)@e;
+ }
+ }
+ }
+
+# check multi-line statement indentation matches previous line
+ if ($^V && $^V ge 5.10.0 &&
+ $prevline =~ /^\+([ \t]*)((?:$c90_Keywords(?:\s+if)\s*)|(?:$Declare\s*)?(?:$Ident|\(\s*\*\s*$Ident\s*\))\s*|(?:\*\s*)*$Lval\s*=\s*$Ident\s*)\(.*(\&\&|\|\||,)\s*$/) {
+ $prevline =~ /^\+(\t*)(.*)$/;
+ my $oldindent = $1;
+ my $rest = $2;
+
+ my $pos = pos_last_openparen($rest);
+ if ($pos >= 0) {
+ $line =~ /^(\+| )([ \t]*)/;
+ my $newindent = $2;
+
+ my $goodtabindent = $oldindent .
+ "\t" x ($pos / 8) .
+ " " x ($pos % 8);
+ my $goodspaceindent = $oldindent . " " x $pos;
+
+ if ($newindent ne $goodtabindent &&
+ $newindent ne $goodspaceindent) {
+
+ if (CHK("PARENTHESIS_ALIGNMENT",
+ "Alignment should match open parenthesis\n" . $hereprev) &&
+ $fix && $line =~ /^\+/) {
+ $fixed[$fixlinenr] =~
+ s/^\+[ \t]*/\+$goodtabindent/;
+ }
+ }
+ }
+ }
+
+# check for space after cast like "(int) foo" or "(struct foo) bar"
+# avoid checking a few false positives:
+# "sizeof(<type>)" or "__alignof__(<type>)"
+# function pointer declarations like "(*foo)(int) = bar;"
+# structure definitions like "(struct foo) { 0 };"
+# multiline macros that define functions
+# known attributes or the __attribute__ keyword
+ if ($line =~ /^\+(.*)\(\s*$Type\s*\)([ \t]++)((?![={]|\\$|$Attribute|__attribute__))/ &&
+ (!defined($1) || $1 !~ /\b(?:sizeof|__alignof__)\s*$/)) {
+ if (CHK("SPACING",
+ "No space is necessary after a cast\n" . $herecurr) &&
+ $fix) {
+ $fixed[$fixlinenr] =~
+ s/(\(\s*$Type\s*\))[ \t]+/$1/;
+ }
+ }
+
+# Block comment styles
+# Networking with an initial /*
+ if ($realfile =~ m@^(drivers/net/|net/)@ &&
+ $prevrawline =~ /^\+[ \t]*\/\*[ \t]*$/ &&
+ $rawline =~ /^\+[ \t]*\*/ &&
+ $realline > 2) {
+ WARN("NETWORKING_BLOCK_COMMENT_STYLE",
+ "networking block comments don't use an empty /* line, use /* Comment...\n" . $hereprev);
+ }
+
+# Block comments use * on subsequent lines
+ if ($prevline =~ /$;[ \t]*$/ && #ends in comment
+ $prevrawline =~ /^\+.*?\/\*/ && #starting /*
+ $prevrawline !~ /\*\/[ \t]*$/ && #no trailing */
+ $rawline =~ /^\+/ && #line is new
+ $rawline !~ /^\+[ \t]*\*/) { #no leading *
+ WARN("BLOCK_COMMENT_STYLE",
+ "Block comments use * on subsequent lines\n" . $hereprev);
+ }
+
+# Block comments use */ on trailing lines
+ if ($rawline !~ m@^\+[ \t]*\*/[ \t]*$@ && #trailing */
+ $rawline !~ m@^\+.*/\*.*\*/[ \t]*$@ && #inline /*...*/
+ $rawline !~ m@^\+.*\*{2,}/[ \t]*$@ && #trailing **/
+ $rawline =~ m@^\+[ \t]*.+\*\/[ \t]*$@) { #non blank */
+ WARN("BLOCK_COMMENT_STYLE",
+ "Block comments use a trailing */ on a separate line\n" . $herecurr);
+ }
+
+# Block comment * alignment
+ if ($prevline =~ /$;[ \t]*$/ && #ends in comment
+ $line =~ /^\+[ \t]*$;/ && #leading comment
+ $rawline =~ /^\+[ \t]*\*/ && #leading *
+ (($prevrawline =~ /^\+.*?\/\*/ && #leading /*
+ $prevrawline !~ /\*\/[ \t]*$/) || #no trailing */
+ $prevrawline =~ /^\+[ \t]*\*/)) { #leading *
+ my $oldindent;
+ $prevrawline =~ m@^\+([ \t]*/?)\*@;
+ if (defined($1)) {
+ $oldindent = expand_tabs($1);
+ } else {
+ $prevrawline =~ m@^\+(.*/?)\*@;
+ $oldindent = expand_tabs($1);
+ }
+ $rawline =~ m@^\+([ \t]*)\*@;
+ my $newindent = $1;
+ $newindent = expand_tabs($newindent);
+ if (length($oldindent) ne length($newindent)) {
+ WARN("BLOCK_COMMENT_STYLE",
+ "Block comments should align the * on each line\n" . $hereprev);
+ }
+ }
+
+# check for missing blank lines after struct/union declarations
+# with exceptions for various attributes and macros
+ if ($prevline =~ /^[\+ ]};?\s*$/ &&
+ $line =~ /^\+/ &&
+ !($line =~ /^\+\s*$/ ||
+ $line =~ /^\+\s*EXPORT_SYMBOL/ ||
+ $line =~ /^\+\s*MODULE_/i ||
+ $line =~ /^\+\s*\#\s*(?:end|elif|else)/ ||
+ $line =~ /^\+[a-z_]*init/ ||
+ $line =~ /^\+\s*(?:static\s+)?[A-Z_]*ATTR/ ||
+ $line =~ /^\+\s*DECLARE/ ||
+ $line =~ /^\+\s*builtin_[\w_]*driver/ ||
+ $line =~ /^\+\s*__setup/)) {
+ if (CHK("LINE_SPACING",
+ "Please use a blank line after function/struct/union/enum declarations\n" . $hereprev) &&
+ $fix) {
+ fix_insert_line($fixlinenr, "\+");
+ }
+ }
+
+# check for multiple consecutive blank lines
+ if ($prevline =~ /^[\+ ]\s*$/ &&
+ $line =~ /^\+\s*$/ &&
+ $last_blank_line != ($linenr - 1)) {
+ if (CHK("LINE_SPACING",
+ "Please don't use multiple blank lines\n" . $hereprev) &&
+ $fix) {
+ fix_delete_line($fixlinenr, $rawline);
+ }
+
+ $last_blank_line = $linenr;
+ }
+
+# check for missing blank lines after declarations
+ if ($sline =~ /^\+\s+\S/ && #Not at char 1
+ # actual declarations
+ ($prevline =~ /^\+\s+$Declare\s*$Ident\s*[=,;:\[]/ ||
+ # function pointer declarations
+ $prevline =~ /^\+\s+$Declare\s*\(\s*\*\s*$Ident\s*\)\s*[=,;:\[\(]/ ||
+ # foo bar; where foo is some local typedef or #define
+ $prevline =~ /^\+\s+$Ident(?:\s+|\s*\*\s*)$Ident\s*[=,;\[]/ ||
+ # known declaration macros
+ $prevline =~ /^\+\s+$declaration_macros/) &&
+ # for "else if" which can look like "$Ident $Ident"
+ !($prevline =~ /^\+\s+$c90_Keywords\b/ ||
+ # other possible extensions of declaration lines
+ $prevline =~ /(?:$Compare|$Assignment|$Operators)\s*$/ ||
+ # not starting a section or a macro "\" extended line
+ $prevline =~ /(?:\{\s*|\\)$/) &&
+ # looks like a declaration
+ !($sline =~ /^\+\s+$Declare\s*$Ident\s*[=,;:\[]/ ||
+ # function pointer declarations
+ $sline =~ /^\+\s+$Declare\s*\(\s*\*\s*$Ident\s*\)\s*[=,;:\[\(]/ ||
+ # foo bar; where foo is some local typedef or #define
+ $sline =~ /^\+\s+$Ident(?:\s+|\s*\*\s*)$Ident\s*[=,;\[]/ ||
+ # known declaration macros
+ $sline =~ /^\+\s+$declaration_macros/ ||
+ # start of struct or union or enum
+ $sline =~ /^\+\s+(?:union|struct|enum|typedef)\b/ ||
+ # start or end of block or continuation of declaration
+ $sline =~ /^\+\s+(?:$|[\{\}\.\#\"\?\:\(\[])/ ||
+ # bitfield continuation
+ $sline =~ /^\+\s+$Ident\s*:\s*\d+\s*[,;]/ ||
+ # other possible extensions of declaration lines
+ $sline =~ /^\+\s+\(?\s*(?:$Compare|$Assignment|$Operators)/) &&
+ # indentation of previous and current line are the same
+ (($prevline =~ /\+(\s+)\S/) && $sline =~ /^\+$1\S/)) {
+ if (WARN("LINE_SPACING",
+ "Missing a blank line after declarations\n" . $hereprev) &&
+ $fix) {
+ fix_insert_line($fixlinenr, "\+");
+ }
+ }
+
+# check for spaces at the beginning of a line.
+# Exceptions:
+# 1) within comments
+# 2) indented preprocessor commands
+# 3) hanging labels
+ if ($rawline =~ /^\+ / && $line !~ /^\+ *(?:$;|#|$Ident:)/) {
+ my $herevet = "$here\n" . cat_vet($rawline) . "\n";
+ if (WARN("LEADING_SPACE",
+ "please, no spaces at the start of a line\n" . $herevet) &&
+ $fix) {
+ $fixed[$fixlinenr] =~ s/^\+([ \t]+)/"\+" . tabify($1)/e;
+ }
+ }
+
+# check we are in a valid C source file if not then ignore this hunk
+ next if ($realfile !~ /\.(h|c)$/);
+
+# check for unusual line ending [ or (
+ if ($line =~ /^\+.*([\[\(])\s*$/) {
+ CHK("OPEN_ENDED_LINE",
+ "Lines should not end with a '$1'\n" . $herecurr);
+ }
+
+# check if this appears to be the start function declaration, save the name
+ if ($sline =~ /^\+\{\s*$/ &&
+ $prevline =~ /^\+(?:(?:(?:$Storage|$Inline)\s*)*\s*$Type\s*)?($Ident)\(/) {
+ $context_function = $1;
+ }
+
+# check if this appears to be the end of function declaration
+ if ($sline =~ /^\+\}\s*$/) {
+ undef $context_function;
+ }
+
+# check indentation of any line with a bare else
+# (but not if it is a multiple line "if (foo) return bar; else return baz;")
+# if the previous line is a break or return and is indented 1 tab more...
+ if ($sline =~ /^\+([\t]+)(?:}[ \t]*)?else(?:[ \t]*{)?\s*$/) {
+ my $tabs = length($1) + 1;
+ if ($prevline =~ /^\+\t{$tabs,$tabs}break\b/ ||
+ ($prevline =~ /^\+\t{$tabs,$tabs}return\b/ &&
+ defined $lines[$linenr] &&
+ $lines[$linenr] !~ /^[ \+]\t{$tabs,$tabs}return/)) {
+ WARN("UNNECESSARY_ELSE",
+ "else is not generally useful after a break or return\n" . $hereprev);
+ }
+ }
+
+# check indentation of a line with a break;
+# if the previous line is a goto or return and is indented the same # of tabs
+ if ($sline =~ /^\+([\t]+)break\s*;\s*$/) {
+ my $tabs = $1;
+ if ($prevline =~ /^\+$tabs(?:goto|return)\b/) {
+ WARN("UNNECESSARY_BREAK",
+ "break is not useful after a goto or return\n" . $hereprev);
+ }
+ }
+
+# check for RCS/CVS revision markers
+ if ($rawline =~ /^\+.*\$(Revision|Log|Id)(?:\$|)/) {
+ WARN("CVS_KEYWORD",
+ "CVS style keyword markers, these will _not_ be updated\n". $herecurr);
+ }
+
+# check for old HOTPLUG __dev<foo> section markings
+ if ($line =~ /\b(__dev(init|exit)(data|const|))\b/) {
+ WARN("HOTPLUG_SECTION",
+ "Using $1 is unnecessary\n" . $herecurr);
+ }
+
+# Check for potential 'bare' types
+ my ($stat, $cond, $line_nr_next, $remain_next, $off_next,
+ $realline_next);
+#print "LINE<$line>\n";
+ if ($linenr > $suppress_statement &&
+ $realcnt && $sline =~ /.\s*\S/) {
+ ($stat, $cond, $line_nr_next, $remain_next, $off_next) =
+ ctx_statement_block($linenr, $realcnt, 0);
+ $stat =~ s/\n./\n /g;
+ $cond =~ s/\n./\n /g;
+
+#print "linenr<$linenr> <$stat>\n";
+ # If this statement has no statement boundaries within
+ # it there is no point in retrying a statement scan
+ # until we hit end of it.
+ my $frag = $stat; $frag =~ s/;+\s*$//;
+ if ($frag !~ /(?:{|;)/) {
+#print "skip<$line_nr_next>\n";
+ $suppress_statement = $line_nr_next;
+ }
+
+ # Find the real next line.
+ $realline_next = $line_nr_next;
+ if (defined $realline_next &&
+ (!defined $lines[$realline_next - 1] ||
+ substr($lines[$realline_next - 1], $off_next) =~ /^\s*$/)) {
+ $realline_next++;
+ }
+
+ my $s = $stat;
+ $s =~ s/{.*$//s;
+
+ # Ignore goto labels.
+ if ($s =~ /$Ident:\*$/s) {
+
+ # Ignore functions being called
+ } elsif ($s =~ /^.\s*$Ident\s*\(/s) {
+
+ } elsif ($s =~ /^.\s*else\b/s) {
+
+ # declarations always start with types
+ } elsif ($prev_values eq 'E' && $s =~ /^.\s*(?:$Storage\s+)?(?:$Inline\s+)?(?:const\s+)?((?:\s*$Ident)+?)\b(?:\s+$Sparse)?\s*\**\s*(?:$Ident|\(\*[^\)]*\))(?:\s*$Modifier)?\s*(?:;|=|,|\()/s) {
+ my $type = $1;
+ $type =~ s/\s+/ /g;
+ possible($type, "A:" . $s);
+
+ # definitions in global scope can only start with types
+ } elsif ($s =~ /^.(?:$Storage\s+)?(?:$Inline\s+)?(?:const\s+)?($Ident)\b\s*(?!:)/s) {
+ possible($1, "B:" . $s);
+ }
+
+ # any (foo ... *) is a pointer cast, and foo is a type
+ while ($s =~ /\(($Ident)(?:\s+$Sparse)*[\s\*]+\s*\)/sg) {
+ possible($1, "C:" . $s);
+ }
+
+ # Check for any sort of function declaration.
+ # int foo(something bar, other baz);
+ # void (*store_gdt)(x86_descr_ptr *);
+ if ($prev_values eq 'E' && $s =~ /^(.(?:typedef\s*)?(?:(?:$Storage|$Inline)\s*)*\s*$Type\s*(?:\b$Ident|\(\*\s*$Ident\))\s*)\(/s) {
+ my ($name_len) = length($1);
+
+ my $ctx = $s;
+ substr($ctx, 0, $name_len + 1, '');
+ $ctx =~ s/\)[^\)]*$//;
+
+ for my $arg (split(/\s*,\s*/, $ctx)) {
+ if ($arg =~ /^(?:const\s+)?($Ident)(?:\s+$Sparse)*\s*\**\s*(:?\b$Ident)?$/s || $arg =~ /^($Ident)$/s) {
+
+ possible($1, "D:" . $s);
+ }
+ }
+ }
+
+ }
+
+#
+# Checks which may be anchored in the context.
+#
+
+# Check for switch () and associated case and default
+# statements should be at the same indent.
+ if ($line=~/\bswitch\s*\(.*\)/) {
+ my $err = '';
+ my $sep = '';
+ my @ctx = ctx_block_outer($linenr, $realcnt);
+ shift(@ctx);
+ for my $ctx (@ctx) {
+ my ($clen, $cindent) = line_stats($ctx);
+ if ($ctx =~ /^\+\s*(case\s+|default:)/ &&
+ $indent != $cindent) {
+ $err .= "$sep$ctx\n";
+ $sep = '';
+ } else {
+ $sep = "[...]\n";
+ }
+ }
+ if ($err ne '') {
+ ERROR("SWITCH_CASE_INDENT_LEVEL",
+ "switch and case should be at the same indent\n$hereline$err");
+ }
+ }
+
+# if/while/etc brace do not go on next line, unless defining a do while loop,
+# or if that brace on the next line is for something else
+ if ($line =~ /(.*)\b((?:if|while|for|switch|(?:[a-z_]+|)for_each[a-z_]+)\s*\(|do\b|else\b)/ && $line !~ /^.\s*\#/) {
+ my $pre_ctx = "$1$2";
+
+ my ($level, @ctx) = ctx_statement_level($linenr, $realcnt, 0);
+
+ if ($line =~ /^\+\t{6,}/) {
+ WARN("DEEP_INDENTATION",
+ "Too many leading tabs - consider code refactoring\n" . $herecurr);
+ }
+
+ my $ctx_cnt = $realcnt - $#ctx - 1;
+ my $ctx = join("\n", @ctx);
+
+ my $ctx_ln = $linenr;
+ my $ctx_skip = $realcnt;
+
+ while ($ctx_skip > $ctx_cnt || ($ctx_skip == $ctx_cnt &&
+ defined $lines[$ctx_ln - 1] &&
+ $lines[$ctx_ln - 1] =~ /^-/)) {
+ ##print "SKIP<$ctx_skip> CNT<$ctx_cnt>\n";
+ $ctx_skip-- if (!defined $lines[$ctx_ln - 1] || $lines[$ctx_ln - 1] !~ /^-/);
+ $ctx_ln++;
+ }
+
+ #print "realcnt<$realcnt> ctx_cnt<$ctx_cnt>\n";
+ #print "pre<$pre_ctx>\nline<$line>\nctx<$ctx>\nnext<$lines[$ctx_ln - 1]>\n";
+
+ if ($ctx !~ /{\s*/ && defined($lines[$ctx_ln - 1]) && $lines[$ctx_ln - 1] =~ /^\+\s*{/) {
+ ERROR("OPEN_BRACE",
+ "that open brace { should be on the previous line\n" .
+ "$here\n$ctx\n$rawlines[$ctx_ln - 1]\n");
+ }
+ if ($level == 0 && $pre_ctx !~ /}\s*while\s*\($/ &&
+ $ctx =~ /\)\s*\;\s*$/ &&
+ defined $lines[$ctx_ln - 1])
+ {
+ my ($nlength, $nindent) = line_stats($lines[$ctx_ln - 1]);
+ if ($nindent > $indent) {
+ WARN("TRAILING_SEMICOLON",
+ "trailing semicolon indicates no statements, indent implies otherwise\n" .
+ "$here\n$ctx\n$rawlines[$ctx_ln - 1]\n");
+ }
+ }
+ }
+
+# Check relative indent for conditionals and blocks.
+ if ($line =~ /\b(?:(?:if|while|for|(?:[a-z_]+|)for_each[a-z_]+)\s*\(|(?:do|else)\b)/ && $line !~ /^.\s*#/ && $line !~ /\}\s*while\s*/) {
+ ($stat, $cond, $line_nr_next, $remain_next, $off_next) =
+ ctx_statement_block($linenr, $realcnt, 0)
+ if (!defined $stat);
+ my ($s, $c) = ($stat, $cond);
+
+ substr($s, 0, length($c), '');
+
+ # remove inline comments
+ $s =~ s/$;/ /g;
+ $c =~ s/$;/ /g;
+
+ # Find out how long the conditional actually is.
+ my @newlines = ($c =~ /\n/gs);
+ my $cond_lines = 1 + $#newlines;
+
+ # Make sure we remove the line prefixes as we have
+ # none on the first line, and are going to readd them
+ # where necessary.
+ $s =~ s/\n./\n/gs;
+ while ($s =~ /\n\s+\\\n/) {
+ $cond_lines += $s =~ s/\n\s+\\\n/\n/g;
+ }
+
+ # We want to check the first line inside the block
+ # starting at the end of the conditional, so remove:
+ # 1) any blank line termination
+ # 2) any opening brace { on end of the line
+ # 3) any do (...) {
+ my $continuation = 0;
+ my $check = 0;
+ $s =~ s/^.*\bdo\b//;
+ $s =~ s/^\s*{//;
+ if ($s =~ s/^\s*\\//) {
+ $continuation = 1;
+ }
+ if ($s =~ s/^\s*?\n//) {
+ $check = 1;
+ $cond_lines++;
+ }
+
+ # Also ignore a loop construct at the end of a
+ # preprocessor statement.
+ if (($prevline =~ /^.\s*#\s*define\s/ ||
+ $prevline =~ /\\\s*$/) && $continuation == 0) {
+ $check = 0;
+ }
+
+ my $cond_ptr = -1;
+ $continuation = 0;
+ while ($cond_ptr != $cond_lines) {
+ $cond_ptr = $cond_lines;
+
+ # If we see an #else/#elif then the code
+ # is not linear.
+ if ($s =~ /^\s*\#\s*(?:else|elif)/) {
+ $check = 0;
+ }
+
+ # Ignore:
+ # 1) blank lines, they should be at 0,
+ # 2) preprocessor lines, and
+ # 3) labels.
+ if ($continuation ||
+ $s =~ /^\s*?\n/ ||
+ $s =~ /^\s*#\s*?/ ||
+ $s =~ /^\s*$Ident\s*:/) {
+ $continuation = ($s =~ /^.*?\\\n/) ? 1 : 0;
+ if ($s =~ s/^.*?\n//) {
+ $cond_lines++;
+ }
+ }
+ }
+
+ my (undef, $sindent) = line_stats("+" . $s);
+ my $stat_real = raw_line($linenr, $cond_lines);
+
+ # Check if either of these lines are modified, else
+ # this is not this patch's fault.
+ if (!defined($stat_real) ||
+ $stat !~ /^\+/ && $stat_real !~ /^\+/) {
+ $check = 0;
+ }
+ if (defined($stat_real) && $cond_lines > 1) {
+ $stat_real = "[...]\n$stat_real";
+ }
+
+ #print "line<$line> prevline<$prevline> indent<$indent> sindent<$sindent> check<$check> continuation<$continuation> s<$s> cond_lines<$cond_lines> stat_real<$stat_real> stat<$stat>\n";
+
+ if ($check && $s ne '' &&
+ (($sindent % 8) != 0 ||
+ ($sindent < $indent) ||
+ ($sindent == $indent &&
+ ($s !~ /^\s*(?:\}|\{|else\b)/)) ||
+ ($sindent > $indent + 8))) {
+ WARN("SUSPECT_CODE_INDENT",
+ "suspect code indent for conditional statements ($indent, $sindent)\n" . $herecurr . "$stat_real\n");
+ }
+ }
+
+ # Track the 'values' across context and added lines.
+ my $opline = $line; $opline =~ s/^./ /;
+ my ($curr_values, $curr_vars) =
+ annotate_values($opline . "\n", $prev_values);
+ $curr_values = $prev_values . $curr_values;
+ if ($dbg_values) {
+ my $outline = $opline; $outline =~ s/\t/ /g;
+ print "$linenr > .$outline\n";
+ print "$linenr > $curr_values\n";
+ print "$linenr > $curr_vars\n";
+ }
+ $prev_values = substr($curr_values, -1);
+
+#ignore lines not being added
+ next if ($line =~ /^[^\+]/);
+
+# check for dereferences that span multiple lines
+ if ($prevline =~ /^\+.*$Lval\s*(?:\.|->)\s*$/ &&
+ $line =~ /^\+\s*(?!\#\s*(?!define\s+|if))\s*$Lval/) {
+ $prevline =~ /($Lval\s*(?:\.|->))\s*$/;
+ my $ref = $1;
+ $line =~ /^.\s*($Lval)/;
+ $ref .= $1;
+ $ref =~ s/\s//g;
+ WARN("MULTILINE_DEREFERENCE",
+ "Avoid multiple line dereference - prefer '$ref'\n" . $hereprev);
+ }
+
+# check for declarations of signed or unsigned without int
+ while ($line =~ m{\b($Declare)\s*(?!char\b|short\b|int\b|long\b)\s*($Ident)?\s*[=,;\[\)\(]}g) {
+ my $type = $1;
+ my $var = $2;
+ $var = "" if (!defined $var);
+ if ($type =~ /^(?:(?:$Storage|$Inline|$Attribute)\s+)*((?:un)?signed)((?:\s*\*)*)\s*$/) {
+ my $sign = $1;
+ my $pointer = $2;
+
+ $pointer = "" if (!defined $pointer);
+
+ if (WARN("UNSPECIFIED_INT",
+ "Prefer '" . trim($sign) . " int" . rtrim($pointer) . "' to bare use of '$sign" . rtrim($pointer) . "'\n" . $herecurr) &&
+ $fix) {
+ my $decl = trim($sign) . " int ";
+ my $comp_pointer = $pointer;
+ $comp_pointer =~ s/\s//g;
+ $decl .= $comp_pointer;
+ $decl = rtrim($decl) if ($var eq "");
+ $fixed[$fixlinenr] =~ s@\b$sign\s*\Q$pointer\E\s*$var\b@$decl$var@;
+ }
+ }
+ }
+
+# TEST: allow direct testing of the type matcher.
+ if ($dbg_type) {
+ if ($line =~ /^.\s*$Declare\s*$/) {
+ ERROR("TEST_TYPE",
+ "TEST: is type\n" . $herecurr);
+ } elsif ($dbg_type > 1 && $line =~ /^.+($Declare)/) {
+ ERROR("TEST_NOT_TYPE",
+ "TEST: is not type ($1 is)\n". $herecurr);
+ }
+ next;
+ }
+# TEST: allow direct testing of the attribute matcher.
+ if ($dbg_attr) {
+ if ($line =~ /^.\s*$Modifier\s*$/) {
+ ERROR("TEST_ATTR",
+ "TEST: is attr\n" . $herecurr);
+ } elsif ($dbg_attr > 1 && $line =~ /^.+($Modifier)/) {
+ ERROR("TEST_NOT_ATTR",
+ "TEST: is not attr ($1 is)\n". $herecurr);
+ }
+ next;
+ }
+
+# check for initialisation to aggregates open brace on the next line
+ if ($line =~ /^.\s*{/ &&
+ $prevline =~ /(?:^|[^=])=\s*$/) {
+ if (ERROR("OPEN_BRACE",
+ "that open brace { should be on the previous line\n" . $hereprev) &&
+ $fix && $prevline =~ /^\+/ && $line =~ /^\+/) {
+ fix_delete_line($fixlinenr - 1, $prevrawline);
+ fix_delete_line($fixlinenr, $rawline);
+ my $fixedline = $prevrawline;
+ $fixedline =~ s/\s*=\s*$/ = {/;
+ fix_insert_line($fixlinenr, $fixedline);
+ $fixedline = $line;
+ $fixedline =~ s/^(.\s*)\{\s*/$1/;
+ fix_insert_line($fixlinenr, $fixedline);
+ }
+ }
+
+#
+# Checks which are anchored on the added line.
+#
+
+# check for malformed paths in #include statements (uses RAW line)
+ if ($rawline =~ m{^.\s*\#\s*include\s+[<"](.*)[">]}) {
+ my $path = $1;
+ if ($path =~ m{//}) {
+ ERROR("MALFORMED_INCLUDE",
+ "malformed #include filename\n" . $herecurr);
+ }
+ if ($path =~ "^uapi/" && $realfile =~ m@\binclude/uapi/@) {
+ ERROR("UAPI_INCLUDE",
+ "No #include in ...include/uapi/... should use a uapi/ path prefix\n" . $herecurr);
+ }
+ }
+
+# no C99 // comments
+ if ($line =~ m{//}) {
+ if (ERROR("C99_COMMENTS",
+ "do not use C99 // comments\n" . $herecurr) &&
+ $fix) {
+ my $line = $fixed[$fixlinenr];
+ if ($line =~ /\/\/(.*)$/) {
+ my $comment = trim($1);
+ $fixed[$fixlinenr] =~ s@\/\/(.*)$@/\* $comment \*/@;
+ }
+ }
+ }
+ # Remove C99 comments.
+ $line =~ s@//.*@@;
+ $opline =~ s@//.*@@;
+
+# EXPORT_SYMBOL should immediately follow the thing it is exporting, consider
+# the whole statement.
+#print "APW <$lines[$realline_next - 1]>\n";
+ if (defined $realline_next &&
+ exists $lines[$realline_next - 1] &&
+ !defined $suppress_export{$realline_next} &&
+ ($lines[$realline_next - 1] =~ /EXPORT_SYMBOL.*\((.*)\)/ ||
+ $lines[$realline_next - 1] =~ /EXPORT_UNUSED_SYMBOL.*\((.*)\)/)) {
+ # Handle definitions which produce identifiers with
+ # a prefix:
+ # XXX(foo);
+ # EXPORT_SYMBOL(something_foo);
+ my $name = $1;
+ if ($stat =~ /^(?:.\s*}\s*\n)?.([A-Z_]+)\s*\(\s*($Ident)/ &&
+ $name =~ /^${Ident}_$2/) {
+#print "FOO C name<$name>\n";
+ $suppress_export{$realline_next} = 1;
+
+ } elsif ($stat !~ /(?:
+ \n.}\s*$|
+ ^.DEFINE_$Ident\(\Q$name\E\)|
+ ^.DECLARE_$Ident\(\Q$name\E\)|
+ ^.LIST_HEAD\(\Q$name\E\)|
+ ^.(?:$Storage\s+)?$Type\s*\(\s*\*\s*\Q$name\E\s*\)\s*\(|
+ \b\Q$name\E(?:\s+$Attribute)*\s*(?:;|=|\[|\()
+ )/x) {
+#print "FOO A<$lines[$realline_next - 1]> stat<$stat> name<$name>\n";
+ $suppress_export{$realline_next} = 2;
+ } else {
+ $suppress_export{$realline_next} = 1;
+ }
+ }
+ if (!defined $suppress_export{$linenr} &&
+ $prevline =~ /^.\s*$/ &&
+ ($line =~ /EXPORT_SYMBOL.*\((.*)\)/ ||
+ $line =~ /EXPORT_UNUSED_SYMBOL.*\((.*)\)/)) {
+#print "FOO B <$lines[$linenr - 1]>\n";
+ $suppress_export{$linenr} = 2;
+ }
+ if (defined $suppress_export{$linenr} &&
+ $suppress_export{$linenr} == 2) {
+ WARN("EXPORT_SYMBOL",
+ "EXPORT_SYMBOL(foo); should immediately follow its function/variable\n" . $herecurr);
+ }
+
+# check for global initialisers.
+ if ($line =~ /^\+$Type\s*$Ident(?:\s+$Modifier)*\s*=\s*($zero_initializer)\s*;/) {
+ if (ERROR("GLOBAL_INITIALISERS",
+ "do not initialise globals to $1\n" . $herecurr) &&
+ $fix) {
+ $fixed[$fixlinenr] =~ s/(^.$Type\s*$Ident(?:\s+$Modifier)*)\s*=\s*$zero_initializer\s*;/$1;/;
+ }
+ }
+# check for static initialisers.
+ if ($line =~ /^\+.*\bstatic\s.*=\s*($zero_initializer)\s*;/) {
+ if (ERROR("INITIALISED_STATIC",
+ "do not initialise statics to $1\n" .
+ $herecurr) &&
+ $fix) {
+ $fixed[$fixlinenr] =~ s/(\bstatic\s.*?)\s*=\s*$zero_initializer\s*;/$1;/;
+ }
+ }
+
+# check for misordered declarations of char/short/int/long with signed/unsigned
+ while ($sline =~ m{(\b$TypeMisordered\b)}g) {
+ my $tmp = trim($1);
+ WARN("MISORDERED_TYPE",
+ "type '$tmp' should be specified in [[un]signed] [short|int|long|long long] order\n" . $herecurr);
+ }
+
+# check for static const char * arrays.
+ if ($line =~ /\bstatic\s+const\s+char\s*\*\s*(\w+)\s*\[\s*\]\s*=\s*/) {
+ WARN("STATIC_CONST_CHAR_ARRAY",
+ "static const char * array should probably be static const char * const\n" .
+ $herecurr);
+ }
+
+# check for static char foo[] = "bar" declarations.
+ if ($line =~ /\bstatic\s+char\s+(\w+)\s*\[\s*\]\s*=\s*"/) {
+ WARN("STATIC_CONST_CHAR_ARRAY",
+ "static char array declaration should probably be static const char\n" .
+ $herecurr);
+ }
+
+# check for const <foo> const where <foo> is not a pointer or array type
+ if ($sline =~ /\bconst\s+($BasicType)\s+const\b/) {
+ my $found = $1;
+ if ($sline =~ /\bconst\s+\Q$found\E\s+const\b\s*\*/) {
+ WARN("CONST_CONST",
+ "'const $found const *' should probably be 'const $found * const'\n" . $herecurr);
+ } elsif ($sline !~ /\bconst\s+\Q$found\E\s+const\s+\w+\s*\[/) {
+ WARN("CONST_CONST",
+ "'const $found const' should probably be 'const $found'\n" . $herecurr);
+ }
+ }
+
+# check for non-global char *foo[] = {"bar", ...} declarations.
+ if ($line =~ /^.\s+(?:static\s+|const\s+)?char\s+\*\s*\w+\s*\[\s*\]\s*=\s*\{/) {
+ WARN("STATIC_CONST_CHAR_ARRAY",
+ "char * array declaration might be better as static const\n" .
+ $herecurr);
+ }
+
+# check for sizeof(foo)/sizeof(foo[0]) that could be ARRAY_SIZE(foo)
+ if ($line =~ m@\bsizeof\s*\(\s*($Lval)\s*\)@) {
+ my $array = $1;
+ if ($line =~ m@\b(sizeof\s*\(\s*\Q$array\E\s*\)\s*/\s*sizeof\s*\(\s*\Q$array\E\s*\[\s*0\s*\]\s*\))@) {
+ my $array_div = $1;
+ if (WARN("ARRAY_SIZE",
+ "Prefer ARRAY_SIZE($array)\n" . $herecurr) &&
+ $fix) {
+ $fixed[$fixlinenr] =~ s/\Q$array_div\E/ARRAY_SIZE($array)/;
+ }
+ }
+ }
+
+# check for function declarations without arguments like "int foo()"
+ if ($line =~ /(\b$Type\s+$Ident)\s*\(\s*\)/) {
+ if (ERROR("FUNCTION_WITHOUT_ARGS",
+ "Bad function definition - $1() should probably be $1(void)\n" . $herecurr) &&
+ $fix) {
+ $fixed[$fixlinenr] =~ s/(\b($Type)\s+($Ident))\s*\(\s*\)/$2 $3(void)/;
+ }
+ }
+
+# check for new typedefs, only function parameters and sparse annotations
+# make sense.
+ if ($line =~ /\btypedef\s/ &&
+ $line !~ /\btypedef\s+$Type\s*\(\s*\*?$Ident\s*\)\s*\(/ &&
+ $line !~ /\btypedef\s+$Type\s+$Ident\s*\(/ &&
+ $line !~ /\b$typeTypedefs\b/ &&
+ $line !~ /\b__bitwise\b/) {
+ WARN("NEW_TYPEDEFS",
+ "do not add new typedefs\n" . $herecurr);
+ }
+
+# * goes on variable not on type
+ # (char*[ const])
+ while ($line =~ m{(\($NonptrType(\s*(?:$Modifier\b\s*|\*\s*)+)\))}g) {
+ #print "AA<$1>\n";
+ my ($ident, $from, $to) = ($1, $2, $2);
+
+ # Should start with a space.
+ $to =~ s/^(\S)/ $1/;
+ # Should not end with a space.
+ $to =~ s/\s+$//;
+ # '*'s should not have spaces between.
+ while ($to =~ s/\*\s+\*/\*\*/) {
+ }
+
+## print "1: from<$from> to<$to> ident<$ident>\n";
+ if ($from ne $to) {
+ if (ERROR("POINTER_LOCATION",
+ "\"(foo$from)\" should be \"(foo$to)\"\n" . $herecurr) &&
+ $fix) {
+ my $sub_from = $ident;
+ my $sub_to = $ident;
+ $sub_to =~ s/\Q$from\E/$to/;
+ $fixed[$fixlinenr] =~
+ s@\Q$sub_from\E@$sub_to@;
+ }
+ }
+ }
+ while ($line =~ m{(\b$NonptrType(\s*(?:$Modifier\b\s*|\*\s*)+)($Ident))}g) {
+ #print "BB<$1>\n";
+ my ($match, $from, $to, $ident) = ($1, $2, $2, $3);
+
+ # Should start with a space.
+ $to =~ s/^(\S)/ $1/;
+ # Should not end with a space.
+ $to =~ s/\s+$//;
+ # '*'s should not have spaces between.
+ while ($to =~ s/\*\s+\*/\*\*/) {
+ }
+ # Modifiers should have spaces.
+ $to =~ s/(\b$Modifier$)/$1 /;
+
+## print "2: from<$from> to<$to> ident<$ident>\n";
+ if ($from ne $to && $ident !~ /^$Modifier$/) {
+ if (ERROR("POINTER_LOCATION",
+ "\"foo${from}bar\" should be \"foo${to}bar\"\n" . $herecurr) &&
+ $fix) {
+
+ my $sub_from = $match;
+ my $sub_to = $match;
+ $sub_to =~ s/\Q$from\E/$to/;
+ $fixed[$fixlinenr] =~
+ s@\Q$sub_from\E@$sub_to@;
+ }
+ }
+ }
+
+# avoid BUG() or BUG_ON()
+ if ($line =~ /\b(?:BUG|BUG_ON)\b/) {
+ my $msg_level = \&WARN;
+ $msg_level = \&CHK if ($file);
+ &{$msg_level}("AVOID_BUG",
+ "Avoid crashing the kernel - try using WARN_ON & recovery code rather than BUG() or BUG_ON()\n" . $herecurr);
+ }
+
+# avoid LINUX_VERSION_CODE
+ if ($line =~ /\bLINUX_VERSION_CODE\b/) {
+ WARN("LINUX_VERSION_CODE",
+ "LINUX_VERSION_CODE should be avoided, code should be for the version to which it is merged\n" . $herecurr);
+ }
+
+# check for uses of printk_ratelimit
+ if ($line =~ /\bprintk_ratelimit\s*\(/) {
+ WARN("PRINTK_RATELIMITED",
+ "Prefer printk_ratelimited or pr_<level>_ratelimited to printk_ratelimit\n" . $herecurr);
+ }
+
+# printk should use KERN_* levels
+ if ($line =~ /\bprintk\s*\(\s*(?!KERN_[A-Z]+\b)/) {
+ WARN("PRINTK_WITHOUT_KERN_LEVEL",
+ "printk() should include KERN_<LEVEL> facility level\n" . $herecurr);
+ }
+
+ if ($line =~ /\bprintk\s*\(\s*KERN_([A-Z]+)/) {
+ my $orig = $1;
+ my $level = lc($orig);
+ $level = "warn" if ($level eq "warning");
+ my $level2 = $level;
+ $level2 = "dbg" if ($level eq "debug");
+ WARN("PREFER_PR_LEVEL",
+ "Prefer [subsystem eg: netdev]_$level2([subsystem]dev, ... then dev_$level2(dev, ... then pr_$level(... to printk(KERN_$orig ...\n" . $herecurr);
+ }
+
+ if ($line =~ /\bpr_warning\s*\(/) {
+ if (WARN("PREFER_PR_LEVEL",
+ "Prefer pr_warn(... to pr_warning(...\n" . $herecurr) &&
+ $fix) {
+ $fixed[$fixlinenr] =~
+ s/\bpr_warning\b/pr_warn/;
+ }
+ }
+
+ if ($line =~ /\bdev_printk\s*\(\s*KERN_([A-Z]+)/) {
+ my $orig = $1;
+ my $level = lc($orig);
+ $level = "warn" if ($level eq "warning");
+ $level = "dbg" if ($level eq "debug");
+ WARN("PREFER_DEV_LEVEL",
+ "Prefer dev_$level(... to dev_printk(KERN_$orig, ...\n" . $herecurr);
+ }
+
+# ENOSYS means "bad syscall nr" and nothing else. This will have a small
+# number of false positives, but assembly files are not checked, so at
+# least the arch entry code will not trigger this warning.
+ if ($line =~ /\bENOSYS\b/) {
+ WARN("ENOSYS",
+ "ENOSYS means 'invalid syscall nr' and nothing else\n" . $herecurr);
+ }
+
+# function brace can't be on same line, except for #defines of do while,
+# or if closed on same line
+ if ($^V && $^V ge 5.10.0 &&
+ $sline =~ /$Type\s*$Ident\s*$balanced_parens\s*\{/ &&
+ $sline !~ /\#\s*define\b.*do\s*\{/ &&
+ $sline !~ /}/) {
+ if (ERROR("OPEN_BRACE",
+ "open brace '{' following function definitions go on the next line\n" . $herecurr) &&
+ $fix) {
+ fix_delete_line($fixlinenr, $rawline);
+ my $fixed_line = $rawline;
+ $fixed_line =~ /(^..*$Type\s*$Ident\(.*\)\s*){(.*)$/;
+ my $line1 = $1;
+ my $line2 = $2;
+ fix_insert_line($fixlinenr, ltrim($line1));
+ fix_insert_line($fixlinenr, "\+{");
+ if ($line2 !~ /^\s*$/) {
+ fix_insert_line($fixlinenr, "\+\t" . trim($line2));
+ }
+ }
+ }
+
+# open braces for enum, union and struct go on the same line.
+ if ($line =~ /^.\s*{/ &&
+ $prevline =~ /^.\s*(?:typedef\s+)?(enum|union|struct)(?:\s+$Ident)?\s*$/) {
+ if (ERROR("OPEN_BRACE",
+ "open brace '{' following $1 go on the same line\n" . $hereprev) &&
+ $fix && $prevline =~ /^\+/ && $line =~ /^\+/) {
+ fix_delete_line($fixlinenr - 1, $prevrawline);
+ fix_delete_line($fixlinenr, $rawline);
+ my $fixedline = rtrim($prevrawline) . " {";
+ fix_insert_line($fixlinenr, $fixedline);
+ $fixedline = $rawline;
+ $fixedline =~ s/^(.\s*)\{\s*/$1\t/;
+ if ($fixedline !~ /^\+\s*$/) {
+ fix_insert_line($fixlinenr, $fixedline);
+ }
+ }
+ }
+
+# missing space after union, struct or enum definition
+ if ($line =~ /^.\s*(?:typedef\s+)?(enum|union|struct)(?:\s+$Ident){1,2}[=\{]/) {
+ if (WARN("SPACING",
+ "missing space after $1 definition\n" . $herecurr) &&
+ $fix) {
+ $fixed[$fixlinenr] =~
+ s/^(.\s*(?:typedef\s+)?(?:enum|union|struct)(?:\s+$Ident){1,2})([=\{])/$1 $2/;
+ }
+ }
+
+# Function pointer declarations
+# check spacing between type, funcptr, and args
+# canonical declaration is "type (*funcptr)(args...)"
+ if ($line =~ /^.\s*($Declare)\((\s*)\*(\s*)($Ident)(\s*)\)(\s*)\(/) {
+ my $declare = $1;
+ my $pre_pointer_space = $2;
+ my $post_pointer_space = $3;
+ my $funcname = $4;
+ my $post_funcname_space = $5;
+ my $pre_args_space = $6;
+
+# the $Declare variable will capture all spaces after the type
+# so check it for a missing trailing missing space but pointer return types
+# don't need a space so don't warn for those.
+ my $post_declare_space = "";
+ if ($declare =~ /(\s+)$/) {
+ $post_declare_space = $1;
+ $declare = rtrim($declare);
+ }
+ if ($declare !~ /\*$/ && $post_declare_space =~ /^$/) {
+ WARN("SPACING",
+ "missing space after return type\n" . $herecurr);
+ $post_declare_space = " ";
+ }
+
+# unnecessary space "type (*funcptr)(args...)"
+# This test is not currently implemented because these declarations are
+# equivalent to
+# int foo(int bar, ...)
+# and this is form shouldn't/doesn't generate a checkpatch warning.
+#
+# elsif ($declare =~ /\s{2,}$/) {
+# WARN("SPACING",
+# "Multiple spaces after return type\n" . $herecurr);
+# }
+
+# unnecessary space "type ( *funcptr)(args...)"
+ if (defined $pre_pointer_space &&
+ $pre_pointer_space =~ /^\s/) {
+ WARN("SPACING",
+ "Unnecessary space after function pointer open parenthesis\n" . $herecurr);
+ }
+
+# unnecessary space "type (* funcptr)(args...)"
+ if (defined $post_pointer_space &&
+ $post_pointer_space =~ /^\s/) {
+ WARN("SPACING",
+ "Unnecessary space before function pointer name\n" . $herecurr);
+ }
+
+# unnecessary space "type (*funcptr )(args...)"
+ if (defined $post_funcname_space &&
+ $post_funcname_space =~ /^\s/) {
+ WARN("SPACING",
+ "Unnecessary space after function pointer name\n" . $herecurr);
+ }
+
+# unnecessary space "type (*funcptr) (args...)"
+ if (defined $pre_args_space &&
+ $pre_args_space =~ /^\s/) {
+ WARN("SPACING",
+ "Unnecessary space before function pointer arguments\n" . $herecurr);
+ }
+
+ if (show_type("SPACING") && $fix) {
+ $fixed[$fixlinenr] =~
+ s/^(.\s*)$Declare\s*\(\s*\*\s*$Ident\s*\)\s*\(/$1 . $declare . $post_declare_space . '(*' . $funcname . ')('/ex;
+ }
+ }
+
+# check for spacing round square brackets; allowed:
+# 1. with a type on the left -- int [] a;
+# 2. at the beginning of a line for slice initialisers -- [0...10] = 5,
+# 3. inside a curly brace -- = { [0...10] = 5 }
+ while ($line =~ /(.*?\s)\[/g) {
+ my ($where, $prefix) = ($-[1], $1);
+ if ($prefix !~ /$Type\s+$/ &&
+ ($where != 0 || $prefix !~ /^.\s+$/) &&
+ $prefix !~ /[{,:]\s+$/) {
+ if (ERROR("BRACKET_SPACE",
+ "space prohibited before open square bracket '['\n" . $herecurr) &&
+ $fix) {
+ $fixed[$fixlinenr] =~
+ s/^(\+.*?)\s+\[/$1\[/;
+ }
+ }
+ }
+
+# check for spaces between functions and their parentheses.
+ while ($line =~ /($Ident)\s+\(/g) {
+ my $name = $1;
+ my $ctx_before = substr($line, 0, $-[1]);
+ my $ctx = "$ctx_before$name";
+
+ # Ignore those directives where spaces _are_ permitted.
+ if ($name =~ /^(?:
+ if|for|while|switch|return|case|
+ volatile|__volatile__|
+ __attribute__|format|__extension__|
+ asm|__asm__)$/x)
+ {
+ # cpp #define statements have non-optional spaces, ie
+ # if there is a space between the name and the open
+ # parenthesis it is simply not a parameter group.
+ } elsif ($ctx_before =~ /^.\s*\#\s*define\s*$/) {
+
+ # cpp #elif statement condition may start with a (
+ } elsif ($ctx =~ /^.\s*\#\s*elif\s*$/) {
+
+ # If this whole things ends with a type its most
+ # likely a typedef for a function.
+ } elsif ($ctx =~ /$Type$/) {
+
+ } else {
+ if (WARN("SPACING",
+ "space prohibited between function name and open parenthesis '('\n" . $herecurr) &&
+ $fix) {
+ $fixed[$fixlinenr] =~
+ s/\b$name\s+\(/$name\(/;
+ }
+ }
+ }
+
+# Check operator spacing.
+ if (!($line=~/\#\s*include/)) {
+ my $fixed_line = "";
+ my $line_fixed = 0;
+
+ my $ops = qr{
+ <<=|>>=|<=|>=|==|!=|
+ \+=|-=|\*=|\/=|%=|\^=|\|=|&=|
+ =>|->|<<|>>|<|>|=|!|~|
+ &&|\|\||,|\^|\+\+|--|&|\||\+|-|\*|\/|%|
+ \?:|\?|:
+ }x;
+ my @elements = split(/($ops|;)/, $opline);
+
+## print("element count: <" . $#elements . ">\n");
+## foreach my $el (@elements) {
+## print("el: <$el>\n");
+## }
+
+ my @fix_elements = ();
+ my $off = 0;
+
+ foreach my $el (@elements) {
+ push(@fix_elements, substr($rawline, $off, length($el)));
+ $off += length($el);
+ }
+
+ $off = 0;
+
+ my $blank = copy_spacing($opline);
+ my $last_after = -1;
+
+ for (my $n = 0; $n < $#elements; $n += 2) {
+
+ my $good = $fix_elements[$n] . $fix_elements[$n + 1];
+
+## print("n: <$n> good: <$good>\n");
+
+ $off += length($elements[$n]);
+
+ # Pick up the preceding and succeeding characters.
+ my $ca = substr($opline, 0, $off);
+ my $cc = '';
+ if (length($opline) >= ($off + length($elements[$n + 1]))) {
+ $cc = substr($opline, $off + length($elements[$n + 1]));
+ }
+ my $cb = "$ca$;$cc";
+
+ my $a = '';
+ $a = 'V' if ($elements[$n] ne '');
+ $a = 'W' if ($elements[$n] =~ /\s$/);
+ $a = 'C' if ($elements[$n] =~ /$;$/);
+ $a = 'B' if ($elements[$n] =~ /(\[|\()$/);
+ $a = 'O' if ($elements[$n] eq '');
+ $a = 'E' if ($ca =~ /^\s*$/);
+
+ my $op = $elements[$n + 1];
+
+ my $c = '';
+ if (defined $elements[$n + 2]) {
+ $c = 'V' if ($elements[$n + 2] ne '');
+ $c = 'W' if ($elements[$n + 2] =~ /^\s/);
+ $c = 'C' if ($elements[$n + 2] =~ /^$;/);
+ $c = 'B' if ($elements[$n + 2] =~ /^(\)|\]|;)/);
+ $c = 'O' if ($elements[$n + 2] eq '');
+ $c = 'E' if ($elements[$n + 2] =~ /^\s*\\$/);
+ } else {
+ $c = 'E';
+ }
+
+ my $ctx = "${a}x${c}";
+
+ my $at = "(ctx:$ctx)";
+
+ my $ptr = substr($blank, 0, $off) . "^";
+ my $hereptr = "$hereline$ptr\n";
+
+ # Pull out the value of this operator.
+ my $op_type = substr($curr_values, $off + 1, 1);
+
+ # Get the full operator variant.
+ my $opv = $op . substr($curr_vars, $off, 1);
+
+ # Ignore operators passed as parameters.
+ if ($op_type ne 'V' &&
+ $ca =~ /\s$/ && $cc =~ /^\s*[,\)]/) {
+
+# # Ignore comments
+# } elsif ($op =~ /^$;+$/) {
+
+ # ; should have either the end of line or a space or \ after it
+ } elsif ($op eq ';') {
+ if ($ctx !~ /.x[WEBC]/ &&
+ $cc !~ /^\\/ && $cc !~ /^;/) {
+ if (ERROR("SPACING",
+ "space required after that '$op' $at\n" . $hereptr)) {
+ $good = $fix_elements[$n] . trim($fix_elements[$n + 1]) . " ";
+ $line_fixed = 1;
+ }
+ }
+
+ # // is a comment
+ } elsif ($op eq '//') {
+
+ # : when part of a bitfield
+ } elsif ($opv eq ':B') {
+ # skip the bitfield test for now
+
+ # No spaces for:
+ # ->
+ } elsif ($op eq '->') {
+ if ($ctx =~ /Wx.|.xW/) {
+ if (ERROR("SPACING",
+ "spaces prohibited around that '$op' $at\n" . $hereptr)) {
+ $good = rtrim($fix_elements[$n]) . trim($fix_elements[$n + 1]);
+ if (defined $fix_elements[$n + 2]) {
+ $fix_elements[$n + 2] =~ s/^\s+//;
+ }
+ $line_fixed = 1;
+ }
+ }
+
+ # , must not have a space before and must have a space on the right.
+ } elsif ($op eq ',') {
+ my $rtrim_before = 0;
+ my $space_after = 0;
+ if ($ctx =~ /Wx./) {
+ if (ERROR("SPACING",
+ "space prohibited before that '$op' $at\n" . $hereptr)) {
+ $line_fixed = 1;
+ $rtrim_before = 1;
+ }
+ }
+ if ($ctx !~ /.x[WEC]/ && $cc !~ /^}/) {
+ if (ERROR("SPACING",
+ "space required after that '$op' $at\n" . $hereptr)) {
+ $line_fixed = 1;
+ $last_after = $n;
+ $space_after = 1;
+ }
+ }
+ if ($rtrim_before || $space_after) {
+ if ($rtrim_before) {
+ $good = rtrim($fix_elements[$n]) . trim($fix_elements[$n + 1]);
+ } else {
+ $good = $fix_elements[$n] . trim($fix_elements[$n + 1]);
+ }
+ if ($space_after) {
+ $good .= " ";
+ }
+ }
+
+ # '*' as part of a type definition -- reported already.
+ } elsif ($opv eq '*_') {
+ #warn "'*' is part of type\n";
+
+ # unary operators should have a space before and
+ # none after. May be left adjacent to another
+ # unary operator, or a cast
+ } elsif ($op eq '!' || $op eq '~' ||
+ $opv eq '*U' || $opv eq '-U' ||
+ $opv eq '&U' || $opv eq '&&U') {
+ if ($ctx !~ /[WEBC]x./ && $ca !~ /(?:\)|!|~|\*|-|\&|\||\+\+|\-\-|\{)$/) {
+ if (ERROR("SPACING",
+ "space required before that '$op' $at\n" . $hereptr)) {
+ if ($n != $last_after + 2) {
+ $good = $fix_elements[$n] . " " . ltrim($fix_elements[$n + 1]);
+ $line_fixed = 1;
+ }
+ }
+ }
+ if ($op eq '*' && $cc =~/\s*$Modifier\b/) {
+ # A unary '*' may be const
+
+ } elsif ($ctx =~ /.xW/) {
+ if (ERROR("SPACING",
+ "space prohibited after that '$op' $at\n" . $hereptr)) {
+ $good = $fix_elements[$n] . rtrim($fix_elements[$n + 1]);
+ if (defined $fix_elements[$n + 2]) {
+ $fix_elements[$n + 2] =~ s/^\s+//;
+ }
+ $line_fixed = 1;
+ }
+ }
+
+ # unary ++ and unary -- are allowed no space on one side.
+ } elsif ($op eq '++' or $op eq '--') {
+ if ($ctx !~ /[WEOBC]x[^W]/ && $ctx !~ /[^W]x[WOBEC]/) {
+ if (ERROR("SPACING",
+ "space required one side of that '$op' $at\n" . $hereptr)) {
+ $good = $fix_elements[$n] . trim($fix_elements[$n + 1]) . " ";
+ $line_fixed = 1;
+ }
+ }
+ if ($ctx =~ /Wx[BE]/ ||
+ ($ctx =~ /Wx./ && $cc =~ /^;/)) {
+ if (ERROR("SPACING",
+ "space prohibited before that '$op' $at\n" . $hereptr)) {
+ $good = rtrim($fix_elements[$n]) . trim($fix_elements[$n + 1]);
+ $line_fixed = 1;
+ }
+ }
+ if ($ctx =~ /ExW/) {
+ if (ERROR("SPACING",
+ "space prohibited after that '$op' $at\n" . $hereptr)) {
+ $good = $fix_elements[$n] . trim($fix_elements[$n + 1]);
+ if (defined $fix_elements[$n + 2]) {
+ $fix_elements[$n + 2] =~ s/^\s+//;
+ }
+ $line_fixed = 1;
+ }
+ }
+
+ # << and >> may either have or not have spaces both sides
+ } elsif ($op eq '<<' or $op eq '>>' or
+ $op eq '&' or $op eq '^' or $op eq '|' or
+ $op eq '+' or $op eq '-' or
+ $op eq '*' or $op eq '/' or
+ $op eq '%')
+ {
+ if ($check) {
+ if (defined $fix_elements[$n + 2] && $ctx !~ /[EW]x[EW]/) {
+ if (CHK("SPACING",
+ "spaces preferred around that '$op' $at\n" . $hereptr)) {
+ $good = rtrim($fix_elements[$n]) . " " . trim($fix_elements[$n + 1]) . " ";
+ $fix_elements[$n + 2] =~ s/^\s+//;
+ $line_fixed = 1;
+ }
+ } elsif (!defined $fix_elements[$n + 2] && $ctx !~ /Wx[OE]/) {
+ if (CHK("SPACING",
+ "space preferred before that '$op' $at\n" . $hereptr)) {
+ $good = rtrim($fix_elements[$n]) . " " . trim($fix_elements[$n + 1]);
+ $line_fixed = 1;
+ }
+ }
+ } elsif ($ctx =~ /Wx[^WCE]|[^WCE]xW/) {
+ if (ERROR("SPACING",
+ "need consistent spacing around '$op' $at\n" . $hereptr)) {
+ $good = rtrim($fix_elements[$n]) . " " . trim($fix_elements[$n + 1]) . " ";
+ if (defined $fix_elements[$n + 2]) {
+ $fix_elements[$n + 2] =~ s/^\s+//;
+ }
+ $line_fixed = 1;
+ }
+ }
+
+ # A colon needs no spaces before when it is
+ # terminating a case value or a label.
+ } elsif ($opv eq ':C' || $opv eq ':L') {
+ if ($ctx =~ /Wx./) {
+ if (ERROR("SPACING",
+ "space prohibited before that '$op' $at\n" . $hereptr)) {
+ $good = rtrim($fix_elements[$n]) . trim($fix_elements[$n + 1]);
+ $line_fixed = 1;
+ }
+ }
+
+ # All the others need spaces both sides.
+ } elsif ($ctx !~ /[EWC]x[CWE]/) {
+ my $ok = 0;
+
+ # Ignore email addresses <foo@bar>
+ if (($op eq '<' &&
+ $cc =~ /^\S+\@\S+>/) ||
+ ($op eq '>' &&
+ $ca =~ /<\S+\@\S+$/))
+ {
+ $ok = 1;
+ }
+
+ # for asm volatile statements
+ # ignore a colon with another
+ # colon immediately before or after
+ if (($op eq ':') &&
+ ($ca =~ /:$/ || $cc =~ /^:/)) {
+ $ok = 1;
+ }
+
+ # messages are ERROR, but ?: are CHK
+ if ($ok == 0) {
+ my $msg_level = \&ERROR;
+ $msg_level = \&CHK if (($op eq '?:' || $op eq '?' || $op eq ':') && $ctx =~ /VxV/);
+
+ if (&{$msg_level}("SPACING",
+ "spaces required around that '$op' $at\n" . $hereptr)) {
+ $good = rtrim($fix_elements[$n]) . " " . trim($fix_elements[$n + 1]) . " ";
+ if (defined $fix_elements[$n + 2]) {
+ $fix_elements[$n + 2] =~ s/^\s+//;
+ }
+ $line_fixed = 1;
+ }
+ }
+ }
+ $off += length($elements[$n + 1]);
+
+## print("n: <$n> GOOD: <$good>\n");
+
+ $fixed_line = $fixed_line . $good;
+ }
+
+ if (($#elements % 2) == 0) {
+ $fixed_line = $fixed_line . $fix_elements[$#elements];
+ }
+
+ if ($fix && $line_fixed && $fixed_line ne $fixed[$fixlinenr]) {
+ $fixed[$fixlinenr] = $fixed_line;
+ }
+
+
+ }
+
+# check for whitespace before a non-naked semicolon
+ if ($line =~ /^\+.*\S\s+;\s*$/) {
+ if (WARN("SPACING",
+ "space prohibited before semicolon\n" . $herecurr) &&
+ $fix) {
+ 1 while $fixed[$fixlinenr] =~
+ s/^(\+.*\S)\s+;/$1;/;
+ }
+ }
+
+# check for multiple assignments
+ if ($line =~ /^.\s*$Lval\s*=\s*$Lval\s*=(?!=)/) {
+ CHK("MULTIPLE_ASSIGNMENTS",
+ "multiple assignments should be avoided\n" . $herecurr);
+ }
+
+## # check for multiple declarations, allowing for a function declaration
+## # continuation.
+## if ($line =~ /^.\s*$Type\s+$Ident(?:\s*=[^,{]*)?\s*,\s*$Ident.*/ &&
+## $line !~ /^.\s*$Type\s+$Ident(?:\s*=[^,{]*)?\s*,\s*$Type\s*$Ident.*/) {
+##
+## # Remove any bracketed sections to ensure we do not
+## # falsly report the parameters of functions.
+## my $ln = $line;
+## while ($ln =~ s/\([^\(\)]*\)//g) {
+## }
+## if ($ln =~ /,/) {
+## WARN("MULTIPLE_DECLARATION",
+## "declaring multiple variables together should be avoided\n" . $herecurr);
+## }
+## }
+
+#need space before brace following if, while, etc
+ if (($line =~ /\(.*\)\{/ && $line !~ /\($Type\)\{/) ||
+ $line =~ /do\{/) {
+ if (ERROR("SPACING",
+ "space required before the open brace '{'\n" . $herecurr) &&
+ $fix) {
+ $fixed[$fixlinenr] =~ s/^(\+.*(?:do|\)))\{/$1 {/;
+ }
+ }
+
+## # check for blank lines before declarations
+## if ($line =~ /^.\t+$Type\s+$Ident(?:\s*=.*)?;/ &&
+## $prevrawline =~ /^.\s*$/) {
+## WARN("SPACING",
+## "No blank lines before declarations\n" . $hereprev);
+## }
+##
+
+# closing brace should have a space following it when it has anything
+# on the line
+ if ($line =~ /}(?!(?:,|;|\)))\S/) {
+ if (ERROR("SPACING",
+ "space required after that close brace '}'\n" . $herecurr) &&
+ $fix) {
+ $fixed[$fixlinenr] =~
+ s/}((?!(?:,|;|\)))\S)/} $1/;
+ }
+ }
+
+# check spacing on square brackets
+ if ($line =~ /\[\s/ && $line !~ /\[\s*$/) {
+ if (ERROR("SPACING",
+ "space prohibited after that open square bracket '['\n" . $herecurr) &&
+ $fix) {
+ $fixed[$fixlinenr] =~
+ s/\[\s+/\[/;
+ }
+ }
+ if ($line =~ /\s\]/) {
+ if (ERROR("SPACING",
+ "space prohibited before that close square bracket ']'\n" . $herecurr) &&
+ $fix) {
+ $fixed[$fixlinenr] =~
+ s/\s+\]/\]/;
+ }
+ }
+
+# check spacing on parentheses
+ if ($line =~ /\(\s/ && $line !~ /\(\s*(?:\\)?$/ &&
+ $line !~ /for\s*\(\s+;/) {
+ if (ERROR("SPACING",
+ "space prohibited after that open parenthesis '('\n" . $herecurr) &&
+ $fix) {
+ $fixed[$fixlinenr] =~
+ s/\(\s+/\(/;
+ }
+ }
+ if ($line =~ /(\s+)\)/ && $line !~ /^.\s*\)/ &&
+ $line !~ /for\s*\(.*;\s+\)/ &&
+ $line !~ /:\s+\)/) {
+ if (ERROR("SPACING",
+ "space prohibited before that close parenthesis ')'\n" . $herecurr) &&
+ $fix) {
+ $fixed[$fixlinenr] =~
+ s/\s+\)/\)/;
+ }
+ }
+
+# check unnecessary parentheses around addressof/dereference single $Lvals
+# ie: &(foo->bar) should be &foo->bar and *(foo->bar) should be *foo->bar
+
+ while ($line =~ /(?:[^&]&\s*|\*)\(\s*($Ident\s*(?:$Member\s*)+)\s*\)/g) {
+ my $var = $1;
+ if (CHK("UNNECESSARY_PARENTHESES",
+ "Unnecessary parentheses around $var\n" . $herecurr) &&
+ $fix) {
+ $fixed[$fixlinenr] =~ s/\(\s*\Q$var\E\s*\)/$var/;
+ }
+ }
+
+# check for unnecessary parentheses around function pointer uses
+# ie: (foo->bar)(); should be foo->bar();
+# but not "if (foo->bar) (" to avoid some false positives
+ if ($line =~ /(\bif\s*|)(\(\s*$Ident\s*(?:$Member\s*)+\))[ \t]*\(/ && $1 !~ /^if/) {
+ my $var = $2;
+ if (CHK("UNNECESSARY_PARENTHESES",
+ "Unnecessary parentheses around function pointer $var\n" . $herecurr) &&
+ $fix) {
+ my $var2 = deparenthesize($var);
+ $var2 =~ s/\s//g;
+ $fixed[$fixlinenr] =~ s/\Q$var\E/$var2/;
+ }
+ }
+
+# check for unnecessary parentheses around comparisons in if uses
+# when !drivers/staging or command-line uses --strict
+ if (($realfile !~ m@^(?:drivers/staging/)@ || $check_orig) &&
+ $^V && $^V ge 5.10.0 && defined($stat) &&
+ $stat =~ /(^.\s*if\s*($balanced_parens))/) {
+ my $if_stat = $1;
+ my $test = substr($2, 1, -1);
+ my $herectx;
+ while ($test =~ /(?:^|[^\w\&\!\~])+\s*\(\s*([\&\!\~]?\s*$Lval\s*(?:$Compare\s*$FuncArg)?)\s*\)/g) {
+ my $match = $1;
+ # avoid parentheses around potential macro args
+ next if ($match =~ /^\s*\w+\s*$/);
+ if (!defined($herectx)) {
+ $herectx = $here . "\n";
+ my $cnt = statement_rawlines($if_stat);
+ for (my $n = 0; $n < $cnt; $n++) {
+ my $rl = raw_line($linenr, $n);
+ $herectx .= $rl . "\n";
+ last if $rl =~ /^[ \+].*\{/;
+ }
+ }
+ CHK("UNNECESSARY_PARENTHESES",
+ "Unnecessary parentheses around '$match'\n" . $herectx);
+ }
+ }
+
+#goto labels aren't indented, allow a single space however
+ if ($line=~/^.\s+[A-Za-z\d_]+:(?![0-9]+)/ and
+ !($line=~/^. [A-Za-z\d_]+:/) and !($line=~/^.\s+default:/)) {
+ if (WARN("INDENTED_LABEL",
+ "labels should not be indented\n" . $herecurr) &&
+ $fix) {
+ $fixed[$fixlinenr] =~
+ s/^(.)\s+/$1/;
+ }
+ }
+
+# return is not a function
+ if (defined($stat) && $stat =~ /^.\s*return(\s*)\(/s) {
+ my $spacing = $1;
+ if ($^V && $^V ge 5.10.0 &&
+ $stat =~ /^.\s*return\s*($balanced_parens)\s*;\s*$/) {
+ my $value = $1;
+ $value = deparenthesize($value);
+ if ($value =~ m/^\s*$FuncArg\s*(?:\?|$)/) {
+ ERROR("RETURN_PARENTHESES",
+ "return is not a function, parentheses are not required\n" . $herecurr);
+ }
+ } elsif ($spacing !~ /\s+/) {
+ ERROR("SPACING",
+ "space required before the open parenthesis '('\n" . $herecurr);
+ }
+ }
+
+# unnecessary return in a void function
+# at end-of-function, with the previous line a single leading tab, then return;
+# and the line before that not a goto label target like "out:"
+ if ($sline =~ /^[ \+]}\s*$/ &&
+ $prevline =~ /^\+\treturn\s*;\s*$/ &&
+ $linenr >= 3 &&
+ $lines[$linenr - 3] =~ /^[ +]/ &&
+ $lines[$linenr - 3] !~ /^[ +]\s*$Ident\s*:/) {
+ WARN("RETURN_VOID",
+ "void function return statements are not generally useful\n" . $hereprev);
+ }
+
+# if statements using unnecessary parentheses - ie: if ((foo == bar))
+ if ($^V && $^V ge 5.10.0 &&
+ $line =~ /\bif\s*((?:\(\s*){2,})/) {
+ my $openparens = $1;
+ my $count = $openparens =~ tr@\(@\(@;
+ my $msg = "";
+ if ($line =~ /\bif\s*(?:\(\s*){$count,$count}$LvalOrFunc\s*($Compare)\s*$LvalOrFunc(?:\s*\)){$count,$count}/) {
+ my $comp = $4; #Not $1 because of $LvalOrFunc
+ $msg = " - maybe == should be = ?" if ($comp eq "==");
+ WARN("UNNECESSARY_PARENTHESES",
+ "Unnecessary parentheses$msg\n" . $herecurr);
+ }
+ }
+
+# comparisons with a constant or upper case identifier on the left
+# avoid cases like "foo + BAR < baz"
+# only fix matches surrounded by parentheses to avoid incorrect
+# conversions like "FOO < baz() + 5" being "misfixed" to "baz() > FOO + 5"
+ if ($^V && $^V ge 5.10.0 &&
+ $line =~ /^\+(.*)\b($Constant|[A-Z_][A-Z0-9_]*)\s*($Compare)\s*($LvalOrFunc)/) {
+ my $lead = $1;
+ my $const = $2;
+ my $comp = $3;
+ my $to = $4;
+ my $newcomp = $comp;
+ if ($lead !~ /(?:$Operators|\.)\s*$/ &&
+ $to !~ /^(?:Constant|[A-Z_][A-Z0-9_]*)$/ &&
+ WARN("CONSTANT_COMPARISON",
+ "Comparisons should place the constant on the right side of the test\n" . $herecurr) &&
+ $fix) {
+ if ($comp eq "<") {
+ $newcomp = ">";
+ } elsif ($comp eq "<=") {
+ $newcomp = ">=";
+ } elsif ($comp eq ">") {
+ $newcomp = "<";
+ } elsif ($comp eq ">=") {
+ $newcomp = "<=";
+ }
+ $fixed[$fixlinenr] =~ s/\(\s*\Q$const\E\s*$Compare\s*\Q$to\E\s*\)/($to $newcomp $const)/;
+ }
+ }
+
+# Return of what appears to be an errno should normally be negative
+ if ($sline =~ /\breturn(?:\s*\(+\s*|\s+)(E[A-Z]+)(?:\s*\)+\s*|\s*)[;:,]/) {
+ my $name = $1;
+ if ($name ne 'EOF' && $name ne 'ERROR') {
+ WARN("USE_NEGATIVE_ERRNO",
+ "return of an errno should typically be negative (ie: return -$1)\n" . $herecurr);
+ }
+ }
+
+# Need a space before open parenthesis after if, while etc
+ if ($line =~ /\b(if|while|for|switch)\(/) {
+ if (ERROR("SPACING",
+ "space required before the open parenthesis '('\n" . $herecurr) &&
+ $fix) {
+ $fixed[$fixlinenr] =~
+ s/\b(if|while|for|switch)\(/$1 \(/;
+ }
+ }
+
+# Check for illegal assignment in if conditional -- and check for trailing
+# statements after the conditional.
+ if ($line =~ /do\s*(?!{)/) {
+ ($stat, $cond, $line_nr_next, $remain_next, $off_next) =
+ ctx_statement_block($linenr, $realcnt, 0)
+ if (!defined $stat);
+ my ($stat_next) = ctx_statement_block($line_nr_next,
+ $remain_next, $off_next);
+ $stat_next =~ s/\n./\n /g;
+ ##print "stat<$stat> stat_next<$stat_next>\n";
+
+ if ($stat_next =~ /^\s*while\b/) {
+ # If the statement carries leading newlines,
+ # then count those as offsets.
+ my ($whitespace) =
+ ($stat_next =~ /^((?:\s*\n[+-])*\s*)/s);
+ my $offset =
+ statement_rawlines($whitespace) - 1;
+
+ $suppress_whiletrailers{$line_nr_next +
+ $offset} = 1;
+ }
+ }
+ if (!defined $suppress_whiletrailers{$linenr} &&
+ defined($stat) && defined($cond) &&
+ $line =~ /\b(?:if|while|for)\s*\(/ && $line !~ /^.\s*#/) {
+ my ($s, $c) = ($stat, $cond);
+
+ if ($c =~ /\bif\s*\(.*[^<>!=]=[^=].*/s) {
+ ERROR("ASSIGN_IN_IF",
+ "do not use assignment in if condition\n" . $herecurr);
+ }
+
+ # Find out what is on the end of the line after the
+ # conditional.
+ substr($s, 0, length($c), '');
+ $s =~ s/\n.*//g;
+ $s =~ s/$;//g; # Remove any comments
+ if (length($c) && $s !~ /^\s*{?\s*\\*\s*$/ &&
+ $c !~ /}\s*while\s*/)
+ {
+ # Find out how long the conditional actually is.
+ my @newlines = ($c =~ /\n/gs);
+ my $cond_lines = 1 + $#newlines;
+ my $stat_real = '';
+
+ $stat_real = raw_line($linenr, $cond_lines)
+ . "\n" if ($cond_lines);
+ if (defined($stat_real) && $cond_lines > 1) {
+ $stat_real = "[...]\n$stat_real";
+ }
+
+ ERROR("TRAILING_STATEMENTS",
+ "trailing statements should be on next line\n" . $herecurr . $stat_real);
+ }
+ }
+
+# Check for bitwise tests written as boolean
+ if ($line =~ /
+ (?:
+ (?:\[|\(|\&\&|\|\|)
+ \s*0[xX][0-9]+\s*
+ (?:\&\&|\|\|)
+ |
+ (?:\&\&|\|\|)
+ \s*0[xX][0-9]+\s*
+ (?:\&\&|\|\||\)|\])
+ )/x)
+ {
+ WARN("HEXADECIMAL_BOOLEAN_TEST",
+ "boolean test with hexadecimal, perhaps just 1 \& or \|?\n" . $herecurr);
+ }
+
+# if and else should not have general statements after it
+ if ($line =~ /^.\s*(?:}\s*)?else\b(.*)/) {
+ my $s = $1;
+ $s =~ s/$;//g; # Remove any comments
+ if ($s !~ /^\s*(?:\sif|(?:{|)\s*\\?\s*$)/) {
+ ERROR("TRAILING_STATEMENTS",
+ "trailing statements should be on next line\n" . $herecurr);
+ }
+ }
+# if should not continue a brace
+ if ($line =~ /}\s*if\b/) {
+ ERROR("TRAILING_STATEMENTS",
+ "trailing statements should be on next line (or did you mean 'else if'?)\n" .
+ $herecurr);
+ }
+# case and default should not have general statements after them
+ if ($line =~ /^.\s*(?:case\s*.*|default\s*):/g &&
+ $line !~ /\G(?:
+ (?:\s*$;*)(?:\s*{)?(?:\s*$;*)(?:\s*\\)?\s*$|
+ \s*return\s+
+ )/xg)
+ {
+ ERROR("TRAILING_STATEMENTS",
+ "trailing statements should be on next line\n" . $herecurr);
+ }
+
+ # Check for }<nl>else {, these must be at the same
+ # indent level to be relevant to each other.
+ if ($prevline=~/}\s*$/ and $line=~/^.\s*else\s*/ &&
+ $previndent == $indent) {
+ if (ERROR("ELSE_AFTER_BRACE",
+ "else should follow close brace '}'\n" . $hereprev) &&
+ $fix && $prevline =~ /^\+/ && $line =~ /^\+/) {
+ fix_delete_line($fixlinenr - 1, $prevrawline);
+ fix_delete_line($fixlinenr, $rawline);
+ my $fixedline = $prevrawline;
+ $fixedline =~ s/}\s*$//;
+ if ($fixedline !~ /^\+\s*$/) {
+ fix_insert_line($fixlinenr, $fixedline);
+ }
+ $fixedline = $rawline;
+ $fixedline =~ s/^(.\s*)else/$1} else/;
+ fix_insert_line($fixlinenr, $fixedline);
+ }
+ }
+
+ if ($prevline=~/}\s*$/ and $line=~/^.\s*while\s*/ &&
+ $previndent == $indent) {
+ my ($s, $c) = ctx_statement_block($linenr, $realcnt, 0);
+
+ # Find out what is on the end of the line after the
+ # conditional.
+ substr($s, 0, length($c), '');
+ $s =~ s/\n.*//g;
+
+ if ($s =~ /^\s*;/) {
+ if (ERROR("WHILE_AFTER_BRACE",
+ "while should follow close brace '}'\n" . $hereprev) &&
+ $fix && $prevline =~ /^\+/ && $line =~ /^\+/) {
+ fix_delete_line($fixlinenr - 1, $prevrawline);
+ fix_delete_line($fixlinenr, $rawline);
+ my $fixedline = $prevrawline;
+ my $trailing = $rawline;
+ $trailing =~ s/^\+//;
+ $trailing = trim($trailing);
+ $fixedline =~ s/}\s*$/} $trailing/;
+ fix_insert_line($fixlinenr, $fixedline);
+ }
+ }
+ }
+
+#Specific variable tests
+ while ($line =~ m{($Constant|$Lval)}g) {
+ my $var = $1;
+
+#gcc binary extension
+ if ($var =~ /^$Binary$/) {
+ if (WARN("GCC_BINARY_CONSTANT",
+ "Avoid gcc v4.3+ binary constant extension: <$var>\n" . $herecurr) &&
+ $fix) {
+ my $hexval = sprintf("0x%x", oct($var));
+ $fixed[$fixlinenr] =~
+ s/\b$var\b/$hexval/;
+ }
+ }
+
+#CamelCase
+ if ($var !~ /^$Constant$/ &&
+ $var =~ /[A-Z][a-z]|[a-z][A-Z]/ &&
+#Ignore Page<foo> variants
+ $var !~ /^(?:Clear|Set|TestClear|TestSet|)Page[A-Z]/ &&
+#Ignore SI style variants like nS, mV and dB (ie: max_uV, regulator_min_uA_show)
+ $var !~ /^(?:[a-z_]*?)_?[a-z][A-Z](?:_[a-z_]+)?$/ &&
+#Ignore some three character SI units explicitly, like MiB and KHz
+ $var !~ /^(?:[a-z_]*?)_?(?:[KMGT]iB|[KMGT]?Hz)(?:_[a-z_]+)?$/) {
+ while ($var =~ m{($Ident)}g) {
+ my $word = $1;
+ next if ($word !~ /[A-Z][a-z]|[a-z][A-Z]/);
+ if ($check) {
+ seed_camelcase_includes();
+ if (!$file && !$camelcase_file_seeded) {
+ seed_camelcase_file($realfile);
+ $camelcase_file_seeded = 1;
+ }
+ }
+ if (!defined $camelcase{$word}) {
+ $camelcase{$word} = 1;
+ CHK("CAMELCASE",
+ "Avoid CamelCase: <$word>\n" . $herecurr);
+ }
+ }
+ }
+ }
+
+#no spaces allowed after \ in define
+ if ($line =~ /\#\s*define.*\\\s+$/) {
+ if (WARN("WHITESPACE_AFTER_LINE_CONTINUATION",
+ "Whitespace after \\ makes next lines useless\n" . $herecurr) &&
+ $fix) {
+ $fixed[$fixlinenr] =~ s/\s+$//;
+ }
+ }
+
+# warn if <asm/foo.h> is #included and <linux/foo.h> is available and includes
+# itself <asm/foo.h> (uses RAW line)
+ if ($tree && $rawline =~ m{^.\s*\#\s*include\s*\<asm\/(.*)\.h\>}) {
+ my $file = "$1.h";
+ my $checkfile = "include/linux/$file";
+ if (-f "$root/$checkfile" &&
+ $realfile ne $checkfile &&
+ $1 !~ /$allowed_asm_includes/)
+ {
+ my $asminclude = `grep -Ec "#include\\s+<asm/$file>" $root/$checkfile`;
+ if ($asminclude > 0) {
+ if ($realfile =~ m{^arch/}) {
+ CHK("ARCH_INCLUDE_LINUX",
+ "Consider using #include <linux/$file> instead of <asm/$file>\n" . $herecurr);
+ } else {
+ WARN("INCLUDE_LINUX",
+ "Use #include <linux/$file> instead of <asm/$file>\n" . $herecurr);
+ }
+ }
+ }
+ }
+
+# multi-statement macros should be enclosed in a do while loop, grab the
+# first statement and ensure its the whole macro if its not enclosed
+# in a known good container
+ if ($realfile !~ m@/vmlinux.lds.h$@ &&
+ $line =~ /^.\s*\#\s*define\s*$Ident(\()?/) {
+ my $ln = $linenr;
+ my $cnt = $realcnt;
+ my ($off, $dstat, $dcond, $rest);
+ my $ctx = '';
+ my $has_flow_statement = 0;
+ my $has_arg_concat = 0;
+ ($dstat, $dcond, $ln, $cnt, $off) =
+ ctx_statement_block($linenr, $realcnt, 0);
+ $ctx = $dstat;
+ #print "dstat<$dstat> dcond<$dcond> cnt<$cnt> off<$off>\n";
+ #print "LINE<$lines[$ln-1]> len<" . length($lines[$ln-1]) . "\n";
+
+ $has_flow_statement = 1 if ($ctx =~ /\b(goto|return)\b/);
+ $has_arg_concat = 1 if ($ctx =~ /\#\#/ && $ctx !~ /\#\#\s*(?:__VA_ARGS__|args)\b/);
+
+ $dstat =~ s/^.\s*\#\s*define\s+$Ident(\([^\)]*\))?\s*//;
+ my $define_args = $1;
+ my $define_stmt = $dstat;
+ my @def_args = ();
+
+ if (defined $define_args && $define_args ne "") {
+ $define_args = substr($define_args, 1, length($define_args) - 2);
+ $define_args =~ s/\s*//g;
+ @def_args = split(",", $define_args);
+ }
+
+ $dstat =~ s/$;//g;
+ $dstat =~ s/\\\n.//g;
+ $dstat =~ s/^\s*//s;
+ $dstat =~ s/\s*$//s;
+
+ # Flatten any parentheses and braces
+ while ($dstat =~ s/\([^\(\)]*\)/1/ ||
+ $dstat =~ s/\{[^\{\}]*\}/1/ ||
+ $dstat =~ s/.\[[^\[\]]*\]/1/)
+ {
+ }
+
+ # Flatten any obvious string concatentation.
+ while ($dstat =~ s/($String)\s*$Ident/$1/ ||
+ $dstat =~ s/$Ident\s*($String)/$1/)
+ {
+ }
+
+ # Make asm volatile uses seem like a generic function
+ $dstat =~ s/\b_*asm_*\s+_*volatile_*\b/asm_volatile/g;
+
+ my $exceptions = qr{
+ $Declare|
+ module_param_named|
+ MODULE_PARM_DESC|
+ DECLARE_PER_CPU|
+ DEFINE_PER_CPU|
+ __typeof__\(|
+ union|
+ struct|
+ \.$Ident\s*=\s*|
+ ^\"|\"$|
+ ^\[
+ }x;
+ #print "REST<$rest> dstat<$dstat> ctx<$ctx>\n";
+
+ $ctx =~ s/\n*$//;
+ my $stmt_cnt = statement_rawlines($ctx);
+ my $herectx = get_stat_here($linenr, $stmt_cnt, $here);
+
+ if ($dstat ne '' &&
+ $dstat !~ /^(?:$Ident|-?$Constant),$/ && # 10, // foo(),
+ $dstat !~ /^(?:$Ident|-?$Constant);$/ && # foo();
+ $dstat !~ /^[!~-]?(?:$Lval|$Constant)$/ && # 10 // foo() // !foo // ~foo // -foo // foo->bar // foo.bar->baz
+ $dstat !~ /^'X'$/ && $dstat !~ /^'XX'$/ && # character constants
+ $dstat !~ /$exceptions/ &&
+ $dstat !~ /^\.$Ident\s*=/ && # .foo =
+ $dstat !~ /^(?:\#\s*$Ident|\#\s*$Constant)\s*$/ && # stringification #foo
+ $dstat !~ /^do\s*$Constant\s*while\s*$Constant;?$/ && # do {...} while (...); // do {...} while (...)
+ $dstat !~ /^for\s*$Constant$/ && # for (...)
+ $dstat !~ /^for\s*$Constant\s+(?:$Ident|-?$Constant)$/ && # for (...) bar()
+ $dstat !~ /^do\s*{/ && # do {...
+ $dstat !~ /^\(\{/ && # ({...
+ $ctx !~ /^.\s*#\s*define\s+TRACE_(?:SYSTEM|INCLUDE_FILE|INCLUDE_PATH)\b/)
+ {
+ if ($dstat =~ /^\s*if\b/) {
+ ERROR("MULTISTATEMENT_MACRO_USE_DO_WHILE",
+ "Macros starting with if should be enclosed by a do - while loop to avoid possible if/else logic defects\n" . "$herectx");
+ } elsif ($dstat =~ /;/) {
+ ERROR("MULTISTATEMENT_MACRO_USE_DO_WHILE",
+ "Macros with multiple statements should be enclosed in a do - while loop\n" . "$herectx");
+ } else {
+ ERROR("COMPLEX_MACRO",
+ "Macros with complex values should be enclosed in parentheses\n" . "$herectx");
+ }
+
+ }
+
+ # Make $define_stmt single line, comment-free, etc
+ my @stmt_array = split('\n', $define_stmt);
+ my $first = 1;
+ $define_stmt = "";
+ foreach my $l (@stmt_array) {
+ $l =~ s/\\$//;
+ if ($first) {
+ $define_stmt = $l;
+ $first = 0;
+ } elsif ($l =~ /^[\+ ]/) {
+ $define_stmt .= substr($l, 1);
+ }
+ }
+ $define_stmt =~ s/$;//g;
+ $define_stmt =~ s/\s+/ /g;
+ $define_stmt = trim($define_stmt);
+
+# check if any macro arguments are reused (ignore '...' and 'type')
+ foreach my $arg (@def_args) {
+ next if ($arg =~ /\.\.\./);
+ next if ($arg =~ /^type$/i);
+ my $tmp_stmt = $define_stmt;
+ $tmp_stmt =~ s/\b(typeof|__typeof__|__builtin\w+|typecheck\s*\(\s*$Type\s*,|\#+)\s*\(*\s*$arg\s*\)*\b//g;
+ $tmp_stmt =~ s/\#+\s*$arg\b//g;
+ $tmp_stmt =~ s/\b$arg\s*\#\#//g;
+ my $use_cnt = $tmp_stmt =~ s/\b$arg\b//g;
+ if ($use_cnt > 1) {
+ CHK("MACRO_ARG_REUSE",
+ "Macro argument reuse '$arg' - possible side-effects?\n" . "$herectx");
+ }
+# check if any macro arguments may have other precedence issues
+ if ($tmp_stmt =~ m/($Operators)?\s*\b$arg\b\s*($Operators)?/m &&
+ ((defined($1) && $1 ne ',') ||
+ (defined($2) && $2 ne ','))) {
+ CHK("MACRO_ARG_PRECEDENCE",
+ "Macro argument '$arg' may be better as '($arg)' to avoid precedence issues\n" . "$herectx");
+ }
+ }
+
+# check for macros with flow control, but without ## concatenation
+# ## concatenation is commonly a macro that defines a function so ignore those
+ if ($has_flow_statement && !$has_arg_concat) {
+ my $cnt = statement_rawlines($ctx);
+ my $herectx = get_stat_here($linenr, $cnt, $here);
+
+ WARN("MACRO_WITH_FLOW_CONTROL",
+ "Macros with flow control statements should be avoided\n" . "$herectx");
+ }
+
+# check for line continuations outside of #defines, preprocessor #, and asm
+
+ } else {
+ if ($prevline !~ /^..*\\$/ &&
+ $line !~ /^\+\s*\#.*\\$/ && # preprocessor
+ $line !~ /^\+.*\b(__asm__|asm)\b.*\\$/ && # asm
+ $line =~ /^\+.*\\$/) {
+ WARN("LINE_CONTINUATIONS",
+ "Avoid unnecessary line continuations\n" . $herecurr);
+ }
+ }
+
+# do {} while (0) macro tests:
+# single-statement macros do not need to be enclosed in do while (0) loop,
+# macro should not end with a semicolon
+ if ($^V && $^V ge 5.10.0 &&
+ $realfile !~ m@/vmlinux.lds.h$@ &&
+ $line =~ /^.\s*\#\s*define\s+$Ident(\()?/) {
+ my $ln = $linenr;
+ my $cnt = $realcnt;
+ my ($off, $dstat, $dcond, $rest);
+ my $ctx = '';
+ ($dstat, $dcond, $ln, $cnt, $off) =
+ ctx_statement_block($linenr, $realcnt, 0);
+ $ctx = $dstat;
+
+ $dstat =~ s/\\\n.//g;
+ $dstat =~ s/$;/ /g;
+
+ if ($dstat =~ /^\+\s*#\s*define\s+$Ident\s*${balanced_parens}\s*do\s*{(.*)\s*}\s*while\s*\(\s*0\s*\)\s*([;\s]*)\s*$/) {
+ my $stmts = $2;
+ my $semis = $3;
+
+ $ctx =~ s/\n*$//;
+ my $cnt = statement_rawlines($ctx);
+ my $herectx = get_stat_here($linenr, $cnt, $here);
+
+ if (($stmts =~ tr/;/;/) == 1 &&
+ $stmts !~ /^\s*(if|while|for|switch)\b/) {
+ WARN("SINGLE_STATEMENT_DO_WHILE_MACRO",
+ "Single statement macros should not use a do {} while (0) loop\n" . "$herectx");
+ }
+ if (defined $semis && $semis ne "") {
+ WARN("DO_WHILE_MACRO_WITH_TRAILING_SEMICOLON",
+ "do {} while (0) macros should not be semicolon terminated\n" . "$herectx");
+ }
+ } elsif ($dstat =~ /^\+\s*#\s*define\s+$Ident.*;\s*$/) {
+ $ctx =~ s/\n*$//;
+ my $cnt = statement_rawlines($ctx);
+ my $herectx = get_stat_here($linenr, $cnt, $here);
+
+ WARN("TRAILING_SEMICOLON",
+ "macros should not use a trailing semicolon\n" . "$herectx");
+ }
+ }
+
+# make sure symbols are always wrapped with VMLINUX_SYMBOL() ...
+# all assignments may have only one of the following with an assignment:
+# .
+# ALIGN(...)
+# VMLINUX_SYMBOL(...)
+ if ($realfile eq 'vmlinux.lds.h' && $line =~ /(?:(?:^|\s)$Ident\s*=|=\s*$Ident(?:\s|$))/) {
+ WARN("MISSING_VMLINUX_SYMBOL",
+ "vmlinux.lds.h needs VMLINUX_SYMBOL() around C-visible symbols\n" . $herecurr);
+ }
+
+# check for redundant bracing round if etc
+ if ($line =~ /(^.*)\bif\b/ && $1 !~ /else\s*$/) {
+ my ($level, $endln, @chunks) =
+ ctx_statement_full($linenr, $realcnt, 1);
+ #print "chunks<$#chunks> linenr<$linenr> endln<$endln> level<$level>\n";
+ #print "APW: <<$chunks[1][0]>><<$chunks[1][1]>>\n";
+ if ($#chunks > 0 && $level == 0) {
+ my @allowed = ();
+ my $allow = 0;
+ my $seen = 0;
+ my $herectx = $here . "\n";
+ my $ln = $linenr - 1;
+ for my $chunk (@chunks) {
+ my ($cond, $block) = @{$chunk};
+
+ # If the condition carries leading newlines, then count those as offsets.
+ my ($whitespace) = ($cond =~ /^((?:\s*\n[+-])*\s*)/s);
+ my $offset = statement_rawlines($whitespace) - 1;
+
+ $allowed[$allow] = 0;
+ #print "COND<$cond> whitespace<$whitespace> offset<$offset>\n";
+
+ # We have looked at and allowed this specific line.
+ $suppress_ifbraces{$ln + $offset} = 1;
+
+ $herectx .= "$rawlines[$ln + $offset]\n[...]\n";
+ $ln += statement_rawlines($block) - 1;
+
+ substr($block, 0, length($cond), '');
+
+ $seen++ if ($block =~ /^\s*{/);
+
+ #print "cond<$cond> block<$block> allowed<$allowed[$allow]>\n";
+ if (statement_lines($cond) > 1) {
+ #print "APW: ALLOWED: cond<$cond>\n";
+ $allowed[$allow] = 1;
+ }
+ if ($block =~/\b(?:if|for|while)\b/) {
+ #print "APW: ALLOWED: block<$block>\n";
+ $allowed[$allow] = 1;
+ }
+ if (statement_block_size($block) > 1) {
+ #print "APW: ALLOWED: lines block<$block>\n";
+ $allowed[$allow] = 1;
+ }
+ $allow++;
+ }
+ if ($seen) {
+ my $sum_allowed = 0;
+ foreach (@allowed) {
+ $sum_allowed += $_;
+ }
+ if ($sum_allowed == 0) {
+ WARN("BRACES",
+ "braces {} are not necessary for any arm of this statement\n" . $herectx);
+ } elsif ($sum_allowed != $allow &&
+ $seen != $allow) {
+ CHK("BRACES",
+ "braces {} should be used on all arms of this statement\n" . $herectx);
+ }
+ }
+ }
+ }
+ if (!defined $suppress_ifbraces{$linenr - 1} &&
+ $line =~ /\b(if|while|for|else)\b/) {
+ my $allowed = 0;
+
+ # Check the pre-context.
+ if (substr($line, 0, $-[0]) =~ /(\}\s*)$/) {
+ #print "APW: ALLOWED: pre<$1>\n";
+ $allowed = 1;
+ }
+
+ my ($level, $endln, @chunks) =
+ ctx_statement_full($linenr, $realcnt, $-[0]);
+
+ # Check the condition.
+ my ($cond, $block) = @{$chunks[0]};
+ #print "CHECKING<$linenr> cond<$cond> block<$block>\n";
+ if (defined $cond) {
+ substr($block, 0, length($cond), '');
+ }
+ if (statement_lines($cond) > 1) {
+ #print "APW: ALLOWED: cond<$cond>\n";
+ $allowed = 1;
+ }
+ if ($block =~/\b(?:if|for|while)\b/) {
+ #print "APW: ALLOWED: block<$block>\n";
+ $allowed = 1;
+ }
+ if (statement_block_size($block) > 1) {
+ #print "APW: ALLOWED: lines block<$block>\n";
+ $allowed = 1;
+ }
+ # Check the post-context.
+ if (defined $chunks[1]) {
+ my ($cond, $block) = @{$chunks[1]};
+ if (defined $cond) {
+ substr($block, 0, length($cond), '');
+ }
+ if ($block =~ /^\s*\{/) {
+ #print "APW: ALLOWED: chunk-1 block<$block>\n";
+ $allowed = 1;
+ }
+ }
+ if ($level == 0 && $block =~ /^\s*\{/ && !$allowed) {
+ my $cnt = statement_rawlines($block);
+ my $herectx = get_stat_here($linenr, $cnt, $here);
+
+ WARN("BRACES",
+ "braces {} are not necessary for single statement blocks\n" . $herectx);
+ }
+ }
+
+# check for single line unbalanced braces
+ if ($sline =~ /^.\s*\}\s*else\s*$/ ||
+ $sline =~ /^.\s*else\s*\{\s*$/) {
+ CHK("BRACES", "Unbalanced braces around else statement\n" . $herecurr);
+ }
+
+# check for unnecessary blank lines around braces
+ if (($line =~ /^.\s*}\s*$/ && $prevrawline =~ /^.\s*$/)) {
+ if (CHK("BRACES",
+ "Blank lines aren't necessary before a close brace '}'\n" . $hereprev) &&
+ $fix && $prevrawline =~ /^\+/) {
+ fix_delete_line($fixlinenr - 1, $prevrawline);
+ }
+ }
+ if (($rawline =~ /^.\s*$/ && $prevline =~ /^..*{\s*$/)) {
+ if (CHK("BRACES",
+ "Blank lines aren't necessary after an open brace '{'\n" . $hereprev) &&
+ $fix) {
+ fix_delete_line($fixlinenr, $rawline);
+ }
+ }
+
+# no volatiles please
+ my $asm_volatile = qr{\b(__asm__|asm)\s+(__volatile__|volatile)\b};
+ if ($line =~ /\bvolatile\b/ && $line !~ /$asm_volatile/) {
+ WARN("VOLATILE",
+ "Use of volatile is usually wrong: see Documentation/process/volatile-considered-harmful.rst\n" . $herecurr);
+ }
+
+# Check for user-visible strings broken across lines, which breaks the ability
+# to grep for the string. Make exceptions when the previous string ends in a
+# newline (multiple lines in one string constant) or '\t', '\r', ';', or '{'
+# (common in inline assembly) or is a octal \123 or hexadecimal \xaf value
+ if ($line =~ /^\+\s*$String/ &&
+ $prevline =~ /"\s*$/ &&
+ $prevrawline !~ /(?:\\(?:[ntr]|[0-7]{1,3}|x[0-9a-fA-F]{1,2})|;\s*|\{\s*)"\s*$/) {
+ if (WARN("SPLIT_STRING",
+ "quoted string split across lines\n" . $hereprev) &&
+ $fix &&
+ $prevrawline =~ /^\+.*"\s*$/ &&
+ $last_coalesced_string_linenr != $linenr - 1) {
+ my $extracted_string = get_quoted_string($line, $rawline);
+ my $comma_close = "";
+ if ($rawline =~ /\Q$extracted_string\E(\s*\)\s*;\s*$|\s*,\s*)/) {
+ $comma_close = $1;
+ }
+
+ fix_delete_line($fixlinenr - 1, $prevrawline);
+ fix_delete_line($fixlinenr, $rawline);
+ my $fixedline = $prevrawline;
+ $fixedline =~ s/"\s*$//;
+ $fixedline .= substr($extracted_string, 1) . trim($comma_close);
+ fix_insert_line($fixlinenr - 1, $fixedline);
+ $fixedline = $rawline;
+ $fixedline =~ s/\Q$extracted_string\E\Q$comma_close\E//;
+ if ($fixedline !~ /\+\s*$/) {
+ fix_insert_line($fixlinenr, $fixedline);
+ }
+ $last_coalesced_string_linenr = $linenr;
+ }
+ }
+
+# check for missing a space in a string concatenation
+ if ($prevrawline =~ /[^\\]\w"$/ && $rawline =~ /^\+[\t ]+"\w/) {
+ WARN('MISSING_SPACE',
+ "break quoted strings at a space character\n" . $hereprev);
+ }
+
+# check for an embedded function name in a string when the function is known
+# This does not work very well for -f --file checking as it depends on patch
+# context providing the function name or a single line form for in-file
+# function declarations
+ if ($line =~ /^\+.*$String/ &&
+ defined($context_function) &&
+ get_quoted_string($line, $rawline) =~ /\b$context_function\b/ &&
+ length(get_quoted_string($line, $rawline)) != (length($context_function) + 2)) {
+ WARN("EMBEDDED_FUNCTION_NAME",
+ "Prefer using '\"%s...\", __func__' to using '$context_function', this function's name, in a string\n" . $herecurr);
+ }
+
+# check for spaces before a quoted newline
+ if ($rawline =~ /^.*\".*\s\\n/) {
+ if (WARN("QUOTED_WHITESPACE_BEFORE_NEWLINE",
+ "unnecessary whitespace before a quoted newline\n" . $herecurr) &&
+ $fix) {
+ $fixed[$fixlinenr] =~ s/^(\+.*\".*)\s+\\n/$1\\n/;
+ }
+
+ }
+
+# concatenated string without spaces between elements
+ if ($line =~ /$String[A-Z_]/ || $line =~ /[A-Za-z0-9_]$String/) {
+ CHK("CONCATENATED_STRING",
+ "Concatenated strings should use spaces between elements\n" . $herecurr);
+ }
+
+# uncoalesced string fragments
+ if ($line =~ /$String\s*"/) {
+ WARN("STRING_FRAGMENTS",
+ "Consecutive strings are generally better as a single string\n" . $herecurr);
+ }
+
+# check for non-standard and hex prefixed decimal printf formats
+ my $show_L = 1; #don't show the same defect twice
+ my $show_Z = 1;
+ while ($line =~ /(?:^|")([X\t]*)(?:"|$)/g) {
+ my $string = substr($rawline, $-[1], $+[1] - $-[1]);
+ $string =~ s/%%/__/g;
+ # check for %L
+ if ($show_L && $string =~ /%[\*\d\.\$]*L([diouxX])/) {
+ WARN("PRINTF_L",
+ "\%L$1 is non-standard C, use %ll$1\n" . $herecurr);
+ $show_L = 0;
+ }
+ # check for %Z
+ if ($show_Z && $string =~ /%[\*\d\.\$]*Z([diouxX])/) {
+ WARN("PRINTF_Z",
+ "%Z$1 is non-standard C, use %z$1\n" . $herecurr);
+ $show_Z = 0;
+ }
+ # check for 0x<decimal>
+ if ($string =~ /0x%[\*\d\.\$\Llzth]*[diou]/) {
+ ERROR("PRINTF_0XDECIMAL",
+ "Prefixing 0x with decimal output is defective\n" . $herecurr);
+ }
+ }
+
+# check for line continuations in quoted strings with odd counts of "
+ if ($rawline =~ /\\$/ && $sline =~ tr/"/"/ % 2) {
+ WARN("LINE_CONTINUATIONS",
+ "Avoid line continuations in quoted strings\n" . $herecurr);
+ }
+
+# warn about #if 0
+ if ($line =~ /^.\s*\#\s*if\s+0\b/) {
+ CHK("REDUNDANT_CODE",
+ "if this code is redundant consider removing it\n" .
+ $herecurr);
+ }
+
+# check for needless "if (<foo>) fn(<foo>)" uses
+ if ($prevline =~ /\bif\s*\(\s*($Lval)\s*\)/) {
+ my $tested = quotemeta($1);
+ my $expr = '\s*\(\s*' . $tested . '\s*\)\s*;';
+ if ($line =~ /\b(kfree|usb_free_urb|debugfs_remove(?:_recursive)?|(?:kmem_cache|mempool|dma_pool)_destroy)$expr/) {
+ my $func = $1;
+ if (WARN('NEEDLESS_IF',
+ "$func(NULL) is safe and this check is probably not required\n" . $hereprev) &&
+ $fix) {
+ my $do_fix = 1;
+ my $leading_tabs = "";
+ my $new_leading_tabs = "";
+ if ($lines[$linenr - 2] =~ /^\+(\t*)if\s*\(\s*$tested\s*\)\s*$/) {
+ $leading_tabs = $1;
+ } else {
+ $do_fix = 0;
+ }
+ if ($lines[$linenr - 1] =~ /^\+(\t+)$func\s*\(\s*$tested\s*\)\s*;\s*$/) {
+ $new_leading_tabs = $1;
+ if (length($leading_tabs) + 1 ne length($new_leading_tabs)) {
+ $do_fix = 0;
+ }
+ } else {
+ $do_fix = 0;
+ }
+ if ($do_fix) {
+ fix_delete_line($fixlinenr - 1, $prevrawline);
+ $fixed[$fixlinenr] =~ s/^\+$new_leading_tabs/\+$leading_tabs/;
+ }
+ }
+ }
+ }
+
+# check for unnecessary "Out of Memory" messages
+ if ($line =~ /^\+.*\b$logFunctions\s*\(/ &&
+ $prevline =~ /^[ \+]\s*if\s*\(\s*(\!\s*|NULL\s*==\s*)?($Lval)(\s*==\s*NULL\s*)?\s*\)/ &&
+ (defined $1 || defined $3) &&
+ $linenr > 3) {
+ my $testval = $2;
+ my $testline = $lines[$linenr - 3];
+
+ my ($s, $c) = ctx_statement_block($linenr - 3, $realcnt, 0);
+# print("line: <$line>\nprevline: <$prevline>\ns: <$s>\nc: <$c>\n\n\n");
+
+ if ($s =~ /(?:^|\n)[ \+]\s*(?:$Type\s*)?\Q$testval\E\s*=\s*(?:\([^\)]*\)\s*)?\s*(?:devm_)?(?:[kv][czm]alloc(?:_node|_array)?\b|kstrdup|kmemdup|(?:dev_)?alloc_skb)/) {
+ WARN("OOM_MESSAGE",
+ "Possible unnecessary 'out of memory' message\n" . $hereprev);
+ }
+ }
+
+# check for logging functions with KERN_<LEVEL>
+ if ($line !~ /printk(?:_ratelimited|_once)?\s*\(/ &&
+ $line =~ /\b$logFunctions\s*\(.*\b(KERN_[A-Z]+)\b/) {
+ my $level = $1;
+ if (WARN("UNNECESSARY_KERN_LEVEL",
+ "Possible unnecessary $level\n" . $herecurr) &&
+ $fix) {
+ $fixed[$fixlinenr] =~ s/\s*$level\s*//;
+ }
+ }
+
+# check for logging continuations
+ if ($line =~ /\bprintk\s*\(\s*KERN_CONT\b|\bpr_cont\s*\(/) {
+ WARN("LOGGING_CONTINUATION",
+ "Avoid logging continuation uses where feasible\n" . $herecurr);
+ }
+
+# check for mask then right shift without a parentheses
+ if ($^V && $^V ge 5.10.0 &&
+ $line =~ /$LvalOrFunc\s*\&\s*($LvalOrFunc)\s*>>/ &&
+ $4 !~ /^\&/) { # $LvalOrFunc may be &foo, ignore if so
+ WARN("MASK_THEN_SHIFT",
+ "Possible precedence defect with mask then right shift - may need parentheses\n" . $herecurr);
+ }
+
+# check for pointer comparisons to NULL
+ if ($^V && $^V ge 5.10.0) {
+ while ($line =~ /\b$LvalOrFunc\s*(==|\!=)\s*NULL\b/g) {
+ my $val = $1;
+ my $equal = "!";
+ $equal = "" if ($4 eq "!=");
+ if (CHK("COMPARISON_TO_NULL",
+ "Comparison to NULL could be written \"${equal}${val}\"\n" . $herecurr) &&
+ $fix) {
+ $fixed[$fixlinenr] =~ s/\b\Q$val\E\s*(?:==|\!=)\s*NULL\b/$equal$val/;
+ }
+ }
+ }
+
+# check for bad placement of section $InitAttribute (e.g.: __initdata)
+ if ($line =~ /(\b$InitAttribute\b)/) {
+ my $attr = $1;
+ if ($line =~ /^\+\s*static\s+(?:const\s+)?(?:$attr\s+)?($NonptrTypeWithAttr)\s+(?:$attr\s+)?($Ident(?:\[[^]]*\])?)\s*[=;]/) {
+ my $ptr = $1;
+ my $var = $2;
+ if ((($ptr =~ /\b(union|struct)\s+$attr\b/ &&
+ ERROR("MISPLACED_INIT",
+ "$attr should be placed after $var\n" . $herecurr)) ||
+ ($ptr !~ /\b(union|struct)\s+$attr\b/ &&
+ WARN("MISPLACED_INIT",
+ "$attr should be placed after $var\n" . $herecurr))) &&
+ $fix) {
+ $fixed[$fixlinenr] =~ s/(\bstatic\s+(?:const\s+)?)(?:$attr\s+)?($NonptrTypeWithAttr)\s+(?:$attr\s+)?($Ident(?:\[[^]]*\])?)\s*([=;])\s*/"$1" . trim(string_find_replace($2, "\\s*$attr\\s*", " ")) . " " . trim(string_find_replace($3, "\\s*$attr\\s*", "")) . " $attr" . ("$4" eq ";" ? ";" : " = ")/e;
+ }
+ }
+ }
+
+# check for $InitAttributeData (ie: __initdata) with const
+ if ($line =~ /\bconst\b/ && $line =~ /($InitAttributeData)/) {
+ my $attr = $1;
+ $attr =~ /($InitAttributePrefix)(.*)/;
+ my $attr_prefix = $1;
+ my $attr_type = $2;
+ if (ERROR("INIT_ATTRIBUTE",
+ "Use of const init definition must use ${attr_prefix}initconst\n" . $herecurr) &&
+ $fix) {
+ $fixed[$fixlinenr] =~
+ s/$InitAttributeData/${attr_prefix}initconst/;
+ }
+ }
+
+# check for $InitAttributeConst (ie: __initconst) without const
+ if ($line !~ /\bconst\b/ && $line =~ /($InitAttributeConst)/) {
+ my $attr = $1;
+ if (ERROR("INIT_ATTRIBUTE",
+ "Use of $attr requires a separate use of const\n" . $herecurr) &&
+ $fix) {
+ my $lead = $fixed[$fixlinenr] =~
+ /(^\+\s*(?:static\s+))/;
+ $lead = rtrim($1);
+ $lead = "$lead " if ($lead !~ /^\+$/);
+ $lead = "${lead}const ";
+ $fixed[$fixlinenr] =~ s/(^\+\s*(?:static\s+))/$lead/;
+ }
+ }
+
+# check for __read_mostly with const non-pointer (should just be const)
+ if ($line =~ /\b__read_mostly\b/ &&
+ $line =~ /($Type)\s*$Ident/ && $1 !~ /\*\s*$/ && $1 =~ /\bconst\b/) {
+ if (ERROR("CONST_READ_MOSTLY",
+ "Invalid use of __read_mostly with const type\n" . $herecurr) &&
+ $fix) {
+ $fixed[$fixlinenr] =~ s/\s+__read_mostly\b//;
+ }
+ }
+
+# don't use __constant_<foo> functions outside of include/uapi/
+ if ($realfile !~ m@^include/uapi/@ &&
+ $line =~ /(__constant_(?:htons|ntohs|[bl]e(?:16|32|64)_to_cpu|cpu_to_[bl]e(?:16|32|64)))\s*\(/) {
+ my $constant_func = $1;
+ my $func = $constant_func;
+ $func =~ s/^__constant_//;
+ if (WARN("CONSTANT_CONVERSION",
+ "$constant_func should be $func\n" . $herecurr) &&
+ $fix) {
+ $fixed[$fixlinenr] =~ s/\b$constant_func\b/$func/g;
+ }
+ }
+
+# prefer usleep_range over udelay
+ if ($line =~ /\budelay\s*\(\s*(\d+)\s*\)/) {
+ my $delay = $1;
+ # ignore udelay's < 10, however
+ if (! ($delay < 10) ) {
+ CHK("USLEEP_RANGE",
+ "usleep_range is preferred over udelay; see Documentation/timers/timers-howto.txt\n" . $herecurr);
+ }
+ if ($delay > 2000) {
+ WARN("LONG_UDELAY",
+ "long udelay - prefer mdelay; see arch/arm/include/asm/delay.h\n" . $herecurr);
+ }
+ }
+
+# warn about unexpectedly long msleep's
+ if ($line =~ /\bmsleep\s*\((\d+)\);/) {
+ if ($1 < 20) {
+ WARN("MSLEEP",
+ "msleep < 20ms can sleep for up to 20ms; see Documentation/timers/timers-howto.txt\n" . $herecurr);
+ }
+ }
+
+# check for comparisons of jiffies
+ if ($line =~ /\bjiffies\s*$Compare|$Compare\s*jiffies\b/) {
+ WARN("JIFFIES_COMPARISON",
+ "Comparing jiffies is almost always wrong; prefer time_after, time_before and friends\n" . $herecurr);
+ }
+
+# check for comparisons of get_jiffies_64()
+ if ($line =~ /\bget_jiffies_64\s*\(\s*\)\s*$Compare|$Compare\s*get_jiffies_64\s*\(\s*\)/) {
+ WARN("JIFFIES_COMPARISON",
+ "Comparing get_jiffies_64() is almost always wrong; prefer time_after64, time_before64 and friends\n" . $herecurr);
+ }
+
+# warn about #ifdefs in C files
+# if ($line =~ /^.\s*\#\s*if(|n)def/ && ($realfile =~ /\.c$/)) {
+# print "#ifdef in C files should be avoided\n";
+# print "$herecurr";
+# $clean = 0;
+# }
+
+# warn about spacing in #ifdefs
+ if ($line =~ /^.\s*\#\s*(ifdef|ifndef|elif)\s\s+/) {
+ if (ERROR("SPACING",
+ "exactly one space required after that #$1\n" . $herecurr) &&
+ $fix) {
+ $fixed[$fixlinenr] =~
+ s/^(.\s*\#\s*(ifdef|ifndef|elif))\s{2,}/$1 /;
+ }
+
+ }
+
+# check for spinlock_t definitions without a comment.
+ if ($line =~ /^.\s*(struct\s+mutex|spinlock_t)\s+\S+;/ ||
+ $line =~ /^.\s*(DEFINE_MUTEX)\s*\(/) {
+ my $which = $1;
+ if (!ctx_has_comment($first_line, $linenr)) {
+ CHK("UNCOMMENTED_DEFINITION",
+ "$1 definition without comment\n" . $herecurr);
+ }
+ }
+# check for memory barriers without a comment.
+
+ my $barriers = qr{
+ mb|
+ rmb|
+ wmb|
+ read_barrier_depends
+ }x;
+ my $barrier_stems = qr{
+ mb__before_atomic|
+ mb__after_atomic|
+ store_release|
+ load_acquire|
+ store_mb|
+ (?:$barriers)
+ }x;
+ my $all_barriers = qr{
+ (?:$barriers)|
+ smp_(?:$barrier_stems)|
+ virt_(?:$barrier_stems)
+ }x;
+
+ if ($line =~ /\b(?:$all_barriers)\s*\(/) {
+ if (!ctx_has_comment($first_line, $linenr)) {
+ WARN("MEMORY_BARRIER",
+ "memory barrier without comment\n" . $herecurr);
+ }
+ }
+
+ my $underscore_smp_barriers = qr{__smp_(?:$barrier_stems)}x;
+
+ if ($realfile !~ m@^include/asm-generic/@ &&
+ $realfile !~ m@/barrier\.h$@ &&
+ $line =~ m/\b(?:$underscore_smp_barriers)\s*\(/ &&
+ $line !~ m/^.\s*\#\s*define\s+(?:$underscore_smp_barriers)\s*\(/) {
+ WARN("MEMORY_BARRIER",
+ "__smp memory barriers shouldn't be used outside barrier.h and asm-generic\n" . $herecurr);
+ }
+
+# check for waitqueue_active without a comment.
+ if ($line =~ /\bwaitqueue_active\s*\(/) {
+ if (!ctx_has_comment($first_line, $linenr)) {
+ WARN("WAITQUEUE_ACTIVE",
+ "waitqueue_active without comment\n" . $herecurr);
+ }
+ }
+
+# check for smp_read_barrier_depends and read_barrier_depends
+ if (!$file && $line =~ /\b(smp_|)read_barrier_depends\s*\(/) {
+ WARN("READ_BARRIER_DEPENDS",
+ "$1read_barrier_depends should only be used in READ_ONCE or DEC Alpha code\n" . $herecurr);
+ }
+
+# check of hardware specific defines
+ if ($line =~ m@^.\s*\#\s*if.*\b(__i386__|__powerpc64__|__sun__|__s390x__)\b@ && $realfile !~ m@include/asm-@) {
+ CHK("ARCH_DEFINES",
+ "architecture specific defines should be avoided\n" . $herecurr);
+ }
+
+# check that the storage class is not after a type
+ if ($line =~ /\b($Type)\s+($Storage)\b/) {
+ WARN("STORAGE_CLASS",
+ "storage class '$2' should be located before type '$1'\n" . $herecurr);
+ }
+# Check that the storage class is at the beginning of a declaration
+ if ($line =~ /\b$Storage\b/ &&
+ $line !~ /^.\s*$Storage/ &&
+ $line =~ /^.\s*(.+?)\$Storage\s/ &&
+ $1 !~ /[\,\)]\s*$/) {
+ WARN("STORAGE_CLASS",
+ "storage class should be at the beginning of the declaration\n" . $herecurr);
+ }
+
+# check the location of the inline attribute, that it is between
+# storage class and type.
+ if ($line =~ /\b$Type\s+$Inline\b/ ||
+ $line =~ /\b$Inline\s+$Storage\b/) {
+ ERROR("INLINE_LOCATION",
+ "inline keyword should sit between storage class and type\n" . $herecurr);
+ }
+
+# Check for __inline__ and __inline, prefer inline
+ if ($realfile !~ m@\binclude/uapi/@ &&
+ $line =~ /\b(__inline__|__inline)\b/) {
+ if (WARN("INLINE",
+ "plain inline is preferred over $1\n" . $herecurr) &&
+ $fix) {
+ $fixed[$fixlinenr] =~ s/\b(__inline__|__inline)\b/inline/;
+
+ }
+ }
+
+# Check for __attribute__ packed, prefer __packed
+ if ($realfile !~ m@\binclude/uapi/@ &&
+ $line =~ /\b__attribute__\s*\(\s*\(.*\bpacked\b/) {
+ WARN("PREFER_PACKED",
+ "__packed is preferred over __attribute__((packed))\n" . $herecurr);
+ }
+
+# Check for __attribute__ aligned, prefer __aligned
+ if ($realfile !~ m@\binclude/uapi/@ &&
+ $line =~ /\b__attribute__\s*\(\s*\(.*aligned/) {
+ WARN("PREFER_ALIGNED",
+ "__aligned(size) is preferred over __attribute__((aligned(size)))\n" . $herecurr);
+ }
+
+# Check for __attribute__ format(printf, prefer __printf
+ if ($realfile !~ m@\binclude/uapi/@ &&
+ $line =~ /\b__attribute__\s*\(\s*\(\s*format\s*\(\s*printf/) {
+ if (WARN("PREFER_PRINTF",
+ "__printf(string-index, first-to-check) is preferred over __attribute__((format(printf, string-index, first-to-check)))\n" . $herecurr) &&
+ $fix) {
+ $fixed[$fixlinenr] =~ s/\b__attribute__\s*\(\s*\(\s*format\s*\(\s*printf\s*,\s*(.*)\)\s*\)\s*\)/"__printf(" . trim($1) . ")"/ex;
+
+ }
+ }
+
+# Check for __attribute__ format(scanf, prefer __scanf
+ if ($realfile !~ m@\binclude/uapi/@ &&
+ $line =~ /\b__attribute__\s*\(\s*\(\s*format\s*\(\s*scanf\b/) {
+ if (WARN("PREFER_SCANF",
+ "__scanf(string-index, first-to-check) is preferred over __attribute__((format(scanf, string-index, first-to-check)))\n" . $herecurr) &&
+ $fix) {
+ $fixed[$fixlinenr] =~ s/\b__attribute__\s*\(\s*\(\s*format\s*\(\s*scanf\s*,\s*(.*)\)\s*\)\s*\)/"__scanf(" . trim($1) . ")"/ex;
+ }
+ }
+
+# Check for __attribute__ weak, or __weak declarations (may have link issues)
+ if ($^V && $^V ge 5.10.0 &&
+ $line =~ /(?:$Declare|$DeclareMisordered)\s*$Ident\s*$balanced_parens\s*(?:$Attribute)?\s*;/ &&
+ ($line =~ /\b__attribute__\s*\(\s*\(.*\bweak\b/ ||
+ $line =~ /\b__weak\b/)) {
+ ERROR("WEAK_DECLARATION",
+ "Using weak declarations can have unintended link defects\n" . $herecurr);
+ }
+
+# check for c99 types like uint8_t used outside of uapi/ and tools/
+ if ($realfile !~ m@\binclude/uapi/@ &&
+ $realfile !~ m@\btools/@ &&
+ $line =~ /\b($Declare)\s*$Ident\s*[=;,\[]/) {
+ my $type = $1;
+ if ($type =~ /\b($typeC99Typedefs)\b/) {
+ $type = $1;
+ my $kernel_type = 'u';
+ $kernel_type = 's' if ($type =~ /^_*[si]/);
+ $type =~ /(\d+)/;
+ $kernel_type .= $1;
+ if (CHK("PREFER_KERNEL_TYPES",
+ "Prefer kernel type '$kernel_type' over '$type'\n" . $herecurr) &&
+ $fix) {
+ $fixed[$fixlinenr] =~ s/\b$type\b/$kernel_type/;
+ }
+ }
+ }
+
+# check for cast of C90 native int or longer types constants
+ if ($line =~ /(\(\s*$C90_int_types\s*\)\s*)($Constant)\b/) {
+ my $cast = $1;
+ my $const = $2;
+ if (WARN("TYPECAST_INT_CONSTANT",
+ "Unnecessary typecast of c90 int constant\n" . $herecurr) &&
+ $fix) {
+ my $suffix = "";
+ my $newconst = $const;
+ $newconst =~ s/${Int_type}$//;
+ $suffix .= 'U' if ($cast =~ /\bunsigned\b/);
+ if ($cast =~ /\blong\s+long\b/) {
+ $suffix .= 'LL';
+ } elsif ($cast =~ /\blong\b/) {
+ $suffix .= 'L';
+ }
+ $fixed[$fixlinenr] =~ s/\Q$cast\E$const\b/$newconst$suffix/;
+ }
+ }
+
+# check for sizeof(&)
+ if ($line =~ /\bsizeof\s*\(\s*\&/) {
+ WARN("SIZEOF_ADDRESS",
+ "sizeof(& should be avoided\n" . $herecurr);
+ }
+
+# check for sizeof without parenthesis
+ if ($line =~ /\bsizeof\s+((?:\*\s*|)$Lval|$Type(?:\s+$Lval|))/) {
+ if (WARN("SIZEOF_PARENTHESIS",
+ "sizeof $1 should be sizeof($1)\n" . $herecurr) &&
+ $fix) {
+ $fixed[$fixlinenr] =~ s/\bsizeof\s+((?:\*\s*|)$Lval|$Type(?:\s+$Lval|))/"sizeof(" . trim($1) . ")"/ex;
+ }
+ }
+
+# check for struct spinlock declarations
+ if ($line =~ /^.\s*\bstruct\s+spinlock\s+\w+\s*;/) {
+ WARN("USE_SPINLOCK_T",
+ "struct spinlock should be spinlock_t\n" . $herecurr);
+ }
+
+# check for seq_printf uses that could be seq_puts
+ if ($sline =~ /\bseq_printf\s*\(.*"\s*\)\s*;\s*$/) {
+ my $fmt = get_quoted_string($line, $rawline);
+ $fmt =~ s/%%//g;
+ if ($fmt !~ /%/) {
+ if (WARN("PREFER_SEQ_PUTS",
+ "Prefer seq_puts to seq_printf\n" . $herecurr) &&
+ $fix) {
+ $fixed[$fixlinenr] =~ s/\bseq_printf\b/seq_puts/;
+ }
+ }
+ }
+
+# check for vsprintf extension %p<foo> misuses
+ if ($^V && $^V ge 5.10.0 &&
+ defined $stat &&
+ $stat =~ /^\+(?![^\{]*\{\s*).*\b(\w+)\s*\(.*$String\s*,/s &&
+ $1 !~ /^_*volatile_*$/) {
+ my $specifier;
+ my $extension;
+ my $bad_specifier = "";
+ my $stat_real;
+
+ my $lc = $stat =~ tr@\n@@;
+ $lc = $lc + $linenr;
+ for (my $count = $linenr; $count <= $lc; $count++) {
+ my $fmt = get_quoted_string($lines[$count - 1], raw_line($count, 0));
+ $fmt =~ s/%%//g;
+
+ while ($fmt =~ /(\%[\*\d\.]*p(\w))/g) {
+ $specifier = $1;
+ $extension = $2;
+ if ($extension !~ /[SsBKRraEhMmIiUDdgVCbGNOx]/) {
+ $bad_specifier = $specifier;
+ last;
+ }
+ if ($extension eq "x" && !defined($stat_real)) {
+ if (!defined($stat_real)) {
+ $stat_real = get_stat_real($linenr, $lc);
+ }
+ WARN("VSPRINTF_SPECIFIER_PX",
+ "Using vsprintf specifier '\%px' potentially exposes the kernel memory layout, if you don't really need the address please consider using '\%p'.\n" . "$here\n$stat_real\n");
+ }
+ }
+ if ($bad_specifier ne "") {
+ my $stat_real = get_stat_real($linenr, $lc);
+ my $ext_type = "Invalid";
+ my $use = "";
+ if ($bad_specifier =~ /p[Ff]/) {
+ $ext_type = "Deprecated";
+ $use = " - use %pS instead";
+ $use =~ s/pS/ps/ if ($bad_specifier =~ /pf/);
+ }
+
+ WARN("VSPRINTF_POINTER_EXTENSION",
+ "$ext_type vsprintf pointer extension '$bad_specifier'$use\n" . "$here\n$stat_real\n");
+ }
+ }
+ }
+
+# Check for misused memsets
+ if ($^V && $^V ge 5.10.0 &&
+ defined $stat &&
+ $stat =~ /^\+(?:.*?)\bmemset\s*\(\s*$FuncArg\s*,\s*$FuncArg\s*\,\s*$FuncArg\s*\)/) {
+
+ my $ms_addr = $2;
+ my $ms_val = $7;
+ my $ms_size = $12;
+
+ if ($ms_size =~ /^(0x|)0$/i) {
+ ERROR("MEMSET",
+ "memset to 0's uses 0 as the 2nd argument, not the 3rd\n" . "$here\n$stat\n");
+ } elsif ($ms_size =~ /^(0x|)1$/i) {
+ WARN("MEMSET",
+ "single byte memset is suspicious. Swapped 2nd/3rd argument?\n" . "$here\n$stat\n");
+ }
+ }
+
+# Check for memcpy(foo, bar, ETH_ALEN) that could be ether_addr_copy(foo, bar)
+# if ($^V && $^V ge 5.10.0 &&
+# defined $stat &&
+# $stat =~ /^\+(?:.*?)\bmemcpy\s*\(\s*$FuncArg\s*,\s*$FuncArg\s*\,\s*ETH_ALEN\s*\)/) {
+# if (WARN("PREFER_ETHER_ADDR_COPY",
+# "Prefer ether_addr_copy() over memcpy() if the Ethernet addresses are __aligned(2)\n" . "$here\n$stat\n") &&
+# $fix) {
+# $fixed[$fixlinenr] =~ s/\bmemcpy\s*\(\s*$FuncArg\s*,\s*$FuncArg\s*\,\s*ETH_ALEN\s*\)/ether_addr_copy($2, $7)/;
+# }
+# }
+
+# Check for memcmp(foo, bar, ETH_ALEN) that could be ether_addr_equal*(foo, bar)
+# if ($^V && $^V ge 5.10.0 &&
+# defined $stat &&
+# $stat =~ /^\+(?:.*?)\bmemcmp\s*\(\s*$FuncArg\s*,\s*$FuncArg\s*\,\s*ETH_ALEN\s*\)/) {
+# WARN("PREFER_ETHER_ADDR_EQUAL",
+# "Prefer ether_addr_equal() or ether_addr_equal_unaligned() over memcmp()\n" . "$here\n$stat\n")
+# }
+
+# check for memset(foo, 0x0, ETH_ALEN) that could be eth_zero_addr
+# check for memset(foo, 0xFF, ETH_ALEN) that could be eth_broadcast_addr
+# if ($^V && $^V ge 5.10.0 &&
+# defined $stat &&
+# $stat =~ /^\+(?:.*?)\bmemset\s*\(\s*$FuncArg\s*,\s*$FuncArg\s*\,\s*ETH_ALEN\s*\)/) {
+#
+# my $ms_val = $7;
+#
+# if ($ms_val =~ /^(?:0x|)0+$/i) {
+# if (WARN("PREFER_ETH_ZERO_ADDR",
+# "Prefer eth_zero_addr over memset()\n" . "$here\n$stat\n") &&
+# $fix) {
+# $fixed[$fixlinenr] =~ s/\bmemset\s*\(\s*$FuncArg\s*,\s*$FuncArg\s*,\s*ETH_ALEN\s*\)/eth_zero_addr($2)/;
+# }
+# } elsif ($ms_val =~ /^(?:0xff|255)$/i) {
+# if (WARN("PREFER_ETH_BROADCAST_ADDR",
+# "Prefer eth_broadcast_addr() over memset()\n" . "$here\n$stat\n") &&
+# $fix) {
+# $fixed[$fixlinenr] =~ s/\bmemset\s*\(\s*$FuncArg\s*,\s*$FuncArg\s*,\s*ETH_ALEN\s*\)/eth_broadcast_addr($2)/;
+# }
+# }
+# }
+
+# typecasts on min/max could be min_t/max_t
+ if ($^V && $^V ge 5.10.0 &&
+ defined $stat &&
+ $stat =~ /^\+(?:.*?)\b(min|max)\s*\(\s*$FuncArg\s*,\s*$FuncArg\s*\)/) {
+ if (defined $2 || defined $7) {
+ my $call = $1;
+ my $cast1 = deparenthesize($2);
+ my $arg1 = $3;
+ my $cast2 = deparenthesize($7);
+ my $arg2 = $8;
+ my $cast;
+
+ if ($cast1 ne "" && $cast2 ne "" && $cast1 ne $cast2) {
+ $cast = "$cast1 or $cast2";
+ } elsif ($cast1 ne "") {
+ $cast = $cast1;
+ } else {
+ $cast = $cast2;
+ }
+ WARN("MINMAX",
+ "$call() should probably be ${call}_t($cast, $arg1, $arg2)\n" . "$here\n$stat\n");
+ }
+ }
+
+# check usleep_range arguments
+ if ($^V && $^V ge 5.10.0 &&
+ defined $stat &&
+ $stat =~ /^\+(?:.*?)\busleep_range\s*\(\s*($FuncArg)\s*,\s*($FuncArg)\s*\)/) {
+ my $min = $1;
+ my $max = $7;
+ if ($min eq $max) {
+ WARN("USLEEP_RANGE",
+ "usleep_range should not use min == max args; see Documentation/timers/timers-howto.txt\n" . "$here\n$stat\n");
+ } elsif ($min =~ /^\d+$/ && $max =~ /^\d+$/ &&
+ $min > $max) {
+ WARN("USLEEP_RANGE",
+ "usleep_range args reversed, use min then max; see Documentation/timers/timers-howto.txt\n" . "$here\n$stat\n");
+ }
+ }
+
+# check for naked sscanf
+ if ($^V && $^V ge 5.10.0 &&
+ defined $stat &&
+ $line =~ /\bsscanf\b/ &&
+ ($stat !~ /$Ident\s*=\s*sscanf\s*$balanced_parens/ &&
+ $stat !~ /\bsscanf\s*$balanced_parens\s*(?:$Compare)/ &&
+ $stat !~ /(?:$Compare)\s*\bsscanf\s*$balanced_parens/)) {
+ my $lc = $stat =~ tr@\n@@;
+ $lc = $lc + $linenr;
+ my $stat_real = get_stat_real($linenr, $lc);
+ WARN("NAKED_SSCANF",
+ "unchecked sscanf return value\n" . "$here\n$stat_real\n");
+ }
+
+# check for simple sscanf that should be kstrto<foo>
+ if ($^V && $^V ge 5.10.0 &&
+ defined $stat &&
+ $line =~ /\bsscanf\b/) {
+ my $lc = $stat =~ tr@\n@@;
+ $lc = $lc + $linenr;
+ my $stat_real = get_stat_real($linenr, $lc);
+ if ($stat_real =~ /\bsscanf\b\s*\(\s*$FuncArg\s*,\s*("[^"]+")/) {
+ my $format = $6;
+ my $count = $format =~ tr@%@%@;
+ if ($count == 1 &&
+ $format =~ /^"\%(?i:ll[udxi]|[udxi]ll|ll|[hl]h?[udxi]|[udxi][hl]h?|[hl]h?|[udxi])"$/) {
+ WARN("SSCANF_TO_KSTRTO",
+ "Prefer kstrto<type> to single variable sscanf\n" . "$here\n$stat_real\n");
+ }
+ }
+ }
+
+# check for new externs in .h files.
+ if ($realfile =~ /\.h$/ &&
+ $line =~ /^\+\s*(extern\s+)$Type\s*$Ident\s*\(/s) {
+ if (CHK("AVOID_EXTERNS",
+ "extern prototypes should be avoided in .h files\n" . $herecurr) &&
+ $fix) {
+ $fixed[$fixlinenr] =~ s/(.*)\bextern\b\s*(.*)/$1$2/;
+ }
+ }
+
+# check for new externs in .c files.
+ if ($realfile =~ /\.c$/ && defined $stat &&
+ $stat =~ /^.\s*(?:extern\s+)?$Type\s+($Ident)(\s*)\(/s)
+ {
+ my $function_name = $1;
+ my $paren_space = $2;
+
+ my $s = $stat;
+ if (defined $cond) {
+ substr($s, 0, length($cond), '');
+ }
+ if ($s =~ /^\s*;/ &&
+ $function_name ne 'uninitialized_var')
+ {
+ WARN("AVOID_EXTERNS",
+ "externs should be avoided in .c files\n" . $herecurr);
+ }
+
+ if ($paren_space =~ /\n/) {
+ WARN("FUNCTION_ARGUMENTS",
+ "arguments for function declarations should follow identifier\n" . $herecurr);
+ }
+
+ } elsif ($realfile =~ /\.c$/ && defined $stat &&
+ $stat =~ /^.\s*extern\s+/)
+ {
+ WARN("AVOID_EXTERNS",
+ "externs should be avoided in .c files\n" . $herecurr);
+ }
+
+# check for function declarations that have arguments without identifier names
+ if (defined $stat &&
+ $stat =~ /^.\s*(?:extern\s+)?$Type\s*(?:$Ident|\(\s*\*\s*$Ident\s*\))\s*\(\s*([^{]+)\s*\)\s*;/s &&
+ $1 ne "void") {
+ my $args = trim($1);
+ while ($args =~ m/\s*($Type\s*(?:$Ident|\(\s*\*\s*$Ident?\s*\)\s*$balanced_parens)?)/g) {
+ my $arg = trim($1);
+ if ($arg =~ /^$Type$/ && $arg !~ /enum\s+$Ident$/) {
+ WARN("FUNCTION_ARGUMENTS",
+ "function definition argument '$arg' should also have an identifier name\n" . $herecurr);
+ }
+ }
+ }
+
+# check for function definitions
+ if ($^V && $^V ge 5.10.0 &&
+ defined $stat &&
+ $stat =~ /^.\s*(?:$Storage\s+)?$Type\s*($Ident)\s*$balanced_parens\s*{/s) {
+ $context_function = $1;
+
+# check for multiline function definition with misplaced open brace
+ my $ok = 0;
+ my $cnt = statement_rawlines($stat);
+ my $herectx = $here . "\n";
+ for (my $n = 0; $n < $cnt; $n++) {
+ my $rl = raw_line($linenr, $n);
+ $herectx .= $rl . "\n";
+ $ok = 1 if ($rl =~ /^[ \+]\{/);
+ $ok = 1 if ($rl =~ /\{/ && $n == 0);
+ last if $rl =~ /^[ \+].*\{/;
+ }
+ if (!$ok) {
+ ERROR("OPEN_BRACE",
+ "open brace '{' following function definitions go on the next line\n" . $herectx);
+ }
+ }
+
+# checks for new __setup's
+ if ($rawline =~ /\b__setup\("([^"]*)"/) {
+ my $name = $1;
+
+ if (!grep(/$name/, @setup_docs)) {
+ CHK("UNDOCUMENTED_SETUP",
+ "__setup appears un-documented -- check Documentation/admin-guide/kernel-parameters.rst\n" . $herecurr);
+ }
+ }
+
+# check for pointless casting of kmalloc return
+ if ($line =~ /\*\s*\)\s*[kv][czm]alloc(_node){0,1}\b/) {
+ WARN("UNNECESSARY_CASTS",
+ "unnecessary cast may hide bugs, see http://c-faq.com/malloc/mallocnocast.html\n" . $herecurr);
+ }
+
+# alloc style
+# p = alloc(sizeof(struct foo), ...) should be p = alloc(sizeof(*p), ...)
+ if ($^V && $^V ge 5.10.0 &&
+ $line =~ /\b($Lval)\s*\=\s*(?:$balanced_parens)?\s*([kv][mz]alloc(?:_node)?)\s*\(\s*(sizeof\s*\(\s*struct\s+$Lval\s*\))/) {
+ CHK("ALLOC_SIZEOF_STRUCT",
+ "Prefer $3(sizeof(*$1)...) over $3($4...)\n" . $herecurr);
+ }
+
+# check for k[mz]alloc with multiplies that could be kmalloc_array/kcalloc
+ if ($^V && $^V ge 5.10.0 &&
+ defined $stat &&
+ $stat =~ /^\+\s*($Lval)\s*\=\s*(?:$balanced_parens)?\s*(k[mz]alloc)\s*\(\s*($FuncArg)\s*\*\s*($FuncArg)\s*,/) {
+ my $oldfunc = $3;
+ my $a1 = $4;
+ my $a2 = $10;
+ my $newfunc = "kmalloc_array";
+ $newfunc = "kcalloc" if ($oldfunc eq "kzalloc");
+ my $r1 = $a1;
+ my $r2 = $a2;
+ if ($a1 =~ /^sizeof\s*\S/) {
+ $r1 = $a2;
+ $r2 = $a1;
+ }
+ if ($r1 !~ /^sizeof\b/ && $r2 =~ /^sizeof\s*\S/ &&
+ !($r1 =~ /^$Constant$/ || $r1 =~ /^[A-Z_][A-Z0-9_]*$/)) {
+ my $cnt = statement_rawlines($stat);
+ my $herectx = get_stat_here($linenr, $cnt, $here);
+
+ if (WARN("ALLOC_WITH_MULTIPLY",
+ "Prefer $newfunc over $oldfunc with multiply\n" . $herectx) &&
+ $cnt == 1 &&
+ $fix) {
+ $fixed[$fixlinenr] =~ s/\b($Lval)\s*\=\s*(?:$balanced_parens)?\s*(k[mz]alloc)\s*\(\s*($FuncArg)\s*\*\s*($FuncArg)/$1 . ' = ' . "$newfunc(" . trim($r1) . ', ' . trim($r2)/e;
+ }
+ }
+ }
+
+# check for krealloc arg reuse
+ if ($^V && $^V ge 5.10.0 &&
+ $line =~ /\b($Lval)\s*\=\s*(?:$balanced_parens)?\s*krealloc\s*\(\s*\1\s*,/) {
+ WARN("KREALLOC_ARG_REUSE",
+ "Reusing the krealloc arg is almost always a bug\n" . $herecurr);
+ }
+
+# check for alloc argument mismatch
+ if ($line =~ /\b(kcalloc|kmalloc_array)\s*\(\s*sizeof\b/) {
+ WARN("ALLOC_ARRAY_ARGS",
+ "$1 uses number as first arg, sizeof is generally wrong\n" . $herecurr);
+ }
+
+# check for multiple semicolons
+ if ($line =~ /;\s*;\s*$/) {
+ if (WARN("ONE_SEMICOLON",
+ "Statements terminations use 1 semicolon\n" . $herecurr) &&
+ $fix) {
+ $fixed[$fixlinenr] =~ s/(\s*;\s*){2,}$/;/g;
+ }
+ }
+
+# check for #defines like: 1 << <digit> that could be BIT(digit), it is not exported to uapi
+ if ($realfile !~ m@^include/uapi/@ &&
+ $line =~ /#\s*define\s+\w+\s+\(?\s*1\s*([ulUL]*)\s*\<\<\s*(?:\d+|$Ident)\s*\)?/) {
+ my $ull = "";
+ $ull = "_ULL" if (defined($1) && $1 =~ /ll/i);
+ if (CHK("BIT_MACRO",
+ "Prefer using the BIT$ull macro\n" . $herecurr) &&
+ $fix) {
+ $fixed[$fixlinenr] =~ s/\(?\s*1\s*[ulUL]*\s*<<\s*(\d+|$Ident)\s*\)?/BIT${ull}($1)/;
+ }
+ }
+
+# check for #if defined CONFIG_<FOO> || defined CONFIG_<FOO>_MODULE
+ if ($line =~ /^\+\s*#\s*if\s+defined(?:\s*\(?\s*|\s+)(CONFIG_[A-Z_]+)\s*\)?\s*\|\|\s*defined(?:\s*\(?\s*|\s+)\1_MODULE\s*\)?\s*$/) {
+ my $config = $1;
+ if (WARN("PREFER_IS_ENABLED",
+ "Prefer IS_ENABLED(<FOO>) to CONFIG_<FOO> || CONFIG_<FOO>_MODULE\n" . $herecurr) &&
+ $fix) {
+ $fixed[$fixlinenr] = "\+#if IS_ENABLED($config)";
+ }
+ }
+
+# check for case / default statements not preceded by break/fallthrough/switch
+ if ($line =~ /^.\s*(?:case\s+(?:$Ident|$Constant)\s*|default):/) {
+ my $has_break = 0;
+ my $has_statement = 0;
+ my $count = 0;
+ my $prevline = $linenr;
+ while ($prevline > 1 && ($file || $count < 3) && !$has_break) {
+ $prevline--;
+ my $rline = $rawlines[$prevline - 1];
+ my $fline = $lines[$prevline - 1];
+ last if ($fline =~ /^\@\@/);
+ next if ($fline =~ /^\-/);
+ next if ($fline =~ /^.(?:\s*(?:case\s+(?:$Ident|$Constant)[\s$;]*|default):[\s$;]*)*$/);
+ $has_break = 1 if ($rline =~ /fall[\s_-]*(through|thru)/i);
+ next if ($fline =~ /^.[\s$;]*$/);
+ $has_statement = 1;
+ $count++;
+ $has_break = 1 if ($fline =~ /\bswitch\b|\b(?:break\s*;[\s$;]*$|exit\s*\(\b|return\b|goto\b|continue\b)/);
+ }
+ if (!$has_break && $has_statement) {
+ WARN("MISSING_BREAK",
+ "Possible switch case/default not preceded by break or fallthrough comment\n" . $herecurr);
+ }
+ }
+
+# check for switch/default statements without a break;
+ if ($^V && $^V ge 5.10.0 &&
+ defined $stat &&
+ $stat =~ /^\+[$;\s]*(?:case[$;\s]+\w+[$;\s]*:[$;\s]*|)*[$;\s]*\bdefault[$;\s]*:[$;\s]*;/g) {
+ my $cnt = statement_rawlines($stat);
+ my $herectx = get_stat_here($linenr, $cnt, $here);
+
+ WARN("DEFAULT_NO_BREAK",
+ "switch default: should use break\n" . $herectx);
+ }
+
+# check for gcc specific __FUNCTION__
+ if ($line =~ /\b__FUNCTION__\b/) {
+ if (WARN("USE_FUNC",
+ "__func__ should be used instead of gcc specific __FUNCTION__\n" . $herecurr) &&
+ $fix) {
+ $fixed[$fixlinenr] =~ s/\b__FUNCTION__\b/__func__/g;
+ }
+ }
+
+# check for uses of __DATE__, __TIME__, __TIMESTAMP__
+ while ($line =~ /\b(__(?:DATE|TIME|TIMESTAMP)__)\b/g) {
+ ERROR("DATE_TIME",
+ "Use of the '$1' macro makes the build non-deterministic\n" . $herecurr);
+ }
+
+# check for use of yield()
+ if ($line =~ /\byield\s*\(\s*\)/) {
+ WARN("YIELD",
+ "Using yield() is generally wrong. See yield() kernel-doc (sched/core.c)\n" . $herecurr);
+ }
+
+# check for comparisons against true and false
+ if ($line =~ /\+\s*(.*?)\b(true|false|$Lval)\s*(==|\!=)\s*(true|false|$Lval)\b(.*)$/i) {
+ my $lead = $1;
+ my $arg = $2;
+ my $test = $3;
+ my $otype = $4;
+ my $trail = $5;
+ my $op = "!";
+
+ ($arg, $otype) = ($otype, $arg) if ($arg =~ /^(?:true|false)$/i);
+
+ my $type = lc($otype);
+ if ($type =~ /^(?:true|false)$/) {
+ if (("$test" eq "==" && "$type" eq "true") ||
+ ("$test" eq "!=" && "$type" eq "false")) {
+ $op = "";
+ }
+
+ CHK("BOOL_COMPARISON",
+ "Using comparison to $otype is error prone\n" . $herecurr);
+
+## maybe suggesting a correct construct would be better
+## "Using comparison to $otype is error prone. Perhaps use '${lead}${op}${arg}${trail}'\n" . $herecurr);
+
+ }
+ }
+
+# check for bool bitfields
+ if ($sline =~ /^.\s+bool\s*$Ident\s*:\s*\d+\s*;/) {
+ WARN("BOOL_BITFIELD",
+ "Avoid using bool as bitfield. Prefer bool bitfields as unsigned int or u<8|16|32>\n" . $herecurr);
+ }
+
+# check for semaphores initialized locked
+ if ($line =~ /^.\s*sema_init.+,\W?0\W?\)/) {
+ WARN("CONSIDER_COMPLETION",
+ "consider using a completion\n" . $herecurr);
+ }
+
+# recommend kstrto* over simple_strto* and strict_strto*
+ if ($line =~ /\b((simple|strict)_(strto(l|ll|ul|ull)))\s*\(/) {
+ WARN("CONSIDER_KSTRTO",
+ "$1 is obsolete, use k$3 instead\n" . $herecurr);
+ }
+
+# check for __initcall(), use device_initcall() explicitly or more appropriate function please
+ if ($line =~ /^.\s*__initcall\s*\(/) {
+ WARN("USE_DEVICE_INITCALL",
+ "please use device_initcall() or more appropriate function instead of __initcall() (see include/linux/init.h)\n" . $herecurr);
+ }
+
+# check for various structs that are normally const (ops, kgdb, device_tree)
+# and avoid what seem like struct definitions 'struct foo {'
+ if ($line !~ /\bconst\b/ &&
+ $line =~ /\bstruct\s+($const_structs)\b(?!\s*\{)/) {
+ WARN("CONST_STRUCT",
+ "struct $1 should normally be const\n" . $herecurr);
+ }
+
+# use of NR_CPUS is usually wrong
+# ignore definitions of NR_CPUS and usage to define arrays as likely right
+ if ($line =~ /\bNR_CPUS\b/ &&
+ $line !~ /^.\s*\s*#\s*if\b.*\bNR_CPUS\b/ &&
+ $line !~ /^.\s*\s*#\s*define\b.*\bNR_CPUS\b/ &&
+ $line !~ /^.\s*$Declare\s.*\[[^\]]*NR_CPUS[^\]]*\]/ &&
+ $line !~ /\[[^\]]*\.\.\.[^\]]*NR_CPUS[^\]]*\]/ &&
+ $line !~ /\[[^\]]*NR_CPUS[^\]]*\.\.\.[^\]]*\]/)
+ {
+ WARN("NR_CPUS",
+ "usage of NR_CPUS is often wrong - consider using cpu_possible(), num_possible_cpus(), for_each_possible_cpu(), etc\n" . $herecurr);
+ }
+
+# Use of __ARCH_HAS_<FOO> or ARCH_HAVE_<BAR> is wrong.
+ if ($line =~ /\+\s*#\s*define\s+((?:__)?ARCH_(?:HAS|HAVE)\w*)\b/) {
+ ERROR("DEFINE_ARCH_HAS",
+ "#define of '$1' is wrong - use Kconfig variables or standard guards instead\n" . $herecurr);
+ }
+
+# likely/unlikely comparisons similar to "(likely(foo) > 0)"
+ if ($^V && $^V ge 5.10.0 &&
+ $line =~ /\b((?:un)?likely)\s*\(\s*$FuncArg\s*\)\s*$Compare/) {
+ WARN("LIKELY_MISUSE",
+ "Using $1 should generally have parentheses around the comparison\n" . $herecurr);
+ }
+
+# whine mightily about in_atomic
+ if ($line =~ /\bin_atomic\s*\(/) {
+ if ($realfile =~ m@^drivers/@) {
+ ERROR("IN_ATOMIC",
+ "do not use in_atomic in drivers\n" . $herecurr);
+ } elsif ($realfile !~ m@^kernel/@) {
+ WARN("IN_ATOMIC",
+ "use of in_atomic() is incorrect outside core kernel code\n" . $herecurr);
+ }
+ }
+
+# check for mutex_trylock_recursive usage
+ if ($line =~ /mutex_trylock_recursive/) {
+ ERROR("LOCKING",
+ "recursive locking is bad, do not use this ever.\n" . $herecurr);
+ }
+
+# check for lockdep_set_novalidate_class
+ if ($line =~ /^.\s*lockdep_set_novalidate_class\s*\(/ ||
+ $line =~ /__lockdep_no_validate__\s*\)/ ) {
+ if ($realfile !~ m@^kernel/lockdep@ &&
+ $realfile !~ m@^include/linux/lockdep@ &&
+ $realfile !~ m@^drivers/base/core@) {
+ ERROR("LOCKDEP",
+ "lockdep_no_validate class is reserved for device->mutex.\n" . $herecurr);
+ }
+ }
+
+ if ($line =~ /debugfs_create_\w+.*\b$mode_perms_world_writable\b/ ||
+ $line =~ /DEVICE_ATTR.*\b$mode_perms_world_writable\b/) {
+ WARN("EXPORTED_WORLD_WRITABLE",
+ "Exporting world writable files is usually an error. Consider more restrictive permissions.\n" . $herecurr);
+ }
+
+# check for DEVICE_ATTR uses that could be DEVICE_ATTR_<FOO>
+# and whether or not function naming is typical and if
+# DEVICE_ATTR permissions uses are unusual too
+ if ($^V && $^V ge 5.10.0 &&
+ defined $stat &&
+ $stat =~ /\bDEVICE_ATTR\s*\(\s*(\w+)\s*,\s*\(?\s*(\s*(?:${multi_mode_perms_string_search}|0[0-7]{3,3})\s*)\s*\)?\s*,\s*(\w+)\s*,\s*(\w+)\s*\)/) {
+ my $var = $1;
+ my $perms = $2;
+ my $show = $3;
+ my $store = $4;
+ my $octal_perms = perms_to_octal($perms);
+ if ($show =~ /^${var}_show$/ &&
+ $store =~ /^${var}_store$/ &&
+ $octal_perms eq "0644") {
+ if (WARN("DEVICE_ATTR_RW",
+ "Use DEVICE_ATTR_RW\n" . $herecurr) &&
+ $fix) {
+ $fixed[$fixlinenr] =~ s/\bDEVICE_ATTR\s*\(\s*$var\s*,\s*\Q$perms\E\s*,\s*$show\s*,\s*$store\s*\)/DEVICE_ATTR_RW(${var})/;
+ }
+ } elsif ($show =~ /^${var}_show$/ &&
+ $store =~ /^NULL$/ &&
+ $octal_perms eq "0444") {
+ if (WARN("DEVICE_ATTR_RO",
+ "Use DEVICE_ATTR_RO\n" . $herecurr) &&
+ $fix) {
+ $fixed[$fixlinenr] =~ s/\bDEVICE_ATTR\s*\(\s*$var\s*,\s*\Q$perms\E\s*,\s*$show\s*,\s*NULL\s*\)/DEVICE_ATTR_RO(${var})/;
+ }
+ } elsif ($show =~ /^NULL$/ &&
+ $store =~ /^${var}_store$/ &&
+ $octal_perms eq "0200") {
+ if (WARN("DEVICE_ATTR_WO",
+ "Use DEVICE_ATTR_WO\n" . $herecurr) &&
+ $fix) {
+ $fixed[$fixlinenr] =~ s/\bDEVICE_ATTR\s*\(\s*$var\s*,\s*\Q$perms\E\s*,\s*NULL\s*,\s*$store\s*\)/DEVICE_ATTR_WO(${var})/;
+ }
+ } elsif ($octal_perms eq "0644" ||
+ $octal_perms eq "0444" ||
+ $octal_perms eq "0200") {
+ my $newshow = "$show";
+ $newshow = "${var}_show" if ($show ne "NULL" && $show ne "${var}_show");
+ my $newstore = $store;
+ $newstore = "${var}_store" if ($store ne "NULL" && $store ne "${var}_store");
+ my $rename = "";
+ if ($show ne $newshow) {
+ $rename .= " '$show' to '$newshow'";
+ }
+ if ($store ne $newstore) {
+ $rename .= " '$store' to '$newstore'";
+ }
+ WARN("DEVICE_ATTR_FUNCTIONS",
+ "Consider renaming function(s)$rename\n" . $herecurr);
+ } else {
+ WARN("DEVICE_ATTR_PERMS",
+ "DEVICE_ATTR unusual permissions '$perms' used\n" . $herecurr);
+ }
+ }
+
+# Mode permission misuses where it seems decimal should be octal
+# This uses a shortcut match to avoid unnecessary uses of a slow foreach loop
+# o Ignore module_param*(...) uses with a decimal 0 permission as that has a
+# specific definition of not visible in sysfs.
+# o Ignore proc_create*(...) uses with a decimal 0 permission as that means
+# use the default permissions
+ if ($^V && $^V ge 5.10.0 &&
+ defined $stat &&
+ $line =~ /$mode_perms_search/) {
+ foreach my $entry (@mode_permission_funcs) {
+ my $func = $entry->[0];
+ my $arg_pos = $entry->[1];
+
+ my $lc = $stat =~ tr@\n@@;
+ $lc = $lc + $linenr;
+ my $stat_real = get_stat_real($linenr, $lc);
+
+ my $skip_args = "";
+ if ($arg_pos > 1) {
+ $arg_pos--;
+ $skip_args = "(?:\\s*$FuncArg\\s*,\\s*){$arg_pos,$arg_pos}";
+ }
+ my $test = "\\b$func\\s*\\(${skip_args}($FuncArg(?:\\|\\s*$FuncArg)*)\\s*[,\\)]";
+ if ($stat =~ /$test/) {
+ my $val = $1;
+ $val = $6 if ($skip_args ne "");
+ if (!($func =~ /^(?:module_param|proc_create)/ && $val eq "0") &&
+ (($val =~ /^$Int$/ && $val !~ /^$Octal$/) ||
+ ($val =~ /^$Octal$/ && length($val) ne 4))) {
+ ERROR("NON_OCTAL_PERMISSIONS",
+ "Use 4 digit octal (0777) not decimal permissions\n" . "$here\n" . $stat_real);
+ }
+ if ($val =~ /^$Octal$/ && (oct($val) & 02)) {
+ ERROR("EXPORTED_WORLD_WRITABLE",
+ "Exporting writable files is usually an error. Consider more restrictive permissions.\n" . "$here\n" . $stat_real);
+ }
+ }
+ }
+ }
+
+# check for uses of S_<PERMS> that could be octal for readability
+ while ($line =~ m{\b($multi_mode_perms_string_search)\b}g) {
+ my $oval = $1;
+ my $octal = perms_to_octal($oval);
+ if (WARN("SYMBOLIC_PERMS",
+ "Symbolic permissions '$oval' are not preferred. Consider using octal permissions '$octal'.\n" . $herecurr) &&
+ $fix) {
+ $fixed[$fixlinenr] =~ s/\Q$oval\E/$octal/;
+ }
+ }
+
+# validate content of MODULE_LICENSE against list from include/linux/module.h
+ if ($line =~ /\bMODULE_LICENSE\s*\(\s*($String)\s*\)/) {
+ my $extracted_string = get_quoted_string($line, $rawline);
+ my $valid_licenses = qr{
+ GPL|
+ GPL\ v2|
+ GPL\ and\ additional\ rights|
+ Dual\ BSD/GPL|
+ Dual\ MIT/GPL|
+ Dual\ MPL/GPL|
+ Proprietary
+ }x;
+ if ($extracted_string !~ /^"(?:$valid_licenses)"$/x) {
+ WARN("MODULE_LICENSE",
+ "unknown module license " . $extracted_string . "\n" . $herecurr);
+ }
+ }
+ }
+
+ # If we have no input at all, then there is nothing to report on
+ # so just keep quiet.
+ if ($#rawlines == -1) {
+ exit(0);
+ }
+
+ # In mailback mode only produce a report in the negative, for
+ # things that appear to be patches.
+ if ($mailback && ($clean == 1 || !$is_patch)) {
+ exit(0);
+ }
+
+	# This is not a patch, and we are in 'no-patch' mode so
+ # just keep quiet.
+ if (!$chk_patch && !$is_patch) {
+ exit(0);
+ }
+
+ if (!$is_patch && $filename !~ /cover-letter\.patch$/) {
+ ERROR("NOT_UNIFIED_DIFF",
+ "Does not appear to be a unified-diff format patch\n");
+ }
+ if ($is_patch && $has_commit_log && $chk_signoff && $signoff == 0) {
+ ERROR("MISSING_SIGN_OFF",
+ "Missing Signed-off-by: line(s)\n");
+ }
+
+ print report_dump();
+ if ($summary && !($clean == 1 && $quiet == 1)) {
+ print "$filename " if ($summary_file);
+ print "total: $cnt_error errors, $cnt_warn warnings, " .
+ (($check)? "$cnt_chk checks, " : "") .
+ "$cnt_lines lines checked\n";
+ }
+
+ if ($quiet == 0) {
+ # If there were any defects found and not already fixing them
+ if (!$clean and !$fix) {
+ print << "EOM"
+
+NOTE: For some of the reported defects, checkpatch may be able to
+ mechanically convert to the typical style using --fix or --fix-inplace.
+EOM
+ }
+ # If there were whitespace errors which cleanpatch can fix
+ # then suggest that.
+ if ($rpt_cleaners) {
+ $rpt_cleaners = 0;
+ print << "EOM"
+
+NOTE: Whitespace errors detected.
+ You may wish to use scripts/cleanpatch or scripts/cleanfile
+EOM
+ }
+ }
+
+ if ($clean == 0 && $fix &&
+ ("@rawlines" ne "@fixed" ||
+ $#fixed_inserted >= 0 || $#fixed_deleted >= 0)) {
+ my $newfile = $filename;
+ $newfile .= ".EXPERIMENTAL-checkpatch-fixes" if (!$fix_inplace);
+ my $linecount = 0;
+ my $f;
+
+ @fixed = fix_inserted_deleted_lines(\@fixed, \@fixed_inserted, \@fixed_deleted);
+
+ open($f, '>', $newfile)
+ or die "$P: Can't open $newfile for write\n";
+ foreach my $fixed_line (@fixed) {
+ $linecount++;
+ if ($file) {
+ if ($linecount > 3) {
+ $fixed_line =~ s/^\+//;
+ print $f $fixed_line . "\n";
+ }
+ } else {
+ print $f $fixed_line . "\n";
+ }
+ }
+ close($f);
+
+ if (!$quiet) {
+ print << "EOM";
+
+Wrote EXPERIMENTAL --fix correction(s) to '$newfile'
+
+Do _NOT_ trust the results written to this file.
+Do _NOT_ submit these changes without inspecting them for correctness.
+
+This EXPERIMENTAL file is simply a convenience to help rewrite patches.
+No warranties, expressed or implied...
+EOM
+ }
+ }
+
+ if ($quiet == 0) {
+ print "\n";
+ if ($clean == 1) {
+ print "$vname has no obvious style problems and is ready for submission.\n";
+ } else {
+ print "$vname has style problems, please review.\n";
+ }
+ }
+ return $clean;
+}
diff --git a/script/static-checks/const_structs.checkpatch b/script/static-checks/const_structs.checkpatch
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/script/static-checks/const_structs.checkpatch
diff --git a/script/static-checks/spelling.txt b/script/static-checks/spelling.txt
new file mode 100644
index 0000000..9a058cf
--- /dev/null
+++ b/script/static-checks/spelling.txt
@@ -0,0 +1,1254 @@
+# Originally from Debian's Lintian tool. Various false positives have been
+# removed, and various additions have been made as they've been discovered
+# in the kernel source.
+#
+# License: GPLv2
+#
+# The format of each line is:
+# mistake||correction
+#
+abandonning||abandoning
+abigious||ambiguous
+abitrate||arbitrate
+abov||above
+abreviated||abbreviated
+absense||absence
+absolut||absolute
+absoulte||absolute
+acccess||access
+acceess||access
+acceleratoin||acceleration
+accelleration||acceleration
+accesing||accessing
+accesnt||accent
+accessable||accessible
+accesss||access
+accidentaly||accidentally
+accidentually||accidentally
+accoding||according
+accomodate||accommodate
+accomodates||accommodates
+accordign||according
+accoring||according
+accout||account
+accquire||acquire
+accquired||acquired
+accross||across
+acessable||accessible
+acess||access
+achitecture||architecture
+acient||ancient
+acitions||actions
+acitve||active
+acknowldegement||acknowledgment
+acknowledgement||acknowledgment
+ackowledge||acknowledge
+ackowledged||acknowledged
+acording||according
+activete||activate
+actived||activated
+actualy||actually
+acumulating||accumulating
+acumulator||accumulator
+adapater||adapter
+addional||additional
+additionaly||additionally
+additonal||additional
+addres||address
+adddress||address
+addreses||addresses
+addresss||address
+aditional||additional
+aditionally||additionally
+aditionaly||additionally
+adminstrative||administrative
+adress||address
+adresses||addresses
+adviced||advised
+afecting||affecting
+againt||against
+agaist||against
+aggreataon||aggregation
+aggreation||aggregation
+albumns||albums
+alegorical||allegorical
+algined||aligned
+algorith||algorithm
+algorithmical||algorithmically
+algoritm||algorithm
+algoritms||algorithms
+algorrithm||algorithm
+algorritm||algorithm
+aligment||alignment
+alignement||alignment
+allign||align
+alligned||aligned
+alllocate||allocate
+alloated||allocated
+allocatote||allocate
+allocatrd||allocated
+allocte||allocate
+allpication||application
+alocate||allocate
+alogirhtms||algorithms
+alogrithm||algorithm
+alot||a lot
+alow||allow
+alows||allows
+altough||although
+alue||value
+ambigious||ambiguous
+amoung||among
+amout||amount
+an union||a union
+an user||a user
+an userspace||a userspace
+an one||a one
+analysator||analyzer
+ang||and
+anniversery||anniversary
+annoucement||announcement
+anomolies||anomalies
+anomoly||anomaly
+anway||anyway
+aplication||application
+appearence||appearance
+applicaion||application
+appliction||application
+applictions||applications
+applys||applies
+appplications||applications
+appropiate||appropriate
+appropriatly||appropriately
+approriate||appropriate
+approriately||appropriately
+apropriate||appropriate
+aquainted||acquainted
+aquired||acquired
+aquisition||acquisition
+arbitary||arbitrary
+architechture||architecture
+arguement||argument
+arguements||arguments
+aritmetic||arithmetic
+arne't||aren't
+arraival||arrival
+artifical||artificial
+artillary||artillery
+asign||assign
+asser||assert
+assertation||assertion
+assiged||assigned
+assigment||assignment
+assigments||assignments
+assistent||assistant
+assocation||association
+associcated||associated
+assotiated||associated
+assum||assume
+assumtpion||assumption
+asuming||assuming
+asycronous||asynchronous
+asynchnous||asynchronous
+atomatically||automatically
+atomicly||atomically
+atempt||attempt
+attachement||attachment
+attched||attached
+attemps||attempts
+attemping||attempting
+attruibutes||attributes
+authentification||authentication
+automaticaly||automatically
+automaticly||automatically
+automatize||automate
+automatized||automated
+automatizes||automates
+autonymous||autonomous
+auxillary||auxiliary
+auxilliary||auxiliary
+avaiable||available
+avaible||available
+availabe||available
+availabled||available
+availablity||availability
+availale||available
+availavility||availability
+availble||available
+availiable||available
+availible||available
+avalable||available
+avaliable||available
+aysnc||async
+backgroud||background
+backword||backward
+backwords||backwards
+bahavior||behavior
+bakup||backup
+baloon||balloon
+baloons||balloons
+bandwith||bandwidth
+banlance||balance
+batery||battery
+beacuse||because
+becasue||because
+becomming||becoming
+becuase||because
+beeing||being
+befor||before
+begining||beginning
+beter||better
+betweeen||between
+bianries||binaries
+bitmast||bitmask
+boardcast||broadcast
+borad||board
+boundry||boundary
+brievely||briefly
+broadcat||broadcast
+cacluated||calculated
+caculation||calculation
+calender||calendar
+calescing||coalescing
+calle||called
+callibration||calibration
+calucate||calculate
+calulate||calculate
+cancelation||cancellation
+cancle||cancel
+capabilites||capabilities
+capabilty||capability
+capabitilies||capabilities
+capatibilities||capabilities
+capapbilities||capabilities
+carefuly||carefully
+cariage||carriage
+catagory||category
+cehck||check
+challange||challenge
+challanges||challenges
+chanell||channel
+changable||changeable
+chanined||chained
+channle||channel
+channnel||channel
+charachter||character
+charachters||characters
+charactor||character
+charater||character
+charaters||characters
+charcter||character
+chcek||check
+chck||check
+checksuming||checksumming
+childern||children
+childs||children
+chiled||child
+chked||checked
+chnage||change
+chnages||changes
+chnnel||channel
+choosen||chosen
+chouse||chose
+circumvernt||circumvent
+claread||cleared
+clared||cleared
+closeing||closing
+clustred||clustered
+coexistance||coexistence
+collapsable||collapsible
+colorfull||colorful
+comand||command
+comit||commit
+commerical||commercial
+comming||coming
+comminucation||communication
+commited||committed
+commiting||committing
+committ||commit
+commoditiy||commodity
+comsume||consume
+comsumer||consumer
+comsuming||consuming
+compability||compatibility
+compaibility||compatibility
+compatability||compatibility
+compatable||compatible
+compatibiliy||compatibility
+compatibilty||compatibility
+compatiblity||compatibility
+competion||completion
+compilant||compliant
+compleatly||completely
+completition||completion
+completly||completely
+complient||compliant
+componnents||components
+compoment||component
+compres||compress
+compresion||compression
+comression||compression
+comunication||communication
+conbination||combination
+conditionaly||conditionally
+conected||connected
+connecetd||connected
+configuartion||configuration
+configuratoin||configuration
+configuraton||configuration
+configuretion||configuration
+configutation||configuration
+conider||consider
+conjuction||conjunction
+connectinos||connections
+connnection||connection
+connnections||connections
+consistancy||consistency
+consistant||consistent
+containes||contains
+containts||contains
+contaisn||contains
+contant||contact
+contence||contents
+continious||continuous
+continous||continuous
+continously||continuously
+continueing||continuing
+contraints||constraints
+contol||control
+contoller||controller
+controled||controlled
+controler||controller
+controll||control
+contruction||construction
+contry||country
+conuntry||country
+convertion||conversion
+convertor||converter
+convienient||convenient
+convinient||convenient
+corected||corrected
+correponding||corresponding
+correponds||corresponds
+correspoding||corresponding
+cotrol||control
+cound||could
+couter||counter
+coutner||counter
+cryptocraphic||cryptographic
+cunter||counter
+curently||currently
+cylic||cyclic
+dafault||default
+deafult||default
+deamon||daemon
+decompres||decompress
+decription||description
+dectected||detected
+defailt||default
+defferred||deferred
+definate||definite
+definately||definitely
+defintion||definition
+defintions||definitions
+defualt||default
+defult||default
+deintializing||deinitializing
+deintialize||deinitialize
+deintialized||deinitialized
+deivce||device
+delared||declared
+delare||declare
+delares||declares
+delaring||declaring
+delemiter||delimiter
+demodualtor||demodulator
+demension||dimension
+dependancies||dependencies
+dependancy||dependency
+dependant||dependent
+depreacted||deprecated
+depreacte||deprecate
+desactivate||deactivate
+desciptor||descriptor
+desciptors||descriptors
+descripton||description
+descrition||description
+descritptor||descriptor
+desctiptor||descriptor
+desriptor||descriptor
+desriptors||descriptors
+destionation||destination
+destory||destroy
+destoryed||destroyed
+destorys||destroys
+destroied||destroyed
+detabase||database
+deteced||detected
+develope||develop
+developement||development
+developped||developed
+developpement||development
+developper||developer
+developpment||development
+deveolpment||development
+devided||divided
+deviece||device
+diable||disable
+dictionnary||dictionary
+didnt||didn't
+diferent||different
+differrence||difference
+diffrent||different
+diffrentiate||differentiate
+difinition||definition
+dimesions||dimensions
+diplay||display
+direectly||directly
+disassocation||disassociation
+disapear||disappear
+disapeared||disappeared
+disappared||disappeared
+disble||disable
+disbled||disabled
+disconnet||disconnect
+discontinous||discontinuous
+dispertion||dispersion
+dissapears||disappears
+distiction||distinction
+docuentation||documentation
+documantation||documentation
+documentaion||documentation
+documment||document
+doesnt||doesn't
+dorp||drop
+dosen||doesn
+downlad||download
+downlads||downloads
+druing||during
+dynmaic||dynamic
+easilly||easily
+ecspecially||especially
+edditable||editable
+editting||editing
+efective||effective
+efficently||efficiently
+ehther||ether
+eigth||eight
+elementry||elementary
+eletronic||electronic
+embeded||embedded
+enabledi||enabled
+enchanced||enhanced
+encorporating||incorporating
+encrupted||encrypted
+encrypiton||encryption
+encryptio||encryption
+endianess||endianness
+enhaced||enhanced
+enlightnment||enlightenment
+entrys||entries
+enocded||encoded
+enterily||entirely
+enviroiment||environment
+enviroment||environment
+environement||environment
+environent||environment
+eqivalent||equivalent
+equiped||equipped
+equivelant||equivalent
+equivilant||equivalent
+eror||error
+errorr||error
+estbalishment||establishment
+etsablishment||establishment
+etsbalishment||establishment
+excecutable||executable
+exceded||exceeded
+excellant||excellent
+exeed||exceed
+existance||existence
+existant||existent
+exixt||exist
+exlcude||exclude
+exlcusive||exclusive
+exmaple||example
+expecially||especially
+explicite||explicit
+explicitely||explicitly
+explict||explicit
+explictely||explicitly
+explictly||explicitly
+expresion||expression
+exprimental||experimental
+extened||extended
+extensability||extensibility
+extention||extension
+extracter||extractor
+falied||failed
+faild||failed
+faill||fail
+failied||failed
+faillure||failure
+failue||failure
+failuer||failure
+failng||failing
+faireness||fairness
+fialed||failed
+faliure||failure
+fallbck||fallback
+familar||familiar
+fatser||faster
+feauture||feature
+feautures||features
+fetaure||feature
+fetaures||features
+fileystem||filesystem
+fimware||firmware
+firware||firmware
+finanize||finalize
+findn||find
+finilizes||finalizes
+finsih||finish
+flusing||flushing
+folloing||following
+followign||following
+followings||following
+follwing||following
+fonud||found
+forseeable||foreseeable
+forse||force
+fortan||fortran
+forwardig||forwarding
+framming||framing
+framwork||framework
+frequncy||frequency
+frome||from
+fucntion||function
+fuction||function
+fuctions||functions
+funcion||function
+functionallity||functionality
+functionaly||functionally
+functionnality||functionality
+functonality||functionality
+funtion||function
+funtions||functions
+furthur||further
+futhermore||furthermore
+futrue||future
+gaurenteed||guaranteed
+generiously||generously
+genereate||generate
+genric||generic
+globel||global
+grabing||grabbing
+grahical||graphical
+grahpical||graphical
+grapic||graphic
+grranted||granted
+guage||gauge
+guarenteed||guaranteed
+guarentee||guarantee
+halfs||halves
+hander||handler
+handfull||handful
+hanled||handled
+happend||happened
+harware||hardware
+heirarchically||hierarchically
+helpfull||helpful
+hybernate||hibernate
+hierachy||hierarchy
+hierarchie||hierarchy
+howver||however
+hsould||should
+hypervior||hypervisor
+hypter||hyper
+identidier||identifier
+iligal||illegal
+illigal||illegal
+imblance||imbalance
+immeadiately||immediately
+immedaite||immediate
+immediatelly||immediately
+immediatly||immediately
+immidiate||immediate
+impelentation||implementation
+impementated||implemented
+implemantation||implementation
+implemenation||implementation
+implementaiton||implementation
+implementated||implemented
+implemention||implementation
+implementd||implemented
+implemetation||implementation
+implemntation||implementation
+implentation||implementation
+implmentation||implementation
+implmenting||implementing
+incative||inactive
+incomming||incoming
+incompatabilities||incompatibilities
+incompatable||incompatible
+inconsistant||inconsistent
+increas||increase
+incremeted||incremented
+incrment||increment
+indendation||indentation
+indended||intended
+independant||independent
+independantly||independently
+independed||independent
+indiate||indicate
+indicat||indicate
+inexpect||inexpected
+infomation||information
+informatiom||information
+informations||information
+informtion||information
+infromation||information
+ingore||ignore
+inital||initial
+initalized||initialized
+initalised||initialized
+initalise||initialize
+initalize||initialize
+initation||initiation
+initators||initiators
+initialiazation||initialization
+initializiation||initialization
+initialzed||initialized
+initilization||initialization
+initilize||initialize
+inofficial||unofficial
+insititute||institute
+instal||install
+instanciated||instantiated
+inteface||interface
+integreated||integrated
+integrety||integrity
+integrey||integrity
+intendet||intended
+intented||intended
+interanl||internal
+interchangable||interchangeable
+interferring||interfering
+interger||integer
+intermittant||intermittent
+internel||internal
+interoprability||interoperability
+interuupt||interrupt
+interrface||interface
+interrrupt||interrupt
+interrup||interrupt
+interrups||interrupts
+interruptted||interrupted
+interupted||interrupted
+interupt||interrupt
+intial||initial
+intialisation||initialisation
+intialised||initialised
+intialise||initialise
+intialization||initialization
+intialized||initialized
+intialize||initialize
+intregral||integral
+intrrupt||interrupt
+intterrupt||interrupt
+intuative||intuitive
+invaid||invalid
+invald||invalid
+invalde||invalid
+invalide||invalid
+invalidiate||invalidate
+invalud||invalid
+invididual||individual
+invokation||invocation
+invokations||invocations
+irrelevent||irrelevant
+isnt||isn't
+isssue||issue
+iternations||iterations
+itertation||iteration
+itslef||itself
+jave||java
+jeffies||jiffies
+juse||just
+jus||just
+kown||known
+langage||language
+langauage||language
+langauge||language
+langugage||language
+lauch||launch
+layed||laid
+leightweight||lightweight
+lengh||length
+lenght||length
+lenth||length
+lesstiff||lesstif
+libaries||libraries
+libary||library
+librairies||libraries
+libraris||libraries
+licenceing||licencing
+loggging||logging
+loggin||login
+logile||logfile
+loosing||losing
+losted||lost
+machinary||machinery
+maintainance||maintenance
+maintainence||maintenance
+maintan||maintain
+makeing||making
+malplaced||misplaced
+malplace||misplace
+managable||manageable
+managment||management
+mangement||management
+manoeuvering||maneuvering
+mappping||mapping
+mathimatical||mathematical
+mathimatic||mathematic
+mathimatics||mathematics
+maxium||maximum
+mechamism||mechanism
+meetign||meeting
+ment||meant
+mergable||mergeable
+mesage||message
+messags||messages
+messgaes||messages
+messsage||message
+messsages||messages
+micropone||microphone
+microprocesspr||microprocessor
+milliseonds||milliseconds
+minium||minimum
+minimam||minimum
+minumum||minimum
+misalinged||misaligned
+miscelleneous||miscellaneous
+misformed||malformed
+mispelled||misspelled
+mispelt||misspelt
+mising||missing
+mismactch||mismatch
+missmanaged||mismanaged
+missmatch||mismatch
+miximum||maximum
+mmnemonic||mnemonic
+mnay||many
+modulues||modules
+momery||memory
+memomry||memory
+monochorome||monochrome
+monochromo||monochrome
+monocrome||monochrome
+mopdule||module
+mroe||more
+mulitplied||multiplied
+multidimensionnal||multidimensional
+multple||multiple
+mumber||number
+muticast||multicast
+mutilcast||multicast
+mutiple||multiple
+mutli||multi
+nams||names
+navagating||navigating
+nead||need
+neccecary||necessary
+neccesary||necessary
+neccessary||necessary
+necesary||necessary
+neded||needed
+negaive||negative
+negoitation||negotiation
+negotation||negotiation
+nerver||never
+nescessary||necessary
+nessessary||necessary
+noticable||noticeable
+notications||notifications
+notifed||notified
+numebr||number
+numner||number
+obtaion||obtain
+occassionally||occasionally
+occationally||occasionally
+occurance||occurrence
+occurances||occurrences
+occured||occurred
+occurence||occurrence
+occure||occur
+ocurred||occurred
+occuring||occurring
+offet||offset
+omited||omitted
+omiting||omitting
+omitt||omit
+ommiting||omitting
+ommitted||omitted
+onself||oneself
+ony||only
+operatione||operation
+opertaions||operations
+optionnal||optional
+optmizations||optimizations
+orientatied||orientated
+orientied||oriented
+orignal||original
+otherise||otherwise
+ouput||output
+oustanding||outstanding
+overaall||overall
+overhread||overhead
+overlaping||overlapping
+overide||override
+overrided||overridden
+overriden||overridden
+overun||overrun
+overwritting||overwriting
+overwriten||overwritten
+pacakge||package
+pachage||package
+packacge||package
+packege||package
+packge||package
+packtes||packets
+pakage||package
+pallette||palette
+paln||plan
+paramameters||parameters
+paramaters||parameters
+paramater||parameter
+parametes||parameters
+parametised||parametrised
+paramter||parameter
+paramters||parameters
+particuarly||particularly
+particularily||particularly
+partiton||partition
+pased||passed
+passin||passing
+pathes||paths
+pecularities||peculiarities
+peformance||performance
+peice||piece
+pendantic||pedantic
+peprocessor||preprocessor
+perfoming||performing
+permissons||permissions
+peroid||period
+persistance||persistence
+persistant||persistent
+plalform||platform
+platfrom||platform
+plattform||platform
+pleaes||please
+ploting||plotting
+plugable||pluggable
+poinnter||pointer
+pointeur||pointer
+poiter||pointer
+posible||possible
+positon||position
+possibilites||possibilities
+powerfull||powerful
+preample||preamble
+preapre||prepare
+preceeded||preceded
+preceeding||preceding
+preceed||precede
+precendence||precedence
+precission||precision
+preemptable||preemptible
+prefered||preferred
+prefferably||preferably
+premption||preemption
+prepaired||prepared
+pressre||pressure
+primative||primitive
+princliple||principle
+priorty||priority
+privilaged||privileged
+privilage||privilege
+priviledge||privilege
+priviledges||privileges
+probaly||probably
+procceed||proceed
+proccesors||processors
+procesed||processed
+proces||process
+procesing||processing
+processessing||processing
+processess||processes
+processpr||processor
+processsed||processed
+processsing||processing
+procteted||protected
+prodecure||procedure
+progams||programs
+progess||progress
+programers||programmers
+programm||program
+programms||programs
+progresss||progress
+promiscous||promiscuous
+promps||prompts
+pronnounced||pronounced
+prononciation||pronunciation
+pronouce||pronounce
+pronunce||pronounce
+propery||property
+propigate||propagate
+propigation||propagation
+propogate||propagate
+prosess||process
+protable||portable
+protcol||protocol
+protecion||protection
+protocoll||protocol
+promixity||proximity
+psudo||pseudo
+psuedo||pseudo
+psychadelic||psychedelic
+pwoer||power
+quering||querying
+randomally||randomly
+raoming||roaming
+reasearcher||researcher
+reasearchers||researchers
+reasearch||research
+recepient||recipient
+receving||receiving
+recieved||received
+recieve||receive
+reciever||receiver
+recieves||receives
+recogniced||recognised
+recognizeable||recognizable
+recommanded||recommended
+recyle||recycle
+redircet||redirect
+redirectrion||redirection
+reename||rename
+refcounf||refcount
+refence||reference
+refered||referred
+referenace||reference
+refering||referring
+refernces||references
+refernnce||reference
+refrence||reference
+registerd||registered
+registeresd||registered
+registerred||registered
+registes||registers
+registraration||registration
+regsiter||register
+regster||register
+regualar||regular
+reguator||regulator
+regulamentations||regulations
+reigstration||registration
+releated||related
+relevent||relevant
+remoote||remote
+remore||remote
+removeable||removable
+repectively||respectively
+replacable||replaceable
+replacments||replacements
+replys||replies
+reponse||response
+representaion||representation
+reqeust||request
+requestied||requested
+requiere||require
+requirment||requirement
+requred||required
+requried||required
+requst||request
+reseting||resetting
+resizeable||resizable
+resouce||resource
+resouces||resources
+resoures||resources
+responce||response
+ressizes||resizes
+ressource||resource
+ressources||resources
+retransmited||retransmitted
+retreived||retrieved
+retreive||retrieve
+retrive||retrieve
+retuned||returned
+reudce||reduce
+reuest||request
+reuqest||request
+reutnred||returned
+revsion||revision
+rmeoved||removed
+rmeove||remove
+rmeoves||removes
+rountine||routine
+routins||routines
+rquest||request
+runing||running
+runned||ran
+runnning||running
+runtine||runtime
+sacrifying||sacrificing
+safly||safely
+safty||safety
+savable||saveable
+scaned||scanned
+scaning||scanning
+scarch||search
+seach||search
+searchs||searches
+secquence||sequence
+secund||second
+segement||segment
+senarios||scenarios
+sentivite||sensitive
+separatly||separately
+sepcify||specify
+sepc||spec
+seperated||separated
+seperately||separately
+seperate||separate
+seperatly||separately
+seperator||separator
+sepperate||separate
+sequece||sequence
+sequencial||sequential
+serveral||several
+setts||sets
+settting||setting
+shotdown||shutdown
+shoud||should
+shouldnt||shouldn't
+shoule||should
+shrinked||shrunk
+siginificantly||significantly
+signabl||signal
+similary||similarly
+similiar||similar
+simlar||similar
+simliar||similar
+simpified||simplified
+singaled||signaled
+singal||signal
+singed||signed
+sleeped||slept
+softwares||software
+speach||speech
+specfic||specific
+speciefied||specified
+specifc||specific
+specifed||specified
+specificatin||specification
+specificaton||specification
+specifing||specifying
+specifiying||specifying
+speficied||specified
+speicify||specify
+speling||spelling
+spinlcok||spinlock
+spinock||spinlock
+splitted||split
+spreaded||spread
+spurrious||spurious
+sructure||structure
+stablilization||stabilization
+staically||statically
+staion||station
+standardss||standards
+standartization||standardization
+standart||standard
+staticly||statically
+stoped||stopped
+stoppped||stopped
+straming||streaming
+struc||struct
+structres||structures
+stuct||struct
+strucuture||structure
+stucture||structure
+sturcture||structure
+subdirectoires||subdirectories
+suble||subtle
+substract||subtract
+submition||submission
+succesfully||successfully
+succesful||successful
+successed||succeeded
+successfull||successful
+successfuly||successfully
+sucessfully||successfully
+sucess||success
+superflous||superfluous
+superseeded||superseded
+suplied||supplied
+suported||supported
+suport||support
+supportet||supported
+suppored||supported
+supportin||supporting
+suppoted||supported
+suppported||supported
+suppport||support
+supress||suppress
+surpressed||suppressed
+surpresses||suppresses
+susbsystem||subsystem
+suspeneded||suspended
+suspicously||suspiciously
+swaping||swapping
+switchs||switches
+swith||switch
+swithable||switchable
+swithc||switch
+swithced||switched
+swithcing||switching
+swithed||switched
+swithing||switching
+swtich||switch
+symetric||symmetric
+synax||syntax
+synchonized||synchronized
+syncronize||synchronize
+syncronized||synchronized
+syncronizing||synchronizing
+syncronus||synchronous
+syste||system
+sytem||system
+sythesis||synthesis
+taht||that
+targetted||targeted
+targetting||targeting
+teh||the
+temorary||temporary
+temproarily||temporarily
+therfore||therefore
+thier||their
+threds||threads
+threshhold||threshold
+thresold||threshold
+throught||through
+troughput||throughput
+thses||these
+tiggered||triggered
+tipically||typically
+timout||timeout
+tmis||this
+torerable||tolerable
+tramsmitted||transmitted
+tramsmit||transmit
+tranasction||transaction
+tranfer||transfer
+transciever||transceiver
+transferd||transferred
+transfered||transferred
+transfering||transferring
+transision||transition
+transmittd||transmitted
+transormed||transformed
+trasfer||transfer
+trasmission||transmission
+treshold||threshold
+trigerring||triggering
+trun||turn
+tunning||tuning
+ture||true
+tyep||type
+udpate||update
+uesd||used
+uncommited||uncommitted
+unconditionaly||unconditionally
+underun||underrun
+unecessary||unnecessary
+unexecpted||unexpected
+unexepected||unexpected
+unexpcted||unexpected
+unexpectd||unexpected
+unexpeted||unexpected
+unexpexted||unexpected
+unfortunatelly||unfortunately
+unifiy||unify
+unintialized||uninitialized
+unkmown||unknown
+unknonw||unknown
+unknow||unknown
+unkown||unknown
+unneded||unneeded
+unneccecary||unnecessary
+unneccesary||unnecessary
+unneccessary||unnecessary
+unnecesary||unnecessary
+unneedingly||unnecessarily
+unnsupported||unsupported
+unmached||unmatched
+unregester||unregister
+unresgister||unregister
+unrgesiter||unregister
+unsinged||unsigned
+unstabel||unstable
+unsolicitied||unsolicited
+unsuccessfull||unsuccessful
+unsuported||unsupported
+untill||until
+unuseful||useless
+upate||update
+usefule||useful
+usefull||useful
+usege||usage
+usera||users
+usualy||usually
+utilites||utilities
+utillities||utilities
+utilties||utilities
+utiltity||utility
+utitity||utility
+utitlty||utility
+vaid||valid
+vaild||valid
+valide||valid
+variantions||variations
+varible||variable
+varient||variant
+vaule||value
+verbse||verbose
+verisons||versions
+verison||version
+verson||version
+vicefersa||vice-versa
+virtal||virtual
+virtaul||virtual
+virtiual||virtual
+visiters||visitors
+vitual||virtual
+wakeus||wakeups
+wating||waiting
+wiat||wait
+wether||whether
+whataver||whatever
+whcih||which
+whenver||whenever
+wheter||whether
+whe||when
+wierd||weird
+wiil||will
+wirte||write
+withing||within
+wnat||want
+workarould||workaround
+writeing||writing
+writting||writing
+zombe||zombie
+zomebie||zombie
diff --git a/script/static-checks/static-checks-check-copyright.sh b/script/static-checks/static-checks-check-copyright.sh
new file mode 100755
index 0000000..837e721
--- /dev/null
+++ b/script/static-checks/static-checks-check-copyright.sh
@@ -0,0 +1,46 @@
+#!/bin/bash
+#
+# Copyright (c) 2019, Arm Limited. All rights reserved.
+#
+# SPDX-License-Identifier: BSD-3-Clause
+#
+
+# Usage: static-checks-check-copyright.sh DIRECTORY
+DIRECTORY="$1"
+
+TEST_CASE="Copyright headers of files modified by this patch"
+
+echo "# Check Copyright Test"
+
+LOG_FILE=`mktemp -t common.XXXX`
+
+"$CI_ROOT"/script/static-checks/check-copyright.py --tree "$DIRECTORY" --patch &> "$LOG_FILE"
+RES=$?
+
+if [ -s "$LOG_FILE" ]; then
+ if [ "$RES" -eq 0 ]; then
+ EXIT_VALUE=0
+ else
+ EXIT_VALUE=1
+ fi
+ cat "$LOG_FILE"
+else
+ echo "ERROR: Empty output log of copyright check script."
+ EXIT_VALUE=1
+fi
+
+echo >> "$LOG_TEST_FILENAME"
+echo "****** $TEST_CASE ******" >> "$LOG_TEST_FILENAME"
+echo >> "$LOG_TEST_FILENAME"
+if [[ "$EXIT_VALUE" == 0 ]]; then
+ echo "Result : SUCCESS" >> "$LOG_TEST_FILENAME"
+else
+ echo "Result : FAILURE" >> "$LOG_TEST_FILENAME"
+fi
+echo >> "$LOG_TEST_FILENAME"
+cat "$LOG_FILE" >> "$LOG_TEST_FILENAME"
+echo >> "$LOG_TEST_FILENAME"
+
+rm "$LOG_FILE"
+
+exit "$EXIT_VALUE"
diff --git a/script/static-checks/static-checks-coding-style-entire-src-tree.sh b/script/static-checks/static-checks-coding-style-entire-src-tree.sh
new file mode 100755
index 0000000..1d066ae
--- /dev/null
+++ b/script/static-checks/static-checks-coding-style-entire-src-tree.sh
@@ -0,0 +1,54 @@
+#!/bin/bash
+#
+# Copyright (c) 2019, Arm Limited. All rights reserved.
+#
+# SPDX-License-Identifier: BSD-3-Clause
+#
+
+# Check the coding style of the entire source tree against the Linux coding
+# style using the checkpatch.pl script from the Linux kernel source tree.
+
+TEST_CASE="Coding style of entire source tree"
+
+echo "# Check coding style of the entire source tree"
+
+LOG_FILE=$(mktemp -t coding-style-check.XXXX)
+
+# Passing V=1 to 'make checkcodebase' will make it generate a per-file summary
+CHECKPATCH=$CI_ROOT/script/static-checks/checkpatch.pl \
+ make checkcodebase V=1 &> "$LOG_FILE"
+RES=$?
+
+if [[ "$RES" == 0 ]]; then
+ # Ignore warnings, only mark the test as failed if there are errors.
+ # We'll get as many 'total:' lines as the number of files in the source tree.
+ # Search for lines that show a non-null number of errors.
+ grep --quiet 'total: [^0][0-9]* errors' "$LOG_FILE"
+  # grep returns 0 when it finds the pattern, which means there is an error
+ RES=$?
+else
+ RES=0
+fi
+
+if [[ "$RES" == 0 ]]; then
+ EXIT_VALUE=1
+else
+ EXIT_VALUE=0
+fi
+
+echo >> "$LOG_TEST_FILENAME"
+echo "****** $TEST_CASE ******" >> "$LOG_TEST_FILENAME"
+echo >> "$LOG_TEST_FILENAME"
+if [[ "$EXIT_VALUE" == 0 ]]; then
+ echo "Result : SUCCESS" >> "$LOG_TEST_FILENAME"
+else
+ echo "Result : FAILURE" >> "$LOG_TEST_FILENAME"
+fi
+# Always print the script output to show the warnings
+echo >> "$LOG_TEST_FILENAME"
+cat "$LOG_FILE" >> "$LOG_TEST_FILENAME"
+echo >> "$LOG_TEST_FILENAME"
+
+rm -f "$LOG_FILE"
+
+exit "$EXIT_VALUE"
diff --git a/script/static-checks/static-checks-coding-style-line-endings.sh b/script/static-checks/static-checks-coding-style-line-endings.sh
new file mode 100755
index 0000000..87e149c
--- /dev/null
+++ b/script/static-checks/static-checks-coding-style-line-endings.sh
@@ -0,0 +1,46 @@
+#!/bin/bash
+#
+# Copyright (c) 2019, Arm Limited. All rights reserved.
+#
+# SPDX-License-Identifier: BSD-3-Clause
+#
+
+TEST_CASE="Line endings not valid"
+
+echo "# Check Line Endings"
+
+LOG_FILE=`mktemp -t common.XXXX`
+
+# For all the source and doc files (*.h,*.c,*.S,*.mk,*.md)
+# We only return the files that contain CRLF
+find "." -\( \
+ -name '*.S' -or \
+ -name '*.c' -or \
+ -name '*.h' -or \
+ -name '*.md' -or \
+ -name 'Makefile' -or \
+ -name '*.mk' \
+-\) -exec grep --files-with-matches $'\r$' {} \; &> "$LOG_FILE"
+
+if [[ -s "$LOG_FILE" ]]; then
+ EXIT_VALUE=1
+else
+ EXIT_VALUE=0
+fi
+
+echo >> "$LOG_TEST_FILENAME"
+echo "****** $TEST_CASE ******" >> "$LOG_TEST_FILENAME"
+echo >> "$LOG_TEST_FILENAME"
+if [[ "$EXIT_VALUE" == 0 ]]; then
+ echo "Result : SUCCESS" >> "$LOG_TEST_FILENAME"
+else
+ echo "Result : FAILURE" >> "$LOG_TEST_FILENAME"
+ echo >> "$LOG_TEST_FILENAME"
+ cat "$LOG_FILE" >> "$LOG_TEST_FILENAME"
+fi
+echo >> "$LOG_TEST_FILENAME"
+
+rm "$LOG_FILE"
+
+exit "$EXIT_VALUE"
+
diff --git a/script/static-checks/static-checks-coding-style.sh b/script/static-checks/static-checks-coding-style.sh
new file mode 100755
index 0000000..5341e34
--- /dev/null
+++ b/script/static-checks/static-checks-coding-style.sh
@@ -0,0 +1,60 @@
+#!/bin/bash
+#
+# Copyright (c) 2019, Arm Limited. All rights reserved.
+#
+# SPDX-License-Identifier: BSD-3-Clause
+#
+
+# Check the coding style of the current patch (not the entire code base)
+# against the Linux coding style using the checkpatch.pl script from
+# the Linux kernel source tree.
+
+TEST_CASE="Coding style on current patch"
+
+echo "# Check coding style on the last patch"
+
+git show --summary
+
+LOG_FILE=$(mktemp -t coding-style-check.XXXX)
+
+# Make the patch against the specified remote branch
+if [ -n "$CODING_STYLE_BASE_BRANCH" ]; then
+ BASE_COMMIT="BASE_COMMIT=$CODING_STYLE_BASE_BRANCH"
+fi
+
+chmod +x $CI_ROOT/script/static-checks/checkpatch.pl
+
+CHECKPATCH=$CI_ROOT/script/static-checks/checkpatch.pl \
+ make checkpatch &> "$LOG_FILE"
+RES=$?
+
+if [[ "$RES" == 0 ]]; then
+ # Ignore warnings, only mark the test as failed if there are errors.
+ grep --quiet "total: [^0][0-9]* errors" "$LOG_FILE"
+ RES=$?
+else
+ RES=0
+fi
+
+if [[ "$RES" == 0 ]]; then
+ EXIT_VALUE=1
+else
+ EXIT_VALUE=0
+fi
+
+echo >> "$LOG_TEST_FILENAME"
+echo "****** $TEST_CASE ******" >> "$LOG_TEST_FILENAME"
+echo >> "$LOG_TEST_FILENAME"
+if [[ "$EXIT_VALUE" == 0 ]]; then
+ echo "Result : SUCCESS" >> "$LOG_TEST_FILENAME"
+else
+ echo "Result : FAILURE" >> "$LOG_TEST_FILENAME"
+fi
+# Always print the script output to show the warnings
+echo >> "$LOG_TEST_FILENAME"
+cat "$LOG_FILE" >> "$LOG_TEST_FILENAME"
+echo >> "$LOG_TEST_FILENAME"
+
+rm -f "$LOG_FILE"
+
+exit "$EXIT_VALUE"
diff --git a/script/static-checks/static-checks-cppcheck.sh b/script/static-checks/static-checks-cppcheck.sh
new file mode 100755
index 0000000..7f5ed8f
--- /dev/null
+++ b/script/static-checks/static-checks-cppcheck.sh
@@ -0,0 +1,71 @@
+#!/bin/bash
+#
+# Copyright (c) 2019, Arm Limited. All rights reserved.
+#
+# SPDX-License-Identifier: BSD-3-Clause
+#
+
+TEST_CASE="cppcheck to the entire source tree"
+
+echo "# cppcheck to the entire source tree"
+
+TF_BASE="$(pwd)"
+
+# cppcheck configuration
+COMMON_ARGS=(-j 16 -q -f --std=c99 --error-exitcode=1 --relative-paths="$TF_BASE")
+CHECKS_ARGS=(--enable=warning,style,portability)
+SUPPRESSIONS=(--suppress=variableScope)
+
+# Excluded directories
+EXCLUDES=(
+plat/hisilicon
+plat/mediatek
+plat/nvidia
+plat/qemu
+plat/rockchip
+plat/socionext
+plat/xilinx
+)
+
+do_lint()
+{
+ local EXCLUDED_DIRS=()
+ local HDR_INCS=()
+
+ LOG_FILE=$(mktemp -t cppcheck_log.XXXX)
+
+ # Build a list of excluded directories
+ for exc in "${EXCLUDES[@]}"; do
+ EXCLUDED_DIRS+=(-i "$exc")
+ done
+
+ while read -r dir; do
+ HDR_INCS+=(-I "$dir")
+ done < <(find "$TF_BASE" -name "*.h" -exec dirname {} \; | sort -u)
+
+ cppcheck \
+ "${COMMON_ARGS[@]}" \
+ "${CHECKS_ARGS[@]}" \
+ "${HDR_INCS[@]}" \
+ "${SUPPRESSIONS[@]}" \
+ "${EXCLUDED_DIRS[@]}" "$TF_BASE" &> "$LOG_FILE"
+ EXIT_VALUE="$?"
+
+ echo >> "$LOG_TEST_FILENAME"
+ echo "****** $TEST_CASE ******" >> "$LOG_TEST_FILENAME"
+ echo >> "$LOG_TEST_FILENAME"
+ if [[ "$EXIT_VALUE" == 0 ]]; then
+ echo "Result : SUCCESS" >> "$LOG_TEST_FILENAME"
+ else
+ echo "Result : FAILURE" >> "$LOG_TEST_FILENAME"
+ echo >> "$LOG_TEST_FILENAME"
+ cat "$LOG_FILE" >> "$LOG_TEST_FILENAME"
+ fi
+ echo >> "$LOG_TEST_FILENAME"
+
+ rm -f "$LOG_FILE"
+
+ exit "$EXIT_VALUE"
+}
+
+do_lint
diff --git a/script/static-checks/static-checks-include-order.sh b/script/static-checks/static-checks-include-order.sh
new file mode 100755
index 0000000..0f2b46c
--- /dev/null
+++ b/script/static-checks/static-checks-include-order.sh
@@ -0,0 +1,41 @@
+#!/bin/bash
+#
+# Copyright (c) 2019, Arm Limited. All rights reserved.
+#
+# SPDX-License-Identifier: BSD-3-Clause
+#
+
+# Usage: static-checks-include-order.sh <path-to-root-folder> [patch]
+
+LOG_FILE=$(mktemp -t include-order-check.XXXX)
+
+if [[ "$2" == "patch" ]]; then
+ echo "# Check order of includes on the last patch"
+ TEST_CASE="Order of includes on the last patch(es)"
+ "$CI_ROOT/script/static-checks/check-include-order.py" --tree "$1" \
+ --patch --from-ref origin/master \
+ &> "$LOG_FILE"
+else
+ echo "# Check order of includes of the entire source tree"
+ TEST_CASE="Order of includes of the entire source tree"
+ "$CI_ROOT/script/static-checks/check-include-order.py" --tree "$1" \
+ &> "$LOG_FILE"
+fi
+
+EXIT_VALUE=$?
+
+echo >> "$LOG_TEST_FILENAME"
+echo "****** $TEST_CASE ******" >> "$LOG_TEST_FILENAME"
+echo >> "$LOG_TEST_FILENAME"
+if [[ "$EXIT_VALUE" == 0 ]]; then
+ echo "Result : SUCCESS" >> "$LOG_TEST_FILENAME"
+else
+ echo "Result : FAILURE" >> "$LOG_TEST_FILENAME"
+ echo >> "$LOG_TEST_FILENAME"
+ cat "$LOG_FILE" >> "$LOG_TEST_FILENAME"
+fi
+echo >> "$LOG_TEST_FILENAME"
+
+rm -f "$LOG_FILE"
+
+exit "$EXIT_VALUE"
diff --git a/script/static-checks/static-checks.sh b/script/static-checks/static-checks.sh
new file mode 100755
index 0000000..c9b980c
--- /dev/null
+++ b/script/static-checks/static-checks.sh
@@ -0,0 +1,100 @@
+#!/bin/bash
+#
+# Copyright (c) 2019, Arm Limited. All rights reserved.
+#
+# SPDX-License-Identifier: BSD-3-Clause
+#
+
+echo '----------------------------------------------'
+echo '-- Running static checks on the source code --'
+echo '----------------------------------------------'
+
+# Find the absolute path of the scripts' top directory
+
+cd "$(dirname "$0")/../.."
+export CI_ROOT=$(pwd)
+cd -
+
+# Initialize log file
+
+export LOG_TEST_FILENAME=$(pwd)/static-checks.log
+
+echo
+echo "###### Static checks ######"
+echo
+
+echo "###### Static checks ######" > "$LOG_TEST_FILENAME"
+echo >> "$LOG_TEST_FILENAME"
+
+# Reset error counters
+
+ERROR_COUNT=0
+WARNING_COUNT=0
+
+# Ensure all the files contain a copyright
+
+echo 'Checking copyright in source files...'
+echo
+"$CI_ROOT"/script/static-checks/static-checks-check-copyright.sh .
+if [ "$?" != 0 ]; then
+ echo "Copyright test: FAILURE"
+ ((ERROR_COUNT++))
+else
+ echo "Copyright test: PASS"
+fi
+echo
+
+# Check alphabetic order of headers included.
+
+if [ "$IS_CONTINUOUS_INTEGRATION" == 1 ]; then
+ "$CI_ROOT"/script/static-checks/static-checks-include-order.sh . patch
+else
+ "$CI_ROOT"/script/static-checks/static-checks-include-order.sh .
+fi
+if [ "$?" != 0 ]; then
+ echo "Include order test: FAILURE"
+ ((WARNING_COUNT++))
+else
+ echo "Include order test: PASS"
+fi
+echo
+
+# Check line endings
+
+"$CI_ROOT"/script/static-checks/static-checks-coding-style-line-endings.sh
+if [ "$?" != 0 ]; then
+ echo "Line ending test: FAILURE"
+ ((ERROR_COUNT++))
+else
+ echo "Line ending test: PASS"
+fi
+echo
+
+# Check coding style
+
+echo 'Checking coding style compliance...'
+echo
+if [ "$IS_CONTINUOUS_INTEGRATION" == 1 ]; then
+ "$CI_ROOT"/script/static-checks/static-checks-coding-style.sh
+else
+ "$CI_ROOT"/script/static-checks/static-checks-coding-style-entire-src-tree.sh
+fi
+if [ "$?" != 0 ]; then
+ echo "Coding style test: FAILURE"
+ ((ERROR_COUNT++))
+else
+ echo "Coding style test: PASS"
+fi
+echo
+
+# Check error count
+
+if [ "$ERROR_COUNT" != 0 ] || [ "$WARNING_COUNT" != 0 ]; then
+ echo "Some static checks have failed."
+fi
+
+if [ "$ERROR_COUNT" != 0 ]; then
+ exit 1
+fi
+
+exit 0
diff --git a/script/static-checks/utils.py b/script/static-checks/utils.py
new file mode 100644
index 0000000..c6a7fdd
--- /dev/null
+++ b/script/static-checks/utils.py
@@ -0,0 +1,86 @@
+#!/usr/bin/env python3
+#
+# Copyright (c) 2019, Arm Limited. All rights reserved.
+#
+# SPDX-License-Identifier: BSD-3-Clause
+#
+
+import os
+import subprocess
+import sys
+import textwrap
+
+
+def dir_is_ignored(relative_path, ignored_folders):
+ '''Checks if a directory is on the ignore list or inside one of the ignored
+ directories. relative_path mustn't end in "/".'''
+
+ # Check if directory is in ignore list
+ if relative_path in ignored_folders:
+ return True
+
+ # Check if directory is a subdirectory of one in ignore list
+ return (relative_path + '/').startswith(ignored_folders)
+
+
+def file_is_ignored(relative_path, valid_file_extensions, ignored_files, ignored_folders):
+ '''Checks if a file is ignored based on its folder, name and extension.'''
+ if not relative_path.endswith(valid_file_extensions):
+ return True
+
+ if relative_path in ignored_files:
+ return True
+
+ return dir_is_ignored(os.path.dirname(relative_path), ignored_folders)
+
+
+def print_exception_info():
+ '''Print some information about the cause of an exception.'''
+ print("ERROR: Exception:")
+ print(textwrap.indent(str(sys.exc_info()[0])," "))
+ print(textwrap.indent(str(sys.exc_info()[1])," "))
+
+
+def decode_string(string):
+ '''Tries to decode a binary string into ASCII. It gives an error if it
+ finds non-ASCII characters, but it will return the string converted
+ anyway, ignoring these characters.'''
+ try:
+ string = string.decode("ascii")
+ except UnicodeDecodeError:
+ # Capture exceptions caused by non-ASCII characters.
+ print("ERROR:Non-ASCII characters detected.")
+ print_exception_info()
+ string = string.decode("ascii", "ignore")
+
+ return string
+
+
+def shell_command(cmd_line):
+ '''Executes a shell command. Returns (returncode, stdout, stderr), where
+ stdout and stderr are ASCII-encoded strings.'''
+
+ try:
+ p = subprocess.Popen(cmd_line, stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE)
+ (stdout, stderr) = p.communicate()
+ # No need for p.wait(), p.communicate() does it by default.
+ except:
+ print("ERROR: Shell command: ", end="")
+ print(cmd_line)
+ print_exception_info()
+ return (1, None, None)
+
+ stdout = decode_string(stdout)
+ stderr = decode_string(stderr)
+
+ if p.returncode != 0:
+ print("ERROR: Shell command failed:")
+ print(textwrap.indent(str(cmd_line)," "))
+ print("ERROR: stdout:")
+ print(textwrap.indent(stdout," "))
+ print("ERROR: stderr:")
+ print(textwrap.indent(stderr," "))
+
+ return (p.returncode, stdout, stderr)
+
diff --git a/script/tf-coverity/common-def.sh b/script/tf-coverity/common-def.sh
new file mode 100644
index 0000000..71640f2
--- /dev/null
+++ b/script/tf-coverity/common-def.sh
@@ -0,0 +1,15 @@
+#!/bin/bash
+#
+# Copyright (c) 2019, Arm Limited. All rights reserved.
+#
+# SPDX-License-Identifier: BSD-3-Clause
+#
+
+JENKINS_URL=http://ssg-sw.cambridge.arm.com/jenkins
+
+# mbed TLS source tag to checkout when building Trusted Firmware with Trusted
+# Board Boot support.
+MBED_TLS_SOURCES_TAG="mbedtls-2.16.0"
+
+ARMCLANG_PATH=
+CRYPTOCELL_LIB_PATH=/arm/projectscratch/ssg/trusted-fw/dummy-crypto-lib
diff --git a/script/tf-coverity/coverity_tf_conf.py b/script/tf-coverity/coverity_tf_conf.py
new file mode 100644
index 0000000..00fb945
--- /dev/null
+++ b/script/tf-coverity/coverity_tf_conf.py
@@ -0,0 +1,44 @@
+#
+# Copyright (c) 2019, Arm Limited. All rights reserved.
+#
+# SPDX-License-Identifier: BSD-3-Clause
+#
+
+#
+# This file lists the source files that are expected to be excluded from
+# Coverity's analysis, and the reason why.
+#
+
+# The expected format is an array of tuples (filename_pattern, description).
+# - filename_pattern is a Python regular expression (as in the 're' module)
+# describing the file(s) to exclude.
+# - description aims at providing the reason why the files are expected
+# to be excluded.
+# Note: each filename_pattern is applied with re.match() against paths
+# relative to the Trusted Firmware source tree root (see print_coverage()
+# in run_coverity_on_tf.py), so a pattern matches from the start of the path.
+exclude_paths = [
+    ("drivers/arm/cci400/cci400.c", "deprecated driver"),
+    ("drivers/arm/gic/v3/arm_gicv3_common.c", "platform to exercise GIC-500/600 powerdown not available yet"),
+    ("drivers/arm/tzc400/tzc400.c", "deprecated driver"),
+    ("drivers/arm/tzc/tzc_common_private.c",
+     "file included, actually indirectly analyzed"),
+    ("drivers/arm/tzc/tzc_dmc500.c", "not used by any upstream platform"),
+
+    ("drivers/io/io_dummy.c", "not used by any upstream platform"),
+    ("drivers/partition/gpt.c", "not used by any upstream platform"),
+    ("drivers/partition/partition.c", "not used by any upstream platform"),
+
+    ("lib/aarch64/xlat_tables.c", "deprecated library code"),
+
+    ("plat/arm/common/arm_tzc_dmc500.c", "not used by any upstream platform"),
+
+    ("plat/mediatek/mt8173/plat_mt_gic.c", "deprecated code"),
+
+    ("lib/aarch32/arm32_aeabi_divmod.c", "not used by any upstream platform"),
+
+    # Waiting for the following patch to be available:
+    # http://ssg-sw.cambridge.arm.com/gerrit/#/c/49862/
+    ("plat/rockchip/rk3399/drivers/m0/.*",
+     "work around the lack of support for the M0 compiler in the scripts"),
+
+    ("tools/.*", "Host tools"),
+    ("plat/qemu/sp_min/sp_min_setup.c", "not used in any upstream platform - see GENFW-2164")
+]
diff --git a/script/tf-coverity/run_coverity_on_tf.py b/script/tf-coverity/run_coverity_on_tf.py
new file mode 100755
index 0000000..a29b5f3
--- /dev/null
+++ b/script/tf-coverity/run_coverity_on_tf.py
@@ -0,0 +1,262 @@
+#!/usr/bin/env python3
+#
+# Copyright (c) 2019, Arm Limited. All rights reserved.
+#
+# SPDX-License-Identifier: BSD-3-Clause
+#
+
+#
+# Run the Coverity tool on the Trusted Firmware and produce a tarball ready to
+# be submitted to Coverity Scan Online.
+#
+
+import sys
+import argparse
+import urllib.request
+import tarfile
+import os
+import subprocess
+import re
+import utils
+import coverity_tf_conf
+
+
+def tarball_name(filename):
+ "Isolate the tarball name without the filename's extension."
+ # Handle a selection of "composite" extensions
+ for ext in [".tar.gz", ".tar.bz2"]:
+ if filename.endswith(ext):
+ return filename[:-len(ext)]
+ # For all other extensions, let the vanilla splitext() function handle it
+ return os.path.splitext(filename)[0]
+
+assert tarball_name("foo.gz") == "foo"
+assert tarball_name("bar.tar.gz") == "bar"
+assert tarball_name("baz.tar.bz2") == "baz"
+
+
+def get_coverity_tool():
+ coverity_tarball = "cov-analysis-linux64-2017.07.tar.gz"
+ url = "http://files.oss.arm.com/downloads/tf-a/" + coverity_tarball
+ print("Downloading Coverity Build tool from %s..." % url)
+ file_handle = urllib.request.urlopen(url)
+ output = open(coverity_tarball, "wb")
+ output.write(file_handle.read())
+ output.close()
+ print("Download complete.")
+
+ print("\nUnpacking tarball %s..." % coverity_tarball)
+ tarfile.open(coverity_tarball).extractall()
+ print("Tarball unpacked.")
+
+ print("\nNow please load the Coverity tool in your PATH...")
+ print("E.g.:")
+ cov_dir_name = tarball_name(coverity_tarball)
+ cov_dir_path = os.path.abspath(os.path.join(cov_dir_name, "bin"))
+ print(" export PATH=%s$PATH" % (cov_dir_path + os.pathsep))
+
+
+def print_coverage(coverity_dir, tf_dir, exclude_paths=[], log_filename=None):
+ analyzed = []
+ not_analyzed = []
+ excluded = []
+
+ # Print the coverage report to a file (or stdout if no file is specified)
+ if log_filename is not None:
+ log_file = open(log_filename, "w")
+ else:
+ log_file = sys.stdout
+
+ # Get the list of files analyzed by Coverity.
+ #
+ # To do that, we examine the build log file Coverity generated and look for
+ # compilation lines. These are the lines starting with "COMPILING:". We consider
+ # only those lines that actually compile C files, i.e. lines of the form:
+ # gcc -c file.c -o file.o
+ # This filters out other compilation lines like generation of dependency files
+ # (*.d) and such.
+ # We then extract the C filename.
+ coverity_build_log = os.path.join(coverity_dir, "build-log.txt")
+ with open(coverity_build_log, encoding="utf-8") as build_log:
+ for line in build_log:
+ results = re.search("COMPILING:.*-c *(.*\.c).*-o.*\.o", line)
+ if results is not None:
+ filename = results.group(1)
+ if filename not in analyzed:
+ analyzed.append(filename)
+
+ # Now get the list of C files in the Trusted Firmware source tree.
+ # Header files and assembly files are ignored, as well as anything that
+ # matches the patterns list in the exclude_paths[] list.
+ # Build a list of files that are in this source tree but were not analyzed
+ # by comparing the 2 sets of files.
+ all_files_count = 0
+ old_cwd = os.path.abspath(os.curdir)
+ os.chdir(tf_dir)
+ git_process = utils.exec_prog("git", ["ls-files", "*.c"],
+ out=subprocess.PIPE, out_text_mode=True)
+ for filename in git_process.stdout:
+ # Remove final \n in filename
+ filename = filename.strip()
+
+ def is_excluded(filename, excludes):
+ for pattern in excludes:
+ if re.match(pattern[0], filename):
+ excluded.append((filename, pattern[1]))
+ return True
+ return False
+
+ if is_excluded(filename, exclude_paths):
+ continue
+
+ # Keep track of the number of C files in the source tree. Used to
+ # compute the coverage percentage at the end.
+ all_files_count += 1
+ if filename not in analyzed:
+ not_analyzed.append(filename)
+ os.chdir(old_cwd)
+
+ # Compute the coverage percentage
+ # Note: The 1.0 factor here is used to make a float division instead of an
+ # integer one.
+ percentage = (1 - ((1.0 * len(not_analyzed) ) / all_files_count)) * 100
+
+ #
+ # Print a report
+ #
+ log_file.write("Files coverage: %d%%\n\n" % percentage)
+ log_file.write("Analyzed %d files\n" % len(analyzed))
+
+ if len(excluded) > 0:
+ log_file.write("\n%d files were ignored on purpose:\n" % len(excluded))
+ for exc in excluded:
+ log_file.write(" - {0:50} (Reason: {1})\n".format(exc[0], exc[1]))
+
+ if len(not_analyzed) > 0:
+ log_file.write("\n%d files were not analyzed:\n" % len(not_analyzed))
+ for f in not_analyzed:
+ log_file.write(" - %s\n" % f)
+ log_file.write("""
+===============================================================================
+Please investigate why the above files are not run through Coverity.
+
+There are 2 possible reasons:
+
+1) The build coverage is insufficient. Please review the tf-cov-make script to
+ add the missing build config(s) that will involve the file in the build.
+
+2) The file is expected to be ignored, for example because it is deprecated
+ code. Please update the TF Coverity configuration to list the file and
+ indicate the reason why it is safe to ignore it.
+===============================================================================
+""")
+ log_file.close()
+
+
+def parse_cmd_line(argv, prog_name):
+ parser = argparse.ArgumentParser(
+ prog=prog_name,
+ description="Run Coverity on Trusted Firmware",
+ epilog="""
+ Please ensure the AArch64 & AArch32 cross-toolchains are loaded in your
+ PATH. Ditto for the Coverity tools. If you don't have the latter then
+ you can use the --get-coverity-tool to download them for you.
+ """)
+ parser.add_argument("--tf", default=None,
+ metavar="<Trusted Firmware source dir>",
+ help="Specify the location of ARM Trusted Firmware sources to analyze")
+ parser.add_argument("--get-coverity-tool", default=False,
+ help="Download the Coverity build tool and exit",
+ action="store_true")
+ parser.add_argument("--mode", choices=["offline", "online"], default="online",
+ help="Choose between online or offline mode for the analysis")
+ parser.add_argument("--output", "-o",
+ help="Name of the output file containing the results of the analysis")
+ parser.add_argument("--build-cmd", "-b",
+ help="Command used to build TF through Coverity")
+ parser.add_argument("--analysis-profile", "-p",
+ action="append", nargs=1,
+ help="Analysis profile for a local analysis")
+ args = parser.parse_args(argv)
+
+ # Set a default name for the output file if none is provided.
+ # If running in offline mode, this will be a text file;
+ # If running in online mode, this will be a tarball name.
+ if not args.output:
+ if args.mode == "offline":
+ args.output = "arm-tf-coverity-report.txt"
+ else:
+ args.output = "arm-tf-coverity-results.tgz"
+
+ return args
+
+
+if __name__ == "__main__":
+ prog_name = sys.argv[0]
+ args = parse_cmd_line(sys.argv[1:], prog_name)
+
+ # If the user asked to download the Coverity build tool then just do that
+ # and exit.
+ if args.get_coverity_tool:
+ # If running locally, use the commercial version of Coverity from the
+ # EUHPC cluster.
+ if args.mode == "offline":
+ print("To load the Coverity tools, use the following command:")
+ print("export PATH=/arm/tools/coverity/static-analysis/8.7.1/bin/:$PATH")
+ else:
+ get_coverity_tool()
+ sys.exit(0)
+
+ if args.tf is None:
+ print("ERROR: Please specify the Trusted Firmware sources using the --tf option.",
+ file=sys.stderr)
+ sys.exit(1)
+
+ # Get some important paths in the platform-ci scripts
+ tf_scripts_dir = os.path.abspath(os.path.dirname(prog_name))
+ tf_coverity_dir = os.path.join(os.path.normpath(
+ os.path.join(tf_scripts_dir, os.pardir, os.pardir)),"coverity")
+
+ if not args.build_cmd:
+ tf_build_script = os.path.join(tf_scripts_dir, "tf-cov-make")
+ args.build_cmd = tf_build_script + " " + args.tf
+
+ run_coverity_script = os.path.join(tf_coverity_dir, "run_coverity.sh")
+
+ ret = subprocess.call([run_coverity_script, "check_tools", args.mode])
+ if ret != 0:
+ sys.exit(1)
+
+ ret = subprocess.call([run_coverity_script, "configure"])
+ if ret != 0:
+ sys.exit(1)
+
+ ret = subprocess.call([run_coverity_script, "build", args.build_cmd])
+ if ret != 0:
+ sys.exit(1)
+
+ if args.mode == "online":
+ ret = subprocess.call([run_coverity_script, "package", args.output])
+ else:
+ for profile in args.analysis_profile:
+ ret = subprocess.call([run_coverity_script, "analyze",
+ args.output,
+ args.tf,
+ profile[0]])
+ if ret != 0:
+ break
+ if ret != 0:
+ print("An error occured (%d)." % ret, file=sys.stderr)
+ sys.exit(ret)
+
+ print("-----------------------------------------------------------------")
+ print("Results can be found in file '%s'" % args.output)
+ if args.mode == "online":
+ print("This tarball can be uploaded at Coverity Scan Online:" )
+ print("https://scan.coverity.com/projects/arm-software-arm-trusted-firmware/builds/new?tab=upload")
+ print("-----------------------------------------------------------------")
+
+ print_coverage("cov-int", args.tf, coverity_tf_conf.exclude_paths, "tf_coverage.log")
+ with open("tf_coverage.log") as log_file:
+ for line in log_file:
+ print(line, end="")
diff --git a/script/tf-coverity/tf-cov-make b/script/tf-coverity/tf-cov-make
new file mode 100755
index 0000000..36cc290
--- /dev/null
+++ b/script/tf-coverity/tf-cov-make
@@ -0,0 +1,204 @@
+#! /bin/sh
+#
+# Copyright (c) 2019, Arm Limited. All rights reserved.
+#
+# SPDX-License-Identifier: BSD-3-Clause
+#
+
+#
+# This script builds the TF in different configs.
+# Rather than telling cov-build to build TF using a simple 'make all' command,
+# the goal here is to combine several build flags to analyse more of our source
+# code in a single 'build'. The Coverity Scan service does not have the notion
+# of separate types of build - there is just one linear sequence of builds in
+# the project history.
+#
+
+# Bail out as soon as an error is encountered.
+set -e
+
+# The one and only argument is the path to the Trusted Firmware sources.
+TF_SOURCES=$1
+if [ ! -d "$TF_SOURCES" ]; then
+    echo "ERROR: '$TF_SOURCES' does not exist or is not a directory"
+    echo "Usage: $(basename "$0") <trusted-firmware-directory>"
+    exit 1
+fi
+
+export CROSS_COMPILE=aarch64-linux-gnu-
+
+# Get mbed TLS library code to build Trusted Firmware with Trusted Board Boot
+# support. The version of mbed TLS to use here must be the same as when
+# building TF in the usual context.
+if [ ! -d mbedtls ]; then
+    git clone https://github.com/ARMmbed/mbedtls.git
+fi
+cd mbedtls
+# common-def.sh sits next to this script and provides MBED_TLS_SOURCES_TAG
+# (among other settings).
+containing_dir="$(readlink -f "$(dirname "$0")/")"
+. $containing_dir/common-def.sh
+git checkout "$MBED_TLS_SOURCES_TAG"
+cd -
+TBB_OPTIONS="TRUSTED_BOARD_BOOT=1 GENERATE_COT=1 MBEDTLS_DIR=$(pwd)/mbedtls"
+ARM_TBB_OPTIONS="$TBB_OPTIONS ARM_ROTPK_LOCATION=devel_rsa"
+
+cd "$TF_SOURCES"
+
+# Clean TF source dir to make sure we don't analyse temporary files.
+make distclean
+
+#
+# Build TF in different configurations to get as much coverage as possible
+#
+
+# We need to clean the platform build between each configuration because Trusted
+# Firmware's build system doesn't track build options dependencies and won't
+# rebuild the files affected by build options changes.
+clean_build()
+{
+    # All arguments are forwarded verbatim to make, for both the clean and
+    # the build step.
+    local flags="$*"
+    echo "Building TF with the following build flags:"
+    echo "  $flags"
+    make $flags clean
+    make $flags all
+    echo "Build config complete."
+    echo
+}
+
+#
+# FVP platform
+# We'll use the following flags for all FVP builds.
+#
+fvp_common_flags="-j PLAT=fvp DEBUG=1"
+
+# Try all possible SPDs.
+clean_build $fvp_common_flags ${ARM_TBB_OPTIONS} ARM_TSP_RAM_LOCATION=dram SPD=tspd
+clean_build $fvp_common_flags ${ARM_TBB_OPTIONS} ARM_TSP_RAM_LOCATION=dram SPD=tspd TSP_INIT_ASYNC=1 \
+    TSP_NS_INTR_ASYNC_PREEMPT=1
+clean_build $fvp_common_flags ${ARM_TBB_OPTIONS} SPD=opteed
+clean_build $fvp_common_flags ${ARM_TBB_OPTIONS} SPD=tlkd
+
+# Trusty SPD, with and without the generic services (built without the TBB
+# options used above).
+clean_build -j PLAT=fvp DEBUG=1 SPD=trusty
+clean_build -j PLAT=fvp DEBUG=1 SPD=trusty TRUSTY_SPD_WITH_GENERIC_SERVICES=1
+
+# SDEI
+clean_build PLAT=fvp DEBUG=1 SDEI_SUPPORT=1 EL3_EXCEPTION_HANDLING=1
+
+# Without coherent memory
+clean_build $fvp_common_flags ${ARM_TBB_OPTIONS} ARM_TSP_RAM_LOCATION=dram SPD=tspd USE_COHERENT_MEM=0
+
+# Using PSCI extended State ID format rather than the original format
+clean_build $fvp_common_flags ${ARM_TBB_OPTIONS} ARM_TSP_RAM_LOCATION=dram SPD=tspd PSCI_EXTENDED_STATE_ID=1 \
+    ARM_RECOM_STATE_ID_ENC=1
+
+# Alternative boot flows (This changes some of the platform initialisation code)
+clean_build $fvp_common_flags EL3_PAYLOAD=0x80000000
+clean_build $fvp_common_flags PRELOADED_BL33_BASE=0x80000000
+
+# Using the SP804 timer instead of the Generic Timer
+clean_build $fvp_common_flags FVP_USE_SP804_TIMER=1
+
+# Using the CCN driver and multi cluster topology
+clean_build $fvp_common_flags FVP_CLUSTER_COUNT=4
+
+# PMF
+clean_build $fvp_common_flags ENABLE_PMF=1
+
+# stack protector
+clean_build $fvp_common_flags ENABLE_STACK_PROTECTOR=strong
+
+# AArch32 build
+clean_build $fvp_common_flags CROSS_COMPILE=arm-linux-gnueabihf- \
+    ARCH=aarch32 AARCH32_SP=sp_min \
+    RESET_TO_SP_MIN=1 PRELOADED_BL33_BASE=0x80000000
+clean_build $fvp_common_flags CROSS_COMPILE=arm-linux-gnueabihf- \
+    ARCH=aarch32 AARCH32_SP=sp_min
+
+# Xlat tables lib version 1 (AArch64 and AArch32)
+clean_build $fvp_common_flags ARM_XLAT_TABLES_LIB_V1=1 RECLAIM_INIT_CODE=0
+clean_build $fvp_common_flags CROSS_COMPILE=arm-linux-gnueabihf- \
+    ARCH=aarch32 AARCH32_SP=sp_min ARM_XLAT_TABLES_LIB_V1=1 RECLAIM_INIT_CODE=0
+
+# Using GIC600 driver
+clean_build $fvp_common_flags FVP_USE_GIC_DRIVER=FVP_GIC600
+
+# SPM support
+clean_build $fvp_common_flags ENABLE_SPM=1 EL3_EXCEPTION_HANDLING=1
+
+# BL2 at EL3 support (AArch64 and AArch32)
+clean_build $fvp_common_flags BL2_AT_EL3=1
+clean_build $fvp_common_flags CROSS_COMPILE=arm-linux-gnueabihf- \
+    ARCH=aarch32 AARCH32_SP=sp_min BL2_AT_EL3=1
+
+#
+# Juno platform
+# We'll use the following flags for all Juno builds.
+#
+juno_common_flags="-j PLAT=juno DEBUG=1"
+clean_build $juno_common_flags SPD=tspd ${ARM_TBB_OPTIONS}
+clean_build $juno_common_flags EL3_PAYLOAD=0x80000000
+clean_build $juno_common_flags ENABLE_STACK_PROTECTOR=strong
+clean_build $juno_common_flags CSS_USE_SCMI_SDS_DRIVER=0
+# CryptoCell integration, using the dummy library from common-def.sh.
+clean_build $juno_common_flags SPD=tspd ${ARM_TBB_OPTIONS} ARM_CRYPTOCELL_INTEG=1 CCSBROM_LIB_PATH=${CRYPTOCELL_LIB_PATH}
+
+#
+# System Guidance for Infrastructure platform SGI575
+#
+make -j DEBUG=1 PLAT=sgi575 all
+
+#
+# System Guidance for Infrastructure platform RD-N1Edge
+#
+make -j DEBUG=1 PLAT=rdn1edge all
+
+#
+# System Guidance for Infrastructure platform RD-E1Edge
+#
+make -j DEBUG=1 PLAT=rde1edge all
+
+# Partners' platforms.
+# Enable as many features as possible.
+# We don't need to clean between each build here because we only do one build
+# per platform so we don't hit the build flags dependency problem.
+external_plat_common_flags="-j DEBUG=1"
+
+make PLAT=mt8173 $external_plat_common_flags all
+
+make PLAT=rk3368 $external_plat_common_flags COREBOOT=1 all
+make PLAT=rk3399 $external_plat_common_flags COREBOOT=1 all
+make PLAT=rk3328 $external_plat_common_flags COREBOOT=1 all
+
+# Although we do several consecutive builds for the Tegra platform below, we
+# don't need to clean between each one because the Tegra makefiles specify
+# a different build directory per SoC.
+make PLAT=tegra TARGET_SOC=t210 $external_plat_common_flags all
+make PLAT=tegra TARGET_SOC=t132 $external_plat_common_flags all
+make PLAT=tegra TARGET_SOC=t186 $external_plat_common_flags all
+
+# For the Xilinx platform, artificially increase the extents of BL31 memory
+# (using the platform-specific build options ZYNQMP_ATF_MEM_{BASE,SIZE}).
+# If we keep the default values, BL31 doesn't fit when it is built with all
+# these build flags.
+make PLAT=zynqmp $external_plat_common_flags \
+    RESET_TO_BL31=1 SPD=tspd \
+    ZYNQMP_ATF_MEM_BASE=0xFFFC0000 ZYNQMP_ATF_MEM_SIZE=0x00040000 \
+    all
+
+clean_build PLAT=qemu $external_plat_common_flags ${TBB_OPTIONS}
+clean_build PLAT=qemu $external_plat_common_flags ENABLE_STACK_PROTECTOR=strong
+
+# For hikey enable PMF to include all files in the platform port
+make PLAT=hikey $external_plat_common_flags ENABLE_PMF=1 all
+make PLAT=hikey960 $external_plat_common_flags all
+
+clean_build PLAT=uniphier $external_plat_common_flags ${TBB_OPTIONS} SPD=tspd
+clean_build PLAT=uniphier $external_plat_common_flags FIP_GZIP=1
+
+make PLAT=poplar $external_plat_common_flags all
+
+make PLAT=rpi3 $external_plat_common_flags PRELOADED_BL33_BASE=0xDEADBEEF all
+
+# Cannot use $external_plat_common_flags for LS1043 platform, as then
+# the binaries do not fit in memory.
+clean_build PLAT=ls1043 SPD=opteed ENABLE_STACK_PROTECTOR=strong
+clean_build PLAT=ls1043 SPD=tspd
+
+# Step back out of the TF source tree.
+cd ..
diff --git a/script/tf-coverity/utils.py b/script/tf-coverity/utils.py
new file mode 100644
index 0000000..f86667f
--- /dev/null
+++ b/script/tf-coverity/utils.py
@@ -0,0 +1,20 @@
+#!/usr/bin/env python3
+#
+# Copyright (c) 2019, Arm Limited. All rights reserved.
+#
+# SPDX-License-Identifier: BSD-3-Clause
+#
+
+import subprocess
+
+def exec_prog(prog, args=[], out=None, out_text_mode=False):
+ # Build the command line to execute
+ cmd = [ prog ] + args
+
+ # Spawn process.
+ # Note: The standard error output is captured into the same file handle as
+ # for stdout.
+ process = subprocess.Popen(cmd, stdout=out, stderr=subprocess.STDOUT,
+ universal_newlines=out_text_mode, bufsize=0)
+ print("Spawned process with PID %u" % process.pid)
+ return process
diff --git a/script/translate_refspec.py b/script/translate_refspec.py
new file mode 100755
index 0000000..9e6b370
--- /dev/null
+++ b/script/translate_refspec.py
@@ -0,0 +1,120 @@
+#!/usr/bin/env python3
+#
+# Copyright (c) 2019, Arm Limited. All rights reserved.
+#
+# SPDX-License-Identifier: BSD-3-Clause
+#
+
+# This script translates certain accepted refspec schemes to something that can
+# be used on git command line. For example, given the refspec 'topic:foo/bar'
+# for a given project, this script translates and prints the full commit hash.
+#
+# If a scheme is not recognized, print the received refspec unchanged.
+
+import argparse
+import gerrit
+import sys
+
+# Gerrit servers we care about.
+gerrit_arm = gerrit.GerritServer("gerrit.oss.arm.com")
+gerrit_tforg = gerrit.GerritServer("review.trustedfirmware.org")
+
+# Trusted Firmware-A and associated projects.
+# Different projects are hosted on different Gerrit servers.
+# Maps a server identifier ("arm"/"tforg") to {project name: GerritProject}.
+projects = {
+    # Projects hosted on Arm Gerrit server.
+    "arm": {
+        "trusted-firmware": gerrit.GerritProject("pdcs-platforms/ap/tf-topics", gerrit_arm),
+        "trusted-firmware-tf": gerrit.GerritProject("trusted-firmware/tf-a-tests", gerrit_arm),
+        "trusted-firmware-ci": gerrit.GerritProject("pdswinf/ci/pdcs-platforms/platform-ci", gerrit_arm),
+        "scp": gerrit.GerritProject("scp/firmware", gerrit_arm),
+    },
+
+    # Projects hosted on trustedfirmware.org Gerrit server.
+    "tforg": {
+        "trusted-firmware": gerrit.GerritProject("TF-A/trusted-firmware-a", gerrit_tforg),
+        "trusted-firmware-tf": gerrit.GerritProject("TF-A/tf-a-tests", gerrit_tforg),
+    },
+}
+
+# Argument setup
+parser = argparse.ArgumentParser()
+parser.add_argument("--project", "-p",
+        help="Gerrit project identifier this refspec belongs to")
+parser.add_argument("--server", "-s", help="Gerrit server hosting this project",
+        choices=["arm", "tforg"])
+parser.add_argument("--user", "-u",
+        help="Username to use to query the Gerrit server")
+parser.add_argument("--key", "-k",
+        help="SSH private key to use to authenticate with the Gerrit server")
+parser.add_argument("refspec", help="Refspec to translate")
+opts = parser.parse_args()
+
+# Look up the requested project on the requested server. A wrong identifier
+# raises KeyError here.
+project = projects[opts.server][opts.project]
+
+# Default action: print refspec and exit
+def do_default():
+ print(opts.refspec)
+ sys.exit(0)
+
+def print_topic_tip(query_results):
+ patchsets = []
+ parents = []
+
+ # For each change, get its most recent patchset
+ for change in query_results:
+ patchsets.append(change["patchSets"][-1])
+
+ # For each patchset, get its parent commit
+ for patchset in patchsets:
+ parents.append(patchset["parents"][0])
+
+ # If a patchset's revision is NOT in the list of parents then it should
+ # be the tip commit
+ tips = list(filter(lambda x: x["revision"] not in parents, patchsets))
+
+ # There must be only one patchset remaining, otherwise the tip is ambiguous
+ if len(tips) > 1:
+ raise Exception("{} in {} has no unique tip commit.".format(opts.refspec,
+ opts.project))
+ if len(tips) == 0:
+ raise Exception("No tip commit found for {} in {}.".format(opts.refspec,
+ opts.project))
+ # Print the reference of the topic tip patchset
+ print(tips[0]["ref"])
+
+query = ["status:open"]
+
+# If we don't understand the refspec, that's OK. We don't translate it, but
+# print it as is.
+try:
+ scheme, rest = opts.refspec.split(":")
+ if scheme == "topic":
+ query += ["topic:" + rest]
+ elif scheme == "change":
+ query += [opts.refspec]
+ else:
+ do_default()
+except:
+ do_default()
+
+# Run the Gerrit query for the translated refspec.
+changes = project.query(query, username=opts.user, keyfile=opts.key)
+
+# The last object is a summary; drop it as it's not of interest to us.
+changes.pop()
+
+if not changes:
+    raise Exception("{} for {} resolved to nothing.".format(opts.refspec,
+        opts.project))
+
+if scheme == "topic":
+    if len(changes) > 1:
+        # Several changes share the topic: print the tip patchset's ref.
+        print_topic_tip(changes)
+    else:
+        # Single change on the topic: print its current patchset's ref.
+        print(changes[0]["currentPatchSet"]["ref"])
+elif scheme == "change":
+    if len(changes) > 1:
+        # When querying for a specific change there must be just a single result
+        raise Exception("{} for {} did not resolve uniquely.".format(opts.refspec,
+            opts.project))
+    # Print the full commit hash of the change's current patchset.
+    print(changes[0]["currentPatchSet"]["revision"])
diff --git a/script/trusted-firmware.nomination.py b/script/trusted-firmware.nomination.py
new file mode 100644
index 0000000..f9df9e7
--- /dev/null
+++ b/script/trusted-firmware.nomination.py
@@ -0,0 +1,31 @@
+#
+# Copyright (c) 2019, Arm Limited. All rights reserved.
+#
+# SPDX-License-Identifier: BSD-3-Clause
+#
+
+#
+# Nomination map for Trusted Firmware.
+#
+# This file is EXECED from gen_nomination.py
+
+# Keys are nomination criteria (a single "path:"/"pathre:"/"has:" string, or
+# a tuple of them); values are lists of test configurations to nominate when
+# a criterion matches.
+nomination_rules = {
+    # Run RDN1EDGE TF-A Tests for any platform changes
+    "path:plat/arm/board/rdn1edge":
+        ["tf-l3-boot-tests-css/fvp-rdn1edge-tbb,fvp-rdn1edge-default:fvp-tftf-fip.tftf-rdn1edge"],
+
+    # Run SGI575 boot test and TF-A Tests for any platform changes
+    "path:plat/arm/board/sgi575":
+        ["tf-l3-boot-tests-css/fvp-sgi575-tbb:fvp-linux.sgi-fip.sgi-sgi575-debug",
+         "tf-l3-boot-tests-css/fvp-sgi575-tbb,fvp-sgi575-default:fvp-tftf-fip.tftf-sgi575"],
+
+    # Run SGM775 boot test for any platform changes
+    "path:plat/arm/board/sgm775":
+        ["tf-l3-boot-tests-css/fvp-sgm775-tbb:fvp-linux.sgm-dtb.sgm775-fip.sgm-sgm775-debug"],
+
+    # Run SDEI boot test for SDEI, EHF, or RAS changes or mention
+    ("pathre:sdei", "pathre:ehf", "pathre:ras", "has:SDEI_SUPPORT",
+     "has:EL3_EXCEPTION_HANDLING"):
+        ["tftf-l2-tests/fvp-aarch64-sdei,fvp-default:fvp-tftf-fip.tftf-aemv8a-debug"],
+
+    }