author | Andrei Pavel <andrei@isc.org> | 2024-06-17 16:07:22 +0200
---|---|---
committer | Andrei Pavel <andrei@isc.org> | 2024-06-20 17:31:30 +0200
commit | 3efbe09a45a01a16f9869c837d8242bd7bb6604b (patch) |
tree | 5bee21797e98b002e7ce7848487576b43a0815d8 |
parent | [#3287] automatically determine list of files to shellcheck (diff) |
[#3287] fix reported shellcheck warnings
-rw-r--r-- | src/bin/keactrl/keactrl.in | 14
-rw-r--r-- | src/bin/shell/tests/tls_ca_process_tests.sh.in | 7
-rw-r--r-- | src/hooks/dhcp/run_script/run_script.dox | 2
-rw-r--r-- | src/lib/testutils/dhcp_test_lib.sh.in | 2
-rw-r--r-- | src/share/database/scripts/mysql/upgrade_009.5_to_009.6.sh.in | 2
-rwxr-xr-x | src/share/yang/modules/utils/bump-up-revisions.sh | 7
-rwxr-xr-x | tools/check-for-json-errors-in-doc.sh | 199
-rwxr-xr-x | tools/check-lib-dependencies.sh | 409
-rwxr-xr-x | tools/clang-format.sh | 8
-rw-r--r-- | tools/extract_bnf.sh.in | 11
-rwxr-xr-x | tools/shellcheck-all.sh | 8
-rwxr-xr-x | tools/uncrustify.sh | 8
12 files changed, 343 insertions, 334 deletions
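
The whole commit exists to satisfy shellcheck across these scripts. For orientation, a minimal sketch of how such a check is typically run by hand, assuming you are at the repository root (the file names are taken from the diffstat above; the repository's own driver is tools/shellcheck-all.sh, whose diff appears further below):

    #!/bin/sh
    # Run shellcheck over a couple of the scripts touched by this commit,
    # excluding the same advisory checks that tools/shellcheck-all.sh excludes.
    shellcheck --exclude=SC2119 --exclude=SC3043 \
        src/bin/keactrl/keactrl.in tools/clang-format.sh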
diff --git a/src/bin/keactrl/keactrl.in b/src/bin/keactrl/keactrl.in
index cccfdac303..a7cf355630 100644
--- a/src/bin/keactrl/keactrl.in
+++ b/src/bin/keactrl/keactrl.in
@@ -16,16 +16,22 @@
# shellcheck disable=SC2154
# SC2154: ... is referenced but not assigned.
# Reason: some variables are taken from keactrl.conf
+#
+# shellcheck disable=SC2317
+# SC2317: Command appears to be unreachable. Check usage (or ignore if invoked indirectly).
+# Reason: shellcheck is deceived by the 'if test "${HAVE_NETCONF}" = 'yes'' condition which it
+# evaluates to always false and thinks the rest of the script is never executed.
# Exit with error if commands exit with non-zero and if undefined variables are
# used.
set -eu
-PACKAGE_VERSION="@PACKAGE_VERSION@"
+HAVE_NETCONF='@HAVE_NETCONF@'
EXTENDED_VERSION="@EXTENDED_VERSION@"
+PACKAGE_VERSION="@PACKAGE_VERSION@"
# Set the have_netconf flag to know if netconf is available.
-if test '@HAVE_NETCONF@' = 'yes'; then
+if test "${HAVE_NETCONF}" = 'yes'; then
have_netconf=true
else
have_netconf=false
@@ -145,9 +151,9 @@ check_running() {
# Get the PID from the PID file (if it exists)
get_pid_from_file "${proc_name}"
- if [ ${_pid} -gt 0 ]; then
+ if [ "${_pid}" -gt 0 ]; then
# Use ps to check if PID is alive
- if ps -p ${_pid} 1>/dev/null; then
+ if ps -p "${_pid}" 1>/dev/null; then
# No error, so PID IS ALIVE
_running=1
fi
diff --git a/src/bin/shell/tests/tls_ca_process_tests.sh.in b/src/bin/shell/tests/tls_ca_process_tests.sh.in
index 94d608f48e..2ece6c6c61 100644
--- a/src/bin/shell/tests/tls_ca_process_tests.sh.in
+++ b/src/bin/shell/tests/tls_ca_process_tests.sh.in
@@ -149,10 +149,17 @@ list_commands_test() {
fi
# Main test phase: send command, check response.
+
+ # shellcheck disable=SC2086
+ # SC2086: Double quote to prevent globbing and word splitting.
+ # Reason: we specifically want ${arguments} to split because there may be multiple words in it.
tmp="echo | ${shell_bin_path}/${shell_bin} --port 8443 \
${arguments} > ${tmpfile_path}/shell-stdout.txt"
echo "Executing kea-shell ($tmp)"
+ # shellcheck disable=SC2086
+ # SC2086: Double quote to prevent globbing and word splitting.
+ # Reason: we specifically want ${arguments} to split because there may be multiple words in it.
echo | ${shell_bin_path}/${shell_bin} --port 8443 \
${arguments} > ${tmpfile_path}/shell-stdout.txt
EXIT_CODE=$?
diff --git a/src/hooks/dhcp/run_script/run_script.dox b/src/hooks/dhcp/run_script/run_script.dox
index 585c983a13..63690b1136 100644
--- a/src/hooks/dhcp/run_script/run_script.dox
+++ b/src/hooks/dhcp/run_script/run_script.dox
@@ -1,4 +1,4 @@
-// Copyright (C) 2021 Internet Systems Consortium, Inc. ("ISC")
+// Copyright (C) 2021-2024 Internet Systems Consortium, Inc. ("ISC")
//
// This Source Code Form is subject to the terms of the Mozilla Public
// License, v. 2.0. If a copy of the MPL was not distributed with this
diff --git a/src/lib/testutils/dhcp_test_lib.sh.in b/src/lib/testutils/dhcp_test_lib.sh.in
index ddc74c2489..300df0d388 100644
--- a/src/lib/testutils/dhcp_test_lib.sh.in
+++ b/src/lib/testutils/dhcp_test_lib.sh.in
@@ -425,7 +425,7 @@ set_logger() {
clean_exit 1
fi
printf 'Kea log will be stored in %s.\n' "${LOG_FILE}"
- export KEA_LOGGER_DESTINATION=${LOG_FILE}
+ export KEA_LOGGER_DESTINATION="${LOG_FILE}"
}
# Checks if specified process is running.
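
The keactrl.in hunk above quotes ${_pid} before handing it to [ and to ps. A small stand-alone sketch of why that matters, with an illustrative variable and value that are not taken from keactrl:

    #!/bin/sh
    # An unquoted expansion is word-split, so a PID variable that is empty or
    # contains stray whitespace gives `[` and `ps -p` the wrong number of arguments.
    pid="$$"    # illustrative value: this shell's own PID
    if [ "${pid}" -gt 0 ]; then
        # Quoting keeps the value as a single argument to ps as well.
        if ps -p "${pid}" 1>/dev/null; then
            echo "process ${pid} is alive"
        fi
    fi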
diff --git a/src/share/database/scripts/mysql/upgrade_009.5_to_009.6.sh.in b/src/share/database/scripts/mysql/upgrade_009.5_to_009.6.sh.in
index 8002db8091..0094882d23 100644
--- a/src/share/database/scripts/mysql/upgrade_009.5_to_009.6.sh.in
+++ b/src/share/database/scripts/mysql/upgrade_009.5_to_009.6.sh.in
@@ -37,7 +37,7 @@ fi
# Add column only if it doesn't exist to work around the 1.9.4 leak of
# cache_threshold and cache_max_age column alters in subnet and shared network
# tables in schema version 9.5.
-if ! mysql "${@}" -e 'SELECT cache_threshold FROM dhcp4_subnet LIMIT 1' &> /dev/null; then
+if ! mysql "${@}" -e 'SELECT cache_threshold FROM dhcp4_subnet LIMIT 1' > /dev/null 2>&1; then
mysql "${@}" <<EOF
# Add new lease cache parameters.
ALTER TABLE dhcp4_subnet
diff --git a/src/share/yang/modules/utils/bump-up-revisions.sh b/src/share/yang/modules/utils/bump-up-revisions.sh
index 45f84c8aaf..86778fc581 100755
--- a/src/share/yang/modules/utils/bump-up-revisions.sh
+++ b/src/share/yang/modules/utils/bump-up-revisions.sh
@@ -25,6 +25,9 @@ Options:
"$(basename "${0}")"
}
+red='\033[91m'
+reset='\033[0m'
+
# Parse parameters.
while test ${#} -gt 0; do
case "${1}" in
@@ -44,7 +47,7 @@ done
# Get script path.
script_path=$(cd "$(dirname "${0}")" && pwd)
-pushd "${script_path}/.."
+cd "${script_path}/.."
# Get the last wednesday of the month.
this_month=$(date +%Y-%m)
@@ -59,7 +62,7 @@ done
# - rename it to the new revision
# - change its name in Makefile.am
# - change its name in yang_revisions.h
-ca=$(git merge-base origin/master $(git rev-parse --abbrev-ref HEAD))
+ca=$(git merge-base origin/master "$(git rev-parse --abbrev-ref HEAD)")
for module in $(git diff "${ca}" --name-only . | grep -E '\.yang$'); do
module=$(basename "${module}")
new_module="$(printf '%s' "${module}" | sed "s/@.*\.yang/@${wednesday}.yang/g")"
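
The tools/check-for-json-errors-in-doc.sh rewrite that follows moves the script from bash to POSIX sh, so its `done <<< "$content"` here-string loop has to go. A minimal sketch of the head/tail line-iteration pattern the new version uses, with a made-up input string:

    #!/bin/sh
    # Iterate over the lines of a multi-line variable without bash here-strings.
    content=$(printf 'first\nsecond\nthird')   # stand-in for the preprocessed file
    stop_at=$(echo "${content}" | wc -l)
    line_num=0
    while true; do
        line_num=$((line_num + 1))
        if test "${line_num}" -gt "${stop_at}"; then
            break
        fi
        line=$(echo "${content}" | head -n "${line_num}" | tail -n 1)
        echo "line ${line_num}: ${line}"
    done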
diff --git a/tools/check-for-json-errors-in-doc.sh b/tools/check-for-json-errors-in-doc.sh
index 88675f1723..764827e5c6 100755
--- a/tools/check-for-json-errors-in-doc.sh
+++ b/tools/check-for-json-errors-in-doc.sh
@@ -1,114 +1,123 @@
-#!/bin/bash
+#!/bin/sh
# Usage:
# check-for-json-errors-in-doc.sh [--all] [<file1>, <file2>, ...]
+set -eu
+
# Change directory to the root of the repository.
script_path=$(cd "$(dirname "${0}")" && pwd)
-cd "${script_path}/.."
+cd "${script_path}/.." || exit 1
# Parse parameters.
if test ${#} -gt 0; then
- if test "${1}" = '--all'; then
- files='doc src'
- else
- files="${*}"
- fi
+ if test "${1}" = '--all'; then
+ files='doc src'
+ else
+ files="${*}"
+ fi
else
- # By default, check only modified files.
- files=$(git diff --name-only $(git merge-base origin/master HEAD))
+ # By default, check only modified files.
+ files=$(git diff --name-only "$(git merge-base origin/master HEAD)")
- # If there is nothing to check, exit early. Otherwise, it checks everything.
- if test -z "${files}"; then
- exit 0
- fi
+ # If there is nothing to check, exit early. Otherwise, it checks everything.
+ if test -z "${files}"; then
+ exit 0
+ fi
fi
exit_code=0
+work_file=$(mktemp)
# Get the files.
-files=$(find $(echo $files) -type f \( -name '*.rst' -or -name '*.json' \) -and -not -path '*/_build/*' -and -not -path '*/man/*' | sort -uV)
-work_file=$(mktemp)
-for file in $(echo $files); do
- json=0
- comment=0
- line_num=0
- echo "processing: $file"
- IFS=
- while read -r line; do
- line_num=$((line_num+1))
- if [ $comment -eq 0 -a $json -eq 0 -a $(echo "$line" | grep "^[A-Za-z]+\|^\s*\`" | wc -l) -eq 1 ]; then
- # ignore line if it starts with 'A-Za-z' or spaces followed by '`'
- continue
- elif [ $comment -eq 0 -a $(echo "$line" | grep "/\*" | grep -v "\*/" | wc -l) -eq 1 ]; then
- # if the line contains /* and it does not contain */ on the same line
- comment=1
- echo "" >> $work_file
- continue
- elif [ $comment -eq 1 -a $(echo "$line" | grep "\*/" | wc -l) -eq 1 ]; then
- # if the line contains */
- comment=0
- echo "" >> $work_file
- continue
- elif [ $comment -eq 0 -a $json -eq 0 -a $(echo "$line" | grep "^\s*{\|^\s*\".*{\|^\s*\[\s*$" | grep -v "}" | wc -l) -eq 1 ]; then
- # if this is not a comment and the line starts with spaces followed by '{' or by '"' followed by "{"
- json=1
- # ignore any map name before top level map
- line=$(echo "$line" | sed 's/.*{/{/g')
- echo "" > $work_file
- elif [ $comment -eq 0 -a $json -eq 1 -a $(echo "$line" | grep "^\s*[A-Za-z]\|^\s*\`" | wc -l) -eq 1 ]; then
- # if the line is not a comment and the line starts with spaces followed by 'A-Za-z' or followed by "`" and the parser is processing a json structure
- json=0
- cat $work_file | jq . > /dev/null
- if [ $? -ne 0 ]; then
- # if the jq tool returned error
- echo "file $file contains invalid JSON near line $line_num"
- echo "===start of JSON block==="
- cat $work_file
- echo "====end of JSON block===="
- exit_code=1
- fi
- fi
- if [ $comment -eq 0 -a $json -eq 1 ]; then
- if [ $(echo "$line" | grep "^\s*\.\.\s" | wc -l) -eq 1 ]; then
- echo "" >> $work_file
- else
- # if file is .json the following replace in line are done:
- # 1. delete everything after '#'
- # 2. delete everything after //
- # 3. ignore <?include?>
- # 4. replace all '[ <DATA> ]' with '[ "<DATA>" ]' where DATA contains: '-' and 'A-Za-z0-9' and ' '
- # 5. replace all ' <DATA>:' with ' "<DATA>":'
- # 6. replace all ': <DATA>' with ': "<DATA>"'
- # 7. replace ' ...' with ' "placeholder": "value"
- # 8. replace ', ... ' with ' '
- # 9. replace ' <DATA>' with ' "placeholder": "value"'
- # 10. replace ' <DATA>' with ' "placeholder"'
- if [ $(echo "$file" | grep "\.json" | wc -l) -eq 0 ]; then
- echo "$line" | cut -d "#" -f 1 | sed 's/\/\/ .*//g' | sed 's/<?.*?>//g' | sed 's/\[ <\([-A-Za-z0-9 ]*\)> \]/\[ \"<\1>\" \]/g' | sed 's/ <\(.*\)>:/ \"<\1>\":/g' | sed 's/: <\(.*\)>/: \"<\1>\"/g' | sed 's/ \.\.\./ \"placeholder\": \"value\"/g' | sed 's/, \.\.\. / /g' | sed 's/ <\(.*\)>/ \"placeholder\": \"value\"/g' | sed 's/ <\(.*\)>/ \"placeholder\"/g' >> $work_file
- else
- # if file is .rst the following replace in line are done:
- # 1. delete everything after '#'
- # 2. delete everything after //
- # 3. ignore <?include?>
- echo "$line" | cut -d "#" -f 1 | sed 's/\/\/ .*//g' | sed 's/<?.*?>//g' >> $work_file
- fi
- fi
- fi
- done <<< $(cat $file | tr '\n' '\r' | sed -r 's/,[[:blank:]]*\r[[:blank:]]*\.\.\.//g' | sed -r 's/\\[[:blank:]]*\r[[:blank:]]*//g' | tr '\r' '\n')
- if [ $comment -eq 0 -a $json -eq 1 ]; then
- # if the file ended but the parser is processing a json structure
- cat $work_file | jq . > /dev/null
- if [ $? -ne 0 ]; then
- # if the jq tool returned error
- echo "file $file contains invalid JSON near line $line_num"
- echo "===start of JSON block==="
- cat $work_file
- echo "====end of JSON block===="
- exit_code=1
- fi
- fi
+# shellcheck disable=SC2086
+# SC2086: Double quote to prevent globbing and word splitting.
+# Reason: There may be multiple files in ${files} so we explicitly want it expanded to not be treated as a single long file name.
+files=$(find ${files} -type f \( -name '*.rst' -or -name '*.json' \) -and -not -path '*/_build/*' -and -not -path '*/man/*' | sort -uV)
+for file in ${files}; do
+ json=0
+ comment=0
+ line_num=0
+ echo "processing: $file"
+ IFS=
+ content=$(tr '\n' '\r' < "${file}" | sed -r 's/,[[:blank:]]*\r[[:blank:]]*\.\.\.//g' | sed -r 's/\\[[:blank:]]*\r[[:blank:]]*//g' | tr '\r' '\n')
+ stop_at=$(echo "${content}" | wc -l)
+ while true; do
+ line_num=$((line_num + 1))
+ if test "${line_num}" -gt "${stop_at}"; then
+ break
+ fi
+ line=$(echo "${content}" | head -n "${line_num}" | tail -n 1)
+ if [ $comment -eq 0 ] && [ $json -eq 0 ] && [ "$(echo "$line" | grep -c "^[A-Za-z]+\|^\s*\`")" -eq 1 ]; then
+ # ignore line if it starts with 'A-Za-z' or spaces followed by '`'
+ continue
+ elif [ $comment -eq 0 ] && [ "$(echo "$line" | grep "/\*" | grep -cv "\*/")" -eq 1 ]; then
+ # if the line contains /* and it does not contain */ on the same line
+ comment=1
+ echo >> "${work_file}"
+ continue
+ elif [ $comment -eq 1 ] && [ "$(echo "$line" | grep -c "\*/")" -eq 1 ]; then
+ # if the line contains */
+ comment=0
+ echo >> "${work_file}"
+ continue
+ elif [ $comment -eq 0 ] && [ $json -eq 0 ] && [ "$(echo "$line" | grep "^\s*{\|^\s*\".*{\|^\s*\[\s*$" | grep -cv "}")" -eq 1 ]; then
+ # if this is not a comment and the line starts with spaces followed by '{' or by '"' followed by "{"
+ json=1
+ # ignore any map name before top level map
+ line=$(echo "$line" | sed 's/.*{/{/g')
+ echo > "${work_file}"
+ elif [ $comment -eq 0 ] && [ $json -eq 1 ] && [ "$(echo "$line" | grep -c "^\s*[A-Za-z]\|^\s*\`")" -eq 1 ]; then
+ # if the line is not a comment and the line starts with spaces followed by 'A-Za-z' or followed by "`" and the parser is processing a json structure
+ json=0
+ if ! jq . "${work_file}" > /dev/null; then
+ # if the jq tool returned error
+ echo "file $file contains invalid JSON near line $line_num"
+ echo "===start of JSON block==="
+ cat "${work_file}"
+ echo "====end of JSON block===="
+ exit_code=1
+ fi
+ fi
+ if [ $comment -eq 0 ] && [ $json -eq 1 ]; then
+ if [ "$(echo "$line" | grep -c "^\s*\.\.\s")" -eq 1 ]; then
+ echo >> "${work_file}"
+ else
+ # if file is .json the following replace in line are done:
+ # 1. delete everything after '#'
+ # 2. delete everything after //
+ # 3. ignore <?include?>
+ # 4. replace all '[ <DATA> ]' with '[ "<DATA>" ]' where DATA contains: '-' and 'A-Za-z0-9' and ' '
+ # 5. replace all ' <DATA>:' with ' "<DATA>":'
+ # 6. replace all ': <DATA>' with ': "<DATA>"'
+ # 7. replace ' ...' with ' "placeholder": "value"
+ # 8. replace ', ... ' with ' '
+ # 9. replace ' <DATA>' with ' "placeholder": "value"'
+ # 10. replace ' <DATA>' with ' "placeholder"'
+ if [ "$(echo "$file" | grep -c "\.json")" -eq 0 ]; then
+ echo "$line" | cut -d "#" -f 1 | sed 's/\/\/ .*//g' | sed 's/<?.*?>//g' | sed 's/\[ <\([-A-Za-z0-9 ]*\)> \]/\[ \"<\1>\" \]/g' | sed 's/ <\(.*\)>:/ \"<\1>\":/g' | sed 's/: <\(.*\)>/: \"<\1>\"/g' | sed 's/ \.\.\./ \"placeholder\": \"value\"/g' | sed 's/, \.\.\. / /g' | sed 's/ <\(.*\)>/ \"placeholder\": \"value\"/g' | sed 's/ <\(.*\)>/ \"placeholder\"/g' >> "${work_file}"
+ else
+ # if file is .rst the following replace in line are done:
+ # 1. delete everything after '#'
+ # 2. delete everything after //
+ # 3. ignore <?include?>
+ echo "$line" | cut -d "#" -f 1 | sed 's/\/\/ .*//g' | sed 's/<?.*?>//g' >> "${work_file}"
+ fi
+ fi
+ fi
+ done
+ if [ $comment -eq 0 ] && [ $json -eq 1 ]; then
+ # if the file ended but the parser is processing a json structure
+ if ! jq . "${work_file}" > /dev/null; then
+ # if the jq tool returned error
+ echo "file $file contains invalid JSON near line $line_num"
+ echo "===start of JSON block==="
+ cat "${work_file}"
+ echo "====end of JSON block===="
+ exit_code=1
+ fi
+ fi
done
-rm $work_file
+rm "${work_file}"
exit ${exit_code}
diff --git a/tools/check-lib-dependencies.sh b/tools/check-lib-dependencies.sh
index b2ae9acdcf..be91bb8ef3 100755
--- a/tools/check-lib-dependencies.sh
+++ b/tools/check-lib-dependencies.sh
@@ -1,48 +1,44 @@
-#!/bin/bash
-# extract folder name containing file
-#
-# param ${1} file name
-# return folder name
-extract_folder_name() {
- # return name of the file until last '/'
- echo "$(echo "${1}" | rev | cut -d '/' -f 2- | rev)"
-}
+set -eu
+
+script_path=$(cd "$(dirname "${0}")" && pwd)
+top_srcdir=$(realpath "${script_path}/..")
# extract all includes found in source files found in the same folder as specified Makefile.am
#
# param ${1} path to a Makefile.am
# return all dependencies libs in the order of compilation
extract_includes() {
- # extract folder name from current library Makefile.am
- CURRENT_FOLDER=$(extract_folder_name "${1}")"/"
- # select only files in current folder
- SEARCH_FILES=$(echo "${FILE_LIST}" | grep "${CURRENT_FOLDER}")
- # select all lines containing '#include ' directive
- RAW_INCLUDES_LIST=$(echo "${SEARCH_FILES}" | xargs grep "^#include " 2>/dev/null)
- # filter only included dependencies found in other libraries by using the form 'other_lib_name/header_file.h'
- # to do this it is required to select the string between '<' and '>', searching for '/' character and returning the name until last '/'
- RAW_INCLUDES_LIST=$(echo "${RAW_INCLUDES_LIST}" | cut -d "#" -f 2 | tr "\"" " " | cut -d "<" -f 2 | cut -d ">" -f 1 | grep "\/" | rev | cut -d "/" -f 2 | rev | sort | uniq)
- # filter includes that are not compiled by the project's Makefiles
- INCLUDES_LIST=
- for i in ${LIBRARIES_LIST}; do
- for j in ${RAW_INCLUDES_LIST}; do
- if test "${j}" = "${i}"; then
- INCLUDES_LIST="${i} ${INCLUDES_LIST}"
- break
- fi
- done
- done
- # remove empty spaces
- INCLUDES_LIST=$(echo ${INCLUDES_LIST} | tr -s " ")
- # order dependencies in the order of compilation
- FILTERED_INCLUDES_LIST=
- for i in ${LIBRARIES_LIST}; do
- if test $(echo "${INCLUDES_LIST}" | grep "\b${i}\b" | wc -l) -ne 0; then
- FILTERED_INCLUDES_LIST="${i} ${FILTERED_INCLUDES_LIST}"
- fi
- done
- echo "${FILTERED_INCLUDES_LIST}"
+ # extract folder name from current library Makefile.am
+ CURRENT_FOLDER=$(dirname "${1}")"/"
+ # select only files in current folder
+ SEARCH_FILES=$(echo "${FILE_LIST}" | grep "${CURRENT_FOLDER}")
+ # select all lines containing '#include ' directive
+ RAW_INCLUDES_LIST=$(echo "${SEARCH_FILES}" | xargs grep "^#include " 2>/dev/null)
+ # filter only included dependencies found in other libraries by using the form 'other_lib_name/header_file.h'
+ # to do this it is required to select the string between '<' and '>', searching for '/' character and returning the name until
last '/' + RAW_INCLUDES_LIST=$(echo "${RAW_INCLUDES_LIST}" | cut -d "#" -f 2 | tr "\"" " " | cut -d "<" -f 2 | cut -d ">" -f 1 | grep "/" | rev | cut -d "/" -f 2 | rev | sort -u) + # filter includes that are not compiled by the project's Makefiles + INCLUDES_LIST= + for i in ${LIBRARIES_LIST}; do + for j in ${RAW_INCLUDES_LIST}; do + if test "${j}" = "${i}"; then + INCLUDES_LIST="${i} ${INCLUDES_LIST}" + break + fi + done + done + # remove empty spaces + INCLUDES_LIST=$(echo "${INCLUDES_LIST}" | tr -s " ") + # order dependencies in the order of compilation + FILTERED_INCLUDES_LIST= + for i in ${LIBRARIES_LIST}; do + if test "$(echo "${INCLUDES_LIST}" | grep -c "\b${i}\b")" -ne 0; then + FILTERED_INCLUDES_LIST="${i} ${FILTERED_INCLUDES_LIST}" + fi + done + echo "${FILTERED_INCLUDES_LIST}" } # extract all header only files and headers and source files found in the external library required by specified library @@ -50,50 +46,50 @@ extract_includes() { # param ${2} name of the external dependency library required by current library # return the list of header only files as 'HEADERS: heaser1.h header2.h' and header and source files as 'HEADERS_AND_SOURCES: source1.h source1.cc source2.h source2.cpp' extract_non_include_files() { - # extract folder name for current library Makefile.am - CURRENT_FOLDER=$(extract_folder_name "src/lib/${1}/Makefile.am")"/" - # extract folder name for external dependency library Makefile.am - EXTERNAL_FOLDER=$(extract_folder_name "src/lib/${2}/Makefile.am")"/" - # select only files in current folder - SEARCH_FILES=$(echo "${FILE_LIST}" | grep "${CURRENT_FOLDER}") - HEADERS_LIST= - NON_HEADERS_LIST= - # select all lines containing '#include ' directive - RAW_INCLUDES_LIST=$(echo "${SEARCH_FILES}" | xargs grep "^#include " 2>/dev/null) - # filter only included headers found in other libraries by using the form 'other_lib_name/header_file.h' - # to do this it is required to select the string between '<' and '>', searching for '/' character, search for the extension marker '.' and returning the name after last '/' - RAW_INCLUDES_LIST=$(echo "${RAW_INCLUDES_LIST}" | cut -d "#" -f 2 | tr "\"" " " | cut -d "<" -f 2 | cut -d ">" -f 1 | grep "\/" | grep "\b${2}\b" | cut -d "/" -f 2 | grep "\." | sort | uniq) - # select only files in dependency library folder and strip full path - RELATIVE_SEARCH_FILES=$(echo "${FILE_LIST}" | grep "${EXTERNAL_FOLDER}" | sed -e "s#${REPO_FOLDER}${EXTERNAL_FOLDER}##g") - # search for the header file but also for source files - for i in ${RAW_INCLUDES_LIST}; do - # filter by name only (no extension) - FILTER=$(echo "${i}" | cut -d "." -f 1) - # filter non header files with exact name of the header file without the extension - NON_HEADER=$(echo "${RELATIVE_SEARCH_FILES}" | grep "\b${FILTER}\." 
| grep -v "${i}") - if test $(echo "${NON_HEADER}" | wc -w) -ne 0; then - # append header and source file names - NON_HEADERS_LIST="${i} ${NON_HEADER} ${NON_HEADERS_LIST}" - else - # append header only file name - HEADERS_LIST="${i} ${HEADERS_LIST}" - fi - done - # sort header only files - HEADERS_LIST=$(echo ${HEADERS_LIST} | tr -s " " | sort | uniq) - # sort header and source files - NON_HEADERS_LIST=$(echo ${NON_HEADERS_LIST} | tr -s " " | sort | uniq) - echo "HEADERS_AND_SOURCES:${NON_HEADERS_LIST}" - echo "HEADERS:${HEADERS_LIST}" + # extract folder name for current library Makefile.am + CURRENT_FOLDER=$(dirname "src/lib/${1}/Makefile.am")"/" + # extract folder name for external dependency library Makefile.am + EXTERNAL_FOLDER=$(dirname "src/lib/${2}/Makefile.am")"/" + # select only files in current folder + SEARCH_FILES=$(echo "${FILE_LIST}" | grep "${CURRENT_FOLDER}") + HEADERS_LIST= + NON_HEADERS_LIST= + # select all lines containing '#include ' directive + RAW_INCLUDES_LIST=$(echo "${SEARCH_FILES}" | xargs grep "^#include " 2>/dev/null) + # filter only included headers found in other libraries by using the form 'other_lib_name/header_file.h' + # to do this it is required to select the string between '<' and '>', searching for '/' character, search for the extension marker '.' and returning the name after last '/' + RAW_INCLUDES_LIST=$(echo "${RAW_INCLUDES_LIST}" | cut -d "#" -f 2 | tr "\"" " " | cut -d "<" -f 2 | cut -d ">" -f 1 | grep "/" | grep "\b${2}\b" | cut -d "/" -f 2 | grep "\." | sort -u) + # select only files in dependency library folder and strip full path + RELATIVE_SEARCH_FILES=$(echo "${FILE_LIST}" | grep "${EXTERNAL_FOLDER}" | sed -e "s#${EXTERNAL_FOLDER}##g") + # search for the header file but also for source files + for i in ${RAW_INCLUDES_LIST}; do + # filter by name only (no extension) + FILTER=$(echo "${i}" | cut -d "." -f 1) + # filter non header files with exact name of the header file without the extension + NON_HEADER=$(echo "${RELATIVE_SEARCH_FILES}" | grep "\b${FILTER}\." 
| grep -v "${i}") + if test "$(echo "${NON_HEADER}" | wc -w)" -ne 0; then + # append header and source file names + NON_HEADERS_LIST="${i} ${NON_HEADER} ${NON_HEADERS_LIST}" + else + # append header only file name + HEADERS_LIST="${i} ${HEADERS_LIST}" + fi + done + # sort header only files + HEADERS_LIST=$(echo "${HEADERS_LIST}" | tr -s " " | sort -u) + # sort header and source files + NON_HEADERS_LIST=$(echo "${NON_HEADERS_LIST}" | tr -s " " | sort -u) + echo "HEADERS_AND_SOURCES:${NON_HEADERS_LIST}" + echo "HEADERS:${HEADERS_LIST}" } # extract all valid dependencies of a specified library # # param ${1} list of all libraries in the reverse compilation order # param ${2} library name for which the dependency list is computed -# return the list of dependencies for specified library in the reverse compilation order +# return the list of dependencies for specified library in the reverse compilation order extract_dependencies() { - echo "${1}" | grep -Eo "\b${2}\b.*$" + echo "${1}" | grep -Eo "\b${2}\b.*$" } # extract computed dependency for specified library @@ -102,29 +98,29 @@ extract_dependencies() { # param ${2} library path for which the dependency list is retrieved # return stored value of computed dependencies or 'NONE' if dependencies have not been computed yet extract_computed_dependencies() { - PATH_TO_NAME=$(echo "${2}" | tr -s "/" "_") - NAME="COMPUTED_DEPENDENCIES_${PATH_TO_NAME}_${1}" - if test -n "${!NAME+x}"; then - echo "${!NAME}" - else - echo "NONE" - fi + PATH_TO_NAME=$(echo "${2}" | tr -s "/" "_") + NAME="COMPUTED_DEPENDENCIES_${PATH_TO_NAME}_${1}" + if test -n "$(eval "echo \"\${$NAME+x}\"")"; then + eval "echo \"\${$NAME}\"" + else + echo "NONE" + fi } # extract library directive # # param ${1} artifact path extract_library_directive() { - ARTIFACT_PATH="${1}" - echo `cat ${ARTIFACT_PATH}/Makefile.am | grep "LIBADD\|LDADD" | sort | tr -s ' ' | cut -d " " -f 1 | sort -u` + ARTIFACT_PATH="${1}" + grep 'LIBADD\|LDADD' "${ARTIFACT_PATH}/Makefile.am" | sort | tr -s ' ' | cut -d " " -f 1 | sort -u | tr '\n' ' ' | sed 's/ *$//' } # extract library name # # param ${1} artifact path extract_library_name() { - ARTIFACT_PATH="${1}" - echo `cat ${ARTIFACT_PATH}/Makefile.am | grep "LIBRARIES" | tr -s ' ' | cut -d " " -f 3` + ARTIFACT_PATH="${1}" + grep 'LIBRARIES' "${ARTIFACT_PATH}/Makefile.am" | grep "LIBRARIES" | tr -s ' ' | cut -d " " -f 3 } # compute artifact dependencies @@ -132,115 +128,107 @@ extract_library_name() { # param ${1} artifact name # param ${2} artifact path compute_dependencies() { - ARTIFACT="${1}" - ARTIFACT_PATH="${2}" - echo "" - echo "########################################" - echo "### ${ARTIFACT_PATH}/${ARTIFACT}" - echo "########################################" - echo "" - # all valid dependencies that can be added by each dependency library - echo "${ARTIFACT_PATH}/${ARTIFACT} valid dependencies:" - echo "${VALID_LIST}" - # detect dependencies errors by searching for dependencies that are compiled after the current library and can generate missing symbols - NON_RECURSIVE_BASE_DEPENDENCIES= - for j in ${BASE_DEPENDENCIES}; do - # only add the dependency if it is in the valid dependencies list to prevent infinite recursion and log the error otherwise - if test $(echo "${VALID_LIST}" | grep "\b${j}\b" | wc -l) -eq 0; then - # search for external header and source files - INVALID_EXTERNAL_DEPENDENCIES=$(extract_non_include_files "${ARTIFACT}" "${j}") - # filter header only external files - EXTERNAL_HEADERS=$(echo "${INVALID_EXTERNAL_DEPENDENCIES}" | grep 
"HEADERS:" | cut -d ":" -f 2) - # filter header and source external files - EXTERNAL_ALL=$(echo "${INVALID_EXTERNAL_DEPENDENCIES}" | grep "HEADERS_AND_SOURCES:" | cut -d ":" -f 2) - echo "### ERROR ### dependencies ERROR for ${ARTIFACT_PATH}/${ARTIFACT} on ${j} with:" - # if there are any header only external files - if test $(echo "${EXTERNAL_ALL}" | wc -w) -ne 0; then - echo "non header only files: ${EXTERNAL_ALL}" - fi - # if there are any header and source external files - if test $(echo "${EXTERNAL_HEADERS}" | wc -w) -ne 0; then - echo "header only files: ${EXTERNAL_HEADERS}" - fi - else - # don't add current library to it's dependencies list - if test ${j} != ${ARTIFACT}; then - NON_RECURSIVE_BASE_DEPENDENCIES="${NON_RECURSIVE_BASE_DEPENDENCIES} ${j}" - fi - fi - done - # all found dependencies in the reverse compilation order - BASE_DEPENDENCIES=$(echo "${BASE_DEPENDENCIES}" | xargs) - # all found and valid dependencies in the reverse compilation order - NON_RECURSIVE_BASE_DEPENDENCIES=$(echo "${NON_RECURSIVE_BASE_DEPENDENCIES}" | xargs) - echo "${ARTIFACT_PATH}/${ARTIFACT} base dependencies:" - echo "${BASE_DEPENDENCIES}" - echo "${ARTIFACT_PATH}/${ARTIFACT} non recursive dependencies:" - echo "${NON_RECURSIVE_BASE_DEPENDENCIES}" - # minimum set of dependencies for current library - DEPENDENCIES= - for j in ${NON_RECURSIVE_BASE_DEPENDENCIES}; do - NEW_DEPENDENCIES=$(extract_computed_dependencies "${j}" "src/lib") - if test "${NEW_DEPENDENCIES}" == "NONE"; then - echo "### ERROR ### computed dependency not found for ${j}" - else - DEPENDENCIES="${NEW_DEPENDENCIES} ${DEPENDENCIES}" - fi - done - DEPENDENCIES=$(echo "${DEPENDENCIES} ${NON_RECURSIVE_BASE_DEPENDENCIES}" | tr -s " " "\n" | sort | uniq | xargs) - # order dependencies in the order of compilation - SORTED_DEPENDENCIES= - for j in ${LIBRARIES_LIST}; do - if test $(echo "${DEPENDENCIES}" | grep "\b${j}\b" | wc -l) -ne 0; then - SORTED_DEPENDENCIES="${j} ${SORTED_DEPENDENCIES}" - fi - done - echo "${ARTIFACT_PATH}/${ARTIFACT} minimum dependencies:" - echo "${SORTED_DEPENDENCIES}" - echo "" - echo "++++++++++++++++++++++++++++++++++++++++" - ARTIFACT_DIRECTIVE=$(extract_library_directive ${ARTIFACT_PATH}/${ARTIFACT}) - for j in ${SORTED_DEPENDENCIES}; do - DEPENDENCY_LIBRARY_NAME=$(extract_library_name "src/lib/${j}") - echo "${ARTIFACT_DIRECTIVE} += \$(top_builddir)/src/lib/${j}/${DEPENDENCY_LIBRARY_NAME}" - done - echo "++++++++++++++++++++++++++++++++++++++++" - echo "########################################" - echo "" + ARTIFACT="${1}" + ARTIFACT_PATH="${2}" + echo "" + echo "########################################" + echo "### ${ARTIFACT_PATH}/${ARTIFACT}" + echo "########################################" + echo "" + # all valid dependencies that can be added by each dependency library + echo "${ARTIFACT_PATH}/${ARTIFACT} valid dependencies:" + echo "${VALID_LIST}" + # detect dependencies errors by searching for dependencies that are compiled after the current library and can generate missing symbols + NON_RECURSIVE_BASE_DEPENDENCIES= + for j in ${BASE_DEPENDENCIES}; do + # only add the dependency if it is in the valid dependencies list to prevent infinite recursion and log the error otherwise + if test "$(echo "${VALID_LIST}" | grep -c "\b${j}\b")" -eq 0; then + # search for external header and source files + INVALID_EXTERNAL_DEPENDENCIES=$(extract_non_include_files "${ARTIFACT}" "${j}") || true + # filter header only external files + EXTERNAL_HEADERS=$(echo "${INVALID_EXTERNAL_DEPENDENCIES}" | grep "HEADERS:" | cut -d 
":" -f 2) + # filter header and source external files + EXTERNAL_ALL=$(echo "${INVALID_EXTERNAL_DEPENDENCIES}" | grep "HEADERS_AND_SOURCES:" | cut -d ":" -f 2) + echo "### ERROR ### dependencies ERROR for ${ARTIFACT_PATH}/${ARTIFACT} on ${j} with:" + # if there are any header only external files + if test "$(echo "${EXTERNAL_ALL}" | wc -w)" -ne 0; then + echo "non header only files: ${EXTERNAL_ALL}" + fi + # if there are any header and source external files + if test "$(echo "${EXTERNAL_HEADERS}" | wc -w)" -ne 0; then + echo "header only files: ${EXTERNAL_HEADERS}" + fi + else + # don't add current library to it's dependencies list + if test "${j}" != "${ARTIFACT}"; then + NON_RECURSIVE_BASE_DEPENDENCIES="${NON_RECURSIVE_BASE_DEPENDENCIES} ${j}" + fi + fi + done + # all found dependencies in the reverse compilation order + BASE_DEPENDENCIES=$(echo "${BASE_DEPENDENCIES}" | xargs) + # all found and valid dependencies in the reverse compilation order + NON_RECURSIVE_BASE_DEPENDENCIES=$(echo "${NON_RECURSIVE_BASE_DEPENDENCIES}" | xargs) + echo "${ARTIFACT_PATH}/${ARTIFACT} base dependencies:" + echo "${BASE_DEPENDENCIES}" + echo "${ARTIFACT_PATH}/${ARTIFACT} non recursive dependencies:" + echo "${NON_RECURSIVE_BASE_DEPENDENCIES}" + # minimum set of dependencies for current library + DEPENDENCIES= + for j in ${NON_RECURSIVE_BASE_DEPENDENCIES}; do + NEW_DEPENDENCIES=$(extract_computed_dependencies "${j}" "src/lib") + if test "${NEW_DEPENDENCIES}" == "NONE"; then + echo "### ERROR ### computed dependency not found for ${j}" + else + DEPENDENCIES="${NEW_DEPENDENCIES} ${DEPENDENCIES}" + fi + done + DEPENDENCIES=$(echo "${DEPENDENCIES} ${NON_RECURSIVE_BASE_DEPENDENCIES}" | tr -s " " "\n" | sort -u | xargs) + # order dependencies in the order of compilation + SORTED_DEPENDENCIES= + for j in ${LIBRARIES_LIST}; do + if test "$(echo "${DEPENDENCIES}" | grep -c "\b${j}\b")" -ne 0; then + SORTED_DEPENDENCIES="${j} ${SORTED_DEPENDENCIES}" + fi + done + SORTED_DEPENDENCIES=$(echo "${SORTED_DEPENDENCIES}" | sed 's/ *$//g') + echo "${ARTIFACT_PATH}/${ARTIFACT} minimum dependencies:" + echo "${SORTED_DEPENDENCIES}" + echo "" + echo "++++++++++++++++++++++++++++++++++++++++" + ARTIFACT_DIRECTIVE=$(extract_library_directive "${ARTIFACT_PATH}/${ARTIFACT}") + for j in ${SORTED_DEPENDENCIES}; do + DEPENDENCY_LIBRARY_NAME=$(extract_library_name "src/lib/${j}") + echo "${ARTIFACT_DIRECTIVE} += \$(top_builddir)/src/lib/${j}/${DEPENDENCY_LIBRARY_NAME}" + done + echo "++++++++++++++++++++++++++++++++++++++++" + echo "########################################" + echo "" } -# if wrong number of parameters print usage -if test ${#} -ne 1; then - echo "Usage: ${0} path/to/kea/repo" - exit -fi - -# folder containing full repo -REPO_FOLDER=${1} - -if test $(echo -n ${REPO_FOLDER} | tail -c 1) != "/"; then - REPO_FOLDER="${REPO_FOLDER}/" -fi +# Folder containing full repo. Default is "tools/.." +REPO_FOLDER="${1-${top_srcdir}}" +cd "${REPO_FOLDER}" # filter all Makefile.am files -MAKEFILES_LIST=$(find ${REPO_FOLDER} | grep "Makefile\.am" | sed -e "s#${REPO_FOLDER}##g" | grep "src\/" | sort) +MAKEFILES_LIST=$(find . 
-type f -wholename '*src/*Makefile.am' | sed 's#\./##g' | sort) # if no Makefile.am found exit if test -z "${MAKEFILES_LIST}"; then - echo "invalid repo path: no Makefile.am file found" - exit + echo "invalid repo path: no Makefile.am file found" + exit fi echo "list of Makefile.am:" echo "${MAKEFILES_LIST}" # base Makefile.am for all sources is in src/lib/Makefile.am -BASE_MAKEFILE=$(echo "${MAKEFILES_LIST}" | grep "src\/lib\/Makefile.am") +BASE_MAKEFILE=$(echo "${MAKEFILES_LIST}" | grep "src/lib/Makefile.am") # if no src/lib/Makefile.am found exit -if test -z ${BASE_MAKEFILE}; then - echo "invalid repo path: no src/lib/Makefile.am file found" - exit +if test -z "${BASE_MAKEFILE}"; then + echo "invalid repo path: no src/lib/Makefile.am file found" + exit fi echo "base Makefile.am:" @@ -248,19 +236,11 @@ echo "${BASE_MAKEFILE}" # generate the list of libraries in the compilation order LIBRARIES_LIST= -RAW_LIBRARIES_LIST=$(cat "${REPO_FOLDER}${BASE_MAKEFILE}" | grep "SUBDIRS") -for i in ${RAW_LIBRARIES_LIST}; do - LIBRARIES_LIST="${LIBRARIES_LIST} $(echo ${i} | grep -v "SUBDIRS" | grep -v '=')" -done - -# remove empty spaces -LIBRARIES_LIST=$(echo "${LIBRARIES_LIST}" | tr -s ' ' | xargs) +RAW_LIBRARIES_LIST=$(grep 'SUBDIRS' "${BASE_MAKEFILE}") +LIBRARIES_LIST=$(echo "${RAW_LIBRARIES_LIST}" | tr ' ' '\n' | grep -v SUBDIRS | grep -v '=' | tr '\n' ' ' | sed 's/ *$//') # generate the list of libraries in the reverse compilation order -REVERSE_LIBRARIES_LIST= -for i in ${LIBRARIES_LIST}; do - REVERSE_LIBRARIES_LIST="${i} ${REVERSE_LIBRARIES_LIST}" -done +REVERSE_LIBRARIES_LIST=$(echo "${LIBRARIES_LIST}" | tr ' ' '\n' | tac | tr '\n' ' ' | sed 's/ *$//') echo "list of libraries:" echo "${LIBRARIES_LIST}" @@ -270,7 +250,7 @@ echo "${REVERSE_LIBRARIES_LIST}" # filter all files of interest ignoring irrelevant ones # ignore .git, .libs, .deps doc folders and .o .lo .Plo .Po .gcno .gcda .m4 .dox .json .mes files -FILE_LIST=$(find "${REPO_FOLDER}" 2>/dev/null | grep -v "\.git" | grep -v "\/\.libs\/" | grep -v "\.o$" | grep -v "\/\.deps\/" | grep -v "\.lo$" | grep -v "\.Plo$" | grep -v "\.Po$" | grep -v "\.gcno$" | grep -v "gcda" | grep -v "\.m4$" | grep -v "\.dox$" | grep -v "\.json$" | grep -v "\/doc\/" | grep -v "\.mes$" | sort) +FILE_LIST=$(find . 
2>/dev/null | grep -v "\.git" | grep -v "/\.libs/" | grep -v "\.o$" | grep -v "/\.deps/" | grep -v "\.lo$" | grep -v "\.Plo$" | grep -v "\.Po$" | grep -v "\.gcno$" | grep -v "gcda" | grep -v "\.m4$" | grep -v "\.dox$" | grep -v "\.json$" | grep -v "/doc/" | grep -v "\.mes$" | sort) #echo "files:" #echo "${FILE_LIST}" @@ -279,41 +259,36 @@ BASE_LIBRARIES_MAKEFILES= # generate the list of dependencies for all libraries in src/lib for i in ${LIBRARIES_LIST}; do - # generate current library Makefile.am path - BASE_LIBRARIES_MAKEFILES="${BASE_LIBRARIES_MAKEFILES} src/lib/${i}/Makefile.am" - # extract dependencies found in the library folder - BASE_DEPENDENCIES=$(extract_includes "src/lib/${i}/Makefile.am") - # generate the list of valid dependencies for the current library (take compilation order into account) - VALID_LIST=$(extract_dependencies "${REVERSE_LIBRARIES_LIST}" "${i}") - compute_dependencies "${i}" "src/lib" - PATH_TO_NAME=$(echo "src/lib" | tr -s "/" "_") - declare COMPUTED_DEPENDENCIES_${PATH_TO_NAME}_${i}="${SORTED_DEPENDENCIES}" + # generate current library Makefile.am path + BASE_LIBRARIES_MAKEFILES="${BASE_LIBRARIES_MAKEFILES} src/lib/${i}/Makefile.am" + # extract dependencies found in the library folder + BASE_DEPENDENCIES=$(extract_includes "src/lib/${i}/Makefile.am") || true + # generate the list of valid dependencies for the current library (take compilation order into account) + VALID_LIST=$(extract_dependencies "${REVERSE_LIBRARIES_LIST}" "${i}") + compute_dependencies "${i}" "src/lib" + PATH_TO_NAME=$(echo "src/lib" | tr -s "/" "_") + export "COMPUTED_DEPENDENCIES_${PATH_TO_NAME}_${i}=${SORTED_DEPENDENCIES}" done # remove empty spaces -BASE_LIBRARIES_MAKEFILES=$(echo "${BASE_LIBRARIES_MAKEFILES}" | xargs | tr -s " " "\n") +BASE_LIBRARIES_MAKEFILES=$(echo "${BASE_LIBRARIES_MAKEFILES}" | sed 's/ *$//' | tr ' ' '\n') -echo "base Makefiles.am files:" +echo "base Makefile.am files:" echo "${BASE_LIBRARIES_MAKEFILES}" OTHER_MAKEFILES=$(echo "${MAKEFILES_LIST}" | tr -s " " "\n" | grep -v "src/lib/" | grep -v "src/share/" | grep -v "src/Makefile.am") -# remove empty spaces -OTHER_MAKEFILES=$(echo "${OTHER_MAKEFILES}" | xargs | tr -s " " "\n") echo "remaining Makefile.am files:" echo "${OTHER_MAKEFILES}" for i in ${OTHER_MAKEFILES}; do - # extract dependencies found in the artifact folder - BASE_DEPENDENCIES=$(extract_includes "${i}") - # generate the list of valid dependencies for the current artifact (take compilation order into account) - VALID_LIST="${REVERSE_LIBRARIES_LIST}" - ARTIFACT=$(echo "${i}" | rev | cut -d "/" -f 2 | rev) - ARTIFACT_PATH=$(echo "${i}" | rev | cut -d "/" -f 3- | rev) - compute_dependencies "${ARTIFACT}" "${ARTIFACT_PATH}" - PATH_TO_NAME=$(echo "${ARTIFACT_PATH}" | tr -s "/" "_") - declare COMPUTED_DEPENDENCIES_${PATH_TO_NAME}_${ARTIFACT}="${SORTED_DEPENDENCIES}" + # extract dependencies found in the artifact folder + BASE_DEPENDENCIES=$(extract_includes "${i}") || true + # generate the list of valid dependencies for the current artifact (take compilation order into account) + VALID_LIST="${REVERSE_LIBRARIES_LIST}" + ARTIFACT=$(echo "${i}" | rev | cut -d "/" -f 2 | rev) + ARTIFACT_PATH=$(echo "${i}" | rev | cut -d "/" -f 3- | rev) + compute_dependencies "${ARTIFACT}" "${ARTIFACT_PATH}" + PATH_TO_NAME=$(echo "${ARTIFACT_PATH}" | tr -s "/" "_") + export "COMPUTED_DEPENDENCIES_${PATH_TO_NAME}_${ARTIFACT}=${SORTED_DEPENDENCIES}" done - -exit - diff --git a/tools/clang-format.sh b/tools/clang-format.sh index 08aafc3f09..3ff718a21e 100755 --- 
a/tools/clang-format.sh
+++ b/tools/clang-format.sh
@@ -22,6 +22,9 @@ extensions_regex='(\.cpp|\.cc|\.C|\.cxx|\.m|\.hpp|\.hh|\.h|\.H|\.hxx|\.tpp)$'
# Print usage.
print_usage() {
+ # shellcheck disable=SC2016
+ # SC2016: Expressions don't expand in single quotes, use double quotes for that.
+ # Reason: $directory and $file should be displayed ad litteram. This way, it is expressed that a parameter is expected there.
printf \
'Usage: %s {{options}}
Options:
@@ -56,7 +59,7 @@ script_path=$(cd "$(dirname "${0}")" && pwd)
list_of_files=
if ${changed-false}; then
- list_of_files=$(git diff $(git merge-base origin/master HEAD) --name-only | grep -E "${extensions_regex}")
+ list_of_files=$(git diff --name-only "$(git merge-base origin/master HEAD)" | grep -E "${extensions_regex}")
elif test ${#} = 0; then
# Use current directory when called without an argument.
set -- .
@@ -98,7 +101,8 @@ while test ${#} -gt 0 || test -n "${list_of_files}"; do
if test -f "${file}"; then
# Format file.
# shellcheck disable=SC2046
- # We specifically want word splitting for the parameters.
+ # SC2046: Quote this to prevent word splitting.
+ # Reason: We specifically want word splitting for the parameters.
clang-format --style=file -i $(printf '%s' "${parameters}") "${file}"
elif test -d "${file}"; then
# Keep CWD for later use.
diff --git a/tools/extract_bnf.sh.in b/tools/extract_bnf.sh.in
index 8761b3db4c..02c69e3335 100644
--- a/tools/extract_bnf.sh.in
+++ b/tools/extract_bnf.sh.in
@@ -1,6 +1,6 @@
#!/bin/sh
-# Copyright (C) 2019-2022 Internet Systems Consortium, Inc. ("ISC")
+# Copyright (C) 2019-2024 Internet Systems Consortium, Inc. ("ISC")
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
@@ -40,7 +40,7 @@ if [ -f "${base}.yy" ]; then
# if your system is set to Polish, rather than "Terminals") and this will
# confuse our script.
LANG=en_US LANGUAGE=en_US @YACC@ -v "${base}.yy" -o output
- rm -f output output.h *.hh
+ rm -f output output.h ./*.hh
mv output.output /tmp/output
output=/tmp/output
else
@@ -57,7 +57,7 @@ fi
# - replace : by BNF ::=
# - squeeze multiple blank lines
-cat $output |\
+@AWK@ '{ print }' $output |\
@AWK@ '/^Terminal/ { exit }; // { print }' |\
@AWK@ '// { gsub("^ +[0-9]+ ", ""); print }' |\
@AWK@ '/^\$@[0-9]+:/ { next }; // { print }' |\
@@ -83,7 +83,10 @@ $header
:linenos:
EOF
- cat $output.2 | @AWK@ '/^.+$/ { print " ",$0 }; /^$/ { print } ' >> $output.3
+ # shellcheck disable=SC2016
+ # SC2016: Expressions don't expand in single quotes, use double quotes for that.
+ # Reason: we specifically do not want $0 to expand.
+ @AWK@ '/^.+$/ { print " ",$0 }; /^$/ { print } ' $output.2 >> $output.3
cat $output.3
else
cat $output.2
diff --git a/tools/shellcheck-all.sh b/tools/shellcheck-all.sh
index c1514c7197..3d80fef4fe 100755
--- a/tools/shellcheck-all.sh
+++ b/tools/shellcheck-all.sh
@@ -56,11 +56,9 @@ root_path=$(cd "$(dirname "${0}")/.." && pwd)
cd "${root_path}"
# Disable shellcheck warnings:
-# SC1117: Backslash is literal in "\/". Prefer explicit escaping: "\\/".
# SC2119: Use "$@" if function's $1 should mean script's $1.
-# SC2039: In POSIX sh, 'local' is undefined.
# SC3043: In POSIX sh, 'local' is undefined.
-shellcheck_opts="--exclude=SC1117 --exclude=SC2119 --exclude=SC2039 --exclude=SC3043"
+shellcheck_opts="--exclude=SC2119 --exclude=SC3043"
files="$(find . -type f -name '*.sh' -or -name '*.sh.in' | sort)"
@@ -75,7 +73,7 @@ for i in \
fi
done
-# shellcheck disable=SC2046
-# SC2046: Quote this to prevent word splitting.
+# shellcheck disable=SC2086
+# SC2086: Double quote to prevent globbing and word splitting.
# Reason: We explicitly want the parameters split.
shellcheck ${shellcheck_opts} ${files}
diff --git a/tools/uncrustify.sh b/tools/uncrustify.sh
index e4cdb5ba84..e03d320cf6 100755
--- a/tools/uncrustify.sh
+++ b/tools/uncrustify.sh
@@ -22,6 +22,9 @@ extensions_regex='(\.cpp|\.cc|\.C|\.cxx|\.m|\.hpp|\.hh|\.h|\.H|\.hxx|\.tpp)$'
# Print usage.
print_usage() {
+ # shellcheck disable=SC2016
+ # SC2016: Expressions don't expand in single quotes, use double quotes for that.
+ # Reason: $directory and $file should be displayed ad litteram. This way, it is expressed that a parameter is expected there.
printf \
'Usage: %s {{options}}
Options:
@@ -56,7 +59,7 @@ script_path=$(cd "$(dirname "${0}")" && pwd)
list_of_files=
if ${changed-false}; then
- list_of_files=$(git diff $(git merge-base origin/master HEAD) --name-only | grep -E "${extensions_regex}")
+ list_of_files=$(git diff --name-only "$(git merge-base origin/master HEAD)" | grep -E "${extensions_regex}")
elif test ${#} = 0; then
# Use current directory when called without an argument.
set -- .
@@ -98,7 +101,8 @@ while test ${#} -gt 0 || test -n "${list_of_files}"; do
if test -f "${file}"; then
# Format file.
# shellcheck disable=SC2046
- # We specifically want word splitting for the parameters.
+ # SC2046: Quote this to prevent word splitting.
+ # Reason: We specifically want word splitting for the parameters.
uncrustify -c "${script_path}/../.uncrustify.cfg" --replace $(printf '%s' "${parameters}") "${file}"
elif test -d "${file}"; then
# Keep CWD for later use.
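
Several of the hunks above rely on the same convention: when word splitting is intentional, the warning is disabled right above the command, with the SC code restated and a reason given. A self-contained sketch of that pattern (the flags variable is invented for the example):

    #!/bin/sh
    flags='-l -a'    # deliberately holds several separate options
    # shellcheck disable=SC2086
    # SC2086: Double quote to prevent globbing and word splitting.
    # Reason: ${flags} must stay unquoted so it splits into individual options.
    ls ${flags} /tmp > /dev/null && echo "listed /tmp with: ${flags}"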