[CI][doc][build] Trim trailing blanks from files in the scripts and sonic-slave-* folders (#15161)

- run the pre-commit tox profile to trim all trailing blanks
- split the change into several commits, one per folder,
  to ease their merge

Issue #15114

Signed-off-by: Guillaume Lambert <guillaume.lambert@orange.com>
Guilt 2023-05-24 18:25:12 +02:00 committed by GitHub
parent 4467f43449
commit 6745691eb5
16 changed files with 53 additions and 53 deletions
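
The changes below only strip whitespace. For readers who want to reproduce the trimming outside of the pre-commit tox profile, a rough one-liner sketch (not part of this commit; assumes GNU sed, and the file globs are only illustrative):

# Strip trailing spaces/tabs from tracked files under scripts/ and the sonic-slave-* folders.
git ls-files 'scripts/*' 'sonic-slave-*/*' | xargs sed -i 's/[[:space:]]*$//'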

View File

@ -1,6 +1,6 @@
#!/bin/bash
-[[ ! -z "${DBGOPT}" && $0 =~ ${DBGOPT} ]] && set -x
+[[ ! -z "${DBGOPT}" && $0 =~ ${DBGOPT} ]] && set -x
CONFIGURED_ARCH=$1
IMAGE_DISTRO=$2
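
The [[ ! -z "${DBGOPT}" && $0 =~ ${DBGOPT} ]] && set -x guard that recurs in these scripts enables shell tracing only when the script path matches a caller-supplied regex. A minimal standalone sketch of the same idiom (the script name in the example invocation is hypothetical):

#!/bin/bash
# Turn on xtrace only when DBGOPT is set and matches this script's path,
# e.g.  DBGOPT='build_debian' ./build_debian.sh
[[ ! -z "${DBGOPT}" && $0 =~ ${DBGOPT} ]] && set -x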

View File

@ -74,7 +74,7 @@ if [[ "$vs_build_prepare_mem" == "yes" ]]; then
bash -c 'echo 1 > /proc/sys/vm/drop_caches'
# Not all kernels support compact_memory
if [[ -w '/proc/sys/vm/compact_memory' ]]
-then
+then
bash -c 'echo 1 > /proc/sys/vm/compact_memory'
fi
free -m

View File

@ -1,6 +1,6 @@
#!/bin/bash
-[[ ! -z "${DBGOPT}" && $0 =~ ${DBGOPT} ]] && set -x
+[[ ! -z "${DBGOPT}" && $0 =~ ${DBGOPT} ]] && set -x
RET=$1
BLDENV=$2

View File

@ -1,14 +1,14 @@
#!/bin/bash
-[[ ! -z "${DBGOPT}" && $0 =~ ${DBGOPT} ]] && set -x
+[[ ! -z "${DBGOPT}" && $0 =~ ${DBGOPT} ]] && set -x
BUILDINFO_BASE=/usr/local/share/buildinfo
SCRIPT_SRC_PATH=src/sonic-build-hooks
if [ -e ${SCRIPT_SRC_PATH} ]; then
-. ${SCRIPT_SRC_PATH}/scripts/utils.sh
+. ${SCRIPT_SRC_PATH}/scripts/utils.sh
else
-. ${BUILDINFO_BASE}/scripts/utils.sh
+. ${BUILDINFO_BASE}/scripts/utils.sh
fi
DOCKER_IMAGE=$1
@ -85,15 +85,15 @@ if [[ ! -z ${SONIC_VERSION_CACHE} && -e ${CACHE_ENCODE_FILE} ]]; then
GIT_FILE_STATUS=$(git status -s ${DEP_FILES})
-# If the cache file is not exists in the global cache for the given SHA,
+# If the cache file is not exists in the global cache for the given SHA,
# store the new cache file into version cache path.
if [ -f ${LOCAL_CACHE_FILE} ]; then
if [[ -z ${GIT_FILE_STATUS} && ! -e ${GLOBAL_CACHE_FILE} ]]; then
mkdir -p ${GLOBAL_CACHE_DIR}
chmod -f 777 ${GLOBAL_CACHE_DIR}
FLOCK ${GLOBAL_CACHE_FILE}
-cp ${LOCAL_CACHE_FILE} ${GLOBAL_CACHE_FILE}
-chmod -f 777 ${LOCAL_CACHE_FILE} ${GLOBAL_CACHE_FILE}
+cp ${LOCAL_CACHE_FILE} ${GLOBAL_CACHE_FILE}
+chmod -f 777 ${LOCAL_CACHE_FILE} ${GLOBAL_CACHE_FILE}
FUNLOCK ${GLOBAL_CACHE_FILE}
fi
fi
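
FLOCK and FUNLOCK here are helpers pulled in from the sourced utils.sh; the publish-to-global-cache step they guard can be sketched with the standard flock(1) utility roughly as follows (an illustration under that assumption, not the hooks' actual implementation):

# Copy the locally built cache file into the shared version cache under an
# exclusive lock so that concurrent builds do not clobber each other.
(
    flock -x 9
    cp "${LOCAL_CACHE_FILE}" "${GLOBAL_CACHE_FILE}"
    chmod -f 777 "${LOCAL_CACHE_FILE}" "${GLOBAL_CACHE_FILE}"
) 9>"${GLOBAL_CACHE_FILE}.lock"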

View File

@ -1,10 +1,10 @@
#!/bin/bash
-[[ ! -z "${DBGOPT}" && $0 =~ ${DBGOPT} ]] && set -x
+[[ ! -z "${DBGOPT}" && $0 =~ ${DBGOPT} ]] && set -x
SCRIPT_SRC_PATH=src/sonic-build-hooks
if [ -e ${SCRIPT_SRC_PATH} ]; then
-. ${SCRIPT_SRC_PATH}/scripts/utils.sh
+. ${SCRIPT_SRC_PATH}/scripts/utils.sh
fi
ARCH=$1

View File

@ -92,9 +92,9 @@ def delete_file_if_exist(file):
os.remove(file)
except:
print_msg(PRINT_LEVEL_WARN, "Cannot delete " + file)
# Logic functions
def generate_output_file(resources, dest_url_valid, dest_url, output_file_name):
try:
with open(output_file_name, 'w') as f:
@ -111,7 +111,7 @@ def generate_output_file(resources, dest_url_valid, dest_url, output_file_name):
except:
print_msg(PRINT_LEVEL_WARN, output_file_name + " cannot be created")
return RET_CODE_CANNOT_CREATE_FILE
return RET_CODE_SUCCESS
def upload_resource_to_server(resource_path, resource_name, user, key, server_url):
@ -142,7 +142,7 @@ def upload_resource_to_server(resource_path, resource_name, user, key, server_ur
if reported_md5 != None and reported_md5 != file_md5:
print_msg(PRINT_LEVEL_WARN, f"Server reported file's chsum {reported_md5}, expected {file_md5}")
return RET_CODE_SUCCESS
def download_external_resouce(resource, cache_path):
@ -204,13 +204,13 @@ def parse_args():
parser.add_argument('-c', '--cache', default="." + os.sep + "tmp",
help='Path to cache for storing content before uploading to server')
-parser.add_argument('-p', '--print', default=PRINT_LEVEL_INFO,
+parser.add_argument('-p', '--print', default=PRINT_LEVEL_INFO,
choices=[PRINT_LEVEL_ERROR, PRINT_LEVEL_WARN, PRINT_LEVEL_INFO, PRINT_LEVEL_VERBOSE],
help='Print level verbosity')
parser.add_argument('-o', '--output', default=DEFAULT_INVALID_INPUT,
-help='Output file name to hold the list of packages')
+help='Output file name to hold the list of packages')
parser.add_argument('-u', '--user', default=DEFAULT_INVALID_INPUT,
help='User for server authentication')
@ -223,33 +223,33 @@ def parse_args():
return parser.parse_args()
def main():
-global g_current_print_level
+global g_current_print_level
ret_val = RET_CODE_SUCCESS
resource_counter = 0.0
resource_dict = dict()
args = parse_args()
g_current_print_level = args.print
resource_files_list = get_resource_files_list(args.source)
resource_list = get_resources_list(resource_files_list)
-#remove duplications
+#remove duplications
for resource in resource_list:
unique_name = resource.get_unique_name()
if not unique_name in resource_dict.keys():
resource_dict[unique_name] = resource
print_msg(PRINT_LEVEL_INFO, "Found " + str(len(resource_files_list)) + " version files and " + str(len(resource_dict.keys())) + " unique resources")
if args.dest != DEFAULT_INVALID_INPUT:
upload_files_to_server = True
print_msg(PRINT_LEVEL_INFO, "Upload files to URL - " + args.dest)
else:
upload_files_to_server = False
print_msg(PRINT_LEVEL_INFO, "Skipping files upload to server")
print_msg(PRINT_LEVEL_INFO, "Skipping files upload to server")
#create cache directory if not exist
create_dir_if_not_exist(args.cache)
@ -265,29 +265,29 @@ def main():
#download content to cache
file_in_cache = download_external_resouce(resource, args.cache)
if "" == file_in_cache:
return RET_CODE_CANNOT_WRITE_FILE
if True == upload_files_to_server:
#upload content to web server
ret_val = upload_resource_to_server(file_in_cache, unique_name, args.user, args.key, args.dest)
if ret_val != RET_CODE_SUCCESS:
return ret_val
if True == g_delete_resources_in_cache:
delete_file_if_exist(file_in_cache)
print_msg(PRINT_LEVEL_INFO, "Downloading Data. Progress " + str(int(100.0*resource_counter/len(resource_dict.keys()))) + "%", True) #print progress bar
# generate version output file as needed
if args.output != DEFAULT_INVALID_INPUT:
ret_val = generate_output_file(resource_dict, upload_files_to_server, args.dest, args.output)
print_msg(PRINT_LEVEL_INFO, "Generate output file " + args.output)
return ret_val
-# Entry function
+# Entry function
if __name__ == '__main__':
ret_val = main()
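
The checksum check in upload_resource_to_server() above boils down to comparing the server-reported digest with a locally computed md5; a rough bash equivalent for illustration (variable names mirror the Python ones and are assumed to be set):

# Warn when the checksum reported by the server differs from the local file's md5.
file_md5=$(md5sum "${file_in_cache}" | awk '{print $1}')
if [[ -n "${reported_md5}" && "${reported_md5}" != "${file_md5}" ]]; then
    echo "WARN: server reported checksum ${reported_md5}, expected ${file_md5}"
fi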

View File

@ -1,6 +1,6 @@
#!/bin/bash
-[[ ! -z "${DBGOPT}" && $0 =~ ${DBGOPT} ]] && set -x
+[[ ! -z "${DBGOPT}" && $0 =~ ${DBGOPT} ]] && set -x
ARCH=$1

View File

@ -1,14 +1,14 @@
#!/bin/bash
-[[ ! -z "${DBGOPT}" && $0 =~ ${DBGOPT} ]] && set -x
+[[ ! -z "${DBGOPT}" && $0 =~ ${DBGOPT} ]] && set -x
BUILDINFO_BASE=/usr/local/share/buildinfo
SCRIPT_SRC_PATH=src/sonic-build-hooks
if [ -e ${SCRIPT_SRC_PATH} ]; then
-. ${SCRIPT_SRC_PATH}/scripts/utils.sh
+. ${SCRIPT_SRC_PATH}/scripts/utils.sh
else
-. ${BUILDINFO_BASE}/scripts/utils.sh
+. ${BUILDINFO_BASE}/scripts/utils.sh
fi
IMAGENAME=$1
@ -89,7 +89,7 @@ if [[ "$SKIP_BUILD_HOOK" == y || ${ENABLE_VERSION_CONTROL_DOCKER} != y ]]; then
exit 0
fi
-# Version cache
+# Version cache
DOCKER_IMAGE_NAME=${IMAGENAME}
IMAGE_DBGS_NAME=${DOCKER_IMAGE_NAME//-/_}_image_dbgs

View File

@ -1,6 +1,6 @@
#!/bin/bash
-[[ ! -z "${DBGOPT}" && $0 =~ ${DBGOPT} ]] && set -x
+[[ ! -z "${DBGOPT}" && $0 =~ ${DBGOPT} ]] && set -x
SLAVE_DIR=$1
ARCH=$2

View File

@ -77,7 +77,7 @@ if [ -d "$KERNEL_MODULES_DIR" ]; then
# Do sign for each found module
kernel_modules_cnt=0
for mod in $modules_list
-do
+do
# check Kernel module is signed.
if ! grep -q "~Module signature appended~" "${mod}"; then
echo "Error: Kernel module=${mod} have no signature appened."
@ -87,7 +87,7 @@ if [ -d "$KERNEL_MODULES_DIR" ]; then
if [ $VERBOSE = 'true' ]; then
echo "kernel module named=${mod} have signature appended."
fi
kernel_modules_cnt=$((kernel_modules_cnt+1))
done
echo "Num of kernel modules signed: kernel_modules_cnt=$kernel_modules_cnt"

View File

@ -1,5 +1,5 @@
#!/bin/bash
-# This script is signing kernel modules by using sign-file tool
+# This script is signing kernel modules by using sign-file tool
usage() {
cat <<EOF

View File

@ -1,7 +1,7 @@
#!/bin/bash
# This script is signing boot components: shim, mmx, grub, kernel and kernel modules in development env.
-## Enable debug output for script & exit code when failing occurs
+## Enable debug output for script & exit code when failing occurs
set -x -e
print_usage() {
@ -79,9 +79,9 @@ for efi in $efi_file_list
do
# grep filename from full path
efi_filename=$(echo $efi | grep -o '[^/]*$')
if echo $efi_filename | grep -e "shim" -e "grub" -e "mm"; then
clean_file ${efi}-signed
echo "signing efi file - full path: ${efi} filename: ${efi_filename}"
@ -92,7 +92,7 @@ do
cp ${efi}-signed $FS_ROOT/boot/${efi_filename}
# verifying signature of mm & shim efi files.
-./scripts/secure_boot_signature_verification.sh -c $PEM_CERT -e $FS_ROOT/boot/${efi_filename}
+./scripts/secure_boot_signature_verification.sh -c $PEM_CERT -e $FS_ROOT/boot/${efi_filename}
fi
done
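
The verification above delegates to scripts/secure_boot_signature_verification.sh; an equivalent manual check of a signed EFI binary can be sketched with sbverify from the sbsigntool package (an assumption about tooling, not necessarily what the repo script uses):

# Verify the signature on a signed EFI binary against the signer's certificate.
sbverify --cert "${PEM_CERT}" "${FS_ROOT}/boot/${efi_filename}"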

View File

@ -97,7 +97,7 @@ class Component:
def print(self, file_path):
if len(self.versions) <= 0:
return
if self.verbose is None:
return
@ -122,7 +122,7 @@ class Component:
and not re.search(self.verbose, "cver={}".format(self.versions[package])):
continue
print('{0}=={1}'.format(package, self.versions[package]))
# Check if the self component can be overwritten by the input component
def check_overwritable(self, component, for_all_dist=False, for_all_arch=False):
if self.ctype != component.ctype:

View File

@ -57,7 +57,7 @@ ENV DEBIAN_FRONTEND=noninteractive
{%- if CROSS_BUILD_ENVIRON == "y" %}
RUN apt-get install -y python3 python3-pip
-RUN apt-get install -y python3-minimal:$arch python3.9:$arch python3:$arch python3-dev:$arch python3-setuptools:$arch
+RUN apt-get install -y python3-minimal:$arch python3.9:$arch python3:$arch python3-dev:$arch python3-setuptools:$arch
RUN apt-get download python3-distutils && dpkg --force-all -i python3-distutils*
RUN apt-get download python3-pip && dpkg --force-all -i python3-pip*
RUN which pip3 && pip3 install enum34
@ -67,7 +67,7 @@ RUN pip3 install virtualenv
RUN mkdir /python_virtualenv
RUN cd /python_virtualenv && python3 -m virtualenv --copies -p /usr/bin/python3 env3
-RUN PATH=/python_virtualenv/env3/bin/:$PATH pip3 install setuptools==49.6.00 wheel==0.35.1 fastentrypoints pytest pytest-cov pytest-runner==5.2 nose==1.3.7 mockredispy==2.9.3 mock==3.0.5 PyYAML==5.4.1 redis==3.5.3 pexpect==4.8.0 Pympler==0.8 parameterized natsort==6.2.1 MarkupSafe==2.0.1 Jinja2==3.0.3 click tabulate netaddr netifaces pyroute2 pyfakefs sphinx && ln -s /python_virtualenv/env3/bin/pytest /python_virtualenv/env3/bin/pytest-3
+RUN PATH=/python_virtualenv/env3/bin/:$PATH pip3 install setuptools==49.6.00 wheel==0.35.1 fastentrypoints pytest pytest-cov pytest-runner==5.2 nose==1.3.7 mockredispy==2.9.3 mock==3.0.5 PyYAML==5.4.1 redis==3.5.3 pexpect==4.8.0 Pympler==0.8 parameterized natsort==6.2.1 MarkupSafe==2.0.1 Jinja2==3.0.3 click tabulate netaddr netifaces pyroute2 pyfakefs sphinx && ln -s /python_virtualenv/env3/bin/pytest /python_virtualenv/env3/bin/pytest-3
RUN apt-get --fix-broken install -y
RUN LIBPYTHON3_DEPS="`apt-cache depends libpython3-dev:$arch |grep Depends|awk {'print \$2;'}|tr "\n" " "`" && apt-get install -y libpython2.7-dev:$arch $LIBPYTHON3_DEPS libxml2-dev:$arch libxslt-dev:$arch libssl-dev:$arch libz-dev:$arch
@ -615,7 +615,7 @@ RUN apt-get install -y nodejs
{%- if CROSS_BUILD_ENVIRON == "y" %}
RUN apt-get install -y rsync dh-python
-RUN apt-get install -y libelf-dev:$arch libdw-dev:$arch libbz2-dev:$arch liblzo2-dev:$arch libedit-dev:$arch libevent-dev:$arch libopts25-dev:$arch libssl-dev:$arch pps-tools:$arch libpam-cap:$arch libcap-dev:$arch libpam0g-dev:$arch libaudit-dev:$arch libgtk-3-dev:$arch libkrb5-dev:$arch libsystemd-dev:$arch libwrap0-dev:$arch libkrb5-dev:$arch libboost1.74-dev:$arch libboost-dev:$arch libzmq5:$arch libzmq3-dev:$arch libdaemon-dev:$arch libjansson-dev:$arch libmnl-dev:$arch libsensors5:$arch libsensors4-dev:$arch libperl-dev:$arch libmariadb-dev:$arch libmariadb-dev-compat:$arch libpci-dev:$arch libjson-c-dev:$arch libreadline-dev:$arch librtr-dev:$arch librrd-dev:$arch libnetfilter-conntrack-dev:$arch libnetfilter-conntrack3:$arch libnfnetlink-dev:$arch libnftnl-dev:$arch libldap2-dev:$arch libbind-export-dev:$arch check:$arch libboost-atomic-dev:$arch libboost-test-dev:$arch libglib2.0-dev:$arch libexplain-dev:$arch libc-ares-dev:$arch libiptc0:$arch libxtables12:$arch libatm1-dev:$arch libbpf-dev:$arch libdb-dev:$arch pkg-config:$arch libnghttp2-14:$arch librtmp1:$arch libssh2-1:$arch libcjson1:$arch libcjson-dev:$arch libcurl4-openssl-dev:$arch libboost-thread1.74-dev:$arch libboost-thread-dev:$arch libboost-system1.74-dev:$arch libboost-system-dev:$arch libgtest-dev:$arch libgmock-dev:$arch libfido2-dev:$arch libcunit1:$arch libcunit1-dev:$arch libauparse-dev:$arch libnetsnmptrapd40:$arch qtbase5-dev:$arch libboost-log-dev:$arch libboost-filesystem-dev:$arch libboost-program-options-dev:$arch
+RUN apt-get install -y libelf-dev:$arch libdw-dev:$arch libbz2-dev:$arch liblzo2-dev:$arch libedit-dev:$arch libevent-dev:$arch libopts25-dev:$arch libssl-dev:$arch pps-tools:$arch libpam-cap:$arch libcap-dev:$arch libpam0g-dev:$arch libaudit-dev:$arch libgtk-3-dev:$arch libkrb5-dev:$arch libsystemd-dev:$arch libwrap0-dev:$arch libkrb5-dev:$arch libboost1.74-dev:$arch libboost-dev:$arch libzmq5:$arch libzmq3-dev:$arch libdaemon-dev:$arch libjansson-dev:$arch libmnl-dev:$arch libsensors5:$arch libsensors4-dev:$arch libperl-dev:$arch libmariadb-dev:$arch libmariadb-dev-compat:$arch libpci-dev:$arch libjson-c-dev:$arch libreadline-dev:$arch librtr-dev:$arch librrd-dev:$arch libnetfilter-conntrack-dev:$arch libnetfilter-conntrack3:$arch libnfnetlink-dev:$arch libnftnl-dev:$arch libldap2-dev:$arch libbind-export-dev:$arch check:$arch libboost-atomic-dev:$arch libboost-test-dev:$arch libglib2.0-dev:$arch libexplain-dev:$arch libc-ares-dev:$arch libiptc0:$arch libxtables12:$arch libatm1-dev:$arch libbpf-dev:$arch libdb-dev:$arch pkg-config:$arch libnghttp2-14:$arch librtmp1:$arch libssh2-1:$arch libcjson1:$arch libcjson-dev:$arch libcurl4-openssl-dev:$arch libboost-thread1.74-dev:$arch libboost-thread-dev:$arch libboost-system1.74-dev:$arch libboost-system-dev:$arch libgtest-dev:$arch libgmock-dev:$arch libfido2-dev:$arch libcunit1:$arch libcunit1-dev:$arch libauparse-dev:$arch libnetsnmptrapd40:$arch qtbase5-dev:$arch libboost-log-dev:$arch libboost-filesystem-dev:$arch libboost-program-options-dev:$arch
RUN apt-get download libgirepository1.0-dev:$arch && dpkg --force-all -i libgirepository1.0-dev*
RUN PATH=/python_virtualenv/env3/bin/:$PATH pip3 install pycairo
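
The :$arch package suffixes in these RUN lines presuppose that dpkg multi-arch support for the foreign architecture is enabled earlier in the Dockerfile; in isolation that setup looks roughly like this shell sketch (the arch value is illustrative):

# Enable a foreign architecture so ":$arch" package variants become installable.
dpkg --add-architecture armhf
apt-get update
apt-get install -y libssl-dev:armhf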

View File

@ -57,7 +57,7 @@ ENV DEBIAN_FRONTEND=noninteractive
{%- if CROSS_BUILD_ENVIRON == "y" %}
RUN apt-get install -y python3 python3-pip
-RUN apt-get install -y python:$arch python3-minimal:$arch python3.7:$arch python3:$arch python-setuptools:$arch
+RUN apt-get install -y python:$arch python3-minimal:$arch python3.7:$arch python3:$arch python-setuptools:$arch
RUN apt-get download python3-distutils && dpkg --force-all -i python3-distutils*
RUN apt-get download python3-pip && dpkg --force-all -i python3-pip*
RUN which pip3 && pip3 install enum34
@ -592,7 +592,7 @@ EXPOSE 22
RUN git clone https://chromium.googlesource.com/chromium/tools/depot_tools.git /usr/share/depot_tools
ENV PATH /usr/share/depot_tools:$PATH
-# Install dependencies for dhcp relay test
+# Install dependencies for dhcp relay test
RUN pip3 install parameterized==0.8.1
RUN pip3 install pyfakefs

View File

@ -272,7 +272,7 @@ RUN apt-get update && apt-get install -y \
# For gmock
RUN apt-get install -y libgmock-dev -t stretch-backports
-# Install dependencies for dhcp relay test
+# Install dependencies for dhcp relay test
RUN pip3 install parameterized==0.8.1
RUN pip3 install pyfakefs