05.Version cache - docker dpkg caching support (#12005)

This feature caches all the deb files downloaded during a Docker build and stores
them in the version cache.

If a matching cache file already exists in the version cache, it is loaded and the
extracted deb files are copied into the Debian cache path (/var/cache/apt/archives).

apt-get install then installs the deb files from this cache whenever they are present,
which avoids unnecessary package downloads from the repository and speeds up the
overall build.

The cache file is selected based on the SHA value of the version dependency
files.
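
A minimal usage sketch (SONIC_VERSION_CACHE_METHOD and SONIC_VERSION_CACHE_SOURCE
are the knobs this series adds in rules/config below; the build target and cache
path here are illustrative):

	Eg: make SONIC_VERSION_CACHE_METHOD=cache \
		SONIC_VERSION_CACHE_SOURCE=/var/cache/sonic/vcache \
		target/sonic-broadcom.bin

Cache files are keyed by a truncated SHA over the version dependency files, e.g.
<SONIC_VERSION_CACHE_SOURCE>/docker-swss/docker-swss-<sha>.tgz.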

* 03.Version-cache - framework environment settings

It defines and passes the necessary version cache environment variables
to the caching framework.

It adds the utils script for shared cache file access, as sketched below.

It also adds the post-cleanup logic for removing unwanted files from
the docker image after the version cache is created.
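
The shared-cache access pattern the scripts use (FLOCK/FUNLOCK are the helpers
provided by that utils script; the variables are illustrative, taken from the
collect script in the diff below):

	. src/sonic-build-hooks/scripts/utils.sh     # provides FLOCK/FUNLOCK
	FLOCK ${GLOBAL_CACHE_FILE}                   # take the shared-cache lock
	cp ${LOCAL_CACHE_FILE} ${GLOBAL_CACHE_FILE}  # publish the locally built cache
	FUNLOCK ${GLOBAL_CACHE_FILE}                 # release the lock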

* 04.Version cache - debug framework

Added the DBGOPT make variable to run the cache framework scripts in
trace mode. The option takes a partial script name (used as a regex) and
enables shell tracing (set -x) in the matching scripts.

Multiple shell script names can also be given, separated by '|'.

	Eg: make DBGOPT="image|docker"

Added a verbose mode to dump the version merge details during
build/dry-run.
	Eg: scripts/versions_manager.py freeze -v \
		'dryrun|cmod=docker-swss|cfile=versions-deb|cname=all|stage=sub|stage=add'
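
Each traced script carries the same guard at the top (taken from the diffs
below); tracing turns on only when the script path matches the DBGOPT pattern:

	[[ ! -z "${DBGOPT}" && $0 =~ ${DBGOPT} ]] && set -x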

* 05.Version cache - docker dpkg caching support (described above)
Kalimuthu-Velappan authored on 2022-12-12 06:50:56 +05:30 (committed by GitHub)
commit 0dc22bd27c, parent 5624d15a7c
21 changed files with 586 additions and 76 deletions

View File

@@ -175,11 +175,22 @@ ifeq ($(ENABLE_FIPS), y)
 endif
 endif

+SONIC_VERSION_CACHE := $(filter-out none,$(SONIC_VERSION_CACHE_METHOD))
+SONIC_OVERRIDE_BUILD_VARS += SONIC_VERSION_CACHE=$(SONIC_VERSION_CACHE)
+SONIC_OVERRIDE_BUILD_VARS += SONIC_VERSION_CACHE_SOURCE=$(SONIC_VERSION_CACHE_SOURCE)
+export SONIC_VERSION_CACHE SONIC_VERSION_CACHE_SOURCE
+$(shell test -d $(SONIC_VERSION_CACHE_SOURCE) || \
+    mkdir -p $(SONIC_VERSION_CACHE_SOURCE) && chmod -f 777 $(SONIC_VERSION_CACHE_SOURCE) 2>/dev/null )
+
 # Generate the version control build info
 $(shell \
     SONIC_VERSION_CONTROL_COMPONENTS=$(SONIC_VERSION_CONTROL_COMPONENTS) \
     TRUSTED_GPG_URLS=$(TRUSTED_GPG_URLS) \
     PACKAGE_URL_PREFIX=$(PACKAGE_URL_PREFIX) \
+    DISTRO=$(BLDENV) \
+    SONIC_VERSION_CACHE=$(SONIC_VERSION_CACHE) \
+    SONIC_VERSION_CACHE_SOURCE=$(SONIC_VERSION_CACHE_SOURCE) \
+    DBGOPT='$(DBGOPT)' \
     scripts/generate_buildinfo_config.sh)

 # Generate the slave Dockerfile, and prepare build info for it
@@ -198,6 +209,8 @@ $(shell CONFIGURED_ARCH=$(CONFIGURED_ARCH) \
     PREPARE_DOCKER=BUILD_SLAVE=y \
     DEFAULT_CONTAINER_REGISTRY=$(DEFAULT_CONTAINER_REGISTRY) \
+    SONIC_VERSION_CACHE=$(SONIC_VERSION_CACHE) \
+    DBGOPT='$(DBGOPT)' \
     scripts/prepare_docker_buildinfo.sh \
     $(SLAVE_BASE_IMAGE) \
     $(SLAVE_DIR)/Dockerfile \
@@ -220,12 +233,13 @@ SLAVE_TAG = $(shell \
     (cat $(SLAVE_DIR)/Dockerfile.user \
     $(SLAVE_DIR)/Dockerfile \
     $(SLAVE_DIR)/buildinfo/versions/versions-* \
+    .git/HEAD \
     && echo $(USER)/$(PWD)/$(CONFIGURED_PLATFORM)) \
     | sha1sum \
     | awk '{print substr($$1,0,11);}')

 COLLECT_DOCKER=DEFAULT_CONTAINER_REGISTRY=$(DEFAULT_CONTAINER_REGISTRY) \
+    SONIC_VERSION_CACHE=$(SONIC_VERSION_CACHE) \
+    DBGOPT='$(DBGOPT)' \
     scripts/collect_docker_version_files.sh \
     $(SLAVE_BASE_IMAGE) \
     target \
@@ -286,6 +300,10 @@ ifneq ($(SONIC_DPKG_CACHE_SOURCE),)
     DOCKER_RUN += -v "$(SONIC_DPKG_CACHE_SOURCE):/dpkg_cache:rw"
 endif

+ifneq ($(SONIC_VERSION_CACHE_SOURCE),)
+    DOCKER_RUN += -v "$(SONIC_VERSION_CACHE_SOURCE):/vcache:rw"
+endif
+
 ifeq ($(SONIC_ENABLE_SECUREBOOT_SIGNATURE), y)
 ifneq ($(SIGNING_KEY),)
     DOCKER_SIGNING_SOURCE := $(shell dirname $(SIGNING_KEY))
@@ -379,6 +397,8 @@ DOCKER_SLAVE_BASE_BUILD = docker build --no-cache \
     --build-arg http_proxy=$(http_proxy) \
     --build-arg https_proxy=$(https_proxy) \
     --build-arg no_proxy=$(no_proxy) \
+    --build-arg SONIC_VERSION_CACHE=$(SONIC_VERSION_CACHE) \
+    --build-arg SONIC_VERSION_CONTROL_COMPONENTS=$(SONIC_VERSION_CONTROL_COMPONENTS) \
     $(SLAVE_DIR) \
     $(SPLIT_LOG) $(DOCKER_BASE_LOG)

View File

@@ -81,7 +81,10 @@ echo '[INFO] Build host debian base system...'
 TARGET_PATH=$TARGET_PATH scripts/build_debian_base_system.sh $CONFIGURED_ARCH $IMAGE_DISTRO $FILESYSTEM_ROOT

 # Prepare buildinfo
-sudo scripts/prepare_debian_image_buildinfo.sh $CONFIGURED_ARCH $IMAGE_DISTRO $FILESYSTEM_ROOT $http_proxy
+sudo SONIC_VERSION_CACHE=${SONIC_VERSION_CACHE} \
+    DBGOPT="${DBGOPT}" \
+    scripts/prepare_debian_image_buildinfo.sh $CONFIGURED_ARCH $IMAGE_DISTRO $FILESYSTEM_ROOT $http_proxy

 sudo chown root:root $FILESYSTEM_ROOT
@@ -440,10 +443,10 @@ if [[ $TARGET_BOOTLOADER == grub ]]; then
         GRUB_PKG=grub-efi-arm64-bin
     fi

-    sudo LANG=C DEBIAN_FRONTEND=noninteractive chroot $FILESYSTEM_ROOT apt-get -y download \
+    sudo LANG=C DEBIAN_FRONTEND=noninteractive chroot $FILESYSTEM_ROOT apt-get install -d -o dir::cache=/var/cache/apt \
         $GRUB_PKG

-    sudo mv $FILESYSTEM_ROOT/grub*.deb $FILESYSTEM_ROOT/$PLATFORM_DIR/grub
+    sudo cp $FILESYSTEM_ROOT/var/cache/apt/archives/grub*.deb $FILESYSTEM_ROOT/$PLATFORM_DIR/grub
 fi

 ## Disable kexec supported reboot which was installed by default
@@ -635,7 +638,9 @@ if [[ $TARGET_BOOTLOADER == uboot ]]; then
 fi

 # Collect host image version files before cleanup
-scripts/collect_host_image_version_files.sh $TARGET_PATH $FILESYSTEM_ROOT
+SONIC_VERSION_CACHE=${SONIC_VERSION_CACHE} \
+    DBGOPT="${DBGOPT}" \
+    scripts/collect_host_image_version_files.sh $CONFIGURED_ARCH $IMAGE_DISTRO $TARGET_PATH $FILESYSTEM_ROOT

 # Remove GCC
 sudo LANG=C DEBIAN_FRONTEND=noninteractive chroot $FILESYSTEM_ROOT apt-get -y remove gcc

View File

@@ -17,4 +17,9 @@ $(BRCM_DNX_SAI)_URL = "$(LIBSAIBCM_DNX_URL_PREFIX)/$(BRCM_DNX_SAI)"
 SONIC_ONLINE_DEBS += $(BRCM_XGS_SAI)
 SONIC_ONLINE_DEBS += $(BRCM_DNX_SAI)
 $(BRCM_XGS_SAI_DEV)_DEPENDS += $(BRCM_XGS_SAI)
+$(BRCM_XGS_SAI)_SKIP_VERSION=y
+$(BRCM_XGS_SAI_DEV)_SKIP_VERSION=y
+$(BRCM_DNX_SAI)_SKIP_VERSION=y
 $(eval $(call add_conflict_package,$(BRCM_XGS_SAI_DEV),$(LIBSAIVS_DEV)))

View File

@@ -229,6 +229,15 @@ TRUSTED_GPG_URLS = https://packages.trafficmanager.net/debian/public_key.gpg,htt
 # docker: docker base images
 SONIC_VERSION_CONTROL_COMPONENTS ?= none

+# SONIC_VERSION_CACHE allows the .deb, .py, wget, git, docker and go files to be stored in the cache path.
+# This allows the submodules to cache standard installation packages and restore them back,
+# avoiding the package download every time.
+# SONIC_VERSION_CACHE - method of deb package caching
+#     none  : no caching
+#     cache : use the cache if it exists, else build from source and update the cache
+# SONIC_VERSION_CACHE_SOURCE - defines the version cache location
+SONIC_VERSION_CACHE_METHOD ?= none
+SONIC_VERSION_CACHE_SOURCE ?= $(SONIC_DPKG_CACHE_SOURCE)/vcache
+
 # SONiC docker registry
 #
 # Set the env variable ENABLE_DOCKER_BASE_PULL = y to enable pulling sonic-slave docker from registry

View File

@@ -1,5 +1,7 @@
 #!/bin/bash

+[[ ! -z "${DBGOPT}" && $0 =~ ${DBGOPT} ]] && set -x
+
 CONFIGURED_ARCH=$1
 IMAGE_DISTRO=$2
 FILESYSTEM_ROOT=$3

View File

@@ -1,5 +1,7 @@
 #!/bin/bash

+[[ ! -z "${DBGOPT}" && $0 =~ ${DBGOPT} ]] && set -x
+
 RET=$1
 BLDENV=$2
 TARGET_PATH=$3
@@ -15,12 +17,17 @@ TIMESTAMP=$(date +"%Y%m%d%H%M%S")
 VERSION_BUILD_PATH=$TARGET_PATH/versions/build
 VERSION_SLAVE_PATH=$VERSION_BUILD_PATH/build-sonic-slave-${BLDENV}
 LOG_VERSION_PATH=$VERSION_BUILD_PATH/log-${TIMESTAMP}
+DEFAULT_VERSION_PATH=files/build/versions/default
+BUILD_LOG_PATH=/sonic/target/versions/log/sonic-slave-${BLDENV}/

 sudo chmod -R a+rw $BUILDINFO_PATH
 collect_version_files $LOG_VERSION_PATH
 ([ -d $BUILD_VERSION_PATH ] && [ ! -z "$(ls $BUILD_VERSION_PATH/)" ]) && cp -rf $BUILD_VERSION_PATH/* $LOG_VERSION_PATH/

 mkdir -p $VERSION_SLAVE_PATH
+mkdir -p ${BUILD_LOG_PATH}
+([ -d ${LOG_PATH} ] && [ ! -z "$(ls ${LOG_PATH})" ]) && cp ${LOG_PATH}/* ${BUILD_LOG_PATH}

 scripts/versions_manager.py merge -t $VERSION_SLAVE_PATH -b $LOG_VERSION_PATH -e $POST_VERSION_PATH
 [ -d $BUILD_VERSION_PATH ] && rm -rf $BUILD_VERSION_PATH/*

View File

@@ -1,6 +1,16 @@
 #!/bin/bash

-set -x
+[[ ! -z "${DBGOPT}" && $0 =~ ${DBGOPT} ]] && set -x
+
+BUILDINFO_BASE=/usr/local/share/buildinfo
+SCRIPT_SRC_PATH=src/sonic-build-hooks
+if [ -e ${SCRIPT_SRC_PATH} ]; then
+    . ${SCRIPT_SRC_PATH}/scripts/utils.sh
+else
+    . ${BUILDINFO_BASE}/scripts/utils.sh
+fi

 DOCKER_IMAGE=$1
 TARGET_PATH=$2
 DOCKER_IMAGE_TAG=$3
@@ -13,6 +23,8 @@ DOCKER_IMAGE_NAME=$(echo $DOCKER_IMAGE | cut -d: -f1 | sed "s/-$DOCKER_USERNAME\
 #Create the container specific to the user tag and slave tag
 DOCKER_CONTAINER=${DOCKER_IMAGE_TAG/:/-}
 TARGET_VERSIONS_PATH=$TARGET_PATH/versions/dockers/$DOCKER_IMAGE_NAME
+BUILD_LOG_PATH=target/versions/log/$DOCKER_IMAGE_NAME
+mkdir -p ${BUILD_LOG_PATH}

 [ -d $TARGET_VERSIONS_PATH ] && rm -rf $TARGET_VERSIONS_PATH
 mkdir -p $TARGET_VERSIONS_PATH
@@ -34,5 +46,57 @@ docker tag ${DOCKER_IMAGE_TAG} tmp-${DOCKER_IMAGE_TAG}
 DOCKER_BUILDKIT=1 docker build -f ${DOCKER_PATH}/Dockerfile.cleanup --target output -o target/vcache/${DOCKER_IMAGE_NAME} ${DOCKER_PATH}
 DOCKER_BUILDKIT=1 docker build -f ${DOCKER_PATH}/Dockerfile.cleanup --no-cache --target final --tag ${DOCKER_IMAGE_TAG} ${DOCKER_PATH}
 docker rmi tmp-${DOCKER_IMAGE_TAG}
+docker cp -L $DOCKER_CONTAINER:/usr/local/share/buildinfo/log ${BUILD_LOG_PATH}/
+
+# Save the cache contents from docker build
+LOCAL_CACHE_FILE=target/vcache/${DOCKER_IMAGE_NAME}/cache.tgz
+CACHE_ENCODE_FILE=${DOCKER_PATH}/vcache/cache.base64
+sleep 1; sync ${CACHE_ENCODE_FILE}
+
+# Decode the cache content into gz format
+SRC_VERSION_PATH=files/build/versions
+if [[ -e ${CACHE_ENCODE_FILE} ]]; then
+    cat ${CACHE_ENCODE_FILE} | base64 -d > ${LOCAL_CACHE_FILE}
+    rm -f ${CACHE_ENCODE_FILE}
+fi
+
+# Version package cache
+IMAGE_DBGS_NAME=${DOCKER_IMAGE_NAME//-/_}_image_dbgs
+if [[ ${DOCKER_IMAGE_NAME} == sonic-slave-* ]]; then
+    GLOBAL_CACHE_DIR=${SONIC_VERSION_CACHE_SOURCE}/${DOCKER_IMAGE_NAME}
+else
+    GLOBAL_CACHE_DIR=/vcache/${DOCKER_IMAGE_NAME}
+fi
+
+if [[ ! -z ${SONIC_VERSION_CACHE} && -e ${CACHE_ENCODE_FILE} ]]; then
+
+    # Select version files for SHA calculation
+    VERSION_FILES="${SRC_VERSION_PATH}/dockers/${DOCKER_IMAGE_NAME}/versions-*-${DISTRO}-${ARCH} ${SRC_VERSION_PATH}/default/versions-*"
+    DEP_FILES="${DOCKER_PATH}/Dockerfile.j2"
+    if [[ ${DOCKER_IMAGE_NAME} =~ '-dbg' ]]; then
+        DEP_FILES="${DEP_FILES} build_debug_docker_j2.sh"
+    fi
+
+    # Calculate the version SHA
+    VERSION_SHA="$( (echo -n "${!IMAGE_DBGS_NAME}"; cat ${DEP_FILES} ${VERSION_FILES}) | sha1sum | awk '{print substr($1,0,23);}')"
+    GLOBAL_CACHE_FILE=${GLOBAL_CACHE_DIR}/${DOCKER_IMAGE_NAME}-${VERSION_SHA}.tgz
+
+    GIT_FILE_STATUS=$(git status -s ${DEP_FILES})
+
+    # If a cache file does not yet exist in the global cache for the given SHA,
+    # store the new cache file into the version cache path.
+    if [ -f ${LOCAL_CACHE_FILE} ]; then
+        if [[ -z ${GIT_FILE_STATUS} && ! -e ${GLOBAL_CACHE_FILE} ]]; then
+            mkdir -p ${GLOBAL_CACHE_DIR}
+            chmod -f 777 ${GLOBAL_CACHE_DIR}
+            FLOCK ${GLOBAL_CACHE_FILE}
+            cp ${LOCAL_CACHE_FILE} ${GLOBAL_CACHE_FILE}
+            chmod -f 777 ${LOCAL_CACHE_FILE} ${GLOBAL_CACHE_FILE}
+            FUNLOCK ${GLOBAL_CACHE_FILE}
+        fi
+    fi
+fi
+
 docker container rm $DOCKER_CONTAINER

View File

@@ -1,7 +1,16 @@
 #!/bin/bash

-TARGET=$1
-FILESYSTEM_ROOT=$2
+[[ ! -z "${DBGOPT}" && $0 =~ ${DBGOPT} ]] && set -x
+
+SCRIPT_SRC_PATH=src/sonic-build-hooks
+if [ -e ${SCRIPT_SRC_PATH} ]; then
+    . ${SCRIPT_SRC_PATH}/scripts/utils.sh
+fi
+
+ARCH=$1
+DISTRO=$2
+TARGET=$3
+FILESYSTEM_ROOT=$4

 VERSIONS_PATH=$TARGET/versions/host-image
 IMAGENAME="host-image"
@@ -13,3 +22,5 @@ sudo LANG=C chroot $FILESYSTEM_ROOT post_run_buildinfo ${IMAGENAME}

 cp -r $FILESYSTEM_ROOT/usr/local/share/buildinfo/pre-versions $VERSIONS_PATH/
 cp -r $FILESYSTEM_ROOT/usr/local/share/buildinfo/post-versions $VERSIONS_PATH/
+
+sudo LANG=C chroot $FILESYSTEM_ROOT post_run_cleanup ${IMAGENAME}

View File

@@ -6,5 +6,8 @@ BUILDINFO_CONFIG=$BUILDINFO_PATH/buildinfo/config/buildinfo.config

 mkdir -p $BUILDINFO_PATH/buildinfo/config

-echo "PACKAGE_URL_PREFIX=$PACKAGE_URL_PREFIX" > $BUILDINFO_CONFIG
-echo "SONIC_VERSION_CONTROL_COMPONENTS=$SONIC_VERSION_CONTROL_COMPONENTS" >> $BUILDINFO_CONFIG
+echo "export PACKAGE_URL_PREFIX=$PACKAGE_URL_PREFIX" > $BUILDINFO_CONFIG
+echo "export SONIC_VERSION_CONTROL_COMPONENTS=$SONIC_VERSION_CONTROL_COMPONENTS" >> $BUILDINFO_CONFIG
+echo "export SONIC_VERSION_CACHE=${SONIC_VERSION_CACHE}" >> $BUILDINFO_CONFIG
+echo "export SONIC_VERSION_CACHE_SOURCE=${SONIC_VERSION_CACHE_SOURCE}" >> $BUILDINFO_CONFIG
+echo "export DISTRO=${DISTRO}" >> $BUILDINFO_CONFIG

View File

@@ -1,9 +1,13 @@
 #!/bin/bash

+[[ ! -z "${DBGOPT}" && $0 =~ ${DBGOPT} ]] && set -x
+
 ARCH=$1
 DISTRO=$2
 FILESYSTEM_ROOT=$3
+HOST_IMAGE_NAME=host-image
+IMAGENAME=${HOST_IMAGE_NAME}

 . /usr/local/share/buildinfo/scripts/buildinfo_base.sh
 VERSION_DEB_PREFERENCE="01-versions-deb"
@@ -26,4 +30,8 @@ if [ "$ENABLE_VERSION_CONTROL_DEB" == "y" ]; then
 fi

 sudo LANG=C chroot $FILESYSTEM_ROOT /bin/bash -c "dpkg -i /usr/local/share/buildinfo/sonic-build-hooks_1.0_all.deb"
-sudo LANG=C chroot $FILESYSTEM_ROOT /bin/bash -c "pre_run_buildinfo"
+#sudo LANG=C chroot $FILESYSTEM_ROOT /bin/bash -c "pre_run_buildinfo"
+sudo LANG=C chroot $FILESYSTEM_ROOT /bin/bash -c "echo export DISTRO=${DISTRO} >> /usr/local/share/buildinfo/config/buildinfo.config"
+sudo LANG=C chroot $FILESYSTEM_ROOT /bin/bash -c "echo export IMAGENAME=${IMAGENAME} >> /usr/local/share/buildinfo/config/buildinfo.config"
+sudo LANG=C chroot $FILESYSTEM_ROOT /bin/bash -c "pre_run_buildinfo ${HOST_IMAGE_NAME}"

View File

@@ -1,14 +1,26 @@
 #!/bin/bash

+[[ ! -z "${DBGOPT}" && $0 =~ ${DBGOPT} ]] && set -x
+
+BUILDINFO_BASE=/usr/local/share/buildinfo
+SCRIPT_SRC_PATH=src/sonic-build-hooks
+if [ -e ${SCRIPT_SRC_PATH} ]; then
+    . ${SCRIPT_SRC_PATH}/scripts/utils.sh
+else
+    . ${BUILDINFO_BASE}/scripts/utils.sh
+fi
+
 IMAGENAME=$1
 DOCKERFILE=$2
 ARCH=$3
-DOCKERFILE_TARGE=$4
+DOCKERFILE_TARGET=$4
 DISTRO=$5

 [ -z "$BUILD_SLAVE" ] && BUILD_SLAVE=n
-[ -z "$DOCKERFILE_TARGE" ] && DOCKERFILE_TARGE=$DOCKERFILE
-DOCKERFILE_PATH=$(dirname "$DOCKERFILE_TARGE")
+[ -z "$DOCKERFILE_TARGET" ] && DOCKERFILE_TARGET=$DOCKERFILE
+DOCKERFILE_PATH=$(dirname "$DOCKERFILE_TARGET")
 BUILDINFO_PATH="${DOCKERFILE_PATH}/buildinfo"
 BUILDINFO_VERSION_PATH="${BUILDINFO_PATH}/versions"
 DOCKER_PATH=$(dirname $DOCKERFILE)
@@ -34,15 +46,18 @@ fi
 scripts/docker_version_control.sh $@

 DOCKERFILE_PRE_SCRIPT='# Auto-Generated for buildinfo
+ARG SONIC_VERSION_CACHE
+ARG SONIC_VERSION_CONTROL_COMPONENTS
 COPY ["buildinfo", "/usr/local/share/buildinfo"]
+COPY vcache/ /sonic/target/vcache/'${IMAGENAME}'
 RUN dpkg -i /usr/local/share/buildinfo/sonic-build-hooks_1.0_all.deb
 ENV IMAGENAME='${IMAGENAME}'
+ENV DISTRO='${DISTRO}'
 RUN pre_run_buildinfo '${IMAGENAME}'
 '

 # Add the auto-generate code if it is not added in the target Dockerfile
-if [ ! -f $DOCKERFILE_TARGE ] || ! grep -q "Auto-Generated for buildinfo" $DOCKERFILE_TARGE; then
+if [ ! -f $DOCKERFILE_TARGET ] || ! grep -q "Auto-Generated for buildinfo" $DOCKERFILE_TARGET; then
     # Insert the docker build script before the RUN command
     LINE_NUMBER=$(grep -Fn -m 1 'RUN' $DOCKERFILE | cut -d: -f1)
     TEMP_FILE=$(mktemp)
@@ -50,12 +65,14 @@ if [ ! -f $DOCKERFILE_TARGE ] || ! grep -q "Auto-Generated for buildinfo" $DOCKE
     # Append the docker build script at the end of the docker file
     echo -e "\nRUN post_run_buildinfo ${IMAGENAME} " >> $TEMP_FILE
+    echo -e "\nRUN post_run_cleanup ${IMAGENAME} " >> $TEMP_FILE

-    cat $TEMP_FILE > $DOCKERFILE_TARGE
+    cat $TEMP_FILE > $DOCKERFILE_TARGET
     rm -f $TEMP_FILE
 fi

 # Copy the build info config
+mkdir -p ${BUILDINFO_PATH}
 cp -rf src/sonic-build-hooks/buildinfo/* $BUILDINFO_PATH

 # Generate the version lock files
@@ -67,3 +84,75 @@ touch $BUILDINFO_VERSION_PATH/versions-deb
 LOCAL_CACHE_DIR=target/vcache/${IMAGENAME}
 mkdir -p ${LOCAL_CACHE_DIR} ${DOCKER_PATH}/vcache/
 chmod -f 777 ${LOCAL_CACHE_DIR} ${DOCKER_PATH}/vcache/
+
+if [[ "$SKIP_BUILD_HOOK" == y || ${ENABLE_VERSION_CONTROL_DOCKER} != y ]]; then
+    exit 0
+fi
+
+# Version cache
+DOCKER_IMAGE_NAME=${IMAGENAME}
+IMAGE_DBGS_NAME=${DOCKER_IMAGE_NAME//-/_}_image_dbgs
+
+if [[ ${DOCKER_IMAGE_NAME} == sonic-slave-* ]]; then
+    GLOBAL_CACHE_DIR=${SONIC_VERSION_CACHE_SOURCE}/${DOCKER_IMAGE_NAME}
+else
+    GLOBAL_CACHE_DIR=/vcache/${DOCKER_IMAGE_NAME}
+fi
+
+SRC_VERSION_PATH=files/build/versions
+if [ ! -z ${SONIC_VERSION_CACHE} ]; then
+
+    # Version files for SHA calculation
+    VERSION_FILES="${SRC_VERSION_PATH}/dockers/${DOCKER_IMAGE_NAME}/versions-*-${DISTRO}-${ARCH} ${SRC_VERSION_PATH}/default/versions-*"
+    DEP_FILES="Dockerfile.j2"
+    if [[ ${DOCKER_IMAGE_NAME} =~ '-dbg' ]]; then
+        DEP_DBG_FILES="build_debug_docker_j2.sh"
+    fi
+
+    # Calculate the version SHA
+    VERSION_SHA="$( (echo -n "${!IMAGE_DBGS_NAME}"; \
+        (cd ${DOCKER_PATH}; cat ${DEP_FILES}); \
+        cat ${DEP_DBG_FILES} ${VERSION_FILES}) \
+        | sha1sum | awk '{print substr($1,0,23);}')"
+    GLOBAL_CACHE_FILE=${GLOBAL_CACHE_DIR}/${DOCKER_IMAGE_NAME}-${VERSION_SHA}.tgz
+    LOCAL_CACHE_FILE=${LOCAL_CACHE_DIR}/cache.tgz
+
+    GIT_FILE_STATUS=$(git status -s ${DEP_FILES})
+
+    # Create an empty tar file as the local cache
+    if [[ ! -f ${LOCAL_CACHE_FILE} ]]; then
+        tar -zcf ${LOCAL_CACHE_FILE} -T /dev/null
+        chmod -f 777 ${LOCAL_CACHE_FILE}
+    fi
+
+    # If the global cache file exists, load from the global cache.
+    if [[ -e ${GLOBAL_CACHE_FILE} ]]; then
+        cp ${GLOBAL_CACHE_FILE} ${LOCAL_CACHE_FILE}
+        touch ${GLOBAL_CACHE_FILE}
+    else
+        # When a file is modified, the global SHA is calculated with the local change.
+        # Load from a previous version of the build cache if one exists.
+        VERSIONS=( "HEAD" "HEAD~1" "HEAD~2" )
+        for VERSION in ${VERSIONS[@]}; do
+            VERSION_PREV_SHA="$( (echo -n "${!IMAGE_DBGS_NAME}"; \
+                (cd ${DOCKER_PATH}; git --no-pager show $(ls -f ${DEP_FILES}|sed 's|.*|'${VERSION}':./&|g')); \
+                (git --no-pager show $(ls -f ${DEP_DBG_FILES} ${VERSION_FILES}|sed 's|.*|'${VERSION}':&|g'))) \
+                | sha1sum | awk '{print substr($1,0,23);}')"
+            GLOBAL_PREV_CACHE_FILE=${GLOBAL_CACHE_DIR}/${DOCKER_IMAGE_NAME}-${VERSION_PREV_SHA}.tgz
+            if [[ -e ${GLOBAL_PREV_CACHE_FILE} ]]; then
+                cp ${GLOBAL_PREV_CACHE_FILE} ${LOCAL_CACHE_FILE}
+                touch ${GLOBAL_PREV_CACHE_FILE}
+                break
+            fi
+        done
+    fi
+
+    rm -f ${DOCKER_PATH}/vcache/cache.tgz
+    ln -f ${LOCAL_CACHE_FILE} ${DOCKER_PATH}/vcache/cache.tgz
+
+else
+    # Delete the cache file if the version cache is disabled.
+    rm -f ${DOCKER_PATH}/vcache/cache.tgz
+fi

View File

@@ -1,14 +1,19 @@
 #!/bin/bash

+[[ ! -z "${DBGOPT}" && $0 =~ ${DBGOPT} ]] && set -x
+
 SLAVE_DIR=$1
 ARCH=$2
 DISTRO=$3

 # Install the latest debian package sonic-build-hooks in the slave container
-sudo dpkg -i --force-overwrite $SLAVE_DIR/buildinfo/sonic-build-hooks_*.deb > /dev/null
+sudo dpkg -i --force-overwrite $SLAVE_DIR/buildinfo/sonic-build-hooks_*.deb &> /dev/null

 # Enable the build hooks
-symlink_build_hooks
+sudo symlink_build_hooks
+
+# set the global permissions
+sudo chmod -f 777 /usr/local/share/buildinfo/ -R

 # Build the slave running config
 cp -rf $SLAVE_DIR/buildinfo/* /usr/local/share/buildinfo/
@@ -21,8 +26,8 @@ apt-get update > /dev/null 2>&1
 # Build the slave version config
 [ -d /usr/local/share/buildinfo/versions ] && rm -rf /usr/local/share/buildinfo/versions
 scripts/versions_manager.py generate -t "/usr/local/share/buildinfo/versions" -n "build-${SLAVE_DIR}" -d "$DISTRO" -a "$ARCH"
-touch ${BUILDINFO_PATH}/versions/versions-deb
+touch ${BUILDINFO_PATH}/versions/versions-deb ${BUILDINFO_PATH}/versions/versions-web

-rm -f /etc/apt/preferences.d/01-versions-deb
-([ "$ENABLE_VERSION_CONTROL_DEB" == "y" ] && [ -f $VERSION_DEB_PREFERENCE ]) && cp -f $VERSION_DEB_PREFERENCE /etc/apt/preferences.d/
+sudo rm -f /etc/apt/preferences.d/01-versions-deb
+([ "$ENABLE_VERSION_CONTROL_DEB" == "y" ] && [ -f $VERSION_DEB_PREFERENCE ]) && sudo cp -f $VERSION_DEB_PREFERENCE /etc/apt/preferences.d/
 exit 0

View File

@@ -4,6 +4,7 @@ import argparse
 import glob
 import os
 import sys
+import re

 ALL_DIST = 'all'
 ALL_ARCH = 'all'
@@ -24,7 +25,7 @@ class Component:
     arch -- Architectrue, such as amd64, arm64, etc
     '''

-    def __init__(self, versions, ctype, dist=ALL_DIST, arch=ALL_ARCH):
+    def __init__(self, verbose=None, versions={}, ctype="deb", dist=ALL_DIST, arch=ALL_ARCH):
         self.versions = versions
         self.ctype = ctype
         if not dist:
@@ -33,6 +34,7 @@ class Component:
             arch = ALL_ARCH
         self.dist = dist
         self.arch = arch
+        self.verbose = verbose

     @classmethod
     def get_versions(cls, version_file):
@@ -51,7 +53,7 @@ class Component:
         return result

     def clone(self):
-        return Component(self.versions.copy(), self.ctype, self.dist, self.arch)
+        return Component(self.verbose, self.versions.copy(), self.ctype, self.dist, self.arch)

     def merge(self, versions, overwritten=True):
         for package in versions:
@@ -71,7 +73,7 @@ class Component:
                 result.append(lines)
             else:
                 result.append('{0}=={1}'.format(package, self.versions[package]))
-        return "\n".join(result)
+        return "\n".join(result)+'\n'

     def dump_to_file(self, version_file, config=False, priority=999):
         if len(self.versions) <= 0:
@@ -92,6 +94,35 @@ class Component:
         file_path = os.path.join(file_path, filename)
         self.dump_to_file(file_path, config, priority)

+    def print(self, file_path):
+        if len(self.versions) <= 0:
+            return
+        if self.verbose is None:
+            return
+        filename = self.get_filename()
+        file_path = os.path.join(file_path, filename)
+        if self.verbose and re.search("cfile=", self.verbose) \
+                and not re.search(self.verbose, "cfile=all".format(filename)) \
+                and not re.search(self.verbose, "cfile={}".format(filename)):
+            return
+        print("VERSION : {}".format(file_path))
+        for package in sorted(self.versions.keys(), key=str.casefold):
+            if self.verbose and re.search("ctype=", self.verbose) \
+                    and not re.search("ctype=all".format(self.ctype), self.verbose) \
+                    and not re.search("ctype={}".format(self.ctype), self.verbose):
+                continue
+            if self.verbose and re.search("cname=", self.verbose) \
+                    and not re.search(self.verbose, "cname=all".format(package)) \
+                    and not re.search(self.verbose, "cname={}".format(package)):
+                continue
+            if self.verbose and re.search("cver=", self.verbose) \
+                    and not re.search(self.verbose, "cver=all".format(self.versions[package])) \
+                    and not re.search(self.verbose, "cver={}".format(self.versions[package])):
+                continue
+            print('{0}=={1}'.format(package, self.versions[package]))
+
     # Check if the self component can be overwritten by the input component
     def check_overwritable(self, component, for_all_dist=False, for_all_arch=False):
         if self.ctype != component.ctype:
@@ -153,9 +184,11 @@ class VersionModule:
     name -- The name of the image, such as sonic-slave-buster, docker-lldp, etc
     '''

-    def __init__(self, name=None, components=None):
+    def __init__(self, verbose=None, name=None, components=None):
         self.name = name
         self.components = components
+        self.module_path=""
+        self.verbose=verbose

     # Overwrite the docker/host image/base image versions
     def overwrite(self, module, for_all_dist=False, for_all_arch=False):
@@ -191,6 +224,7 @@ class VersionModule:
             module = default_module.clone(exclude_ctypes=DEFAULT_OVERWRITE_COMPONENTS)
         return self._get_config_module(module, dist, arch)

+    #Merge the default with specific version
     def _get_config_module(self, default_module, dist, arch):
         module = default_module.clone()
         default_ctype_components = module._get_components_per_ctypes()
@@ -205,11 +239,11 @@ class VersionModule:
                 continue
             config_component = self._get_config_for_ctype(components, dist, arch)
             config_components.append(config_component)
-        config_module = VersionModule(self.name, config_components)
+        config_module = VersionModule(self.verbose, self.name, config_components)
         return config_module

     def _get_config_for_ctype(self, components, dist, arch):
-        result = Component({}, components[0].ctype, dist, arch)
+        result = Component(self.verbose, {}, components[0].ctype, dist, arch)
         for component in sorted(components, key = lambda x : x.get_order_keys()):
             if result.check_inheritable(component):
                 result.merge(component.versions, True)
@@ -224,7 +258,7 @@ class VersionModule:
         components = sorted(components, key = lambda x : x.get_order_keys())
         for i in range(0, len(components)):
             component = components[i]
-            base_module = VersionModule(self.name, components[0:i])
+            base_module = VersionModule(self.verbose, self.name, components[0:i])
             config_module = base_module._get_config_module(default_module, component.dist, component.arch)
             config_components = config_module._get_components_by_ctype(ctype)
             if len(config_components) > 0:
@@ -253,7 +287,7 @@ class VersionModule:
         result = []
         for i in range(0, len(components)):
             component = components[i]
-            inheritable_component = Component({}, component.ctype)
+            inheritable_component = Component(self.verbose, {}, component.ctype)
             for j in range(0, i):
                 base_component = components[j]
                 if component.check_inheritable(base_component):
@@ -276,6 +310,7 @@ class VersionModule:
         file_paths = glob.glob(version_file_pattern)
         components = []
         self.name = os.path.basename(image_path)
+        self.module_path = image_path
         self.components = components
         for file_path in file_paths:
             filename = os.path.basename(file_path)
@@ -296,18 +331,25 @@ class VersionModule:
             if filter_arch and arch and filter_arch != arch and arch != ALL_ARCH:
                 continue
             versions = Component.get_versions(file_path)
-            component = Component(versions, ctype, dist, arch)
+            component = Component(self.verbose, versions, ctype, dist, arch)
             components.append(component)
+            if self.verbose and re.search("stage=load", self.verbose):
+                component.print(file_path)

     def load_from_target(self, image_path):
+        self.module_path=image_path
         post_versions = os.path.join(image_path, 'post-versions')
         if os.path.exists(post_versions):
             self.load(post_versions)
             self.name = os.path.basename(image_path)
+            if self.verbose and re.search("stage=post", self.verbose):
+                self.print(post_versions)
             pre_versions = os.path.join(image_path, 'pre-versions')
             if os.path.exists(pre_versions):
-                pre_module = VersionModule()
+                pre_module = VersionModule(self.verbose)
                 pre_module.load(pre_versions)
+                if self.verbose and re.search("stage=pre", self.verbose):
+                    pre_module.print(pre_versions)
                 self.subtract(pre_module)
         else:
             self.load(image_path)
@@ -319,6 +361,15 @@ class VersionModule:
         for component in self.components:
             component.dump_to_path(module_path, config, priority)

+    def print(self, module_path):
+        if self.verbose is None:
+            return
+        if re.search("cmod=", self.verbose) \
+                and not re.search(self.verbose, "cmod=all".format(self.name)) \
+                and not re.search(self.verbose, "cmod={}".format(self.name)):
+            return
+        for component in self.components:
+            component.print(module_path)
+
     def filter(self, ctypes=[]):
         if 'all' in ctypes:
             return self
@@ -340,7 +391,7 @@ class VersionModule:
             if ctypes and component.ctype not in ctypes:
                 continue
             components.append(component.clone())
-        return VersionModule(self.name, components)
+        return VersionModule(self.verbose, self.name, components)

     def is_slave_module(self):
         return self.name.startswith('sonic-slave-')
@@ -370,14 +421,18 @@ class VersionModule:
             return os.path.join(source_path, 'files/build/versions/build', module_name)
         return os.path.join(source_path, 'files/build/versions/dockers', module_name)

+    def __repr__(self):
+        return repr(self.name)
+
 class VersionBuild:
     '''
     The VersionBuild consists of multiple version modules.
     '''

-    def __init__(self, target_path="./target", source_path='.'):
+    def __init__(self, verbose=None, target_path="./target", source_path='.'):
         self.target_path = target_path
         self.source_path = source_path
+        self.verbose = verbose
         self.modules = {}

     def load_from_target(self):
@@ -394,8 +449,11 @@ class VersionBuild:
         for file_path in file_paths:
             if not os.path.isdir(file_path):
                 continue
-            module = VersionModule()
+            module = VersionModule(self.verbose)
             module.load_from_target(file_path)
+            if self.verbose and re.search("stage=tmodname", self.verbose):
+                print("Target modname={}, path={}".format(module.name, file_path))
+                module.print(file_path)
             modules[module.name] = module
         self._merge_dgb_modules()
@@ -411,8 +469,11 @@ class VersionBuild:
         modules = {}
         self.modules = modules
         for image_path in paths:
-            module = VersionModule()
+            module = VersionModule(self.verbose)
             module.load(image_path)
+            if self.verbose and re.search("stage=smodname", self.verbose):
+                print("Source modname={}, path={}".format(module.name, image_path))
+                module.print(image_path)
             modules[module.name] = module

     def overwrite(self, build, for_all_dist=False, for_all_arch=False):
@@ -430,6 +491,13 @@ class VersionBuild:
             module_path = self.get_module_path(module)
             module.dump(module_path)

+    def print(self, message=None):
+        if self.verbose is None:
+            return
+        if message is not None:
+            print("[============={}===========]".format(message))
+        for module in [ self.modules[x] for x in (sorted(self.modules, key = lambda x : x)) ]:
+            module.print(module.module_path)
+
     def subtract(self, default_module):
         none_aggregatable_module = default_module.clone(exclude_ctypes=DEFAULT_OVERWRITE_COMPONENTS)
         for module in self.modules.values():
@@ -455,20 +523,39 @@ class VersionBuild:
             self.dump()
             return
         self.load_from_source()
+        if self.verbose and re.search("stage=init", self.verbose):
+            self.print("Initial Source")
         default_module = self.modules.get(DEFAULT_MODULE, None)
-        target_build = VersionBuild(self.target_path, self.source_path)
+        if self.verbose and re.search("stage=init", self.verbose):
+            default_module.print("Default Module")
+        target_build = VersionBuild(self.verbose, self.target_path, self.source_path)
         target_build.load_from_target()
         target_build.filter(ctypes=ctypes)
+        if self.verbose and re.search("stage=init", self.verbose):
+            target_build.print("Initial Target")
         if not default_module:
             raise Exception("The default versions does not exist")
-        for module in target_build.modules.values():
+        for module in [ target_build.modules[x] for x in (sorted(target_build.modules, key = lambda x : x)) ] :
             if module.is_individule_version():
                 continue
             tmp_module = module.clone(exclude_ctypes=DEFAULT_OVERWRITE_COMPONENTS)
             default_module.overwrite(tmp_module, for_all_dist=True, for_all_arch=True)
+        if self.verbose and re.search("stage=tmp", self.verbose):
+            default_module.print("TMP DEFAULT MODULE")
         target_build.subtract(default_module)
+        if self.verbose and re.search("stage=tmp", self.verbose):
+            target_build.print("After Subtract Target")
+            self.print("After Subtract Source")
         self.overwrite(target_build, for_all_dist=for_all_dist, for_all_arch=for_all_arch)
-        self.dump()
+
+        if self.verbose and re.search("stage=add", self.verbose):
+            self.print("After Merge")
+
+        if not self.verbose or not re.search("dryrun", self.verbose):
+            self.dump()

     def filter(self, ctypes=[]):
         for module in self.modules.values():
@@ -485,14 +572,14 @@ class VersionBuild:
             for dist in dists:
                 versions = self._get_versions(ctype, dist)
                 common_versions = self._get_common_versions(versions)
-                component = Component(common_versions, ctype, dist)
+                component = Component(self.verbose, common_versions, ctype, dist)
                 components.append(component)
         else:
             versions = self._get_versions(ctype)
             common_versions = self._get_common_versions(versions)
-            component = Component(common_versions, ctype)
+            component = Component(self.verbose, common_versions, ctype)
             components.append(component)
-        return VersionModule(DEFAULT_MODULE, components)
+        return VersionModule(self.verbose, DEFAULT_MODULE, components)

     def get_aggregatable_modules(self):
         modules = {}
@@ -619,11 +706,13 @@ class VersionManagerCommands:
         parser.add_argument('-d', '--for_all_dist', action='store_true', help='apply the versions for all distributions')
         parser.add_argument('-a', '--for_all_arch', action='store_true', help='apply the versions for all architectures')
         parser.add_argument('-c', '--ctypes', default='all', help='component types to freeze')
+        parser.add_argument('-v', '--verbose', default=None, help="verbose mode")
         args = parser.parse_args(sys.argv[2:])
         ctypes = args.ctypes.split(',')
         if len(ctypes) == 0:
             ctypes = ['all']
-        build = VersionBuild(target_path=args.target_path, source_path=args.source_path)
+
+        build = VersionBuild(verbose=args.verbose, target_path=args.target_path, source_path=args.source_path)
         build.freeze(rebuild=args.rebuild, for_all_dist=args.for_all_dist, for_all_arch=args.for_all_arch, ctypes=ctypes)

     def merge(self):
@@ -632,6 +721,8 @@ class VersionManagerCommands:
         parser.add_argument('-m', '--module_path', default=None, help='merge path, use the target path if not specified')
         parser.add_argument('-b', '--base_path', required=True, help='base path, merge to the module path')
         parser.add_argument('-e', '--exclude_module_path', default=None, help='exclude module path')
+        parser.add_argument('-i', '--include_module_path', default=None, help='include module path')
+        parser.add_argument('-v', '--verbose', default=None, help="verbose mode")
         args = parser.parse_args(sys.argv[2:])
         module_path = args.module_path
         if not module_path:
@@ -640,15 +731,22 @@ class VersionManagerCommands:
             print('The module path {0} does not exist'.format(module_path))
         if not os.path.exists(args.target_path):
             os.makedirs(args.target_path)
-        module = VersionModule()
+        module = VersionModule(args.verbose)
         module.load(module_path)
-        base_module = VersionModule()
+        base_module = VersionModule(args.verbose)
         base_module.load(args.base_path)
         module.overwrite(base_module)
         if args.exclude_module_path:
-            exclude_module = VersionModule()
+            exclude_module = VersionModule(args.verbose)
             exclude_module.load(args.exclude_module_path)
             module.subtract(exclude_module)
+        if args.include_module_path:
+            include_module = VersionModule(args.verbose)
+            include_module.load(args.include_module_path)
+            if args.verbose:
+                include_module.print(args.include_module_path)
+            include_module.overwrite(module)
+            module.overwrite(include_module)
         module.dump(args.target_path)

     def generate(self):
@@ -661,6 +759,7 @@ class VersionManagerCommands:
         parser.add_argument('-d', '--distribution', required=True, help="distribution")
         parser.add_argument('-a', '--architecture', required=True, help="architecture")
         parser.add_argument('-p', '--priority', default=999, help="priority of the debian apt preference")
+        parser.add_argument('-v', '--verbose', default=None, help="verbose mode")
         args = parser.parse_args(sys.argv[2:])

         module_path = args.module_path
@@ -668,11 +767,20 @@ class VersionManagerCommands:
             module_path = VersionModule.get_module_path_by_name(args.source_path, args.module_name)
         if not os.path.exists(args.target_path):
             os.makedirs(args.target_path)
-        module = VersionModule()
+        module = VersionModule(args.verbose)
         module.load(module_path, filter_dist=args.distribution, filter_arch=args.architecture)
         config = module.get_config_module(args.source_path, args.distribution, args.architecture)
+        if args.verbose:
+            config.print(args.source_path)
         config.clean_info(force=True)
         config.dump(args.target_path, config=True, priority=args.priority)

 if __name__ == "__main__":
     VersionManagerCommands()
+
+"""
+Dry run examples:
+scripts/versions_manager.py freeze -v 'dryrun|cmod=docker-config-engine-stretch|cfile=versions-py2|cname=all|stage=sub|stage=add|stage=init|stage=tmodname|stage=tmp'
+scripts/versions_manager.py freeze -v 'dryrun|cmod=default|cfile=versions-docker|cname=all|stage=sub|stage=add|stage=init|stage=tmodname|stage=tmp'
+"""

View File

@ -92,6 +92,10 @@ export BUILD_WORKDIR
## Define configuration, help etc. ## Define configuration, help etc.
############################################################################### ###############################################################################
# Install the updated build hooks if INSHOOKS flag is set
export INSHOOKS=y
$(if $(INSHOOKS),$(shell sudo dpkg -i /usr/local/share/buildinfo/sonic-build-hooks_1.0_all.deb &>/dev/null))
.platform : .platform :
ifneq ($(CONFIGURED_PLATFORM),generic) ifneq ($(CONFIGURED_PLATFORM),generic)
$(Q)echo Build system is not configured, please run make configure $(Q)echo Build system is not configured, please run make configure
@ -423,8 +427,9 @@ $(info SONiC Build System for $(CONFIGURED_PLATFORM):$(CONFIGURED_ARCH))
endif endif
# Overwrite the buildinfo in slave container # Overwrite the buildinfo in slave container
$(shell sudo scripts/prepare_slave_container_buildinfo.sh $(SLAVE_DIR) $(CONFIGURED_ARCH) $(BLDENV)) ifeq ($(filter clean,$(MAKECMDGOALS)),)
$(shell DBGOPT='$(DBGOPT)' scripts/prepare_slave_container_buildinfo.sh $(SLAVE_DIR) $(CONFIGURED_ARCH) $(BLDENV))
endif
include Makefile.cache include Makefile.cache
ifeq ($(SONIC_USE_DOCKER_BUILDKIT),y) ifeq ($(SONIC_USE_DOCKER_BUILDKIT),y)
@ -558,7 +563,7 @@ $(addprefix $(DEBS_PATH)/, $(SONIC_ONLINE_DEBS)) : $(DEBS_PATH)/% : .platform \
if [ -z '$($*_CACHE_LOADED)' ] ; then if [ -z '$($*_CACHE_LOADED)' ] ; then
$(foreach deb,$* $($*_DERIVED_DEBS), \ $(foreach deb,$* $($*_DERIVED_DEBS), \
{ curl -L -f -o $(DEBS_PATH)/$(deb) $($(deb)_CURL_OPTIONS) $($(deb)_URL) $(LOG) || { exit 1 ; } } ; ) { SKIP_BUILD_HOOK=$($*_SKIP_VERSION) curl -L -f -o $(DEBS_PATH)/$(deb) $($(deb)_CURL_OPTIONS) $($(deb)_URL) $(LOG) || { exit 1 ; } } ; )
# Save the target deb into DPKG cache # Save the target deb into DPKG cache
$(call SAVE_CACHE,$*,$@) $(call SAVE_CACHE,$*,$@)
@ -575,7 +580,7 @@ SONIC_TARGET_LIST += $(addprefix $(DEBS_PATH)/, $(SONIC_ONLINE_DEBS))
# SONIC_ONLINE_FILES += $(SOME_NEW_FILE) # SONIC_ONLINE_FILES += $(SOME_NEW_FILE)
$(addprefix $(FILES_PATH)/, $(SONIC_ONLINE_FILES)) : $(FILES_PATH)/% : .platform $(addprefix $(FILES_PATH)/, $(SONIC_ONLINE_FILES)) : $(FILES_PATH)/% : .platform
$(HEADER) $(HEADER)
curl -L -f -o $@ $($*_CURL_OPTIONS) $($*_URL) $(LOG) SKIP_BUILD_HOOK=$($*_SKIP_VERSION) curl -L -f -o $@ $($*_CURL_OPTIONS) $($*_URL) $(LOG)
$(FOOTER) $(FOOTER)
SONIC_TARGET_LIST += $(addprefix $(FILES_PATH)/, $(SONIC_ONLINE_FILES)) SONIC_TARGET_LIST += $(addprefix $(FILES_PATH)/, $(SONIC_ONLINE_FILES))
@ -883,7 +888,7 @@ $(SONIC_INSTALL_WHEELS) : $(PYTHON_WHEELS_PATH)/%-install : .platform $$(addsuff
while true; do while true; do
if mkdir $(PYTHON_WHEELS_PATH)/pip_lock &> /dev/null; then if mkdir $(PYTHON_WHEELS_PATH)/pip_lock &> /dev/null; then
ifneq ($(CROSS_BUILD_ENVIRON),y) ifneq ($(CROSS_BUILD_ENVIRON),y)
{ sudo -E pip$($*_PYTHON_VERSION) install $(PYTHON_WHEELS_PATH)/$* $(LOG) && rm -d $(PYTHON_WHEELS_PATH)/pip_lock && break; } || { rm -d $(PYTHON_WHEELS_PATH)/pip_lock && exit 1 ; } { sudo -E SKIP_BUILD_HOOK=Y pip$($*_PYTHON_VERSION) install $(PYTHON_WHEELS_PATH)/$* $(LOG) && rm -d $(PYTHON_WHEELS_PATH)/pip_lock && break; } || { rm -d $(PYTHON_WHEELS_PATH)/pip_lock && exit 1 ; }
else else
# Link python script and data expected location to the cross python virtual env istallation locations # Link python script and data expected location to the cross python virtual env istallation locations
{ PATH=$(VIRTENV_BIN_CROSS_PYTHON$($*_PYTHON_VERSION)):${PATH} sudo -E $(VIRTENV_BIN_CROSS_PYTHON$($*_PYTHON_VERSION))/pip$($*_PYTHON_VERSION) install $(PYTHON_WHEELS_PATH)/$* $(LOG) && $(if $(findstring $(SONIC_CONFIG_ENGINE_PY3),$*),(sudo ln -s $(VIRTENV_BIN_CROSS_PYTHON$($*_PYTHON_VERSION))/sonic-cfggen /usr/local/bin/sonic-cfggen 2>/dev/null || true), true ) && $(if $(findstring $(SONIC_YANG_MODELS_PY3),$*),(sudo ln -s $(VIRTENV_BASE_CROSS_PYTHON3)/yang-models /usr/local/yang-models 2>/dev/null || true), true ) && rm -d $(PYTHON_WHEELS_PATH)/pip_lock && break; } || { rm -d $(PYTHON_WHEELS_PATH)/pip_lock && exit 1 ; } { PATH=$(VIRTENV_BIN_CROSS_PYTHON$($*_PYTHON_VERSION)):${PATH} sudo -E $(VIRTENV_BIN_CROSS_PYTHON$($*_PYTHON_VERSION))/pip$($*_PYTHON_VERSION) install $(PYTHON_WHEELS_PATH)/$* $(LOG) && $(if $(findstring $(SONIC_CONFIG_ENGINE_PY3),$*),(sudo ln -s $(VIRTENV_BIN_CROSS_PYTHON$($*_PYTHON_VERSION))/sonic-cfggen /usr/local/bin/sonic-cfggen 2>/dev/null || true), true ) && $(if $(findstring $(SONIC_YANG_MODELS_PY3),$*),(sudo ln -s $(VIRTENV_BASE_CROSS_PYTHON3)/yang-models /usr/local/yang-models 2>/dev/null || true), true ) && rm -d $(PYTHON_WHEELS_PATH)/pip_lock && break; } || { rm -d $(PYTHON_WHEELS_PATH)/pip_lock && exit 1 ; }
@ -911,7 +916,11 @@ $(addprefix $(TARGET_PATH)/, $(SONIC_SIMPLE_DOCKER_IMAGES)) : $(TARGET_PATH)/%.g
# Apply series of patches if exist # Apply series of patches if exist
if [ -f $($*.gz_PATH).patch/series ]; then pushd $($*.gz_PATH) && QUILT_PATCHES=../$(notdir $($*.gz_PATH)).patch quilt push -a; popd; fi if [ -f $($*.gz_PATH).patch/series ]; then pushd $($*.gz_PATH) && QUILT_PATCHES=../$(notdir $($*.gz_PATH)).patch quilt push -a; popd; fi
# Prepare docker build info # Prepare docker build info
scripts/prepare_docker_buildinfo.sh $* $($*.gz_PATH)/Dockerfile $(CONFIGURED_ARCH) $(TARGET_DOCKERFILE)/Dockerfile.buildinfo SONIC_ENFORCE_VERSIONS=$(SONIC_ENFORCE_VERSIONS) \
TRUSTED_GPG_URLS=$(TRUSTED_GPG_URLS) \
SONIC_VERSION_CACHE=$(SONIC_VERSION_CACHE) \
DBGOPT='$(DBGOPT)' \
scripts/prepare_docker_buildinfo.sh $* $($*.gz_PATH)/Dockerfile $(CONFIGURED_ARCH) $(TARGET_DOCKERFILE)/Dockerfile.buildinfo $(LOG)
docker info $(LOG) docker info $(LOG)
docker build --squash --no-cache \ docker build --squash --no-cache \
--build-arg http_proxy=$(HTTP_PROXY) \ --build-arg http_proxy=$(HTTP_PROXY) \
@ -926,9 +935,12 @@ $(addprefix $(TARGET_PATH)/, $(SONIC_SIMPLE_DOCKER_IMAGES)) : $(TARGET_PATH)/%.g
-t $(DOCKER_IMAGE_REF) $($*.gz_PATH) $(LOG) -t $(DOCKER_IMAGE_REF) $($*.gz_PATH) $(LOG)
if [ x$(SONIC_CONFIG_USE_NATIVE_DOCKERD_FOR_BUILD) == x"y" ]; then docker tag $(DOCKER_IMAGE_REF) $*; fi if [ x$(SONIC_CONFIG_USE_NATIVE_DOCKERD_FOR_BUILD) == x"y" ]; then docker tag $(DOCKER_IMAGE_REF) $*; fi
scripts/collect_docker_version_files.sh $* $(TARGET_PATH) $(DOCKER_IMAGE_REF) $($*.gz_PATH) $(LOG) SONIC_VERSION_CACHE=$(SONIC_VERSION_CACHE) ARCH=${CONFIGURED_ARCH} \
DBGOPT='$(DBGOPT)' \
scripts/collect_docker_version_files.sh $* $(TARGET_PATH) $(DOCKER_IMAGE_REF) $($*.gz_PATH) $(LOG)
$(call docker-image-save,$*,$@) $(call docker-image-save,$*,$@)
# Clean up # Clean up
if [ -f $($*.gz_PATH).patch/series ]; then pushd $($*.gz_PATH) && quilt pop -a -f; [ -d .pc ] && rm -rf .pc; popd; fi if [ -f $($*.gz_PATH).patch/series ]; then pushd $($*.gz_PATH) && quilt pop -a -f; [ -d .pc ] && rm -rf .pc; popd; fi
$(FOOTER) $(FOOTER)
@ -1030,7 +1042,9 @@ $(addprefix $(TARGET_PATH)/, $(DOCKER_IMAGES)) : $(TARGET_PATH)/%.gz : .platform
PACKAGE_URL_PREFIX=$(PACKAGE_URL_PREFIX) \ PACKAGE_URL_PREFIX=$(PACKAGE_URL_PREFIX) \
SONIC_ENFORCE_VERSIONS=$(SONIC_ENFORCE_VERSIONS) \ SONIC_ENFORCE_VERSIONS=$(SONIC_ENFORCE_VERSIONS) \
TRUSTED_GPG_URLS=$(TRUSTED_GPG_URLS) \ TRUSTED_GPG_URLS=$(TRUSTED_GPG_URLS) \
scripts/prepare_docker_buildinfo.sh $* $($*.gz_PATH)/Dockerfile $(CONFIGURED_ARCH) SONIC_VERSION_CACHE=$(SONIC_VERSION_CACHE) \
DBGOPT='$(DBGOPT)' \
scripts/prepare_docker_buildinfo.sh $* $($*.gz_PATH)/Dockerfile $(CONFIGURED_ARCH) $(LOG)
docker info $(LOG) docker info $(LOG)
docker build --squash --no-cache \ docker build --squash --no-cache \
--build-arg http_proxy=$(HTTP_PROXY) \ --build-arg http_proxy=$(HTTP_PROXY) \
@ -1042,6 +1056,8 @@ $(addprefix $(TARGET_PATH)/, $(DOCKER_IMAGES)) : $(TARGET_PATH)/%.gz : .platform
--build-arg docker_container_name=$($*.gz_CONTAINER_NAME) \ --build-arg docker_container_name=$($*.gz_CONTAINER_NAME) \
--build-arg frr_user_uid=$(FRR_USER_UID) \ --build-arg frr_user_uid=$(FRR_USER_UID) \
--build-arg frr_user_gid=$(FRR_USER_GID) \ --build-arg frr_user_gid=$(FRR_USER_GID) \
--build-arg SONIC_VERSION_CACHE=$(SONIC_VERSION_CACHE) \
--build-arg SONIC_VERSION_CACHE_SOURCE=$(SONIC_VERSION_CACHE_SOURCE) \
--build-arg image_version=$(SONIC_IMAGE_VERSION) \ --build-arg image_version=$(SONIC_IMAGE_VERSION) \
--label com.azure.sonic.manifest="$$(cat $($*.gz_PATH)/manifest.json)" \ --label com.azure.sonic.manifest="$$(cat $($*.gz_PATH)/manifest.json)" \
--label Tag=$(SONIC_IMAGE_VERSION) \ --label Tag=$(SONIC_IMAGE_VERSION) \
@@ -1049,10 +1065,13 @@ $(addprefix $(TARGET_PATH)/, $(DOCKER_IMAGES)) : $(TARGET_PATH)/%.gz : .platform
    -t $(DOCKER_IMAGE_REF) $($*.gz_PATH) $(LOG)
if [ x$(SONIC_CONFIG_USE_NATIVE_DOCKERD_FOR_BUILD) == x"y" ]; then docker tag $(DOCKER_IMAGE_REF) $*; fi
SONIC_VERSION_CACHE=$(SONIC_VERSION_CACHE) ARCH=${CONFIGURED_ARCH} \
    DBGOPT='$(DBGOPT)' \
    scripts/collect_docker_version_files.sh $* $(TARGET_PATH) $(DOCKER_IMAGE_REF) $($*.gz_PATH) $($*.gz_PATH)/Dockerfile $(LOG)
if [ ! -z $(filter $*.gz,$(SONIC_PACKAGES_LOCAL)) ]; then docker tag $(DOCKER_IMAGE_REF) $*:$(SONIC_IMAGE_VERSION); fi
$(call docker-image-save,$*,$@)
# Clean up
if [ -f $($*.gz_PATH).patch/series ]; then pushd $($*.gz_PATH) && quilt pop -a -f; [ -d .pc ] && rm -rf .pc; popd; fi
@@ -1092,7 +1111,9 @@ $(addprefix $(TARGET_PATH)/, $(DOCKER_DBG_IMAGES)) : $(TARGET_PATH)/%-$(DBG_IMAG
PACKAGE_URL_PREFIX=$(PACKAGE_URL_PREFIX) \
SONIC_ENFORCE_VERSIONS=$(SONIC_ENFORCE_VERSIONS) \
TRUSTED_GPG_URLS=$(TRUSTED_GPG_URLS) \
SONIC_VERSION_CACHE=$(SONIC_VERSION_CACHE) \
DBGOPT='$(DBGOPT)' \
scripts/prepare_docker_buildinfo.sh $*-dbg $($*.gz_PATH)/Dockerfile-dbg $(CONFIGURED_ARCH) $(LOG)
docker info $(LOG)
docker build \
    $(if $($*.gz_DBG_DEPENDS), --squash --no-cache, --no-cache) \
@@ -1100,16 +1121,21 @@ $(addprefix $(TARGET_PATH)/, $(DOCKER_DBG_IMAGES)) : $(TARGET_PATH)/%-$(DBG_IMAG
    --build-arg https_proxy=$(HTTPS_PROXY) \
    --build-arg no_proxy=$(NO_PROXY) \
    --build-arg docker_container_name=$($*.gz_CONTAINER_NAME) \
    --build-arg SONIC_VERSION_CACHE=$(SONIC_VERSION_CACHE) \
    --build-arg SONIC_VERSION_CACHE_SOURCE=$(SONIC_VERSION_CACHE_SOURCE) \
    --label com.azure.sonic.manifest="$$(cat $($*.gz_PATH)/manifest.json)" \
    --label Tag=$(SONIC_IMAGE_VERSION) \
    --file $($*.gz_PATH)/Dockerfile-dbg \
    -t $(DOCKER_DBG_IMAGE_REF) $($*.gz_PATH) $(LOG)
if [ x$(SONIC_CONFIG_USE_NATIVE_DOCKERD_FOR_BUILD) == x"y" ]; then docker tag $(DOCKER_IMAGE_REF) $*; fi
SONIC_VERSION_CACHE=$(SONIC_VERSION_CACHE) ARCH=${CONFIGURED_ARCH} \
    DBGOPT='$(DBGOPT)' \
    scripts/collect_docker_version_files.sh $*-dbg $(TARGET_PATH) $(DOCKER_DBG_IMAGE_REF) $($*.gz_PATH) $($*.gz_PATH)/Dockerfile-dbg $(LOG)
if [ ! -z $(filter $*.gz,$(SONIC_PACKAGES_LOCAL)) ]; then docker tag $(DOCKER_IMAGE_REF) $*:$(SONIC_IMAGE_VERSION); fi
$(call docker-image-save,$*-$(DBG_IMAGE_MARK),$@)
# Clean up
docker rmi -f $(DOCKER_IMAGE_REF) &> /dev/null || true
if [ -f $($*.gz_PATH).patch/series ]; then pushd $($*.gz_PATH) && quilt pop -a -f; [ -d .pc ] && rm -rf .pc; popd; fi
@@ -1398,6 +1424,8 @@ $(addprefix $(TARGET_PATH)/, $(SONIC_INSTALLERS)) : $(TARGET_PATH)/% : \
    SIGNING_KEY="$(SIGNING_KEY)" \
    SIGNING_CERT="$(SIGNING_CERT)" \
    PACKAGE_URL_PREFIX=$(PACKAGE_URL_PREFIX) \
    DBGOPT='$(DBGOPT)' \
    SONIC_VERSION_CACHE=$(SONIC_VERSION_CACHE) \
    MULTIARCH_QEMU_ENVIRON=$(MULTIARCH_QEMU_ENVIRON) \
    CROSS_BUILD_ENVIRON=$(CROSS_BUILD_ENVIRON) \
    MASTER_KUBERNETES_VERSION=$(MASTER_KUBERNETES_VERSION) \
@@ -1466,7 +1494,7 @@ SONIC_CLEAN_TARGETS += $(addsuffix -clean,$(addprefix $(TARGET_PATH)/, \
    $(SONIC_SIMPLE_DOCKER_IMAGES) \
    $(SONIC_INSTALLERS)))
$(SONIC_CLEAN_TARGETS) :: $(TARGET_PATH)/%-clean : .platform
    $(Q)rm -f $(TARGET_PATH)/$* target/versions/dockers/$(subst .gz,,$*)

SONIC_CLEAN_STDEB_DEBS = $(addsuffix -clean,$(addprefix $(PYTHON_DEBS_PATH)/, \
    $(SONIC_PYTHON_STDEB_DEBS)))
@@ -1480,8 +1508,13 @@ $(SONIC_CLEAN_WHEELS) :: $(PYTHON_WHEELS_PATH)/%-clean : .platform
clean-logs :: .platform
    $(Q)rm -f $(TARGET_PATH)/*.log $(DEBS_PATH)/*.log $(FILES_PATH)/*.log $(PYTHON_DEBS_PATH)/*.log $(PYTHON_WHEELS_PATH)/*.log

clean-versions :: .platform
    @rm -rf target/versions/*

vclean:: .platform
    @sudo rm -rf target/vcache/* target/baseimage*

clean :: .platform clean-logs clean-versions $$(SONIC_CLEAN_DEBS) $$(SONIC_CLEAN_FILES) $$(SONIC_CLEAN_TARGETS) $$(SONIC_CLEAN_STDEB_DEBS) $$(SONIC_CLEAN_WHEELS)
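
Note: with the targets above, cache hygiene splits three ways (a sketch using
only the targets defined in this change):

    make clean-versions   # remove the generated version files (target/versions/*)
    make vclean           # also purge the shared version cache and base images
    make clean            # full clean; now triggers clean-versions as well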
###############################################################################
## all


@@ -5,7 +5,7 @@ SONIC_BUILD_HOOKS_PACKAGE = $(SONIC_BUILD_HOOKS)_$(SONIC_BUILD_HOOKS_VERSION)_al
BUILDINFO_DIR = buildinfo
TMP_DIR = tmp
SYMBOL_LINKS_SRC_DIR = ../../usr/local/share/buildinfo/scripts
SYMBOL_LINKS = symlink_build_hooks post_run_buildinfo pre_run_buildinfo collect_version_files post_run_cleanup
SONIC_BUILD_HOOKS_TARGET = $(BUILDINFO_DIR)/$(SONIC_BUILD_HOOKS_PACKAGE)
BUILD_ROOT_DIR = $(TMP_DIR)/$(SONIC_BUILD_HOOKS)
DEBIAN_DIR = $(BUILD_ROOT_DIR)/DEBIAN


@@ -23,6 +23,7 @@ fi
PKG_CACHE_FILE_NAME=${PKG_CACHE_PATH}/cache.tgz
mkdir -p ${PKG_CACHE_PATH}

. ${BUILDINFO_PATH}/scripts/utils.sh

URL_PREFIX=$(echo "${PACKAGE_URL_PREFIX}" | sed -E "s#(//[^/]*/).*#\1#")
@@ -35,9 +36,15 @@ fi
log_err()
{
    echo "$(date "+%F-%H-%M-%S") ERR $1" >> $LOG_PATH/error.log
    echo "$1" 1>&2
}

log_info()
{
    echo "$(date "+%F-%H-%M-%S") INFO $1" >> $LOG_PATH/info.log
    echo "$1" 1>&2
}

# Get the real command not hooked by the sonic-build-hook package
get_command()
@@ -76,6 +83,28 @@ check_if_url_exist()
    fi
}
get_version_cache_option()
{
    #SONIC_VERSION_CACHE="cache"
    if [ ! -z ${SONIC_VERSION_CACHE} ]; then
        if [ ${SONIC_VERSION_CACHE} == "rcache" ]; then
            echo -n "rcache"
        elif [ ${SONIC_VERSION_CACHE} == "wcache" ]; then
            echo -n "wcache"
        elif [ ${SONIC_VERSION_CACHE} == "cache" ]; then
            echo -n "wcache"
        else
            echo -n ""
            return 1
        fi
        return 0
    fi
    echo -n ""
    return 1
}
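
Note: a sketch of how a caller might branch on this helper's output; the case
arms are illustrative, but the mode semantics (rcache = read-only, wcache and
cache = read/write) follow the mapping above:

    case "$(get_version_cache_option)" in
        rcache) echo "use an existing cache file, do not refresh it" ;;
        wcache) echo "load the cache file if present and refresh it after the build" ;;
        *)      echo "version caching disabled" ;;
    esac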
# Enable or disable the reproducible mirrors
set_reproducible_mirrors()
{
@@ -115,7 +144,7 @@ download_packages()
        local filename=$(echo $url | awk -F"/" '{print $NF}' | cut -d? -f1 | cut -d# -f1)
        [ -f $WEB_VERSION_FILE ] && version=$(grep "^${url}=" $WEB_VERSION_FILE | awk -F"==" '{print $NF}')
        if [ -z "$version" ]; then
            log_err "Warning: Failed to verify the package: $url, the version is not specified"
            continue
        fi
@@ -129,7 +158,7 @@ download_packages()
        else
            real_version=$(get_url_version $url)
            if [ "$real_version" != "$version" ]; then
                log_err "Failed to verify url: $url, real hash value: $real_version, expected value: $version_filename"
                exit 1
            fi
        fi
@@ -294,10 +323,10 @@ update_version_file()
    if [ ! -f "$pre_version_file" ]; then
        return 0
    fi
    local package_versions="$(cat $pre_version_file)"
    [ -f "$version_file" ] && package_versions="$package_versions $(cat $version_file)"
    declare -A versions
    for package_version in $package_versions; do
        package=$(echo $package_version | awk -F"==" '{print $1}')
        version=$(echo $package_version | awk -F"==" '{print $2}')
        if [ -z "$package" ] || [ -z "$version" ]; then
@@ -331,4 +360,8 @@ ENABLE_VERSION_CONTROL_PY2=$(check_version_control "py2")
ENABLE_VERSION_CONTROL_PY3=$(check_version_control "py3")
ENABLE_VERSION_CONTROL_WEB=$(check_version_control "web")
ENABLE_VERSION_CONTROL_GIT=$(check_version_control "git")
ENABLE_VERSION_CONTROL_PIP=$(check_version_control "pip")
ENABLE_VERSION_CONTROL_PYTHON=$(check_version_control "python")
ENABLE_VERSION_CONTROL_EASY_INSTALL=$(check_version_control "easy_install")
ENABLE_VERSION_CONTROL_GO=$(check_version_control "go")
ENABLE_VERSION_CONTROL_DOCKER=$(check_version_control "docker")
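
Note: these flags follow the component names accepted by
SONIC_VERSION_CONTROL_COMPONENTS; an illustrative setting that pins the new
component types (the exact list chosen here is an assumption) would be:

    # rules/config or environment (illustrative):
    SONIC_VERSION_CONTROL_COMPONENTS=deb,py2,py3,web,git,pip,python,easy_install,go,docker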


@@ -1,8 +1,9 @@
#!/bin/bash
TARGET_PATH=$1
. /usr/local/share/buildinfo/scripts/buildinfo_base.sh
[ -d ${TARGET_PATH} ] && rm -rf ${TARGET_PATH}
[ -z "$TARGET_PATH" ] && TARGET_PATH=$POST_VERSION_PATH
ARCH=$(dpkg --print-architecture)
DIST=$(grep VERSION_CODENAME /etc/os-release | cut -d= -f2)
@@ -11,9 +12,15 @@ DIST=$(grep VERSION_CODENAME /etc/os-release | cut -d= -f2)
mkdir -p $TARGET_PATH
chmod a+rw $TARGET_PATH

# Skip the packages that do not have a static build version.
# SAI package versions are changed too frequently.
SKIP_VERSION_PACKAGE="libsaibcm|libpaibcm|linuxptp|@ file://"
dpkg-query -W -f '${Package}==${Version}\n' | grep -Ev "${SKIP_VERSION_PACKAGE}" > "${TARGET_PATH}/versions-deb-${DIST}-${ARCH}"
([ -x "/usr/local/bin/pip2" ] || [ -x "/usr/bin/pip2" ]) && pip2 freeze --all | grep -Ev "${SKIP_VERSION_PACKAGE}" > "${TARGET_PATH}/versions-py2-${DIST}-${ARCH}"
([ -x "/usr/local/bin/pip3" ] || [ -x "/usr/bin/pip3" ]) && pip3 freeze --all | grep -Ev "${SKIP_VERSION_PACKAGE}" > "${TARGET_PATH}/versions-py3-${DIST}-${ARCH}"
[ -f "${BUILD_WEB_VERSION_FILE}" ] && cp ${BUILD_WEB_VERSION_FILE} ${TARGET_PATH}
[ -f "${BUILD_GIT_VERSION_FILE}" ] && cp ${BUILD_GIT_VERSION_FILE} ${TARGET_PATH}

## Add the packages purged
[ -f $POST_VERSION_PATH/purge-versions-deb ] && cat $POST_VERSION_PATH/purge-versions-deb >> "${TARGET_PATH}/versions-deb-${DIST}-${ARCH}"


@@ -4,15 +4,23 @@ IMAGENAME=$1
. /usr/local/share/buildinfo/scripts/buildinfo_base.sh

[ -d $POST_VERSION_PATH ] && rm -rf $POST_VERSION_PATH

# Collect the version files
collect_version_files $POST_VERSION_PATH

# Save the cache file for exporting it to the host.
tar -C ${PKG_CACHE_PATH} --exclude=cache.tgz -zcvf /cache.tgz .

[ -d $BUILD_VERSION_PATH ] && [ ! -z "$(ls -A $BUILD_VERSION_PATH)" ] && cp -rf $BUILD_VERSION_PATH/* $POST_VERSION_PATH
rm -rf $BUILD_VERSION_PATH/*

if [ ! -z "$(get_version_cache_option)" ]; then
    # Restore the deletion of cache files
    cat <<-EOF >/etc/apt/apt.conf.d/docker-clean
DPkg::Post-Invoke { "rm -f /var/cache/apt/archives/*.deb /var/cache/apt/archives/partial/*.deb /var/cache/apt/*.bin || true"; };
APT::Update::Post-Invoke { "rm -f /var/cache/apt/archives/*.deb /var/cache/apt/archives/partial/*.deb /var/cache/apt/*.bin || true"; };
EOF
fi

# Remove the version deb preference
rm -f $VERSION_DEB_PREFERENCE
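
Note: the heredoc above reinstates apt's stock docker-clean behavior once the
cache has been exported, so images do not ship stale .deb files. An
illustrative in-image check (the commands are assumed, not part of this change):

    # After any apt-get run, no .deb files should remain in the archives dir.
    apt-get install -y --no-install-recommends curl
    ls /var/cache/apt/archives/*.deb 2>/dev/null || echo "archives cleaned as expected"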


@@ -0,0 +1,40 @@
#!/bin/bash

IMAGENAME=$1

. /usr/local/share/buildinfo/scripts/buildinfo_base.sh

set -x
if [ ! -z "$(get_version_cache_option)" ]; then
    # Delete the rsync package files
    if [[ ! ${IMAGENAME} =~ -slave- ]]; then
        /usr/bin/apt-get purge -y --auto-remove rsync
    fi
fi

apt-get -s clean -y
apt-get -s autoclean -y
apt-get -s autoremove -y
#apt-get -s autoremove -y --purge
rm -f /var/cache/apt/archives/*.deb /var/cache/apt/*.bin
if [[ ! ${IMAGENAME} =~ -slave- ]]; then
    rm -f /var/lib/apt/lists/*
fi
rm -rf /sonic/target /ssh
rm -f /tmp/*
rm -rf /debs /python-wheels ~/.cache
find / | grep -E "__pycache__" | xargs rm -rf

rm -rf $BUILD_VERSION_PATH/*

# Disable the build hooks
symlink_build_hooks -d
set_reproducible_mirrors -d

# Remove the version deb preference
rm -f $VERSION_DEB_PREFERENCE
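
Note: post_run_cleanup is distributed through the SYMBOL_LINKS list above, so
a generated Dockerfile can presumably invoke it by name as its final build
step; an illustrative invocation (the image name is hypothetical):

    # Last step of a docker build stage (illustrative):
    post_run_cleanup docker-database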


@@ -18,6 +18,22 @@ symlink_build_hooks
set_reproducible_mirrors
mkdir -p /var/cache/apt/archives/
mkdir -p ${PKG_CACHE_PATH}/deb/

if [ ! -z "$(get_version_cache_option)" ]; then
    # Skip the deletion of cache files
    cat <<-EOF >/etc/apt/apt.conf.d/docker-clean
DPkg::Post-Invoke { "test -f /usr/bin/rsync && rsync -avzh --ignore-errors /var/cache/apt/archives/ ${PKG_CACHE_PATH}/deb/; rm -f /var/cache/apt/archives/partial/*.deb /var/cache/apt/*.bin || true"; };
APT::Update::Post-Invoke { "test -f /usr/bin/rsync && rsync -avzh --ignore-errors /var/cache/apt/archives/ ${PKG_CACHE_PATH}/deb/; rm -f /var/cache/apt/archives/partial/*.deb /var/cache/apt/*.bin || true"; };
APT::Keep-Downloaded-Packages "true";
EOF
fi

# Extract the cache inside the docker build.
if [ -f ${PKG_CACHE_FILE_NAME} ]; then
    tar -C ${PKG_CACHE_PATH} -xvf ${PKG_CACHE_FILE_NAME}
    test -e ${PKG_CACHE_PATH}/deb && cp ${PKG_CACHE_PATH}/deb/* /var/cache/apt/archives/
fi

chmod -R a+rw $BUILDINFO_PATH
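
Note: to confirm a cache hit during a docker build, one can list what the
extraction step above restored (the paths come from this script; the check
itself is illustrative):

    ls ${PKG_CACHE_PATH}/deb/*.deb /var/cache/apt/archives/*.deb 2>/dev/null | head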


@@ -0,0 +1,37 @@
#!/bin/bash

# Lock function for shared file access
# Parameters:
#   $1 - Path whose parent directory scopes the lock (<dir>.flock is created)
#   $2 - Timeout value in seconds (default: 360)
function FLOCK()
{
    local filename=$(dirname $1)
    local timeout=${2:-360}
    if [[ ! -f ${filename}.flock ]]; then
        touch ${filename}.flock
        chmod -f 777 ${filename}.flock
    fi
    local lockname=$(basename ${filename})
    local lock_fd=lock_${lockname//[%.\/\-+~]/_}_fd
    eval $(echo exec {${lock_fd}}\<\>"${filename}.flock")
    #echo ${!lock_fd}
    if ! flock -x -w ${timeout} "${!lock_fd}" ; then
        echo "ERROR: Lock timeout trying to access ${filename}.flock" 1>&2
        exit 1
    fi
    #echo "Lock acquired .."
}

# Unlock function for shared file access
# Parameters:
#   $1 - Path whose parent directory scopes the lock
function FUNLOCK()
{
    local filename=$(dirname $1)
    local lockname=$(basename ${filename})
    local lock_fd=lock_${lockname//[%.\/\-+~]/_}_fd
    eval $(echo exec "${!lock_fd}<&-")
    #rm -f ${filename}.flock
}
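
Note: a usage sketch for the lock helpers, assuming utils.sh has been sourced
(buildinfo_base.sh does this above) and using a hypothetical cache path; FLOCK
derives the lock file from the parent directory of its argument:

    CACHE_FILE=/vcache/docker-swss/cache.tgz   # hypothetical shared cache file
    FLOCK ${CACHE_FILE} 120                    # locks /vcache/docker-swss.flock, 120s timeout
    cp cache.tgz.tmp ${CACHE_FILE}             # critical section: update the shared file
    FUNLOCK ${CACHE_FILE}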