Recover "Support SONiC Reproduceable Build-debian/pip/web packages (#6255)

* Revert "Revert "Support SONiC Reproduceable Build-debian/pip/web packages (#5718)""

This reverts commit 17497a65e3.

* Revert "Revert "Remove unnecessary sudo authority in build Makefile (#6237)""

This reverts commit 163b7111b5.
xumia 2020-12-21 15:31:10 +08:00 committed by GitHub
parent ee8c3d34a2
commit 0a36de3a89
37 changed files with 1443 additions and 26 deletions

.gitignore

@ -61,3 +61,10 @@ platform/broadcom/sonic-platform-modules-dell/s6100/modules/dell_s6100_lpc.c
platform/broadcom/sonic-platform-modules-dell/z9100/modules/dell_ich.c
platform/broadcom/sonic-platform-modules-dell/z9100/modules/dell_mailbox.c
platform/broadcom/sonic-platform-modules-dell/z9264f/sonic_platform/ipmihelper.py
# buildinfo
files/build/buildinfo
files/build/tmp
dockers/**/buildinfo
platform/**/buildinfo
sonic-slave*/**/buildinfo


@ -38,3 +38,7 @@ ifeq ($(NOSTRETCH), 0)
BLDENV=stretch make -f Makefile.work $@
endif
BLDENV=buster make -f Makefile.work $@
# Freeze the versions, see more detail options: scripts/versions_manager.py freeze -h
freeze:
@scripts/versions_manager.py freeze $(FREEZE_VERSION_OPTIONS)
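
A quick usage note (not part of the diff): the new target is driven from the repo root, and the underlying tool exposes more options; a minimal sketch, assuming a completed build:
# Collect the versions recorded by the last build and freeze them into files/build/versions
make freeze
# Direct call with explicit options (rebuild all versions, apply to all dists and arches);
# see scripts/versions_manager.py freeze -h for the full option list
scripts/versions_manager.py freeze -r -d -a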


@ -89,11 +89,24 @@ else
SLAVE_DIR = sonic-slave-jessie
endif
SLAVE_BASE_TAG = $(shell CONFIGURED_ARCH=$(CONFIGURED_ARCH) j2 $(SLAVE_DIR)/Dockerfile.j2 > $(SLAVE_DIR)/Dockerfile && sha1sum $(SLAVE_DIR)/Dockerfile | awk '{print substr($$1,0,11);}')
include rules/config
SLAVE_TAG = $(shell cat $(SLAVE_DIR)/Dockerfile.user $(SLAVE_DIR)/Dockerfile | sha1sum | awk '{print substr($$1,0,11);}')
SLAVE_BASE_IMAGE = $(SLAVE_DIR)
SLAVE_IMAGE = $(SLAVE_BASE_IMAGE)-$(USER)
# Generate the version control build info
$(shell SONIC_VERSION_CONTROL_COMPONENTS=$(SONIC_VERSION_CONTROL_COMPONENTS) \
TRUSTED_GPG_URLS=$(TRUSTED_GPG_URLS) PACKAGE_URL_PREFIX=$(PACKAGE_URL_PREFIX) \
scripts/generate_buildinfo_config.sh)
# Generate the slave Dockerfile, and prepare build info for it
$(shell CONFIGURED_ARCH=$(CONFIGURED_ARCH) j2 $(SLAVE_DIR)/Dockerfile.j2 > $(SLAVE_DIR)/Dockerfile)
$(shell BUILD_SLAVE=y scripts/prepare_docker_buildinfo.sh $(SLAVE_BASE_IMAGE) $(SLAVE_DIR)/Dockerfile $(CONFIGURED_ARCH) "" $(BLDENV))
# Add the versions to the tag; if the versions change, the slave needs to be rebuilt
SLAVE_BASE_TAG = $(shell cat $(SLAVE_DIR)/Dockerfile $(SLAVE_DIR)/buildinfo/versions/versions-* | sha1sum | awk '{print substr($$1,0,11);}')
SLAVE_TAG = $(shell cat $(SLAVE_DIR)/Dockerfile.user $(SLAVE_DIR)/Dockerfile $(SLAVE_DIR)/buildinfo/versions/versions-* | sha1sum | awk '{print substr($$1,0,11);}')
OVERLAY_MODULE_CHECK := \
lsmod | grep -q "^overlay " &>/dev/null || \
zgrep -q 'CONFIG_OVERLAY_FS=y' /proc/config.gz &>/dev/null || \
@ -118,8 +131,6 @@ DOCKER_RUN := docker run --rm=true --privileged --init \
-i$(if $(TERM),t,) \
$(SONIC_BUILDER_EXTRA_CMDLINE)
include rules/config
ifneq ($(DOCKER_BUILDER_USER_MOUNT),)
DOCKER_RUN += $(foreach mount,$(subst $(comma), ,$(DOCKER_BUILDER_USER_MOUNT)), $(addprefix -v , $(mount)))
endif
@ -218,6 +229,7 @@ SONIC_BUILD_INSTRUCTION := make \
EXTRA_DOCKER_TARGETS=$(EXTRA_DOCKER_TARGETS) \
BUILD_LOG_TIMESTAMP=$(BUILD_LOG_TIMESTAMP) \
SONIC_ENABLE_IMAGE_SIGNATURE=$(ENABLE_IMAGE_SIGNATURE) \
SLAVE_DIR=$(SLAVE_DIR) \
$(SONIC_OVERRIDE_BUILD_VARS)
.PHONY: sonic-slave-build sonic-slave-bash init reset
@ -234,27 +246,35 @@ endif
endif
@$(OVERLAY_MODULE_CHECK)
@pushd src/sonic-build-hooks; TRUSTED_GPG_URLS=$(TRUSTED_GPG_URLS) make all; popd
@cp src/sonic-build-hooks/buildinfo/sonic-build-hooks* $(SLAVE_BASE_IMAGE)/buildinfo
@docker inspect --type image $(SLAVE_BASE_IMAGE):$(SLAVE_BASE_TAG) &> /dev/null || \
{ echo Image $(SLAVE_BASE_IMAGE):$(SLAVE_BASE_TAG) not found. Building... ; \
$(DOCKER_BASE_BUILD) ; }
$(DOCKER_BASE_BUILD) ; \
scripts/collect_docker_version_files.sh $(SLAVE_BASE_IMAGE):$(SLAVE_BASE_TAG) target ; }
@docker inspect --type image $(SLAVE_IMAGE):$(SLAVE_TAG) &> /dev/null || \
{ echo Image $(SLAVE_IMAGE):$(SLAVE_TAG) not found. Building... ; \
$(DOCKER_BUILD) ; }
ifeq "$(KEEP_SLAVE_ON)" "yes"
ifdef SOURCE_FOLDER
@$(DOCKER_RUN) -v $(SOURCE_FOLDER):/var/$(USER)/src $(SLAVE_IMAGE):$(SLAVE_TAG) bash -c "$(SONIC_BUILD_INSTRUCTION) $@; /bin/bash"
@$(DOCKER_RUN) -v $(SOURCE_FOLDER):/var/$(USER)/src $(SLAVE_IMAGE):$(SLAVE_TAG) bash -c "$(SONIC_BUILD_INSTRUCTION) $@; scripts/collect_build_version_files.sh \$$?; /bin/bash"
else
@$(DOCKER_RUN) $(SLAVE_IMAGE):$(SLAVE_TAG) bash -c "$(SONIC_BUILD_INSTRUCTION) $@; /bin/bash"
@$(DOCKER_RUN) $(SLAVE_IMAGE):$(SLAVE_TAG) bash -c "$(SONIC_BUILD_INSTRUCTION) $@; scripts/collect_build_version_files.sh \$$?; /bin/bash"
endif
else
@$(DOCKER_RUN) $(SLAVE_IMAGE):$(SLAVE_TAG) $(SONIC_BUILD_INSTRUCTION) $@
@$(DOCKER_RUN) $(SLAVE_IMAGE):$(SLAVE_TAG) bash -c "$(SONIC_BUILD_INSTRUCTION) $@; scripts/collect_build_version_files.sh \$$?"
endif
sonic-slave-base-build :
sonic-build-hooks:
@pushd src/sonic-build-hooks; TRUSTED_GPG_URLS=$(TRUSTED_GPG_URLS) make all; popd
@cp src/sonic-build-hooks/buildinfo/sonic-build-hooks* $(SLAVE_BASE_IMAGE)/buildinfo
sonic-slave-base-build : sonic-build-hooks
@$(OVERLAY_MODULE_CHECK)
@docker inspect --type image $(SLAVE_BASE_IMAGE):$(SLAVE_BASE_TAG) &> /dev/null || \
{ echo Image $(SLAVE_BASE_IMAGE):$(SLAVE_BASE_TAG) not found. Building... ; \
$(DOCKER_BASE_BUILD) ; }
$(DOCKER_BASE_BUILD) ; \
scripts/collect_docker_version_files.sh $(SLAVE_BASE_IMAGE):$(SLAVE_BASE_TAG) target ; }
sonic-slave-build : sonic-slave-base-build
@docker inspect --type image $(SLAVE_IMAGE):$(SLAVE_TAG) &> /dev/null || \
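
Since the slave tags above now also hash the generated version files, a version bump alone retriggers a slave rebuild. A minimal manual check of the derivation, assuming a buster slave whose buildinfo has already been prepared:
# Hypothetical re-computation of SLAVE_BASE_TAG for sonic-slave-buster
cat sonic-slave-buster/Dockerfile sonic-slave-buster/buildinfo/versions/versions-* \
    | sha1sum | awk '{print substr($1,0,11);}'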


@ -40,6 +40,8 @@ PLATFORM_DIR=platform
## Hostname for the linux image
HOSTNAME=sonic
DEFAULT_USERINFO="Default admin user,,,"
BUILD_TOOL_PATH=src/sonic-build-hooks/buildinfo
TRUSTED_GPG_DIR=$BUILD_TOOL_PATH/trusted.gpg.d
## Read ONIE image related config file
. ./onie-image.conf
@ -70,16 +72,12 @@ pushd $FILESYSTEM_ROOT
sudo mount --bind . .
popd
## Build a basic Debian system by debootstrap
## Build the host debian base system
echo '[INFO] Debootstrap...'
echo '[INFO] Build host debian base system...'
if [[ $CONFIGURED_ARCH == armhf || $CONFIGURED_ARCH == arm64 ]]; then
TARGET_PATH=$TARGET_PATH scripts/build_debian_base_system.sh $CONFIGURED_ARCH $IMAGE_DISTRO $FILESYSTEM_ROOT
# qemu arm bin executable for cross-building
sudo mkdir -p $FILESYSTEM_ROOT/usr/bin
# Prepare buildinfo
sudo cp /usr/bin/qemu*static $FILESYSTEM_ROOT/usr/bin || true
sudo scripts/prepare_debian_image_buildinfo.sh $CONFIGURED_ARCH $IMAGE_DISTRO $FILESYSTEM_ROOT $http_proxy
sudo http_proxy=$http_proxy debootstrap --variant=minbase --arch $CONFIGURED_ARCH $IMAGE_DISTRO $FILESYSTEM_ROOT http://deb.debian.org/debian
else
sudo http_proxy=$http_proxy debootstrap --variant=minbase --arch $CONFIGURED_ARCH $IMAGE_DISTRO $FILESYSTEM_ROOT http://debian-archive.trafficmanager.net/debian
fi
## Config hostname and hosts, otherwise 'sudo ...' will complain 'sudo: unable to resolve host ...'
sudo LANG=C chroot $FILESYSTEM_ROOT /bin/bash -c "echo '$HOSTNAME' > /etc/hostname"
@ -100,6 +98,9 @@ echo '[INFO] Mount all'
## Output all the mounted device for troubleshooting
sudo LANG=C chroot $FILESYSTEM_ROOT mount
## Install the trusted gpg public keys
[ -d $TRUSTED_GPG_DIR ] && [ ! -z "$(ls $TRUSTED_GPG_DIR)" ] && sudo cp $TRUSTED_GPG_DIR/* ${FILESYSTEM_ROOT}/etc/apt/trusted.gpg.d/
## Pointing apt to public apt mirrors and getting latest packages, needed for latest security updates
sudo cp files/apt/sources.list.$CONFIGURED_ARCH $FILESYSTEM_ROOT/etc/apt/sources.list
sudo cp files/apt/apt.conf.d/{81norecommends,apt-{clean,gzip-indexes,no-languages},no-check-valid-until} $FILESYSTEM_ROOT/etc/apt/apt.conf.d/
@ -584,6 +585,8 @@ sudo du -hsx $FILESYSTEM_ROOT
sudo mkdir -p $FILESYSTEM_ROOT/var/lib/docker
sudo mksquashfs $FILESYSTEM_ROOT $FILESYSTEM_SQUASHFS -e boot -e var/lib/docker -e $PLATFORM_DIR
scripts/collect_host_image_version_files.sh $TARGET_PATH $FILESYSTEM_ROOT
## Compress docker files
pushd $FILESYSTEM_ROOT && sudo tar czf $OLDPWD/$FILESYSTEM_DOCKERFS -C ${DOCKERFS_PATH}var/lib/docker .; popd
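
After mksquashfs, the new collect_host_image_version_files.sh helper (added below) copies the version snapshots out of the chroot; roughly, assuming the default target directory, the result is:
# Illustrative layout only
ls target/versions/host-image
# pre-versions  post-versions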


@ -6,3 +6,8 @@ deb-src [arch=amd64] http://debian-archive.trafficmanager.net/debian/ buster mai
deb [arch=amd64] http://debian-archive.trafficmanager.net/debian-security/ buster/updates main contrib non-free
deb-src [arch=amd64] http://debian-archive.trafficmanager.net/debian-security/ buster/updates main contrib non-free
deb [arch=amd64] http://debian-archive.trafficmanager.net/debian/ buster-backports main contrib non-free
# Debian mirror supports multiple versions for a package
deb [arch=amd64] http://packages.trafficmanager.net/debian/debian buster main contrib non-free
deb [arch=amd64] http://packages.trafficmanager.net/debian/debian buster-updates main contrib non-free
deb [arch=amd64] http://packages.trafficmanager.net/debian/debian buster-backports main contrib non-free


@ -6,3 +6,6 @@ deb-src [arch=arm64] http://deb.debian.org/debian buster main contrib non-free
deb [arch=arm64] http://security.debian.org buster/updates main contrib non-free
deb-src [arch=arm64] http://security.debian.org buster/updates main contrib non-free
deb [arch=arm64] http://deb.debian.org/debian/ buster-backports main contrib non-free
deb [arch=arm64] http://packages.trafficmanager.net/debian/debian buster main contrib non-free
deb [arch=arm64] http://packages.trafficmanager.net/debian/debian buster-updates main contrib non-free
deb [arch=arm64] http://packages.trafficmanager.net/debian/debian buster-backports main contrib non-free


@ -6,3 +6,6 @@ deb-src [arch=armhf] http://deb.debian.org/debian buster main contrib non-free
deb [arch=armhf] http://security.debian.org buster/updates main contrib non-free
deb-src [arch=armhf] http://security.debian.org buster/updates main contrib non-free
deb [arch=armhf] http://deb.debian.org/debian/ buster-backports main contrib non-free
deb [arch=armhf] http://packages.trafficmanager.net/debian/debian buster main contrib non-free
deb [arch=armhf] http://packages.trafficmanager.net/debian/debian buster-updates main contrib non-free
deb [arch=armhf] http://packages.trafficmanager.net/debian/debian buster-backports main contrib non-free


@ -6,3 +6,6 @@ deb-src [arch=amd64] http://debian-archive.trafficmanager.net/debian/ stretch ma
deb [arch=amd64] http://debian-archive.trafficmanager.net/debian-security/ stretch/updates main contrib non-free
deb-src [arch=amd64] http://debian-archive.trafficmanager.net/debian-security/ stretch/updates main contrib non-free
deb [arch=amd64] http://debian-archive.trafficmanager.net/debian/ stretch-backports main contrib non-free
deb [arch=amd64] http://packages.trafficmanager.net/debian/debian stretch main contrib non-free
deb [arch=amd64] http://packages.trafficmanager.net/debian/debian stretch-updates main contrib non-free
deb [arch=amd64] http://packages.trafficmanager.net/debian/debian stretch-backports main contrib non-free


@ -6,3 +6,6 @@ deb-src [arch=arm64] http://deb.debian.org/debian stretch main contrib non-free
deb [arch=arm64] http://security.debian.org stretch/updates main contrib non-free
deb-src [arch=arm64] http://security.debian.org stretch/updates main contrib non-free
deb [arch=arm64] http://deb.debian.org/debian/ stretch-backports main contrib non-free
deb [arch=arm64] http://packages.trafficmanager.net/debian/debian stretch main contrib non-free
deb [arch=arm64] http://packages.trafficmanager.net/debian/debian stretch-updates main contrib non-free
deb [arch=arm64] http://packages.trafficmanager.net/debian/debian stretch-backports main contrib non-free


@ -6,3 +6,6 @@ deb-src [arch=armhf] http://deb.debian.org/debian stretch main contrib non-free
deb [arch=armhf] http://security.debian.org stretch/updates main contrib non-free
deb-src [arch=armhf] http://security.debian.org stretch/updates main contrib non-free
deb [arch=armhf] http://deb.debian.org/debian/ stretch-backports main contrib non-free
deb [arch=armhf] http://packages.trafficmanager.net/debian/debian stretch main contrib non-free
deb [arch=armhf] http://packages.trafficmanager.net/debian/debian stretch-updates main contrib non-free
deb [arch=armhf] http://packages.trafficmanager.net/debian/debian stretch-backports main contrib non-free


@ -162,3 +162,20 @@ K8s_GCR_IO_PAUSE_VERSION = 3.2
# CA_CERT =
# The relative path is build root folder.
SONIC_ENABLE_IMAGE_SIGNATURE ?= n
# PACKAGE_URL_PREFIX - the package url prefix
PACKAGE_URL_PREFIX ?= https://packages.trafficmanager.net/public/packages
# TRUSTED_GPG_URLS - the trusted gpgs, separated by comma
TRUSTED_GPG_URLS = https://packages.trafficmanager.net/debian/public_key.gpg,https://packages.microsoft.com/keys/microsoft.asc
# SONIC_VERSION_CONTROL_COMPONENTS - Valid values: none|all|components..., where components is one or more of: deb,py2,py3,web,git,docker, separated by comma
# none : disable the version control
# all : enable the version control for all components
# deb : debian packages
# py2 : python2 packages
# py3 : python3 packages
# web : web packages, downloaded by wget, curl
# git : git repositories, downloaded by git clone
# docker: docker base images
SONIC_VERSION_CONTROL_COMPONENTS ?= none
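
As a usage sketch (values are illustrative, not part of the change), these options are typically overridden on the make command line or in rules/config.user:
# Pin debian and python3 packages for this build; assumes the platform has already
# been configured, e.g. make configure PLATFORM=vs
make SONIC_VERSION_CONTROL_COMPONENTS=deb,py3 target/sonic-vs.img.gz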


@ -0,0 +1,87 @@
#!/bin/bash
CONFIGURED_ARCH=$1
IMAGE_DISTRO=$2
FILESYSTEM_ROOT=$3
http_proxy=$4
TARGET=$TARGET_PATH
[ -z "$TARGET" ] && TARGET=target
. /usr/local/share/buildinfo/scripts/buildinfo_base.sh
TARGET_BASEIMAGE_PATH=$TARGET/versions/host-base-image
mkdir -p $TARGET_BASEIMAGE_PATH
generate_version_file()
{
sudo LANG=C chroot $FILESYSTEM_ROOT /bin/bash -c "dpkg-query -W -f '\${Package}==\${Version}\n'" > $TARGET_BASEIMAGE_PATH/versions-deb-${IMAGE_DISTRO}-${CONFIGURED_ARCH}
}
if [ "$ENABLE_VERSION_CONTROL_DEB" != "y" ]; then
if [[ $CONFIGURED_ARCH == armhf || $CONFIGURED_ARCH == arm64 ]]; then
# qemu arm bin executable for cross-building
sudo mkdir -p $FILESYSTEM_ROOT/usr/bin
sudo cp /usr/bin/qemu*static $FILESYSTEM_ROOT/usr/bin || true
sudo http_proxy=$HTTP_PROXY SKIP_BUILD_HOOK=y debootstrap --variant=minbase --arch $CONFIGURED_ARCH $IMAGE_DISTRO $FILESYSTEM_ROOT http://deb.debian.org/debian
else
sudo http_proxy=$HTTP_PROXY SKIP_BUILD_HOOK=y debootstrap --variant=minbase --arch $CONFIGURED_ARCH $IMAGE_DISTRO $FILESYSTEM_ROOT http://debian-archive.trafficmanager.net/debian
fi
RET=$?
if [ $RET -ne 0 ]; then
exit $RET
fi
generate_version_file
exit $RET
fi
ARCH=$(dpkg --print-architecture)
DISTRO=$(grep CODENAME /etc/os-release | cut -d= -f2)
if [ "$ARCH" != "$CONFIGURED_ARCH" ] || [ "$DISTRO" != "$IMAGE_DISTRO" ]; then
"Not support to build different ARCH/DISTRO ${CONFIGURED_ARCH}:${$IMAGE_DISTRO} in ${ARCH}:${DISTRO}."
exit 1
fi
BASE_VERSIONS=files/build/versions/host-base-image/versions-deb-${IMAGE_DISTRO}
BASEIMAGE_TARBALLPATH=$TARGET/baseimage
BASEIMAGE_TARBALL=$(realpath -e $TARGET)/baseimage.tgz
rm -rf $BASEIMAGE_TARBALLPATH $BASEIMAGE_TARBALL
ARCHIEVES=$BASEIMAGE_TARBALLPATH/var/cache/apt/archives
APTLIST=$BASEIMAGE_TARBALLPATH/var/lib/apt/lists
TARGET_DEBOOTSTRAP=$BASEIMAGE_TARBALLPATH/debootstrap
APTDEBIAN="$APTLIST/deb.debian.org_debian_dists_buster_main_binary-${CONFIGURED_ARCH}_Packages"
DEBPATHS=$TARGET_DEBOOTSTRAP/debpaths
DEBOOTSTRAP_BASE=$TARGET_DEBOOTSTRAP/base
DEBOOTSTRAP_REQUIRED=$TARGET_DEBOOTSTRAP/required
[ -d $BASEIMAGE_TARBALLPATH ] && rm -rf $BASEIMAGE_TARBALLPATH
mkdir -p $ARCHIEVES
mkdir -p $APTLIST
mkdir -p $TARGET_DEBOOTSTRAP
PACKAGES=$(sed -E 's/=(=[^=]*)$/\1/' $BASE_VERSIONS)
URL_ARR=($(apt-get download --print-uris $PACKAGES | cut -d" " -f1 | tr -d "'"))
PACKAGE_ARR=($PACKAGES)
LENGTH=${#PACKAGE_ARR[@]}
for ((i=0;i<LENGTH;i++))
do
package=${PACKAGE_ARR[$i]}
packagename=$(echo $package | sed -E 's/=[^=]*$//')
url=${URL_ARR[$i]}
filename=$(basename "$url")
SKIP_BUILD_HOOK=y wget $url -P $ARCHIEVES
echo $packagename >> $DEBOOTSTRAP_REQUIRED
echo "$packagename /var/cache/apt/archives/$filename" >> $DEBPATHS
done
touch $APTDEBIAN
touch $DEBOOTSTRAP_BASE
(cd $BASEIMAGE_TARBALLPATH && tar -zcf $BASEIMAGE_TARBALL .)
sudo debootstrap --verbose --variant=minbase --arch $CONFIGURED_ARCH --unpack-tarball=$BASEIMAGE_TARBALL $IMAGE_DISTRO $FILESYSTEM_ROOT
RET=$?
if [ $RET -ne 0 ]; then
exit $RET
fi
generate_version_file
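
For clarity, the base image version file read above pins packages in name==version form, and the sed expression converts each entry to apt's name=version form; a small standalone check (package and version are made up):
echo 'libc6==2.28-10' | sed -E 's/=(=[^=]*)$/\1/'
# prints: libc6=2.28-10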


@ -0,0 +1,28 @@
#!/bin/bash
RET=$1
BLDENV=$2
TARGET_PATH=$3
TIMESTAMP=$(date +"%Y%m%d%H%M%S")
. /usr/local/share/buildinfo/scripts/buildinfo_base.sh
[ -z "$BLDENV" ] && BLDENV=$(grep VERSION_CODENAME /etc/os-release | cut -d= -f2)
[ -z "$BLDENV" ] && exit $RET
[ -z "$TARGET_PATH" ] && TARGET_PATH=./target
VERSION_BUILD_PATH=$TARGET_PATH/versions/build
VERSION_SLAVE_PATH=$VERSION_BUILD_PATH/build-sonic-slave-${BLDENV}
LOG_VERSION_PATH=$VERSION_BUILD_PATH/log-${TIMESTAMP}
sudo chmod -R a+rw $BUILDINFO_PATH
collect_version_files $LOG_VERSION_PATH
([ -d $BUILD_VERSION_PATH ] && [ ! -z "$(ls $BUILD_VERSION_PATH/)" ]) && cp -rf $BUILD_VERSION_PATH/* $LOG_VERSION_PATH/
mkdir -p $VERSION_SLAVE_PATH
scripts/versions_manager.py merge -t $VERSION_SLAVE_PATH -b $LOG_VERSION_PATH -e $POST_VERSION_PATH
rm -rf $BUILD_VERSION_PATH/*
exit $RET
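
Makefile.work (above) appends this helper to every build command run inside the slave container; a manual equivalent, assuming a buster slave and the default target path, would be:
# $? stands for the exit code of the build that just finished
scripts/collect_build_version_files.sh $? buster ./target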


@ -0,0 +1,25 @@
#!/bin/bash
DOCKER_IMAGE=$1
TARGET_PATH=$2
[ -z "$TARGET_PATH" ] && TARGET_PATH=./target
DOCKER_IMAGE_NAME=$(echo $DOCKER_IMAGE | cut -d: -f1)
DOCKER_CONTAINER=$DOCKER_IMAGE_NAME
TARGET_VERSIONS_PATH=$TARGET_PATH/versions/dockers/$DOCKER_IMAGE_NAME
[ -d $TARGET_VERSIONS_PATH ] && rm -rf $TARGET_VERSIONS_PATH
mkdir -p $TARGET_VERSIONS_PATH
export DOCKER_CLI_EXPERIMENTAL=enabled
# Remove the old docker container if existing
if docker container inspect $DOCKER_CONTAINER > /dev/null 2>&1; then
docker container rm $DOCKER_CONTAINER > /dev/null
fi
docker create --name $DOCKER_CONTAINER --entrypoint /bin/bash $DOCKER_IMAGE
docker cp -L $DOCKER_CONTAINER:/etc/os-release $TARGET_VERSIONS_PATH/
docker cp -L $DOCKER_CONTAINER:/usr/local/share/buildinfo/pre-versions $TARGET_VERSIONS_PATH/
docker cp -L $DOCKER_CONTAINER:/usr/local/share/buildinfo/post-versions $TARGET_VERSIONS_PATH/
docker container rm $DOCKER_CONTAINER
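
slave.mk (below) runs this helper right after each docker build; for example, against an already-built local image (image name chosen for illustration):
scripts/collect_docker_version_files.sh docker-lldp target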


@ -0,0 +1,12 @@
#!/bin/bash
TARGET=$1
FILESYSTEM_ROOT=$2
VERSIONS_PATH=$TARGET/versions/host-image
mkdir -p $VERSIONS_PATH
sudo LANG=C chroot $FILESYSTEM_ROOT post_run_buildinfo
cp -r $FILESYSTEM_ROOT/usr/local/share/buildinfo/pre-versions $VERSIONS_PATH/
cp -r $FILESYSTEM_ROOT/usr/local/share/buildinfo/post-versions $VERSIONS_PATH/


@ -0,0 +1,10 @@
#!/bin/bash
BUILDINFO_PATH=src/sonic-build-hooks
BUILDINFO_CONFIG=$BUILDINFO_PATH/buildinfo/config/buildinfo.config
mkdir -p $BUILDINFO_PATH/buildinfo/config
echo "PACKAGE_URL_PREFIX=$PACKAGE_URL_PREFIX" > $BUILDINFO_CONFIG
echo "SONIC_VERSION_CONTROL_COMPONENTS=$SONIC_VERSION_CONTROL_COMPONENTS" >> $BUILDINFO_CONFIG


@ -0,0 +1,29 @@
#!/bin/bash
ARCH=$1
DISTRO=$2
FILESYSTEM_ROOT=$3
. /usr/local/share/buildinfo/scripts/buildinfo_base.sh
VERSION_DEB_PREFERENCE="01-versions-deb"
BUILDINFO_PATH=${FILESYSTEM_ROOT}/usr/local/share/buildinfo
BUILDINFO_VERSION_PATH=${FILESYSTEM_ROOT}/usr/local/share/buildinfo/versions
BUILDINFO_VERSION_DEB=${BUILDINFO_VERSION_PATH}/${VERSION_DEB_PREFERENCE}
OVERRIDE_VERSION_PATH=files/build/versions/host-image
DIFF_VERSIONS_PATH=$BUILDINFO_PATH/diff-versions
mkdir -p $BUILDINFO_PATH
# Copy the build info config
cp -rf src/sonic-build-hooks/buildinfo/* $BUILDINFO_PATH/
# Generate version lock files
scripts/versions_manager.py generate -t "$BUILDINFO_VERSION_PATH" -m "$OVERRIDE_VERSION_PATH" -d "$DISTRO" -a "$ARCH"
if [ "$ENABLE_VERSION_CONTROL_DEB" == "y" ]; then
cp -f $BUILDINFO_VERSION_DEB ${FILESYSTEM_ROOT}/etc/apt/preferences.d/
fi
sudo LANG=C chroot $FILESYSTEM_ROOT /bin/bash -c "dpkg -i /usr/local/share/buildinfo/sonic-build-hooks_1.0_all.deb"
sudo LANG=C chroot $FILESYSTEM_ROOT /bin/bash -c "pre_run_buildinfo"


@ -0,0 +1,50 @@
#!/bin/bash
IMAGENAME=$1
DOCKERFILE=$2
ARCH=$3
DOCKERFILE_TARGE=$4
DISTRO=$5
[ -z "$BUILD_SLAVE" ] && BUILD_SLAVE=n
[ -z "$DOCKERFILE_TARGE" ] && DOCKERFILE_TARGE=$DOCKERFILE
DOCKERFILE_PATH=$(dirname "$DOCKERFILE_TARGE")
BUILDINFO_PATH="${DOCKERFILE_PATH}/buildinfo"
BUILDINFO_VERSION_PATH="${BUILDINFO_PATH}/versions"
[ -d $BUILDINFO_PATH ] && rm -rf $BUILDINFO_PATH
mkdir -p $BUILDINFO_VERSION_PATH
# Get the debian distribution from the docker base image
if [ -z "$DISTRO" ]; then
DOCKER_BASE_IMAGE=$(grep "^FROM" $DOCKERFILE | head -n 1 | awk '{print $2}')
DISTRO=$(docker run --rm --entrypoint "" $DOCKER_BASE_IMAGE cat /etc/os-release | grep VERSION_CODENAME | cut -d= -f2)
[ -z "$DISTRO" ] && DISTRO=jessie
fi
DOCKERFILE_PRE_SCRIPT='# Auto-Generated for buildinfo
COPY ["buildinfo", "/usr/local/share/buildinfo"]
RUN dpkg -i /usr/local/share/buildinfo/sonic-build-hooks_1.0_all.deb
RUN pre_run_buildinfo'
# Add the auto-generate code if it is not added in the target Dockerfile
if [ ! -f $DOCKERFILE_TARGE ] || ! grep -q "Auto-Generated for buildinfo" $DOCKERFILE_TARGE; then
# Insert the docker build script before the RUN command
LINE_NUMBER=$(grep -Fn -m 1 'RUN' $DOCKERFILE | cut -d: -f1)
TEMP_FILE=$(mktemp)
awk -v text="${DOCKERFILE_PRE_SCRIPT}" -v linenumber=$LINE_NUMBER 'NR==linenumber{print text}1' $DOCKERFILE > $TEMP_FILE
# Append the docker build script at the end of the docker file
echo -e "\nRUN post_run_buildinfo" >> $TEMP_FILE
cat $TEMP_FILE > $DOCKERFILE_TARGE
rm -f $TEMP_FILE
fi
# Copy the build info config
cp -rf src/sonic-build-hooks/buildinfo/* $BUILDINFO_PATH
# Generate the version lock files
scripts/versions_manager.py generate -t "$BUILDINFO_VERSION_PATH" -n "$IMAGENAME" -d "$DISTRO" -a "$ARCH"
touch $BUILDINFO_VERSION_PATH/versions-deb
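
To illustrate the awk insertion above (a sketch only, using a throwaway Dockerfile), the pre-hook block lands before the first RUN instruction and the post hook is appended at the end:
printf 'FROM debian:buster\nRUN apt-get update\n' > /tmp/Dockerfile.demo
PRE='# Auto-Generated for buildinfo
COPY ["buildinfo", "/usr/local/share/buildinfo"]
RUN dpkg -i /usr/local/share/buildinfo/sonic-build-hooks_1.0_all.deb
RUN pre_run_buildinfo'
LINE=$(grep -Fn -m 1 'RUN' /tmp/Dockerfile.demo | cut -d: -f1)
awk -v text="$PRE" -v linenumber=$LINE 'NR==linenumber{print text}1' /tmp/Dockerfile.demo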


@ -0,0 +1,21 @@
#!/bin/bash
SLAVE_DIR=$1
ARCH=$2
DISTRO=$3
# Enable the build hooks
symlink_build_hooks
# Build the slave running config
cp -rf $SLAVE_DIR/buildinfo/* /usr/local/share/buildinfo/
. /usr/local/share/buildinfo/scripts/buildinfo_base.sh
# Build the slave version config
[ -d /usr/local/share/buildinfo/versions ] && rm -rf /usr/local/share/buildinfo/versions
scripts/versions_manager.py generate -t "/usr/local/share/buildinfo/versions" -n "build-${SLAVE_DIR}" -d "$DISTRO" -a "$ARCH"
touch ${BUILDINFO_PATH}/versions/versions-deb
rm -f /etc/apt/preferences.d/01-versions-deb
([ "$ENABLE_VERSION_CONTROL_DEB" == "y" ] && [ -f $VERSION_DEB_PREFERENCE ]) && cp -f $VERSION_DEB_PREFERENCE /etc/apt/preferences.d/
exit 0
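
slave.mk (below) invokes this once when entering the slave container; for example, for an amd64 buster slave (arguments shown for illustration):
sudo scripts/prepare_slave_container_buildinfo.sh sonic-slave-buster amd64 buster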

scripts/versions_manager.py

@ -0,0 +1,666 @@
#!/usr/bin/python3
import argparse
import glob
import os
import sys
ALL_DIST = 'all'
ALL_ARCH = 'all'
DEFAULT_MODULE = 'default'
DEFAULT_VERSION_PATH = 'files/build/versions'
VERSION_PREFIX="versions-"
VERSION_DEB_PREFERENCE = '01-versions-deb'
DEFAULT_OVERWRITE_COMPONENTS=['deb', 'py2', 'py3']
SLAVE_INDIVIDULE_VERSION = False
class Component:
'''
The component consists of multiple packages
ctype -- Component Type, such as deb, py2, etc
dist -- Distribution, such as stretch, buster, etc
arch -- Architecture, such as amd64, arm64, etc
'''
def __init__(self, versions, ctype, dist=ALL_DIST, arch=ALL_ARCH):
self.versions = versions
self.ctype = ctype
if not dist:
dist = ALL_DIST
if not arch:
arch = ALL_ARCH
self.dist = dist
self.arch = arch
@classmethod
def get_versions(cls, version_file):
result = {}
if not os.path.exists(version_file):
return result
with open(version_file) as fp:
for line in fp.readlines():
offset = line.rfind('==')
if offset > 0:
package = line[:offset].strip()
version = line[offset+2:].strip()
result[package] = version
return result
def clone(self):
return Component(self.versions.copy(), self.ctype, self.dist, self.arch)
def merge(self, versions, overwritten=True):
for package in versions:
if overwritten or package not in self.versions:
self.versions[package] = versions[package]
def subtract(self, versions):
for package in versions:
if package in self.versions and self.versions[package] == versions[package]:
del self.versions[package]
def dump(self, config=False, priority=999):
result = []
for package in sorted(self.versions.keys(), key=str.casefold):
if config and self.ctype == 'deb':
lines = 'Package: {0}\nPin: version {1}\nPin-Priority: {2}\n\n'.format(package, self.versions[package], priority)
result.append(lines)
else:
result.append('{0}=={1}'.format(package, self.versions[package]))
return "\n".join(result)
def dump_to_file(self, version_file, config=False, priority=999):
if len(self.versions) <= 0:
return
with open(version_file, 'w') as f:
f.write(self.dump(config, priority))
def dump_to_path(self, file_path, config=False, priority=999):
if len(self.versions) <= 0:
return
if not os.path.exists(file_path):
os.makedirs(file_path)
filename = self.get_filename()
if config and self.ctype == 'deb':
none_config_file_path = os.path.join(file_path, filename)
self.dump_to_file(none_config_file_path, False, priority)
filename = VERSION_DEB_PREFERENCE
file_path = os.path.join(file_path, filename)
self.dump_to_file(file_path, config, priority)
# Check if the self component can be overwritten by the input component
def check_overwritable(self, component, for_all_dist=False, for_all_arch=False):
if self.ctype != component.ctype:
return False
if self.dist != component.dist and not (for_all_dist and self.dist == ALL_DIST):
return False
if self.arch != component.arch and not (for_all_arch and self.arch == ALL_ARCH):
return False
return True
# Check if the self component can inherit the package versions from the input component
def check_inheritable(self, component):
if self.ctype != component.ctype:
return False
if self.dist != component.dist and component.dist != ALL_DIST:
return False
if self.arch != component.arch and component.arch != ALL_ARCH:
return False
return True
'''
Get the file name
The file name format: versions-{ctype}-{dist}-{arch}
If {arch} is all, then the file name format: versions-{ctype}-{dist}
if {arch} is all and {dist} is all, then the file name format: versions-{ctype}
'''
def get_filename(self):
filename = VERSION_PREFIX + self.ctype
dist = self.dist
if self.arch and self.arch != ALL_ARCH:
if not dist:
dist = ALL_DIST
return filename + '-' + dist + '-' + self.arch
if dist and self.dist != ALL_DIST:
filename = filename + '-' + dist
return filename
def get_order_keys(self):
dist = self.dist
if not dist or dist == ALL_DIST:
dist = ''
arch = self.arch
if not arch or arch == ALL_ARCH:
arch = ''
return (self.ctype, dist, arch)
def clean_info(self, clean_dist=True, clean_arch=True, force=False):
if clean_dist:
if force or self.ctype != 'deb':
self.dist = ALL_DIST
if clean_arch:
self.arch = ALL_ARCH
class VersionModule:
'''
The version module represents a build target, such as docker image, host image, consists of multiple components.
name -- The name of the image, such as sonic-slave-buster, docker-lldp, etc
'''
def __init__(self, name=None, components=None):
self.name = name
self.components = components
# Overwrite the docker/host image/base image versions
def overwrite(self, module, for_all_dist=False, for_all_arch=False):
# Overwrite from generic one to detail one
# For example: versions-deb is overwritten by versions-deb-buster, and versions-deb-buster by versions-deb-buster-amd64
components = sorted(module.components, key = lambda x : x.get_order_keys())
for merge_component in components:
merged = False
for component in self.components:
if component.check_overwritable(merge_component, for_all_dist=for_all_dist, for_all_arch=for_all_arch):
component.merge(merge_component.versions, True)
merged = True
if not merged:
tmp_component = merge_component.clone()
tmp_component.clean_info(clean_dist=for_all_dist, clean_arch=for_all_arch)
self.components.append(tmp_component)
self.adjust()
def get_config_module(self, default_module, dist, arch):
if self.is_individule_version():
return self
module = default_module
if not self.is_aggregatable_module(self.name):
module = default_module.clone(exclude_ctypes=DEFAULT_OVERWRITE_COMPONENTS)
return self._get_config_module(module, dist, arch)
def _get_config_module(self, default_module, dist, arch):
module = default_module.clone()
default_ctype_components = module._get_components_per_ctypes()
module.overwrite(self)
config_components = []
ctype_components = module._get_components_per_ctypes()
for ctype in default_ctype_components:
if ctype not in ctype_components:
ctype_components[ctype] = []
for components in ctype_components.values():
config_component = self._get_config_for_ctype(components, dist, arch)
config_components.append(config_component)
config_module = VersionModule(self.name, config_components)
return config_module
def _get_config_for_ctype(self, components, dist, arch):
result = Component({}, components[0].ctype, dist, arch)
for component in sorted(components, key = lambda x : x.get_order_keys()):
if result.check_inheritable(component):
result.merge(component.versions, True)
return result
def subtract(self, default_module):
module = self.clone()
result = []
ctype_components = module._get_components_per_ctypes()
for ctype in ctype_components:
components = ctype_components[ctype]
components = sorted(components, key = lambda x : x.get_order_keys())
for i in range(0, len(components)):
component = components[i]
base_module = VersionModule(self.name, components[0:i])
config_module = base_module._get_config_module(default_module, component.dist, component.arch)
config_components = config_module._get_components_by_ctype(ctype)
if len(config_components) > 0:
config_component = config_components[0]
component.subtract(config_component.versions)
if len(component.versions):
result.append(component)
self.components = result
def adjust(self):
result_components = []
ctype_components = self._get_components_per_ctypes()
for components in ctype_components.values():
result_components += self._adjust_components_for_ctype(components)
self.components = result_components
def _get_components_by_ctype(self, ctype):
components = []
for component in self.components:
if component.ctype == ctype:
components.append(component)
return components
def _adjust_components_for_ctype(self, components):
components = sorted(components, key = lambda x : x.get_order_keys())
result = []
for i in range(0, len(components)):
component = components[i]
inheritable_component = Component({}, component.ctype)
for j in range(0, i):
base_component = components[j]
if component.check_inheritable(base_component):
inheritable_component.merge(base_component.versions, True)
component.subtract(inheritable_component.versions)
if len(component.versions) > 0:
result.append(component)
return result
def _get_components_per_ctypes(self):
result = {}
for component in self.components:
components = result.get(component.ctype, [])
components.append(component)
result[component.ctype] = components
return result
def load(self, image_path, filter_ctype=None, filter_dist=None, filter_arch=None):
version_file_pattern = os.path.join(image_path, VERSION_PREFIX) + '*'
file_paths = glob.glob(version_file_pattern)
components = []
self.name = os.path.basename(image_path)
self.components = components
for file_path in file_paths:
filename = os.path.basename(file_path)
items = filename.split('-')
if len(items) < 2:
continue
ctype = items[1]
if filter_ctype and filter_ctype != ctype:
continue
dist = ''
arch = ''
if len(items) > 2:
dist = items[2]
if filter_dist and dist and filter_dist != dist:
continue
if len(items) > 3:
arch = items[3]
if filter_arch and arch and filter_arch != arch:
continue
versions = Component.get_versions(file_path)
component = Component(versions, ctype, dist, arch)
components.append(component)
def load_from_target(self, image_path):
post_versions = os.path.join(image_path, 'post-versions')
if os.path.exists(post_versions):
self.load(post_versions)
self.name = os.path.basename(image_path)
pre_versions = os.path.join(image_path, 'pre-versions')
if os.path.exists(pre_versions):
pre_module = VersionModule()
pre_module.load(pre_versions)
self.subtract(pre_module)
else:
self.load(image_path)
def dump(self, module_path, config=False, priority=999):
version_file_pattern = os.path.join(module_path, VERSION_PREFIX + '*')
for filename in glob.glob(version_file_pattern):
os.remove(filename)
for component in self.components:
component.dump_to_path(module_path, config, priority)
def filter(self, ctypes=[]):
if 'all' in ctypes:
return self
components = []
for component in self.components:
if component.ctype in ctypes:
components.append(component)
self.components = components
def clean_info(self, clean_dist=True, clean_arch=True, force=False):
for component in self.components:
component.clean_info(clean_dist=clean_dist, clean_arch=clean_arch, force=force)
def clone(self, ctypes=None, exclude_ctypes=None):
components = []
for component in self.components:
if exclude_ctypes and component.ctype in exclude_ctypes:
continue
if ctypes and component.ctype not in ctypes:
continue
components.append(component.clone())
return VersionModule(self.name, components)
def is_slave_module(self):
return self.name.startswith('sonic-slave-')
# Do not inherit the version from the default module
def is_individule_version(self):
return self.is_slave_module() and SLAVE_INDIVIDULE_VERSION
@classmethod
def is_aggregatable_module(cls, module_name):
if module_name.startswith('sonic-slave-'):
return False
if module_name.startswith('build-sonic-slave-'):
return False
if module_name == DEFAULT_MODULE:
return False
if module_name == 'host-image' or module_name == 'host-base-image':
return False
return True
@classmethod
def get_module_path_by_name(cls, source_path, module_name):
common_modules = ['default', 'host-image', 'host-base-image']
if module_name in common_modules:
return os.path.join(source_path, 'files/build/versions', module_name)
if module_name.startswith('build-sonic-slave-'):
return os.path.join(source_path, 'files/build/versions/build', module_name)
return os.path.join(source_path, 'files/build/versions/dockers', module_name)
class VersionBuild:
'''
The VersionBuild consists of multiple version modules.
'''
def __init__(self, target_path="./target", source_path='.'):
self.target_path = target_path
self.source_path = source_path
self.modules = {}
def load_from_target(self):
dockers_path = os.path.join(self.target_path, 'versions/dockers')
build_path = os.path.join(self.target_path, 'versions/build')
modules = {}
self.modules = modules
file_paths = glob.glob(dockers_path + '/*')
file_paths += glob.glob(build_path + '/build-*')
file_paths.append(os.path.join(self.target_path, 'versions/host-image'))
file_paths.append(os.path.join(self.target_path, 'versions/host-base-image'))
for file_path in file_paths:
if not os.path.isdir(file_path):
continue
module = VersionModule()
module.load_from_target(file_path)
modules[module.name] = module
self._merge_dgb_modules()
def load_from_source(self):
# Load default versions and host image versions
versions_path = os.path.join(self.source_path, 'files/build/versions')
dockers_path = os.path.join(versions_path, "dockers")
build_path = os.path.join(versions_path, "build")
paths = [os.path.join(versions_path, 'default')]
paths += glob.glob(versions_path + '/host-*')
paths += glob.glob(dockers_path + '/*')
paths += glob.glob(build_path + '/*')
modules = {}
self.modules = modules
for image_path in paths:
module = VersionModule()
module.load(image_path)
modules[module.name] = module
def overwrite(self, build, for_all_dist=False, for_all_arch=False):
for target_module in build.modules.values():
module = self.modules.get(target_module.name, None)
tmp_module = target_module.clone()
tmp_module.clean_info(for_all_dist, for_all_arch)
if module:
module.overwrite(tmp_module, for_all_dist=for_all_dist, for_all_arch=for_all_arch)
else:
self.modules[target_module.name] = tmp_module
def dump(self):
for module in self.modules.values():
module_path = self.get_module_path(module)
module.dump(module_path)
def subtract(self, default_module):
none_aggregatable_module = default_module.clone(exclude_ctypes=DEFAULT_OVERWRITE_COMPONENTS)
for module in self.modules.values():
if module.name == DEFAULT_MODULE:
continue
if module.name == 'host-base-image':
continue
if module.is_individule_version():
continue
tmp_module = default_module
if not module.is_aggregatable_module(module.name):
tmp_module = none_aggregatable_module
module.subtract(tmp_module)
def freeze(self, rebuild=False, for_all_dist=False, for_all_arch=False, ctypes=['all']):
if rebuild:
self.load_from_target()
self.filter(ctypes=ctypes)
default_module = self.get_default_module()
self._clean_component_info()
self.subtract(default_module)
self.modules[DEFAULT_MODULE] = default_module
self.dump()
return
self.load_from_source()
default_module = self.modules.get(DEFAULT_MODULE, None)
target_build = VersionBuild(self.target_path, self.source_path)
target_build.load_from_target()
target_build.filter(ctypes=ctypes)
if not default_module:
raise Exception("The default versions does not exist")
for module in target_build.modules.values():
if module.is_individule_version():
continue
tmp_module = module.clone(exclude_ctypes=DEFAULT_OVERWRITE_COMPONENTS)
default_module.overwrite(tmp_module, for_all_dist=True, for_all_arch=True)
target_build.subtract(default_module)
self.overwrite(target_build, for_all_dist=for_all_dist, for_all_arch=for_all_arch)
self.dump()
def filter(self, ctypes=[]):
for module in self.modules.values():
module.filter(ctypes=ctypes)
def get_default_module(self):
if DEFAULT_MODULE in self.modules:
return self.modules[DEFAULT_MODULE]
ctypes = self.get_component_types()
dists = self.get_dists()
components = []
for ctype in ctypes:
if ctype == 'deb':
for dist in dists:
versions = self._get_versions(ctype, dist)
common_versions = self._get_common_versions(versions)
component = Component(common_versions, ctype, dist)
components.append(component)
else:
versions = self._get_versions(ctype)
common_versions = self._get_common_versions(versions)
component = Component(common_versions, ctype)
components.append(component)
return VersionModule(DEFAULT_MODULE, components)
def get_aggregatable_modules(self):
modules = {}
for module_name in self.modules:
if not VersionModule.is_aggregatable_module(module_name):
continue
module = self.modules[module_name]
modules[module_name] = module
return modules
def get_components(self):
components = []
for module_name in self.modules:
module = self.modules[module_name]
for component in module.components:
components.append(component)
return components
def get_component_types(self):
ctypes = []
for module_name in self.modules:
module = self.modules[module_name]
for component in module.components:
if component.ctype not in ctypes:
ctypes.append(component.ctype)
return ctypes
def get_dists(self):
dists = []
components = self.get_components()
for component in components:
if component.dist not in dists:
dists.append(component.dist)
return dists
def get_archs(self):
archs = []
components = self.get_components()
for component in components:
if component.arch not in archs:
archs.append(component.arch)
return archs
def get_module_path(self, module):
return self.get_module_path_by_name(module.name)
def get_module_path_by_name(self, module_name):
return VersionModule.get_module_path_by_name(self.source_path, module_name)
def _merge_dgb_modules(self):
dbg_modules = []
for module_name in self.modules:
if not module_name.endswith('-dbg'):
continue
dbg_modules.append(module_name)
base_module_name = module_name[:-4]
if base_module_name not in self.modules:
raise Exception('The Module {0} not found'.format(base_module_name))
base_module = self.modules[base_module_name]
dbg_module = self.modules[module_name]
base_module.overwrite(dbg_module)
for module_name in dbg_modules:
del self.modules[module_name]
def _clean_component_info(self, clean_dist=True, clean_arch=True):
for module in self.modules.values():
module.clean_info(clean_dist, clean_arch)
def _get_versions(self, ctype, dist=None, arch=None):
versions = {}
modules = self.get_aggregatable_modules()
for module_name in self.modules:
if module_name not in modules:
temp_module = self.modules[module_name].clone(exclude_ctypes=DEFAULT_OVERWRITE_COMPONENTS)
modules[module_name] = temp_module
for module in modules.values():
for component in module.components:
if ctype != component.ctype:
continue
if dist and dist != component.dist:
continue
if arch and arch != component.arch:
continue
for package in component.versions:
version = component.versions[package]
package_versions = versions.get(package, [])
if version not in package_versions:
package_versions.append(version)
versions[package] = package_versions
return versions
def _get_common_versions(self, versions):
common_versions = {}
for package in versions:
package_versions = versions[package]
if len(package_versions) == 1:
common_versions[package] = package_versions[0]
return common_versions
class VersionManagerCommands:
def __init__(self):
usage = 'versions_manager.py <command> [<args>]\n\n'
usage = usage + 'The most commonly used commands are:\n'
usage = usage + ' freeze Freeze the version files\n'
usage = usage + ' generate Generate the version files\n'
usage = usage + ' merge Merge the version files'
parser = argparse.ArgumentParser(description='Version manager', usage=usage)
parser.add_argument('command', help='Subcommand to run')
args = parser.parse_args(sys.argv[1:2])
if not hasattr(self, args.command):
print('Unrecognized command: {0}'.format(args.command))
parser.print_help()
exit(1)
getattr(self, args.command)()
def freeze(self):
parser = argparse.ArgumentParser(description = 'Freeze the version files')
parser.add_argument('-t', '--target_path', default='./target', help='target path')
parser.add_argument('-s', '--source_path', default='.', help='source path')
# store_true which implies default=False
parser.add_argument('-r', '--rebuild', action='store_true', help='rebuild all versions')
parser.add_argument('-d', '--for_all_dist', action='store_true', help='apply the versions for all distributions')
parser.add_argument('-a', '--for_all_arch', action='store_true', help='apply the versions for all architectures')
parser.add_argument('-c', '--ctypes', default='all', help='component types to freeze')
args = parser.parse_args(sys.argv[2:])
ctypes = args.ctypes.split(',')
if len(ctypes) == 0:
ctypes = ['all']
build = VersionBuild(target_path=args.target_path, source_path=args.source_path)
build.freeze(rebuild=args.rebuild, for_all_dist=args.for_all_dist, for_all_arch=args.for_all_arch, ctypes=ctypes)
def merge(self):
parser = argparse.ArgumentParser(description = 'Merge the version files')
parser.add_argument('-t', '--target_path', required=True, help='target path to save the merged version files')
parser.add_argument('-m', '--module_path', default=None, help='merge path, use the target path if not specified')
parser.add_argument('-b', '--base_path', required=True, help='base path, merge to the module path')
parser.add_argument('-e', '--exclude_module_path', default=None, help='exclude module path')
args = parser.parse_args(sys.argv[2:])
module_path = args.module_path
if not module_path:
module_path = args.target_path
if not os.path.exists(module_path):
print('The module path {0} does not exist'.format(module_path))
if not os.path.exists(args.target_path):
os.makedirs(args.target_path)
module = VersionModule()
module.load(module_path)
base_module = VersionModule()
base_module.load(args.base_path)
module.overwrite(base_module)
if args.exclude_module_path:
exclude_module = VersionModule()
exclude_module.load(args.exclude_module_path)
module.subtract(exclude_module)
module.dump(args.target_path)
def generate(self):
parser = argparse.ArgumentParser(description = 'Generate the version files')
parser.add_argument('-t', '--target_path', required=True, help='target path to generate the version lock files')
group = parser.add_mutually_exclusive_group(required=True)
group.add_argument('-n', '--module_name', help="module name, such as docker-lldp, sonic-slave-buster, etc")
group.add_argument('-m', '--module_path', help="module path, such as files/docker/versions/dockers/docker-lldp, files/docker/versions/dockers/sonic-slave-buster, etc")
parser.add_argument('-s', '--source_path', default='.', help='source path')
parser.add_argument('-d', '--distribution', required=True, help="distribution")
parser.add_argument('-a', '--architecture', required=True, help="architecture")
parser.add_argument('-p', '--priority', default=999, help="priority of the debian apt preference")
args = parser.parse_args(sys.argv[2:])
module_path = args.module_path
if not module_path:
module_path = VersionModule.get_module_path_by_name(args.source_path, args.module_name)
if not os.path.exists(args.target_path):
os.makedirs(args.target_path)
module = VersionModule()
module.load(module_path, filter_dist=args.distribution, filter_arch=args.architecture)
default_module_path = VersionModule.get_module_path_by_name(args.source_path, DEFAULT_MODULE)
default_module = VersionModule()
default_module.load(default_module_path, filter_dist=args.distribution, filter_arch=args.architecture)
config = module.get_config_module(default_module, args.distribution, args.architecture)
config.clean_info(force=True)
config.dump(args.target_path, config=True, priority=args.priority)
if __name__ == "__main__":
VersionManagerCommands()
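
To make the subcommands above concrete, a few hedged invocation examples (module names, paths and timestamps are illustrative; option names come from the argparse definitions above):
# Generate version lock files for a docker image (as prepare_docker_buildinfo.sh does)
scripts/versions_manager.py generate -t dockers/docker-lldp/buildinfo/versions -n docker-lldp -d buster -a amd64
# Merge versions recorded during a slave build (as collect_build_version_files.sh does)
scripts/versions_manager.py merge -t target/versions/build/build-sonic-slave-buster -b target/versions/build/log-20201221000000
# Freeze the collected versions back into files/build/versions (the new 'make freeze' target)
scripts/versions_manager.py freeze
# Per Component.get_filename, the generated files are named like
# versions-deb-buster-amd64, versions-py3, and 01-versions-deb (apt preferences)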


@ -100,6 +100,14 @@ list :
include $(RULES_PATH)/config
-include $(RULES_PATH)/config.user
###############################################################################
## Version control related exports
###############################################################################
export PACKAGE_URL_PREFIX
export TRUSTED_GPG_URLS
export SONIC_VERSION_CONTROL_COMPONENTS
ifeq ($(SONIC_ENABLE_PFCWD_ON_START),y)
ENABLE_PFCWD_ON_START = y
endif
@ -257,6 +265,9 @@ else
$(info SONiC Build System for $(CONFIGURED_PLATFORM):$(CONFIGURED_ARCH))
endif
# Overwrite the buildinfo in slave container
$(shell sudo scripts/prepare_slave_container_buildinfo.sh $(SLAVE_DIR) $(CONFIGURED_ARCH) $(BLDENV))
include Makefile.cache
ifeq ($(SONIC_USE_DOCKER_BUILDKIT),y)
@ -637,6 +648,8 @@ $(addprefix $(TARGET_PATH)/, $(SONIC_SIMPLE_DOCKER_IMAGES)) : $(TARGET_PATH)/%.g
$(HEADER)
# Apply series of patches if exist
if [ -f $($*.gz_PATH).patch/series ]; then pushd $($*.gz_PATH) && QUILT_PATCHES=../$(notdir $($*.gz_PATH)).patch quilt push -a; popd; fi
# Prepare docker build info
scripts/prepare_docker_buildinfo.sh $* $($*.gz_PATH)/Dockerfile $(CONFIGURED_ARCH) $(TARGET_DOCKERFILE)/Dockerfile.buildinfo
docker info $(LOG)
docker build --squash --no-cache \
--build-arg http_proxy=$(HTTP_PROXY) \
@ -646,7 +659,9 @@ $(addprefix $(TARGET_PATH)/, $(SONIC_SIMPLE_DOCKER_IMAGES)) : $(TARGET_PATH)/%.g
--build-arg guid=$(GUID) \
--build-arg docker_container_name=$($*.gz_CONTAINER_NAME) \
--label Tag=$(SONIC_IMAGE_VERSION) \
-f $(TARGET_DOCKERFILE)/Dockerfile.buildinfo \
-t $* $($*.gz_PATH) $(LOG)
scripts/collect_docker_version_files.sh $* $(TARGET_PATH)
docker save $* | gzip -c > $@
# Clean up
if [ -f $($*.gz_PATH).patch/series ]; then pushd $($*.gz_PATH) && quilt pop -a -f; [ -d .pc ] && rm -rf .pc; popd; fi
@ -720,6 +735,11 @@ $(addprefix $(TARGET_PATH)/, $(DOCKER_IMAGES)) : $(TARGET_PATH)/%.gz : .platform
$(eval export $(subst -,_,$(notdir $($*.gz_PATH)))_whls=$(shell printf "$(subst $(SPACE),\n,$(call expand,$($*.gz_PYTHON_WHEELS)))\n" | awk '!a[$$0]++'))
$(eval export $(subst -,_,$(notdir $($*.gz_PATH)))_dbgs=$(shell printf "$(subst $(SPACE),\n,$(call expand,$($*.gz_DBG_PACKAGES)))\n" | awk '!a[$$0]++'))
j2 $($*.gz_PATH)/Dockerfile.j2 > $($*.gz_PATH)/Dockerfile
# Prepare docker build info
PACKAGE_URL_PREFIX=$(PACKAGE_URL_PREFIX) \
SONIC_ENFORCE_VERSIONS=$(SONIC_ENFORCE_VERSIONS) \
TRUSTED_GPG_URLS=$(TRUSTED_GPG_URLS) \
scripts/prepare_docker_buildinfo.sh $* $($*.gz_PATH)/Dockerfile $(CONFIGURED_ARCH)
docker info $(LOG)
docker build --squash --no-cache \
--build-arg http_proxy=$(HTTP_PROXY) \
@ -732,6 +752,7 @@ $(addprefix $(TARGET_PATH)/, $(DOCKER_IMAGES)) : $(TARGET_PATH)/%.gz : .platform
--build-arg frr_user_gid=$(FRR_USER_GID) \
--label Tag=$(SONIC_IMAGE_VERSION) \
-t $* $($*.gz_PATH) $(LOG)
scripts/collect_docker_version_files.sh $* $(TARGET_PATH)
docker save $* | gzip -c > $@
# Clean up
if [ -f $($*.gz_PATH).patch/series ]; then pushd $($*.gz_PATH) && quilt pop -a -f; [ -d .pc ] && rm -rf .pc; popd; fi
@ -764,6 +785,11 @@ $(addprefix $(TARGET_PATH)/, $(DOCKER_DBG_IMAGES)) : $(TARGET_PATH)/%-$(DBG_IMAG
$(eval export $(subst -,_,$(notdir $($*.gz_PATH)))_image_dbgs=$(shell printf "$(subst $(SPACE),\n,$(call expand,$($*.gz_DBG_IMAGE_PACKAGES)))\n" | awk '!a[$$0]++'))
./build_debug_docker_j2.sh $* $(subst -,_,$(notdir $($*.gz_PATH)))_dbg_debs $(subst -,_,$(notdir $($*.gz_PATH)))_image_dbgs > $($*.gz_PATH)/Dockerfile-dbg.j2
j2 $($*.gz_PATH)/Dockerfile-dbg.j2 > $($*.gz_PATH)/Dockerfile-dbg
# Prepare docker build info
PACKAGE_URL_PREFIX=$(PACKAGE_URL_PREFIX) \
SONIC_ENFORCE_VERSIONS=$(SONIC_ENFORCE_VERSIONS) \
TRUSTED_GPG_URLS=$(TRUSTED_GPG_URLS) \
scripts/prepare_docker_buildinfo.sh $* $($*.gz_PATH)/Dockerfile-dbg $(CONFIGURED_ARCH)
docker info $(LOG)
docker build \
$(if $($*.gz_DBG_DEPENDS), --squash --no-cache, --no-cache) \
@ -773,6 +799,7 @@ $(addprefix $(TARGET_PATH)/, $(DOCKER_DBG_IMAGES)) : $(TARGET_PATH)/%-$(DBG_IMAG
--label Tag=$(SONIC_IMAGE_VERSION) \
--file $($*.gz_PATH)/Dockerfile-dbg \
-t $*-dbg $($*.gz_PATH) $(LOG)
scripts/collect_docker_version_files.sh $*-dbg $(TARGET_PATH)
docker save $*-dbg | gzip -c > $@
# Clean up
if [ -f $($*.gz_PATH).patch/series ]; then pushd $($*.gz_PATH) && quilt pop -a -f; [ -d .pc ] && rm -rf .pc; popd; fi
@ -970,6 +997,10 @@ $(addprefix $(TARGET_PATH)/, $(SONIC_INSTALLERS)) : $(TARGET_PATH)/% : \
USERNAME="$(USERNAME)" \ USERNAME="$(USERNAME)" \
PASSWORD="$(PASSWORD)" \ PASSWORD="$(PASSWORD)" \
IMAGE_TYPE=$($*_IMAGE_TYPE) \ IMAGE_TYPE=$($*_IMAGE_TYPE) \
TARGET_PATH=$(TARGET_PATH) \
SONIC_ENFORCE_VERSIONS=$(SONIC_ENFORCE_VERSIONS) \
TRUSTED_GPG_URLS=$(TRUSTED_GPG_URLS) \
PACKAGE_URL_PREFIX=$(PACKAGE_URL_PREFIX) \
./build_debian.sh $(LOG) ./build_debian.sh $(LOG)
USERNAME="$(USERNAME)" \ USERNAME="$(USERNAME)" \

View File

@ -16,7 +16,9 @@ RUN echo "deb [arch=amd64] http://debian-archive.trafficmanager.net/debian/ bust
echo "deb-src [arch=amd64] http://debian-archive.trafficmanager.net/debian/ buster main contrib non-free" >> /etc/apt/sources.list && \
echo "deb [arch=amd64] http://debian-archive.trafficmanager.net/debian-security/ buster/updates main contrib non-free" >> /etc/apt/sources.list && \
echo "deb-src [arch=amd64] http://debian-archive.trafficmanager.net/debian-security/ buster/updates main contrib non-free" >> /etc/apt/sources.list && \
echo "deb [arch=amd64] http://debian-archive.trafficmanager.net/debian buster-backports main" >> /etc/apt/sources.list && \
echo "deb [arch=amd64] http://packages.trafficmanager.net/debian/debian buster main contrib non-free" >> /etc/apt/sources.list && \
echo "deb [arch=amd64] http://packages.trafficmanager.net/debian/debian buster-updates main contrib non-free" >> /etc/apt/sources.list
{%- if CONFIGURED_ARCH == "armhf" %}
RUN echo "deb [arch=armhf] http://deb.debian.org/debian buster main contrib non-free" > /etc/apt/sources.list && \
@ -25,7 +27,9 @@ RUN echo "deb [arch=armhf] http://deb.debian.org/debian buster main contrib non-
echo "deb-src [arch=armhf] http://deb.debian.org/debian buster-updates main contrib non-free" >> /etc/apt/sources.list && \
echo "deb [arch=armhf] http://security.debian.org buster/updates main contrib non-free" >> /etc/apt/sources.list && \
echo "deb-src [arch=armhf] http://security.debian.org buster/updates main contrib non-free" >> /etc/apt/sources.list && \
echo 'deb [arch=armhf] http://ftp.debian.org/debian buster-backports main' >> /etc/apt/sources.list && \
echo "deb [arch=armhf] http://packages.trafficmanager.net/debian/debian buster main contrib non-free" >> /etc/apt/sources.list && \
echo "deb [arch=armhf] http://packages.trafficmanager.net/debian/debian buster-updates main contrib non-free" >> /etc/apt/sources.list
{%- elif CONFIGURED_ARCH == "arm64" %}
RUN echo "deb [arch=arm64] http://deb.debian.org/debian buster main contrib non-free" > /etc/apt/sources.list && \
@ -33,7 +37,9 @@ RUN echo "deb [arch=arm64] http://deb.debian.org/debian buster main contrib non-
echo "deb-src [arch=arm64] http://deb.debian.org/debian buster-updates main contrib non-free" >> /etc/apt/sources.list && \
echo "deb [arch=arm64] http://security.debian.org buster/updates main contrib non-free" >> /etc/apt/sources.list && \
echo "deb-src [arch=arm64] http://security.debian.org buster/updates main contrib non-free" >> /etc/apt/sources.list && \
echo 'deb [arch=arm64] http://ftp.debian.org/debian buster-backports main' >> /etc/apt/sources.list && \
echo "deb [arch=arm64] http://packages.trafficmanager.net/debian/debian buster main contrib non-free" >> /etc/apt/sources.list && \
echo "deb [arch=arm64] http://packages.trafficmanager.net/debian/debian buster-updates main contrib non-free" >> /etc/apt/sources.list
{%- endif %}
## Make apt-get non-interactive

View File

@ -14,7 +14,10 @@ RUN echo "deb [arch=amd64] http://debian-archive.trafficmanager.net/debian/ stre
echo "deb-src [arch=amd64] http://debian-archive.trafficmanager.net/debian/ stretch main contrib non-free" >> /etc/apt/sources.list && \
echo "deb [arch=amd64] http://debian-archive.trafficmanager.net/debian-security/ stretch/updates main contrib non-free" >> /etc/apt/sources.list && \
echo "deb-src [arch=amd64] http://debian-archive.trafficmanager.net/debian-security/ stretch/updates main contrib non-free" >> /etc/apt/sources.list && \
echo "deb [arch=amd64] http://debian-archive.trafficmanager.net/debian stretch-backports main" >> /etc/apt/sources.list && \
echo "deb [arch=amd64] http://packages.trafficmanager.net/debian/debian stretch main contrib non-free" >> /etc/apt/sources.list && \
echo "deb [arch=amd64] http://packages.trafficmanager.net/debian/debian stretch-updates main contrib non-free" >> /etc/apt/sources.list && \
echo "deb [arch=amd64] http://packages.microsoft.com/debian/9/prod stretch main" >> /etc/apt/sources.list
{%- if CONFIGURED_ARCH == "armhf" %}
RUN echo "deb [arch=armhf] http://deb.debian.org/debian stretch main contrib non-free" > /etc/apt/sources.list && \
@ -23,7 +26,9 @@ RUN echo "deb [arch=armhf] http://deb.debian.org/debian stretch main contrib non
echo "deb-src [arch=armhf] http://deb.debian.org/debian stretch-updates main contrib non-free" >> /etc/apt/sources.list && \
echo "deb [arch=armhf] http://security.debian.org stretch/updates main contrib non-free" >> /etc/apt/sources.list && \
echo "deb-src [arch=armhf] http://security.debian.org stretch/updates main contrib non-free" >> /etc/apt/sources.list && \
echo 'deb [arch=armhf] http://ftp.debian.org/debian stretch-backports main' >> /etc/apt/sources.list && \
echo "deb [arch=armhf] http://packages.trafficmanager.net/debian/debian stretch main contrib non-free" >> /etc/apt/sources.list && \
echo "deb [arch=armhf] http://packages.trafficmanager.net/debian/debian stretch-updates main contrib non-free" >> /etc/apt/sources.list
{%- elif CONFIGURED_ARCH == "arm64" %}
RUN echo "deb [arch=arm64] http://deb.debian.org/debian stretch main contrib non-free" > /etc/apt/sources.list && \
@ -31,7 +36,9 @@ RUN echo "deb [arch=arm64] http://deb.debian.org/debian stretch main contrib non
echo "deb-src [arch=arm64] http://deb.debian.org/debian stretch-updates main contrib non-free" >> /etc/apt/sources.list && \
echo "deb [arch=arm64] http://security.debian.org stretch/updates main contrib non-free" >> /etc/apt/sources.list && \
echo "deb-src [arch=arm64] http://security.debian.org stretch/updates main contrib non-free" >> /etc/apt/sources.list && \
echo 'deb [arch=arm64] http://ftp.debian.org/debian stretch-backports main' >> /etc/apt/sources.list && \
echo "deb [arch=arm64] http://packages.trafficmanager.net/debian/debian stretch main contrib non-free" >> /etc/apt/sources.list && \
echo "deb [arch=arm64] http://packages.trafficmanager.net/debian/debian stretch-updates main contrib non-free" >> /etc/apt/sources.list
{%- endif %}
## Make apt-get non-interactive

src/sonic-build-hooks/.gitignore vendored Normal file
View File

@ -0,0 +1,2 @@
buildinfo
tmp

View File

@ -0,0 +1,39 @@
SONIC_BUILD_HOOKS = sonic-build-hooks
SONIC_BUILD_HOOKS_VERSION = 1.0
SONIC_BUILD_HOOKS_PACKAGE = $(SONIC_BUILD_HOOKS)_$(SONIC_BUILD_HOOKS_VERSION)_all.deb
BUILDINFO_DIR = buildinfo
TMP_DIR = tmp
SYMBOL_LINKS_SRC_DIR = ../../usr/local/share/buildinfo/scripts
SYMBOL_LINKS = symlink_build_hooks post_run_buildinfo pre_run_buildinfo collect_version_files
SONIC_BUILD_HOOKS_TARGET = $(BUILDINFO_DIR)/$(SONIC_BUILD_HOOKS_PACKAGE)
BUILD_ROOT_DIR = $(TMP_DIR)/$(SONIC_BUILD_HOOKS)
DEBIAN_DIR = $(BUILD_ROOT_DIR)/DEBIAN
TRUSTED_GPG_PATH = $(BUILD_ROOT_DIR)/etc/apt/trusted.gpg.d
INSTALL_PATH = $(BUILD_ROOT_DIR)/usr/local/share/buildinfo
SYMBOL_LINK_PATH = $(BUILD_ROOT_DIR)/usr/sbin
SCRIPTS_PATH = $(INSTALL_PATH)/scripts
HOOKS_PATH = $(INSTALL_PATH)/hooks
DPKGTOOL = $(shell which dpkg-deb)
# If dpkg-deb is not installed, use a docker container to build the debian package
ifeq ($(shell which dpkg-deb),)
BUILD_COMMAND=docker run --user $(shell id -u):$(shell id -g) --rm -v $(shell pwd):/build debian:buster bash -c 'cd /build; dpkg-deb --build $(TMP_DIR)/$(SONIC_BUILD_HOOKS) $(SONIC_BUILD_HOOKS_TARGET)'
else
BUILD_COMMAND=dpkg-deb --build $(TMP_DIR)/$(SONIC_BUILD_HOOKS) $(SONIC_BUILD_HOOKS_TARGET)
endif
DEPENDS := $(shell find scripts hooks debian -type f)
$(SONIC_BUILD_HOOKS_TARGET): $(DEPENDS)
@rm -rf $(BUILDINFO_DIR)/$(SONIC_BUILD_HOOKS) $(TMP_DIR)
@mkdir -p $(DEBIAN_DIR) $(SCRIPTS_PATH) $(HOOKS_PATH) $(SYMBOL_LINK_PATH) $(TRUSTED_GPG_PATH) $(BUILDINFO_DIR)
@chmod 0775 $(DEBIAN_DIR)
@cp debian/* $(DEBIAN_DIR)/
@cp scripts/* $(SCRIPTS_PATH)/
@cp hooks/* $(HOOKS_PATH)/
@for url in $$(echo $(TRUSTED_GPG_URLS) | sed 's/[,;]/ /g'); do wget -q "$$url" -P "$(TRUSTED_GPG_PATH)/"; done
@for f in $(SYMBOL_LINKS); do ln -s $(SYMBOL_LINKS_SRC_DIR)/$$f $(SYMBOL_LINK_PATH)/$$f; done
@$(BUILD_COMMAND)
all: $(SONIC_BUILD_HOOKS_TARGET)
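As a rough usage sketch of this Makefile (the GPG key URL below is only a placeholder, not a value taken from this change), the hooks package can be built standalone:

    # Build sonic-build-hooks_1.0_all.deb; dpkg-deb is used directly when present,
    # otherwise the Makefile falls back to a debian:buster container.
    cd src/sonic-build-hooks
    TRUSTED_GPG_URLS="https://example.com/trusted.gpg" make all
    ls buildinfo/sonic-build-hooks_1.0_all.deb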

View File

@ -0,0 +1,10 @@
Package: sonic-build-hooks
Version: 1.0
Section: devel
Priority: optional
Architecture: all
Depends:
Maintainer: SONiC <sonic@microsoft.com>
Description: sonic build hooks
Hooks the build tools, such as apt-get, wget, pip, etc.
It is used to monitor and control the packages installed during the build.

View File

@ -0,0 +1,38 @@
#!/bin/bash
INSTALL=
. /usr/local/share/buildinfo/scripts/buildinfo_base.sh
REAL_COMMAND=$(get_command apt-get)
if [ -z "$REAL_COMMAND" ]; then
echo "The command apt-get does not exist." 1>&2
exit 1
fi
VERSION_FILE="/usr/local/share/buildinfo/versions/versions-deb"
if [ "$ENABLE_VERSION_CONTROL_DEB" == "y" ]; then
for para in $@
do
if [[ "$para" != -* ]]; then
continue
fi
if [ ! -z "$INSTALL" ]; then
if [[ "$para" == *=* ]]; then
continue
elif [[ "$para" == *=* ]]; then
continue
else
package=$para
if ! grep -q "^${package}=" $VERSION_FILE; then
echo "The version of the package ${package} is not specified."
exit 1
fi
fi
elif [[ "$para" == "install" ]]; then
INSTALL=y
fi
done
fi
$REAL_COMMAND "$@"

View File

@ -0,0 +1,6 @@
#!/bin/bash
. /usr/local/share/buildinfo/scripts/buildinfo_base.sh
[ -z "$REAL_COMMAND" ] && REAL_COMMAND=/usr/bin/curl
REAL_COMMAND=$REAL_COMMAND download_packages "$@"

View File

@ -0,0 +1 @@
pip2

View File

@ -0,0 +1,13 @@
#!/bin/bash
. /usr/local/share/buildinfo/scripts/buildinfo_base.sh
VERSION_FILE="$BUILDINFO_PATH/versions/versions-py2"
REAL_COMMAND=$(get_command pip2)
if [ ! -x "$REAL_COMMAND" ]; then
echo "The command pip2 not found" 1>&2
exit 1
fi
PIP_VERSION_FILE=$VERSION_FILE ENABLE_VERSION_CONTROL_PY=$ENABLE_VERSION_CONTROL_PY2 REAL_COMMAND=$REAL_COMMAND run_pip_command "$@"

View File

@ -0,0 +1,12 @@
#!/bin/bash
. /usr/local/share/buildinfo/scripts/buildinfo_base.sh
VERSION_FILE="$BUILDINFO_PATH/versions/versions-py3"
REAL_COMMAND=$(get_command pip3)
if [ ! -x "$REAL_COMMAND" ]; then
echo "The command pip3 not found" 1>&2
exit 1
fi
PIP_VERSION_FILE=$VERSION_FILE ENABLE_VERSION_CONTROL_PY=$ENABLE_VERSION_CONTROL_PY3 REAL_COMMAND=$REAL_COMMAND run_pip_command "$@"
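A small assumed example of the effect (the pinned module and version are placeholders; run_pip_command is defined in buildinfo_base.sh below):

    # /usr/local/share/buildinfo/versions/versions-py3
    #   PyYAML==5.3.1
    # With py3 version control enabled this behaves roughly like
    # "pip3 install PyYAML -c <temporary copy of versions-py3>":
    pip3 install PyYAML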

View File

@ -0,0 +1,15 @@
#!/bin/bash
. /usr/local/share/buildinfo/scripts/buildinfo_base.sh
[ -z "$REAL_COMMAND" ] && REAL_COMMAND=$(get_command wget)
if [ -z "$REAL_COMMAND" ]; then
echo "The command wget does not exist." 1>&2
exit 1
fi
if [ "$SKIP_BUILD_HOOK" == y ]; then
$REAL_COMMAND "$@"
exit $?
fi
REAL_COMMAND=$REAL_COMMAND download_packages "$@"
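A hedged sketch of the two paths through this wrapper (URL and hash are placeholders):

    # versions-web pins a URL to the md5sum of its content:
    #   https://example.com/pkg.tar.gz==0123456789abcdef0123456789abcdef
    # Normal call: download_packages may rewrite the URL to
    # $PACKAGE_URL_PREFIX/pkg.tar.gz-<hash>, or verify the hash of the original URL.
    wget https://example.com/pkg.tar.gz
    # Bypass the hook entirely, e.g. for bootstrap downloads:
    SKIP_BUILD_HOOK=y wget https://example.com/pkg.tar.gz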

View File

@ -0,0 +1,156 @@
#!/bin/bash
BUILDINFO_PATH=/usr/local/share/buildinfo
LOG_PATH=$BUILDINFO_PATH/log
VERSION_PATH=$BUILDINFO_PATH/versions
PRE_VERSION_PATH=$BUILDINFO_PATH/pre-versions
DIFF_VERSION_PATH=$BUILDINFO_PATH/diff-versions
BUILD_VERSION_PATH=$BUILDINFO_PATH/build-versions
POST_VERSION_PATH=$BUILDINFO_PATH/post-versions
VERSION_DEB_PREFERENCE=$BUILDINFO_PATH/versions/01-versions-deb
WEB_VERSION_FILE=$VERSION_PATH/versions-web
BUILD_WEB_VERSION_FILE=$BUILD_VERSION_PATH/versions-web
. $BUILDINFO_PATH/config/buildinfo.config
URL_PREFIX=$(echo "${PACKAGE_URL_PREFIX}" | sed -E "s#(//[^/]*/).*#\1#")
log_err()
{
echo "$1" >> $LOG_PATH/error.log
echo "$1" 1>&2
}
get_command()
{
local path=$(echo $PATH | sed 's#[^:]*buildinfo/scripts:##' | sed "s#/usr/sbin:##")
local command=$(PATH=$path which $1)
echo $command
}
check_version_control()
{
if [[ ",$SONIC_VERSION_CONTROL_COMPONENTS," == *,all,* ]] || [[ ",$SONIC_VERSION_CONTROL_COMPONENTS," == *,$1,* ]]; then
echo "y"
else
echo "n"
fi
}
get_url_version()
{
local package_url=$1
/usr/bin/curl -ks $package_url | md5sum | cut -d' ' -f1
}
check_if_url_exist()
{
local url=$1
if /usr/bin/curl --output /dev/null --silent --head --fail "$1" > /dev/null 2>&1; then
echo y
else
echo n
fi
}
download_packages()
{
local parameters=("$@")
local filenames=
declare -A filenames
for (( i=0; i<${#parameters[@]}; i++ ))
do
local para=${parameters[$i]}
local nexti=$((i+1))
if [[ "$para" == *://* ]]; then
local url=$para
local real_version=
if [ "$ENABLE_VERSION_CONTROL_WEB" == y ]; then
local version=
local filename=$(echo $url | awk -F"/" '{print $NF}' | cut -d? -f1 | cut -d# -f1)
[ -f $WEB_VERSION_FILE ] && version=$(grep "^${url}=" $WEB_VERSION_FILE | awk -F"==" '{print $NF}')
if [ -z "$version" ]; then
echo "Failed to verify the package: $url, the version is not specified" 2>&1
exit 1
fi
local version_filename="${filename}-${version}"
local proxy_url="${PACKAGE_URL_PREFIX}/${version_filename}"
local url_exist=$(check_if_url_exist $proxy_url)
if [ "$url_exist" == y ]; then
parameters[$i]=$proxy_url
filenames[$version_filename]=$filename
real_version=$version
else
real_version=$(get_url_version $url)
if [ "$real_version" != "$version" ]; then
echo "Failed to verify url: $url, real hash value: $real_version, expected value: $version_filename" 1>&2
exit 1
fi
fi
else
real_version=$(get_url_version $url)
fi
echo "$url==$real_version" >> ${BUILD_WEB_VERSION_FILE}
fi
done
$REAL_COMMAND "${parameters[@]}"
local result=$?
for filename in "${!filenames[@]}"
do
[ -f "$filename" ] && mv "$filename" "${filenames[$filename]}"
done
return $result
}
run_pip_command()
{
parameters=("$@")
if [ ! -x "$REAL_COMMAND" ] && [ " $1" == "freeze" ]; then
return 1
fi
if [ "$ENABLE_VERSION_CONTROL_PY" != "y" ]; then
$REAL_COMMAND "$@"
return $?
fi
local found=n
local install=n
local pip_version_file=$PIP_VERSION_FILE
local tmp_version_file=$(mktemp)
[ -f "$pip_version_file" ] && cp -f $pip_version_file $tmp_version_file
for para in "${parameters[@]}"
do
([ "$para" == "-c" ] || [ "$para" == "--constraint" ]) && found=y
if [ "$para" == "install" ]; then
install=y
elif [[ "$para" == *.whl ]]; then
package_name=$(echo $para | cut -d- -f1 | tr _ .)
sed "/^${package_name}==/d" -i $tmp_version_file
fi
done
if [ "$found" == "n" ] && [ "$install" == "y" ]; then
parameters+=("-c")
parameters+=("${tmp_version_file}")
fi
$REAL_COMMAND "${parameters[@]}"
local result=$?
rm $tmp_version_file
return $result
}
ENABLE_VERSION_CONTROL_DEB=$(check_version_control "deb")
ENABLE_VERSION_CONTROL_PY2=$(check_version_control "py2")
ENABLE_VERSION_CONTROL_PY3=$(check_version_control "py3")
ENABLE_VERSION_CONTROL_WEB=$(check_version_control "web")
ENABLE_VERSION_CONTROL_GIT=$(check_version_control "git")
ENABLE_VERSION_CONTROL_DOCKER=$(check_version_control "docker")
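For illustration, check_version_control turns the comma-separated SONIC_VERSION_CONTROL_COMPONENTS setting into these per-component flags; the values below are examples, not defaults:

    # Only Debian packages and Python 3 modules are version controlled:
    SONIC_VERSION_CONTROL_COMPONENTS=deb,py3
    # Every component (deb, py2, py3, web, git, docker) is version controlled:
    SONIC_VERSION_CONTROL_COMPONENTS=all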

View File

@ -0,0 +1,14 @@
#!/bin/bash
TARGET_PATH=$1
ARCH=$(dpkg --print-architecture)
DIST=$(grep VERSION_CODENAME /etc/os-release | cut -d= -f2)
([ -z "$DIST" ] && grep -q jessie /etc/os-release) && DIST=jessie
mkdir -p $TARGET_PATH
chmod a+rw $TARGET_PATH
dpkg-query -W -f '${Package}==${Version}\n' > "${TARGET_PATH}/versions-deb-${DIST}-${ARCH}"
([ -x "/usr/local/bin/pip2" ] || [ -x "/usr/bin/pip2" ]) && pip2 freeze > "${TARGET_PATH}/versions-py2-${DIST}-${ARCH}"
([ -x "/usr/local/bin/pip3" ] || [ -x "/usr/bin/pip3" ]) && pip3 freeze > "${TARGET_PATH}/versions-py3-${DIST}-${ARCH}"
exit 0
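On an amd64 Buster slave, for example, a call like the one post_run_buildinfo makes below would be expected to leave files named from the dist/arch suffixes above:

    collect_version_files /usr/local/share/buildinfo/post-versions
    ls /usr/local/share/buildinfo/post-versions
    # versions-deb-buster-amd64  versions-py2-buster-amd64  versions-py3-buster-amd64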

View File

@ -0,0 +1,14 @@
#!/bin/bash
. /usr/local/share/buildinfo/scripts/buildinfo_base.sh
[ -d $POST_VERSION_PATH ] && rm -rf $POST_VERSION_PATH
# Collect the version files
collect_version_files $POST_VERSION_PATH
[ -d $BUILD_VERSION_PATH ] && [ ! -z "$(ls -A $BUILD_VERSION_PATH)" ] && cp -rf $BUILD_VERSION_PATH/* $POST_VERSION_PATH
rm -rf $BUILD_VERSION_PATH/*
# Disable the build hooks
symlink_build_hooks -d

View File

@ -0,0 +1,20 @@
#!/bin/bash
. /usr/local/share/buildinfo/scripts/buildinfo_base.sh
[ -d $DIFF_VERSION_PATH ] && rm -rf $DIFF_VERSION_PATH
mkdir -p $DIFF_VERSION_PATH
mkdir -p $BUILD_VERSION_PATH
mkdir -p $LOG_PATH
[ -d $PRE_VERSION_PATH ] && rm -rf $PRE_VERSION_PATH
collect_version_files $PRE_VERSION_PATH
symlink_build_hooks
chmod -R a+rw $BUILDINFO_PATH
if [ "$ENABLE_VERSION_CONTROL_DEB" == "y" ] && [ -f $VERSION_DEB_PREFERENCE ]; then
cp -f $VERSION_DEB_PREFERENCE /etc/apt/preferences.d/
fi
exit 0
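The 01-versions-deb file copied into /etc/apt/preferences.d above is an ordinary apt preferences file; a minimal assumed example pinning one package (name, version and priority are placeholders) could be written like this:

    printf 'Package: curl\nPin: version 7.64.0-4+deb10u1\nPin-Priority: 999\n' \
        > /etc/apt/preferences.d/01-versions-deb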

View File

@ -0,0 +1,34 @@
#!/bin/bash
HOOK_PATH=/usr/local/share/buildinfo/hooks
TARGET_PATH=/usr/sbin
FILES=$(ls $HOOK_PATH)
usage()
{
echo "Usage: $0 [-d]"
exit 1
}
DISABLE=n
while getopts "d" opt; do
case $opt in
d)
DISABLE=y
;;
*)
usage
;;
esac
done
for f in $FILES
do
if [ $DISABLE == "n" ]; then
[ ! -e $TARGET_PATH/$f ] && ln -s $HOOK_PATH/$f $TARGET_PATH/$f
else
([ -e $TARGET_PATH/$f ] && ls -l $TARGET_PATH/$f | grep -q $HOOK_PATH) && rm -f $TARGET_PATH/$f
fi
done
exit 0
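Usage is symmetric with the pre/post scripts above:

    symlink_build_hooks       # link every file in $HOOK_PATH into /usr/sbin
    symlink_build_hooks -d    # remove only the links that point back into $HOOK_PATH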