From c691b739591b9b5bcd0c1ba338d04aa64aa60549 Mon Sep 17 00:00:00 2001 From: Kalimuthu-Velappan <53821802+Kalimuthu-Velappan@users.noreply.github.com> Date: Wed, 5 Oct 2022 02:43:40 +0530 Subject: [PATCH 01/19] 01.Version-cache - restructuring of Makefile.work (#12000) - The Makefile.work becomes complex and it is very difficult to manage the changes across branches. - Restructured the Makefile.work and it becomes more readable. - Added $(QUIET) option to turn on command echo mode through command line option. - Exported the SONIC_BUILD_VARS variable, through which make options can be set dynamically. Eg: make SONIC_BUILD_VARS='INCLUDE_NAT=y' --- Makefile | 34 +++++--- Makefile.work | 213 +++++++++++++++++++++++++++++++++----------------- slave.mk | 66 ++++++++-------- 3 files changed, 196 insertions(+), 117 deletions(-) diff --git a/Makefile b/Makefile index af3d7086ec8e..ccfefc6c183c 100644 --- a/Makefile +++ b/Makefile @@ -5,6 +5,14 @@ NOSTRETCH ?= 0 NOBUSTER ?= 0 NOBULLSEYE ?= 0 +override Q := @ +ifeq ($(QUIET),n) + override Q := +endif +override SONIC_OVERRIDE_BUILD_VARS += $(SONIC_BUILD_VARS) +override SONIC_OVERRIDE_BUILD_VARS += Q=$(Q) +export Q SONIC_OVERRIDE_BUILD_VARS + ifeq ($(NOJESSIE),0) BUILD_JESSIE=1 endif @@ -29,50 +37,50 @@ PLATFORM_CHECKOUT_CMD := $(shell if [ -f $(PLATFORM_CHECKOUT_FILE) ]; then PLATF %:: @echo "+++ --- Making $@ --- +++" ifeq ($(NOJESSIE), 0) - EXTRA_DOCKER_TARGETS=$(notdir $@) make -f Makefile.work jessie + EXTRA_DOCKER_TARGETS=$(notdir $@) $(MAKE) -f Makefile.work jessie endif ifeq ($(NOSTRETCH), 0) - EXTRA_DOCKER_TARGETS=$(notdir $@) BLDENV=stretch make -f Makefile.work stretch + EXTRA_DOCKER_TARGETS=$(notdir $@) BLDENV=stretch $(MAKE) -f Makefile.work stretch endif ifeq ($(NOBUSTER), 0) - EXTRA_DOCKER_TARGETS=$(notdir $@) BLDENV=buster make -f Makefile.work buster + EXTRA_DOCKER_TARGETS=$(notdir $@) BLDENV=buster $(MAKE) -f Makefile.work buster endif ifeq ($(NOBULLSEYE), 0) - BLDENV=bullseye make -f Makefile.work $@ + BLDENV=bullseye $(MAKE) -f Makefile.work $@ endif - BLDENV=bullseye make -f Makefile.work docker-cleanup + BLDENV=bullseye $(MAKE) -f Makefile.work docker-cleanup jessie: @echo "+++ Making $@ +++" ifeq ($(NOJESSIE), 0) - make -f Makefile.work jessie + $(MAKE) -f Makefile.work jessie endif stretch: @echo "+++ Making $@ +++" ifeq ($(NOSTRETCH), 0) - make -f Makefile.work stretch + $(MAKE) -f Makefile.work stretch endif buster: @echo "+++ Making $@ +++" ifeq ($(NOBUSTER), 0) - make -f Makefile.work buster + $(MAKE) -f Makefile.work buster endif init: @echo "+++ Making $@ +++" - make -f Makefile.work $@ + $(MAKE) -f Makefile.work $@ # # Function to invoke target $@ in Makefile.work with proper BLDENV # define make_work @echo "+++ Making $@ +++" - $(if $(BUILD_JESSIE),make -f Makefile.work $@,) - $(if $(BUILD_STRETCH),BLDENV=stretch make -f Makefile.work $@,) - $(if $(BUILD_BUSTER),BLDENV=buster make -f Makefile.work $@,) - $(if $(BUILD_BULLSEYE),BLDENV=bullseye make -f Makefile.work $@,) + $(if $(BUILD_JESSIE),$(MAKE) -f Makefile.work $@,) + $(if $(BUILD_STRETCH),BLDENV=stretch $(MAKE) -f Makefile.work $@,) + $(if $(BUILD_BUSTER),BLDENV=buster $(MAKE) -f Makefile.work $@,) + $(if $(BUILD_BULLSEYE),BLDENV=bullseye $(MAKE) -f Makefile.work $@,) endef .PHONY: $(PLATFORM_PATH) diff --git a/Makefile.work b/Makefile.work index 6171a05c5192..a6cacafce834 100644 --- a/Makefile.work +++ b/Makefile.work @@ -124,7 +124,7 @@ endif # Define a do-nothing target for rules/config.user so that when # the file is missing, make won't try to rebuld 
everything. rules/config.user: - @echo -n "" + $(Q)echo -n "" include rules/config -include rules/config.user @@ -173,21 +173,59 @@ endif endif # Generate the version control build info -$(shell SONIC_VERSION_CONTROL_COMPONENTS=$(SONIC_VERSION_CONTROL_COMPONENTS) \ - TRUSTED_GPG_URLS=$(TRUSTED_GPG_URLS) PACKAGE_URL_PREFIX=$(PACKAGE_URL_PREFIX) \ - scripts/generate_buildinfo_config.sh) +$(shell \ + SONIC_VERSION_CONTROL_COMPONENTS=$(SONIC_VERSION_CONTROL_COMPONENTS) \ + TRUSTED_GPG_URLS=$(TRUSTED_GPG_URLS) \ + PACKAGE_URL_PREFIX=$(PACKAGE_URL_PREFIX) \ + scripts/generate_buildinfo_config.sh) # Generate the slave Dockerfile, and prepare build info for it -$(shell CONFIGURED_ARCH=$(CONFIGURED_ARCH) MULTIARCH_QEMU_ENVIRON=$(MULTIARCH_QEMU_ENVIRON) CROSS_BUILD_ENVIRON=$(CROSS_BUILD_ENVIRON) ENABLE_FIPS_FEATURE=$(ENABLE_FIPS_FEATURE) DOCKER_EXTRA_OPTS=$(DOCKER_EXTRA_OPTS) DEFAULT_CONTAINER_REGISTRY=$(DEFAULT_CONTAINER_REGISTRY) j2 $(SLAVE_DIR)/Dockerfile.j2 > $(SLAVE_DIR)/Dockerfile) -$(shell CONFIGURED_ARCH=$(CONFIGURED_ARCH) MULTIARCH_QEMU_ENVIRON=$(MULTIARCH_QEMU_ENVIRON) CROSS_BUILD_ENVIRON=$(CROSS_BUILD_ENVIRON) j2 $(SLAVE_DIR)/Dockerfile.user.j2 > $(SLAVE_DIR)/Dockerfile.user) -$(shell BUILD_SLAVE=y DEFAULT_CONTAINER_REGISTRY=$(DEFAULT_CONTAINER_REGISTRY) scripts/prepare_docker_buildinfo.sh $(SLAVE_BASE_IMAGE) $(SLAVE_DIR)/Dockerfile $(CONFIGURED_ARCH) "" $(BLDENV)) +$(shell CONFIGURED_ARCH=$(CONFIGURED_ARCH) \ + MULTIARCH_QEMU_ENVIRON=$(MULTIARCH_QEMU_ENVIRON) \ + CROSS_BUILD_ENVIRON=$(CROSS_BUILD_ENVIRON) \ + ENABLE_FIPS_FEATURE=$(ENABLE_FIPS_FEATURE) \ + DOCKER_EXTRA_OPTS=$(DOCKER_EXTRA_OPTS) \ + DEFAULT_CONTAINER_REGISTRY=$(DEFAULT_CONTAINER_REGISTRY) \ + j2 $(SLAVE_DIR)/Dockerfile.j2 > $(SLAVE_DIR)/Dockerfile) + +$(shell CONFIGURED_ARCH=$(CONFIGURED_ARCH) \ + MULTIARCH_QEMU_ENVIRON=$(MULTIARCH_QEMU_ENVIRON) \ + CROSS_BUILD_ENVIRON=$(CROSS_BUILD_ENVIRON) \ + j2 $(SLAVE_DIR)/Dockerfile.user.j2 > $(SLAVE_DIR)/Dockerfile.user) + +PREPARE_DOCKER=BUILD_SLAVE=y \ + DEFAULT_CONTAINER_REGISTRY=$(DEFAULT_CONTAINER_REGISTRY) \ + scripts/prepare_docker_buildinfo.sh \ + $(SLAVE_BASE_IMAGE) \ + $(SLAVE_DIR)/Dockerfile \ + $(CONFIGURED_ARCH) \ + "" \ + $(BLDENV) + +$(shell $(PREPARE_DOCKER) ) # Add the versions in the tag, if the version change, need to rebuild the slave -SLAVE_BASE_TAG = $(shell cat $(SLAVE_DIR)/Dockerfile $(SLAVE_DIR)/buildinfo/versions/versions-* src/sonic-build-hooks/hooks/* | sha1sum | awk '{print substr($$1,0,11);}') -# Calculate the slave TAG based on $(USER)/$(PWD)/$(CONFIGURED_PLATFORM) to get unique SHA ID -SLAVE_TAG = $(shell (cat $(SLAVE_DIR)/Dockerfile.user $(SLAVE_DIR)/Dockerfile $(SLAVE_DIR)/buildinfo/versions/versions-* .git/HEAD && echo $(USER)/$(PWD)/$(CONFIGURED_PLATFORM)) \ - | sha1sum | awk '{print substr($$1,0,11);}') +SLAVE_BASE_TAG = $(shell \ + cat $(SLAVE_DIR)/Dockerfile \ + $(SLAVE_DIR)/buildinfo/versions/versions-* \ + src/sonic-build-hooks/hooks/* 2>/dev/null \ + | sha1sum \ + | awk '{print substr($$1,0,11);}') +# Calculate the slave TAG based on $(USER)/$(PWD)/$(CONFIGURED_PLATFORM) to get unique SHA ID +SLAVE_TAG = $(shell \ + (cat $(SLAVE_DIR)/Dockerfile.user \ + $(SLAVE_DIR)/Dockerfile \ + $(SLAVE_DIR)/buildinfo/versions/versions-* \ + .git/HEAD \ + && echo $(USER)/$(PWD)/$(CONFIGURED_PLATFORM)) \ + | sha1sum \ + | awk '{print substr($$1,0,11);}') + +COLLECT_DOCKER=DEFAULT_CONTAINER_REGISTRY=$(DEFAULT_CONTAINER_REGISTRY) \ + scripts/collect_docker_version_files.sh \ + $(SLAVE_BASE_IMAGE):$(SLAVE_BASE_TAG) \ + target OVERLAY_MODULE_CHECK := \ lsmod | 
grep -q "^overlay " &>/dev/null || \ zgrep -q 'CONFIG_OVERLAY_FS=y' /proc/config.gz &>/dev/null || \ @@ -329,7 +367,7 @@ DOCKER_BASE_LOG = $(SLAVE_DIR)/$(SLAVE_BASE_IMAGE)_$(SLAVE_BASE_TAG).log DOCKER_LOG = $(SLAVE_DIR)/$(SLAVE_IMAGE)_$(SLAVE_TAG).log -DOCKER_BASE_BUILD = docker build --no-cache \ +DOCKER_SLAVE_BASE_BUILD = docker build --no-cache \ -t $(SLAVE_BASE_IMAGE):$(SLAVE_BASE_TAG) \ --build-arg http_proxy=$(http_proxy) \ --build-arg https_proxy=$(https_proxy) \ @@ -339,7 +377,7 @@ DOCKER_BASE_BUILD = docker build --no-cache \ DOCKER_BASE_PULL = docker pull \ $(REGISTRY_SERVER):$(REGISTRY_PORT)/$(SLAVE_BASE_IMAGE):$(SLAVE_BASE_TAG) -DOCKER_BUILD = docker build --no-cache \ +DOCKER_USER_BUILD = docker build --no-cache \ --build-arg user=$(USER) \ --build-arg uid=$(shell id -u) \ --build-arg guid=$(shell id -g) \ @@ -349,7 +387,52 @@ DOCKER_BUILD = docker build --no-cache \ -f $(SLAVE_DIR)/Dockerfile.user \ $(SLAVE_DIR) $(SPLIT_LOG) $(DOCKER_LOG) -SONIC_BUILD_INSTRUCTION := make \ + +DOCKER_SLAVE_BASE_INSPECT = \ + { \ + echo Checking sonic-slave-base image: $(SLAVE_BASE_IMAGE):$(SLAVE_BASE_TAG); \ + docker inspect --type image $(SLAVE_BASE_IMAGE):$(SLAVE_BASE_TAG) &> /dev/null; \ + } + +DOCKER_SLAVE_BASE_PULL_REGISTRY = \ + [ $(ENABLE_DOCKER_BASE_PULL) == y ] && \ + { \ + echo Image $(SLAVE_BASE_IMAGE):$(SLAVE_BASE_TAG) not found. Pulling...; \ + $(DOCKER_BASE_PULL); \ + } && \ + { \ + docker tag $(REGISTRY_SERVER):$(REGISTRY_PORT)/$(SLAVE_BASE_IMAGE):$(SLAVE_BASE_TAG) $(SLAVE_BASE_IMAGE):$(SLAVE_BASE_TAG) && \ + $(COLLECT_DOCKER); \ + }\ + +SONIC_SLAVE_BASE_BUILD = \ + { \ + $(DOCKER_SLAVE_BASE_INSPECT); \ + } || \ + { \ + $(DOCKER_SLAVE_BASE_PULL_REGISTRY); \ + } || \ + { \ + echo Image $(SLAVE_BASE_IMAGE):$(SLAVE_BASE_TAG) not found. Building... ; \ + $(PREPARE_DOCKER) ; \ + $(DOCKER_SLAVE_BASE_BUILD) ; \ + $(COLLECT_DOCKER) ; \ + } + +DOCKER_SLAVE_USER_INSPECT = \ + { \ + echo Checking sonic-slave-user image: $(SLAVE_IMAGE):$(SLAVE_TAG); \ + docker inspect --type image $(SLAVE_IMAGE):$(SLAVE_TAG) &> /dev/null; \ + } + +SONIC_SLAVE_USER_BUILD = \ + { $(DOCKER_SLAVE_USER_INSPECT) } || \ + { \ + echo Image $(SLAVE_IMAGE):$(SLAVE_TAG) not found. Building... ; \ + $(DOCKER_USER_BUILD) ; \ + } + +SONIC_BUILD_INSTRUCTION := $(MAKE) \ -f slave.mk \ PLATFORM=$(PLATFORM) \ PLATFORM_ARCH=$(PLATFORM_ARCH) \ @@ -415,87 +498,75 @@ SONIC_BUILD_INSTRUCTION := make \ .PHONY: sonic-slave-build sonic-slave-bash init reset + +ifeq ($(filter clean,$(MAKECMDGOALS)),) +COLLECT_BUILD_VERSION = { DBGOPT='$(DBGOPT)' scripts/collect_build_version_files.sh $$?; } +endif + +ifdef SOURCE_FOLDER + DOCKER_RUN += -v $(SOURCE_FOLDER):/var/$(USER)/src +endif + +ifeq "$(KEEP_SLAVE_ON)" "yes" +SLAVE_SHELL={ /bin/bash; } +endif + .DEFAULT_GOAL := all -%:: +%:: | sonic-build-hooks ifneq ($(filter y, $(MULTIARCH_QEMU_ENVIRON) $(CROSS_BUILD_ENVIRON)),) - @$(DOCKER_MULTIARCH_CHECK) + $(Q)$(DOCKER_MULTIARCH_CHECK) ifneq ($(BLDENV), ) - @$(DOCKER_SERVICE_MULTIARCH_CHECK) - @$(DOCKER_SERVICE_DOCKERFS_CHECK) -endif -endif - @$(OVERLAY_MODULE_CHECK) - - @pushd src/sonic-build-hooks; TRUSTED_GPG_URLS=$(TRUSTED_GPG_URLS) make all; popd - @cp src/sonic-build-hooks/buildinfo/sonic-build-hooks* $(SLAVE_DIR)/buildinfo - @docker inspect --type image $(SLAVE_BASE_IMAGE):$(SLAVE_BASE_TAG) &> /dev/null || \ - { [ $(ENABLE_DOCKER_BASE_PULL) == y ] && { echo Image $(SLAVE_BASE_IMAGE):$(SLAVE_BASE_TAG) not found. 
Pulling...; } && \ - $(DOCKER_BASE_PULL) && \ - { docker tag $(REGISTRY_SERVER):$(REGISTRY_PORT)/$(SLAVE_BASE_IMAGE):$(SLAVE_BASE_TAG) $(SLAVE_BASE_IMAGE):$(SLAVE_BASE_TAG) && \ - scripts/collect_docker_version_files.sh $(SLAVE_BASE_IMAGE):$(SLAVE_BASE_TAG) target ; } } || \ - { echo Image $(SLAVE_BASE_IMAGE):$(SLAVE_BASE_TAG) not found. Building... ; \ - $(DOCKER_BASE_BUILD) ; \ - scripts/collect_docker_version_files.sh $(SLAVE_BASE_IMAGE):$(SLAVE_BASE_TAG) target ; } - @docker inspect --type image $(SLAVE_IMAGE):$(SLAVE_TAG) &> /dev/null || \ - { echo Image $(SLAVE_IMAGE):$(SLAVE_TAG) not found. Building... ; \ - $(DOCKER_BUILD) ; } -ifeq "$(KEEP_SLAVE_ON)" "yes" - ifdef SOURCE_FOLDER - @$(DOCKER_RUN) -v $(SOURCE_FOLDER):/var/$(USER)/src $(SLAVE_IMAGE):$(SLAVE_TAG) bash -c "$(SONIC_BUILD_INSTRUCTION) $@; scripts/collect_build_version_files.sh \$$?; /bin/bash" - else - @$(DOCKER_RUN) $(SLAVE_IMAGE):$(SLAVE_TAG) bash -c "$(SONIC_BUILD_INSTRUCTION) $@; scripts/collect_build_version_files.sh \$$?; /bin/bash" - endif -else - @$(DOCKER_RUN) $(SLAVE_IMAGE):$(SLAVE_TAG) bash -c "$(SONIC_BUILD_INSTRUCTION) $@; scripts/collect_build_version_files.sh \$$?" + $(Q)$(DOCKER_SERVICE_MULTIARCH_CHECK) + $(Q)$(DOCKER_SERVICE_DOCKERFS_CHECK) +endif endif + $(Q)$(OVERLAY_MODULE_CHECK) + $(Q)$(SONIC_SLAVE_BASE_BUILD) + $(Q)$(SONIC_SLAVE_USER_BUILD) + + $(Q)$(DOCKER_RUN) \ + $(SLAVE_IMAGE):$(SLAVE_TAG) \ + bash -c "$(SONIC_BUILD_INSTRUCTION) $@;$(COLLECT_BUILD_VERSION); $(SLAVE_SHELL)" + $(Q)$(docker-image-cleanup) docker-cleanup: - $(docker-image-cleanup) + $(Q)$(docker-image-cleanup) +.PHONY: sonic-build-hooks sonic-build-hooks: - @pushd src/sonic-build-hooks; TRUSTED_GPG_URLS=$(TRUSTED_GPG_URLS) make all; popd - @cp src/sonic-build-hooks/buildinfo/sonic-build-hooks* $(SLAVE_DIR)/buildinfo + $(Q)pushd src/sonic-build-hooks; TRUSTED_GPG_URLS=$(TRUSTED_GPG_URLS) $(MAKE) all; popd + $(Q)mkdir -p $(SLAVE_DIR)/buildinfo + $(Q)cp src/sonic-build-hooks/buildinfo/sonic-build-hooks* $(SLAVE_DIR)/buildinfo -sonic-slave-base-build : sonic-build-hooks +sonic-slave-base-build : | sonic-build-hooks ifeq ($(MULTIARCH_QEMU_ENVIRON), y) - @$(DOCKER_MULTIARCH_CHECK) -endif - @$(OVERLAY_MODULE_CHECK) - @echo Checking sonic-slave-base image: $(SLAVE_BASE_IMAGE):$(SLAVE_BASE_TAG) - @docker inspect --type image $(SLAVE_BASE_IMAGE):$(SLAVE_BASE_TAG) &> /dev/null || \ - { [ $(ENABLE_DOCKER_BASE_PULL) == y ] && { echo Image $(SLAVE_BASE_IMAGE):$(SLAVE_BASE_TAG) not found. Pulling...; } && \ - $(DOCKER_BASE_PULL) && \ - { docker tag $(REGISTRY_SERVER):$(REGISTRY_PORT)/$(SLAVE_BASE_IMAGE):$(SLAVE_BASE_TAG) $(SLAVE_BASE_IMAGE):$(SLAVE_BASE_TAG) && \ - scripts/collect_docker_version_files.sh $(SLAVE_BASE_IMAGE):$(SLAVE_BASE_TAG) target ; } } || \ - { echo Image $(SLAVE_BASE_IMAGE):$(SLAVE_BASE_TAG) not found. Building... ; \ - $(DOCKER_BASE_BUILD) ; \ - scripts/collect_docker_version_files.sh $(SLAVE_BASE_IMAGE):$(SLAVE_BASE_TAG) target ; } + $(Q)$(DOCKER_MULTIARCH_CHECK) +endif + $(Q)$(OVERLAY_MODULE_CHECK) + $(Q)$(SONIC_SLAVE_BASE_BUILD) sonic-slave-build : sonic-slave-base-build - @echo Checking sonic-slave image: $(SLAVE_IMAGE):$(SLAVE_TAG) - @docker inspect --type image $(SLAVE_IMAGE):$(SLAVE_TAG) &> /dev/null || \ - { echo Image $(SLAVE_IMAGE):$(SLAVE_TAG) not found. Building... 
; \ - $(DOCKER_BUILD) ; } + $(Q)$(SONIC_SLAVE_USER_BUILD) sonic-slave-bash : sonic-slave-build - @$(DOCKER_RUN) -t $(SLAVE_IMAGE):$(SLAVE_TAG) bash + $(Q)$(DOCKER_RUN) -t $(SLAVE_IMAGE):$(SLAVE_TAG) bash sonic-slave-run : sonic-slave-build - @$(DOCKER_RUN) $(SLAVE_IMAGE):$(SLAVE_TAG) bash -c "$(SONIC_RUN_CMDS)" + $(Q)$(DOCKER_RUN) $(SLAVE_IMAGE):$(SLAVE_TAG) bash -c "$(SONIC_RUN_CMDS)" showtag: - @echo $(SLAVE_IMAGE):$(SLAVE_TAG) - @echo $(SLAVE_BASE_IMAGE):$(SLAVE_BASE_TAG) + $(Q)echo $(SLAVE_IMAGE):$(SLAVE_TAG) + $(Q)echo $(SLAVE_BASE_IMAGE):$(SLAVE_BASE_TAG) init : - @git submodule update --init --recursive - @git submodule foreach --recursive '[ -f .git ] && echo "gitdir: $$(realpath --relative-to=. $$(cut -d" " -f2 .git))" > .git' + $(Q)git submodule update --init --recursive + $(Q)git submodule foreach --recursive '[ -f .git ] && echo "gitdir: $$(realpath --relative-to=. $$(cut -d" " -f2 .git))" > .git' .ONESHELL : reset reset : - @echo && echo -n "Warning! All local changes will be lost. Proceed? [y/N]: " - @read ans && ( + $(Q)echo && echo -n "Warning! All local changes will be lost. Proceed? [y/N]: " + $(Q)read ans && ( if [ $$ans == y ]; then echo "Resetting local repository. Please wait..."; sudo rm -rf fsroot*; diff --git a/slave.mk b/slave.mk index c521b3807356..28efed3dfc11 100644 --- a/slave.mk +++ b/slave.mk @@ -92,32 +92,32 @@ export BLDENV .platform : ifneq ($(CONFIGURED_PLATFORM),generic) - @echo Build system is not configured, please run make configure - @exit 1 + $(Q)echo Build system is not configured, please run make configure + $(Q)exit 1 endif configure : - @mkdir -p $(JESSIE_DEBS_PATH) - @mkdir -p $(STRETCH_DEBS_PATH) - @mkdir -p $(BUSTER_DEBS_PATH) - @mkdir -p $(BULLSEYE_DEBS_PATH) - @mkdir -p $(FILES_PATH) - @mkdir -p $(JESSIE_FILES_PATH) - @mkdir -p $(STRETCH_FILES_PATH) - @mkdir -p $(BUSTER_FILES_PATH) - @mkdir -p $(BULLSEYE_FILES_PATH) - @mkdir -p $(PYTHON_DEBS_PATH) - @mkdir -p $(PYTHON_WHEELS_PATH) - @mkdir -p $(DPKG_ADMINDIR_PATH) - @echo $(PLATFORM) > .platform - @echo $(PLATFORM_ARCH) > .arch + $(Q)mkdir -p $(JESSIE_DEBS_PATH) + $(Q)mkdir -p $(STRETCH_DEBS_PATH) + $(Q)mkdir -p $(BUSTER_DEBS_PATH) + $(Q)mkdir -p $(BULLSEYE_DEBS_PATH) + $(Q)mkdir -p $(FILES_PATH) + $(Q)mkdir -p $(JESSIE_FILES_PATH) + $(Q)mkdir -p $(STRETCH_FILES_PATH) + $(Q)mkdir -p $(BUSTER_FILES_PATH) + $(Q)mkdir -p $(BULLSEYE_FILES_PATH) + $(Q)mkdir -p $(PYTHON_DEBS_PATH) + $(Q)mkdir -p $(PYTHON_WHEELS_PATH) + $(Q)mkdir -p $(DPKG_ADMINDIR_PATH) + $(Q)echo $(PLATFORM) > .platform + $(Q)echo $(PLATFORM_ARCH) > .arch distclean : .platform clean - @rm -f .platform - @rm -f .arch + $(Q)rm -f .platform + $(Q)rm -f .arch list : - @$(foreach target,$(SONIC_TARGET_LIST),echo $(target);) + $(Q)$(foreach target,$(SONIC_TARGET_LIST),echo $(target);) ############################################################################### ## Include other rules @@ -177,7 +177,7 @@ endif # TODO(PINS): Remove when Bazel binaries are available for armhf ifeq ($(CONFIGURED_ARCH),armhf) ifeq ($(INCLUDE_P4RT),y) - @echo "Disabling P4RT due to incompatible CPU architecture: $(CONFIGURED_ARCH)" + $(Q)echo "Disabling P4RT due to incompatible CPU architecture: $(CONFIGURED_ARCH)" endif override INCLUDE_P4RT = n endif @@ -205,7 +205,7 @@ endif ifeq ($(ENABLE_ASAN),y) ifneq ($(CONFIGURED_ARCH),amd64) - @echo "Disabling SWSS address sanitizer due to incompatible CPU architecture: $(CONFIGURED_ARCH)" + $(Q)echo "Disabling SWSS address sanitizer due to incompatible CPU architecture: $(CONFIGURED_ARCH)" override ENABLE_ASAN 
= n endif endif @@ -870,12 +870,12 @@ endif # start docker daemon docker-start : - @sudo sed -i 's/--storage-driver=vfs/--storage-driver=$(SONIC_SLAVE_DOCKER_DRIVER)/' /etc/default/docker - @sudo sed -i -e '/http_proxy/d' -e '/https_proxy/d' /etc/default/docker - @sudo bash -c "{ echo \"export http_proxy=$$http_proxy\"; \ + $(Q)sudo sed -i 's/--storage-driver=vfs/--storage-driver=$(SONIC_SLAVE_DOCKER_DRIVER)/' /etc/default/docker + $(Q)sudo sed -i -e '/http_proxy/d' -e '/https_proxy/d' /etc/default/docker + $(Q)sudo bash -c "{ echo \"export http_proxy=$$http_proxy\"; \ echo \"export https_proxy=$$https_proxy\"; \ echo \"export no_proxy=$$no_proxy\"; } >> /etc/default/docker" - @test x$(SONIC_CONFIG_USE_NATIVE_DOCKERD_FOR_BUILD) != x"y" && sudo service docker status &> /dev/null || ( sudo service docker start &> /dev/null && ./scripts/wait_for_docker.sh 60 ) + $(Q)test x$(SONIC_CONFIG_USE_NATIVE_DOCKERD_FOR_BUILD) != x"y" && sudo service docker status &> /dev/null || ( sudo service docker start &> /dev/null && ./scripts/wait_for_docker.sh 60 ) # targets for building simple docker images that do not depend on any debian packages $(addprefix $(TARGET_PATH)/, $(SONIC_SIMPLE_DOCKER_IMAGES)) : $(TARGET_PATH)/%.gz : .platform docker-start $$(addsuffix -load,$$(addprefix $(TARGET_PATH)/,$$($$*.gz_LOAD_DOCKERS))) @@ -1414,12 +1414,12 @@ SONIC_CLEAN_FILES = $(addsuffix -clean,$(addprefix $(FILES_PATH)/, \ $(SONIC_MAKE_FILES))) $(SONIC_CLEAN_DEBS) :: $(DEBS_PATH)/%-clean : .platform $$(addsuffix -clean,$$(addprefix $(DEBS_PATH)/,$$($$*_MAIN_DEB))) - @# remove derived or extra targets if main one is removed, because we treat them - @# as part of one package - @rm -f $(addprefix $(DEBS_PATH)/, $* $($*_DERIVED_DEBS) $($*_EXTRA_DEBS)) + $(Q)# remove derived or extra targets if main one is removed, because we treat them + $(Q)# as part of one package + $(Q)rm -f $(addprefix $(DEBS_PATH)/, $* $($*_DERIVED_DEBS) $($*_EXTRA_DEBS)) $(SONIC_CLEAN_FILES) :: $(FILES_PATH)/%-clean : .platform - @rm -f $(FILES_PATH)/$* + $(Q)rm -f $(FILES_PATH)/$* SONIC_CLEAN_TARGETS += $(addsuffix -clean,$(addprefix $(TARGET_PATH)/, \ $(SONIC_DOCKER_IMAGES) \ @@ -1427,20 +1427,20 @@ SONIC_CLEAN_TARGETS += $(addsuffix -clean,$(addprefix $(TARGET_PATH)/, \ $(SONIC_SIMPLE_DOCKER_IMAGES) \ $(SONIC_INSTALLERS))) $(SONIC_CLEAN_TARGETS) :: $(TARGET_PATH)/%-clean : .platform - @rm -f $(TARGET_PATH)/$* + $(Q)rm -f $(TARGET_PATH)/$* SONIC_CLEAN_STDEB_DEBS = $(addsuffix -clean,$(addprefix $(PYTHON_DEBS_PATH)/, \ $(SONIC_PYTHON_STDEB_DEBS))) $(SONIC_CLEAN_STDEB_DEBS) :: $(PYTHON_DEBS_PATH)/%-clean : .platform - @rm -f $(PYTHON_DEBS_PATH)/$* + $(Q)rm -f $(PYTHON_DEBS_PATH)/$* SONIC_CLEAN_WHEELS = $(addsuffix -clean,$(addprefix $(PYTHON_WHEELS_PATH)/, \ $(SONIC_PYTHON_WHEELS))) $(SONIC_CLEAN_WHEELS) :: $(PYTHON_WHEELS_PATH)/%-clean : .platform - @rm -f $(PYTHON_WHEELS_PATH)/$* + $(Q)rm -f $(PYTHON_WHEELS_PATH)/$* clean-logs :: .platform - @rm -f $(TARGET_PATH)/*.log $(DEBS_PATH)/*.log $(FILES_PATH)/*.log $(PYTHON_DEBS_PATH)/*.log $(PYTHON_WHEELS_PATH)/*.log + $(Q)rm -f $(TARGET_PATH)/*.log $(DEBS_PATH)/*.log $(FILES_PATH)/*.log $(PYTHON_DEBS_PATH)/*.log $(PYTHON_WHEELS_PATH)/*.log clean :: .platform clean-logs $$(SONIC_CLEAN_DEBS) $$(SONIC_CLEAN_FILES) $$(SONIC_CLEAN_TARGETS) $$(SONIC_CLEAN_STDEB_DEBS) $$(SONIC_CLEAN_WHEELS) From 1f0699f51e51cf9ca6e36d85bc632ee0e31792e4 Mon Sep 17 00:00:00 2001 From: xumia <59720581+xumia@users.noreply.github.com> Date: Wed, 5 Oct 2022 08:10:54 +0800 Subject: [PATCH 02/19] Fix sonic-config low dpkg hit rate 
issue (#12244) Why I did it When sending a PR only CI change, as expected, the target target/python-wheels/buster/sonic_config_engine-1.0-py2-none-any.whl should be from the cache, because the depended files were not changed, but it rebuilt. How I did it Sort the files by name. --- rules/sonic-config.dep | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/rules/sonic-config.dep b/rules/sonic-config.dep index 65aabe74d76f..2b8b98fcd42b 100644 --- a/rules/sonic-config.dep +++ b/rules/sonic-config.dep @@ -4,7 +4,7 @@ SPATH := $($(SONIC_CONFIG_ENGINE_PY3)_SRC_PATH) DEP_FILES := $(SONIC_COMMON_FILES_LIST) rules/sonic-config.mk rules/sonic-config.dep DEP_FILES += $(SONIC_COMMON_BASE_FILES_LIST) DEP_FILES += $(shell git ls-files $(SPATH)) -DEP_FILES += files/image_config/interfaces/interfaces.j2 dockers/docker-orchagent/ports.json.j2 dockers/docker-dhcp-relay/wait_for_intf.sh.j2 dockers/docker-dhcp-relay/docker-dhcp-relay.supervisord.conf.j2 dockers/docker-lldp/lldpd.conf.j2 dockers/docker-orchagent/ipinip.json.j2 $(shell find device -type f) files/build_templates/qos_config.j2 dockers/docker-orchagent/switch.json.j2 dockers/docker-orchagent/vxlan.json.j2 files/image_config/constants/constants.yml +DEP_FILES += files/image_config/interfaces/interfaces.j2 dockers/docker-orchagent/ports.json.j2 dockers/docker-dhcp-relay/wait_for_intf.sh.j2 dockers/docker-dhcp-relay/docker-dhcp-relay.supervisord.conf.j2 dockers/docker-lldp/lldpd.conf.j2 dockers/docker-orchagent/ipinip.json.j2 $(shell find device -type f | sort) files/build_templates/qos_config.j2 dockers/docker-orchagent/switch.json.j2 dockers/docker-orchagent/vxlan.json.j2 files/image_config/constants/constants.yml ifeq ($(ENABLE_PY2_MODULES), y) $(SONIC_CONFIG_ENGINE_PY2)_CACHE_MODE := GIT_CONTENT_SHA From 3686454c6ed56500e50df7c4fade62bd315edfc4 Mon Sep 17 00:00:00 2001 From: kannankvs Date: Thu, 6 Oct 2022 06:21:42 +0530 Subject: [PATCH 03/19] Updated the template with comment recieved (#12276) Updated the PR template with comment received on removing the reference link on GCU. Hence added text to show reference for GCU PR. --- .github/pull_request_template.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md index d553f7a4d0c4..85645d8ff7ee 100644 --- a/.github/pull_request_template.md +++ b/.github/pull_request_template.md @@ -39,7 +39,7 @@ Write a short (one line) summary that describes the changes in this pull request for inclusion in the changelog: --> -#### Ensure to add label/tag for the feature raised. example - [PR#2174](https://github.com/sonic-net/sonic-utilities/pull/2174) where, Generic Config and Update feature has been labelled as GCU. +#### Ensure to add label/tag for the feature raised. example - PR#2174 under sonic-utilities repo. where, Generic Config and Update feature has been labelled as GCU. #### Link to config_db schema for YANG module changes A{merged?} +A -- NO --> STOP +A -- YES --> A1{Approved
for 202205<br>Branch?}
+A1 -- NO --> STOP
+A1 -- YES --> A2(pr_cherrypick_prestep)
+B(pr_cherrypick_prestep)
+B --> B1{cherry pick<br>conflict?}
+B1 -- YES --> B2(Add tag:<br>Cherry Pick Conflict_202205) --> B3(Add comment:<br>refer author code conflict) --> STOP1(STOP)
+B1 -- NO --> B4(Create New PR) -- success --> B5(New PR add tag:<br>automerge) --> B6(New PR add comment:<br>Origin PR link) --> B7(Origin PR add tag:<br>Created PR to 202205 Branch) --> B8(Origin PR add comment:<br>New PR link)
+B4 -- fail --> STOP1
+```
+
+2. automerge:
+```mermaid
+graph
+Start(PR azp finished successfully) --> A{author:<br>mssonicbld?}
+A -- NO --> STOP
+A -- YES --> B{tag:<br>automerge?} -- YES --> C(Merge PR)
+B -- NO --> STOP
+```
+
+3. pr_cherrypick_poststep:
+```mermaid
+graph
+A(PR is Merged) --> B{tag:<br>automerge?}
+B -- YES --> B1{author:<br>mssonicbld?}
+B1 -- YES --> B2{"title starts:<br>[action] [PR:123]"}
+B2 -- YES --> C(Origin PR remove tag:<br>Created PR to 202205 Branch) --> D(Origin PR add tag:
Included in 202205 Branch) +B -- NO --> STOP +B1 -- NO --> STOP +B2 -- NO --> STOP +``` diff --git a/.github/workflows/pr_cherrypick_poststep.yml b/.github/workflows/pr_cherrypick_poststep.yml new file mode 100644 index 000000000000..1e9e497075d7 --- /dev/null +++ b/.github/workflows/pr_cherrypick_poststep.yml @@ -0,0 +1,49 @@ +name: PostCherryPick +on: + pull_request_target: + types: + - closed + branches: + - '20*' + +jobs: + post_cherry_pick: + if: github.event.pull_request.merged == true && contains(github.event.pull_request.labels.*.name, 'automerge') && github.event.pull_request.head.user.login == 'mssonicbld' && startsWith(github.event.pull_request.title, '[action]') + runs-on: ubuntu-latest + steps: + - name: Debug + env: + GITHUB_CONTEXT: ${{ toJson(github) }} + run: echo $GITHUB_CONTEXT | jq + - name: Checkout + uses: actions/checkout@v3 + with: + persist-credentials: false + - name: Main + env: + GITHUB_CONTEXT: ${{ toJson(github) }} + TOKEN: ${{ secrets.TOKEN }} + run: | + set -e + pr_url=$(echo $GITHUB_CONTEXT | jq -r ".event.pull_request._links.html.href") + pr_id=$(echo $GITHUB_CONTEXT | jq -r ".event.number") + base_ref=$(echo $GITHUB_CONTEXT | jq -r ".base_ref") + echo ${TOKEN} | gh auth login --with-token + title=$(echo $GITHUB_CONTEXT | jq -r ".event.pull_request.title") + origin_pr_id=$(echo $title | grep -Eo "\[action\] \[PR:[0-9]*\]" | grep -Eo "[0-9]*") + origin_pr_url=$(echo $pr_url | sed "s/$pr_id/$origin_pr_id/") + echo ============================= + echo pr_url: $pr_url + echo pr_id: $pr_id + echo base_ref: $base_ref + echo title: $title + echo origin_pr_id: $origin_pr_id + echo origin_pr_url: $origin_pr_url + echo ============================= + # Add label + if [[ "$origin_pr_id" == "" ]];then + echo "original PR didn't found." + exit 1 + fi + gh pr edit $origin_pr_url --add-label "Included in ${base_ref} Branch" + gh pr edit $origin_pr_url --remove-label "Created PR to ${base_ref} Branch,Request for ${base_ref} Branch,Approved for ${base_ref} Branch" diff --git a/.github/workflows/pr_cherrypick_prestep.yml b/.github/workflows/pr_cherrypick_prestep.yml new file mode 100644 index 000000000000..3caf3f940843 --- /dev/null +++ b/.github/workflows/pr_cherrypick_prestep.yml @@ -0,0 +1,136 @@ +name: PreCherryPick +on: + pull_request_target: + types: + - labeled + - closed + branches: + - master-test + +jobs: + pre_cherry_pick: + if: github.event.pull_request.merged == true && ( (github.event.action == 'closed' && contains(join(github.event.pull_request.labels.*.name, ','), 'Approved for 20')) || (github.event.action == 'labeled' && startsWith(github.event.label.name, 'Approved for 20')) ) + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v3 + with: + fetch-depth: 0 + persist-credentials: false + - name: Debug + env: + GITHUB_CONTEXT: ${{ toJson(github) }} + run: echo $GITHUB_CONTEXT | jq + - name: Main + env: + GITHUB_CONTEXT: ${{ toJson(github) }} + TOKEN: ${{ secrets.TOKEN }} + run: | + set -e + + sha=$(echo $GITHUB_CONTEXT | jq -r ".event.pull_request.merge_commit_sha") + pr_id=$(echo $GITHUB_CONTEXT | jq -r ".event.number") + pr_url=$(echo $GITHUB_CONTEXT | jq -r ".event.pull_request._links.html.href") + repository=$(echo $GITHUB_CONTEXT | jq -r ".repository") + labels=$(echo $GITHUB_CONTEXT | jq -r ".event.pull_request.labels[].name") + author=$(echo $GITHUB_CONTEXT | jq -r ".event.pull_request.base.user.login") + branches=$(git branch -a --list 'origin/20????' 
| awk -F/ '{print$3}' | grep -E "202[0-9]{3}") + if [[ $(echo $GITHUB_CONTEXT | jq -r ".event.action") == "labeled" ]];then + labels=$(echo $GITHUB_CONTEXT | jq -r ".event.label.name") + fi + title=$(echo $GITHUB_CONTEXT | jq -r ".event.pull_request.title") + echo ============================= + echo SHA: $sha + echo PRID: $pr_id + echo pr_url: $pr_url + echo repository: $repository + echo branches: $branches + echo labels: + echo "$labels" + echo ${TOKEN} | gh auth login --with-token + echo author: $author + echo title: $title + echo ============================= + + git config user.name mssonicbld + git config user.email sonicbld@microsoft.com + git config credential.https://github.aaakk.us.kg.username mssonicbld + git remote add mssonicbld https://mssonicbld:${TOKEN}@github.com/mssonicbld/sonic-buildimage + git fetch mssonicbld + git remote -vv + + cherry_pick(){ + set -e + local create_pr='' + while read label + do + echo label: $label + if [[ "$label" == "Approved for $branch Branch" ]];then + create_pr=1 + fi + if [[ "$label" == "Created PR to $branch Branch" ]];then + echo "already has tag: Created PR to $branch Branch, return" + return 0 + fi + if [[ "$label" == "Included in $branch Branch" ]];then + echo "already has tag: Included in $branch Branch, return" + return 0 + fi + if [[ "$label" == "Cherry Pick Conflict_$branch" ]];then + echo "already has tag: Cherry Pick Conflict_$branch, return" + return 0 + fi + done <<< "$labels" + + if [[ "$create_pr" != "1" ]];then + echo "Didn't find 'Approved for $branch Branch' tag." + return 0 + fi + # Begin to cherry-pick PR + git cherry-pick --abort 2>/dev/null || true + git clean -xdff 2>/dev/null || true + git reset HEAD --hard || true + git checkout -b $branch --track origin/$branch + git status | grep "working tree clean" + + if ! git cherry-pick $sha;then + echo 'cherry-pick failed.' + git cherry-pick --abort + git status | grep "working tree clean" + # Add label + gh pr edit $pr_url --add-label "Cherry Pick Conflict_$branch" + echo 'Add label "Cherry Pick Conflict_$branch" success' + gh pr comment $pr_url --body "@${author} PR conflicts with $branch branch" + echo 'Add commnet "@${author} PR conflicts with $branch branch"' + else + # Create PR to release branch + git push mssonicbld HEAD:$branch-${pr_id} -f + result=$(gh pr create -R ${repository} -H mssonicbld:$branch-${pr_id} -B $branch -t "[action] [PR:$pr_id] $title" -b '' 2>&1) + echo $result | grep "already exists" && { echo $result; return 0; } + echo $result | grep github.com || { echo $result; return 1; } + new_pr_rul=$(echo $result | grep github.com) + echo new_pr_rul: $new_pr_rul + + # Add label to old PR + gh pr edit $pr_url --add-label "Created PR to $branch Branch" + echo Add label Created PR to $branch Branch + # Add comment to old PR + gh pr comment $pr_url --body "Cherry-pick PR to $branch: ${new_pr_rul}" + echo Add comment to old PR + + # Add label to new PR + gh pr edit $new_pr_rul --add-label "automerge" + echo Add label automerge to new PR + # Add comment to new PR + gh pr comment $new_pr_rul --body "Original PR: ${pr_url}" + echo Add comment to new PR + fi + } + + for branch in $branches + do + echo ------------------------------------------- + echo Begin to parse Branch: $branch + cherry_pick + done + From 660a38e392a266945bf7a762db7d785507814511 Mon Sep 17 00:00:00 2001 From: Oleksandr Ivantsiv Date: Mon, 10 Oct 2022 19:15:14 +0300 Subject: [PATCH 19/19] [gnmi] submodule update. Update gnmi poiter to the latest bluefield branch. 
---
 src/sonic-gnmi | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/sonic-gnmi b/src/sonic-gnmi
index 194ecd0896a0..957d84dd992e 160000
--- a/src/sonic-gnmi
+++ b/src/sonic-gnmi
@@ -1 +1 @@
-Subproject commit 194ecd0896a0e698e22dc2320e5087e9b7de3d06
+Subproject commit 957d84dd992ebcfbb38f7f9d5f38d6c1246efc25
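
For readers less familiar with how a submodule bump like the patch above is produced: the superproject records only a gitlink (the pinned commit SHA) for src/sonic-gnmi, so updating the pointer amounts to checking out the desired commit inside the submodule and committing the changed gitlink in the parent repository. A minimal sketch, assuming the target commit is the 957d84d SHA recorded in the diff above:

```bash
# Bring the submodule's refs up to date and check out the target commit
git -C src/sonic-gnmi fetch origin
git -C src/sonic-gnmi checkout 957d84dd992ebcfbb38f7f9d5f38d6c1246efc25

# Stage the updated gitlink in the superproject and confirm what is now pinned
git add src/sonic-gnmi
git submodule status src/sonic-gnmi    # prints the commit recorded for the submodule

# Commit; the resulting diff is the one-line "Subproject commit" change shown above
git commit -m "[gnmi] submodule update"
```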