# mirror of https://gitlab.isc.org/isc-projects/bind9.git
# synced 2025-12-05 18:57:04 -06:00
#
# In !9155, the QNAME minimization was changed to not leak the query type
# to the parent name server. This violates RFC 9156 Section 3, step (3) and
# it is not necessary. It also breaks some (weird) authoritative DNS setups,
# especially when CNAMEs are involved. Also there is really no privacy leak
# with query type.
variables:
  # Not normally needed, but may be if some script uses `apt-get install`.
  DEBIAN_FRONTEND: noninteractive
  # Locale settings do not affect the build, but might affect tests.
  LC_ALL: C

  # enable junk filling via jemalloc option
  MALLOC_CONF: "junk:true"

  # automated commits will inherit identification from pipeline trigger
  GIT_AUTHOR_NAME: "$GITLAB_USER_NAME (GitLab job $CI_JOB_ID)"
  GIT_AUTHOR_EMAIL: "$GITLAB_USER_EMAIL"
  GIT_COMMITTER_NAME: "$GIT_AUTHOR_NAME"
  GIT_COMMITTER_EMAIL: "$GIT_AUTHOR_EMAIL"

  CI_REGISTRY_IMAGE: registry.gitlab.isc.org/isc-projects/images/bind9
  CCACHE_DIR: "/ccache"

  GIT_DEPTH: 1
  GIT_CLEAN_FLAGS: -ffdxq

  # The following values may be overwritten in GitLab's CI/CD Variables Settings.
  BUILD_PARALLEL_JOBS: 6
  TEST_PARALLEL_JOBS: 4

  CLANG_VERSION: 21
  CLANG: "clang-${CLANG_VERSION}"
  SCAN_BUILD: "scan-build-${CLANG_VERSION}"
  LLVM_SYMBOLIZER: "/usr/lib/llvm-${CLANG_VERSION}/bin/llvm-symbolizer"
  CLANG_FORMAT: "clang-format-${CLANG_VERSION}"

  CFLAGS_COMMON: -fno-omit-frame-pointer -fno-optimize-sibling-calls

  UBASAN_CONFIGURE_FLAGS_COMMON: "-Db_sanitize=address,undefined -Didn=enabled -Djemalloc=disabled -Dtracing=disabled"
  TSAN_CONFIGURE_FLAGS_COMMON: "-Db_sanitize=thread -Doptimization=2 -Ddebug=true -Didn=enabled -Dlocktype=system -Djemalloc=disabled --pkg-config-path /opt/tsan/lib/pkgconfig"

  # Pass run-time flags to AddressSanitizer to get core dumps on error.
  ASAN_OPTIONS: abort_on_error=1:disable_coredump=0:unmap_shadow_on_exit=1:detect_odr_violation=0
  ASAN_SYMBOLIZER_PATH: "${LLVM_SYMBOLIZER}"

  TSAN_OPTIONS_COMMON: "disable_coredump=0 second_deadlock_stack=1 atexit_sleep_ms=1000 history_size=7 log_exe_name=true log_path=tsan"
  TSAN_SUPPRESSIONS: "suppressions=${CI_PROJECT_DIR}/.tsan-suppress"
  TSAN_OPTIONS_DEBIAN: "${TSAN_OPTIONS_COMMON} ${TSAN_SUPPRESSIONS} external_symbolizer_path=${LLVM_SYMBOLIZER}"
  TSAN_OPTIONS_FEDORA: "${TSAN_OPTIONS_COMMON} ${TSAN_SUPPRESSIONS} external_symbolizer_path=/usr/bin/llvm-symbolizer"

  UBSAN_OPTIONS: "halt_on_error=1:abort_on_error=1:disable_coredump=0"

  WITHOUT_LIBEDIT: "-Dline=disabled"
  WITH_LIBEDIT: "-Dline=enabled"

  STRESS_CONFIGURE_FLAGS: "-Doptimization=g -Dcmocka=disabled"

  INSTALL_PATH: "${CI_PROJECT_DIR}/.local"

  # In multithreaded unit tests, abort on the first failure
  CMOCKA_TEST_ABORT: 1

  # Disable pytest's "cacheprovider" plugin to prevent it from creating
  # cross-testrun files as there is no need to use that feature in CI.
  PYTEST_ADDOPTS: "-p no:cacheprovider"

  HYPOTHESIS_PROFILE: "ci"

  # Some jobs may clean up the build artifacts unless this is set to 0.
  CLEAN_BUILD_ARTIFACTS_ON_SUCCESS: 1

  # DNS Shotgun performance testing defaults
  SHOTGUN_ROUNDS: 1
  SHOTGUN_DURATION: 120
  # allow unlimited improvements against baseline
  SHOTGUN_EVAL_THRESHOLD_CPU_MIN: '-inf'
  SHOTGUN_EVAL_THRESHOLD_MEMORY_MIN: '-inf'
  SHOTGUN_EVAL_THRESHOLD_RCODE_MAX: '+inf'
  SHOTGUN_EVAL_THRESHOLD_LATENCY_PCTL_MIN: '-inf'
  SHOTGUN_EVAL_THRESHOLD_LATENCY_PCTL_DRIFT_MIN: '-inf'

  # Even though there's only one job per runtime environment, the GitLab
  # "instance" executor insists on cloning the Git repository to a path that
  # contains a variable number from zero to the "maximum concurrent instances
  # count" allowed on the GitLab Runner. See the "0" directory in this example
  # path: /home/ec2-user/builds/t1_4FZzvz/0/isc-projects/bind9/.git/.
  #
  # This is not a problem for isolated jobs like "stress" tests that depend on
  # no other jobs. However, it is a problem for jobs that need other jobs'
  # artifacts. For example, a system test job that has its Git repo cloned to
  # the "/1/" sub-path will fail if it downloads build job artifacts that have
  # ./configure output files with "/0/" in its sub-path recorded.
  GIT_CLONE_PATH_INSTANCE_EXECUTOR: "/home/ec2-user/builds/${CI_PROJECT_PATH}/"
default:
  # Allow all running CI jobs to be automatically canceled when a new
  # version of a branch is pushed.
  #
  # See: https://docs.gitlab.com/ee/ci/pipelines/settings.html#auto-cancel-redundant-pipelines
  interruptible: true

  # AWS can interrupt the spot instance anytime, so let's retry the job when
  # the interruption event happens to avoid a pipeline failure.
  retry:
    max: 2
    when:
      - runner_system_failure
# Pipeline stage ordering; jobs in later stages only start after earlier
# stages have finished (subject to per-job "needs").
stages:
  - precheck
  - build
  - unit
  - system
  - performance
  - docs
  - postcheck
  - postmerge
  - release
### Runner Tag Templates

# AlmaLinux autoscaling GitLab Runners on AWS EC2 (amd64)

.almalinux-8fips-amd64-image: &almalinux_8fips_amd64_image
  tags:
    - almalinux-8
    - amd64
    - autoscaler
    - aws
    - fips
    - shell

.almalinux-9fips-amd64-image: &almalinux_9fips_amd64_image
  tags:
    - almalinux-9
    - amd64
    - autoscaler
    - aws
    - fips
    - shell

.almalinux-10fips-amd64-image: &almalinux_10fips_amd64_image
  tags:
    - almalinux-10
    - amd64
    - autoscaler
    - aws
    - fips
    - shell

# Autoscaling GitLab Runner on AWS EC2 (amd64)

.linux-amd64: &linux_amd64
  tags:
    - linux
    - aws
    - runner-manager
    - amd64

# Autoscaling GitLab Runner on AWS EC2 (arm64)

.linux-arm64: &linux_arm64
  tags:
    - linux
    - aws
    - runner-manager
    - aarch64
.freebsd-autoscaler-13-amd64-tags: &freebsd_autoscaler_13_amd64_tags
  tags:
    - amd64
    - autoscaler
    - aws
    - bsd-stress-test-1
    - shell
    - stress-test

.freebsd-autoscaler-14-amd64-tags: &freebsd_autoscaler_14_amd64_tags
  tags:
    - amd64
    - autoscaler
    - aws
    - bsd-stress-test-2
    - shell
    - stress-test

# Shared build settings for the FreeBSD shell-executor runners.
.freebsd-autoscaler-amd64: &freebsd_autoscaler_amd64
  variables:
    CC: clang
    CFLAGS: "${CFLAGS_COMMON}"
    GIT_CLONE_PATH: "${GIT_CLONE_PATH_INSTANCE_EXECUTOR}"
    # Use MIT Kerberos5 for BIND 9 GSS-API support because of FreeBSD Heimdal
    # incompatibility; see https://bugs.freebsd.org/275241.
    EXTRA_CONFIGURE: "${WITH_LIBEDIT} -Doptimization=g --native-file ci/freebsd.ini"

# Autoscaling GitLab Runner on AWS EC2 (FreeBSD 13)

.freebsd-autoscaler-13-amd64: &freebsd_autoscaler_13_amd64
  <<: *freebsd_autoscaler_amd64
  <<: *freebsd_autoscaler_13_amd64_tags

# Autoscaling GitLab Runner on AWS EC2 (FreeBSD 14)

.freebsd-autoscaler-14-amd64: &freebsd_autoscaler_14_amd64
  <<: *freebsd_autoscaler_amd64
  <<: *freebsd_autoscaler_14_amd64_tags
### Docker Image Templates

# Alpine Linux

.alpine-3.22-amd64: &alpine_3_22_amd64_image
  image: "$CI_REGISTRY_IMAGE:alpine-3.22-amd64"
  <<: *linux_amd64

# AlmaLinux

.almalinux-8-amd64: &almalinux_8_amd64_image
  image: "$CI_REGISTRY_IMAGE:almalinux-8-amd64"
  <<: *linux_amd64

.almalinux-9-amd64: &almalinux_9_amd64_image
  image: "$CI_REGISTRY_IMAGE:almalinux-9-amd64"
  <<: *linux_amd64

.almalinux-10-amd64: &almalinux_10_amd64_image
  image: "$CI_REGISTRY_IMAGE:almalinux-10-amd64"
  <<: *linux_amd64

# Debian

.debian-bookworm-amd64: &debian_bookworm_amd64_image
  image: "$CI_REGISTRY_IMAGE:debian-bookworm-amd64"
  <<: *linux_amd64

.debian-trixie-amd64: &debian_trixie_amd64_image
  image: "$CI_REGISTRY_IMAGE:debian-trixie-amd64"
  <<: *linux_amd64

.tsan-debian-trixie-amd64: &tsan_debian_trixie_amd64_image
  image: "$CI_REGISTRY_IMAGE:tsan-debian-trixie-amd64"
  <<: *linux_amd64

.debian-trixie-amd64cross32: &debian_trixie_amd64cross32_image
  image: "$CI_REGISTRY_IMAGE:debian-trixie-amd64cross32"
  <<: *linux_amd64

.debian-sid-amd64: &debian_sid_amd64_image
  image: "$CI_REGISTRY_IMAGE:debian-sid-amd64"
  <<: *linux_amd64

# openSUSE Tumbleweed

.tumbleweed-latest-amd64: &tumbleweed_latest_amd64_image
  image: "$CI_REGISTRY_IMAGE:tumbleweed-latest-amd64"
  <<: *linux_amd64

# Fedora

.tsan-fedora-43-amd64: &tsan_fedora_43_amd64_image
  image: "$CI_REGISTRY_IMAGE:tsan-fedora-43-amd64"
  <<: *linux_amd64

.fedora-43-amd64: &fedora_43_amd64_image
  image: "$CI_REGISTRY_IMAGE:fedora-43-amd64"
  <<: *linux_amd64

.fedora-43-arm64: &fedora_43_arm64_image
  image: "$CI_REGISTRY_IMAGE:fedora-43-arm64"
  <<: *linux_arm64

# Ubuntu

.ubuntu-jammy-amd64: &ubuntu_jammy_amd64_image
  image: "$CI_REGISTRY_IMAGE:ubuntu-jammy-amd64"
  <<: *linux_amd64

.ubuntu-noble-amd64: &ubuntu_noble_amd64_image
  image: "$CI_REGISTRY_IMAGE:ubuntu-noble-amd64"
  <<: *linux_amd64

# Base image
# This is a meta image that is used as a base for non-specific jobs

.base: &base_image
  <<: *debian_trixie_amd64_image
### Job Templates

.rule_mr_code: &rule_mr_code
  - if: '$CI_MERGE_REQUEST_DIFF_BASE_SHA != null'
    changes:
      - '**/*.c'
      - '**/*.h'
      - '**/meson.build'

.rule_mr_shell: &rule_mr_shell
  - if: '$CI_MERGE_REQUEST_DIFF_BASE_SHA != null'
    changes:
      - '**/*.sh'
      - '**/*.sh.in'
      - 'bin/tests/system/org.isc.bind.system'

.rule_mr_python: &rule_mr_python
  - if: '$CI_MERGE_REQUEST_DIFF_BASE_SHA != null'
    changes:
      - '**/*.py'

.rule_mr_system_tests: &rule_mr_system_tests
  - if: '$CI_MERGE_REQUEST_DIFF_BASE_SHA != null'
    changes:
      - 'bin/tests/system/**/*'

.rule_mr_manual: &rule_mr_manual
  - if: '$CI_MERGE_REQUEST_DIFF_BASE_SHA != null'
    when: manual # only run on MR if requested
    allow_failure: true # don't block the pipeline or the pipeline result

.rule_tag: &rule_tag
  - if: '$CI_PROJECT_NAMESPACE == "isc-private" && $CI_COMMIT_TAG != null'

.rule_tag_open_source: &rule_tag_open_source
  - if: '$CI_PROJECT_NAMESPACE == "isc-private" && $CI_COMMIT_TAG != null && $CI_COMMIT_TAG !~ /-S/'

.rule_tag_security: &rule_tag_security
  - if: '$CI_PROJECT_NAMESPACE == "isc-private" && $CI_COMMIT_TAG != null && $RELEASE_TYPE == "security"'

.rule_tag_security_or_subscription: &rule_tag_security_or_subscription
  - if: '$CI_PROJECT_NAMESPACE == "isc-private" && $CI_COMMIT_TAG != null && ($RELEASE_TYPE == "security" || $CI_COMMIT_TAG =~ /-S/)'

.rule_source_other_than_mr: &rule_source_other_than_mr
  - if: '$CI_PIPELINE_SOURCE =~ /^(api|pipeline|schedule|trigger|web)$/ && $REBASE_ONLY != "1"'

.rule_source_all: &rule_source_all
  - if: '$CI_PIPELINE_SOURCE =~ /^(api|merge_request_event|pipeline|schedule|trigger|web)$/ && $REBASE_ONLY != "1"'

.api-pipelines-schedules-tags-triggers-web-triggering-rules: &api_pipelines_schedules_tags_triggers_web_triggering_rules
  rules:
    - *rule_tag
    - *rule_source_other_than_mr

.default-triggering-rules_list: &default_triggering_rules_list
  - *rule_tag
  - *rule_source_all

.default-triggering-rules: &default_triggering_rules
  rules:
    - *default_triggering_rules_list

.code-triggering-rules: &code_triggering_rules
  rules:
    - *rule_mr_code
    - *rule_mr_manual
    - *rule_tag
    - *rule_source_other_than_mr

.shell-triggering-rules: &shell_triggering_rules
  rules:
    - *rule_mr_shell
    - *rule_mr_manual
    - *rule_tag
    - *rule_source_other_than_mr

.python-triggering-rules: &python_triggering_rules
  rules:
    - *rule_mr_python
    - *rule_mr_manual
    - *rule_tag
    - *rule_source_other_than_mr

.extra-system-tests-triggering-rules: &extra_system_tests_triggering_rules
  rules:
    - *rule_tag
    - *rule_source_other_than_mr
    - *rule_mr_system_tests
# Base template for all precheck-stage jobs.
.precheck: &precheck_job
  <<: *default_triggering_rules
  <<: *base_image
  stage: precheck
# Shared meson setup invocation; jobs append per-job flags via $EXTRA_CONFIGURE.
.configure: &configure
  - meson setup
    --libdir=lib
    -Dcmocka=enabled
    -Ddeveloper=enabled
    -Dleak-detection=enabled
    -Doptimization=1
    -Dnamed-lto=thin
    $EXTRA_CONFIGURE
    build
# change directory to the workspace before including this
.find_python: &find_python
  - PYTHON="$(cat build/bin/tests/system/isctest/vars/.build_vars/PYTHON)"
  - test -x "$PYTHON"

.find_pytest: &find_pytest
  - PYTEST="$(cat build/bin/tests/system/isctest/vars/.build_vars/PYTEST)"
  - test -x "$PYTEST"

.parse_tsan: &parse_tsan
  - *find_python
  - find -name 'tsan.*' -exec "$PYTHON" util/parse_tsan.py {} \;

# Verify that config.h agrees with the requested libedit setting.
.check_readline_setup: &check_readline_setup
  - if [[ -n "${WITHOUT_LIBEDIT}" ]]; then
    ! grep "^#define HAVE_LIBEDIT" build/config.h;
    elif [[ -n "${WITH_LIBEDIT}" ]]; then
    grep -e "^#define HAVE_LIBEDIT" build/config.h;
    fi

# Dump installed package versions into a collapsed GitLab log section,
# trying each distro's package manager in turn.
.list_installed_package_versions: &list_installed_package_versions
  - echo -e "\e[0Ksection_start:`date +%s`:installed_packages_section[collapsed=true]\r\e[0KHeader of the installed packages collapsible section"
  - ( pip3 list || pip list || echo "no pip" ) 2>/dev/null
  - for cmd in "apk info --verbose" "dpkg-query --show --showformat='\${Package}-\${Version}\n'" "pkg info --quiet" "rpm -qa | sort"; do
    eval "$cmd" 2>/dev/null && break;
    done || true
  - echo -e "\e[0Ksection_end:`date +%s`:installed_packages_section\r\e[0K"

# Unpack release tarball and continue work in the extracted directory.
.unpack_release_tarball: &unpack_release_tarball
  - tar --extract --file build/meson-dist/bind-*.tar.xz
  - rm -f build/meson-dist/bind-*.tar.xz
  - cd bind-*
# If the build has FIPS-mode support, require the kernel to be in FIPS mode.
.fips-feature-test: &fips_feature_test
  - if build/feature-test --have-fips-mode; then
    if [ "$(cat /proc/sys/crypto/fips_enabled)" = "1" ]; then
    echo "FIPS is enabled";
    else
    echo "FIPS is disabled";
    exit 1;
    fi
    fi

.check_for_junit_xml: &check_for_junit_xml
  # test if junit.xml file exists and is longer 40 bytes
  # (i.e., contains more than `<testsuites><testsuite /></testsuites>`)
  - if [ -f "$CI_PROJECT_DIR"/junit.xml ]; then
    if [ $(wc -c < "$CI_PROJECT_DIR"/junit.xml) -gt 40 ]; then
    echo "junit.xml file exists and is longer than 40 bytes.";
    else
    echo "junit.xml file exists but is too short.";
    exit 1;
    fi;
    else
    echo "junit.xml file does not exist.";
    exit 1;
    fi
# Common template for all build jobs: configure, compile, build test
# prerequisites, optionally install, then sanity-check the binary.
.build: &build_job
  <<: *default_triggering_rules
  stage: build
  before_script:
    - test -w "${CCACHE_DIR}" && export PATH="/usr/lib/ccache:${PATH}"
    - *list_installed_package_versions
  script:
    - *configure
    - *check_readline_setup
    - meson compile -C build
    - meson compile -C build system-test-dependencies
    - ninja -C build meson-test-prereq
    - test -z "${RUN_MESON_INSTALL}" || meson install -C build --destdir=$INSTALL_PATH
    - test -z "${RUN_MESON_INSTALL}" || DESTDIR="${INSTALL_PATH}" sh build/util/check-make-install.sh
    #- test -z "${CROSS_COMPILATION}" || grep -F -A 1 "checking whether we are cross compiling" config.log | grep -q "result.*yes"
    - test -z "${CROSS_COMPILATION}" || file build/lib/dns/gen | grep -F -q "ELF 64-bit LSB"
    #- test -z "${CROSS_COMPILATION}" || ( ! git ls-files -z --others --exclude lib/dns/gen | xargs -0 file | grep "ELF 64-bit LSB" )
    - build/named -V
    - *fips_feature_test
  needs: []
  artifacts:
    untracked: true
    when: always
# Bring up the loopback aliases used by system tests; use sudo when not root.
.setup_interfaces: &setup_interfaces
  - if [ "$(id -u)" -eq "0" ]; then
    sh -x build/bin/tests/system/ifconfig.sh up;
    else
    sudo sh -x build/bin/tests/system/ifconfig.sh up;
    fi

# Extract the FAILURES and ERRORS sections from the captured pytest output.
.display_pytest_failures: &display_pytest_failures
  - awk '/^=+ FAILURES =+/{flag=1;next}/^=+.*=+$/{flag=0}flag' bin/tests/system/pytest.out.txt || true
  - awk '/^=+ ERRORS =+/{flag=1;next}/^=+.*=+$/{flag=0}flag' bin/tests/system/pytest.out.txt || true
# Trigger a DNS Shotgun performance-comparison pipeline in a separate project
# and wait for its result.
.shotgun: &shotgun_job
  <<: *base_image
  stage: performance
  rules: # FIXME disabled shotgun jobs temporarily due to infra issue
    # - if: '$CI_MERGE_REQUEST_DIFF_BASE_SHA != null'
    #   changes:
    #     - '**/*.c'
    #     - '**/*.h'
    #   variables:
    #     BASELINE: '$CI_MERGE_REQUEST_DIFF_BASE_SHA'
    - &shotgun_rule_mr_manual
      if: '$CI_MERGE_REQUEST_DIFF_BASE_SHA != null'
      variables:
        BASELINE: '$CI_MERGE_REQUEST_DIFF_BASE_SHA'
      when: manual # don't run on each MR unless requested
      allow_failure: true
    # - &shotgun_rule_tag
    #   if: '$CI_PROJECT_NAMESPACE == "isc-private" && $CI_COMMIT_TAG != null'
    #   variables:
    #     SHOTGUN_ROUNDS: 3
    # - &shotgun_rule_other
    #   if: '$CI_PIPELINE_SOURCE =~ /^(api|pipeline|schedule|trigger|web)$/ && $REBASE_ONLY != "1"'
  # when using data from a single run, the overall instability of the results
  # causes quite high false positive rate, rerun the test to attempt to reduce those
  retry: 1
  script:
    - if [ -z "$BASELINE" ]; then export BASELINE=$BIND_BASELINE_VERSION; fi # this dotenv variable can't be set in the rules section, because rules are evaluated before any jobs run
    - PIPELINE_ID=$(curl -s -X POST --fail
      -F "token=$CI_JOB_TOKEN"
      -F ref=main
      -F "variables[SHOTGUN_TEST_VERSION]=['$CI_COMMIT_REF_NAME', '$BASELINE']"
      -F "variables[SHOTGUN_DURATION]=300"
      -F "variables[SHOTGUN_ROUNDS]=$SHOTGUN_ROUNDS"
      -F "variables[SHOTGUN_TRAFFIC_MULTIPLIER]=$SHOTGUN_TRAFFIC_MULTIPLIER"
      -F "variables[SHOTGUN_SCENARIO]=$SHOTGUN_SCENARIO"
      -F "variables[SHOTGUN_EVAL_THRESHOLD_CPU_MIN]=$SHOTGUN_EVAL_THRESHOLD_CPU_MIN"
      -F "variables[SHOTGUN_EVAL_THRESHOLD_CPU_MAX]=$SHOTGUN_EVAL_THRESHOLD_CPU_MAX"
      -F "variables[SHOTGUN_EVAL_THRESHOLD_MEMORY_MIN]=$SHOTGUN_EVAL_THRESHOLD_MEMORY_MIN"
      -F "variables[SHOTGUN_EVAL_THRESHOLD_MEMORY_MAX]=$SHOTGUN_EVAL_THRESHOLD_MEMORY_MAX"
      -F "variables[SHOTGUN_EVAL_THRESHOLD_RCODE_MIN]=$SHOTGUN_EVAL_THRESHOLD_RCODE_MIN"
      -F "variables[SHOTGUN_EVAL_THRESHOLD_RCODE_MAX]=$SHOTGUN_EVAL_THRESHOLD_RCODE_MAX"
      -F "variables[SHOTGUN_EVAL_THRESHOLD_LATENCY_PCTL_MIN]=$SHOTGUN_EVAL_THRESHOLD_LATENCY_PCTL_MIN"
      -F "variables[SHOTGUN_EVAL_THRESHOLD_LATENCY_PCTL_MAX]=$SHOTGUN_EVAL_THRESHOLD_LATENCY_PCTL_MAX"
      -F "variables[SHOTGUN_EVAL_THRESHOLD_LATENCY_PCTL_DRIFT_MIN]=$SHOTGUN_EVAL_THRESHOLD_LATENCY_PCTL_DRIFT_MIN"
      -F "variables[SHOTGUN_EVAL_THRESHOLD_LATENCY_PCTL_DRIFT_MAX]=$SHOTGUN_EVAL_THRESHOLD_LATENCY_PCTL_DRIFT_MAX"
      https://gitlab.isc.org/api/v4/projects/188/trigger/pipeline | jq .id)
    - util/ci-wait-shotgun.py $PIPELINE_ID
  needs:
    - job: ci-variables
      artifacts: true
  timeout: 2h
.system_test_common: &system_test_job
  <<: *default_triggering_rules
  stage: system
  before_script:
    - *setup_interfaces
  # This script needs to: 1) fail if the system tests fail, 2) fail if
  # the junit.xml file is broken, 3) produce the junit.xml file even if
  # the system tests fail. Therefore, $RET is used to "cache" the
  # result of running pytest as interrupting the script immediately when
  # system tests fail would make checking the contents of the junit.xml
  # file impossible (GitLab Runner uses "set -o pipefail").
  script:
    - *fips_feature_test
    - *find_pytest
    - *find_python
    - ( if [ "${CI_DISPOSABLE_ENVIRONMENT}" = "true" ]; then sleep 3000; "$PYTHON" "${CI_PROJECT_DIR}/util/get-running-system-tests.py"; fi ) &
    - cd bin/tests/system
    - RET=0
    - >
      ("$PYTEST" --junit-xml="$CI_PROJECT_DIR"/junit.xml -n "$TEST_PARALLEL_JOBS" | tee pytest.out.txt) || RET=1
    - *check_for_junit_xml
    - (exit $RET)
    - '( ! grep -F "grep: warning:" pytest.out.txt )'
    - test "$CLEAN_BUILD_ARTIFACTS_ON_SUCCESS" -eq 0 || ( cd ../../.. && ninja -C build clean >/dev/null 2>&1 )
  after_script:
    - *display_pytest_failures
  artifacts:
    untracked: true
    exclude:
      - "**/__pycache__/**/*"
    when: always
    reports:
      junit: junit.xml

# Same as .system_test_common, but also parse ThreadSanitizer reports.
.system_test_tsan: &system_test_tsan_job
  <<: *system_test_job
  after_script:
    - *display_pytest_failures
    - *parse_tsan
.unit_test_common: &unit_test_job
  <<: *default_triggering_rules
  stage: unit
  # This script needs to: 1) fail if the unit tests fail, 2) fail if the
  # junit.xml file is broken, 3) produce the junit.xml file even if the
  # unit tests fail. Therefore, $RET is used to "cache" the result of
  # running "meson test" as interrupting the script immediately when
  # unit tests fail would make checking the contents of the junit.xml
  # file impossible (GitLab Runner uses "set -o pipefail").
  script:
    - *fips_feature_test
    - RET=0
    - meson test -C build --no-rebuild --no-suite flaky || RET=1
    - cp build/meson-logs/testlog.junit.xml $CI_PROJECT_DIR/junit.xml
    # Retry the flaky suite once before treating its failure as fatal.
    - meson test -C build --no-rebuild --suite flaky --logbase testlog-flaky || meson test -C build --no-rebuild --suite flaky --logbase testlog-flaky || RET=1
    - *check_for_junit_xml
    - (exit $RET)
    - test "$CLEAN_BUILD_ARTIFACTS_ON_SUCCESS" -eq 0 || ninja -C build clean >/dev/null 2>&1
  artifacts:
    untracked: true
    when: always
    reports:
      junit:
        - junit.xml
        - build/meson-logs/testlog-flaky.junit.xml

# Same as .unit_test_common, but also parse ThreadSanitizer reports.
.unit_test_tsan: &unit_test_tsan_job
  <<: *unit_test_job
  after_script:
    - *parse_tsan
# Common template for documentation builds (ARM, EPUB, man pages) with an
# optional doc/misc consistency check gated by $DOC_CHECK_MISC_CHANGE.
.docs: &docs_job
  stage: docs
  script:
    - *configure
    - test -z "${DOC_CHECK_MISC_CHANGE}" || ninja -C build doc-misc
    - test -z "${DOC_CHECK_MISC_CHANGE}" || cp build/doc/misc/options build/doc/misc/rndc.grammar build/doc/misc/*.zoneopt doc/misc/
    - test -z "${DOC_CHECK_MISC_CHANGE}" || git diff > doc-misc.patch
    - test -z "${DOC_CHECK_MISC_CHANGE}" || if test "$(git status --porcelain --untracked-files=no | wc -l)" -gt "0"; then git status --short; exit 1; fi
    - meson compile -C build arm arm-epub man
    - find build/man/ -maxdepth 2 -name "*.[0-9]" -exec mandoc -T lint "{}" \; | ( ! grep -v -e "skipping paragraph macro. sp after" -e "unknown font, skipping request. ft C" -e "input text line longer than 80 bytes" )
# Common template for response-difference testing against bind9-qa.
.respdiff: &respdiff_job
  <<: *code_triggering_rules
  stage: system
  before_script:
    - *configure
    - meson compile -C build
    - *setup_interfaces
    - git clone --depth 1 https://gitlab.isc.org/isc-projects/bind9-qa.git
    - cd bind9-qa/respdiff
  needs: []
  artifacts:
    paths:
      - bind9-qa/respdiff
    exclude:
      - bind9-qa/respdiff/rspworkdir/data.mdb # Exclude a 10 GB file.
    untracked: true
    when: always
### Job Definitions

# Jobs in the precheck stage

misc:
  <<: *precheck_job
  script:
    - sh util/checklibs.sh > checklibs.out
    - sh util/check-categories.sh
    - sh util/check-gitignore.sh
    - sh util/check-trailing-whitespace.sh
    - bash util/unused-headers.sh
    # Check dangling symlinks in the repository
    - if find . -xtype l | grep .; then exit 1; fi
    - muon-meson analyze -Werror
  needs: []
  artifacts:
    paths:
      - checklibs.out
    when: on_failure
# Python formatting check; publishes the needed patch on failure.
black:
  <<: *precheck_job
  <<: *python_triggering_rules
  needs: []
  script:
    - black $(git ls-files '*.py')
    - git diff > black.patch
    - if test "$(git status --porcelain | grep -Ev '\?\?' | wc -l)" -gt "0"; then git status --short; exit 1; fi
  artifacts:
    paths:
      - black.patch
    expire_in: "1 week"
    when: on_failure

# Dead-code detection for Python system tests.
vulture:
  <<: *precheck_job
  <<: *python_triggering_rules
  needs: []
  script:
    - vulture --exclude "*ans.py,conftest.py,isctest" --ignore-names "after_servers_start,bootstrap,pytestmark" bin/tests/system/
# Determine the baseline BIND version for comparison jobs and export it
# to dependent jobs via a dotenv artifact.
ci-variables:
  <<: *precheck_job
  script:
    # When testing a .0 release, compare it against the previous development
    # release (e.g., 9.19.0 and 9.18.0 should both be compared against 9.17.22).
    - export BIND_BASELINE_TAG=$(meson introspect meson.build --projectinfo | ./util/select-baseline-version.jq)
    - BIND_BASELINE_VERSION="$(curl -s -G -d "version=^${BIND_BASELINE_TAG}" -d "order_by=version" "https://gitlab.isc.org/api/v4/projects/1/repository/tags" | jq -r '.[0].name')"
    - echo "BIND_BASELINE_VERSION=$BIND_BASELINE_VERSION" >> ci_vars.env
  artifacts:
    reports:
      dotenv: ci_vars.env

# Detect YAML anchors in this file that are never referenced.
ci-orphaned-anchors:
  <<: *precheck_job
  script:
    - git clone --depth 1 https://gitlab.isc.org/isc-projects/bind9-qa.git
    - bind9-qa/ci-orphaned-anchors/check-orphaned-anchors-ci.py .gitlab-ci.yml
  needs: []
  rules:
    - if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
      changes:
        - .gitlab-ci.yml
# C formatting check; publishes the needed patch on failure.
clang-format:
  <<: *precheck_job
  rules:
    - if: '$CI_MERGE_REQUEST_DIFF_BASE_SHA != null'
      changes:
        - '**/*.c'
        - '**/*.h'
        - '**/.clang-format'
        - '**/.clang-format.headers'
    - *rule_mr_manual
    - *rule_tag
    - *rule_source_other_than_mr
  needs: []
  script:
    - if [ -r .clang-format ]; then "${CLANG_FORMAT}" -i -style=file $(git ls-files '*.c' '*.h'); fi
    - git diff > clang-format.patch
    - if test "$(git status --porcelain | grep -Ev '\?\?' | wc -l)" -gt "0"; then git status --short; exit 1; fi
  artifacts:
    paths:
      - clang-format.patch
    expire_in: "1 week"
    when: on_failure
# Semantic-patch (Coccinelle) consistency check over the C sources.
coccinelle:
  <<: *precheck_job
  rules:
    - if: '$CI_MERGE_REQUEST_DIFF_BASE_SHA != null'
      changes:
        - '**/*.c'
        - '**/*.h'
        - 'cocci/**'
        - 'util/check-cocci.sh'
    - *rule_mr_manual
    - *rule_tag
    - *rule_source_other_than_mr
  needs: []
  script:
    - util/check-cocci.sh
    - if test "$(git status --porcelain | grep -Ev '\?\?' | wc -l)" -gt "0"; then git status --short; exit 1; fi
# meson.build formatting check; publishes the needed patch on failure.
# NOTE(review): the original contained two "rules:" keys in this job
# (duplicate mapping keys are invalid YAML; most parsers silently keep the
# last one, which would have discarded the manual/tag/non-MR triggers).
# The duplicate has been merged into this single rules list.
meson-format:
  <<: *precheck_job
  rules:
    - if: '$CI_MERGE_REQUEST_DIFF_BASE_SHA != null'
      changes:
        - '**/meson.build'
    - *rule_mr_manual
    - *rule_tag
    - *rule_source_other_than_mr
  needs: []
  script:
    - git ls-files "*meson.build" | xargs muon-meson fmt -i
    - git diff > meson-format.patch
    - if test "$(git status --porcelain | grep -Ev '\?\?' | wc -l)" -gt "0"; then git status --short; exit 1; fi
  artifacts:
    paths:
      - meson-format.patch
    expire_in: "1 week"
    when: on_failure
# Run Python doctests in the isctest helper package.
doctest:
  <<: *precheck_job
  needs: []
  script:
    - *configure
    - meson compile -C build system-test-init
    - *find_pytest
    - cd bin/tests/system/isctest
    - >
      "$PYTEST" --noconftest --doctest-modules

pylint:
  <<: *precheck_job
  <<: *python_triggering_rules
  needs: []
  variables:
    PYTHONPATH: "${CI_PROJECT_DIR}/bin/tests/system"
  script:
    - pylint --rcfile $CI_PROJECT_DIR/.pylintrc $(git ls-files '*.py' | grep -vE '(ans\.py|dangerfile\.py|^bin/tests/system/|^contrib/)')
    # Ignore Pylint wrong-import-position error in system test to enable use of pytest.importorskip
    - pylint --rcfile $CI_PROJECT_DIR/.pylintrc --disable=wrong-import-position $(git ls-files 'bin/tests/system/*.py' | grep -vE '(ans\.py|vulture_ignore_list\.py)')
# SPDX/REUSE license-compliance check.
reuse:
  <<: *precheck_job
  needs: []
  image:
    name: docker.io/fsfe/reuse:latest
    entrypoint: [""]
  script:
    - reuse lint

# Shell-script formatting check; publishes the needed patch on failure.
shfmt:
  <<: *precheck_job
  <<: *shell_triggering_rules
  needs: []
  script:
    - shfmt -w -i 2 -ci -bn . $(find . -name "*.sh.in")
    - git diff > shfmt.patch
    - if test "$(git status --porcelain | grep -Ev '\?\?' | wc -l)" -gt "0"; then git status --short; exit 1; fi
  artifacts:
    paths:
      - shfmt.patch
    expire_in: "1 week"
    when: on_failure
# MR hygiene checks via the ISC "hazard" Danger-style tool.
danger:
  <<: *precheck_job
  needs: []
  script:
    - pip install git+https://gitlab.isc.org/isc-projects/hazard.git
    - hazard
  rules:
    - if: '$CI_PIPELINE_SOURCE == "merge_request_event"'

# POSIX-compliance check for scripts with a /bin/sh shebang.
checkbashisms:
  <<: *precheck_job
  <<: *shell_triggering_rules
  needs: []
  script:
    - checkbashisms $(find . -path './.git' -prune -o -type f -exec sh -c 'head -n 1 "{}" | grep -qsF "#!/bin/sh"' \; -print)
# Static type check for the isctest helper package.
mypy:
  <<: *precheck_job
  <<: *python_triggering_rules
  stage: precheck
  script:
    - mypy "bin/tests/system/isctest/"
# Build the release tarball and fail if dist generation dirtied the tree.
tarball-create:
  stage: precheck
  <<: *base_image
  <<: *default_triggering_rules
  script:
    - *configure
    - meson dist -C build --no-tests
    - if test "$(git status --porcelain | grep -Ev '\?\?' | wc -l)" -gt "0"; then git status --short; git diff > diff.patch; exit 1; fi
  artifacts:
    paths:
      - diff.patch
      - build/meson-dist/*.tar.xz
  needs: []
# Jobs for doc builds on Debian 13 "trixie" (amd64)

# Generate a changelog entry from the MR title/description and verify the
# docs still build with it applied.
changelog:
  <<: *base_image
  <<: *docs_job
  rules:
    - if: '$CI_MERGE_REQUEST_TITLE =~ /\s(dev|usr|pkg):/'
  before_script:
    - echo -e "$CI_MERGE_REQUEST_TITLE\n" > commitmsg
    - sed -i 's/^Draft:\s*//' commitmsg
    - echo -e "$CI_MERGE_REQUEST_DESCRIPTION" >> commitmsg
    - git commit --allow-empty -F commitmsg
    - export CHANGELOG=$(ls doc/changelog/changelog-9.* | sort --version-sort | tail -n 1)
    - printf "\n" >> $CHANGELOG
    - ./contrib/gitchangelog/gitchangelog.py HEAD^..HEAD >> $CHANGELOG
  after_script:
    - git diff
  needs: []
  artifacts:
    untracked: true
linkcheck:
  <<: *base_image
  stage: docs
  script:
    # Some domains tested by linkchecker may think that we connect to them too
    # often and will refuse connection or reply with an error code, which
    # makes this job fail. Let's check links only on Wednesdays.
    - if [ "$(date +%w)" != "3" ]; then exit 0; fi
    - pushd doc/arm/ > /dev/null && sphinx-build -b linkcheck . linkcheck_output/
  artifacts:
    paths:
      - doc/arm/linkcheck_output/
  rules:
    - if: '$CI_PIPELINE_SOURCE == "schedule"'
  needs: []
# Full documentation build from the Git tree, including the doc/misc check.
docs:
  <<: *default_triggering_rules
  <<: *base_image
  <<: *docs_job
  before_script:
    - *list_installed_package_versions
  variables:
    DOC_CHECK_MISC_CHANGE: 1
  needs: []
  artifacts:
    untracked: true
    when: always

# Documentation build from the release tarball produced by tarball-create.
docs:tarball:
  <<: *default_triggering_rules
  <<: *base_image
  <<: *docs_job
  before_script:
    - *unpack_release_tarball
  needs:
    - job: tarball-create
      artifacts: true
# Job detecting named.conf breakage introduced since the previous point release

cross-version-config-tests:
  stage: system
  <<: *base_image
  <<: *default_triggering_rules
  variables:
    CC: gcc
    CFLAGS: "${CFLAGS_COMMON}"
  script:
    - *configure
    - *setup_interfaces
    - meson compile -C build system-test-init system-test-dependencies
    - meson compile -C build
    - *find_pytest
    - git clone --branch "${BIND_BASELINE_VERSION}" --depth 1 https://gitlab.isc.org/isc-projects/bind9.git "bind-${BIND_BASELINE_VERSION}"
    - cd "bind-${BIND_BASELINE_VERSION}"
    # The cross-version-config-tests job would fail when a system test is
    # removed from the upcoming release. To avoid this, remove the system test
    # also from the $BIND_BASELINE_VERSION.
    - find bin/tests/system/ -mindepth 1 -maxdepth 1 -type d -exec sh -c 'test -e ../"$0" || rm -rfv -- "$0"' {} \;
    # @DYLIB@ is missing
    - cp ../bin/tests/system/isctest/vars/basic.py ./bin/tests/system/isctest/vars/basic.py
    - cp ../bin/tests/system/isctest/vars/.build_vars/TOP_BUILDDIR ./bin/tests/system/isctest/vars/.build_vars/TOP_BUILDDIR
    - echo "${CI_PROJECT_DIR}/bind-${BIND_BASELINE_VERSION}" > ../build/bin/tests/system/isctest/vars/.build_vars/TOP_SRCDIR
    - cd ./bin/tests/system
    # System tests that employ binary drivers will fail on ABI change and
    # should not be run.
    - rm -r dlzexternal
    - rm -r dyndb
    - >
      "$PYTEST" --setup-only --junit-xml="$CI_PROJECT_DIR"/junit.xml -n "${TEST_PARALLEL_JOBS:-1}"
  needs:
    - job: ci-variables
      artifacts: true
  artifacts:
    reports:
      junit: junit.xml
    paths:
      - bind-*
      - junit.xml
    untracked: true
    expire_in: "1 day"
    when: always
  allow_failure: true
# Jobs for regular GCC builds on Alpine Linux 3.22 (amd64)

gcc:alpine3.22:amd64:
  variables:
    CC: gcc
    CFLAGS: "${CFLAGS_COMMON}"
    EXTRA_CONFIGURE: "${WITHOUT_LIBEDIT}"
  <<: *alpine_3_22_amd64_image
  <<: *build_job

system:gcc:alpine3.22:amd64:
  <<: *alpine_3_22_amd64_image
  <<: *system_test_job
  needs:
    - job: gcc:alpine3.22:amd64
      artifacts: true

unit:gcc:alpine3.22:amd64:
  <<: *alpine_3_22_amd64_image
  <<: *unit_test_job
  needs:
    - job: gcc:alpine3.22:amd64
      artifacts: true

# Jobs for regular GCC builds on Alma Linux 8 (amd64)

gcc:almalinux8:amd64:
  variables:
    CC: gcc
    CFLAGS: "${CFLAGS_COMMON}"
    EXTRA_CONFIGURE: "-Didn=enabled"
  <<: *almalinux_8_amd64_image
  <<: *build_job

system:gcc:almalinux8:amd64:
  <<: *almalinux_8_amd64_image
  <<: *system_test_job
  <<: *extra_system_tests_triggering_rules
  needs:
    - job: gcc:almalinux8:amd64
      artifacts: true

unit:gcc:almalinux8:amd64:
  <<: *almalinux_8_amd64_image
  <<: *unit_test_job
  <<: *api_pipelines_schedules_tags_triggers_web_triggering_rules
  needs:
    - job: gcc:almalinux8:amd64
      artifacts: true

# Jobs for regular GCC builds on Alma Linux 9 (amd64)

gcc:almalinux9:amd64:
  variables:
    CC: gcc
    CFLAGS: "${CFLAGS_COMMON}"
    EXTRA_CONFIGURE: "-Didn=enabled -Ddeveloper=disabled"
  <<: *almalinux_9_amd64_image
  <<: *build_job

system:gcc:almalinux9:amd64:
  <<: *almalinux_9_amd64_image
  <<: *system_test_job
  needs:
    - job: gcc:almalinux9:amd64
      artifacts: true

unit:gcc:almalinux9:amd64:
  <<: *almalinux_9_amd64_image
  <<: *unit_test_job
  needs:
    - job: gcc:almalinux9:amd64
      artifacts: true

# Jobs for regular GCC builds on Alma Linux 10 (amd64)

gcc:almalinux10:amd64:
  variables:
    CC: gcc
    CFLAGS: "${CFLAGS_COMMON}"
    EXTRA_CONFIGURE: "-Didn=enabled"
  <<: *almalinux_10_amd64_image
  <<: *build_job

system:gcc:almalinux10:amd64:
  <<: *almalinux_10_amd64_image
  <<: *system_test_job
  needs:
    - job: gcc:almalinux10:amd64
      artifacts: true

unit:gcc:almalinux10:amd64:
  <<: *almalinux_10_amd64_image
  <<: *unit_test_job
  needs:
    - job: gcc:almalinux10:amd64
      artifacts: true

# Jobs for scheduled GCC builds on AlmaLinux 8 & 9 FIPS-aware images with FIPS
# mode in BIND 9 enabled

gcc:8fips:amd64:
  variables:
    CC: gcc
    CFLAGS: "${CFLAGS_COMMON}"
    EXTRA_CONFIGURE: "-Didn=enabled -Dfips=enabled -Dtracing=disabled"
    GIT_CLONE_PATH: "${GIT_CLONE_PATH_INSTANCE_EXECUTOR}"
  <<: *almalinux_8fips_amd64_image
  <<: *build_job

system:gcc:8fips:amd64:
  <<: *almalinux_8fips_amd64_image
  <<: *system_test_job
  <<: *extra_system_tests_triggering_rules
  variables:
    GIT_CLONE_PATH: "${GIT_CLONE_PATH_INSTANCE_EXECUTOR}"
  needs:
    - job: gcc:8fips:amd64
      artifacts: true

unit:gcc:8fips:amd64:
  <<: *almalinux_8fips_amd64_image
  <<: *unit_test_job
  <<: *api_pipelines_schedules_tags_triggers_web_triggering_rules
  variables:
    GIT_CLONE_PATH: "${GIT_CLONE_PATH_INSTANCE_EXECUTOR}"
  needs:
    - job: gcc:8fips:amd64
      artifacts: true

gcc:9fips:amd64:
  variables:
    CC: gcc
    CFLAGS: "${CFLAGS_COMMON}"
    EXTRA_CONFIGURE: "-Didn=enabled -Dfips=enabled -Dleak-detection=disabled"
    GIT_CLONE_PATH: "${GIT_CLONE_PATH_INSTANCE_EXECUTOR}"
  <<: *almalinux_9fips_amd64_image
  <<: *build_job

system:gcc:9fips:amd64:
  <<: *almalinux_9fips_amd64_image
  <<: *system_test_job
  <<: *extra_system_tests_triggering_rules
  variables:
    GIT_CLONE_PATH: "${GIT_CLONE_PATH_INSTANCE_EXECUTOR}"
  needs:
    - job: gcc:9fips:amd64
      artifacts: true

unit:gcc:9fips:amd64:
  <<: *almalinux_9fips_amd64_image
  <<: *unit_test_job
  <<: *api_pipelines_schedules_tags_triggers_web_triggering_rules
  variables:
    GIT_CLONE_PATH: "${GIT_CLONE_PATH_INSTANCE_EXECUTOR}"
  needs:
    - job: gcc:9fips:amd64
      artifacts: true

gcc:10fips:amd64:
  variables:
    CC: gcc
    CFLAGS: "${CFLAGS_COMMON}"
    EXTRA_CONFIGURE: "-Didn=enabled -Dfips=enabled -Dleak-detection=disabled"
    GIT_CLONE_PATH: "${GIT_CLONE_PATH_INSTANCE_EXECUTOR}"
  <<: *almalinux_10fips_amd64_image
  <<: *build_job

system:gcc:10fips:amd64:
  <<: *almalinux_10fips_amd64_image
  <<: *system_test_job
  variables:
    GIT_CLONE_PATH: "${GIT_CLONE_PATH_INSTANCE_EXECUTOR}"
  needs:
    - job: gcc:10fips:amd64
      artifacts: true

unit:gcc:10fips:amd64:
  <<: *almalinux_10fips_amd64_image
  <<: *unit_test_job
  variables:
    GIT_CLONE_PATH: "${GIT_CLONE_PATH_INSTANCE_EXECUTOR}"
  needs:
    - job: gcc:10fips:amd64
      artifacts: true

# Tarball build on a machine without sphinx-build installed; the before_script
# asserts that sphinx-build is absent so the tarball's pre-built docs are used.
gcc:tarball:nosphinx:
  variables:
    CC: gcc
    CFLAGS: "${CFLAGS_COMMON}"
    EXTRA_CONFIGURE: "-Didn=enabled -Ddeveloper=disabled"
    RUN_MESON_INSTALL: 1
  <<: *almalinux_9_amd64_image
  <<: *build_job
  before_script:
    - *list_installed_package_versions
    - (! command -v sphinx-build >/dev/null)
    - *unpack_release_tarball
  needs:
    - job: tarball-create
      artifacts: true

# Jobs for regular GCC builds on Debian 12 "bookworm" (amd64)

gcc:bookworm:amd64:
  variables:
    CC: gcc
    CFLAGS: "${CFLAGS_COMMON}"
    EXTRA_CONFIGURE: "-Didn=enabled"
  <<: *debian_bookworm_amd64_image
  <<: *build_job

system:gcc:bookworm:amd64:
  <<: *debian_bookworm_amd64_image
  <<: *system_test_job
  needs:
    - job: gcc:bookworm:amd64
      artifacts: true

unit:gcc:bookworm:amd64:
  <<: *debian_bookworm_amd64_image
  <<: *unit_test_job
  needs:
    - job: gcc:bookworm:amd64
      artifacts: true

# Jobs for regular GCC builds on Debian 13 "trixie" (amd64)

gcc:trixie:amd64:
  variables:
    CC: gcc
    CFLAGS: "${CFLAGS_COMMON}"
    # Tracing needs to be disabled otherwise gcovr fails
    EXTRA_CONFIGURE: "-Doptimization=0 -Db_coverage=true -Dtracing=disabled -Didn=enabled ${WITH_LIBEDIT}"
    RUN_MESON_INSTALL: 1
  <<: *debian_trixie_amd64_image
  <<: *build_job

system:gcc:trixie:amd64:
  <<: *debian_trixie_amd64_image
  <<: *system_test_job
  variables:
    CI_ENABLE_LONG_TESTS: 1
    CLEAN_BUILD_ARTIFACTS_ON_SUCCESS: 0
    TZ: Australia/Sydney
  # using artifacts from unit test job is required for gcov
  needs:
    - job: unit:gcc:trixie:amd64
      artifacts: true
  rules:
    - if: '$CI_PIPELINE_SOURCE == "schedule"'
      variables:
        CI_ENABLE_LIVE_INTERNET_TESTS: 1
    - *default_triggering_rules_list

unit:gcc:trixie:amd64:
  <<: *debian_trixie_amd64_image
  <<: *unit_test_job
  variables:
    CI_ENABLE_LONG_TESTS: 1
    CLEAN_BUILD_ARTIFACTS_ON_SUCCESS: 0
  needs:
    - job: gcc:trixie:amd64
      artifacts: true

# Build job for cross-compiled GCC builds on 64-bit Debian 13 "trixie"
# (amd64) with 32-bit BIND 9.

gcc:trixie:amd64cross32:
  variables:
    CFLAGS: "${CFLAGS_COMMON}"
    CROSS_COMPILATION: 1
    EXTRA_CONFIGURE: "--cross-file ci/amd64cross32.ini -Didn=enabled -Dgssapi=disabled -Dtracing=disabled ${WITH_LIBEDIT}"
  <<: *debian_trixie_amd64cross32_image
  <<: *build_job

# Jobs for scan-build builds on Debian 13 "trixie" (amd64)

.scan_build: &scan_build
  - ${SCAN_BUILD} --html-title="BIND 9 ($CI_COMMIT_SHORT_SHA)"
    --keep-cc
    --status-bugs
    --keep-going
    -o scan-build.reports ninja -C build

scan-build:
  <<: *default_triggering_rules
  <<: *base_image
  stage: postcheck
  variables:
    CC: "${CLANG}"
    CFLAGS: "${CFLAGS_COMMON}"
    EXTRA_CONFIGURE: "-Didn=enabled --native-file ci/clang-trixie.ini"
  before_script:
    - *list_installed_package_versions
  script:
    - *configure
    - *scan_build
  needs: []
  artifacts:
    paths:
      - scan-build.reports/
    when: on_failure

# Jobs for strict OpenSSL 3.x (no deprecated) GCC builds on Debian "trixie" (amd64)
# Run with pkcs11-provider tests

gcc:ossl3:trixie:amd64:
  <<: *debian_trixie_amd64_image
  <<: *build_job
  variables:
    CC: gcc
    CFLAGS: "${CFLAGS_COMMON} -DOPENSSL_NO_DEPRECATED=1 -DOPENSSL_API_COMPAT=30000"
    # See https://gitlab.isc.org/isc-projects/bind9/-/issues/3444
    EXTRA_CONFIGURE: "-Doptimization=3 -Djemalloc=disabled -Dleak-detection=disabled"
    RUN_MESON_INSTALL: 1

system:gcc:ossl3:trixie:amd64:
  # Set up environment variables to run pkcs11-provider based system tests
  variables:
    OPENSSL_CONF: "/var/tmp/etc/openssl-provider.cnf"
    SOFTHSM2_CONF: "/var/tmp/softhsm2/softhsm2.conf"
  <<: *debian_trixie_amd64_image
  <<: *system_test_job
  needs:
    - job: gcc:ossl3:trixie:amd64
      artifacts: true

unit:gcc:ossl3:amd64:
  <<: *debian_trixie_amd64_image
  <<: *unit_test_job
  needs:
    - job: gcc:ossl3:trixie:amd64
      artifacts: true

# Jobs for regular GCC builds on Debian "sid" (amd64)
# Also tests configuration option: -Dlmdb=disabled

gcc:sid:amd64:
  <<: *debian_sid_amd64_image
  <<: *build_job
  variables:
    CC: gcc
    CFLAGS: "${CFLAGS_COMMON}"
    EXTRA_CONFIGURE: "-Doptimization=3 -Didn=enabled -Dlmdb=disabled"

system:gcc:sid:amd64:
  <<: *debian_sid_amd64_image
  <<: *system_test_job
  <<: *extra_system_tests_triggering_rules
  needs:
    - job: gcc:sid:amd64
      artifacts: true

unit:gcc:sid:amd64:
  <<: *debian_sid_amd64_image
  <<: *unit_test_job
  <<: *api_pipelines_schedules_tags_triggers_web_triggering_rules
  needs:
    - job: gcc:sid:amd64
      artifacts: true

# Jobs for tarball GCC builds on Debian 13 "trixie" (amd64)

gcc:tarball:
  variables:
    CC: gcc
    EXTRA_CONFIGURE: "-Didn=enabled"
    RUN_MESON_INSTALL: 1
  <<: *base_image
  <<: *build_job
  before_script:
    - *list_installed_package_versions
    - *unpack_release_tarball
  needs:
    - job: tarball-create
      artifacts: true

system:gcc:tarball:
  <<: *base_image
  <<: *system_test_job
  before_script:
    - cd bind-*
    - *setup_interfaces
  after_script:
    - cd bind-*
    - *display_pytest_failures
  needs:
    - job: gcc:tarball
      artifacts: true

unit:gcc:tarball:
  <<: *base_image
  <<: *unit_test_job
  before_script:
    - cd bind-*
  needs:
    - job: gcc:tarball
      artifacts: true

# Jobs for debug GCC builds on openSUSE Tumbleweed (amd64)

gcc:tumbleweed:amd64:
  <<: *tumbleweed_latest_amd64_image
  <<: *build_job
  variables:
    CC: gcc
    CFLAGS: "${CFLAGS_COMMON} -DDEBUG"
    EXTRA_CONFIGURE: "-Didn=enabled -Dgssapi=disabled ${WITH_LIBEDIT}"

system:gcc:tumbleweed:amd64:
  <<: *tumbleweed_latest_amd64_image
  <<: *system_test_job
  needs:
    - job: gcc:tumbleweed:amd64
      artifacts: true

unit:gcc:tumbleweed:amd64:
  <<: *tumbleweed_latest_amd64_image
  <<: *unit_test_job
  needs:
    - job: gcc:tumbleweed:amd64
      artifacts: true

# Jobs for regular GCC builds on Ubuntu 22.04 Jammy Jellyfish (amd64)

gcc:jammy:amd64:
  variables:
    CC: gcc
    CFLAGS: "${CFLAGS_COMMON}"
    EXTRA_CONFIGURE: "-Dgeoip=disabled -Didn=enabled -Ddoh=disabled -Dcmocka=disabled -Ddnstap=disabled -Dgssapi=disabled"
  <<: *ubuntu_jammy_amd64_image
  <<: *build_job

system:gcc:jammy:amd64:
  <<: *ubuntu_jammy_amd64_image
  <<: *system_test_job
  <<: *extra_system_tests_triggering_rules
  needs:
    - job: gcc:jammy:amd64
      artifacts: true

unit:gcc:jammy:amd64:
  <<: *ubuntu_jammy_amd64_image
  <<: *unit_test_job
  <<: *api_pipelines_schedules_tags_triggers_web_triggering_rules
  needs:
    - job: gcc:jammy:amd64
      artifacts: true

# Jobs for regular GCC builds on Ubuntu 24.04 Noble Numbat (amd64)

gcc:noble:amd64:
  variables:
    CC: gcc
    CFLAGS: "${CFLAGS_COMMON}"
    EXTRA_CONFIGURE: "-Didn=enabled"
  <<: *ubuntu_noble_amd64_image
  <<: *build_job

system:gcc:noble:amd64:
  <<: *ubuntu_noble_amd64_image
  <<: *system_test_job
  needs:
    - job: gcc:noble:amd64
      artifacts: true

unit:gcc:noble:amd64:
  <<: *ubuntu_noble_amd64_image
  <<: *unit_test_job
  needs:
    - job: gcc:noble:amd64
      artifacts: true

# Jobs for ASAN builds on Fedora 43 (amd64)

gcc:asan:
  variables:
    CC: gcc
    CFLAGS: "${CFLAGS_COMMON}"
    EXTRA_CONFIGURE: "-Db_sanitize=address,undefined -Didn=enabled -Djemalloc=disabled -Dtracing=disabled"
  <<: *fedora_43_amd64_image
  <<: *build_job

system:gcc:asan:
  variables:
    LSAN_OPTIONS: "suppressions=$CI_PROJECT_DIR/suppr-lsan.txt"
  <<: *fedora_43_amd64_image
  <<: *system_test_job
  needs:
    - job: gcc:asan
      artifacts: true

unit:gcc:asan:
  <<: *fedora_43_amd64_image
  <<: *unit_test_job
  needs:
    - job: gcc:asan
      artifacts: true

clang:asan:
  variables:
    CC: ${CLANG}
    CFLAGS: "${CFLAGS_COMMON}"
    EXTRA_CONFIGURE: "-Db_sanitize=address,undefined -Db_lundef=false -Didn=enabled -Djemalloc=disabled -Dtracing=disabled --native-file ci/clang-trixie.ini"
  <<: *base_image
  <<: *build_job

system:clang:asan:
  variables:
    LSAN_OPTIONS: "suppressions=$CI_PROJECT_DIR/suppr-lsan.txt"
  <<: *base_image
  <<: *system_test_job
  <<: *extra_system_tests_triggering_rules
  needs:
    - job: clang:asan
      artifacts: true

unit:clang:asan:
  <<: *base_image
  <<: *unit_test_job
  <<: *api_pipelines_schedules_tags_triggers_web_triggering_rules
  needs:
    - job: clang:asan
      artifacts: true

# Jobs for TSAN builds on Fedora 43 (amd64)

gcc:tsan:
  variables:
    CC: gcc
    CFLAGS: "${CFLAGS_COMMON} -Wno-stringop-overread"
    LDFLAGS: "-Wl,--disable-new-dtags"
    EXTRA_CONFIGURE: "${TSAN_CONFIGURE_FLAGS_COMMON}"
  <<: *tsan_fedora_43_amd64_image
  <<: *build_job

system:gcc:tsan:
  variables:
    TSAN_OPTIONS: "${TSAN_OPTIONS_FEDORA}"
  <<: *tsan_fedora_43_amd64_image
  <<: *system_test_tsan_job
  <<: *extra_system_tests_triggering_rules
  needs:
    - job: gcc:tsan
      artifacts: true

unit:gcc:tsan:
  variables:
    TSAN_OPTIONS: "${TSAN_OPTIONS_FEDORA}"
  <<: *tsan_fedora_43_amd64_image
  <<: *unit_test_tsan_job
  <<: *api_pipelines_schedules_tags_triggers_web_triggering_rules
  needs:
    - job: gcc:tsan
      artifacts: true

# NOTE(review): the original had "<<: *build_job" listed twice in this
# mapping (a duplicate merge key); the redundant second occurrence was removed.
clang:tsan:
  <<: *tsan_debian_trixie_amd64_image
  <<: *build_job
  variables:
    CC: "${CLANG}"
    CFLAGS: "${CFLAGS_COMMON}"
    LDFLAGS: "-Wl,--disable-new-dtags"
    EXTRA_CONFIGURE: "${TSAN_CONFIGURE_FLAGS_COMMON} -Db_lundef=false -Dnamed-lto=disabled --native-file ci/clang-trixie.ini"

system:clang:tsan:
  variables:
    TSAN_OPTIONS: "${TSAN_OPTIONS_DEBIAN}"
  <<: *tsan_debian_trixie_amd64_image
  <<: *system_test_tsan_job
  needs:
    - job: clang:tsan
      artifacts: true

unit:clang:tsan:
  variables:
    TSAN_OPTIONS: "${TSAN_OPTIONS_DEBIAN}"
  <<: *tsan_debian_trixie_amd64_image
  <<: *unit_test_tsan_job
  needs:
    - job: clang:tsan
      artifacts: true

generate-tsan-stress-test-configs:
  <<: *base_image
  <<: *default_triggering_rules
  stage: system
  script:
    - util/generate-tsan-stress-jobs.py > tsan-stress-test-configs.yml
  artifacts:
    paths:
      - tsan-stress-test-configs.yml
  needs: []
  when: manual

# Child pipeline running the generated TSAN stress-test job configurations.
tsan:stress:
  <<: *default_triggering_rules
  stage: postcheck
  variables:
    PARENT_PIPELINE_ID: $CI_PIPELINE_ID
  trigger:
    include:
      - artifact: tsan-stress-test-configs.yml
        job: generate-tsan-stress-test-configs
  needs:
    - job: generate-tsan-stress-test-configs
      artifacts: true
    - job: gcc:tsan
      artifacts: true
    - job: clang:tsan
      artifacts: true

# Jobs for Clang builds on Debian 13 "trixie" (amd64)

clang:trixie:amd64:
  variables:
    CC: ${CLANG}
    CFLAGS: "${CFLAGS_COMMON}"
    EXTRA_CONFIGURE: "--native-file ci/clang-trixie.ini"
    RUN_MESON_INSTALL: 1
  <<: *debian_trixie_amd64_image
  <<: *build_job

system:clang:trixie:amd64:
  <<: *debian_trixie_amd64_image
  <<: *system_test_job
  <<: *extra_system_tests_triggering_rules
  needs:
    - job: clang:trixie:amd64
      artifacts: true

unit:clang:trixie:amd64:
  <<: *debian_trixie_amd64_image
  <<: *unit_test_job
  <<: *api_pipelines_schedules_tags_triggers_web_triggering_rules
  needs:
    - job: clang:trixie:amd64
      artifacts: true

# Jobs for Clang builds on FreeBSD 13 (amd64)

clang:freebsd13:amd64:
  <<: *build_job
  <<: *freebsd_autoscaler_13_amd64

system:clang:freebsd13:amd64:
  <<: *system_test_job
  <<: *freebsd_autoscaler_13_amd64
  <<: *extra_system_tests_triggering_rules
  needs:
    - job: clang:freebsd13:amd64
      artifacts: true

unit:clang:freebsd13:amd64:
  <<: *unit_test_job
  <<: *freebsd_autoscaler_13_amd64
  <<: *api_pipelines_schedules_tags_triggers_web_triggering_rules
  needs:
    - job: clang:freebsd13:amd64
      artifacts: true

# Jobs for Clang builds on FreeBSD 14 (amd64)

clang:freebsd14:amd64:
  <<: *build_job
  <<: *freebsd_autoscaler_14_amd64

system:clang:freebsd14:amd64:
  <<: *system_test_job
  <<: *freebsd_autoscaler_14_amd64
  needs:
    - job: clang:freebsd14:amd64
      artifacts: true

unit:clang:freebsd14:amd64:
  <<: *unit_test_job
  <<: *freebsd_autoscaler_14_amd64
  needs:
    - job: clang:freebsd14:amd64
      artifacts: true
  retry: 1 # GL #4924

# Job producing a release directory

release:
  <<: *base_image
  stage: release
  script:
    - export RELEASE_DIRECTORY="bind-${CI_COMMIT_TAG}-release"
    - export BIND_VERSION="bind-${CI_COMMIT_TAG#v}"
    # Prepare release tarball contents (tarballs + documentation)
    - mkdir -p "${RELEASE_DIRECTORY}/doc/arm"
    - pushd "${RELEASE_DIRECTORY}"
    - mv "../build/meson-dist/${BIND_VERSION}.tar.xz" .
    - tar --extract --file="${BIND_VERSION}.tar.xz" --strip-components=1 "${BIND_VERSION}"/{COPYRIGHT,LICENSE,README.md,srcid}
    - mv ../build/arm/ doc/arm/html/
    - mv ../build/arm-epub/Bv9ARM.epub doc/arm/
    - echo '<!DOCTYPE HTML><html lang="en"><meta http-equiv="refresh" content="0; url=doc/arm/html/notes.html"><title>Redirect</title></html>' > "RELEASE-NOTES-${BIND_VERSION}.html"
    - echo '<!DOCTYPE HTML><html lang="en"><meta http-equiv="refresh" content="0; url=doc/arm/html/changelog.html"><title>Redirect</title></html>' > "CHANGELOG-${BIND_VERSION}.html"
    - popd
  needs:
    - job: tarball-create
      artifacts: true
    - job: docs
      artifacts: true
  rules:
    - *rule_tag
  artifacts:
    paths:
      - bind-${CI_COMMIT_TAG}-release
    expire_in: "1 month"

.manual_release_job: &manual_release_job
  stage: release
  when: manual

# Template for jobs that wait for an operator to run a generated script on the
# signer host over SSH; the job polls a "-done" sentinel file until the script
# has been executed.
.signer-ssh-job: &signer_ssh_job
  <<: *manual_release_job
  allow_failure: false
  tags:
    - signer
  script:
    - ( rm -f "/tmp/${CI_JOB_NAME}.log" "/tmp/${CI_JOB_NAME}-done" && umask 111 && touch "/tmp/${CI_JOB_NAME}.log" "/tmp/${CI_JOB_NAME}-done" )
    - |
      cat > "/tmp/${CI_JOB_NAME}.sh" <<EOF
      #!/bin/sh
      set -e -x
      {
      ${SSH_SCRIPT_CLIENT}
      echo "${CI_COMMIT_TAG}" > "/tmp/${CI_JOB_NAME}-done"
      } 2>&1 | tee "/tmp/${CI_JOB_NAME}.log"
      EOF
    - chmod +x "/tmp/${CI_JOB_NAME}.sh"
    - /bin/sh -c "set -e -x; ${SSH_SCRIPT_RUNNER_PRE}"
    - echo -e "\e[31m*** Sleeping until /tmp/${CI_JOB_NAME}.sh is executed over SSH... ⌛\e[0m"
    - while [ "$(cat "/tmp/${CI_JOB_NAME}-done")" != "${CI_COMMIT_TAG}" ]; do sleep 10; done
    - /bin/sh -c "set -e -x; ${SSH_SCRIPT_RUNNER_POST}"
    - cp "/tmp/${CI_JOB_NAME}.log" "${CI_PROJECT_DIR}/${CI_JOB_NAME}-${CI_COMMIT_TAG}.log"
    - rm -f "/tmp/${CI_JOB_NAME}.log" "/tmp/${CI_JOB_NAME}-done" "/tmp/${CI_JOB_NAME}.sh"

# Job signing the source tarballs in the release directory

sign:
  <<: *signer_ssh_job
  before_script:
    - export SOURCE_TARBALL="bind-${CI_COMMIT_TAG#v}.tar.xz"
  variables:
    RELEASE_DIRECTORY: bind-${CI_COMMIT_TAG}-release
    SSH_SCRIPT_RUNNER_PRE: |-
      ( umask 111 && cat "${RELEASE_DIRECTORY}/$${SOURCE_TARBALL}" > "/tmp/${CI_COMMIT_TAG}.bin" )
    SSH_SCRIPT_CLIENT: |-
      gpg2 --local-user "$${SIGNING_KEY_FINGERPRINT}" --armor --digest-algo SHA512 --detach-sign --output "/tmp/${CI_COMMIT_TAG}.asc" "/tmp/${CI_COMMIT_TAG}.bin"
    SSH_SCRIPT_RUNNER_POST: |-
      cat "/tmp/${CI_COMMIT_TAG}.asc" > "${RELEASE_DIRECTORY}/$${SOURCE_TARBALL}.asc"
      tar --create --file="${RELEASE_DIRECTORY}".tar.gz --gzip "${RELEASE_DIRECTORY}"
      rm -f "/tmp/${CI_COMMIT_TAG}.bin" "/tmp/${CI_COMMIT_TAG}.asc"
  artifacts:
    paths:
      - bind-${CI_COMMIT_TAG}-release.tar.gz
      - sign-${CI_COMMIT_TAG}.log
    expire_in: never
  needs:
    - job: release
      artifacts: true
  rules:
    - *rule_tag

# Job staging the signed tarballs

staging:
  <<: *signer_ssh_job
  variables:
    RELEASE_TARBALL: bind-${CI_COMMIT_TAG}-release.tar.gz
    SSH_SCRIPT_RUNNER_PRE: |-
      mv "${RELEASE_TARBALL}" "/tmp/${RELEASE_TARBALL}"
    SSH_SCRIPT_CLIENT: |-
      scp "/tmp/${RELEASE_TARBALL}" "${STAGING_USER_UPLOAD}@${STAGING_HOST}:${STAGING_DIR}"
      ssh "${STAGING_USER_ACTIONS}@${STAGING_HOST}" "unpack ${CI_COMMIT_TAG}"
    SSH_SCRIPT_RUNNER_POST: |-
      rm -f "/tmp/${RELEASE_TARBALL}"
  artifacts:
    paths:
      - staging-${CI_COMMIT_TAG}.log
    expire_in: "1 month"
  needs:
    - job: sign
      artifacts: true
  rules:
    - *rule_tag

# Job copying a staged release to a secret location

publish-private:
  <<: *signer_ssh_job
  variables:
    SSH_SCRIPT_CLIENT: |-
      ssh "${STAGING_USER_ACTIONS}@${STAGING_HOST}" "publish-private ${CI_COMMIT_TAG}"
    SSH_SCRIPT_RUNNER_POST: |-
      awk '/^Public Use URL:/ {print $$NF}' "/tmp/${CI_JOB_NAME}.log" > "url-${CI_COMMIT_TAG}.txt"
  artifacts:
    paths:
      - publish-private-${CI_COMMIT_TAG}.log
      - url-${CI_COMMIT_TAG}.txt
    expire_in: "1 month"
  needs:
    - job: staging
      artifacts: false
  rules:
    - *rule_tag_security_or_subscription

# Job copying a staged release to a well-known location

publish:
  <<: *signer_ssh_job
  variables:
    SSH_SCRIPT_CLIENT: |-
      ssh "${STAGING_USER_ACTIONS}@${STAGING_HOST}" "publish ${CI_COMMIT_TAG}"
  artifacts:
    paths:
      - publish-${CI_COMMIT_TAG}.log
    expire_in: "1 month"
  needs:
    - job: staging
      artifacts: false
  rules:
    - *rule_tag_open_source

.manual_release_job_qa: &manual_release_job_qa
  <<: *manual_release_job
  <<: *base_image
  before_script:
    - git clone --depth 1 https://gitlab.isc.org/isc-projects/bind9-qa.git
  needs:
    - job: staging
      artifacts: false

# Setting the FORCE_CVE_IDS environment variable to a comma-separated
# list of CVE IDs enables overriding the autodetected ones.
#
# Setting the FORCE_SECURITY_RELEASES environment variable to a
# comma-separated list of BIND 9 versions enables overriding the
# autodetected ones.
.printing_press_job: &printing_press_job
  <<: *manual_release_job_qa
  variables:
    GIT_DEPTH: 1
  script:
    - bind9-qa/releng/printing_press_mr.py --document "${DOCUMENT}" --metadata bind9-qa/releng/metadata.json ${FORCE_CVE_IDS:+--force-cve-ids ${FORCE_CVE_IDS}} ${FORCE_SECURITY_RELEASES:+--force-security-releases ${FORCE_SECURITY_RELEASES}}
  artifacts:
    paths:
      - printing-press/
    when: on_failure

# Job creating the release announcement MR in Printing Press

prepare-release-announcement:
  <<: *printing_press_job
  variables:
    DOCUMENT: release-announcement
  rules:
    - *rule_tag_open_source

# Job preparing an EVN MR in Printing Press

prepare-evn:
  <<: *printing_press_job
  variables:
    DOCUMENT: evn
  rules:
    - *rule_tag_security

# Job preparing a security pre-announcement MR in Printing Press

prepare-preannouncement:
  <<: *printing_press_job
  variables:
    DOCUMENT: security-preannouncement
  rules:
    - *rule_tag_security

# Job preparing a packager notification MR in Printing Press

prepare-package-notification:
  <<: *printing_press_job
  variables:
    DOCUMENT: packager-notification
  rules:
    - *rule_tag_security

# Job preparing a post-disclosure notification MR in Printing Press

prepare-post-disclosure-notification:
  <<: *printing_press_job
  variables:
    DOCUMENT: post-disclosure-notification
  rules:
    - *rule_tag_security

# Job merging the tag back into its base branch

merge-tag:
  <<: *manual_release_job_qa
  variables:
    GIT_DEPTH: 100
  script:
    - bind9-qa/releng/merge_tag.py --tag "$CI_COMMIT_TAG"
  rules:
    - *rule_tag_open_source
  artifacts:
    paths:
      - bind9/
    when: on_failure

.customer-git: &customer_git
  <<: *base_image
  stage: release
  when: manual
  variables: # ensure clean git environment and sufficient history
    GIT_STRATEGY: clone
    GIT_DEPTH: 1000

# To trigger this job and push a branch to a customer, you must set the
# CUSTOMER job variable by clicking on the manual job (not the play button) and
# set it to the name of the target customer.
customer-git:branch:
  <<: *customer_git
  needs: []
  rules:
    - if: '$CI_PROJECT_NAMESPACE == "isc-private" && $CI_PIPELINE_SOURCE == "merge_request_event"'
      variables:
        BRANCH: '$CI_MERGE_REQUEST_SOURCE_BRANCH_NAME'
    - if: '$CI_PROJECT_NAMESPACE == "isc-private" && $CI_PIPELINE_SOURCE =~ /^(api|pipeline|trigger|web)$/ && $REBASE_ONLY != "1"'
      variables:
        BRANCH: '$CI_COMMIT_BRANCH'
  before_script:
    - test -n "$CUSTOMER"
    - git clone --depth 1 https://gitlab.isc.org/isc-projects/bind9-qa.git
  script:
    - git checkout -b "$BRANCH" # ensure refs/heads/$BRANCH exists; GitLab clones with detached HEAD
    - bind9-qa/releng/push_to_customer_repository.py --branch "$BRANCH" --customer "$CUSTOMER" --force

customer-git:tag:
  <<: *customer_git
  needs:
    - job: release
      artifacts: false
  rules:
    - *rule_tag
  before_script:
    - git clone --depth 1 https://gitlab.isc.org/isc-projects/bind9-qa.git
    - git clone --depth 1 "https://token:${ISC_CUSTOMERS_WRITE_TOKEN}@gitlab.isc.org/isc-customers/isc-customer-settings.git"
  script:
    - bind9-qa/releng/push_to_customer_repository.py --tag "$CI_COMMIT_TAG" --entitlements isc-customer-settings/entitlements.yaml --force

# Coverity Scan analysis upload

.coverity_prep: &coverity_prep
  - curl --output /tmp/cov-analysis-linux64.md5 https://scan.coverity.com/download/linux64
    --form project=$COVERITY_SCAN_PROJECT_NAME
    --form token=$COVERITY_SCAN_TOKEN
    --form md5=1
  - curl --output /tmp/cov-analysis-linux64.tgz https://scan.coverity.com/download/linux64
    --form project=$COVERITY_SCAN_PROJECT_NAME
    --form token=$COVERITY_SCAN_TOKEN
  - test "$(md5sum /tmp/cov-analysis-linux64.tgz | awk '{ print $1 }')" = "$(cat /tmp/cov-analysis-linux64.md5)"
  - tar --extract --gzip --file=/tmp/cov-analysis-linux64.tgz --directory=/tmp
  - test -d /tmp/cov-analysis-linux64-*

.coverity_build: &coverity_build
  - /tmp/cov-analysis-linux64-*/bin/cov-build --dir /tmp/cov-int sh -c 'ninja -C ./build -v'
  - tar --create --gzip --file=/tmp/cov-int.tar.gz --directory /tmp cov-int
  - curl -v https://scan.coverity.com/builds?project=$COVERITY_SCAN_PROJECT_NAME
    --form token=$COVERITY_SCAN_TOKEN
    --form email=bind-changes@isc.org
    --form file=@/tmp/cov-int.tar.gz
    --form version="$(git rev-parse --short HEAD)"
    --form description="$(git rev-parse --short HEAD) / $CI_COMMIT_TITLE / $CI_COMMIT_REF_NAME:$CI_PIPELINE_ID" 2>&1
    | tee curl-response.txt
  - grep -q 'Build successfully submitted' curl-response.txt

coverity:
  <<: *base_image
  stage: postcheck
  variables:
    CC: gcc
    CFLAGS: "${CFLAGS_COMMON}"
    EXTRA_CONFIGURE: "-Doptimization=g -Ddoc=disabled -Didn=enabled"
  script:
    - *coverity_prep
    - *configure
    - *coverity_build
  after_script:
    - mv -v /tmp/cov-int.tar.gz ${CI_PROJECT_DIR}/
  needs: []
  artifacts:
    paths:
      - curl-response.txt
      - cov-int.tar.gz
    expire_in: "1 week"
    when: on_failure
  rules:
    - if: '$COVERITY_SCAN_PROJECT_NAME != null && $COVERITY_SCAN_TOKEN != null'

# Respdiff tests

respdiff:
  <<: *respdiff_job
  <<: *base_image
  variables:
    CC: gcc
    CFLAGS: "${CFLAGS_COMMON} -DISC_TRACK_PTHREADS_OBJECTS"
    EXTRA_CONFIGURE: "-Doptimization=g"
    MAX_DISAGREEMENTS_PERCENTAGE: "0.3"
  script:
    - bash respdiff.sh -m /usr/lib/x86_64-linux-gnu/libjemalloc.so.2 -s named -q "${PWD}/100k_mixed.txt" -c 3 -w "${PWD}/rspworkdir" "${CI_PROJECT_DIR}" "/usr/local/respdiff-reference-bind/sbin/named"
    - cd ../.. && ninja -C build clean >/dev/null 2>&1

respdiff:asan:
  <<: *respdiff_job
  <<: *base_image
  variables:
    CC: gcc
    CFLAGS: "${CFLAGS_COMMON}"
    EXTRA_CONFIGURE: "-Doptimization=g -Db_sanitize=address,undefined -Djemalloc=disabled"
    MAX_DISAGREEMENTS_PERCENTAGE: "0.3"
  script:
    - bash respdiff.sh -s named -q "${PWD}/100k_mixed.txt" -c 3 -w "${PWD}/rspworkdir" "${CI_PROJECT_DIR}" "/usr/local/respdiff-reference-bind/sbin/named"
    - cd ../.. && ninja -C build clean >/dev/null 2>&1

respdiff:tsan:
  <<: *respdiff_job
  <<: *tsan_debian_trixie_amd64_image
  variables:
    CC: "${CLANG}"
    CFLAGS: "${CFLAGS_COMMON}"
    LDFLAGS: "-Wl,--disable-new-dtags"
    EXTRA_CONFIGURE: "${TSAN_CONFIGURE_FLAGS_COMMON} -Dnamed-lto=disabled -Db_lundef=false"
    MAX_DISAGREEMENTS_PERCENTAGE: "0.3"
    TSAN_OPTIONS: "${TSAN_OPTIONS_DEBIAN}"
  script:
    - bash respdiff.sh -s named -q "${PWD}/100k_mixed.txt" -c 3 -w "${PWD}/rspworkdir" "${CI_PROJECT_DIR}" "/usr/local/respdiff-reference-bind/sbin/named"
    - cd ../.. && ninja -C build clean >/dev/null 2>&1
  after_script:
    - *parse_tsan

respdiff-third-party:
  <<: *respdiff_job
  <<: *base_image
  variables:
    CC: gcc
    CFLAGS: "${CFLAGS_COMMON}"
    EXTRA_CONFIGURE: "-Doptimization=g"
    MAX_DISAGREEMENTS_PERCENTAGE: "0.4"
  script:
    - bash respdiff.sh -s third_party -q "${PWD}/100k_mixed.txt" -c 1 -w "${PWD}/rspworkdir" "${CI_PROJECT_DIR}"
    - cd ../.. && ninja -C build clean >/dev/null 2>&1

# Template: compare the version under test against a freshly built baseline
# (MR target branch, or $BIND_BASELINE_VERSION when not in an MR context).
.respdiff-recent-named: &respdiff_recent_named
  <<: *respdiff_job
  <<: *base_image
  needs:
    - job: ci-variables
      artifacts: true
  script:
    - cd ${CI_PROJECT_DIR}
    # stash the already-built tree so the baseline build does not clobber it
    - mkdir version-under-test
    - mv build version-under-test/
    - BASELINE=${CI_MERGE_REQUEST_TARGET_BRANCH_NAME:-$BIND_BASELINE_VERSION}
    - git fetch --unshallow origin ${BASELINE}
    - git checkout FETCH_HEAD
    - *configure
    - meson compile -C build
    - cd bind9-qa/respdiff
    - bash respdiff.sh -s named -q "${PWD}/100k_mixed.txt" -c 3 -w "${PWD}/rspworkdir" "${CI_PROJECT_DIR}/version-under-test" "${CI_PROJECT_DIR}/build/named"

respdiff:recent-named:
  <<: *respdiff_recent_named
  variables:
    CC: gcc
    CFLAGS: "${CFLAGS_COMMON} -DISC_TRACK_PTHREADS_OBJECTS"
    EXTRA_CONFIGURE: "-Doptimization=g"
    MAX_DISAGREEMENTS_PERCENTAGE: "0.1"
  allow_failure: true # GL!11293

# Performance tests

shotgun:udp:
  <<: *shotgun_job
  variables:
    SHOTGUN_SCENARIO: udp
    SHOTGUN_TRAFFIC_MULTIPLIER: 15

shotgun:tcp:
  <<: *shotgun_job
  variables:
    SHOTGUN_SCENARIO: tcp
    SHOTGUN_TRAFFIC_MULTIPLIER: 12

shotgun:dot:
  <<: *shotgun_job
  variables:
    SHOTGUN_SCENARIO: dot
    SHOTGUN_TRAFFIC_MULTIPLIER: 5
  rules: # FIXME disabled shotgun jobs temporarily due to infra issue
    - *shotgun_rule_mr_manual
    # - *shotgun_rule_tag
    # - *shotgun_rule_other

shotgun:doh-get:
  <<: *shotgun_job
  variables:
    SHOTGUN_SCENARIO: doh-get
    SHOTGUN_TRAFFIC_MULTIPLIER: 2
    SHOTGUN_EVAL_THRESHOLD_LATENCY_PCTL_MAX: 0.4 # bump from the default due to increased tail-end jitter
  rules: # FIXME disabled shotgun jobs temporarily due to infra issue
    - *shotgun_rule_mr_manual
    # - *shotgun_rule_tag
    # - *shotgun_rule_other

# Generates the child-pipeline YAML consumed by stress-test-child-pipeline.
generate-stress-test-configs:
  <<: *base_image
  <<: *default_triggering_rules
  stage: precheck
  script:
    - git clone --depth 1 https://gitlab.isc.org/isc-projects/bind9-qa.git
    - bind9-qa/stress/generate-stress-test-configs.py > stress-test-configs.yml
  artifacts:
    paths:
      - stress-test-configs.yml
  needs: []

stress-test-child-pipeline:
  <<: *default_triggering_rules
  stage: performance
  # NOTE: this explicit "rules:" key overrides the one merged in from
  # *default_triggering_rules (YAML merge keys are shallow and explicit
  # keys win).
  rules:
    - if: '$CI_MERGE_REQUEST_DIFF_BASE_SHA != null'
      changes:
        - '**/*.c'
        - '**/*.h'
    - if: '$CI_MERGE_REQUEST_DIFF_BASE_SHA != null'
      when: manual # don't run on each MR unless requested
      allow_failure: true
    - *rule_tag
    - if: '$CI_PIPELINE_SOURCE =~ /^(api|pipeline|schedule|trigger|web)$/ && $REBASE_ONLY != "1"'
  trigger:
    include:
      - artifact: stress-test-configs.yml
        job: generate-stress-test-configs
  needs:
    - job: generate-stress-test-configs
      artifacts: true

# Simple reproducibility test, needs an image with meson >=1.6.0
reproducible-build:
  <<: *default_triggering_rules
  <<: *alpine_3_22_amd64_image
  stage: postcheck
  needs: []
  variables:
    CC: gcc
    CFLAGS: "${CFLAGS_COMMON}"
  before_script:
    - *list_installed_package_versions
  script:
    # dnstap produces an intermediate .a file, and meson considers all .a
    # files to be final results independently of whether they are installed or
    # not. But the content of the .a file might be unstable under LTO due to
    # -ffat-lto-objects. Hence we disable dnstap for reproducibility tests.
    - meson reprotest
      --
      -Ddnstap=disabled
      -Ddoc=disabled
      -Doptimization=1
  artifacts:
    untracked: true
    when: on_failure

# git fsck operates over the whole repository, so it is sufficient to schedule
# it in only one branch, preferably "main". GitLab's clone strategy prevents us
# from using the "bind9" repo clone; we need to clone it ourselves.
fsck:
  <<: *base_image
  stage: postcheck
  variables:
    # skip GitLab's own checkout; we clone the full repository below
    GIT_STRATEGY: none
  script:
    - git clone https://gitlab.isc.org/isc-projects/bind9.git bind9-full-clone
    - cd bind9-full-clone/
    - git fsck
  rules:
    - if: '$CI_PIPELINE_SOURCE == "schedule"'
  needs: []

# Coverage report from the gcc system-test job's build artifacts.
gcov:
  <<: *base_image
  <<: *default_triggering_rules
  stage: postcheck
  needs:
    - job: system:gcc:trixie:amd64
      artifacts: true
  script:
    # Ensure gcov files for unit tests are found via tests/ rather than
    # lib/*/tests/ to prevent confusing gcovr.
    # - find lib/ -name tests -type l -delete
    - ninja -C build coverage
    - tail -n 3 build/meson-logs/coverage.txt
  coverage: /^TOTAL.*\s+(\d+\%)$/
  artifacts:
    paths:
      - build/meson-logs/
    reports:
      coverage_report:
        coverage_format: cobertura
        path: build/meson-logs/coverage.xml

# Pairwise testing of build options

pairwise:
  <<: *base_image
  stage: build
  needs: []
  script:
    - util/pairwise-testing.sh
  artifacts:
    paths:
      - pairwise-commands.txt
      - pairwise-model.txt
      - pairwise-output.*.txt
    when: on_failure
  rules:
    - if: '$PAIRWISE_TESTING != null'

# Shared setup for jobs that run after a merge: detects force-pushes (and
# bails out), extracts the merge request ID from the merge commit message,
# and clones bind9-qa for the releng helper scripts.
.post_merge_template: &post_merge
  <<: *base_image
  stage: postmerge
  needs: []
  # post-merge processes should run even if another MR was merged while the job was running (or queued)
  interruptible: false
  variables:
    # avoid leftover branches from previous jobs
    GIT_STRATEGY: clone
    # assumed max depth of a MR for backport or a rebased force-push
    GIT_DEPTH: 1000
  before_script:
    # force-pushes should not trigger process automation (happens only in -sub branches)
    - >
      echo "previous branch tip: $CI_COMMIT_BEFORE_SHA"
    - set +o pipefail; git log --format='%H' | grep --silent "$CI_COMMIT_BEFORE_SHA" && PREVIOUS_TIP_REACHABLE=1
    - test "$PREVIOUS_TIP_REACHABLE" != "1" && echo "force-push detected, stop" && exit 0
    # non-fast-forward merges are disabled so we have to have merge commit on top
    - MERGE_REQUEST_ID="$(git log -1 --format='%b' | sed --silent -e "s|^See merge request ${CI_PROJECT_PATH}\!||p")"
    - >
      : stop if this is not a merge request in the current project\'s namespace
    - test -n "$MERGE_REQUEST_ID" || exit 0
    - git clone --depth 1 https://gitlab.isc.org/isc-projects/bind9-qa.git

backports:
  <<: *post_merge
  rules:
    # -sub branches are handled manually
    # (the "." in the branch regex is escaped so e.g. "bind-9x16" cannot match)
    - if: '$CI_PIPELINE_SOURCE == "push" && ($CI_COMMIT_REF_NAME =~ /^bind-9\.[0-9]+$/ || $CI_COMMIT_REF_NAME == $CI_DEFAULT_BRANCH)'
  script:
    # CI job token is not sufficient for push operations
    - git remote get-url origin | sed -e "s/gitlab-ci-token:$CI_JOB_TOKEN/oauth2:$BIND_TEAM_WRITE_TOKEN/" | xargs git remote set-url --push origin
    - bind9-qa/releng/backport_mr.py $CI_PROJECT_ID "$MERGE_REQUEST_ID"

merged-metadata:
  <<: *post_merge
  rules:
    # (literal dots in the version regexes are escaped for precise matching)
    - if: '$CI_PIPELINE_SOURCE == "push" && ($CI_COMMIT_REF_NAME =~ /^bind-9\.[0-9]+(-sub)?$/ || $CI_COMMIT_REF_NAME =~ /^v9\.[0-9]+\.[0-9]+-release$/ || $CI_COMMIT_REF_NAME == $CI_DEFAULT_BRANCH)'
  script:
    - bind9-qa/releng/after_merge.py "$CI_PROJECT_ID" "$MERGE_REQUEST_ID"

auto-rebase-trigger:
  stage: postmerge
  rules:
    - if: '$CI_PROJECT_NAMESPACE == "isc-projects" && $CI_PIPELINE_SOURCE == "push" && $CI_COMMIT_REF_NAME =~ /^bind-9\.[0-9]+$/'
  needs: []
  interruptible: true
  inherit:
    variables: false
  variables:
    # quoted so the value stays the string "1", matching the
    # `$REBASE_ONLY != "1"` comparison used in other rules
    REBASE_ONLY: "1"
  trigger:
    project: isc-private/bind9
    branch: "${CI_COMMIT_BRANCH}-sub"