Files
bind9/.gitlab-ci.yml
Nicki Křížek 2a207bc0b2 Replace deprecated only/except with rules in .gitlab-ci.yml
The keyword rules allows more flexible and complex conditions when
deciding whether to create the job and also makes it possible to tweak
variables or job properties depending on arbitrary rules. Since it's
not possible to combine only/except and rules together, replace all
uses of only/except to avoid any potential future issues.

(cherry picked from commit 29fd756408)
2025-02-27 14:55:30 +01:00

1750 lines
51 KiB
YAML

# Global CI/CD variables inherited by every job in this pipeline.
variables:
  # Not normally needed, but may be if some script uses `apt-get install`.
  DEBIAN_FRONTEND: noninteractive
  # Locale settings do not affect the build, but might affect tests.
  LC_ALL: C
  CI_REGISTRY_IMAGE: registry.gitlab.isc.org/isc-projects/images/bind9
  CCACHE_DIR: "/ccache"
  GIT_DEPTH: 1
  GIT_CLEAN_FLAGS: -ffdxq
  # The following values may be overwritten in GitLab's CI/CD Variables Settings.
  BUILD_PARALLEL_JOBS: 6
  TEST_PARALLEL_JOBS: 4
  CONFIGURE: ./configure
  # All LLVM/Clang tool names are derived from this single version number.
  CLANG_VERSION: 19
  CLANG: "clang-${CLANG_VERSION}"
  SCAN_BUILD: "scan-build-${CLANG_VERSION}"
  LLVM_SYMBOLIZER: "/usr/lib/llvm-${CLANG_VERSION}/bin/llvm-symbolizer"
  ASAN_SYMBOLIZER_PATH: "/usr/lib/llvm-${CLANG_VERSION}/bin/llvm-symbolizer"
  CLANG_FORMAT: "clang-format-${CLANG_VERSION}"
  CFLAGS_COMMON: -fno-omit-frame-pointer -fno-optimize-sibling-calls -O1 -g -Wall -Wextra
  # Pass run-time flags to AddressSanitizer to get core dumps on error.
  ASAN_OPTIONS: abort_on_error=1:disable_coredump=0:unmap_shadow_on_exit=1
  # ThreadSanitizer run-time options; the Debian and Fedora variants below
  # differ only in the path to the external symbolizer binary.
  TSAN_OPTIONS_COMMON: "disable_coredump=0 second_deadlock_stack=1 atexit_sleep_ms=1000 history_size=7 log_exe_name=true log_path=tsan"
  TSAN_SUPPRESSIONS: "suppressions=${CI_PROJECT_DIR}/.tsan-suppress"
  TSAN_OPTIONS_DEBIAN: "${TSAN_OPTIONS_COMMON} ${TSAN_SUPPRESSIONS} external_symbolizer_path=${LLVM_SYMBOLIZER}"
  TSAN_OPTIONS_FEDORA: "${TSAN_OPTIONS_COMMON} ${TSAN_SUPPRESSIONS} external_symbolizer_path=/usr/bin/llvm-symbolizer"
  UBSAN_OPTIONS: "halt_on_error=1:abort_on_error=1:disable_coredump=0"
  AM_COLOR_TESTS: always
  # Pre-canned ./configure readline options; jobs pass one of these via
  # EXTRA_CONFIGURE and .check_readline_setup verifies the resulting config.h.
  WITHOUT_READLINE: "--without-readline"
  WITH_READLINE: "--with-readline"
  WITH_READLINE_EDITLINE: "--with-readline=editline"
  WITH_READLINE_LIBEDIT: "--with-readline=libedit"
  WITH_READLINE_READLINE: "--with-readline=readline"
  INSTALL_PATH: "${CI_PROJECT_DIR}/.local"
  # In multithreaded unit tests, abort on the first failure
  CMOCKA_TEST_ABORT: 1
  # Disable pytest's "cacheprovider" plugin to prevent it from creating
  # cross-testrun files as there is no need to use that feature in CI.
  PYTEST_ADDOPTS: "-p no:cacheprovider"
  HYPOTHESIS_PROFILE: "ci"
  # Some jobs may clean up the build artifacts unless this is set to 0.
  CLEAN_BUILD_ARTIFACTS_ON_SUCCESS: 1
  # DNS Shotgun performance testing defaults
  SHOTGUN_ROUNDS: 1
  SHOTGUN_DURATION: 120
  # allow unlimited improvements against baseline
  SHOTGUN_EVAL_THRESHOLD_CPU_MIN: '-inf'
  SHOTGUN_EVAL_THRESHOLD_MEMORY_MIN: '-inf'
  SHOTGUN_EVAL_THRESHOLD_RCODE_MAX: '+inf'
  SHOTGUN_EVAL_THRESHOLD_LATENCY_PCTL_MIN: '-inf'
  SHOTGUN_EVAL_THRESHOLD_LATENCY_PCTL_DRIFT_MIN: '-inf'
# Defaults applied to every job that does not override them.
default:
  # Allow all running CI jobs to be automatically canceled when a new
  # version of a branch is pushed.
  #
  # See: https://docs.gitlab.com/ee/ci/pipelines/settings.html#auto-cancel-redundant-pipelines
  interruptible: true
  # AWS can interrupt the spot instance anytime, so let's retry the job when
  # the interruption event happens to avoid a pipeline failure.
  retry:
    max: 2
    when:
      - runner_system_failure

# Pipeline stages, listed in execution order.
stages:
  - autoconf
  - precheck
  - build
  - unit
  - system
  - performance
  - docs
  - postcheck
  - postmerge
  - release
### Runner Tag Templates
# Hidden templates (keys starting with ".") that only define runner tags;
# they are merged into image templates below via YAML anchors/aliases.
.libvirt-amd64: &libvirt_amd64
  tags:
    - libvirt
    - amd64
# Autoscaling GitLab Runner on AWS EC2 (amd64)
.linux-amd64: &linux_amd64
  tags:
    - linux
    - aws
    - runner-manager
    - amd64
# Autoscaling GitLab Runner on AWS EC2 (arm64)
.linux-arm64: &linux_arm64
  tags:
    - linux
    - aws
    - runner-manager
    - aarch64
# Autoscaling GitLab Runner on AWS EC2 (FreeBSD)
.freebsd-stress-amd64: &freebsd_stress_amd64
  tags:
    - bsd-stress-test
    - aws
    - autoscaler
    - shell
    - stress-test
    - amd64
### Docker Image Templates
# Each template pairs a container image with the matching runner tags.
# Alpine Linux
.alpine-3.21-amd64: &alpine_3_21_amd64_image
  image: "$CI_REGISTRY_IMAGE:alpine-3.21-amd64"
  <<: *linux_amd64
# Oracle Linux
.oraclelinux-8-amd64: &oraclelinux_8_amd64_image
  image: "$CI_REGISTRY_IMAGE:oraclelinux-8-amd64"
  <<: *linux_amd64
.oraclelinux-9-amd64: &oraclelinux_9_amd64_image
  image: "$CI_REGISTRY_IMAGE:oraclelinux-9-amd64"
  <<: *linux_amd64
# Debian
.debian-bookworm-amd64: &debian_bookworm_amd64_image
  image: "$CI_REGISTRY_IMAGE:debian-bookworm-amd64"
  <<: *linux_amd64
.tsan-debian-bookworm-amd64: &tsan_debian_bookworm_amd64_image
  image: "$CI_REGISTRY_IMAGE:tsan-debian-bookworm-amd64"
  <<: *linux_amd64
.debian-bookworm-amd64cross32: &debian_bookworm_amd64cross32_image
  image: "$CI_REGISTRY_IMAGE:debian-bookworm-amd64cross32"
  <<: *linux_amd64
.debian-sid-amd64: &debian_sid_amd64_image
  image: "$CI_REGISTRY_IMAGE:debian-sid-amd64"
  <<: *linux_amd64
# openSUSE Tumbleweed
.tumbleweed-latest-amd64: &tumbleweed_latest_amd64_image
  image: "$CI_REGISTRY_IMAGE:tumbleweed-latest-amd64"
  <<: *linux_amd64
# Fedora
.tsan-fedora-41-amd64: &tsan_fedora_41_amd64_image
  image: "$CI_REGISTRY_IMAGE:tsan-fedora-41-amd64"
  <<: *linux_amd64
.fedora-41-amd64: &fedora_41_amd64_image
  image: "$CI_REGISTRY_IMAGE:fedora-41-amd64"
  <<: *linux_amd64
.fedora-41-arm64: &fedora_41_arm64_image
  image: "$CI_REGISTRY_IMAGE:fedora-41-arm64"
  <<: *linux_arm64
# Ubuntu
.ubuntu-focal-amd64: &ubuntu_focal_amd64_image
  image: "$CI_REGISTRY_IMAGE:ubuntu-focal-amd64"
  <<: *linux_amd64
.ubuntu-jammy-amd64: &ubuntu_jammy_amd64_image
  image: "$CI_REGISTRY_IMAGE:ubuntu-jammy-amd64"
  <<: *linux_amd64
.ubuntu-noble-amd64: &ubuntu_noble_amd64_image
  image: "$CI_REGISTRY_IMAGE:ubuntu-noble-amd64"
  <<: *linux_amd64
# Base image
# This is a meta image that is used as a base for non-specific jobs
.base: &base_image
  <<: *debian_bookworm_amd64_image
### QCOW2 Image Templates
# VM images for the libvirt-based runners (BSD targets).
.freebsd-13-amd64: &freebsd_13_amd64_image
  image: "freebsd-13.4-x86_64"
  <<: *libvirt_amd64
.freebsd-14-amd64: &freebsd_14_amd64_image
  image: "freebsd-14.2-x86_64"
  <<: *libvirt_amd64
.openbsd-amd64: &openbsd_amd64_image
  image: "openbsd-7.6-x86_64"
  <<: *libvirt_amd64
### Job Templates
# Reusable `rules:` blocks controlling when jobs are created, keyed by the
# pipeline sources they accept (see $CI_PIPELINE_SOURCE).
.api-pipelines-schedules-tags-triggers-web-triggering-rules: &api_pipelines_schedules_tags_triggers_web_triggering_rules
  rules:
    - if: '$CI_PIPELINE_SOURCE =~ /^(api|pipeline|schedule|trigger|web)$/'
    - if: '$CI_COMMIT_TAG != null'
.api-pipelines-schedules-triggers-web-triggering-rules: &api_pipelines_schedules_triggers_web_triggering_rules
  rules:
    - if: '$CI_PIPELINE_SOURCE =~ /^(api|pipeline|schedule|trigger|web)$/'
# Default rules: additionally run for merge request pipelines.
.default-triggering-rules: &default_triggering_rules
  rules:
    - if: '$CI_PIPELINE_SOURCE =~ /^(api|merge_request_event|pipeline|schedule|trigger|web)$/'
    - if: '$CI_COMMIT_TAG != null'
# Base template for quick static checks in the precheck stage.
.precheck: &precheck_job
  <<: *default_triggering_rules
  <<: *base_image
  stage: precheck
# Regenerates the autotools build system; its untracked output (configure,
# Makefile.in, ...) is passed to later stages as artifacts.
.autoconf: &autoconf_job
  <<: *default_triggering_rules
  <<: *base_image
  stage: autoconf
  script:
    - autoreconf -fi
  artifacts:
    untracked: true
# Shared shell snippets, referenced from job scripts via `- *alias`.
# Runs ./configure with the common developer options plus $EXTRA_CONFIGURE;
# dumps config.log on failure so the cause is visible in the job log.
.configure: &configure
  - ${CONFIGURE}
    --disable-maintainer-mode
    --enable-developer
    --enable-option-checking=fatal
    --enable-dnstap
    --with-cmocka
    --with-libxml2
    --with-json-c
    $EXTRA_CONFIGURE
    || (test -s config.log && cat config.log; exit 1)
# change directory to the workspace before including this
.find_python: &find_python
  - PYTHON="$(source bin/tests/system/conf.sh; echo $PYTHON)"
  - test -x "$PYTHON"
.find_pytest: &find_pytest
  - PYTEST="$(source bin/tests/system/conf.sh; echo $PYTEST)"
  - test -x "$PYTEST"
.parse_tsan: &parse_tsan
  - find -name 'tsan.*' -exec "$PYTHON" util/parse_tsan.py {} \;
# Verifies that config.h matches whichever readline-related variable the job
# put into EXTRA_CONFIGURE (exactly one of the WITH*_READLINE* variables).
.check_readline_setup: &check_readline_setup
  - if [[ -n "${WITHOUT_READLINE}" ]]; then
    ! grep "^#define HAVE_READLINE" config.h;
    elif [[ -n "${WITH_READLINE}" ]]; then
    grep -e "^#define HAVE_READLINE_READLINE"
    -e "^#define HAVE_READLINE_LIBEDIT"
    -e "^#define HAVE_READLINE_EDITLINE" config.h;
    elif [[ -n "${WITH_READLINE_EDITLINE}" ]]; then
    grep "^#define HAVE_READLINE_EDITLINE" config.h;
    elif [[ -n "${WITH_READLINE_LIBEDIT}" ]]; then
    grep "^#define HAVE_READLINE_LIBEDIT" config.h;
    elif [[ -n "${WITH_READLINE_READLINE}" ]]; then
    grep "^#define HAVE_READLINE_READLINE" config.h;
    fi
# Unpack release tarball and continue work in the extracted directory.
.unpack_release_tarball: &unpack_release_tarball
  - tar --extract --file bind-*.tar.xz
  - rm -f bind-*.tar.xz
  - cd bind-*
# Common build job: configure, build, optionally install, and sanity-check
# the result; consumes the autoreconf artifacts and exports the whole build
# tree as artifacts for the unit/system test stages.
.build: &build_job
  <<: *default_triggering_rules
  stage: build
  before_script:
    # Prefer ccache wrappers when the cache directory is writable.
    - test -w "${CCACHE_DIR}" && export PATH="/usr/lib/ccache:${PATH}"
    - test -n "${OUT_OF_TREE_WORKSPACE}" && mkdir "${OUT_OF_TREE_WORKSPACE}" && cd "${OUT_OF_TREE_WORKSPACE}"
  script:
    - *configure
    - *check_readline_setup
    - make -j${BUILD_PARALLEL_JOBS:-1} -k all V=1
    - test -z "${RUN_MAKE_INSTALL}" || make DESTDIR="${INSTALL_PATH}" install
    - test -z "${RUN_MAKE_INSTALL}" || DESTDIR="${INSTALL_PATH}" sh util/check-make-install
    # ASAN builds must not be detected by configure (no AddressSanitizer
    # mentions expected in config.log).
    - if [[ "${CFLAGS}" == *"-fsanitize=address"* ]]; then ( ! grep -F AddressSanitizer config.log ); fi
    # Cross-compilation checks: configure must report cross compiling, the
    # build-host helper `gen` must be native 64-bit, and no other produced
    # binary may be.
    - test -z "${CROSS_COMPILATION}" || grep -F -A 1 "checking whether we are cross compiling" config.log | grep -q "result.*yes"
    - test -z "${CROSS_COMPILATION}" || file lib/dns/gen | grep -F -q "ELF 64-bit LSB"
    - test -z "${CROSS_COMPILATION}" || ( ! git ls-files -z --others --exclude lib/dns/gen | xargs -0 file | grep "ELF 64-bit LSB" )
    # In-tree builds must not modify tracked files.
    - if test -z "${OUT_OF_TREE_WORKSPACE}" && test "$(git status --porcelain | grep -Ev '\?\?' | wc -l)" -gt "0"; then git status --short; exit 1; fi
    - bin/named/named -V
  needs:
    - job: autoreconf
      artifacts: true
  artifacts:
    untracked: true
    when: always
# Brings up the test network interfaces, with sudo when not running as root.
.setup_interfaces: &setup_interfaces
  - if [ "$(id -u)" -eq "0" ]; then
    sh -x bin/tests/system/ifconfig.sh up;
    else
    sudo sh -x bin/tests/system/ifconfig.sh up;
    fi
# Extracts the FAILURES/ERRORS sections from the captured pytest output.
.display_pytest_failures: &display_pytest_failures
  - awk '/^=+ FAILURES =+/{flag=1;next}/^=+.*=+$/{flag=0}flag' bin/tests/system/pytest.out.txt || true
  - awk '/^=+ ERRORS =+/{flag=1;next}/^=+.*=+$/{flag=0}flag' bin/tests/system/pytest.out.txt || true
# DNS Shotgun performance testing: triggers a pipeline in a separate project
# (id 188) comparing the current ref against a baseline, then waits for it.
.shotgun: &shotgun_job
  <<: *base_image
  stage: performance
  rules:
    # MR pipelines: compare against the merge base.
    - &shotgun_rule_mr
      if: '$CI_MERGE_REQUEST_DIFF_BASE_SHA != null'
      variables:
        BASELINE: '$CI_MERGE_REQUEST_DIFF_BASE_SHA'
    # Tag pipelines: run more rounds for release measurements.
    - &shotgun_rule_tag
      if: '$CI_COMMIT_TAG != null'
      variables:
        SHOTGUN_ROUNDS: 3
    - &shotgun_rule_other
      if: '$CI_PIPELINE_SOURCE =~ /^(api|pipeline|schedule|trigger|web)$/'
  script:
    - if [ -z "$BASELINE" ]; then export BASELINE=$BIND_BASELINE_VERSION; fi # this dotenv variable can't be set in the rules section, because rules are evaluated before any jobs run
    - PIPELINE_ID=$(curl -s -X POST --fail
      -F "token=$CI_JOB_TOKEN"
      -F ref=main
      -F "variables[SHOTGUN_TEST_VERSION]=['$CI_COMMIT_REF_NAME', '$BASELINE']"
      -F "variables[SHOTGUN_DURATION]=300"
      -F "variables[SHOTGUN_ROUNDS]=$SHOTGUN_ROUNDS"
      -F "variables[SHOTGUN_TRAFFIC_MULTIPLIER]=$SHOTGUN_TRAFFIC_MULTIPLIER"
      -F "variables[SHOTGUN_SCENARIO]=$SHOTGUN_SCENARIO"
      -F "variables[SHOTGUN_EVAL_THRESHOLD_CPU_MIN]=$SHOTGUN_EVAL_THRESHOLD_CPU_MIN"
      -F "variables[SHOTGUN_EVAL_THRESHOLD_CPU_MAX]=$SHOTGUN_EVAL_THRESHOLD_CPU_MAX"
      -F "variables[SHOTGUN_EVAL_THRESHOLD_MEMORY_MIN]=$SHOTGUN_EVAL_THRESHOLD_MEMORY_MIN"
      -F "variables[SHOTGUN_EVAL_THRESHOLD_MEMORY_MAX]=$SHOTGUN_EVAL_THRESHOLD_MEMORY_MAX"
      -F "variables[SHOTGUN_EVAL_THRESHOLD_RCODE_MIN]=$SHOTGUN_EVAL_THRESHOLD_RCODE_MIN"
      -F "variables[SHOTGUN_EVAL_THRESHOLD_RCODE_MAX]=$SHOTGUN_EVAL_THRESHOLD_RCODE_MAX"
      -F "variables[SHOTGUN_EVAL_THRESHOLD_LATENCY_PCTL_MIN]=$SHOTGUN_EVAL_THRESHOLD_LATENCY_PCTL_MIN"
      -F "variables[SHOTGUN_EVAL_THRESHOLD_LATENCY_PCTL_MAX]=$SHOTGUN_EVAL_THRESHOLD_LATENCY_PCTL_MAX"
      -F "variables[SHOTGUN_EVAL_THRESHOLD_LATENCY_PCTL_DRIFT_MIN]=$SHOTGUN_EVAL_THRESHOLD_LATENCY_PCTL_DRIFT_MIN"
      -F "variables[SHOTGUN_EVAL_THRESHOLD_LATENCY_PCTL_DRIFT_MAX]=$SHOTGUN_EVAL_THRESHOLD_LATENCY_PCTL_DRIFT_MAX"
      https://gitlab.isc.org/api/v4/projects/188/trigger/pipeline | jq .id)
    - util/ci-wait-shotgun.py $PIPELINE_ID
  needs:
    # ci-variables provides BIND_BASELINE_VERSION via dotenv.
    - job: ci-variables
      artifacts: true
  timeout: 2h
# Common setup for system test jobs: bring up test interfaces, run pytest
# over bin/tests/system, and surface failures in the job log.
.system_test_common: &system_test_common
  <<: *default_triggering_rules
  stage: system
  before_script:
    - test -n "${OUT_OF_TREE_WORKSPACE}" && cp -r bin/tests/system/* "${OUT_OF_TREE_WORKSPACE}/bin/tests/system/" && cd "${OUT_OF_TREE_WORKSPACE}"
    - *setup_interfaces
  script:
    - *find_pytest
    - *find_python
    # Background watchdog: after 3000 s, report which tests are still running.
    - ( if [ "${CI_DISPOSABLE_ENVIRONMENT}" = "true" ]; then sleep 3000; "$PYTHON" "${CI_PROJECT_DIR}/util/get-running-system-tests.py"; fi ) &
    - cd bin/tests/system
    - >
      "$PYTEST" --junit-xml="$CI_PROJECT_DIR"/junit.xml -n "$TEST_PARALLEL_JOBS" | tee pytest.out.txt
    - '( ! grep -F "grep: warning:" pytest.out.txt )'
    - test "$CLEAN_BUILD_ARTIFACTS_ON_SUCCESS" -eq 0 || ( cd ../../.. && make clean >/dev/null 2>&1 )
  after_script:
    - test -n "${OUT_OF_TREE_WORKSPACE}" && cd "${OUT_OF_TREE_WORKSPACE}"
    - *display_pytest_failures
# Legacy tests.sh-based system test runner; converts .trs results to JUnit.
.system_test_legacy: &system_test_legacy
  script:
    - cd bin/tests/system
    - find . -maxdepth 2 -mindepth 2 -type f -name "tests.sh" | cut -d/ -f2 | xargs -n 1 -P ${TEST_PARALLEL_JOBS:-1} sh legacy.run.sh
    - if git rev-parse > /dev/null 2>&1; then ( ! grep "^I:.*:file.*not removed$" *.log ); fi
    - '( ! grep -F "grep: warning:" *.log )'
  after_script:
    - test -d bind-* && cd bind-*
    - REALSOURCEDIR="$PWD"
    - test -n "${OUT_OF_TREE_WORKSPACE}" && cd "${OUT_OF_TREE_WORKSPACE}"
    - find bin/tests/system -name "*dig.*" | xargs grep "error" || true
    - *find_python
    - >
      "$PYTHON" "$REALSOURCEDIR"/bin/tests/convert-trs-to-junit.py . > "$CI_PROJECT_DIR"/junit.xml
# Standard system test job with JUnit report artifacts.
.system_test: &system_test_job
  <<: *system_test_common
  artifacts:
    untracked: true
    exclude:
      - "**/__pycache__/**/*"
    when: always
    reports:
      junit: junit.xml
# Variant driving the tests through `make check` instead of pytest directly.
.system_test_make_check: &system_test_make_check_job
  <<: *system_test_common
  script:
    - cd bin/tests/system
    - make -j${TEST_PARALLEL_JOBS:-1} check
  after_script:
    - cat bin/tests/system/test-suite.log || true
# Variant keeping gcov data (artifacts only; no JUnit report section).
.system_test_gcov: &system_test_gcov_job
  <<: *system_test_common
  artifacts:
    untracked: true
    exclude:
      - "**/__pycache__/**/*"
    when: always
# Variant that additionally parses ThreadSanitizer reports after the run.
.system_test_tsan: &system_test_tsan_job
  <<: *system_test_common
  after_script:
    - *display_pytest_failures
    - find bin/tests/system -name "*dig.*" | xargs grep "error" || true
    - *find_python
    - *parse_tsan
    - >
      "$PYTHON" bin/tests/convert-trs-to-junit.py . > "$CI_PROJECT_DIR"/junit.xml
  artifacts:
    untracked: true
    exclude:
      - "**/__pycache__/**/*"
    when: always
    reports:
      junit: junit.xml
# Common setup for unit test jobs: run the `unit` make target and convert
# the .trs results to a JUnit report.
.unit_test_common: &unit_test_common
  <<: *default_triggering_rules
  stage: unit
  before_script:
    - test -n "${OUT_OF_TREE_WORKSPACE}" && cd "${OUT_OF_TREE_WORKSPACE}"
  script:
    - make -j${TEST_PARALLEL_JOBS:-1} -k unit V=1
    - test "$CLEAN_BUILD_ARTIFACTS_ON_SUCCESS" -eq 0 || make clean >/dev/null 2>&1
  after_script:
    - test -d bind-* && cd bind-*
    - REALSOURCEDIR="$PWD"
    - test -n "${OUT_OF_TREE_WORKSPACE}" && cd "${OUT_OF_TREE_WORKSPACE}"
    - *find_python
    - >
      "$PYTHON" "$REALSOURCEDIR"/bin/tests/convert-trs-to-junit.py . > "$CI_PROJECT_DIR"/junit.xml
# Standard unit test job with JUnit report artifacts.
.unit_test: &unit_test_job
  <<: *unit_test_common
  artifacts:
    untracked: true
    when: always
    reports:
      junit: junit.xml
# Variant keeping gcov data (artifacts only; no JUnit report section).
.unit_test_gcov: &unit_test_gcov_job
  <<: *unit_test_common
  artifacts:
    untracked: true
    when: always
# Variant that additionally parses ThreadSanitizer reports after the run.
.unit_test_tsan: &unit_test_tsan_job
  <<: *unit_test_common
  after_script:
    - *find_python
    - *parse_tsan
    - >
      "$PYTHON" bin/tests/convert-trs-to-junit.py . > "$CI_PROJECT_DIR"/junit.xml
  artifacts:
    untracked: true
    when: always
    reports:
      junit: junit.xml
# Documentation build template; DOC_MAKE_TARGET selects the make target and
# the generated man pages are lint-checked with mandoc.
.docs: &docs_job
  variables:
    DOC_MAKE_TARGET: doc
  stage: docs
  script:
    - *configure
    - make -j${BUILD_PARALLEL_JOBS:-1} -k ${DOC_MAKE_TARGET} V=1
    # Known-benign mandoc warnings are filtered out; anything else fails.
    - find doc/man/ -maxdepth 1 -name "*.[0-9]" -exec mandoc -T lint "{}" \; | ( ! grep -v -e "skipping paragraph macro. sp after" -e "unknown font, skipping request. ft C" -e "input text line longer than 80 bytes" )
# Response-difference testing template using the bind9-qa repository.
.respdiff: &respdiff_job
  stage: system
  before_script:
    - autoreconf -fi
    - *configure
    - make -j${BUILD_PARALLEL_JOBS:-1} V=1
    - *setup_interfaces
    - git clone --depth 1 https://gitlab.isc.org/isc-projects/bind9-qa.git
    - cd bind9-qa/respdiff
  needs: []
  artifacts:
    paths:
      - bind9-qa/respdiff
    exclude:
      - bind9-qa/respdiff/rspworkdir/data.mdb # Exclude a 10 GB file.
    untracked: true
    when: always
### Job Definitions
# Jobs in the precheck stage
autoreconf:
  <<: *autoconf_job
# Assorted source-tree hygiene checks.
misc:
  <<: *precheck_job
  script:
    - sh util/checklibs.sh > checklibs.out
    - sh util/check-categories.sh
    - sh util/check-gitignore.sh
    - sh util/check-trailing-whitespace.sh
    - if git grep SYSTEMTESTTOP -- ':!.gitlab-ci.yml'; then echo 'Please use relative paths instead of $SYSTEMTESTTOP.'; exit 1; fi
    - bash util/unused-headers.sh
    - bash util/xmllint-html.sh
  needs: []
  artifacts:
    paths:
      - checklibs.out
    when: on_failure
# Python formatting check; uploads a patch with the needed changes on failure.
black:
  <<: *precheck_job
  needs: []
  script:
    - black $(git ls-files '*.py')
    - git diff > black.patch
    - if test "$(git status --porcelain | grep -Ev '\?\?' | wc -l)" -gt "0"; then git status --short; exit 1; fi
  artifacts:
    paths:
      - black.patch
    expire_in: "1 week"
    when: on_failure
# Dead Python code detection in the system test suite.
vulture:
  <<: *precheck_job
  needs: []
  script:
    - vulture --exclude "*/ans*/ans.py,conftest.py,get_algorithms.py,isctest" --ignore-names "pytestmark" bin/tests/system/
# Derives BIND_BASELINE_VERSION (latest tag of the baseline branch) from
# configure.ac and exports it to later jobs via a dotenv report.
ci-variables:
  stage: precheck
  <<: *precheck_job
  script:
    - export BIND_BASELINE_BRANCH="$(sed -n -E "s|^m4_define\(\[bind_VERSION_MINOR\], ([0-9]+)\)dnl$|\1|p" configure.ac)"
    # When testing a .0 release, compare it against the previous development
    # release (e.g., 9.19.0 and 9.18.0 should both be compared against 9.17.22).
    - if [ "$(sed -n -E "s|^m4_define\(\[bind_VERSION_PATCH\], ([0-9]+)\)dnl$|\1|p" configure.ac)" = "0" ]; then export BIND_BASELINE_BRANCH=$((BIND_BASELINE_BRANCH - 1 - (BIND_BASELINE_BRANCH % 2))); fi
    - BIND_BASELINE_VERSION="$(curl -s "https://gitlab.isc.org/api/v4/projects/1/repository/tags?search=^v9.${BIND_BASELINE_BRANCH}&order_by=version" | jq -r ".[0].name")"
    - echo "BIND_BASELINE_VERSION=$BIND_BASELINE_VERSION" >> ci_vars.env
  needs:
    - job: autoreconf
      artifacts: true
  artifacts:
    reports:
      dotenv: ci_vars.env
# C formatting check; uploads a patch with the needed changes on failure.
clang-format:
  <<: *precheck_job
  needs: []
  script:
    - if [ -r .clang-format ]; then "${CLANG_FORMAT}" -i -style=file $(git ls-files '*.c' '*.h'); fi
    - git diff > clang-format.patch
    - if test "$(git status --porcelain | grep -Ev '\?\?' | wc -l)" -gt "0"; then git status --short; exit 1; fi
  artifacts:
    paths:
      - clang-format.patch
    expire_in: "1 week"
    when: on_failure
# Semantic patch (Coccinelle) checks; fails if they would change anything.
coccinelle:
  <<: *precheck_job
  needs: []
  script:
    - util/check-cocci
    - if test "$(git status --porcelain | grep -Ev '\?\?' | wc -l)" -gt "0"; then git status --short; exit 1; fi
pylint:
  <<: *precheck_job
  needs: []
  variables:
    PYTHONPATH: "${CI_PROJECT_DIR}/bin/tests/system"
  script:
    - pylint --rcfile $CI_PROJECT_DIR/.pylintrc $(git ls-files '*.py' | grep -vE '(ans\.py|dangerfile\.py|^bin/tests/system/|^contrib/)')
    # Ignore Pylint wrong-import-position error in system test to enable use of pytest.importorskip
    - pylint --rcfile $CI_PROJECT_DIR/.pylintrc --disable=wrong-import-position $(git ls-files 'bin/tests/system/*.py' | grep -vE '(ans\.py|vulture_ignore_list\.py)')
# License/copyright header compliance (REUSE specification).
reuse:
  <<: *precheck_job
  needs: []
  image:
    name: docker.io/fsfe/reuse:latest
    entrypoint: [""]
  script:
    - reuse lint
# Shell script formatting check; uploads a patch on failure.
shfmt:
  <<: *precheck_job
  needs: []
  script:
    - shfmt -w -i 2 -ci -bn . $(find . -name "*.sh.in")
    - git diff > shfmt.patch
    - if test "$(git status --porcelain | grep -Ev '\?\?' | wc -l)" -gt "0"; then git status --short; exit 1; fi
  artifacts:
    paths:
      - shfmt.patch
    expire_in: "1 week"
    when: on_failure
danger:
  <<: *precheck_job
  needs: []
  script:
    - pip install git+https://gitlab.isc.org/isc-projects/hazard.git
    - hazard
  # Explicit rules override the rules merged in from the precheck template,
  # restricting this job to merge request pipelines only.
  rules:
    - if: '$CI_PIPELINE_SOURCE == "merge_request_event"'
checkbashisms:
  <<: *precheck_job
  needs: []
  script:
    - checkbashisms $(find . -path './.git' -prune -o -type f -exec sh -c 'head -n 1 "{}" | grep -qsF "#!/bin/sh"' \; -print)
mypy:
  <<: *precheck_job
  script:
    - mypy "bin/tests/system/isctest/"
# Builds the release tarball (`make dist`) after verifying that a
# maintainer-mode build leaves the tree clean; the tarball artifact feeds
# the *:tarball jobs below.
tarball-create:
  stage: precheck
  <<: *base_image
  <<: *default_triggering_rules
  script:
    - ./configure --enable-maintainer-mode
    - make maintainer-clean
    - autoreconf -fi
    - ./configure --enable-maintainer-mode
    - make -j${BUILD_PARALLEL_JOBS:-1} all V=1
    - if test "$(git status --porcelain | grep -Ev '\?\?' | wc -l)" -gt "0"; then git status --short; git diff > diff.patch; exit 1; fi
    - make -j${BUILD_PARALLEL_JOBS:-1} dist V=1
  artifacts:
    paths:
      - diff.patch
      - bind-*.tar.xz
    when: always
  needs:
    - job: autoreconf
      artifacts: true
# Jobs for doc builds on Debian 12 "bookworm" (amd64)
# Validates that the merge request's title/description produce a well-formed
# changelog entry; only runs for MR titles carrying a dev:/usr:/pkg: tag.
changelog:
  <<: *base_image
  <<: *docs_job
  rules:
    - if: '$CI_MERGE_REQUEST_TITLE =~ /\s(dev|usr|pkg):/'
  variables:
    GIT_AUTHOR_NAME: $GITLAB_USER_NAME
    GIT_AUTHOR_EMAIL: $GITLAB_USER_EMAIL
    GIT_COMMITTER_NAME: $GITLAB_USER_NAME
    GIT_COMMITTER_EMAIL: $GITLAB_USER_EMAIL
    DOC_MAKE_TARGET: html
  before_script:
    - echo -e "$CI_MERGE_REQUEST_TITLE\n" > commitmsg
    - sed -i 's/^Draft:\s*//' commitmsg
    - echo -e "$CI_MERGE_REQUEST_DESCRIPTION" >> commitmsg
    - git commit --allow-empty -F commitmsg
    # Append the generated entry to the newest changelog-9.* file.
    - ./contrib/gitchangelog/gitchangelog.py HEAD^..HEAD >> $(ls doc/changelog/changelog-9.* | sort --version-sort | tail -n 1)
  after_script:
    - git diff
  needs:
    - job: autoreconf
      artifacts: true
  artifacts:
    untracked: true
# Checks external links in the ARM; scheduled pipelines only.
linkcheck:
  <<: *base_image
  stage: docs
  script:
    - pushd doc/arm/ > /dev/null && sphinx-build -b linkcheck . linkcheck_output/
  artifacts:
    paths:
      - doc/arm/linkcheck_output/
  rules:
    - if: '$CI_PIPELINE_SOURCE == "schedule"'
  needs: []
docs:
  <<: *default_triggering_rules
  <<: *base_image
  <<: *docs_job
  needs:
    - job: autoreconf
      artifacts: true
  artifacts:
    untracked: true
# Same doc build, but from the extracted release tarball.
docs:tarball:
  <<: *default_triggering_rules
  <<: *base_image
  <<: *docs_job
  before_script:
    - *unpack_release_tarball
  needs:
    - job: tarball-create
      artifacts: true
# PDF build of the ARM; needs a full TeX installation, installed on the fly.
docs:pdf:
  <<: *api_pipelines_schedules_tags_triggers_web_triggering_rules
  <<: *base_image
  stage: docs
  before_script:
    - apt-get update
    - apt-get -y install qpdf texlive-full texlive-xetex xindy
  script:
    - *configure
    - make -C doc/arm/ pdf V=1
    - qpdf --check doc/arm/_build/latex/Bv9ARM.pdf
  needs:
    - job: autoreconf
      artifacts: true
  artifacts:
    untracked: true
# Job detecting named.conf breakage introduced since the previous point release
cross-version-config-tests:
  stage: system
  <<: *base_image
  <<: *default_triggering_rules
  variables:
    CC: gcc
    CFLAGS: "${CFLAGS_COMMON}"
    # Disable option checking to prevent problems with new default options in
    # the &configure anchor.
    EXTRA_CONFIGURE: "--disable-option-checking"
  script:
    - *configure
    - *setup_interfaces
    - make -j${BUILD_PARALLEL_JOBS:-1}
    # Check out and build the most recent baseline release alongside.
    - git clone --branch "${BIND_BASELINE_VERSION}" --depth 1 https://gitlab.isc.org/isc-projects/bind9.git "bind-${BIND_BASELINE_VERSION}"
    - cd "bind-${BIND_BASELINE_VERSION}"
    - autoreconf -fi
    - *configure
    - make -j${BUILD_PARALLEL_JOBS:-1}
    - *find_pytest
    # The cross-version-config-tests job would fail when a system test is
    # removed from the upcoming release. To avoid this, remove the system test
    # also from the $BIND_BASELINE_VERSION.
    - find bin/tests/system/ -mindepth 1 -maxdepth 1 -type d -exec sh -c 'test -e ../"$0" || rm -rfv -- "$0"' {} \;
    - cd bin/tests/system
    # Run the setup phase of all system tests in the most recently tagged BIND 9
    # release using the binaries built for the current BIND 9 version. This
    # intends to detect obvious backward compatibility issues with the latter.
    - sed -i "s|export \(.*\)=\$TOP_BUILDDIR|export \1=\$CI_PROJECT_DIR|" conf.sh
    - >
      "$PYTEST" --setup-only --junit-xml="$CI_PROJECT_DIR"/junit.xml -n "${TEST_PARALLEL_JOBS:-1}"
  needs:
    - job: autoreconf
      artifacts: true
    - job: ci-variables
      artifacts: true
  artifacts:
    reports:
      junit: junit.xml
    paths:
      - bind-*
      - junit.xml
    untracked: true
    expire_in: "1 day"
    when: always
# Jobs for regular GCC builds on Alpine Linux 3.21 (amd64)
gcc:alpine3.21:amd64:
  variables:
    CC: gcc
    CFLAGS: "${CFLAGS_COMMON}"
    EXTRA_CONFIGURE: "${WITHOUT_READLINE}"
  <<: *alpine_3_21_amd64_image
  <<: *build_job
system:gcc:alpine3.21:amd64:
  <<: *alpine_3_21_amd64_image
  <<: *system_test_job
  needs:
    - job: gcc:alpine3.21:amd64
      artifacts: true
unit:gcc:alpine3.21:amd64:
  <<: *alpine_3_21_amd64_image
  <<: *unit_test_job
  needs:
    - job: gcc:alpine3.21:amd64
      artifacts: true
# Jobs for regular GCC builds on Oracle Linux 8 (amd64)
gcc:oraclelinux8:amd64:
  variables:
    CC: gcc
    CFLAGS: "${CFLAGS_COMMON}"
    EXTRA_CONFIGURE: "--with-libidn2"
  <<: *oraclelinux_8_amd64_image
  <<: *build_job
system:gcc:oraclelinux8:amd64:
  <<: *oraclelinux_8_amd64_image
  <<: *system_test_job
  needs:
    - job: gcc:oraclelinux8:amd64
      artifacts: true
unit:gcc:oraclelinux8:amd64:
  <<: *oraclelinux_8_amd64_image
  <<: *unit_test_job
  needs:
    - job: gcc:oraclelinux8:amd64
      artifacts: true
# Jobs for regular GCC builds on Oracle Linux 9 (amd64)
gcc:oraclelinux9:amd64:
  variables:
    CC: gcc
    CFLAGS: "${CFLAGS_COMMON}"
    EXTRA_CONFIGURE: "--with-libidn2 --disable-developer"
  <<: *oraclelinux_9_amd64_image
  <<: *build_job
system:gcc:oraclelinux9:amd64:
  <<: *oraclelinux_9_amd64_image
  <<: *system_test_job
  needs:
    - job: gcc:oraclelinux9:amd64
      artifacts: true
unit:gcc:oraclelinux9:amd64:
  <<: *oraclelinux_9_amd64_image
  <<: *unit_test_job
  needs:
    - job: gcc:oraclelinux9:amd64
      artifacts: true
# Builds the release tarball on a host without sphinx-build, verifying the
# tarball ships pre-built documentation.
gcc:tarball:nosphinx:
  variables:
    CC: gcc
    CFLAGS: "${CFLAGS_COMMON}"
    EXTRA_CONFIGURE: "--with-libidn2 --disable-developer"
    RUN_MAKE_INSTALL: 1
  <<: *oraclelinux_9_amd64_image
  <<: *build_job
  before_script:
    # Assert that sphinx-build is NOT available before building.
    - (! command -v sphinx-build >/dev/null)
    - *unpack_release_tarball
  needs:
    - job: tarball-create
      artifacts: true
# Jobs for regular GCC builds on Debian 12 "bookworm" (amd64)
# This triple collects gcov coverage ("--coverage -O0" in CFLAGS).
gcc:bookworm:amd64:
  variables:
    CC: gcc
    CFLAGS: "${CFLAGS_COMMON} --coverage -O0"
    EXTRA_CONFIGURE: "--with-libidn2 ${WITH_READLINE_LIBEDIT}"
    RUN_MAKE_INSTALL: 1
  <<: *debian_bookworm_amd64_image
  <<: *build_job
system:gcc:bookworm:amd64:
  <<: *debian_bookworm_amd64_image
  <<: *system_test_gcov_job
  variables:
    CI_ENABLE_ALL_TESTS: 1
    CLEAN_BUILD_ARTIFACTS_ON_SUCCESS: 0
  needs: # using artifacts from unit test job is required for gcov
    - job: unit:gcc:bookworm:amd64
      artifacts: true
unit:gcc:bookworm:amd64:
  <<: *debian_bookworm_amd64_image
  <<: *unit_test_gcov_job
  variables:
    CI_ENABLE_ALL_TESTS: 1
    CLEAN_BUILD_ARTIFACTS_ON_SUCCESS: 0
  needs:
    - job: gcc:bookworm:amd64
      artifacts: true
# Build job for cross-compiled GCC builds on 64-bit Debian 12 "bookworm"
# (amd64) with 32-bit BIND 9.
gcc:bookworm:amd64cross32:
  variables:
    CFLAGS: "${CFLAGS_COMMON}"
    # Triggers the cross-compilation assertions in the .build template.
    CROSS_COMPILATION: 1
    EXTRA_CONFIGURE: "--build=x86_64-linux-gnu --host=i686-linux-gnu --with-libidn2 ${WITH_READLINE_LIBEDIT}"
  <<: *debian_bookworm_amd64cross32_image
  <<: *build_job
# Jobs for scan-build builds on Debian 12 "bookworm" (amd64)
# Clang static analyzer invocation; --status-bugs makes the job fail when
# any bug is reported, and the HTML reports are kept on failure.
.scan_build: &scan_build
  - ${SCAN_BUILD} --html-title="BIND 9 ($CI_COMMIT_SHORT_SHA)"
    --keep-cc
    --status-bugs
    --keep-going
    -o scan-build.reports make -j${BUILD_PARALLEL_JOBS:-1} all V=1
scan-build:
  <<: *default_triggering_rules
  <<: *base_image
  stage: postcheck
  variables:
    CC: "${CLANG}"
    CFLAGS: "${CFLAGS_COMMON}"
    # configure itself must also run under scan-build.
    CONFIGURE: "${SCAN_BUILD} ./configure"
    EXTRA_CONFIGURE: "--with-libidn2"
  script:
    - *configure
    - *scan_build
  needs:
    - job: autoreconf
      artifacts: true
  artifacts:
    paths:
      - scan-build.reports/
    when: on_failure
# Jobs for regular GCC builds on Debian "sid" (amd64)
# Also tests configuration option: --without-lmdb.
gcc:sid:amd64:
  variables:
    CC: gcc
    CFLAGS: "${CFLAGS_COMMON} -O3"
    EXTRA_CONFIGURE: "--with-libidn2 --without-lmdb ${WITH_READLINE}"
    RUN_MAKE_INSTALL: 1
  <<: *debian_sid_amd64_image
  <<: *build_job
system:gcc:sid:amd64:
  <<: *debian_sid_amd64_image
  # NOTE(review): two system-test templates are merged here; this relies on
  # the parser's handling of repeated merge keys — confirm the intended
  # combination (make-check script with system-test artifacts) is produced.
  <<: *system_test_job
  <<: *system_test_make_check_job
  needs:
    - job: gcc:sid:amd64
      artifacts: true
unit:gcc:sid:amd64:
  <<: *debian_sid_amd64_image
  <<: *unit_test_job
  needs:
    - job: gcc:sid:amd64
      artifacts: true
# Job for out-of-tree GCC build on Debian 12 "bookworm" (amd64)
# Also tests configuration option: --with-lmdb.
gcc:out-of-tree:
  variables:
    CC: gcc
    CFLAGS: "${CFLAGS_COMMON} -Og"
    # Run configure from the source dir while building in the workspace.
    CONFIGURE: "${CI_PROJECT_DIR}/configure"
    EXTRA_CONFIGURE: "--with-libidn2 --with-lmdb"
    RUN_MAKE_INSTALL: 1
    OUT_OF_TREE_WORKSPACE: workspace
  <<: *base_image
  <<: *build_job
system:gcc:out-of-tree:
  variables:
    OUT_OF_TREE_WORKSPACE: workspace
  needs:
    - job: gcc:out-of-tree
      artifacts: true
  <<: *base_image
  <<: *system_test_job
  <<: *api_pipelines_schedules_tags_triggers_web_triggering_rules
unit:gcc:out-of-tree:
  variables:
    OUT_OF_TREE_WORKSPACE: workspace
  needs:
    - job: gcc:out-of-tree
      artifacts: true
  <<: *base_image
  <<: *unit_test_job
  <<: *api_pipelines_schedules_tags_triggers_web_triggering_rules
# Jobs for tarball GCC builds on Debian 12 "bookworm" (amd64)
# These build/test from the extracted release tarball instead of git.
gcc:tarball:
  variables:
    CC: gcc
    EXTRA_CONFIGURE: "--with-libidn2"
    RUN_MAKE_INSTALL: 1
  <<: *base_image
  <<: *build_job
  before_script:
    - *unpack_release_tarball
  needs:
    - job: tarball-create
      artifacts: true
system:gcc:tarball:
  <<: *base_image
  <<: *system_test_job
  <<: *api_pipelines_schedules_tags_triggers_web_triggering_rules
  before_script:
    # Work inside the extracted tarball directory.
    - cd bind-*
    - *setup_interfaces
  after_script:
    - cd bind-*
    - *display_pytest_failures
  needs:
    - job: gcc:tarball
      artifacts: true
unit:gcc:tarball:
  <<: *base_image
  <<: *unit_test_job
  <<: *api_pipelines_schedules_tags_triggers_web_triggering_rules
  before_script:
    - cd bind-*
  needs:
    - job: gcc:tarball
      artifacts: true
# Jobs for debug GCC builds on openSUSE Tumbleweed (amd64)
gcc:tumbleweed:amd64:
  variables:
    CC: gcc
    CFLAGS: "${CFLAGS_COMMON} -DDEBUG"
    # NOTE: Testing DNSRPS-enabled builds currently requires an
    # operating system with glibc 2.34+. This requirement will go away
    # once the DNSRPS dynamic loading code gets reworked to use libuv's
    # dlopen() API.
    #
    # NOTE: This does *not* enable testing of the DNSRPS feature itself.
    # Doing that requires a DNSRPS provider library to be present on the
    # test host.
    EXTRA_CONFIGURE: "--enable-dnsrps --enable-dnsrps-dl --with-libidn2 ${WITH_READLINE_READLINE}"
  <<: *tumbleweed_latest_amd64_image
  <<: *build_job
system:gcc:tumbleweed:amd64:
  <<: *tumbleweed_latest_amd64_image
  <<: *system_test_job
  needs:
    - job: gcc:tumbleweed:amd64
      artifacts: true
unit:gcc:tumbleweed:amd64:
  <<: *tumbleweed_latest_amd64_image
  <<: *unit_test_job
  needs:
    - job: gcc:tumbleweed:amd64
      artifacts: true
# Jobs for regular GCC builds on Ubuntu 20.04 Focal Fossa (amd64)
gcc:focal:amd64:
  variables:
    CC: gcc
    CFLAGS: "${CFLAGS_COMMON} -Og"
    EXTRA_CONFIGURE: "--disable-geoip --with-libidn2 --disable-doh"
  <<: *ubuntu_focal_amd64_image
  <<: *build_job
system:gcc:focal:amd64:
  <<: *ubuntu_focal_amd64_image
  <<: *system_test_job
  needs:
    - job: gcc:focal:amd64
      artifacts: true
unit:gcc:focal:amd64:
  <<: *ubuntu_focal_amd64_image
  <<: *unit_test_job
  needs:
    - job: gcc:focal:amd64
      artifacts: true
# Jobs for regular GCC builds on Ubuntu 22.04 Jammy Jellyfish (amd64)
gcc:jammy:amd64:
  variables:
    CC: gcc
    CFLAGS: "${CFLAGS_COMMON} -O2"
    EXTRA_CONFIGURE: "--with-libidn2 --disable-dnstap --without-cmocka --without-gssapi"
  <<: *ubuntu_jammy_amd64_image
  <<: *build_job
system:gcc:jammy:amd64:
  <<: *ubuntu_jammy_amd64_image
  <<: *system_test_job
  needs:
    - job: gcc:jammy:amd64
      artifacts: true
unit:gcc:jammy:amd64:
  <<: *ubuntu_jammy_amd64_image
  <<: *unit_test_job
  needs:
    - job: gcc:jammy:amd64
      artifacts: true
# Jobs for regular GCC builds on Ubuntu 24.04 Noble Numbat (amd64)
gcc:noble:amd64:
  variables:
    CC: gcc
    CFLAGS: "${CFLAGS_COMMON}"
    EXTRA_CONFIGURE: "--with-libidn2"
  <<: *ubuntu_noble_amd64_image
  <<: *build_job
system:gcc:noble:amd64:
  <<: *ubuntu_noble_amd64_image
  <<: *system_test_job
  needs:
    - job: gcc:noble:amd64
      artifacts: true
unit:gcc:noble:amd64:
  <<: *ubuntu_noble_amd64_image
  <<: *unit_test_job
  needs:
    - job: gcc:noble:amd64
      artifacts: true
# Jobs for ASAN builds on Fedora 41 (amd64)
# Address + UndefinedBehavior sanitizers; jemalloc is disabled because the
# build is expected to use the sanitizer allocator.
gcc:asan:
  variables:
    CC: gcc
    CFLAGS: "${CFLAGS_COMMON} -fsanitize=address,undefined"
    LDFLAGS: "-fsanitize=address,undefined"
    EXTRA_CONFIGURE: "--with-libidn2 --without-jemalloc"
  <<: *fedora_41_amd64_image
  <<: *build_job
system:gcc:asan:
  variables:
    # LeakSanitizer suppressions for known/accepted leaks.
    LSAN_OPTIONS: "suppressions=$CI_PROJECT_DIR/suppr-lsan.txt"
  <<: *fedora_41_amd64_image
  <<: *system_test_job
  needs:
    - job: gcc:asan
      artifacts: true
unit:gcc:asan:
  <<: *fedora_41_amd64_image
  <<: *unit_test_job
  needs:
    - job: gcc:asan
      artifacts: true
# Same sanitizer combination built with Clang on the base (Debian) image.
clang:asan:
  variables:
    CC: ${CLANG}
    CFLAGS: "${CFLAGS_COMMON} -fsanitize=address,undefined"
    LDFLAGS: "-fsanitize=address,undefined"
    EXTRA_CONFIGURE: "--with-libidn2 --without-jemalloc"
  <<: *base_image
  <<: *build_job
system:clang:asan:
  <<: *base_image
  <<: *system_test_job
  needs:
    - job: clang:asan
      artifacts: true
unit:clang:asan:
  <<: *base_image
  <<: *unit_test_job
  needs:
    - job: clang:asan
      artifacts: true
# Jobs for TSAN builds on Fedora 41 (amd64)
# ThreadSanitizer build; it links against an OpenSSL build installed under
# /opt/tsan (presumably TSAN-instrumented — see the image definition).
gcc:tsan:
  variables:
    CC: gcc
    CFLAGS: "${CFLAGS_COMMON} -Wno-stringop-overread -ggdb -O2 -fsanitize=thread"
    LDFLAGS: "-fsanitize=thread"
    OPENSSL_CFLAGS: -I/opt/tsan/include
    OPENSSL_LIBS: -L/opt/tsan/lib -lssl -lcrypto
    EXTRA_CONFIGURE: "--with-libidn2 --enable-pthread-rwlock --without-jemalloc PKG_CONFIG_PATH=/opt/tsan/lib/pkgconfig"
  <<: *tsan_fedora_41_amd64_image
  <<: *build_job
# TSAN system tests; TSAN_OPTIONS_FEDORA is defined at the top of the file.
system:gcc:tsan:
  variables:
    TSAN_OPTIONS: "${TSAN_OPTIONS_FEDORA}"
  <<: *tsan_fedora_41_amd64_image
  <<: *system_test_tsan_job
  needs:
    - job: gcc:tsan
      artifacts: true
# TSAN unit tests for the gcc:tsan build.
unit:gcc:tsan:
  variables:
    TSAN_OPTIONS: "${TSAN_OPTIONS_FEDORA}"
  <<: *tsan_fedora_41_amd64_image
  <<: *unit_test_tsan_job
  needs:
    - job: gcc:tsan
      artifacts: true
# ThreadSanitizer build with Clang on Debian bookworm; uses the OpenSSL
# build installed under /opt/tsan, like gcc:tsan above.
clang:tsan:
  <<: *tsan_debian_bookworm_amd64_image
  <<: *build_job
  variables:
    CC: "${CLANG}"
    CFLAGS: "${CFLAGS_COMMON} -ggdb -O2 -fsanitize=thread"
    LDFLAGS: "-fsanitize=thread"
    OPENSSL_CFLAGS: -I/opt/tsan/include
    OPENSSL_LIBS: -L/opt/tsan/lib -lssl -lcrypto
    EXTRA_CONFIGURE: "--with-libidn2 --enable-pthread-rwlock --without-jemalloc PKG_CONFIG_PATH=/opt/tsan/lib/pkgconfig"
# TSAN system tests; TSAN_OPTIONS_DEBIAN is defined at the top of the file.
system:clang:tsan:
  variables:
    TSAN_OPTIONS: "${TSAN_OPTIONS_DEBIAN}"
  <<: *tsan_debian_bookworm_amd64_image
  <<: *system_test_tsan_job
  needs:
    - job: clang:tsan
      artifacts: true
# TSAN unit tests for the clang:tsan build.
unit:clang:tsan:
  variables:
    TSAN_OPTIONS: "${TSAN_OPTIONS_DEBIAN}"
  <<: *tsan_debian_bookworm_amd64_image
  <<: *unit_test_tsan_job
  needs:
    - job: clang:tsan
      artifacts: true
# Manually-triggered job that generates a child-pipeline YAML containing TSAN
# stress-test job definitions; its artifact is consumed by the tsan:stress
# trigger job below.
generate-tsan-stress-test-configs:
  <<: *base_image
  <<: *default_triggering_rules
  stage: system
  script:
    - util/generate-tsan-stress-jobs.py > tsan-stress-test-configs.yml
  artifacts:
    paths:
      - tsan-stress-test-configs.yml
  needs: []
  when: manual
# Triggers a child pipeline from the generated TSAN stress-test configuration;
# requires the artifacts of both TSAN build jobs.
tsan:stress:
  <<: *default_triggering_rules
  stage: postcheck
  variables:
    # Expose the parent pipeline ID to the child pipeline.
    PARENT_PIPELINE_ID: $CI_PIPELINE_ID
  trigger:
    include:
      - artifact: tsan-stress-test-configs.yml
        job: generate-tsan-stress-test-configs
  needs:
    - job: generate-tsan-stress-test-configs
      artifacts: true
    - job: gcc:tsan
      artifacts: true
    - job: clang:tsan
      artifacts: true
# Jobs for Clang builds on Debian 12 "bookworm" (amd64)
clang:bookworm:amd64:
  variables:
    CC: ${CLANG}
    # OPENSSL_API_COMPAT=10100 selects the OpenSSL 1.1.0 compatibility level.
    CFLAGS: "${CFLAGS_COMMON} -Wenum-conversion -DOPENSSL_API_COMPAT=10100"
    # See https://gitlab.isc.org/isc-projects/bind9/-/issues/3444
    EXTRA_CONFIGURE: "--without-jemalloc"
    # NOTE(review): presumably makes the build template also run
    # "make install" — consumed by *build_job; confirm against the template.
    RUN_MAKE_INSTALL: 1
  <<: *debian_bookworm_amd64_image
  <<: *build_job
system:clang:bookworm:amd64:
  # Set up environment variables that allow the "keyfromlabel" system test to be run
  variables:
    DEFAULT_OPENSSL_CONF: "/etc/ssl/openssl.cnf"
    OPENSSL_CONF: "/var/tmp/etc/openssl.cnf"
    SOFTHSM2_CONF: "/var/tmp/softhsm2/softhsm2.conf"
    SOFTHSM2_MODULE: "/usr/lib/softhsm/libsofthsm2.so"
  <<: *debian_bookworm_amd64_image
  <<: *system_test_job
  needs:
    - job: clang:bookworm:amd64
      artifacts: true
# Unit tests using the clang:bookworm:amd64 build artifacts.
unit:clang:bookworm:amd64:
  <<: *debian_bookworm_amd64_image
  <<: *unit_test_job
  needs:
    - job: clang:bookworm:amd64
      artifacts: true
# Jobs for Clang builds on FreeBSD 13 (amd64)
# No CC override here: the image's default compiler is used.
clang:freebsd13:amd64:
  variables:
    CFLAGS: "${CFLAGS_COMMON}"
    # Use MIT Kerberos5 for BIND 9 GSS-API support because of FreeBSD Heimdal
    # incompatibility; see https://bugs.freebsd.org/275241.
    EXTRA_CONFIGURE: "${WITH_READLINE_LIBEDIT} --with-gssapi=/usr/local/bin/krb5-config"
    # Explicitly set USER in the FreeBSD runner environment.
    USER: gitlab-runner
  <<: *freebsd_13_amd64_image
  <<: *build_job
system:clang:freebsd13:amd64:
  <<: *freebsd_13_amd64_image
  <<: *system_test_job
  variables:
    USER: gitlab-runner
  needs:
    - job: clang:freebsd13:amd64
      artifacts: true
unit:clang:freebsd13:amd64:
  <<: *freebsd_13_amd64_image
  <<: *unit_test_job
  needs:
    - job: clang:freebsd13:amd64
      artifacts: true
# Jobs for Clang builds on FreeBSD 14 (amd64)
# Parallels the FreeBSD 13 jobs above, but with WITH_READLINE_EDITLINE.
clang:freebsd14:amd64:
  variables:
    CFLAGS: "${CFLAGS_COMMON}"
    # Use MIT Kerberos5 for BIND 9 GSS-API support because of FreeBSD Heimdal
    # incompatibility; see https://bugs.freebsd.org/275241.
    EXTRA_CONFIGURE: "${WITH_READLINE_EDITLINE} --with-gssapi=/usr/local/bin/krb5-config"
    # Explicitly set USER in the FreeBSD runner environment.
    USER: gitlab-runner
  <<: *freebsd_14_amd64_image
  <<: *build_job
system:clang:freebsd14:amd64:
  <<: *freebsd_14_amd64_image
  <<: *system_test_job
  variables:
    USER: gitlab-runner
  needs:
    - job: clang:freebsd14:amd64
      artifacts: true
unit:clang:freebsd14:amd64:
  <<: *freebsd_14_amd64_image
  <<: *unit_test_job
  needs:
    - job: clang:freebsd14:amd64
      artifacts: true
# Jobs for Clang builds on OpenBSD (amd64)
clang:openbsd:amd64:
  variables:
    CC: clang
    USER: gitlab-runner
    EXTRA_CONFIGURE: "--disable-dnstap"
  <<: *openbsd_amd64_image
  <<: *build_job
# Only unit tests are defined for OpenBSD; there is no corresponding
# system test job in this file.
unit:clang:openbsd:amd64:
  <<: *openbsd_amd64_image
  <<: *unit_test_job
  variables:
    USER: gitlab-runner
  needs:
    - job: clang:openbsd:amd64
      artifacts: true
# Job producing a release directory
# Assembles the source tarball plus rendered documentation (HTML/PDF/EPUB)
# into a "<bind-version>-release" directory; created only for tagged commits.
release:
  <<: *base_image
  stage: release
  script:
    - export BIND_DIRECTORY="$(basename bind-*.tar.xz ".tar.xz")"
    # Prepare release tarball contents (tarballs + documentation)
    - mkdir -p "${BIND_DIRECTORY}-release/doc/arm"
    - pushd "${BIND_DIRECTORY}-release"
    - mv "../${BIND_DIRECTORY}.tar.xz" .
    # Unpack the tarball to copy out its top-level metadata files, then
    # delete the extracted tree again.
    - tar --extract --file="${BIND_DIRECTORY}.tar.xz"
    - mv "${BIND_DIRECTORY}"/{COPYRIGHT,LICENSE,README.md,srcid} .
    - rm -rf "${BIND_DIRECTORY}"
    - mv "../doc/arm/_build/html" doc/arm/
    - mv "../doc/arm/_build/latex/Bv9ARM.pdf" doc/arm/
    - mv "../doc/arm/_build/epub/Bv9ARM.epub" doc/arm/
    # Top-level HTML stubs that redirect into the ARM documentation.
    - echo '<!DOCTYPE HTML><html lang="en"><meta http-equiv="refresh" content="0; url=doc/arm/html/notes.html"><title>Redirect</title></html>' > "RELEASE-NOTES-${BIND_DIRECTORY}.html"
    - echo '<!DOCTYPE HTML><html lang="en"><meta http-equiv="refresh" content="0; url=doc/arm/html/changelog.html"><title>Redirect</title></html>' > "CHANGELOG-${BIND_DIRECTORY}.html"
    - popd
  needs:
    - job: tarball-create
      artifacts: true
    - job: docs
      artifacts: true
    - job: docs:pdf
      artifacts: true
  rules:
    # Only run for tagged commits.
    - if: '$CI_COMMIT_TAG != null'
  artifacts:
    paths:
      - "*-release"
    expire_in: "1 month"
# Job signing the source tarballs in the release directory
# Runs manually on a dedicated "signer" runner, tag-only. It writes a helper
# script to /tmp for the operator and then blocks until every *.tar.xz in the
# release directory has a non-empty detached *.asc signature next to it.
sign:
  stage: release
  tags:
    - signer
  script:
    - export RELEASE_DIRECTORY="$(echo *-release)"
    - pushd "${RELEASE_DIRECTORY}"
    # Emit the signing helper (backslash-escaped variables are expanded when
    # the helper itself runs, not here), print instructions, then poll every
    # 10 seconds until the set of non-empty .asc files matches the tarballs.
    - |
      echo
      cat > /tmp/sign-bind9.sh <<EOF
      #!/bin/sh
      {
        for FILE in \$(find "${PWD}" -name "*.tar.xz" | sort); do
          echo ">>> Signing \${FILE}..."
          gpg2 --local-user "\${SIGNING_KEY_FINGERPRINT}" --armor --digest-algo SHA512 --detach-sign --output "\${FILE}.asc" "\${FILE}"
        done
      } 2>&1 | tee "${CI_PROJECT_DIR}/signing.log"
      EOF
      chmod +x /tmp/sign-bind9.sh
      echo -e "\e[31m*** Please sign the releases by following the instructions at:\e[0m"
      echo -e "\e[31m*** \e[0m"
      echo -e "\e[31m*** ${SIGNING_HELP_URL}\e[0m"
      echo -e "\e[31m*** \e[0m"
      echo -e "\e[31m*** Sleeping until files in ${PWD} are signed... ⌛\e[0m"
      while [ "$(find . -name "*.asc" -size +0 | sed "s|\.asc$||" | sort)" != "$(find . -name "*.tar.xz" | sort)" ]; do sleep 10; done
    - popd
    # Bundle the now-signed release directory into a single artifact.
    - tar --create --file="${RELEASE_DIRECTORY}.tar.gz" --gzip "${RELEASE_DIRECTORY}"
  artifacts:
    paths:
      - "*.tar.gz"
      - signing.log
    expire_in: never
  needs:
    - job: release
      artifacts: true
  rules:
    - if: '$CI_COMMIT_TAG != null'
      when: manual
      allow_failure: false
# Coverity Scan analysis upload
# Script anchor: downloads the Coverity analysis tool for this project and
# verifies the download against the published md5 checksum before unpacking.
.coverity_prep: &coverity_prep
  - curl --output /tmp/cov-analysis-linux64.md5 https://scan.coverity.com/download/linux64
    --form project=$COVERITY_SCAN_PROJECT_NAME
    --form token=$COVERITY_SCAN_TOKEN
    --form md5=1
  - curl --output /tmp/cov-analysis-linux64.tgz https://scan.coverity.com/download/linux64
    --form project=$COVERITY_SCAN_PROJECT_NAME
    --form token=$COVERITY_SCAN_TOKEN
  - test "$(md5sum /tmp/cov-analysis-linux64.tgz | awk '{ print $1 }')" = "$(cat /tmp/cov-analysis-linux64.md5)"
  - tar --extract --gzip --file=/tmp/cov-analysis-linux64.tgz --directory=/tmp
  - test -d /tmp/cov-analysis-linux64-*
# Script anchor: runs make under cov-build, uploads the capture to Coverity
# Scan, and fails unless the server confirms a successful submission.
.coverity_build: &coverity_build
  - /tmp/cov-analysis-linux64-*/bin/cov-build --dir /tmp/cov-int --fs-capture-search . sh -c 'make -j${BUILD_PARALLEL_JOBS:-1} -k all V=1'
  - tar --create --gzip --file=/tmp/cov-int.tar.gz --directory /tmp cov-int
  - curl -v https://scan.coverity.com/builds?project=$COVERITY_SCAN_PROJECT_NAME
    --form token=$COVERITY_SCAN_TOKEN
    --form email=bind-changes@isc.org
    --form file=@/tmp/cov-int.tar.gz
    --form version="$(git rev-parse --short HEAD)"
    --form description="$(git rev-parse --short HEAD) / $CI_COMMIT_TITLE / $CI_COMMIT_REF_NAME:$CI_PIPELINE_ID" 2>&1
    | tee curl-response.txt
  - grep -q 'Build successfully submitted' curl-response.txt
# Coverity Scan job; created only when both the project name and the token
# are configured as CI/CD variables.
coverity:
  <<: *base_image
  stage: postcheck
  variables:
    CC: gcc
    CFLAGS: "${CFLAGS_COMMON} -Og"
    EXTRA_CONFIGURE: "--with-libidn2"
  script:
    - *coverity_prep
    # *configure is a script anchor defined earlier in this file.
    - *configure
    - *coverity_build
  after_script:
    - mv -v /tmp/cov-int.tar.gz ${CI_PROJECT_DIR}/
  needs:
    - job: autoreconf
      artifacts: true
  artifacts:
    paths:
      - curl-response.txt
      - cov-int.tar.gz
    expire_in: "1 week"
    # Keep the intermediate data only when the job fails.
    when: on_failure
  rules:
    - if: '$COVERITY_SCAN_PROJECT_NAME != null && $COVERITY_SCAN_TOKEN != null'
# Respdiff tests
# Compares responses from the freshly built named against a pre-installed
# reference BIND build; the disagreement threshold is consumed by the
# *respdiff_job template (defined earlier in this file).
respdiff:
  <<: *respdiff_job
  <<: *default_triggering_rules
  <<: *debian_bookworm_amd64_image
  variables:
    CC: gcc
    CFLAGS: "${CFLAGS_COMMON} -Og"
    MAX_DISAGREEMENTS_PERCENTAGE: "0.15"
  script:
    - bash respdiff.sh -s named -q "${PWD}/100k_mixed.txt" -c 3 -w "${PWD}/rspworkdir" "${CI_PROJECT_DIR}" "/usr/local/respdiff-reference-bind/sbin/named"
    # Clean up the build tree afterwards.
    - cd ../.. && make clean >/dev/null 2>&1
# Respdiff run against an ASAN+UBSAN-instrumented build.
respdiff:asan:
  <<: *respdiff_job
  <<: *default_triggering_rules
  <<: *debian_bookworm_amd64_image
  variables:
    CC: gcc
    CFLAGS: "${CFLAGS_COMMON} -Og -fsanitize=address,undefined"
    LDFLAGS: "-fsanitize=address,undefined"
    EXTRA_CONFIGURE: "--disable-dnsrps --without-jemalloc"
    MAX_DISAGREEMENTS_PERCENTAGE: "0.15"
  script:
    - bash respdiff.sh -s named -q "${PWD}/100k_mixed.txt" -c 3 -w "${PWD}/rspworkdir" "${CI_PROJECT_DIR}" "/usr/local/respdiff-reference-bind/sbin/named"
    # Clean up the build tree afterwards.
    - cd ../.. && make clean >/dev/null 2>&1
# Respdiff run against a ThreadSanitizer-instrumented build; TSAN reports
# are parsed in after_script via anchors defined earlier in this file.
respdiff:tsan:
  <<: *respdiff_job
  <<: *default_triggering_rules
  <<: *tsan_debian_bookworm_amd64_image
  variables:
    CC: gcc
    CFLAGS: "${CFLAGS_COMMON} -Og -fsanitize=thread"
    LDFLAGS: "-fsanitize=thread"
    OPENSSL_CFLAGS: -I/opt/tsan/include
    OPENSSL_LIBS: -L/opt/tsan/lib -lssl -lcrypto
    EXTRA_CONFIGURE: "--disable-dnsrps --enable-pthread-rwlock --without-jemalloc PKG_CONFIG_PATH=/opt/tsan/lib/pkgconfig"
    MAX_DISAGREEMENTS_PERCENTAGE: "0.15"
    TSAN_OPTIONS: "${TSAN_OPTIONS_DEBIAN}"
  script:
    - bash respdiff.sh -s named -q "${PWD}/100k_mixed.txt" -c 3 -w "${PWD}/rspworkdir" "${CI_PROJECT_DIR}" "/usr/local/respdiff-reference-bind/sbin/named"
    # Clean up the build tree afterwards.
    - cd ../.. && make clean >/dev/null 2>&1
  after_script:
    - *find_python
    - *parse_tsan
# Respdiff run using the "third_party" scenario (single run, -c 1) with a
# slightly higher disagreement threshold than the named scenarios above.
respdiff-third-party:
  <<: *respdiff_job
  <<: *default_triggering_rules
  <<: *debian_bookworm_amd64_image
  variables:
    CC: gcc
    CFLAGS: "${CFLAGS_COMMON} -Og"
    MAX_DISAGREEMENTS_PERCENTAGE: "0.2"
  script:
    - bash respdiff.sh -s third_party -q "${PWD}/100k_mixed.txt" -c 1 -w "${PWD}/rspworkdir" "${CI_PROJECT_DIR}"
    # Clean up the build tree afterwards.
    - cd ../.. && make clean >/dev/null 2>&1
# Performance tests
# Run shotgun:udp right away, but delay other shotgun jobs slightly in order to
# allow re-use of the built container image. Otherwise, the jobs would do the
# same builds in parallel rather than re-use the already built image.
shotgun:udp:
  <<: *shotgun_job
  variables:
    SHOTGUN_SCENARIO: udp
    # Quoted: GitLab CI/CD variable values are strings; quoting avoids YAML
    # integer typing and matches the quoting style used elsewhere in this
    # file (e.g. MAX_DISAGREEMENTS_PERCENTAGE: "0.15").
    SHOTGUN_TRAFFIC_MULTIPLIER: "10"
# TCP performance scenario; see the comment above shotgun:udp about job
# scheduling and image re-use.
shotgun:tcp:
  <<: *shotgun_job
  variables:
    SHOTGUN_SCENARIO: tcp
    # Quoted: GitLab CI/CD variable values are strings; quoting avoids YAML
    # integer typing and matches the rest of this file.
    SHOTGUN_TRAFFIC_MULTIPLIER: "4"
# DNS-over-TLS performance scenario. The rules list is anchored so that
# shotgun:doh-get below can reuse it verbatim.
shotgun:dot:
  <<: *shotgun_job
  variables:
    SHOTGUN_SCENARIO: dot
    # Quoted: GitLab CI/CD variable values are strings; quoting avoids YAML
    # integer typing and matches the rest of this file.
    SHOTGUN_TRAFFIC_MULTIPLIER: "2"
  rules: &shotgun_rules_manual_mr
    - if: '$CI_MERGE_REQUEST_DIFF_BASE_SHA != null'
      variables:
        BASELINE: '$CI_MERGE_REQUEST_DIFF_BASE_SHA'
      when: manual # don't run on each MR unless requested
      allow_failure: true
    - *shotgun_rule_tag
    - *shotgun_rule_other
# DNS-over-HTTPS (GET) performance scenario; reuses the manual-MR rules
# anchored on shotgun:dot above.
shotgun:doh-get:
  <<: *shotgun_job
  variables:
    SHOTGUN_SCENARIO: doh-get
    # Quoted: GitLab CI/CD variable values are strings; "0.3" would otherwise
    # be parsed as a YAML float. Matches the rest of this file.
    SHOTGUN_TRAFFIC_MULTIPLIER: "3"
    SHOTGUN_EVAL_THRESHOLD_LATENCY_PCTL_MAX: "0.3" # bump from the default due to increased tail-end jitter
  rules: *shotgun_rules_manual_mr
# Hidden job template carrying only the performance stage.
# NOTE(review): the &stress_test anchor is not referenced in this part of the
# file — presumably consumed by generated/child-pipeline YAML; verify before
# removing.
.stress-test: &stress_test
  stage: performance
# Generates a child-pipeline YAML describing the stress-test jobs; the
# artifact is consumed by the stress-test-child-pipeline trigger job below.
generate-stress-test-configs:
  <<: *base_image
  <<: *default_triggering_rules
  stage: precheck
  script:
    - util/generate-stress-test-configs.py > stress-test-configs.yml
  artifacts:
    paths:
      - stress-test-configs.yml
  needs: []
# Triggers a child pipeline from the generated stress-test configuration.
stress-test-child-pipeline:
  <<: *default_triggering_rules
  stage: performance
  trigger:
    include:
      - artifact: stress-test-configs.yml
        job: generate-stress-test-configs
  needs:
    - job: generate-stress-test-configs
      artifacts: true
# Aggregates code-coverage data produced by the system:gcc:bookworm:amd64 job
# and publishes it in text, HTML and Cobertura XML formats.
gcov:
  <<: *base_image
  <<: *default_triggering_rules
  stage: postcheck
  needs:
    - job: system:gcc:bookworm:amd64
      artifacts: true
  script:
    # Ensure gcov files for unit tests are found via tests/ rather than
    # lib/*/tests/ to prevent confusing gcovr.
    - find lib/ -name tests -type l -delete
    # The "a-conftest.gcno" file is result of the ./configure step and
    # should be removed as it does not belong to the BIND 9 code base.
    - rm a-conftest.gcno
    # Generate XML file in the Cobertura XML format suitable for use by GitLab
    # for the purpose of displaying code coverage information in the diff view
    # of a given merge request.
    - gcovr --exclude-directories bin/tests --exclude-directories doc --exclude-directories fuzz --exclude tests --cobertura-pretty -o coverage.xml
    - gcovr --exclude-directories bin/tests --exclude-directories doc --exclude-directories fuzz --exclude tests --html-details -o coverage.html
    - gcovr --exclude-directories bin/tests --exclude-directories doc --exclude-directories fuzz --exclude tests --txt -o coverage.txt
    # Show the summary lines in the job log.
    - tail -n 3 coverage.txt
  # Regex GitLab uses to extract the total coverage percentage from the log.
  coverage: /^TOTAL.*\s+(\d+\%)$/
  artifacts:
    paths:
      - coverage*.html
      - coverage.css
      - coverage.txt
      - coverage.xml
    reports:
      coverage_report:
        coverage_format: cobertura
        path: coverage.xml
# Pairwise testing of ./configure options
# Created only when the PAIRWISE_TESTING variable is set; the output files
# are uploaded only when the job fails (for debugging).
pairwise:
  <<: *base_image
  stage: build
  needs:
    - job: autoreconf
      artifacts: true
  script:
    - util/pairwise-testing.sh
  artifacts:
    paths:
      - pairwise-commands.txt
      - pairwise-model.txt
      - pairwise-output.*.txt
    when: on_failure
  rules:
    - if: '$PAIRWISE_TESTING != null'
# Template for automation jobs that run after a merge request is merged.
# The before_script aborts on force-pushes and extracts the merged MR's ID
# from the merge commit message for the concrete jobs below.
.post_merge_template: &post_merge
  <<: *base_image
  stage: postmerge
  needs: []
  # post-merge processes should run even if another MR was merged while the job was running (or queued)
  interruptible: false
  variables:
    # automated commits will inherit identification from the user who pressed Merge button
    GIT_COMMITTER_NAME: $GITLAB_USER_NAME
    GIT_COMMITTER_EMAIL: $GITLAB_USER_EMAIL
    # avoid leftover branches from previous jobs
    GIT_STRATEGY: clone
    # assumed max depth of a MR for backport or a rebased force-push
    GIT_DEPTH: 1000
  before_script:
    # force-pushes should not trigger process automation (happens only in -sub branches)
    - >
      echo "previous branch tip: $CI_COMMIT_BEFORE_SHA"
    # Stop when the previous tip is not reachable from the current history,
    # i.e. the branch was force-pushed.
    - set +o pipefail; git log --format='%H' | grep --silent "$CI_COMMIT_BEFORE_SHA" && PREVIOUS_TIP_REACHABLE=1
    - test "$PREVIOUS_TIP_REACHABLE" != "1" && echo "force-push detected, stop" && exit 1
    # non-fast-forward merges are disabled so we have to have merge commit on top
    # Extract the MR number from the "See merge request ..." trailer of the
    # merge commit body.
    - MERGE_REQUEST_ID="$(git log -1 --format='%b' | sed --silent -e "s|^See merge request ${CI_PROJECT_PATH}\!||p")"
    - >
      : stop if this is not a merge request in the current project\'s namespace
    - test -n "$MERGE_REQUEST_ID"
    # Fetch the release-engineering helper scripts used by the jobs below.
    - git clone --depth 1 https://gitlab.isc.org/isc-projects/bind9-qa.git
# Opens backport merge requests after a merge to a maintenance branch or the
# default branch.
backports:
  <<: *post_merge
  rules:
    # -sub branches are handled manually
    # The "." is escaped so the pattern matches only real maintenance
    # branches such as "bind-9.18" (an unescaped "." matches any character).
    - if: '$CI_PIPELINE_SOURCE == "push" && ($CI_COMMIT_REF_NAME =~ /^bind-9\.[0-9]+$/ || $CI_COMMIT_REF_NAME == $CI_DEFAULT_BRANCH)'
  script:
    # CI job token is not sufficient for push operations
    - git remote get-url origin | sed -e "s/gitlab-ci-token:$CI_JOB_TOKEN/oauth2:$BIND_TEAM_WRITE_TOKEN/" | xargs git remote set-url --push origin
    - bind9-qa/releng/backport_mr.py $CI_PROJECT_ID "$MERGE_REQUEST_ID"
# Updates merge-request metadata after a merge; also covers -sub branches and
# release branches, unlike the backports job.
merged-metadata:
  <<: *post_merge
  rules:
    # The "." characters are escaped so they match only literal dots (an
    # unescaped "." would match any character, e.g. "bind-9x18").
    - if: '$CI_PIPELINE_SOURCE == "push" && ($CI_COMMIT_REF_NAME =~ /^bind-9\.[0-9]+(-sub)?$/ || $CI_COMMIT_REF_NAME =~ /^v9\.[0-9]+\.[0-9]+-release$/ || $CI_COMMIT_REF_NAME == $CI_DEFAULT_BRANCH)'
  script:
    - bind9-qa/releng/after_merge.py "$CI_PROJECT_ID" "$MERGE_REQUEST_ID"