---
# Main environment vars to set for all tasks
env:
FEDORA_NAME: "fedora-38"
FEDORA_PRIOR_NAME: "fedora-37"
DEBIAN_NAME: "debian-11"
UBUNTU_LATEST_NAME: "ubuntu-24.04"
UBUNTU_NAME: "ubuntu-24.04"
UBUNTU_PRIOR_NAME: "ubuntu-22.04"
UBUNTU_PRIOR2_NAME: "ubuntu-20.04"
UBUNTU_PRIOR3_NAME: "ubuntu-18.04"
CENTOS_9_NAME: "centos-stream-9"
CENTOS_8_NAME: "centos-stream-8"
CENTOS_PROJECT: "centos-cloud"
DEBIAN_PROJECT: "debian-cloud"
FEDORA_PROJECT: "fedora-cloud"
SOS_PROJECT: "sos-devel-jobs"
UBUNTU_PROJECT: "ubuntu-os-cloud"
UBUNTU_DEVEL_PROJECT: "ubuntu-os-cloud-devel"
# Images exist on GCP already
CENTOS_9_IMAGE_NAME: "centos-stream-9-v20240415"
CENTOS_8_IMAGE_NAME: "centos-stream-8-v20240415"
DEBIAN_IMAGE_NAME: "debian-11-bullseye-v20230809"
FEDORA_IMAGE_NAME: "fedora-cloud-base-gcp-38-1-6-x86-64"
FEDORA_PRIOR_IMAGE_NAME: "fedora-cloud-base-gcp-37-1-7-x86-64"
UBUNTU_DEB_IMAGE_NAME: "ubuntu-minimal-2404-noble-amd64-v20240423"
UBUNTU_LATEST_IMAGE_NAME: "ubuntu-2404-noble-amd64-v20240423"
UBUNTU_IMAGE_NAME: "ubuntu-2404-noble-amd64-v20240423"
UBUNTU_PRIOR_IMAGE_NAME: "ubuntu-2204-jammy-v20240501"
UBUNTU_PRIOR2_IMAGE_NAME: "ubuntu-2004-focal-v20240426"
UBUNTU_PRIOR3_IMAGE_NAME: "ubuntu-1804-bionic-v20240116a"
UBUNTU_SNAP_IMAGE_NAME: "ubuntu-2204-jammy-v20240501"
UBUNTU_DEVEL_FAMILY_NAME: "ubuntu-2410-amd64"
# Curl command prefix for downloading task artifacts; simply add the
# url-encoded task name, artifact name, and path as a suffix (see the
# example below). This approach was originally seen in the podman project.
ARTCURL: >-
curl --fail --location -O
--url https://api.cirrus-ci.com/v1/artifact/build/${CIRRUS_BUILD_ID}
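# Example: "$ARTCURL/deb%20Build%20From%20Checkout/packages/sos_cirrus.deb"
# downloads the deb artifact produced by the deb_build task below.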
# Default task timeout
timeout_in: 30m
# Enable auto-cancelling of concurrent builds on main when multiple PRs are
# merged at once
auto_cancellation: true
gcp_credentials: ENCRYPTED[!77d4c8251094346c41db63cb05eba2ff98eaff04e58c5d0e2a8e2c6f159f7d601b3fe9a2a4fce1666297e371f2fc8752!]
# Run a simple lint on the community cluster
flake8_task:
alias: "flake8_test"
name: "Flake8 linting test"
container:
image: alpine/flake8:latest
setup_script: |
apk update
apk add --upgrade py3-tox
flake_script: tox -e flake8
pylint_task:
alias: "pylint_test"
name: "pylint linting test"
allow_failures: true
container:
image: "python:latest"
setup_script: |
apt update
apt -y install tox
pylint_script: tox -e pylint
# Run a check on newer upstream python versions to check for possible
# breaks/changes in common modules. This is not meant to check any of the actual
# collections or archive integrity.
py_break_task:
alias: "py_break"
name: "Breakage test python-$PY_VERSION"
container:
image: "python:${PY_VERSION}"
matrix:
- env:
PY_VERSION: "latest"
- env:
PY_VERSION: "3.9"
# This image has 2 py environments. Install to the one sos uses.
setup_script: pip3 install -t /usr/lib/python3/dist-packages -r requirements.txt
main_script: ./bin/sos report --batch
# Make sure a user can manually build an rpm from the checkout
rpm_build_task:
alias: "rpm_build"
name: "rpm Build From Checkout - ${BUILD_NAME}"
gce_instance: &standardvm
image_project: "${PROJECT}"
image_name: "${VM_IMAGE_NAME}"
type: e2-medium
matrix:
- env: &centos9
PROJECT: ${CENTOS_PROJECT}
BUILD_NAME: ${CENTOS_9_NAME}
VM_IMAGE_NAME: ${CENTOS_9_IMAGE_NAME}
- env: &centos8
PROJECT: ${CENTOS_PROJECT}
BUILD_NAME: ${CENTOS_8_NAME}
VM_IMAGE_NAME: ${CENTOS_8_IMAGE_NAME}
- env: &fedora
PROJECT: ${FEDORA_PROJECT}
BUILD_NAME: ${FEDORA_NAME}
VM_IMAGE_NAME: ${FEDORA_IMAGE_NAME}
- env: &fedoraprior
PROJECT: ${FEDORA_PROJECT}
BUILD_NAME: ${FEDORA_PRIOR_NAME}
VM_IMAGE_NAME: ${FEDORA_PRIOR_IMAGE_NAME}
setup_script: |
dnf clean all
dnf -y install rpm-build rpmdevtools gettext python3-devel
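# Build a source rpm, let dnf resolve its BuildRequires, then build the
# binary rpm to prove the spec builds from a bare checkout.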
main_script: |
mkdir -p /rpmbuild/{BUILD,BUILDROOT,RPMS,SRPMS,SOURCES}
python3 setup.py sdist
cp dist/sos*.tar.gz /rpmbuild/SOURCES/
rpmbuild -bs sos.spec
dnf -y builddep /rpmbuild/SRPMS/sos*src.rpm
rpmbuild -bb sos.spec
# Retrieving the built rpm in later tasks requires knowing the exact name
# of the file. To avoid having to juggle version numbers here, rename it
prep_artifacts_script: mv /rpmbuild/RPMS/noarch/sos-*.rpm ./sos_${BUILD_NAME}.rpm
packages_artifacts:
path: ./sos_${BUILD_NAME}.rpm
type: application/octet-stream
# Make sure a user can manually build a deb from the checkout
deb_build_task:
alias: "deb_build"
name: "deb Build From Checkout"
gce_instance:
image_project: "${UBUNTU_PROJECT}"
image_name: "${UBUNTU_DEB_IMAGE_NAME}"
type: e2-medium
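# mk-build-deps (from devscripts/equivs) converts the Build-Depends in
# debian/control into an installable metapackage, so apt can pull in the
# build dependencies.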
setup_script: |
apt update --allow-releaseinfo-change
apt -y install devscripts equivs python3-pip
mk-build-deps
apt -y install ./sosreport-build-deps*.deb
pip3 install -r test-requirements.txt --break-system-packages
main_script: |
dpkg-buildpackage -b -us -uc -rfakeroot -m --build-by="noreply@canonical.com"
prep_artifacts_script: mv ../*.deb ./sos_cirrus.deb
packages_artifacts:
path: ./sos_cirrus.deb
type: application/octet-stream
# Make sure a user can manually build a snap from the checkout
snap_build_task:
alias: "snap_build"
name: "snap Build From Checkout"
gce_instance:
image_project: "${UBUNTU_PROJECT}"
image_name: "${UBUNTU_SNAP_IMAGE_NAME}"
type: e2-medium
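# The sed below hard-codes "version: test" so the build does not depend on
# the adopt-info/set-version handling in snapcraft.yaml.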
setup_script: |
apt update
apt -y install snapd
systemctl start snapd
sed -i -e 's/adopt-info.*/version: test/g' -e '/set version/d' snap/snapcraft.yaml
snap install snapcraft --classic
main_script: |
snapcraft --destructive-mode
packages_artifacts:
path: "*.snap"
on_failure:
fail_script: |
ls -d /root/.cache/snapcraft/log 2> /dev/null | xargs tar cf snap-build-fail-logs.tar
log_artifacts:
path: "snap-build-fail-logs.tar"
# Run the stage one (no mocking) tests across all distros on GCP
report_stageone_task:
alias: "stageone_report"
name: "Report Stage One - $BUILD_NAME"
depends_on:
- rpm_build
- snap_build
- deb_build
gce_instance: *standardvm
matrix:
- env: *centos9
- env: *centos8
- env: *fedora
- env: *fedoraprior
- env: &ubuntu
PKG: "snap"
PROJECT: ${UBUNTU_PROJECT}
BUILD_NAME: "${UBUNTU_NAME} - ${PKG}"
VM_IMAGE_NAME: ${UBUNTU_IMAGE_NAME}
- env: &ubuntuprior
PKG: "snap"
PROJECT: ${UBUNTU_PROJECT}
BUILD_NAME: "${UBUNTU_PRIOR_NAME} - ${PKG}"
VM_IMAGE_NAME: ${UBUNTU_PRIOR_IMAGE_NAME}
- env: &ubuntuprior2
PKG: "snap"
PROJECT: ${UBUNTU_PROJECT}
BUILD_NAME: "${UBUNTU_PRIOR2_NAME} - ${PKG}"
VM_IMAGE_NAME: ${UBUNTU_PRIOR2_IMAGE_NAME}
- env: &ubuntuprior3
PKG: "snap"
PROJECT: ${UBUNTU_PROJECT}
BUILD_NAME: "${UBUNTU_PRIOR3_NAME} - ${PKG}"
VM_IMAGE_NAME: ${UBUNTU_PRIOR3_IMAGE_NAME}
- env: &ubuntu-latest
PKG: "deb"
PROJECT: ${UBUNTU_PROJECT}
BUILD_NAME: "${UBUNTU_LATEST_NAME} - ${PKG}"
VM_IMAGE_NAME: ${UBUNTU_LATEST_IMAGE_NAME}
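# Shared setup: purge any distro-packaged sos/sosreport, install the snap,
# deb, or rpm artifact built by the earlier packaging tasks, then install the
# test requirements (adding --break-system-packages where pip enforces PEP 668).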
setup_script: &setup |
if [ $(command -v apt) ]; then
apt -y purge sosreport
apt update --allow-releaseinfo-change
apt -y install python3-pip snapd
if [ ${PKG} == "snap" ] ; then
echo "$ARTCURL/snap%20Build%20From%20Checkout/packages/sosreport_test_amd64.snap"
$ARTCURL/snap%20Build%20From%20Checkout/packages/sosreport_test_amd64.snap
systemctl start snapd
snap install ./sosreport_test_amd64.snap --classic --dangerous
snap alias sosreport.sos sos
elif [ ${PKG} == "deb" ]; then
echo "$ARTCURL/deb%20Build%20From%20Checkout/packages/sos_cirrus.deb"
$ARTCURL/deb%20Build%20From%20Checkout/packages/sos_cirrus.deb
apt -y install ./sos_cirrus.deb
fi
fi
if [ $(command -v dnf) ]; then
echo "$ARTCURL/rpm%20Build%20From%20Checkout%20-%20${BUILD_NAME}/packages/sos_${BUILD_NAME}.rpm"
$ARTCURL/rpm%20Build%20From%20Checkout%20-%20${BUILD_NAME}/packages/sos_${BUILD_NAME}.rpm
dnf -y remove sos
dnf -y install python3-pip ethtool
dnf -y install ./sos_${BUILD_NAME}.rpm
fi
PIP_EXTRA=""
[[ $(pip3 install --help | grep break-system) ]] && PIP_EXTRA="--break-system-packages"
pip3 install -r test-requirements.txt ${PIP_EXTRA}
# run the unittests separately as they require a different PYTHONPATH in
# order for the imports to work properly under avocado
unittest_script: &unit_test |
PYTHONPATH=. avocado run tests/unittests/
main_script: &stageone_test |
PYTHONPATH=tests/ avocado run -p TESTLOCAL=true --max-parallel-tasks=1 -t stageone tests/{cleaner,collect,report,vendor}_tests
on_failure:
fail_script: &faillogs |
ls -d /var/tmp/avocado* /root/avocado* 2> /dev/null | xargs tar cf sos-fail-logs.tar
log_artifacts: &logs
path: "sos-fail-logs.tar"
report_stageone_daily_task:
alias: "stageone_daily_report"
name: "Report Stage One - ${UBUNTU_DEVEL_FAMILY_NAME}"
allow_failures: true
depends_on:
- snap_build
gce_instance:
image_project: ${UBUNTU_DEVEL_PROJECT}
image_family: ${UBUNTU_DEVEL_FAMILY_NAME}
type: e2-medium
environment:
PKG: "snap"
setup_script: *setup
unittest_script: *unit_test
main_script: *stageone_test
on_failure:
fail_script: *faillogs
log_artifacts: *logs
# IFF the stage one tests all pass, then run stage two for latest distros
report_stagetwo_task:
alias: "stagetwo_report"
name: "Report Stage Two - $BUILD_NAME"
depends_on: stageone_report
timeout_in: 45m
gce_instance: *standardvm
matrix:
- env: *centos9
- env: *centos8
- env: *fedora
- env: *ubuntu
- env: *ubuntu-latest
setup_script: *setup
install_pexpect_script: |
if [ $(command -v apt) ]; then
apt -y install python3-pexpect
fi
if [ $(command -v dnf) ]; then
dnf -y install python3-pexpect
fi
main_script: &stagetwo_test |
PYTHONPATH=tests/ avocado run -p TESTLOCAL=true --max-parallel-tasks=1 -t stagetwo tests/{cleaner,collect,report,vendor}_tests
on_failure:
fail_script: *faillogs
log_artifacts: *logs
report_stagetwo_daily_task:
alias: "stagetwo_daily_report"
name: "Report Stage Two - ${UBUNTU_DEVEL_FAMILY_NAME}"
allow_failures: true
depends_on: stageone_daily_report
timeout_in: 45m
gce_instance:
image_project: ${UBUNTU_DEVEL_PROJECT}
image_family: ${UBUNTU_DEVEL_FAMILY_NAME}
type: e2-medium
environment:
PKG: "snap"
setup_script: *setup
main_script: *stagetwo_test
on_failure:
fail_script: *faillogs
log_artifacts: *logs
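# Foreman integration tests only run when Foreman-related plugins, their tests,
# or the setup script change (see the skip condition below).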
report_foreman_task:
skip: "!changesInclude('.cirrus.yml', '**/{__init__,apache,foreman,foreman_tests,candlepin,pulp,pulpcore}.py', '**/foreman_setup.sh')"
timeout_in: 45m
alias: "foreman_integration"
name: "Integration Test - Foreman ${FOREMAN_VER} - ${BUILD_NAME}"
depends_on: stageone_report
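# Reuse the standard VM definition, overriding the instance type with a
# larger one for the Foreman deployment.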
gce_instance: &bigvm
<<: *standardvm
type: e2-standard-2
matrix:
- env:
<<: *centos8
FOREMAN_VER: "3.3"
- env:
<<: *centos8
FOREMAN_VER: "3.5"
- env:
<<: *centos8
FOREMAN_VER: "3.7"
- env:
PROJECT: ${DEBIAN_PROJECT}
VM_IMAGE_NAME: ${DEBIAN_IMAGE_NAME}
BUILD_NAME: ${DEBIAN_NAME}
FOREMAN_VER: "3.7"
setup_script: *setup
foreman_setup_script: ./tests/test_data/foreman_setup.sh
main_script: PYTHONPATH=tests/ avocado run -p TESTLOCAL=true --max-parallel-tasks=1 -t foreman tests/product_tests/foreman/
on_failure:
fail_script: *faillogs
log_artifacts: *logs