Restructure codebase #225
Workflow file for this run
# Copyright (C) 2024 Roberto Rossini <[email protected]>
#
# SPDX-License-Identifier: GPL-3.0
#
# This library is free software: you can redistribute it and/or
# modify it under the terms of the GNU Public License as published
# by the Free Software Foundation; either version 3 of the License,
# or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Library General Public License for more details.
#
# You should have received a copy of the GNU Public License along
# with this library. If not, see
# <https://www.gnu.org/licenses/>.
name: Run Codecov
on:
  push:
    branches: [main]
    paths:
      - ".github/codecov.yml"
      - ".github/workflows/build-conan-deps.yml"
      - ".github/workflows/cache-test-dataset.yml"
      - ".github/workflows/codecov.yml"
      - "cmake/**"
      - "src/**"
      - "test/**"
      - "CMakeLists.txt"
      - "conanfile.py"
      - "utils/devel/collect_coverage_data.py"
  pull_request:
    paths:
      - ".github/codecov.yml"
      - ".github/workflows/build-conan-deps.yml"
      - ".github/workflows/cache-test-dataset.yml"
      - ".github/workflows/codecov.yml"
      - "cmake/**"
      - "src/**"
      - "test/**"
      - "CMakeLists.txt"
      - "conanfile.py"
      - "utils/devel/collect_coverage_data.py"
# https://stackoverflow.com/a/72408109
concurrency:
  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
  cancel-in-progress: true
defaults:
  run:
    shell: bash
jobs:
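  # Reusable workflows that populate the test-dataset and Conan-package caches consumed by the jobs below.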
  cache-test-dataset:
    name: Cache test dataset
    uses: paulsengroup/nchg/.github/workflows/cache-test-dataset.yml@main
  build-conan-deps:
    name: Build Conan deps
    uses: paulsengroup/nchg/.github/workflows/build-conan-deps.yml@main
    with:
      os: ubuntu-24.04
  build-project:
    name: Build project
    needs: build-conan-deps
    runs-on: ubuntu-latest
    container:
      image: ghcr.io/paulsengroup/ci-docker-images/ubuntu-24.04-cxx-clang-19
      options: "--user=root"
    env:
      CCACHE_DIR: "/opt/ccache-cache"
      CCACHE_COMPILERCHECK: "content"
      CCACHE_COMPRESSLEVEL: "1"
      CCACHE_MAXSIZE: "500M"
      CONAN_HOME: "/opt/conan/"
    steps:
      - uses: actions/checkout@v4
      - name: Fix permissions
        run: |
          chown -R $(id -u):$(id -g) $PWD
      - name: Generate cache key
        id: cache-key
        run: |
          conanfile_hash="${{ hashFiles('conanfile.py') }}"
          # This can be used to always update a cache entry (useful e.g. for ccache)
          current_date="$(date '+%s')"
          ccache_key_prefix="codecov-ccache-$conanfile_hash"
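          # Note: the full key embeds the ref and a timestamp, so the save step below always uploads a
          # fresh ccache snapshot, while restores fall back to the newest entry matching the same ref
          # or, failing that, the same conanfile hash.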
echo "ccache-key=${ccache_key_prefix}-$GITHUB_REF-${current_date}" | tee -a "$GITHUB_OUTPUT" | |
echo "ccache-restore-key-1=$ccache_key_prefix-$GITHUB_REF" | tee -a "$GITHUB_OUTPUT" | |
echo "ccache-restore-key-2=$ccache_key_prefix" | tee -a "$GITHUB_OUTPUT" | |
- name: Restore Conan cache | |
uses: actions/cache/restore@v4 | |
with: | |
key: ${{ needs.build-conan-deps.outputs.conan-key }} | |
path: ${{ env.CONAN_HOME }}/p | |
fail-on-cache-miss: true | |
- name: Restore CMake configs | |
uses: actions/cache/restore@v4 | |
with: | |
key: ${{ needs.build-conan-deps.outputs.cmake-prefix-relwithdebinfo-key }} | |
path: /tmp/cmake-prefix-rwdi.tar | |
fail-on-cache-miss: true | |
- name: Extract CMake configs | |
run: | | |
mkdir conan-env | |
tar -xf /tmp/cmake-prefix-rwdi.tar -C conan-env/ --strip-components=1 | |
- name: Configure project | |
run: | | |
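          # Clang source-based coverage: instrument the binaries, emit coverage mappings, and track MC/DC.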
          cov_flags='-fprofile-instr-generate -fcoverage-mapping -fcoverage-mcdc'
          cmake -DCMAKE_BUILD_TYPE=RelWithDebInfo \
            -DCMAKE_PREFIX_PATH="$PWD/conan-env" \
            -DNCHG_ENABLE_TESTING=ON \
            -DNCHG_DOWNLOAD_TEST_DATASET=OFF \
            -DCMAKE_C_FLAGS="$cov_flags" \
            -DCMAKE_CXX_FLAGS="$cov_flags" \
            -S . \
            -B build
      - name: Restore Ccache folder
        id: cache-ccache
        uses: actions/cache/restore@v4
        with:
          key: ${{ steps.cache-key.outputs.ccache-restore-key-1 }}
          restore-keys: ${{ steps.cache-key.outputs.ccache-restore-key-2 }}
          path: ${{ env.CCACHE_DIR }}
      - name: Reset Ccache stats
        run: ccache --zero-stats
      - name: Build project
        run: cmake --build build -j $(nproc)
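      # Object files are not needed by the test jobs, so they are left out to keep the artifact small.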
      - name: Package build folder
        run: tar --exclude='*.o' -cf - build/ | zstd -T0 -13 -o build.tar.zst
      - name: Upload build folder
        uses: actions/upload-artifact@v4
        with:
          name: "build-codecov"
          path: build.tar.zst
          if-no-files-found: error
          retention-days: 1
      - name: Print Ccache statistics (pre-cleanup)
        run: |
          ccache --show-stats \
            --show-compression \
            --verbose
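      # Trim the cache before saving it: evict entries not used within the last 4 hours and recompress what remains.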
      - name: Cleanup Ccache folder
        run: |
          ccache --evict-older-than=14400s # 4h
          ccache --recompress=19 --recompress-threads="$(nproc)"
          ccache --cleanup
      - name: Print Ccache statistics (post-cleanup)
        run: |
          ccache --show-stats \
            --show-compression \
            --verbose
      - name: Save Ccache folder
        uses: actions/cache/save@v4
        with:
          key: ${{ steps.cache-key.outputs.ccache-key }}
          path: ${{ env.CCACHE_DIR }}
        env:
          ZSTD_CLEVEL: 1
      - name: Generate list of stale cache entries
        id: stale-cache
        if: steps.cache-ccache.outputs.cache-matched-key != ''
        run: |
          fname='stale-cache-codecov.txt'
          echo '${{ steps.cache-ccache.outputs.cache-matched-key }}' > "$fname"
          echo "name=$fname" | tee -a "$GITHUB_OUTPUT"
      - name: Upload stale cache entries
        if: steps.cache-ccache.outputs.cache-matched-key != ''
        uses: actions/upload-artifact@v4
        with:
          name: ${{ steps.stale-cache.outputs.name }}
          path: "${{ steps.stale-cache.outputs.name }}"
          if-no-files-found: error
          retention-days: 1
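  # Delete the ccache entries that were just superseded (listed in the stale-cache-* artifacts),
  # leaving entries that belong to the main branch untouched.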
  clean-stale-cache:
    needs: [build-project]
    name: Evict stale cache entries
    runs-on: ubuntu-latest
    permissions:
      actions: write
    steps:
      - name: Download artifacts
        uses: actions/download-artifact@v4
        continue-on-error: true
        with:
          pattern: stale-cache-*
          merge-multiple: true
      - name: Evict cache entries
        continue-on-error: true
        run: |
          set -x
          while read entry; do
            if ! grep -q '/heads/main' <(echo "$entry"); then
              gh cache delete --repo '${{ github.repository }}' "$entry"
            fi
          done < <(cat stale-cache*.txt | grep -v '^$')
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
  run-unit-tests:
    name: Run unit tests
    needs: [cache-test-dataset, build-project]
    runs-on: ubuntu-latest
    container:
      image: ghcr.io/paulsengroup/ci-docker-images/ubuntu-24.04-cxx-clang-19
      options: "--user=root"
    steps:
      - name: Checkout repo
        uses: actions/checkout@v4
      - name: Restore test dataset
        uses: actions/cache/restore@v4
        with:
          key: ${{ needs.cache-test-dataset.outputs.cache-key }}
          path: test/data/nchg_test_data.tar.zst
          fail-on-cache-miss: true
      - name: Download unit tests artifact
        uses: actions/download-artifact@v4
        with:
          name: "build-codecov"
      - name: Extract binaries and test dataset
        run: |
          tar -xf build.tar.zst
          tar -xf test/data/nchg_test_data.tar.zst
      - name: Add test user
        run: useradd devel
      - name: Fix permissions
        run: |
          chown -R devel:devel build/
      - name: Setup dependencies
        run: |
          apt-get update
          apt-get install -q -y --no-install-recommends \
            gpg \
            gpg-agent \
            sudo \
            tar
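      # Each test binary runs as the unprivileged 'devel' user with LLVM_PROFILE_FILE pointing at a
      # unique .profraw file (keyed on the binary's SHA256), so profiles do not overwrite each other.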
      - name: Run unit tests
        run: |
          readarray -t -d '' binaries < <(find build/test/units/ -type f -executable | xargs printf '%q\0')
          mkdir profiles
          chown devel:devel profiles
          for bin in "${binaries[@]}"; do
            hash="$(sha256sum "$bin" | cut -f1 -d ' ')"
            LLVM_PROFILE_FILE="$PWD/profiles/$hash.profraw"
            sudo -u devel \
              -E env "LLVM_PROFILE_FILE=$LLVM_PROFILE_FILE" \
              "$bin"
            if [ $? -ne 0 ]; then
              exit 1
            fi
          done
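      # collect_coverage_data.py merges the raw profiles with llvm-profdata and exports an lcov report
      # with llvm-cov, ready to be uploaded to Codecov.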
      - name: Collect coverage data
        run: |
          readarray -t -d '' binaries < <(find build/test/units/ -type f -executable | xargs printf '%q\0')
          utils/devel/collect_coverage_data.py \
            --output-dir coverage/ \
            --prefix=profiles/ \
            --format lcov \
            --llvm-cov-bin llvm-cov-19 \
            --llvm-profdata-bin llvm-profdata-19 \
            "${binaries[@]}"
      - name: Upload coverage reports to Codecov
        uses: codecov/codecov-action@v4
        with:
          flags: "tests | unittests"
          fail_ci_if_error: true
          handle_no_reports_found: true
          directory: coverage
          os: linux
          token: ${{ secrets.CODECOV_TOKEN }}
          verbose: true
  run-integration-tests:
    name: Run integration tests
    needs: [cache-test-dataset, build-project]
    runs-on: ubuntu-latest
    container:
      image: ghcr.io/paulsengroup/ci-docker-images/ubuntu-24.04-cxx-clang-19
      options: "--user=root"
    steps:
      - name: Checkout repo
        uses: actions/checkout@v4
      - name: Restore test dataset
        uses: actions/cache/restore@v4
        with:
          key: ${{ needs.cache-test-dataset.outputs.cache-key }}
          path: test/data/nchg_test_data.tar.zst
          fail-on-cache-miss: true
      - name: Download binaries artifact
        uses: actions/download-artifact@v4
        with:
          name: "build-codecov"
      - name: Extract binaries and test dataset
        run: |
          tar -xf build.tar.zst
          tar -xf test/data/nchg_test_data.tar.zst
      - name: Add test user
        run: useradd devel
      - name: Fix permissions
        run: chown -R devel:devel build/
      - name: Setup dependencies
        run: |
          apt-get update
          apt-get install -q -y --no-install-recommends \
            gpg \
            gpg-agent \
            sudo \
            tar
          pip install -r test/requirements.txt
      - name: Setup test environment
        run: |
          echo "DATA_DIR=$PWD/test/data/integration_tests" | tee -a "$GITHUB_ENV"
          echo "OUT_PREFIX=ENCFF447ERX.1000000" | tee -a "$GITHUB_ENV"
      - name: Test NCHG cartesian-product (gw)
        run: |
          LLVM_PROFILE_FILE="$PWD/nchg.cartesian-product-gw.profraw"
          sudo -u devel \
            -E env "LLVM_PROFILE_FILE=$LLVM_PROFILE_FILE" \
            build/src/nchg/NCHG cartesian-product \
            test/data/ENCFF447ERX.1000000.compartments.bed \
            > "$OUT_PREFIX.bedpe"
          test/scripts/validate_nchg_output.py cartesian-product \
            "$OUT_PREFIX.bedpe" \
            "$DATA_DIR/cartesian_product/$OUT_PREFIX.gw-domains.bedpe"
          rm "$OUT_PREFIX"*
      - name: Test NCHG cartesian-product (cis)
        run: |
          LLVM_PROFILE_FILE="$PWD/nchg.cartesian-product-cis.profraw"
          sudo -u devel \
            -E env "LLVM_PROFILE_FILE=$LLVM_PROFILE_FILE" \
            build/src/nchg/NCHG cartesian-product \
            --cis-only \
            test/data/ENCFF447ERX.1000000.compartments.bed \
            > domains.bedpe
          test/scripts/validate_nchg_output.py cartesian-product \
            domains.bedpe \
            "$DATA_DIR/cartesian_product/$OUT_PREFIX.cis-domains.bedpe"
          rm *.bedpe
      - name: Test NCHG cartesian-product (trans)
        run: |
          LLVM_PROFILE_FILE="$PWD/nchg.cartesian-product-trans.profraw"
          sudo -u devel \
            -E env "LLVM_PROFILE_FILE=$LLVM_PROFILE_FILE" \
            build/src/nchg/NCHG cartesian-product \
            --trans-only \
            test/data/ENCFF447ERX.1000000.compartments.bed \
            > domains.bedpe
          test/scripts/validate_nchg_output.py cartesian-product \
            domains.bedpe \
            "$DATA_DIR/cartesian_product/$OUT_PREFIX.trans-domains.bedpe"
          rm *.bedpe
      - name: Test NCHG cartesian-product (chr1:chr3)
        run: |
          LLVM_PROFILE_FILE="$PWD/nchg.cartesian-product-chr1-chr3.profraw"
          sudo -u devel \
            -E env "LLVM_PROFILE_FILE=$LLVM_PROFILE_FILE" \
            build/src/nchg/NCHG cartesian-product \
            --chrom1 chr1 \
            --chrom2 chr3 \
            test/data/ENCFF447ERX.1000000.compartments.bed \
            > domains.bedpe
          grep -E '\bchr1\b.*\bchr3\b' \
            "$DATA_DIR/cartesian_product/$OUT_PREFIX.trans-domains.bedpe" \
            > expected.bedpe
          test/scripts/validate_nchg_output.py cartesian-product \
            domains.bedpe \
            expected.bedpe
          rm *.bedpe
      - name: Test NCHG compute (mt)
        run: |
          LLVM_PROFILE_FILE="$PWD/nchg.compute-mt.%$(nproc)m.profraw"
          sudo -u devel \
            -E env "LLVM_PROFILE_FILE=$LLVM_PROFILE_FILE" \
            build/src/nchg/NCHG compute \
            test/data/ENCFF447ERX.1000000.cool \
            "$OUT_PREFIX" \
            --threads "$(nproc)"
          test/scripts/validate_nchg_output.py compute \
            "$OUT_PREFIX" \
            "$DATA_DIR/compute/$OUT_PREFIX"
          rm "$OUT_PREFIX"*
      - name: Test NCHG compute (st)
        run: |
          LLVM_PROFILE_FILE="$PWD/nchg.compute-st.profraw"
          sudo -u devel \
            -E env "LLVM_PROFILE_FILE=$LLVM_PROFILE_FILE" \
            build/src/nchg/NCHG compute \
            test/data/ENCFF447ERX.1000000.cool \
            "$OUT_PREFIX" \
            --cis-only \
            --threads 1
          rm "$OUT_PREFIX"*
      - name: Test NCHG compute (mt; w/ domain list)
        run: |
          LLVM_PROFILE_FILE="$PWD/nchg.compute-mt-with-domains.%$(nproc)m.profraw"
          sudo -u devel \
            -E env "LLVM_PROFILE_FILE=$LLVM_PROFILE_FILE" \
            build/src/nchg/NCHG compute \
            test/data/ENCFF447ERX.1000000.cool \
            "$OUT_PREFIX" \
            --threads "$(nproc)" \
            --domains "$DATA_DIR/cartesian_product/$OUT_PREFIX.gw-domains.bedpe"
          test/scripts/validate_nchg_output.py compute \
            "$OUT_PREFIX" \
            "$DATA_DIR/compute_with_domains/$OUT_PREFIX"
          rm "$OUT_PREFIX"*
      - name: Test NCHG merge
        run: |
          LLVM_PROFILE_FILE="$PWD/nchg.merge.profraw"
          sudo -u devel \
            -E env "LLVM_PROFILE_FILE=$LLVM_PROFILE_FILE" \
            build/src/nchg/NCHG merge \
            "$DATA_DIR/compute/$OUT_PREFIX" \
            "$OUT_PREFIX.parquet" \
            --threads "$(nproc)"
          test/scripts/validate_nchg_output.py merge \
            "$OUT_PREFIX.parquet" \
            "$DATA_DIR/merge/$OUT_PREFIX.parquet"
          rm "$OUT_PREFIX"*
      - name: Test NCHG filter
        run: |
          LLVM_PROFILE_FILE="$PWD/nchg.filter.profraw"
          sudo -u devel \
            -E env "LLVM_PROFILE_FILE=$LLVM_PROFILE_FILE" \
            build/src/nchg/NCHG filter \
            "$DATA_DIR/merge/$OUT_PREFIX.parquet" \
            "$OUT_PREFIX.filtered.parquet" \
            --threads "$(nproc)"
          test/scripts/validate_nchg_output.py filter \
            "$OUT_PREFIX.filtered.parquet" \
            "$DATA_DIR/filter/$OUT_PREFIX.parquet"
          rm "$OUT_PREFIX"*
      - name: Test NCHG view (compute)
        run: |
          LLVM_PROFILE_FILE="$PWD/nchg.view.compute.profraw"
          sudo -u devel \
            -E env "LLVM_PROFILE_FILE=$LLVM_PROFILE_FILE" \
            build/src/nchg/NCHG view \
            "$DATA_DIR/merge/$OUT_PREFIX.parquet" > /dev/null
      - name: Test NCHG view (filter)
        run: |
          LLVM_PROFILE_FILE="$PWD/nchg.view.filter.profraw"
          sudo -u devel \
            -E env "LLVM_PROFILE_FILE=$LLVM_PROFILE_FILE" \
            build/src/nchg/NCHG view \
            "$DATA_DIR/filter/$OUT_PREFIX.parquet" \
            > "$OUT_PREFIX.tsv"
          test/scripts/validate_nchg_output.py view \
            "$OUT_PREFIX.tsv" \
            "$DATA_DIR/view/$OUT_PREFIX.tsv"
          rm "$OUT_PREFIX"*
      - name: Test NCHG expected (w/ mask)
        run: |
          LLVM_PROFILE_FILE="$PWD/nchg.expected.mask.profraw"
          printf 'chr1\t0\t248956422' > mask.bed
          sudo -u devel \
            -E env "LLVM_PROFILE_FILE=$LLVM_PROFILE_FILE" \
            build/src/nchg/NCHG expected \
            test/data/ENCFF447ERX.1000000.cool \
            --output "$OUT_PREFIX.h5" \
            --bin-mask mask.bed
          rm "$OUT_PREFIX"* mask.bed
      - name: Test NCHG expected (wo/ mask)
        run: |
          LLVM_PROFILE_FILE="$PWD/nchg.expected.no-mask.profraw"
          sudo -u devel \
            -E env "LLVM_PROFILE_FILE=$LLVM_PROFILE_FILE" \
            build/src/nchg/NCHG expected \
            test/data/ENCFF447ERX.1000000.cool \
            --output "$OUT_PREFIX.h5"
          test/scripts/validate_nchg_output.py expected \
            "$OUT_PREFIX.h5" \
            "$DATA_DIR/expected/$OUT_PREFIX.h5"
          rm "$OUT_PREFIX"*
      - name: Collect coverage data
        run: |
          utils/devel/collect_coverage_data.py \
            --output-dir coverage/ \
            --prefix="$PWD/nchg" \
            --format lcov \
            --llvm-cov-bin llvm-cov-19 \
            --llvm-profdata-bin llvm-profdata-19 \
            build/src/nchg/NCHG
      - name: Upload coverage reports to Codecov
        uses: codecov/codecov-action@v4
        with:
          flags: "tests | integration"
          fail_ci_if_error: true
          handle_no_reports_found: true
          directory: coverage
          os: linux
          token: ${{ secrets.CODECOV_TOKEN }}
          verbose: true
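  # Aggregate status check: fails whenever any of the build or test jobs did not succeed.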
  codecov-status-check:
    name: Status Check (Codecov)
    if: ${{ always() }}
    runs-on: ubuntu-latest
    needs:
      - build-project
      - run-unit-tests
      - run-integration-tests
    steps:
      - name: Collect job results
        if: |
          needs.build-project.result != 'success' ||
          needs.run-unit-tests.result != 'success' ||
          needs.run-integration-tests.result != 'success'
        run: exit 1