################################################################################
# NOTE(review): this span replaces pasted GitHub web-UI residue from
# PR #63 "chore: build image which can be reused to speed up CI".
#
# The pasted CI annotation for .github/workflows/dsBaseClient_test_suite.yaml
# reported an invalid workflow file:
#   (Line: 29, Col: 14): Unrecognized function: 'replace'. Located at
#   position 42 within expression:
#   startsWith(github.ref_name, 'dev-') && replace(github.ref_name, 'dev-', 'v') || 'latest'
#
# GitHub Actions expressions do not provide a replace() function, so the
# container image tag cannot be rewritten inside an expression; it must be
# computed in a shell step (or a setup job) instead.
################################################################################
# DataSHIELD GHA test suite - dsBaseClient
# Adapted from `armadillo_azure-pipelines.yml` by Roberto Villegas-Diaz
#
# Inside the root directory $(Pipeline.Workspace) will be a file tree like:
# /dsBaseClient <- Checked out version of datashield/dsBaseClient
# /dsBaseClient/logs <- Where results of tests and logs are collated
# /testStatus <- Checked out version of datashield/testStatus
#
# As of Sept. 2025 this takes ~ 95 mins to run.
################################################################################
name: dsBaseClient tests' suite
# Runs on every push, plus two cron triggers (GitHub Actions evaluates
# `on` as a trigger map; the bare `push:` key means "any push, any ref").
on:
  push:
  schedule:
    - cron: '0 0 * * 0' # Weekly (Sunday 00:00 UTC)
    - cron: '0 1 * * *' # Nightly (01:00 UTC)
jobs:
  # GitHub Actions expressions have no replace() function — the original
  # `replace(github.ref_name, 'dev-', 'v')` made the workflow file invalid —
  # so the image tag is computed here with shell parameter expansion and
  # handed to the test job through a job output.
  resolve_image_tag:
    runs-on: ubuntu-latest
    outputs:
      tag: ${{ steps.tag.outputs.tag }}
    steps:
      # Branch dev-X.Y.Z selects image tag vX.Y.Z; every other ref uses 'latest'.
      - id: tag
        run: |
          if [[ "${GITHUB_REF_NAME}" == dev-* ]]; then
            echo "tag=v${GITHUB_REF_NAME#dev-}" >> "${GITHUB_OUTPUT}"
          else
            echo "tag=latest" >> "${GITHUB_OUTPUT}"
          fi

  dsBaseClient_test_suite:
    needs: resolve_image_tag
    runs-on: ubuntu-latest
    timeout-minutes: 120
    permissions:
      contents: read
    # This logic automatically selects the image tag
    container:
      image: ghcr.io/${{ github.repository_owner }}/ds-base-env:${{ needs.resolve_image_tag.outputs.tag }}-dev
      options: --user root -v /var/run/docker.sock:/var/run/docker.sock
    env:
      # Regex used by testthat::test_package() to select test files.
      TEST_FILTER: '_-|datachk-|smk-|arg-|disc-|perf-|smk_expt-|expt-|math-'
      # R only honours the upper-case name _R_CHECK_SYSTEM_CLOCK_ (env vars
      # are case-sensitive on Linux); the original lower-case spelling was
      # silently ignored.
      _R_CHECK_SYSTEM_CLOCK_: '0'
      WORKFLOW_ID: ${{ github.run_id }}-${{ github.run_attempt }}
      PROJECT_NAME: dsBaseClient
      BRANCH_NAME: ${{ github.head_ref || github.ref_name }}
      REPO_OWNER: ${{ github.repository_owner }}
      R_KEEP_PKG_SOURCE: 'yes' # quoted: bare `yes` is a YAML 1.1 boolean
      GITHUB_TOKEN: ${{ github.token || 'placeholder-token' }}
    steps:
      - name: Checkout dsBaseClient
        uses: actions/checkout@v4
        with:
          path: dsBaseClient
      - name: Checkout testStatus
        if: ${{ github.actor != 'nektos/act' }} # for local deployment only
        uses: actions/checkout@v4
        with:
          repository: ${{ env.REPO_OWNER }}/testStatus
          ref: master
          path: testStatus
          persist-credentials: false
          token: ${{ env.GITHUB_TOKEN }}
      - name: Install PR version of dsBaseClient
        run: R CMD INSTALL dsBaseClient
      - name: Check manual updated
        # Soft-fails (continue-on-error) when the committed man/*.Rd files
        # differ from what devtools::document() regenerates from the headers.
        run: |
          orig_sum=$(find man -type f | sort -u | xargs cat | md5sum)
          R -q -e "devtools::document()"
          new_sum=$(find man -type f | sort -u | xargs cat | md5sum)
          if [ "$orig_sum" != "$new_sum" ]; then
            echo "Your committed man/*.Rd files are out of sync with the R headers."
            exit 1
          fi
        working-directory: dsBaseClient
        continue-on-error: true
      - name: Devtools checks
        # R CMD check via devtools; the greps require "0 errors / 0 warnings /
        # 0 notes" in the output, but the step is continue-on-error.
        run: |
          R -q -e "devtools::check(args = c('--no-examples', '--no-tests'))" | tee azure-pipelines_check.Rout
          grep --quiet "^0 errors" azure-pipelines_check.Rout && grep --quiet " 0 warnings" azure-pipelines_check.Rout && grep --quiet " 0 notes" azure-pipelines_check.Rout
        working-directory: dsBaseClient
        continue-on-error: true
      - name: Start Armadillo docker-compose
        run: docker compose -f docker-compose_armadillo.yml up -d --build
        working-directory: dsBaseClient
      - name: Install test datasets
        # The sleep gives the Armadillo containers time to come up before upload.
        run: |
          sleep 60
          R -q -f "molgenis_armadillo-upload_testing_datasets.R"
        working-directory: dsBaseClient/tests/testthat/data_files
      - name: Install dsBase to Armadillo
        run: |
          curl -u admin:admin -X GET http://localhost:8080/packages
          curl -u admin:admin -H 'Content-Type: multipart/form-data' -F "file=@dsBase_7.0.0-permissive.tar.gz" -X POST http://localhost:8080/install-package
          sleep 60
          docker restart dsbaseclient-armadillo-1
          sleep 30
          curl -u admin:admin -X POST http://localhost:8080/whitelist/dsBase
        working-directory: dsBaseClient
      - name: Run tests with coverage & JUnit report
        # Runs the testthat suite under covr; console output is sunk to
        # test_console_output.txt, JUnit XML to test_results.xml, coverage to
        # coveragelist.csv. The '"'"' sequences close/reopen the outer shell
        # single-quote so the inner R code can contain single quotes.
        run: |
          mkdir -p logs
          R -q -e "devtools::reload();"
          R -q -e '
          write.csv(
            covr::coverage_to_list(
              covr::package_coverage(
                type = c("none"),
                code = c('"'"'
                  output_file <- file("test_console_output.txt");
                  sink(output_file);
                  sink(output_file, type = "message");
                  junit_rep <- testthat::JunitReporter$new(file = file.path(getwd(), "test_results.xml"));
                  progress_rep <- testthat::ProgressReporter$new(max_failures = 999999);
                  multi_rep <- testthat::MultiReporter$new(reporters = list(progress_rep, junit_rep));
                  options("datashield.return_errors" = FALSE, "default_driver" = "ArmadilloDriver");
                  testthat::test_package("${{ env.PROJECT_NAME }}", filter = "${{ env.TEST_FILTER }}", reporter = multi_rep, stop_on_failure = FALSE)'"'"'
              )
            )
          ),
          "coveragelist.csv"
          )'
          mv coveragelist.csv logs/
          mv test_* logs/
        working-directory: dsBaseClient
      - name: Check for JUnit errors
        # Reports testsuites with failures/errors but never fails the job
        # (explicit `exit 0`); the `|| true` guards keep grep's non-zero
        # exit on "no matches" from aborting the `set -e` shell.
        run: |
          issue_count=$(sed 's/failures="0" errors="0"//' test_results.xml | grep -c errors= || true)
          echo "Number of testsuites with issues: $issue_count"
          sed 's/failures="0" errors="0"//' test_results.xml | grep errors= > issues.log || true
          cat issues.log || true
          exit 0
        working-directory: dsBaseClient/logs
      - name: Write versions to file
        run: |
          echo "branch:${{ env.BRANCH_NAME }}" > ${{ env.WORKFLOW_ID }}.txt
          echo "os:$(lsb_release -ds)" >> ${{ env.WORKFLOW_ID }}.txt
          echo "R:$(R --version | head -n1)" >> ${{ env.WORKFLOW_ID }}.txt
        working-directory: dsBaseClient/logs
      - name: Parse results from testthat and covr
        run: |
          Rscript --verbose --vanilla ../testStatus/source/parse_test_report.R logs/ logs/ https://github.com/datashield/${{ env.PROJECT_NAME }}/blob/${{ env.BRANCH_NAME }} '([^:]+)' '(?<=::)[^:]+(?=::)'
        working-directory: dsBaseClient
        # NOTE(review): this step env re-exports the job-level values under the
        # same names — a no-op kept for compatibility; confirm before removing.
        env:
          PROJECT_NAME: ${{ env.PROJECT_NAME }}
          BRANCH_NAME: ${{ env.BRANCH_NAME }}
- name: Render report
run: |
cd testStatus
mkdir -p new/logs/${{ env.PROJECT_NAME }}/${{ env.BRANCH_NAME }}/${{ env.WORKFLOW_ID }}/
mkdir -p new/docs/${{ env.PROJECT_NAME }}/${{ env.BRANCH_NAME }}/${{ env.WORKFLOW_ID }}/