################################################################################
# DataSHIELD GHA test suite - dsBaseClient
# Adapted from `armadillo_azure-pipelines.yml` by Roberto Villegas-Diaz
#
# Inside the root directory $(Pipeline.Workspace) will be a file tree like:
#   /dsBaseClient       <- Checked out version of datashield/dsBaseClient
#   /dsBaseClient/logs  <- Where results of tests and logs are collated
#   /testStatus         <- Checked out version of datashield/testStatus
#
# As of Sept. 2025 this takes ~ 95 mins to run.
################################################################################
---
# Quoted: the plain scalar contains an apostrophe; quoting avoids any parser ambiguity.
name: "dsBaseClient tests' suite"

on:
  push:
  schedule:
    - cron: '0 0 * * 6'  # Weekly (on Saturdays @ 0.00)
jobs:
  dsBaseClient_test_suite:
    runs-on: ubuntu-latest
    timeout-minutes: 180
    permissions:
      contents: read

    # These should all be constant, except TEST_FILTER. This can be used to test
    # subsets of test files in the testthat directory. Options are like:
    #   '*'            <- Run all tests.
    #   'asNumericDS*' <- Run all asNumericDS tests, i.e. all the arg, etc. tests.
    #   '*_smk_*'      <- Run all the smoke tests for all functions.
    env:
      TEST_FILTER: '_-|datachk-|smk-|arg-|disc-|perf-|smk_expt-|expt-|math-'
      # Uppercased: R only honours the case-sensitive _R_CHECK_SYSTEM_CLOCK_ variable;
      # the lowercase spelling would be silently ignored on Linux.
      _R_CHECK_SYSTEM_CLOCK_: 0
      WORKFLOW_ID: ${{ github.run_id }}-${{ github.run_attempt }}
      PROJECT_NAME: dsBaseClient
      BRANCH_NAME: ${{ github.head_ref || github.ref_name }}
      REPO_OWNER: ${{ github.repository_owner }}
      # Quoted: a bare `yes` is a YAML 1.1 boolean; R expects the string "yes".
      R_KEEP_PKG_SOURCE: "yes"
      GITHUB_TOKEN: ${{ github.token || 'placeholder-token' }}
| 41 | + steps: |
| 42 | + - name: Checkout dsBaseClient |
| 43 | + uses: actions/checkout@v4 |
| 44 | + with: |
| 45 | + path: dsBaseClient |
| 46 | + |
| 47 | + - name: Checkout testStatus |
| 48 | + if: ${{ github.actor != 'nektos/act' }} # for local deployment only |
| 49 | + uses: actions/checkout@v4 |
| 50 | + with: |
| 51 | + repository: ${{ env.REPO_OWNER }}/testStatus |
| 52 | + ref: master |
| 53 | + path: testStatus |
| 54 | + persist-credentials: false |
| 55 | + token: ${{ env.GITHUB_TOKEN }} |
| 56 | + |
| 57 | + - name: Uninstall default MySQL |
| 58 | + run: | |
| 59 | + curl https://bazel.build/bazel-release.pub.gpg | sudo apt-key add - |
| 60 | + sudo service mysql stop || true |
| 61 | + sudo apt-get update |
| 62 | + sudo apt-get remove --purge mysql-client mysql-server mysql-common -y |
| 63 | + sudo apt-get autoremove -y |
| 64 | + sudo apt-get autoclean -y |
| 65 | + sudo rm -rf /var/lib/mysql/ |
| 66 | +
|
| 67 | + - uses: r-lib/actions/setup-pandoc@v2 |
| 68 | + |
| 69 | + - uses: r-lib/actions/setup-r@v2 |
| 70 | + with: |
| 71 | + r-version: release |
| 72 | + http-user-agent: release |
| 73 | + use-public-rspm: true |
| 74 | + |
| 75 | + - name: Install R and dependencies |
| 76 | + run: | |
| 77 | + sudo apt-get install --no-install-recommends software-properties-common dirmngr -y |
| 78 | + wget -qO- https://cloud.r-project.org/bin/linux/ubuntu/marutter_pubkey.asc | sudo tee -a /etc/apt/trusted.gpg.d/cran_ubuntu_key.asc |
| 79 | + sudo add-apt-repository "deb https://cloud.r-project.org/bin/linux/ubuntu $(lsb_release -cs)-cran40/" |
| 80 | + sudo apt-get update -qq |
| 81 | + sudo apt-get upgrade -y |
| 82 | + sudo apt-get install -qq libxml2-dev libcurl4-openssl-dev libssl-dev libgsl-dev libgit2-dev r-base -y |
| 83 | + sudo apt-get install -qq libharfbuzz-dev libfribidi-dev libmagick++-dev xml-twig-tools -y |
| 84 | + sudo R -q -e "install.packages(c('devtools','covr','fields','meta','metafor','ggplot2','gridExtra','data.table','DSI','DSOpal','DSLite','MolgenisAuth','MolgenisArmadillo','DSMolgenisArmadillo','DescTools','e1071'), repos='https://cloud.r-project.org')" |
| 85 | + sudo R -q -e "devtools::install_github(repo='datashield/dsDangerClient', ref=Sys.getenv('BRANCH_NAME'))" |
| 86 | +
|
| 87 | + - uses: r-lib/actions/setup-r-dependencies@v2 |
| 88 | + with: |
| 89 | + dependencies: 'c("Imports")' |
| 90 | + extra-packages: | |
| 91 | + any::rcmdcheck |
| 92 | + cran::devtools |
| 93 | + cran::git2r |
| 94 | + cran::RCurl |
| 95 | + cran::readr |
| 96 | + cran::magrittr |
| 97 | + cran::xml2 |
| 98 | + cran::purrr |
| 99 | + cran::dplyr |
| 100 | + cran::stringr |
| 101 | + cran::tidyr |
| 102 | + cran::quarto |
| 103 | + cran::knitr |
| 104 | + cran::kableExtra |
| 105 | + cran::rmarkdown |
| 106 | + cran::downlit |
| 107 | + needs: check |
| 108 | + |
| 109 | + - name: Check manual updated |
| 110 | + run: | |
| 111 | + orig_sum=$(find man -type f | sort -u | xargs cat | md5sum) |
| 112 | + R -q -e "devtools::document()" |
| 113 | + new_sum=$(find man -type f | sort -u | xargs cat | md5sum) |
| 114 | + if [ "$orig_sum" != "$new_sum" ]; then |
| 115 | + echo "Your committed man/*.Rd files are out of sync with the R headers." |
| 116 | + exit 1 |
| 117 | + fi |
| 118 | + working-directory: dsBaseClient |
| 119 | + continue-on-error: true |
| 120 | + |
| 121 | + - name: Devtools checks |
| 122 | + run: | |
| 123 | + R -q -e "devtools::check(args = c('--no-examples', '--no-tests'))" | tee azure-pipelines_check.Rout |
| 124 | + grep --quiet "^0 errors" azure-pipelines_check.Rout && grep --quiet " 0 warnings" azure-pipelines_check.Rout && grep --quiet " 0 notes" azure-pipelines_check.Rout |
| 125 | + working-directory: dsBaseClient |
| 126 | + continue-on-error: true |
| 127 | + |
| 128 | + - name: Start Armadillo docker-compose |
| 129 | + run: docker compose -f docker-compose_armadillo.yml up -d --build |
| 130 | + working-directory: dsBaseClient |
| 131 | + |
| 132 | + - name: Install test datasets |
| 133 | + run: | |
| 134 | + sleep 60 |
| 135 | + R -q -f "molgenis_armadillo-upload_testing_datasets.R" |
| 136 | + working-directory: dsBaseClient/tests/testthat/data_files |
| 137 | + |
| 138 | + - name: Install dsBase to Armadillo |
| 139 | + run: | |
| 140 | + curl -u admin:admin -X GET http://localhost:8080/packages |
| 141 | + curl -u admin:admin -H 'Content-Type: multipart/form-data' -F "file=@dsBase_6.3.5-permissive.tar.gz" -X POST http://localhost:8080/install-package |
| 142 | + sleep 60 |
| 143 | + docker restart dsbaseclient-armadillo-1 |
| 144 | + sleep 30 |
| 145 | + curl -u admin:admin -X POST http://localhost:8080/whitelist/dsBase |
| 146 | + working-directory: dsBaseClient |
| 147 | + |
| 148 | + - name: Run tests with coverage & JUnit report |
| 149 | + run: | |
| 150 | + mkdir -p logs |
| 151 | + R -q -e "devtools::reload();" |
| 152 | + R -q -e ' |
| 153 | + write.csv( |
| 154 | + covr::coverage_to_list( |
| 155 | + covr::package_coverage( |
| 156 | + type = c("none"), |
| 157 | + code = c('"'"' |
| 158 | + output_file <- file("test_console_output.txt"); |
| 159 | + sink(output_file); |
| 160 | + sink(output_file, type = "message"); |
| 161 | + junit_rep <- testthat::JunitReporter$new(file = file.path(getwd(), "test_results.xml")); |
| 162 | + progress_rep <- testthat::ProgressReporter$new(max_failures = 999999); |
| 163 | + multi_rep <- testthat::MultiReporter$new(reporters = list(progress_rep, junit_rep)); |
| 164 | + options("datashield.return_errors" = FALSE, "default_driver" = "ArmadilloDriver"); |
| 165 | + testthat::test_package("${{ env.PROJECT_NAME }}", filter = "${{ env.TEST_FILTER }}", reporter = multi_rep, stop_on_failure = FALSE)'"'"' |
| 166 | + ) |
| 167 | + ) |
| 168 | + ), |
| 169 | + "coveragelist.csv" |
| 170 | + )' |
| 171 | +
|
| 172 | + mv coveragelist.csv logs/ |
| 173 | + mv test_* logs/ |
| 174 | + working-directory: dsBaseClient |
| 175 | + |
| 176 | + - name: Check for JUnit errors |
| 177 | + run: | |
| 178 | + issue_count=$(sed 's/failures="0" errors="0"//' test_results.xml | grep -c errors= || true) |
| 179 | + echo "Number of testsuites with issues: $issue_count" |
| 180 | + sed 's/failures="0" errors="0"//' test_results.xml | grep errors= > issues.log || true |
| 181 | + cat issues.log || true |
| 182 | + # continue with workflow even when some tests fail |
| 183 | + exit 0 |
| 184 | + working-directory: dsBaseClient/logs |
| 185 | + |
| 186 | + - name: Write versions to file |
| 187 | + run: | |
| 188 | + echo "branch:${{ env.BRANCH_NAME }}" > ${{ env.WORKFLOW_ID }}.txt |
| 189 | + echo "os:$(lsb_release -ds)" >> ${{ env.WORKFLOW_ID }}.txt |
| 190 | + echo "R:$(R --version | head -n1)" >> ${{ env.WORKFLOW_ID }}.txt |
| 191 | + Rscript --vanilla -e 'sessionInfo()' >> session_info_${{ env.WORKFLOW_ID }}.txt |
| 192 | + working-directory: dsBaseClient/logs |
| 193 | + |
| 194 | + - name: Parse results from testthat and covr |
| 195 | + run: | |
| 196 | + Rscript --verbose --vanilla ../testStatus/source/parse_test_report.R logs/ logs/ https://github.com/datashield/${{ env.PROJECT_NAME }}/blob/${{ env.BRANCH_NAME }} '([^:]+)' '(?<=::)[^:]+(?=::)' |
| 197 | + working-directory: dsBaseClient |
| 198 | + env: |
| 199 | + PROJECT_NAME: ${{ env.PROJECT_NAME }} |
| 200 | + BRANCH_NAME: ${{ env.BRANCH_NAME }} |
| 201 | + |
| 202 | + - name: Render report |
| 203 | + run: | |
| 204 | + cd testStatus |
| 205 | +
|
| 206 | + mkdir -p new/logs/${{ env.PROJECT_NAME }}/${{ env.BRANCH_NAME }}/${{ env.WORKFLOW_ID }}/ |
| 207 | + mkdir -p new/docs/${{ env.PROJECT_NAME }}/${{ env.BRANCH_NAME }}/${{ env.WORKFLOW_ID }}/ |
| 208 | + mkdir -p new/docs/${{ env.PROJECT_NAME }}/${{ env.BRANCH_NAME }}/latest/ |
| 209 | +
|
| 210 | + # Copy logs to new logs directory location |
| 211 | + cp -rv ../dsBaseClient/logs/* new/logs/${{ env.PROJECT_NAME }}/${{ env.BRANCH_NAME }}/${{ env.WORKFLOW_ID }}/ |
| 212 | + cp -rv ../dsBaseClient/logs/${{ env.WORKFLOW_ID }}.txt new/logs/${{ env.PROJECT_NAME }}/${{ env.BRANCH_NAME }}/${{ env.WORKFLOW_ID }}/ |
| 213 | +
|
| 214 | + R -e 'input_dir <- file.path("../new/logs", Sys.getenv("PROJECT_NAME"), Sys.getenv("BRANCH_NAME"), Sys.getenv("WORKFLOW_ID")); quarto::quarto_render("source/test_report.qmd", execute_params = list(input_dir = input_dir))' |
| 215 | + mv source/test_report.html new/docs/${{ env.PROJECT_NAME }}/${{ env.BRANCH_NAME }}/${{ env.WORKFLOW_ID }}/index.html |
| 216 | + cp -r new/docs/${{ env.PROJECT_NAME }}/${{ env.BRANCH_NAME }}/${{ env.WORKFLOW_ID }}/* new/docs/${{ env.PROJECT_NAME }}/${{ env.BRANCH_NAME }}/latest |
| 217 | +
|
| 218 | + env: |
| 219 | + PROJECT_NAME: ${{ env.PROJECT_NAME }} |
| 220 | + BRANCH_NAME: ${{ env.BRANCH_NAME }} |
| 221 | + WORKFLOW_ID: ${{ env.WORKFLOW_ID }} |
| 222 | + |
| 223 | + - name: Upload test logs |
| 224 | + uses: actions/upload-artifact@v4 |
| 225 | + with: |
| 226 | + name: dsbaseclient-logs |
| 227 | + path: testStatus/new |
| 228 | + |
| 229 | + - name: Dump environment info |
| 230 | + run: | |
| 231 | + echo -e "\n#############################" |
| 232 | + echo -e "ls /: ######################" |
| 233 | + ls -al . |
| 234 | + echo -e "\n#############################" |
| 235 | + echo -e "lscpu: ######################" |
| 236 | + lscpu |
| 237 | + echo -e "\n#############################" |
| 238 | + echo -e "memory: #####################" |
| 239 | + free -m |
| 240 | + echo -e "\n#############################" |
| 241 | + echo -e "env: ########################" |
| 242 | + env |
| 243 | + echo -e "\n#############################" |
| 244 | + echo -e "R sessionInfo(): ############" |
| 245 | + R -e 'sessionInfo()' |
| 246 | + sudo apt install tree -y |
| 247 | + tree . |