From 384d444c001f9cfb23432cb03d94fa03cdc2c24a Mon Sep 17 00:00:00 2001 From: medde Date: Thu, 12 Dec 2024 14:50:38 +0000 Subject: [PATCH 1/9] fix mrtrix container --- modules/nf-neuro/image/applymask/main.nf | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/modules/nf-neuro/image/applymask/main.nf b/modules/nf-neuro/image/applymask/main.nf index 9332cad2..f7dc660d 100644 --- a/modules/nf-neuro/image/applymask/main.nf +++ b/modules/nf-neuro/image/applymask/main.nf @@ -2,9 +2,7 @@ process IMAGE_APPLYMASK { tag "$meta.id" label 'process_single' - container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? - 'https://scil.usherbrooke.ca/containers/scilus_2.0.2.sif': - 'scilus/scilus:2.0.2' }" + container "mrtrix3/mrtrix3:latest" input: tuple val(meta), path(image), path(mask) From d51aa53995a45009b8e077cfc41c7335a87ae1db Mon Sep 17 00:00:00 2001 From: Anthony Gagnon Date: Thu, 12 Dec 2024 10:36:18 -0500 Subject: [PATCH 2/9] Add trap function for bet + stub-run test. --- modules/nf-neuro/betcrop/fslbetcrop/main.nf | 20 ++++++++----- .../betcrop/fslbetcrop/tests/main.nf.test | 28 +++++++++++++++++++ .../fslbetcrop/tests/main.nf.test.snap | 12 ++++++++ 3 files changed, 53 insertions(+), 7 deletions(-) diff --git a/modules/nf-neuro/betcrop/fslbetcrop/main.nf b/modules/nf-neuro/betcrop/fslbetcrop/main.nf index 1da42ed9..54fc9ddf 100755 --- a/modules/nf-neuro/betcrop/fslbetcrop/main.nf +++ b/modules/nf-neuro/betcrop/fslbetcrop/main.nf @@ -69,16 +69,9 @@ process BETCROP_FSLBETCROP { """ stub: - def args = task.ext.args ?: '' def prefix = task.ext.prefix ?: "${meta.id}" """ - scil_dwi_extract_b0.py -h - bet -h - scil_volume_math.py -h - mrcalc -h - scil_volume_crop.py -h - touch ${prefix}__image_bet.nii.gz touch ${prefix}__image_bet_mask.nii.gz touch ${prefix}__image_boundingBox.pkl @@ -89,5 +82,18 @@ process BETCROP_FSLBETCROP { mrtrix: \$(mrcalc -version 2>&1 | sed -n 's/== mrcalc \\([0-9.]\\+\\).*/\\1/p') fsl: \$(flirt -version 2>&1 | sed -n 's/FLIRT version \\([0-9.]\\+\\)/\\1/p') END_VERSIONS + + function handle_code () { + local code=\$? 
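+          # Exit codes in the 'ignore' list (here 1, returned by tools such as bet when they only print their usage) are remapped to 0 so the stub run still succeeds.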
+ ignore=( 1 ) + exit \$([[ " \${ignore[@]} " =~ " \$code " ]] && echo 0 || echo \$code) + } + trap 'handle_code' ERR + + bet + scil_dwi_extract_b0.py -h + scil_volume_math.py -h + mrcalc -h + scil_volume_crop.py -h """ } diff --git a/modules/nf-neuro/betcrop/fslbetcrop/tests/main.nf.test b/modules/nf-neuro/betcrop/fslbetcrop/tests/main.nf.test index d24c8b08..c89409da 100644 --- a/modules/nf-neuro/betcrop/fslbetcrop/tests/main.nf.test +++ b/modules/nf-neuro/betcrop/fslbetcrop/tests/main.nf.test @@ -108,4 +108,32 @@ nextflow_process { } + test("betcrop - fslbetcrop - stub-run") { + + options "-stub-run" + + when { + process { + """ + input[0] = LOAD_DATA.out.test_data_directory.map{ + test_data_directory -> [ + [ id:'test', single_end:false ], // meta map + file("\${test_data_directory}/dwi/dwi.nii.gz"), + file("\${test_data_directory}/dwi/dwi.bval"), + file("\${test_data_directory}/dwi/dwi.bvec") + ] + } + """ + } + } + + then { + assertAll( + { assert process.success }, + { assert snapshot(process.out.versions).match() } + ) + } + + } + } diff --git a/modules/nf-neuro/betcrop/fslbetcrop/tests/main.nf.test.snap b/modules/nf-neuro/betcrop/fslbetcrop/tests/main.nf.test.snap index 8d1977ef..47640ecb 100644 --- a/modules/nf-neuro/betcrop/fslbetcrop/tests/main.nf.test.snap +++ b/modules/nf-neuro/betcrop/fslbetcrop/tests/main.nf.test.snap @@ -1,4 +1,16 @@ { + "betcrop - fslbetcrop - stub-run": { + "content": [ + [ + "versions.yml:md5,cc45ab23921525536874337c1c88e5cf" + ] + ], + "meta": { + "nf-test": "0.9.0", + "nextflow": "24.10.1" + }, + "timestamp": "2024-12-12T10:33:04.50811" + }, "betcrop - fslbetcrop - dwi": { "content": [ { From 7b8fb95e764e220967c4ae2414184c4f9dad5aaa Mon Sep 17 00:00:00 2001 From: Anthony Gagnon Date: Thu, 12 Dec 2024 13:02:28 -0500 Subject: [PATCH 3/9] catch error in stub for eddy and topup + add stub test --- modules/nf-neuro/preproc/eddy/main.nf | 30 +++++++++++-------- .../nf-neuro/preproc/eddy/tests/main.nf.test | 30 +++++++++++++++++++ .../preproc/eddy/tests/main.nf.test.snap | 12 ++++++++ modules/nf-neuro/preproc/topup/main.nf | 19 +++++++----- .../nf-neuro/preproc/topup/tests/main.nf.test | 28 +++++++++++++++++ .../preproc/topup/tests/main.nf.test.snap | 12 ++++++++ 6 files changed, 111 insertions(+), 20 deletions(-) diff --git a/modules/nf-neuro/preproc/eddy/main.nf b/modules/nf-neuro/preproc/eddy/main.nf index dd1c5ef5..a50fef51 100644 --- a/modules/nf-neuro/preproc/eddy/main.nf +++ b/modules/nf-neuro/preproc/eddy/main.nf @@ -114,32 +114,36 @@ process PREPROC_EDDY { """ stub: - def args = task.ext.args ?: '' def prefix = task.ext.prefix ?: "${meta.id}" """ - scil_volume_math.py -h - maskfilter -h - bet -h - scil_dwi_extract_b0.py -h - scil_gradients_validate_correct_eddy.py -h - scil_dwi_concatenate.py -h - mrconvert -h - scil_dwi_prepare_eddy_command.py -h - scil_header_print_info.py -h - touch ${prefix}__dwi_corrected.nii.gz touch ${prefix}__dwi_eddy_corrected.bval touch ${prefix}__dwi_eddy_corrected.bvec touch ${prefix}__b0_bet_mask.nii.gz - cat <<-END_VERSIONS > versions.yml "${task.process}": scilpy: \$(pip list | grep scilpy | tr -s ' ' | cut -d' ' -f2) mrtrix: \$(dwidenoise -version 2>&1 | sed -n 's/== dwidenoise \\([0-9.]\\+\\).*/\\1/p') fsl: \$(flirt -version 2>&1 | sed -n 's/FLIRT version \\([0-9.]\\+\\)/\\1/p') - END_VERSIONS + + function handle_code () { + local code=\$? 
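+            # Exit codes in the 'ignore' list are treated as success, so help/usage calls that return a non-zero code do not fail the stub.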
+ ignore=( 1 ) + exit \$([[ " \${ignore[@]} " =~ " \$code " ]] && echo 0 || echo \$code) + } + trap 'handle_code' ERR + + scil_volume_math.py -h + maskfilter -h + bet -h + scil_dwi_extract_b0.py -h + scil_gradients_validate_correct_eddy.py -h + scil_dwi_concatenate.py -h + mrconvert -h + scil_dwi_prepare_eddy_command.py -h + scil_header_print_info.py -h """ } diff --git a/modules/nf-neuro/preproc/eddy/tests/main.nf.test b/modules/nf-neuro/preproc/eddy/tests/main.nf.test index bfe7eb1b..bc5d31ab 100644 --- a/modules/nf-neuro/preproc/eddy/tests/main.nf.test +++ b/modules/nf-neuro/preproc/eddy/tests/main.nf.test @@ -93,4 +93,34 @@ nextflow_process { ) } } + + test("eddy - stub-run") { + options "-stub-run" + when { + process { + """ + input[0] = LOAD_DATA.out.test_data_directory + .map{ test_data_directory -> [ + [ id:'test', single_end:false ], // meta map + file("\${test_data_directory}/sub-01_dir-AP_dwi.nii.gz", checkIfExists: true), + file("\${test_data_directory}/sub-01_dir-AP_dwi.bval", checkIfExists: true), + file("\${test_data_directory}/sub-01_dir-AP_dwi.bvec", checkIfExists: true), + file("\${test_data_directory}/sub-01_dir-PA_dwi.nii.gz", checkIfExists: true), + file("\${test_data_directory}/sub-01_dir-PA_dwi.bval", checkIfExists: true), + file("\${test_data_directory}/sub-01_dir-PA_dwi.bvec", checkIfExists: true), + file("\${test_data_directory}/sub-01__corrected_b0s.nii.gz", checkIfExists: true), + file("\${test_data_directory}/topup_results_fieldcoef.nii.gz", checkIfExists: true), + file("\${test_data_directory}/topup_results_movpar.txt", checkIfExists: true) + ] + } + """ + } + } + then { + assertAll( + { assert process.success }, + { assert snapshot(process.out.versions).match() } + ) + } + } } diff --git a/modules/nf-neuro/preproc/eddy/tests/main.nf.test.snap b/modules/nf-neuro/preproc/eddy/tests/main.nf.test.snap index 239baba4..617a4012 100644 --- a/modules/nf-neuro/preproc/eddy/tests/main.nf.test.snap +++ b/modules/nf-neuro/preproc/eddy/tests/main.nf.test.snap @@ -62,5 +62,17 @@ "nextflow": "24.04.4" }, "timestamp": "2024-11-21T19:53:14.315721034" + }, + "eddy - stub-run": { + "content": [ + [ + "versions.yml:md5,137e3ce0fd25e5b16de2d8cc5a5aefca" + ] + ], + "meta": { + "nf-test": "0.9.0", + "nextflow": "24.10.1" + }, + "timestamp": "2024-12-12T12:50:30.654366" } } \ No newline at end of file diff --git a/modules/nf-neuro/preproc/topup/main.nf b/modules/nf-neuro/preproc/topup/main.nf index 99fa3412..3299c91b 100644 --- a/modules/nf-neuro/preproc/topup/main.nf +++ b/modules/nf-neuro/preproc/topup/main.nf @@ -73,16 +73,10 @@ process PREPROC_TOPUP { """ stub: - def args = task.ext.args ?: '' def prefix = task.ext.prefix ?: "${meta.id}" def prefix_topup = task.ext.prefix_topup ? task.ext.prefix_topup : "" """ - scil_volume_math.py -h - scil_dwi_extract_b0.py -h - antsRegistrationSyNQuick.sh -h - scil_dwi_prepare_topup_command.py -h - touch ${prefix}__corrected_b0s.nii.gz touch ${prefix}__rev_b0_warped.nii.gz touch ${prefix}__rev_b0_mean.nii.gz @@ -95,7 +89,18 @@ process PREPROC_TOPUP { scilpy: \$(pip list | grep scilpy | tr -s ' ' | cut -d' ' -f2) antsRegistration: \$(antsRegistration --version | grep "Version" | sed -E 's/.*v([0-9]+\\+\\).*/\\1/') fsl: \$(flirt -version 2>&1 | sed -n 's/FLIRT version \\([0-9.]\\+\\)/\\1/p') - END_VERSIONS + + function handle_code () { + local code=\$? 
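+          # Exit codes in the 'ignore' list are remapped to 0 before the script exits, keeping the stub green when a tool reports usage with code 1.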
+ ignore=( 1 ) + exit \$([[ " \${ignore[@]} " =~ " \$code " ]] && echo 0 || echo \$code) + } + trap 'handle_code' ERR + + scil_volume_math.py -h + scil_dwi_extract_b0.py -h + antsRegistrationSyNQuick.sh + scil_dwi_prepare_topup_command.py -h """ } diff --git a/modules/nf-neuro/preproc/topup/tests/main.nf.test b/modules/nf-neuro/preproc/topup/tests/main.nf.test index f404355c..146ceb93 100644 --- a/modules/nf-neuro/preproc/topup/tests/main.nf.test +++ b/modules/nf-neuro/preproc/topup/tests/main.nf.test @@ -95,4 +95,32 @@ nextflow_process { ) } } + + test("topup - stub-run") { + options "-stub-run" + when { + process { + """ + input[0] = LOAD_DATA.out.test_data_directory + .map{ test_data_directory -> [ + [ id:'test', single_end:false ], // meta map + file("\${test_data_directory}/sub-01_dir-AP_dwi.nii.gz", checkIfExists: true), + file("\${test_data_directory}/sub-01_dir-AP_dwi.bval", checkIfExists: true), + file("\${test_data_directory}/sub-01_dir-AP_dwi.bvec", checkIfExists: true), + file("\${test_data_directory}/sub-01_dir-AP_sbref.nii.gz", checkIfExists: true), + file("\${test_data_directory}/sub-01_dir-PA_dwi.nii.gz", checkIfExists: true), + file("\${test_data_directory}/sub-01_dir-PA_dwi.bval", checkIfExists: true), + file("\${test_data_directory}/sub-01_dir-PA_dwi.bvec", checkIfExists: true), + file("\${test_data_directory}/sub-01_dir-PA_sbref.nii.gz", checkIfExists: true)]} + input[1] = [] + """ + } + } + then { + assertAll( + { assert process.success }, + { assert snapshot(process.out.versions).match() } + ) + } + } } diff --git a/modules/nf-neuro/preproc/topup/tests/main.nf.test.snap b/modules/nf-neuro/preproc/topup/tests/main.nf.test.snap index efe20842..171632b7 100644 --- a/modules/nf-neuro/preproc/topup/tests/main.nf.test.snap +++ b/modules/nf-neuro/preproc/topup/tests/main.nf.test.snap @@ -33,6 +33,18 @@ }, "timestamp": "2024-11-01T16:21:18.752887499" }, + "topup - stub-run": { + "content": [ + [ + "versions.yml:md5,1e46217f093e27a6a26cd479f074296b" + ] + ], + "meta": { + "nf-test": "0.9.0", + "nextflow": "24.10.1" + }, + "timestamp": "2024-12-12T11:56:01.860981" + }, "topup_light": { "content": [ "test__corrected_b0s.nii.gz", From 2e222d18c89e5547a6bf5c0c74673baeb63bcd52 Mon Sep 17 00:00:00 2001 From: Thoumyre Stanislas Date: Thu, 12 Dec 2024 18:32:39 +0000 Subject: [PATCH 4/9] remove scilpy from denoising mppca --- modules/nf-neuro/denoising/mppca/main.nf | 12 ++++------- modules/nf-neuro/denoising/mppca/meta.yml | 3 --- .../denoising/mppca/tests/main.nf.test.snap | 10 +++++----- .../preproc_dwi/tests/main.nf.test.snap | 20 +++++++++---------- 4 files changed, 19 insertions(+), 26 deletions(-) diff --git a/modules/nf-neuro/denoising/mppca/main.nf b/modules/nf-neuro/denoising/mppca/main.nf index dfbe9e99..4eb498d8 100755 --- a/modules/nf-neuro/denoising/mppca/main.nf +++ b/modules/nf-neuro/denoising/mppca/main.nf @@ -3,9 +3,7 @@ process DENOISING_MPPCA { tag "$meta.id" label 'process_medium' - container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
- 'https://scil.usherbrooke.ca/containers/scilus_2.0.2.sif': - 'scilus/scilus:2.0.2' }" + container "mrtrix3/mrtrix3:latest" input: tuple val(meta), path(dwi), path(mask) @@ -30,13 +28,12 @@ process DENOISING_MPPCA { export MRTRIX_RNG_SEED=112524 dwidenoise $dwi ${prefix}_dwi_denoised.nii.gz $extent ${args.join(" ")} - scil_volume_math.py lower_clip ${prefix}_dwi_denoised.nii.gz 0 \ - ${prefix}_dwi_denoised.nii.gz -f + mrcalc ${prefix}_dwi_denoised.nii.gz 0 -gt ${prefix}_dwi_denoised.nii.gz 0 \ + -if ${prefix}_dwi_denoised.nii.gz -force cat <<-END_VERSIONS > versions.yml "${task.process}": mrtrix: \$(mrcalc -version 2>&1 | sed -n 's/== mrcalc \\([0-9.]\\+\\).*/\\1/p') - scilpy: \$(pip list | grep scilpy | tr -s ' ' | cut -d' ' -f2) END_VERSIONS """ @@ -46,14 +43,13 @@ process DENOISING_MPPCA { """ dwidenoise -h - scil_volume_math.py -h + mrcalc -h touch ${prefix}_dwi_denoised.nii.gz cat <<-END_VERSIONS > versions.yml "${task.process}": mrtrix: \$(mrcalc -version 2>&1 | sed -n 's/== mrcalc \\([0-9.]\\+\\).*/\\1/p') - scilpy: \$(pip list | grep scilpy | tr -s ' ' | cut -d' ' -f2) END_VERSIONS """ } diff --git a/modules/nf-neuro/denoising/mppca/meta.yml b/modules/nf-neuro/denoising/mppca/meta.yml index 34744795..5ce90fe6 100755 --- a/modules/nf-neuro/denoising/mppca/meta.yml +++ b/modules/nf-neuro/denoising/mppca/meta.yml @@ -12,9 +12,6 @@ tools: - "MRtrix3": description: "Toolbox for image processing, analysis and visualisation of dMRI." homepage: "https://mrtrix.readthedocs.io/en/latest/" - - "scilpy": - description: "The Sherbrooke Connectivity Imaging Lab (SCIL) Python dMRI processing toolbox." - homepage: "https://github.com/scilus/scilpy.git" input: - meta: diff --git a/modules/nf-neuro/denoising/mppca/tests/main.nf.test.snap b/modules/nf-neuro/denoising/mppca/tests/main.nf.test.snap index ab3c336d..21195dd7 100644 --- a/modules/nf-neuro/denoising/mppca/tests/main.nf.test.snap +++ b/modules/nf-neuro/denoising/mppca/tests/main.nf.test.snap @@ -11,7 +11,7 @@ ] ], "1": [ - "versions.yml:md5,c9915fbc1956d7f54bee0748f1ddf920" + "versions.yml:md5,adbce7b09c63d541cdc2782235363275" ], "image": [ [ @@ -22,14 +22,14 @@ ] ], "versions": [ - "versions.yml:md5,c9915fbc1956d7f54bee0748f1ddf920" + "versions.yml:md5,adbce7b09c63d541cdc2782235363275" ] } ], "meta": { - "nf-test": "0.9.0-rc1", - "nextflow": "24.04.4" + "nf-test": "0.9.0", + "nextflow": "24.10.2" }, - "timestamp": "2024-08-05T15:17:51.718413" + "timestamp": "2024-12-12T18:27:14.057961679" } } \ No newline at end of file diff --git a/subworkflows/nf-neuro/preproc_dwi/tests/main.nf.test.snap b/subworkflows/nf-neuro/preproc_dwi/tests/main.nf.test.snap index 962db759..370fa97f 100644 --- a/subworkflows/nf-neuro/preproc_dwi/tests/main.nf.test.snap +++ b/subworkflows/nf-neuro/preproc_dwi/tests/main.nf.test.snap @@ -15,10 +15,10 @@ "versions.yml:md5,0494fbf74bc9c16d2b30cb45b3bba66b", "versions.yml:md5,0b8c908e52917b0b706fc9d1b4d6cd24", "versions.yml:md5,0e680a0c6b56892e1a7ec7f85bb95322", - "versions.yml:md5,3340c4b5e56ca52f7342be3c9044eb47", + "versions.yml:md5,571265e710ca29198e69be22c0f970d5", + "versions.yml:md5,662ea558da42564a0f6140473132bcb4", "versions.yml:md5,7ddf2e98f59b19c9b933670550f26ad7", "versions.yml:md5,c15ba5efd24564dba4710b6da8c4b791", - "versions.yml:md5,d68fe3399120f84f1add62181708780b", "versions.yml:md5,ea5858879452a59bb355228ae7f38111", "versions.yml:md5,f041502e22449973d84ac1c618e8ebf9" ], @@ -32,9 +32,9 @@ ], "meta": { "nf-test": "0.9.0", - "nextflow": "24.04.4" + "nextflow": "24.10.2" }, - "timestamp": 
"2024-11-01T12:50:45.411022164" + "timestamp": "2024-12-12T18:30:03.433004717" }, "preproc_dwi_rev_b0": { "content": [ @@ -52,7 +52,7 @@ "versions.yml:md5,0494fbf74bc9c16d2b30cb45b3bba66b", "versions.yml:md5,0b8c908e52917b0b706fc9d1b4d6cd24", "versions.yml:md5,0e680a0c6b56892e1a7ec7f85bb95322", - "versions.yml:md5,3340c4b5e56ca52f7342be3c9044eb47", + "versions.yml:md5,571265e710ca29198e69be22c0f970d5", "versions.yml:md5,7ddf2e98f59b19c9b933670550f26ad7", "versions.yml:md5,c15ba5efd24564dba4710b6da8c4b791", "versions.yml:md5,ea5858879452a59bb355228ae7f38111", @@ -68,9 +68,9 @@ ], "meta": { "nf-test": "0.9.0", - "nextflow": "24.04.4" + "nextflow": "24.10.2" }, - "timestamp": "2024-11-01T12:49:12.328235961" + "timestamp": "2024-12-12T18:28:35.975149345" }, "preproc_dwi_all_options": { "content": [ @@ -88,10 +88,10 @@ "versions.yml:md5,0494fbf74bc9c16d2b30cb45b3bba66b", "versions.yml:md5,0b8c908e52917b0b706fc9d1b4d6cd24", "versions.yml:md5,0e680a0c6b56892e1a7ec7f85bb95322", - "versions.yml:md5,3340c4b5e56ca52f7342be3c9044eb47", + "versions.yml:md5,571265e710ca29198e69be22c0f970d5", + "versions.yml:md5,662ea558da42564a0f6140473132bcb4", "versions.yml:md5,7ddf2e98f59b19c9b933670550f26ad7", "versions.yml:md5,c15ba5efd24564dba4710b6da8c4b791", - "versions.yml:md5,d68fe3399120f84f1add62181708780b", "versions.yml:md5,ea5858879452a59bb355228ae7f38111", "versions.yml:md5,f041502e22449973d84ac1c618e8ebf9" ], @@ -107,6 +107,6 @@ "nf-test": "0.9.0", "nextflow": "24.10.2" }, - "timestamp": "2024-11-29T13:47:57.561745796" + "timestamp": "2024-12-12T18:31:26.492075412" } } \ No newline at end of file From 6b380f7a9c594b71a4ee713b510ee38b2e40677b Mon Sep 17 00:00:00 2001 From: Thoumyre Stanislas Date: Thu, 12 Dec 2024 18:47:54 +0000 Subject: [PATCH 5/9] remplace container gibbs --- modules/nf-neuro/preproc/gibbs/main.nf | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/modules/nf-neuro/preproc/gibbs/main.nf b/modules/nf-neuro/preproc/gibbs/main.nf index 16b3d7b6..1744e5d9 100644 --- a/modules/nf-neuro/preproc/gibbs/main.nf +++ b/modules/nf-neuro/preproc/gibbs/main.nf @@ -2,9 +2,7 @@ process PREPROC_GIBBS { tag "$meta.id" label 'process_single' - container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
- 'https://scil.usherbrooke.ca/containers/scilus_2.0.2.sif': - 'scilus/scilus:2.0.2' }" + container "mrtrix3/mrtrix3:latest" input: tuple val(meta), path(dwi) From 35255ccd0c3837e65c6d566754dcaf82dc420c46 Mon Sep 17 00:00:00 2001 From: AlexVCaron Date: Tue, 17 Dec 2024 19:44:37 +0000 Subject: [PATCH 6/9] neuroscience dictionary for cspell --- .vscode/settings.json | 21 ++++++- cspell.json | 24 ++++++++ docs/cspell/neuroscience.txt | 103 +++++++++++++++++++++++++++++++++++ 3 files changed, 147 insertions(+), 1 deletion(-) create mode 100644 cspell.json create mode 100644 docs/cspell/neuroscience.txt diff --git a/.vscode/settings.json b/.vscode/settings.json index 0d7d8836..7270868c 100755 --- a/.vscode/settings.json +++ b/.vscode/settings.json @@ -19,5 +19,24 @@ "testDataExplorer.dataserver": "scil.usherbrooke.ca", "testDataExplorer.serverdatalocation": "scil_test_data/dvc-store/files/md5", "testDataExplorer.localListingLocation": "tests/test_data.json", - "testDataExplorer.remoteListingLocation": "tests/test_data.json" + "testDataExplorer.remoteListingLocation": "tests/test_data.json", + "nextflow.files.exclude": [ + ".git", + ".nf-test", + "work", + ".venv", + ".nextflow", + "node_modules", + ".tests/runs" + ], + "cSpell.ignorePaths": [ + "package-lock.json", + "node_modules", + "vscode-extension", + ".git/", + ".vscode", + ".vscode-insiders", + ".venv", + "tests/.runs" + ] } diff --git a/cspell.json b/cspell.json new file mode 100644 index 00000000..c886734e --- /dev/null +++ b/cspell.json @@ -0,0 +1,24 @@ +{ + "version": "0.2", + "dictionaryDefinitions": [ + { + "name": "neuroscience", + "path": "docs/cspell/neuroscience.txt", + "addWords": true + } + ], + "dictionaries": [ + "typescript", + "node", + "python", + "latex", + "bash", + "companies", + "softwareTerms", + "misc", + "en_US", + "en-gb", + "filetypes", + "neuroscience" + ] +} diff --git a/docs/cspell/neuroscience.txt b/docs/cspell/neuroscience.txt new file mode 100644 index 00000000..3008e2f8 --- /dev/null +++ b/docs/cspell/neuroscience.txt @@ -0,0 +1,103 @@ +*anat* +*anthony +*apply* +*b0* +*bet* +*bore +*bundle* +*bval* +*bvec* +*coeff +*coeffs +*container +*containers +*core +*crop* +*denoise* +*dwi* +*easy* +*extract* +*fast* +*field* +*fodf* +*frf* +*image +*local* +*mask +*mean* +*means* +*metrics +*morph* +*mov* +*normalise +*pack +*par +*pft* +*reg* +*registration* +*rev* +*seg* +*synth* +*test* +*to* +*tracking* +*transforms +*volume* +ants* +aparc +arnaud* +aseg +connectomics +denoised +denoising +descoteaux +dev* +dipy +dti* +dtype +etienne +evals +evecs +extension* +freesurfer* +fsl* +gagnon* +interp +medde +mppca +mrdegibbs +mrtrix +msmt +neuro* +nextflow +nf* +nifti +nl* +nufo +onge +parcellation +parcellations +preproc +reconst +rheault +robsyme +sbref +scilpy +scilus +seed* +ssst +stanislas +subworkflow +subworkflows +thoumyre +topup +track* +tractoflow +tractogram +tractograms +tractography +transfo +uchar +unbias +wmparc +zenodo From 3a9742ea4b69ba400b4b44485a8ce88fa2a99e22 Mon Sep 17 00:00:00 2001 From: AlexVCaron Date: Tue, 17 Dec 2024 20:06:35 +0000 Subject: [PATCH 7/9] add comment trigger for checks --- .github/workflows/manual_checks.yml | 17 +++++++++++++++++ .github/workflows/update_pr.yml | 3 ++- 2 files changed, 19 insertions(+), 1 deletion(-) create mode 100644 .github/workflows/manual_checks.yml diff --git a/.github/workflows/manual_checks.yml b/.github/workflows/manual_checks.yml new file mode 100644 index 00000000..68f78b72 --- /dev/null +++ b/.github/workflows/manual_checks.yml @@ -0,0 +1,17 @@ 
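+# Lets maintainers re-run the full check suite on a pull request by commenting '@rerun-checks'.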
+name: Trigger checks manually +on: + issue_comment: + types: + - created + - edited + + +jobs: + checks: + if: github.event.issue.pull_request && contains(github.event.comment.body, '@rerun-checks') + # Cancel if a newer run is started + concurrency: + group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }} + cancel-in-progress: true + uses: ./.github/workflows/run_checks_suite.yml + secrets: inherit diff --git a/.github/workflows/update_pr.yml b/.github/workflows/update_pr.yml index cd600206..4cb09366 100644 --- a/.github/workflows/update_pr.yml +++ b/.github/workflows/update_pr.yml @@ -6,7 +6,8 @@ on: merge_group: types: - checks_requested - branches: -main + branches: + - main # Cancel if a newer run is started concurrency: From fb1ab8cde62331911cfa83efb6d2ed1c18b1a5b0 Mon Sep 17 00:00:00 2001 From: AlexVCaron Date: Tue, 17 Dec 2024 20:09:49 +0000 Subject: [PATCH 8/9] prettier --- .github/workflows/manual_checks.yml | 1 - 1 file changed, 1 deletion(-) diff --git a/.github/workflows/manual_checks.yml b/.github/workflows/manual_checks.yml index 68f78b72..2b51e6b0 100644 --- a/.github/workflows/manual_checks.yml +++ b/.github/workflows/manual_checks.yml @@ -5,7 +5,6 @@ on: - created - edited - jobs: checks: if: github.event.issue.pull_request && contains(github.event.comment.body, '@rerun-checks') From 0be48f97f30aeeb528943e541d92104b4a447d79 Mon Sep 17 00:00:00 2001 From: AlexVCaron Date: Wed, 18 Dec 2024 17:52:10 +0000 Subject: [PATCH 9/9] revamp test data loader, now handles cache for real. fix nf-test diff rendering --- .devcontainer/devops/devcontainer.json | 4 +- docs/cspell/neuroscience.txt | 3 +- poetry.lock | 4 +- pyproject.toml | 1 + subworkflows/nf-neuro/load_test_data/main.nf | 161 ++++++++++++++++--- tests/nextflow.config | 3 +- tests/test_data.json | 1 - 7 files changed, 144 insertions(+), 33 deletions(-) diff --git a/.devcontainer/devops/devcontainer.json b/.devcontainer/devops/devcontainer.json index ad6788aa..9c044d5a 100755 --- a/.devcontainer/devops/devcontainer.json +++ b/.devcontainer/devops/devcontainer.json @@ -4,7 +4,9 @@ "dockerfile": "Dockerfile", "args": { "NFTEST_VERSION": "0.9.0", - "POETRY_VERSION": "1.8.*" + "POETRY_VERSION": "1.8.*", + "NFT_DIFF": "pdiff", + "NFT_DIFF_ARGS": "--line-numbers --width 120 --expand-tabs=2" } }, "forwardPorts": [3000], diff --git a/docs/cspell/neuroscience.txt b/docs/cspell/neuroscience.txt index 3008e2f8..2b27901f 100644 --- a/docs/cspell/neuroscience.txt +++ b/docs/cspell/neuroscience.txt @@ -29,6 +29,7 @@ *metrics *morph* *mov* +*neuro* *normalise *pack *par @@ -64,11 +65,11 @@ fsl* gagnon* interp medde +mkdirs mppca mrdegibbs mrtrix msmt -neuro* nextflow nf* nifti diff --git a/poetry.lock b/poetry.lock index 8100f7b8..185cdd26 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.8.5 and should not be changed by hand. 
[[package]] name = "annotated-types" @@ -2429,4 +2429,4 @@ cffi = ["cffi (>=1.11)"] [metadata] lock-version = "2.0" python-versions = "<3.11,>=3.9" -content-hash = "86ff61aa15873147e59b28a6ff0b1ac405796dde631e28216829299332fdfa26" +content-hash = "84934e125505bfaca09ac4232c63d5c53fa66696503f122fcf1feef8d306e4e8" diff --git a/pyproject.toml b/pyproject.toml index 155550ea..fc149a48 100755 --- a/pyproject.toml +++ b/pyproject.toml @@ -38,3 +38,4 @@ python = "<3.11,>=3.9" nf-core = "~2.14.1" black = "^24.1.1" isort = "^5.13.2" +pdiff = "^1.1.4" diff --git a/subworkflows/nf-neuro/load_test_data/main.nf b/subworkflows/nf-neuro/load_test_data/main.nf index 612c68a7..5a1dbcc3 100644 --- a/subworkflows/nf-neuro/load_test_data/main.nf +++ b/subworkflows/nf-neuro/load_test_data/main.nf @@ -1,36 +1,147 @@ -import java.nio.file.Files -def fetch_archive ( name, destination, remote, database, data_identifiers ) { - // Find cache location for test archives + +def locate_local_cache () { + // Find cache location for test archives, in order of preference: + // 1. Using environment variable $NFNEURO_TEST_DATA_HOME + // 2. Using environment variable $XDG_DATA_HOME + // 3. Using default location $HOME/.local/share + // + // Location selected is appended with 'nf-neuro-test-archives'. + // If the location does not exist, it is created. + def storage = file( System.getenv('NFNEURO_TEST_DATA_HOME') ?: System.getenv('XDG_DATA_HOME') ?: "${System.getenv('HOME')}/.local/share" ) def cache_location = file("$storage/nf-neuro-test-archives") - if ( !cache_location.exists() ) cache_location.mkdirs() - // Fetch file from remote if not present in cache - def data_id = data_identifiers[name] - if ( !data_id ) { - error "Invalid test data identifier supplied: $name" + if ( !cache_location.exists() ) { + try { + cache_location.mkdirs() + } + catch (Exception _e) { + error "Failed to create cache location: $cache_location" + } + } + + return cache_location +} + +def locate_remote_cache () { + return "$params.test_data_remote/$params.test_database_path" +} + +def load_manifest () { + // Load test data associations from params.test_data_associations + // which must be a map of test data identifiers [filename: identifier] + + if ( ! params.test_data_associations ) { + error """ + No test data associations provided, cannot create cache manifest. Please + provide a map of test data identifiers [filename: identifier] using + params.test_data_associations. + """ + } + + return params.test_data_associations +} + +def validate_cache_entry ( name, manager ) { + // Check if the cache entry is present in the manifest + + if ( !manager.manifest[name] ) { + error "Invalid cache entry supplied : $name" + } + +} + +def add_cache_entry ( name, manager ) { + // Add the test data archive as an entry in the cache. The archive is + // fetched from the remote location and stored in the cache location. + // The given name is validated against the manifest before adding. 
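+    // On a failed transfer the partial cache entry is deleted so a later run can retry the fetch.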
+ + manager.validate_entry(name) + + def identifier = "${manager.manifest[name]}" + def cache_entry = file("${manager.cache_location}/$identifier") + def remote_subpath = "${identifier[0..1]}/${identifier[2..-1]}" + def remote_entry = file("$manager.remote_location/$remote_subpath") + + try { + remote_entry.copyTo(cache_entry) + } + catch (Exception _e) { + manager.delete_entry(name) + error "Failed to fetch test data archive: $name | $_e" } - def cache_entry = file("$cache_location/$data_id") - if ( !cache_entry.exists() ) { + return cache_entry +} + +def get_cache_entry ( name, manager ) { + // Retrieve the cache entry for the given test data archive name. + // If the entry does not exist, it is added to the cache. The add + // operation will validate the name against the manifest. + + def identifier = "${manager.manifest[name]}" + def cache_entry = file("${manager.cache_location}/$identifier") + + if ( !cache_entry.exists() ) manager.add_entry(name) + + return cache_entry +} + +def delete_cache_entry ( name, manager ) { + // Delete the cache entry for the given test data archive name. + + def identifier = "${manager.manifest[name]}" + def cache_entry = file("${manager.cache_location}/$identifier") + if ( cache_entry.exists() ) { try { - def remote_entry = "${data_id[0..1]}/${data_id[2..-1]}" - file("$remote/$database/$remote_entry").copyTo(cache_entry) + cache_entry.delete() } - catch (Exception e) { - error "Failed to fetch test data archive: $name" - file("$remote/$database/$remote_entry").delete() + catch (Exception _e) { + error "Failed to delete test data archive: $name" } } +} +def update_cache_entry ( name, manager ) { + // Update the cache entry for the given test data archive name. The + // procedure uses add to carry the update, but deletes the entry first + // if it exists. The add operation will validate the name against + // the manifest. + + manager.delete_entry(name) + manager.add_entry(name) +} + +def setup_cache () { + // Build a cache manager to encapsulate interaction with the test data cache. + // The manager follows simple CRUD operation to handle update and retrieval of + // test data archives from the cache and the remote location. 
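+    // A Groovy Expando acts as a lightweight manager object: plain properties hold the cache state and the closures assigned below behave like methods.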
+ + def cache_manager = new Expando( + remote_location: locate_remote_cache(), + cache_location: locate_local_cache(), + manifest: load_manifest() + ) + cache_manager.validate_entry = { v -> validate_cache_entry( v, cache_manager ) } + cache_manager.add_entry = { v -> add_cache_entry(v, cache_manager) } + cache_manager.get_entry = { v -> get_cache_entry(v, cache_manager) } + cache_manager.delete_entry = { v -> delete_cache_entry(v, cache_manager) } + cache_manager.update_entry = { v -> update_cache_entry(v, cache_manager) } + + return cache_manager +} + + +def fetch_archive ( name, destination, manager ) { // Unzip all archive content to destination - def content = new java.util.zip.ZipFile("$cache_entry") + def content = null try { + content = new java.util.zip.ZipFile("${manager.get_entry(name)}") content.entries().each{ entry -> def local_target = file("$destination/${entry.getName()}") if (entry.isDirectory()) { @@ -42,11 +153,14 @@ def fetch_archive ( name, destination, remote, database, data_identifiers ) { } } } + content.close() return destination.resolve("${name.take(name.lastIndexOf('.'))}") } - finally { - content.close() + catch (Exception _e) { + if (content) content.close() + manager.delete_entry(name) + error "Failed to extract test data archive: $name | $_e" } } @@ -57,16 +171,11 @@ workflow LOAD_TEST_DATA { test_data_prefix main: + manager = setup_cache() - ch_versions = Channel.empty() - test_data_path = Files.createTempDirectory("$test_data_prefix") + test_data_path = java.nio.file.Files.createTempDirectory("$test_data_prefix") ch_test_data_directory = ch_archive.map{ archive -> - fetch_archive( - archive, test_data_path, - params.test_data_remote, - params.test_database_path, - params.test_data_associations - ) + fetch_archive(archive, test_data_path, manager) } emit: diff --git a/tests/nextflow.config b/tests/nextflow.config index c7b2182a..77959c0a 100644 --- a/tests/nextflow.config +++ b/tests/nextflow.config @@ -1,4 +1,3 @@ -import groovy.json.JsonSlurper params { outdir = "output/" @@ -7,7 +6,7 @@ params { test_data_remote = "https://scil.usherbrooke.ca" test_database_path = "scil_test_data/dvc-store/files/md5" - test_data_associations = new JsonSlurper().parse( + test_data_associations = new groovy.json.JsonSlurper().parse( new File("$projectDir/tests/test_data.json") ) } diff --git a/tests/test_data.json b/tests/test_data.json index c7a1bdb5..cd5e7f52 100644 --- a/tests/test_data.json +++ b/tests/test_data.json @@ -57,6 +57,5 @@ "freesurfer_nifti.zip": "adb5ac4cf5c45040339e04e7c142e8c9", "transform.zip": "148afd665ddbd2bb80493208480571a9", "dicom.zip": "234913cbad53c19aa19aef9eda0a3839", - "freesurfer_nifti.zip": "adb5ac4cf5c45040339e04e7c142e8c9", "TOPUP.zip": "da11914087a1a4ed1d21d478540d41b0" }
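
Usage note: the revamped LOAD_TEST_DATA subworkflow resolves its archive cache from NFNEURO_TEST_DATA_HOME, then XDG_DATA_HOME, then ~/.local/share, always inside an 'nf-neuro-test-archives' directory. A minimal sketch of how a developer might reuse one cache across test runs — the cache path, module path and docker profile are illustrative assumptions; only the environment variable name comes from the code above:

    # Point the cache at a shared location instead of ~/.local/share/nf-neuro-test-archives
    export NFNEURO_TEST_DATA_HOME="$HOME/nf-neuro-test-data"

    # Archives are fetched from the remote on first use, then served from the cache
    nf-test test modules/nf-neuro/preproc/topup --profile docker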