Merge branch 'main' into revert-57-revert-25-fix-synthregistration
AlexVCaron authored Dec 4, 2024
2 parents a27bd11 + 9ed218f commit bf99204
Showing 10 changed files with 244 additions and 141 deletions.
17 changes: 9 additions & 8 deletions modules/nf-neuro/preproc/eddy/main.nf
@@ -10,11 +10,11 @@ process PREPROC_EDDY {
tuple val(meta), path(dwi), path(bval), path(bvec), path(rev_dwi), path(rev_bval), path(rev_bvec), path(corrected_b0s), path(topup_fieldcoef), path(topup_movpart)

output:
tuple val(meta), path("*__dwi_corrected.nii.gz") , emit: dwi_corrected
tuple val(meta), path("*__bval_eddy") , emit: bval_corrected
tuple val(meta), path("*__dwi_eddy_corrected.bvec"), emit: bvec_corrected
tuple val(meta), path("*__b0_bet_mask.nii.gz") , emit: b0_mask
path "versions.yml" , emit: versions
tuple val(meta), path("*__dwi_corrected.nii.gz") , emit: dwi_corrected
tuple val(meta), path("*__dwi_eddy_corrected.bval") , emit: bval_corrected
tuple val(meta), path("*__dwi_eddy_corrected.bvec") , emit: bvec_corrected
tuple val(meta), path("*__b0_bet_mask.nii.gz") , emit: b0_mask
path "versions.yml" , emit: versions

when:
task.ext.when == null || task.ext.when
@@ -38,6 +38,7 @@ process PREPROC_EDDY {
export OMP_NUM_THREADS=$task.cpus
export OPENBLAS_NUM_THREADS=1
export ANTS_RANDOM_SEED=7468
export MRTRIX_RNG_SEED=12345
orig_bval=$bval
# Concatenate DWIs
@@ -98,9 +99,9 @@
if [[ \$number_rev_dwi -eq 0 ]]
then
mv dwi_eddy_corrected.eddy_rotated_bvecs ${prefix}__dwi_eddy_corrected.bvec
mv \${orig_bval} ${prefix}__bval_eddy
mv \${orig_bval} ${prefix}__dwi_eddy_corrected.bval
else
scil_gradients_validate_correct_eddy.py dwi_eddy_corrected.eddy_rotated_bvecs \${bval} \${number_rev_dwi} ${prefix}__dwi_eddy_corrected.bvec ${prefix}__bval_eddy
scil_gradients_validate_correct_eddy.py dwi_eddy_corrected.eddy_rotated_bvecs \${bval} \${number_rev_dwi} ${prefix}__dwi_eddy_corrected.bvec ${prefix}__dwi_eddy_corrected.bval
fi
@@ -128,7 +129,7 @@
scil_header_print_info.py -h
touch ${prefix}__dwi_corrected.nii.gz
touch ${prefix}__bval_eddy
touch ${prefix}__dwi_eddy_corrected.bval
touch ${prefix}__dwi_eddy_corrected.bvec
touch ${prefix}__b0_bet_mask.nii.gz
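Note on the renamed outputs: downstream code now receives the eddy-corrected bval under the `*__dwi_eddy_corrected.bval` pattern instead of `*__bval_eddy`, through the same emit names. A minimal sketch of a consumer, assuming a hypothetical calling workflow (only the emit names are taken from the module above):

    // Hypothetical downstream usage (not part of this commit).
    // Each emit channel carries [ meta, file ]; join() matches on meta.
    ch_eddy_corrected = PREPROC_EDDY.out.dwi_corrected
        .join ( PREPROC_EDDY.out.bval_corrected )    // <prefix>__dwi_eddy_corrected.bval (was <prefix>__bval_eddy)
        .join ( PREPROC_EDDY.out.bvec_corrected )    // <prefix>__dwi_eddy_corrected.bvec
    // -> [ meta, dwi_corrected, bval_corrected, bvec_corrected ]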
4 changes: 3 additions & 1 deletion modules/nf-neuro/preproc/eddy/tests/main.nf.test
@@ -41,7 +41,9 @@ nextflow_process {
file("\${test_data_directory}/sub-01_dir-PA_dwi.bvec", checkIfExists: true),
file("\${test_data_directory}/sub-01__corrected_b0s.nii.gz", checkIfExists: true),
file("\${test_data_directory}/topup_results_fieldcoef.nii.gz", checkIfExists: true),
file("\${test_data_directory}/topup_results_movpar.txt", checkIfExists: true)]}
file("\${test_data_directory}/topup_results_movpar.txt", checkIfExists: true)
]
}
"""
}
}
8 changes: 4 additions & 4 deletions modules/nf-neuro/preproc/eddy/tests/main.nf.test.snap
@@ -8,7 +8,7 @@
"id": "test",
"single_end": false
},
"test__bval_eddy:md5,4c61c53078316c31b4d5daf446a3d6ac"
"test__dwi_eddy_corrected.bval:md5,4c61c53078316c31b4d5daf446a3d6ac"
]
],
"test__dwi_eddy_corrected.bvec",
@@ -29,7 +29,7 @@
"nf-test": "0.9.0",
"nextflow": "24.04.4"
},
"timestamp": "2024-11-01T15:17:57.039667492"
"timestamp": "2024-11-21T19:52:21.761680879"
},
"eddy_light": {
"content": [
@@ -40,7 +40,7 @@
"id": "test",
"single_end": false
},
"test__bval_eddy:md5,8192e97b08e8032382397bb844b31892"
"test__dwi_eddy_corrected.bval:md5,8192e97b08e8032382397bb844b31892"
]
],
"test__dwi_eddy_corrected.bvec",
@@ -61,6 +61,6 @@
"nf-test": "0.9.0",
"nextflow": "24.04.4"
},
"timestamp": "2024-11-01T15:47:48.241855619"
"timestamp": "2024-11-21T19:53:14.315721034"
}
}
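The md5 keys in this snapshot are written by nf-test's snapshot mechanism, so renaming the emitted bval file changes the filename keys and timestamps seen above. A minimal, hypothetical assertion of the kind that produces such entries (the actual assertions live in the test files of this commit):

    // Illustrative nf-test assertion: snapshot() records outputs keyed by
    // filename with an md5 checksum; match() compares against the .snap file.
    then {
        assert process.success
        assert snapshot(process.out.bval_corrected).match()
    }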
102 changes: 52 additions & 50 deletions subworkflows/nf-neuro/preproc_dwi/main.nf
@@ -14,60 +14,54 @@ include { TOPUP_EDDY } from '../topup_eddy/main'
workflow PREPROC_DWI {

take:
ch_dwi // channel: [ val(meta), [ dwi, bval, bvec ] ]
ch_rev_dwi // channel: [ val(meta), [ rev_dwi, bval, bvec ] ], optional
ch_b0 // Channel: [ val(meta), [ b0 ] ], optional
ch_rev_b0 // channel: [ val(meta), [ reverse b0 ] ], optional
ch_config_topup // channel: [ 'config_topup' ], optional
ch_dwi // channel: [ val(meta), dwi, bval, bvec ]
ch_rev_dwi // channel: [ val(meta), rev-dwi, bval, bvec ], optional
ch_b0 // Channel: [ val(meta), b0 ], optional
ch_rev_b0 // channel: [ val(meta), rev-b0 ], optional
ch_config_topup // channel: [ 'topup.cnf' ], optional

main:

ch_versions = Channel.empty()

ch_denoise_dwi = ch_dwi
ch_dwi_bvalbvec = ch_dwi
.multiMap { meta, dwi, bval, bvec ->
dwi: [ meta, dwi ]
bvs_files: [ meta, bval, bvec ]
}

// ** Denoised DWI ** //
DENOISE_DWI (
ch_denoise_dwi.dwi
.map{ it + [[]] }
)
ch_rev_dwi_bvalbvec = ch_rev_dwi
.multiMap { meta, dwi, bval, bvec ->
rev_dwi: [ meta, dwi ]
rev_bvs_files: [ meta, bval, bvec ]
}

// ** Denoise DWI ** //
ch_denoise_dwi = ch_dwi_bvalbvec.dwi
.map{ it + [[]] }

DENOISE_DWI ( ch_denoise_dwi )
ch_versions = ch_versions.mix(DENOISE_DWI.out.versions.first())

if ( ch_rev_dwi )
{
ch_denoise_rev_dwi = ch_rev_dwi
.multiMap { meta, dwi, bval, bvec ->
rev_dwi: [ [id: "${meta.id}_rev", cache: meta], dwi ]
rev_bvs_files: [ meta, bval, bvec ]
}
// ** Denoised reverse DWI ** //
DENOISE_REVDWI (
ch_denoise_rev_dwi.rev_dwi
.map{ it + [[]] }
)
ch_versions = ch_versions.mix(DENOISE_REVDWI.out.versions.first())

ch_topup_eddy_rev_dwi = DENOISE_REVDWI.out.image
.map{ meta, dwi -> [ meta.cache, dwi ] }
.join(ch_denoise_rev_dwi.rev_bvs_files)
}
else
{
ch_topup_eddy_rev_dwi = [] // or Channel.empty()
}
// ** Denoise REV-DWI ** //
// Need to append "rev" to the ID, to ensure output filenames
// are different from the DWI and prevent file collisions
// - "cache: meta" is used to save the "real" metadata with valid ID for
// join operations, so it can be recovered after execution
ch_denoise_rev_dwi = ch_rev_dwi_bvalbvec.rev_dwi
.map{ meta, dwi -> [ [id: "${meta.id}_rev", cache: meta], dwi, [] ] }

DENOISE_REVDWI ( ch_denoise_rev_dwi )
ch_versions = ch_versions.mix(DENOISE_REVDWI.out.versions.first())

// ** Eddy Topup ** //
ch_topup_eddy_dwi = DENOISE_DWI.out.image.join(ch_denoise_dwi.bvs_files)
ch_topup_eddy_dwi = DENOISE_DWI.out.image
.join(ch_dwi_bvalbvec.bvs_files)

if ( ! ch_b0 ) {
EXTRACTB0_TOPUP { ch_topup_eddy_dwi }
ch_versions = ch_versions.mix(EXTRACTB0_TOPUP.out.versions.first())
ch_b0 = EXTRACTB0_TOPUP.out.b0
}
// Recover the "real" ID from "meta[cache]" (see above), to join with the bval/bvec
ch_topup_eddy_rev_dwi = DENOISE_REVDWI.out.image
.map{ meta, dwi -> [ meta.cache, dwi ] }
.join(ch_rev_dwi_bvalbvec.rev_bvs_files)

TOPUP_EDDY ( ch_topup_eddy_dwi, ch_b0, ch_topup_eddy_rev_dwi, ch_rev_b0, ch_config_topup )
ch_versions = ch_versions.mix(TOPUP_EDDY.out.versions.first())
@@ -76,19 +70,22 @@ workflow PREPROC_DWI {
ch_betcrop_dwi = TOPUP_EDDY.out.dwi
.join(TOPUP_EDDY.out.bval)
.join(TOPUP_EDDY.out.bvec)

BETCROP_FSLBETCROP ( ch_betcrop_dwi )
ch_versions = ch_versions.mix(BETCROP_FSLBETCROP.out.versions.first())

// ** Crop b0 ** //
ch_crop_b0 = TOPUP_EDDY.out.b0
.join(BETCROP_FSLBETCROP.out.bbox)

BETCROP_CROPVOLUME ( ch_crop_b0 )
ch_versions = ch_versions.mix(BETCROP_CROPVOLUME.out.versions.first())

// ** N4 DWI ** //
ch_N4 = BETCROP_FSLBETCROP.out.image
.join(BETCROP_CROPVOLUME.out.image)
.join(BETCROP_FSLBETCROP.out.mask)

N4_DWI ( ch_N4 )
ch_versions = ch_versions.mix(N4_DWI.out.versions.first())

@@ -97,11 +94,14 @@
.join(TOPUP_EDDY.out.bval)
.join(TOPUP_EDDY.out.bvec)
.join(BETCROP_FSLBETCROP.out.mask)

NORMALIZE_DWI ( ch_normalize )
ch_versions = ch_versions.mix(NORMALIZE_DWI.out.versions.first())

// ** Resample DWI ** //
ch_resample_dwi = NORMALIZE_DWI.out.dwi.map{ it + [[]] }
ch_resample_dwi = NORMALIZE_DWI.out.dwi
.map{ it + [[]] }

RESAMPLE_DWI ( ch_resample_dwi )
ch_versions = ch_versions.mix(RESAMPLE_DWI.out.versions.first())

@@ -114,18 +114,20 @@
ch_versions = ch_versions.mix(EXTRACTB0_RESAMPLE.out.versions.first())

// ** Resample mask ** //
ch_resample_mask = BETCROP_FSLBETCROP.out.mask.map{ it + [[]] }
ch_resample_mask = BETCROP_FSLBETCROP.out.mask
.map{ it + [[]] }

RESAMPLE_MASK ( ch_resample_mask )
ch_versions = ch_versions.mix(RESAMPLE_MASK.out.versions.first())

emit:
dwi_resample = RESAMPLE_DWI.out.image // channel: [ val(meta), [ dwi_resample ] ]
bval = TOPUP_EDDY.out.bval // channel: [ val(meta), [ bval_corrected ] ]
bvec = TOPUP_EDDY.out.bvec // channel: [ val(meta), [ bvec_corrected ] ]
b0 = EXTRACTB0_RESAMPLE.out.b0 // channel: [ val(meta), [ b0 ] ]
b0_mask = RESAMPLE_MASK.out.image // channel: [ val(meta), [ b0_mask ] ]
dwi_bounding_box = BETCROP_FSLBETCROP.out.bbox // channel: [ val(meta), [ dwi_bounding_box ] ]
dwi_topup_eddy = TOPUP_EDDY.out.dwi // channel: [ val(meta), [ dwi_topup_eddy ] ]
dwi_n4 = N4_DWI.out.image // channel: [ val(meta), [ dwi_n4 ] ]
versions = ch_versions // channel: [ versions.yml ]
dwi_resample = RESAMPLE_DWI.out.image // channel: [ val(meta), dwi-resampled ]
bval = TOPUP_EDDY.out.bval // channel: [ val(meta), bval-corrected ]
bvec = TOPUP_EDDY.out.bvec // channel: [ val(meta), bvec-corrected ]
b0 = EXTRACTB0_RESAMPLE.out.b0 // channel: [ val(meta), b0-resampled ]
b0_mask = RESAMPLE_MASK.out.image // channel: [ val(meta), b0-mask ]
dwi_bounding_box = BETCROP_FSLBETCROP.out.bbox // channel: [ val(meta), dwi-bounding-box ]
dwi_topup_eddy = TOPUP_EDDY.out.dwi // channel: [ val(meta), dwi-after-topup-eddy ]
dwi_n4 = N4_DWI.out.image // channel: [ val(meta), dwi-after-n4 ]
versions = ch_versions // channel: [ versions.yml ]
}
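The "_rev" renaming used for DENOISE_REVDWI deserves a note: suffixing the ID prevents output filename collisions with DENOISE_DWI, while stashing the original meta under `cache` lets the workflow restore the real ID afterwards so `join()` keys still line up. A condensed sketch of the pattern, with hypothetical process and channel names:

    // Before the process: suffix the ID and stash the real meta under "cache".
    ch_in = ch_files.map { meta, f -> [ [id: "${meta.id}_rev", cache: meta], f ] }

    SOME_PROCESS ( ch_in )    // hypothetical process; its outputs carry the "_rev" suffix

    // After the process: recover the cached meta, then join on the original ID.
    ch_out = SOME_PROCESS.out.image
        .map { meta, f -> [ meta.cache, f ] }
        .join ( ch_sidecar_files )    // hypothetical channel keyed by the original meta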
64 changes: 58 additions & 6 deletions subworkflows/nf-neuro/preproc_dwi/tests/main.nf.test
@@ -44,14 +44,14 @@ nextflow_workflow {
file("\${test_data_directory}/sub-01_dir-AP_dwi.bval"),
file("\${test_data_directory}/sub-01_dir-AP_dwi.bvec")
]}
input[1] = []
input[2] = []
input[1] = Channel.from( [] )
input[2] = Channel.from( [] )
input[3] = LOAD_DATA.out.test_data_directory.map{
test_data_directory -> [
[ id:'test', single_end:false ],
file("\${test_data_directory}/sub-01_dir-PA_sbref.nii.gz")
]}
input[4] = []
input[4] = Channel.from( [] )
"""
}
}
@@ -92,9 +92,61 @@ nextflow_workflow {
file("\${test_data_directory}/sub-01_dir-PA_dwi.bval"),
file("\${test_data_directory}/sub-01_dir-PA_dwi.bvec")
]}
input[2] = []
input[3] = []
input[4] = []
input[2] = Channel.from( [] )
input[3] = Channel.from( [] )
input[4] = Channel.from( [] )
"""
}
}

then {
assertAll(
{ assert workflow.success},
{ assert snapshot(
workflow.out.bval,
workflow.out.versions,
file(workflow.out.b0.get(0).get(1)).name,
file(workflow.out.b0_mask.get(0).get(1)).name,
file(workflow.out.bvec.get(0).get(1)).name,
file(workflow.out.dwi_bounding_box.get(0).get(1)).name,
file(workflow.out.dwi_n4.get(0).get(1)).name,
file(workflow.out.dwi_resample.get(0).get(1)).name,
file(workflow.out.dwi_topup_eddy.get(0).get(1)).name).match() }
)
}
}


test("preproc_dwi_all_options") {

when {
workflow {
"""
input[0] = LOAD_DATA.out.test_data_directory.map{
test_data_directory -> [
[ id:'test', single_end:false ],
file("\${test_data_directory}/sub-01_dir-AP_dwi.nii.gz"),
file("\${test_data_directory}/sub-01_dir-AP_dwi.bval"),
file("\${test_data_directory}/sub-01_dir-AP_dwi.bvec")
]}
input[1] = LOAD_DATA.out.test_data_directory.map{
test_data_directory -> [
[ id:'test', single_end:false ],
file("\${test_data_directory}/sub-01_dir-PA_dwi.nii.gz"),
file("\${test_data_directory}/sub-01_dir-PA_dwi.bval"),
file("\${test_data_directory}/sub-01_dir-PA_dwi.bvec")
]}
input[2] = LOAD_DATA.out.test_data_directory.map{
test_data_directory -> [
[ id:'test', single_end:false ],
file("\${test_data_directory}/sub-01_dir-AP_sbref.nii.gz")
]}
input[3] = LOAD_DATA.out.test_data_directory.map{
test_data_directory -> [
[ id:'test', single_end:false ],
file("\${test_data_directory}/sub-01_dir-PA_sbref.nii.gz")
]}
input[4] = Channel.from( [] )
"""
}
}
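The switch from bare `[]` placeholders to `Channel.from( [] )` supplies the optional subworkflow inputs as explicitly empty channels. A hypothetical call site outside nf-test would follow the same shape as the "preproc_dwi" test above (channel names here are assumptions):

    // Illustrative invocation mirroring the "preproc_dwi" test case.
    PREPROC_DWI (
        ch_dwi,               // [ meta, dwi, bval, bvec ]
        Channel.from( [] ),   // no reverse-phase DWI
        Channel.from( [] ),   // no acquired b0
        ch_rev_b0,            // [ meta, rev-b0 ]
        Channel.from( [] )    // default topup config
    )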
(Diffs for the remaining 5 changed files are not shown here.)
