Merge pull request #60 from scilus/revert-59-revert-35-fix-sub-preproc_t1

[ENH] Update subworkflow preproc_t1
arnaudbore authored Dec 5, 2024
2 parents b0aa9b0 + bfb9225 commit 83edced
Showing 7 changed files with 120 additions and 150 deletions.
35 changes: 23 additions & 12 deletions subworkflows/nf-neuro/load_test_data/main.nf
@@ -18,25 +18,36 @@ def fetch_archive ( name, destination, remote, database, data_identifiers ) {

     def cache_entry = file("$cache_location/$data_id")
     if ( !cache_entry.exists() ) {
-        def remote_entry = "${data_id[0..1]}/${data_id[2..-1]}"
-        file("$remote/$database/$remote_entry").copyTo(cache_entry)
+        try {
+            def remote_entry = "${data_id[0..1]}/${data_id[2..-1]}"
+            file("$remote/$database/$remote_entry").copyTo(cache_entry)
+        }
+        catch (Exception e) {
+            error "Failed to fetch test data archive: $name"
+            file("$remote/$database/$remote_entry").delete()
+        }
     }

     // Unzip all archive content to destination
     def content = new java.util.zip.ZipFile("$cache_entry")
-    content.entries().each{ entry ->
-        def local_target = file("$destination/${entry.getName()}")
-        if (entry.isDirectory()) {
-            local_target.mkdirs();
-        } else {
-            local_target.getParent().mkdirs();
-            file("$local_target").withOutputStream{
-                out -> out << content.getInputStream(entry)
-            }
-        }
-    }
-
-    return destination.resolve("${name.take(name.lastIndexOf('.'))}")
+    try {
+        content.entries().each{ entry ->
+            def local_target = file("$destination/${entry.getName()}")
+            if (entry.isDirectory()) {
+                local_target.mkdirs();
+            } else {
+                local_target.getParent().mkdirs();
+                file("$local_target").withOutputStream{
+                    out -> out << content.getInputStream(entry)
+                }
+            }
+        }
+
+        return destination.resolve("${name.take(name.lastIndexOf('.'))}")
+    }
+    finally {
+        content.close()
+    }
 }

workflow LOAD_TEST_DATA {
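Note — the change above wraps both the download and the extraction: a fetch failure is now reported through error, and the finally block guarantees the ZipFile handle is released even when extraction throws. A minimal Groovy sketch of the same close-on-exit pattern (the archive name is a hypothetical placeholder, not a file from this repository):

    import java.util.zip.ZipFile

    def zip = new ZipFile("archive.zip")   // hypothetical archive path
    try {
        // iterate entries; any exception raised here still reaches finally
        zip.entries().each { entry ->
            println "extracting ${entry.name}"
        }
    }
    finally {
        zip.close()   // always runs, so the file handle never leaks
    }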
87 changes: 61 additions & 26 deletions subworkflows/nf-neuro/preproc_t1/main.nf
@@ -12,19 +12,23 @@ params.run_synthbet = false
 workflow PREPROC_T1 {

     take:
-        ch_image           // channel: [ val(meta), [ image ] ]
-        ch_template        // channel: [ val(meta), [ template ] ] , optional
-        ch_probability_map // channel: [ val(meta), [ probability_map, mask, initial_affine ] ] , optional
-        ch_mask_nlmeans    // channel: [ val(meta), [ mask ] ] , optional
-        ch_ref_n4          // channel: [ val(meta), [ ref, ref_mask ] ] , optional
-        ch_ref_resample    // channel: [ val(meta), [ ref ] ] , optional
-        ch_weights         // channel: [ val(meta), [ weights ] ] , optional
+        ch_image           // channel: [ val(meta), image ]
+        ch_template        // channel: [ val(meta), template ] , optional
+        ch_probability_map // channel: [ val(meta), probability-map, mask, initial-affine ] , optional
+        ch_mask_nlmeans    // channel: [ val(meta), mask ] , optional
+        ch_ref_n4          // channel: [ val(meta), ref, ref-mask ] , optional
+        ch_ref_resample    // channel: [ val(meta), ref ] , optional
+        ch_weights         // channel: [ val(meta), weights ] , optional

     main:

         ch_versions = Channel.empty()

         // ** Denoising ** //
+        // Result : [ meta, image, mask | [] ]
+        // Steps :
+        //   - join  [ meta, image, mask | null ]
+        //   - map   [ meta, image, mask | [] ]
         ch_nlmeans = ch_image
             .join(ch_mask_nlmeans, remainder: true)
             .map{ it[0..1] + [it[2] ?: []] }
@@ -33,23 +37,44 @@ workflow PREPROC_T1 {
         ch_versions = ch_versions.mix(DENOISING_NLMEANS.out.versions.first())

         // ** N4 correction ** //
+        // Result : [ meta, image, reference | [], mask | [] ]
+        // Steps :
+        //   - join  [ meta, image ] + [ reference, mask ] | [ reference, null ] | [ null ]
+        //   - map   [ meta, image, reference | [], mask | [] ]
+        //   - join  [ meta, image, reference | [], mask | [], nlmeans-mask | null ]
+        //   - map   [ meta, image, reference | [], mask | [] ]
         ch_N4 = DENOISING_NLMEANS.out.image
             .join(ch_ref_n4, remainder: true)
-            .map{ it[0..1] + [it[2] ?: []] }
+            .map{ it[0..1] + [it[2] ?: [], it[3] ?: []] }
             .join(ch_mask_nlmeans, remainder: true)
-            .map{ it[0..2] + [it[3] ?: []] }
+            .map{ it[0..2] + [it[3] ?: it[4] ?: []] }

         PREPROC_N4 ( ch_N4 )
         ch_versions = ch_versions.mix(PREPROC_N4.out.versions.first())

         // ** Resampling ** //
-        ch_resampling = PREPROC_N4.out.image.join(ch_ref_resample)
+        // Result : [ meta, image, reference | [] ]
+        // Steps :
+        //   - join  [ meta, image, reference | null ]
+        //   - map   [ meta, image, reference | [] ]
+        ch_resampling = PREPROC_N4.out.image
+            .join(ch_ref_resample, remainder: true)
+            .map{ it[0..1] + [it[2] ?: []] }

         IMAGE_RESAMPLE ( ch_resampling )
         ch_versions = ch_versions.mix(IMAGE_RESAMPLE.out.versions.first())

         // ** Brain extraction ** //
-        if ( params.run_synthbet) {
-            ch_bet = IMAGE_RESAMPLE.out.image.join(ch_weights)
+        if ( params.run_synthbet ) {
+            // ** SYNTHBET ** //
+            // Result : [ meta, image, weights | [] ]
+            // Steps :
+            //   - join  [ meta, image, weights | null ]
+            //   - map   [ meta, image, weights | [] ]
+            ch_bet = IMAGE_RESAMPLE.out.image
+                .join(ch_weights, remainder: true)
+                .map{ it[0..1] + [it[2] ?: []] }

             BETCROP_SYNTHBET ( ch_bet )
             ch_versions = ch_versions.mix(BETCROP_SYNTHBET.out.versions.first())
@@ -59,7 +84,14 @@ workflow PREPROC_T1 {
         }

         else {
-            ch_bet = IMAGE_RESAMPLE.out.image.join(ch_template).join(ch_probability_map)
+            // ** ANTSBET ** //
+            // The template and probability maps are mandatory if running antsBET. Since the
+            // error message from nextflow when they are absent is either non-informative or
+            // missing, we use ifEmpty to provide a more informative one.
+            ch_bet = IMAGE_RESAMPLE.out.image
+                .join(ch_template.ifEmpty{ error("ANTS BET needs a template") })
+                .join(ch_probability_map.ifEmpty{ error("ANTS BET needs a tissue probability map") })

             BETCROP_ANTSBET ( ch_bet )
             ch_versions = ch_versions.mix(BETCROP_ANTSBET.out.versions.first())
@@ -68,25 +100,28 @@ workflow PREPROC_T1 {
             mask_bet = BETCROP_ANTSBET.out.mask
         }

-        // ** crop image ** //
-        ch_crop = image_bet.map{it + [[]]}
+        // ** Crop image ** //
+        ch_crop = image_bet
+            .map{ it + [[]] }

         BETCROP_CROPVOLUME_T1 ( ch_crop )
         ch_versions = ch_versions.mix(BETCROP_CROPVOLUME_T1.out.versions.first())

-        // ** crop mask ** //
-        ch_crop_mask = mask_bet.join(BETCROP_CROPVOLUME_T1.out.bounding_box)
+        // ** Crop mask ** //
+        ch_crop_mask = mask_bet
+            .join(BETCROP_CROPVOLUME_T1.out.bounding_box)

         BETCROP_CROPVOLUME_MASK ( ch_crop_mask )
         ch_versions = ch_versions.mix(BETCROP_CROPVOLUME_MASK.out.versions.first())

     emit:
-        image_nlmeans  = DENOISING_NLMEANS.out.image             // channel: [ val(meta), [ image ] ]
-        image_N4       = PREPROC_N4.out.image                    // channel: [ val(meta), [ image ] ]
-        image_resample = IMAGE_RESAMPLE.out.image                // channel: [ val(meta), [ image ] ]
-        image_bet      = image_bet                               // channel: [ val(meta), [ t1 ] ]
-        mask_bet       = mask_bet                                // channel: [ val(meta), [ mask ] ]
-        crop_box       = BETCROP_CROPVOLUME_T1.out.bounding_box  // channel: [ val(meta), [ bounding_box ] ]
-        mask_final     = BETCROP_CROPVOLUME_MASK.out.image       // channel: [ val(meta), [ mask ] ]
-        t1_final       = BETCROP_CROPVOLUME_T1.out.image         // channel: [ val(meta), [ image ] ]
-        versions       = ch_versions                             // channel: [ versions.yml ]
+        t1_final       = BETCROP_CROPVOLUME_T1.out.image         // channel: [ val(meta), t1-preprocessed ]
+        mask_final     = BETCROP_CROPVOLUME_MASK.out.image       // channel: [ val(meta), t1-mask ]
+        image_nlmeans  = DENOISING_NLMEANS.out.image             // channel: [ val(meta), t1-after-denoise ]
+        image_N4       = PREPROC_N4.out.image                    // channel: [ val(meta), t1-after-unbias ]
+        image_resample = IMAGE_RESAMPLE.out.image                // channel: [ val(meta), t1-after-resample ]
+        image_bet      = image_bet                               // channel: [ val(meta), t1-after-bet ]
+        mask_bet       = mask_bet                                // channel: [ val(meta), intermediary-mask ]
+        crop_box       = BETCROP_CROPVOLUME_T1.out.bounding_box  // channel: [ val(meta), bounding-box ]
+        versions       = ch_versions                             // channel: [ versions.yml ]
 }
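Note — every step in this subworkflow relies on the same optional-input idiom: join(..., remainder: true) keeps tuples whose key has no match, padding them with null, and the following map swaps null for an empty list so the downstream process still receives a well-formed tuple. A minimal, self-contained sketch of the idiom (the channel contents are hypothetical; only the operator chain mirrors the subworkflow):

    // Hypothetical data; sub-02 deliberately has no mask.
    workflow {
        ch_image = Channel.of(
            [ [id: 'sub-01'], 'sub-01_T1w.nii.gz' ],
            [ [id: 'sub-02'], 'sub-02_T1w.nii.gz' ]
        )
        ch_mask = Channel.of(
            [ [id: 'sub-01'], 'sub-01_mask.nii.gz' ]
        )

        ch_image
            .join(ch_mask, remainder: true)    // sub-02 -> [ meta, image, null ]
            .map{ it[0..1] + [it[2] ?: []] }   // null -> [] keeps the tuple well-formed
            .view()
        // prints (order may vary):
        // [[id:sub-01], sub-01_T1w.nii.gz, sub-01_mask.nii.gz]
        // [[id:sub-02], sub-02_T1w.nii.gz, []]
    }

Mandatory inputs take the opposite route: instead of being padded, they are guarded with ifEmpty{ error("...") } so a missing template or probability map fails fast with a readable message, as in the ANTSBET branch above.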
13 changes: 8 additions & 5 deletions subworkflows/nf-neuro/preproc_t1/meta.yml
@@ -8,7 +8,8 @@ description: |
   The resulting T1 is corrected, resampled, brain extracted and cropped.
   You can retrieve the file after each step if you don't want to run the entire subworkflow.
   The next steps would be to register the resulting T1-corrected image with the DWI-corrected image
-  with, for example, the REGISTRATION subworkflow.
+  with, for example, the REGISTRATION subworkflow. IMPORTANT : the subworkflow is only reproducible
+  when running ANTs BET using a single thread.
   ----------- Steps -----------
   Denoising (nlmeans, scil).
     Used to remove the noise induced by the MRI acquisition,
@@ -21,9 +22,10 @@ description: |
     Resamples the T1 to an isotropic spatial resolution. The default is 1mm, a standard in humans which
     usually facilitate registration with corrected DWI images.
     This spatial resolution is modifiable in the configuration file.
-  Brain Extraction (bet, ANTs, freesurfer).
+  Brain Extraction (ANTs - default, freesurfer).
     Isolates the brain tissue voxels from the remaining image. Also creates a binary brain mask.
-    This brain extraction is required for the T1 to DWI Registration.
+    This brain extraction is required for the T1 to DWI Registration. IMPORTANT : when using ANTs,
+    brain extraction is reproducible only when run using a single thread.
   Cropping (scil).
     Crops the empty planes around the brain to optimize the next processing steps.
   Subworkflow based on Tractoflow : https://www.sciencedirect.com/science/article/pii/S105381192030375X?via%3Dihub
@@ -55,13 +57,14 @@ input:
  - ch_template:
      type: file
      description: |
-        The input channel containing the anatomical template to perform BET.
+        The input channel containing the anatomical template for antsBET.
        Structure: [ val(meta), path(image) ]
      pattern: "*.{nii,nii.gz}"
  - ch_probability_map:
      type: file
      description: |
-        The input channel containing the brain probability mask, with intensity range 1 (definitely brain) to 0 (definitely background).
+        The input channel containing the brain probability mask for antsBET,
+        with intensity range 1 (definitely brain) to 0 (definitely background).
        Structure: [ val(meta), path(image) ]
      pattern: "*.{nii,nii.gz}"
  - ch_mask_nlmeans:
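Note — combined with the take: block in main.nf above, a caller wires the subworkflow roughly as below. This is a hypothetical sketch (paths and meta are placeholders); optional inputs are passed as empty channels, while antsBET, the default, additionally needs the template and probability-map channels:

    include { PREPROC_T1 } from './subworkflows/nf-neuro/preproc_t1/main.nf'

    workflow {
        def meta = [ id: 'sub-01' ]
        ch_image           = Channel.of( [ meta, file('sub-01_T1w.nii.gz') ] )
        ch_template        = Channel.of( [ meta, file('t1_template.nii.gz') ] )
        ch_probability_map = Channel.of( [ meta, file('t1_brain_probability_map.nii.gz'), [], [] ] )

        PREPROC_T1 (
            ch_image,
            ch_template,
            ch_probability_map,
            Channel.empty(),   // ch_mask_nlmeans
            Channel.empty(),   // ch_ref_n4
            Channel.empty(),   // ch_ref_resample
            Channel.empty()    // ch_weights
        )

        PREPROC_T1.out.t1_final.view()
    }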
54 changes: 16 additions & 38 deletions subworkflows/nf-neuro/preproc_t1/tests/main.nf.test
@@ -29,7 +29,7 @@
         }
     }

-    test("preproc_t1_classic") {
+    test("preproc_t1_antsbet_error") {
        config "./nextflow.config"
        when {
            workflow {
@@ -44,18 +44,8 @@
                        [ id:'test', single_end:false ],
                        file("\${test_data_directory}/T1w.nii.gz")
                    ]}
-                input[1] = ch_split_test_data.antsbet.map{
-                    test_data_directory -> [
-                        [ id:'test', single_end:false ],
-                        file("\${test_data_directory}/t1_template.nii.gz")
-                    ]}
-                input[2] = ch_split_test_data.antsbet.map{
-                    test_data_directory -> [
-                        [ id:'test', single_end:false ],
-                        file("\${test_data_directory}/t1_brain_probability_map.nii.gz"),
-                        [],
-                        []
-                    ]}
+                input[1] = Channel.empty()
+                input[2] = Channel.empty()
                input[3] = ch_split_test_data.t1w.map{
                    test_data_directory -> [
                        [ id:'test', single_end:false ],
@@ -82,26 +72,12 @@
            }

        then {
-            assertAll(
-                { assert workflow.success},
-                { assert snapshot(
-                    niftiMD5SUM(workflow.out.image_nlmeans.get(0).get(1)),
-                    niftiMD5SUM(workflow.out.image_N4.get(0).get(1)),
-                    niftiMD5SUM(workflow.out.image_resample.get(0).get(1)),
-                    niftiMD5SUM(workflow.out.image_bet.get(0).get(1)),
-                    niftiMD5SUM(workflow.out.mask_bet.get(0).get(1)),
-                    workflow.out.crop_box,
-                    niftiMD5SUM(workflow.out.mask_final.get(0).get(1)),
-                    niftiMD5SUM(workflow.out.t1_final.get(0).get(1)),
-                    workflow.out.versions
-                ).match()}
-            )
+            assert workflow.failed
        }
    }

-    test("preproc_t1_option") {
-        config "./nextflow_2.config"
-
+    test("preproc_t1_antsbet") {
+        config "./nextflow.config"
        when {
            workflow {
                """
@@ -118,17 +94,19 @@
                input[1] = ch_split_test_data.antsbet.map{
                    test_data_directory -> [
                        [ id:'test', single_end:false ],
-                        []
+                        file("\${test_data_directory}/t1_template.nii.gz")
                    ]}
                input[2] = ch_split_test_data.antsbet.map{
                    test_data_directory -> [
                        [ id:'test', single_end:false ],
                        file("\${test_data_directory}/t1_brain_probability_map.nii.gz"),
                        [],
                        []
                    ]}
                input[3] = ch_split_test_data.t1w.map{
                    test_data_directory -> [
                        [ id:'test', single_end:false ],
-                        file("\${test_data_directory}/T1w_mask.nii.gz")
+                        []
                    ]}
                input[4] = ch_split_test_data.t1w.map{
                    test_data_directory -> [
@@ -139,7 +117,7 @@
                input[5] = ch_split_test_data.t1w.map{
                    test_data_directory -> [
                        [ id:'test', single_end:false ],
-                        file("\${test_data_directory}/T1w.nii.gz")
+                        []
                    ]}
                input[6] = ch_split_test_data.antsbet.map{
                    test_data_directory -> [
@@ -157,11 +135,11 @@
                    niftiMD5SUM(workflow.out.image_nlmeans.get(0).get(1)),
                    niftiMD5SUM(workflow.out.image_N4.get(0).get(1)),
                    niftiMD5SUM(workflow.out.image_resample.get(0).get(1)),
-                    niftiMD5SUM(workflow.out.image_bet.get(0).get(1)),
-                    niftiMD5SUM(workflow.out.mask_bet.get(0).get(1)),
-                    workflow.out.crop_box,
-                    niftiMD5SUM(workflow.out.mask_final.get(0).get(1)),
-                    niftiMD5SUM(workflow.out.t1_final.get(0).get(1)),
+                    file(workflow.out.image_bet.get(0).get(1)).name,
+                    file(workflow.out.mask_bet.get(0).get(1)).name,
+                    file(workflow.out.crop_box.get(0).get(1)).name,
+                    file(workflow.out.mask_final.get(0).get(1)).name,
+                    file(workflow.out.t1_final.get(0).get(1)).name,
                    workflow.out.versions
                ).match()}
            )
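Note — the antsbet test now snapshots file names instead of checksums for every output that depends on the brain extraction, since ANTs BET is only bit-reproducible when run single-threaded (see the meta.yml note above). Inside an nf-test then { } block, the two assertion styles compare as follows (a fragment using the helpers already shown in this file, not a complete test):

    assert snapshot(
        niftiMD5SUM(workflow.out.image_nlmeans.get(0).get(1)),  // deterministic step: pin exact voxel data
        file(workflow.out.t1_final.get(0).get(1)).name          // BET-dependent step: pin only that the file exists
    ).match()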