Skip to content
Open
12 changes: 6 additions & 6 deletions assets/schema_input.json
Original file line number Diff line number Diff line change
Expand Up @@ -141,44 +141,44 @@
{
"format": "file-path",
"exists": true,
"pattern": "^\\S+(\\.nii)?\\.gz$"
"pattern": "^\\S+(?:(?:\\.nii)?\\.gz|\\.mgz)$"
},
{
"format": "url",
"mimetype": "application/gzip"
}
],
"errorMessage": "T1-weighted image in Nifti format, cannot contain spaces and must have extension '.nii' or '.nii.gz'"
"errorMessage": "T1-weighted image in Nifti format, cannot contain spaces and must have extension '.nii', '.nii.gz' or '.mgz'"
},
"wmparc": {
"type": "string",
"anyOf": [
{
"format": "file-path",
"exists": true,
"pattern": "^\\S+(\\.nii)?\\.gz$"
"pattern": "^\\S+(?:(?:\\.nii)?\\.gz|\\.mgz)$"
},
{
"format": "url",
"mimetype": "application/gzip"
}
],
"errorMessage": "Segmentation of the white matter parcellation in Nifti format, cannot contain spaces and must have extension '.nii' or '.nii.gz'"
"errorMessage": "Segmentation of the white matter parcellation in Nifti format, cannot contain spaces and must have extension '.nii', '.nii.gz' or '.mgz'"
},
"aparc_aseg": {
"type": "string",
"anyOf": [
{
"format": "file-path",
"exists": true,
"pattern": "^\\S+(\\.nii)?\\.gz$"
"pattern": "^\\S+(?:(?:\\.nii)?\\.gz|\\.mgz)$"
},
{
"format": "url",
"mimetype": "application/gzip"
}
],
"errorMessage": "Segmentation of the cortical parcellation in Nifti format, cannot contain spaces and must have extension '.nii' or '.nii.gz'"
"errorMessage": "Segmentation of the cortical parcellation in Nifti format, cannot contain spaces and must have extension '.nii', '.nii.gz' or '.mgz'"
},
"lesion": {
"type": "string",
Expand Down
16 changes: 15 additions & 1 deletion conf/modules/output_template_space.config
Original file line number Diff line number Diff line change
Expand Up @@ -104,11 +104,25 @@ process {
else if ( filename.contains("md") && filename.contains("desc-fwc")){
"${prefix}_space-${params.template}_model-tensor_param-md_desc-fwc_dwimap.nii.gz"
}
else if ( filename.contains("isovf") ) {
"${prefix}_space-${params.template}_model-noddi_param-isovf_dwimap.nii.gz"
}
else if ( filename.contains("icvf") ) {
"${prefix}_space-${params.template}_model-noddi_param-icvf_dwimap.nii.gz"
}
else if ( filename.contains("odi") ) {
"${prefix}_space-${params.template}_model-noddi_param-odi_dwimap.nii.gz"
}
else if ( filename.contains("ecvf") ) {
"${prefix}_space-${params.template}_model-noddi_param-ecvf_dwimap.nii.gz"
}
else if ( filename.contains("dir") ) {
"${prefix}_space-${params.template}_model-noddi_param-dir_dwimap.nii.gz"
}
else if ( filename.contains("_ad_") ) { "${prefix}_space-${params.template}_model-tensor_param-ad_dwimap.nii.gz" }
else if ( filename.contains("_fa_") ) { "${prefix}_space-${params.template}_model-tensor_param-fa_dwimap.nii.gz" }
else if ( filename.contains("_rd_") ) { "${prefix}_space-${params.template}_model-tensor_param-rd_dwimap.nii.gz" }
else if ( filename.contains("_md_") ) { "${prefix}_space-${params.template}_model-tensor_param-md_dwimap.nii.gz" }

else if ( filename.contains("_mode_") ) { "${prefix}_space-${params.template}_model-tensor_param-mode_dwimap.nii.gz" }
else if ( filename.contains("_ga_") ) { "${prefix}_space-${params.template}_model-tensor_param-ga_dwimap.nii.gz" }
else if ( filename.contains("_afd_total_") ) { "${prefix}_space-${params.template}_model-csd_param-afdtotal_dwimap.nii.gz" }
Expand Down
1 change: 1 addition & 0 deletions main.nf
Original file line number Diff line number Diff line change
Expand Up @@ -75,6 +75,7 @@ workflow {
args,
params.outdir,
params.input,
params.bids_config,
params.help,
params.help_full,
params.show_hidden
Expand Down
12 changes: 6 additions & 6 deletions modules/local/io/safecastinputs/main.nf
Original file line number Diff line number Diff line change
Expand Up @@ -15,9 +15,9 @@ process IO_SAFECASTINPUTS {
out_rev_bval = rev_bval ? "rev_dwi.bval" : "$rev_bval"
out_rev_bvec = rev_bvec ? "rev_dwi.bvec" : "$rev_bvec"
out_rev_sbref = rev_sbref ? "rev_sbref.nii.gz" : "$rev_sbref"
out_t1 = t1 ? "t1.nii.gz" : "$t1"
out_wmparc = wmparc ? "wmparc.nii.gz" : "$wmparc"
out_aparc_aseg = aparc_aseg ? "aparc+aseg.nii.gz" : "$aparc_aseg"
out_t1 = t1 ? (t1.toString().endsWith('.mgz') ? "t1.mgz" : "t1.nii.gz") : "$t1"
out_wmparc = wmparc ? (wmparc.toString().endsWith('.mgz') ? "wmparc.mgz" : "wmparc.nii.gz") : "$wmparc"
out_aparc_aseg = aparc_aseg ? (aparc_aseg.toString().endsWith('.mgz') ? "aparc+aseg.mgz" : "aparc+aseg.nii.gz") : "$aparc_aseg"
out_lesion = lesion ? "lesion.nii.gz" : "$lesion"
"""
[ -f "$dwi" ] && ln -sf $dwi dwi.nii.gz
Expand All @@ -28,9 +28,9 @@ process IO_SAFECASTINPUTS {
[ -f "$rev_bval" ] && ln -sf $rev_bval rev_dwi.bval
[ -f "$rev_bvec" ] && ln -sf $rev_bvec rev_dwi.bvec
[ -f "$rev_sbref" ] && ln -sf $rev_sbref rev_sbref.nii.gz
[ -f "$t1" ] && ln -sf $t1 t1.nii.gz
[ -f "$wmparc" ] && ln -sf $wmparc wmparc.nii.gz
[ -f "$aparc_aseg" ] && ln -sf $aparc_aseg aparc+aseg.nii.gz
[ -f "$t1" ] && ( [[ "$t1" == *.mgz ]] && ln -sf $t1 t1.mgz || ln -sf $t1 t1.nii.gz )
[ -f "$wmparc" ] && ( [[ "$wmparc" == *.mgz ]] && ln -sf $wmparc wmparc.mgz || ln -sf $wmparc wmparc.nii.gz )
[ -f "$aparc_aseg" ] && ( [[ "$aparc_aseg" == *.mgz ]] && ln -sf $aparc_aseg aparc+aseg.mgz || ln -sf $aparc_aseg aparc+aseg.nii.gz )
[ -f "$lesion" ] && ln -sf $lesion lesion.nii.gz
exit 0
"""
Expand Down
10 changes: 9 additions & 1 deletion nextflow_schema.json
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,15 @@
"fa_icon": "fas fa-terminal",
"description": "Define where the pipeline should find input data and save output data.",
"help_text": "Specify input samplesheet and output directory",
"required": ["input", "outdir"],
"required": ["outdir"],
"oneOf": [
{
"required": ["input"]
},
{
"required": ["bids_config"]
}
],
"properties": {
"input": {
"mimetype": "text/csv",
Expand Down
124 changes: 113 additions & 11 deletions subworkflows/local/utils_nfcore_sf-tractomics_pipeline/main.nf
Original file line number Diff line number Diff line change
Expand Up @@ -30,10 +30,11 @@ workflow PIPELINE_INITIALISATION {
take:
version // boolean: Display version and exit
validate_params // boolean: Boolean whether to validate parameters against the schema at runtime
monochrome_logs // boolean: Do not use coloured log outputs
_monochrome_logs // boolean: Do not use coloured log outputs
nextflow_cli_args // array: List of positional nextflow CLI args
outdir // string: The output directory where the results will be saved
input // string: Path to input samplesheet
_input // string: Path to input samplesheet
_bids_config // string: Path to BIDS JSON configuration file
help // boolean: Display help message and exit
help_full // boolean: Show the full help message
show_hidden // boolean: Show hidden parameters in the help message
Expand Down Expand Up @@ -78,7 +79,7 @@ workflow PIPELINE_INITIALISATION {
\033[0;35m scilus/sf-tractomics ${workflow.manifest.version}\033[0m
-\033[2m----------------------------------------------------------------------------------\033[0m-
"""
after_text = """${workflow.manifest.doi ? "\n* The pipeline\n" : ""}${workflow.manifest.doi.tokenize(",").collect { " https://doi.org/${it.trim().replace('https://doi.org/','')}"}.join("\n")}${workflow.manifest.doi ? "\n" : ""}
after_text = """${workflow.manifest.doi ? "\n* The pipeline\n" : ""}${workflow.manifest.doi.tokenize(",").collect { doi -> " https://doi.org/${doi.trim().replace('https://doi.org/','')}"}.join("\n")}${workflow.manifest.doi ? "\n" : ""}
* The nf-neuro project
https://scilus.github.io/nf-neuro

Expand Down Expand Up @@ -109,17 +110,63 @@ workflow PIPELINE_INITIALISATION {
)

//
// Create channel from input file provided through params.input
// Create channel from input file provided through params.bids_config or params.input
//
if (params.input) {
if (_bids_config) {
ch_input_sheets = channel
.fromList(parseBidsConfig(_bids_config))
.map {
meta, dwi, bval, bvec, sbref, rev_dwi, rev_bval, rev_bvec, rev_sbref, t1, wmparc, aparc_aseg, lesion ->
return [
meta,
dwi,
bval,
bvec,
sbref ?: [],
rev_dwi ?: [],
rev_bval ?: [],
rev_bvec ?: [],
rev_sbref ?: [],
t1,
wmparc ?: [],
aparc_aseg ?: [],
lesion ?: []
]
}
.map { samplesheet ->
validateInputSamplesheet(samplesheet)
}

IO_SAFECASTINPUTS(ch_input_sheets)
ch_samplesheet = IO_SAFECASTINPUTS.out.safe_inputs
.multiMap { meta, dwi, bval, bvec, sbref, rev_dwi, rev_bval, rev_bvec, rev_sbref, t1, wmparc, aparc_aseg, lesion ->
t1: [meta, t1]
wmparc: [meta, wmparc]
aparc_aseg: [meta, aparc_aseg]
dwi_bval_bvec: [meta, dwi, bval, bvec]
b0: [meta, sbref]
rev_dwi_bval_bvec: [meta, rev_dwi, rev_bval, rev_bvec]
rev_b0: [meta, rev_sbref]
lesion: [meta, lesion]
}

if (params.participants_tsv) {
participants_tsv_path = params.participants_tsv
}
else {
participants_tsv_path = null
log.warn("No participants.tsv provided, covariates will not be used.")
}
}
else if (_input) {
//
// params.input is either a BIDS compliant directory or a samplesheet
// - if directory, we assume it is BIDS
// - everything else is a samplesheet
//
if (file(params.input).isDirectory()) {
if (file(_input).isDirectory()) {
IO_BIDS(
channel.fromPath(params.input),
channel.fromPath(_input),
channel.value(params.fsbids ?: []),
channel.value(params.bidsignore ?: [])
)
Expand All @@ -137,17 +184,17 @@ workflow PIPELINE_INITIALISATION {
if (params.participants_tsv) {
participants_tsv_path = "${params.participants_tsv}"
}
else if (file("${params.input}/participants.tsv").exists()) {
participants_tsv_path = "${params.input}/participants.tsv"
else if (file("${_input}/participants.tsv").exists()) {
participants_tsv_path = "${_input}/participants.tsv"
}
else {
participants_tsv_path = null
log.warn("No participants.tsv provided, covariates will not be used.")
}
}
else {
ch_input_sheets = Channel
.fromList(samplesheetToList(params.input, "${projectDir}/assets/schema_input.json"))
ch_input_sheets = channel
.fromList(samplesheetToList(_input, "${projectDir}/assets/schema_input.json"))
.map{
meta, dwi, bval, bvec, sbref, rev_dwi, rev_bval, rev_bvec, rev_sbref, t1, wmparc, aparc_aseg, lesion ->
return [
Expand Down Expand Up @@ -191,6 +238,9 @@ workflow PIPELINE_INITIALISATION {
}
}
}
else {
error "Please provide one input source: --input or --bids_config"
}

// We avoid merging the covariates (i.e. the extra meta fields)
// directly into the samplesheet's multimap meta fields, as those covariates are not used
Expand Down Expand Up @@ -424,6 +474,58 @@ def validateInputSamplesheet(input) {
return input
}

//
// Parse subjects from a BIDS JSON configuration file.
//
// Reads a JSON array of per-subject objects and converts each entry into the
// 13-element tuple shape consumed by the input channel:
//   [ meta, dwi, bval, bvec, sbref, rev_dwi, rev_bval, rev_bvec, rev_sbref,
//     t1, wmparc, aparc_aseg, lesion ]
// Required keys per entry: subject, dwi, bval, bvec, t1.
// Optional keys (emitted as null when absent): session, run, topup, rev_dwi,
// rev_bval, rev_bvec, rev_topup, wmparc, aparc_aseg, lesion. The downstream
// channel map() replaces null optionals with [] before IO_SAFECASTINPUTS.
//
// Raises (via Nextflow `error`) on: missing config file, non-array JSON,
// non-object array entries, or missing required fields.
//
def parseBidsConfig(config_path) {
    def config_file = file(config_path)

    if (!config_file.exists()) {
        error "BIDS config file does not exist: ${config_path}"
    }

    def config = new groovy.json.JsonSlurper().parseText(config_file.text)

    if (!(config instanceof List)) {
        error "BIDS config must be a JSON array of subject objects"
    }

    return config.collect { sample ->
        // Guard against bare values (strings, numbers) in the array — without
        // this, `sample.subject` below fails with an opaque Groovy exception.
        if (!(sample instanceof Map)) {
            error "Each entry in bids_config must be a JSON object, got: ${sample}"
        }
        if (!sample.subject) {
            error "Each entry in bids_config must define 'subject'"
        }
        if (!sample.dwi || !sample.bval || !sample.bvec || !sample.t1) {
            error "Each entry in bids_config must define required fields: dwi, bval, bvec, t1"
        }

        def subject_raw = sample.subject.toString()
        def session_raw = sample.session != null ? sample.session.toString() : ""
        def run_raw = (sample.containsKey('run') && sample.run != null) ? sample.run.toString() : ""

        // Normalize identifiers to BIDS entity conventions (sub-/ses-/run-),
        // accepting values that already carry the prefix.
        def subject_id = subject_raw.startsWith("sub-") ? subject_raw : "sub-${subject_raw}"
        def session_id = session_raw ? (session_raw.startsWith("ses-") ? session_raw : "ses-${session_raw}") : ""
        def run_id = run_raw ? (run_raw.startsWith("run-") ? run_raw : "run-${run_raw}") : ""

        def meta = [id: subject_id, session: session_id, run: run_id]

        return [
            meta,
            sample.dwi,
            sample.bval,
            sample.bvec,
            sample.topup ?: null,
            sample.rev_dwi ?: null,
            sample.rev_bval ?: null,
            sample.rev_bvec ?: null,
            sample.rev_topup ?: null,
            sample.t1,
            sample.wmparc ?: null,
            sample.aparc_aseg ?: null,
            sample.lesion ?: null
        ]
    }
}

//
// Generate methods description for MultiQC
//
Expand Down
Loading