Hello
I have been experimenting with ConnectomeMapper3 for the past month in order to obtain the Lausanne atlas parcellation. I can run CMP via Singularity on an HPC cluster and complete the anatomical pipeline. I'd like to thank you for this great and easy-to-use tool.
I have a few quick questions about the anatomical and fMRI pipelines:
Q1) fMRI pipeline: can the fMRI pipeline handle multiband and/or multi-echo data? What should the configuration .json file look like in that case? A bit more documentation and explanation on this would be very helpful.
Q2) Anatomical pipeline: this question is a bit more detailed. When I run the following config.json file:
{
"Global": {
"process_type": "anatomical",
"subjects": [
"sub-1009668726",
"sub-1009868412",
"sub-1009919045",
"sub-1010049728",
"sub-1010118079",
"sub-1010161944",
"sub-1010171390",
"sub-1010214886",
"sub-1010382745",
"sub-1010402620",
"sub-1010454417",
"sub-1010702562",
"sub-1010781614",
"sub-1010851881",
"sub-1010876845",
"sub-1010955187",
"sub-1011063108",
"sub-1011132647",
"sub-1011140013",
"sub-1011222518",
"sub-1011234530",
"sub-1011362252",
"sub-1011389975"
],
"version": "v3.1.0",
"subject": "sub-1009668726"
},
"segmentation_stage": {
"seg_tool": "Freesurfer",
"make_isotropic": true,
"isotropic_vox_size": 0.7,
"isotropic_interpolation": "nearest",
"brain_mask_extraction_tool": "Freesurfer",
"use_fsl_brain_mask": false,
"use_existing_freesurfer_data": false,
"freesurfer_args": "-qcache",
"number_of_threads": 1
},
"parcellation_stage": {
"pipeline_mode": "Diffusion",
"parcellation_scheme": "Lausanne2018",
"include_thalamic_nuclei_parcellation": true,
"ants_precision_type": "float",
"segment_hippocampal_subfields": true,
"segment_brainstem": true,
"atlas_info": {}
},
"Multi-processing": {
"number_of_cores": 1
}
}
Most of the subjects do not complete processing (some do), and the nipype pypeline.log file points to the hippocampal subfield segmentation output being missing:
250109-14:17:35,714 nipype.workflow WARNING:
[Node] Error on "anatomical_pipeline.parcellation_stage.parcCombiner" (/output_dir/nipype-1.8.0/sub-1009668726/ses-1/anatomical_pipeline/parcellation_stage/parcCombiner)
250109-14:17:37,557 nipype.workflow ERROR:
Node parcCombiner failed to run on host lh06g18.
250109-14:17:37,567 nipype.workflow ERROR:
Saving crash info to /hpc/users/kilicb01/crash-20250109-141737-kilicb01-parcCombiner-892a5829-1cd9-488a-a6d7-eff88e0af550.txt
Traceback (most recent call last):
File "/opt/conda/envs/py37cmp-core/lib/python3.7/site-packages/nipype/pipeline/plugins/multiproc.py", line 67, in run_node
result["result"] = node.run(updatehash=updatehash)
File "/opt/conda/envs/py37cmp-core/lib/python3.7/site-packages/nipype/pipeline/engine/nodes.py", line 524, in run
result = self._run_interface(execute=True)
File "/opt/conda/envs/py37cmp-core/lib/python3.7/site-packages/nipype/pipeline/engine/nodes.py", line 642, in _run_interface
return self._run_command(execute)
File "/opt/conda/envs/py37cmp-core/lib/python3.7/site-packages/nipype/pipeline/engine/nodes.py", line 751, in _run_command
f"Exception raised while executing Node {self.name}.\n\n{result.runtime.traceback}"
nipype.pipeline.engine.nodes.NodeExecutionError: Exception raised while executing Node parcCombiner.
Traceback (most recent call last):
File "/opt/conda/envs/py37cmp-core/lib/python3.7/site-packages/nibabel/loadsave.py", line 42, in load
stat_result = os.stat(filename)
FileNotFoundError: [Errno 2] No such file or directory: '/output_dir/nipype-1.8.0/sub-1009668726/ses-1/anatomical_pipeline/parcellation_stage/parcHippo/lh_subFields.nii.gz'
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/opt/conda/envs/py37cmp-core/lib/python3.7/site-packages/nipype/interfaces/base/core.py", line 398, in run
runtime = self._run_interface(runtime)
File "/opt/conda/envs/py37cmp-core/lib/python3.7/site-packages/cmtklib/parcellation.py", line 623, in _run_interface
img_sublh = ni.load(self.inputs.lh_hippocampal_subfields)
File "/opt/conda/envs/py37cmp-core/lib/python3.7/site-packages/nibabel/loadsave.py", line 44, in load
raise FileNotFoundError(f"No such file or no access: '{filename}'")
FileNotFoundError: No such file or no access: '/output_dir/nipype-1.8.0/sub-1009668726/ses-1/anatomical_pipeline/parcellation_stage/parcHippo/lh_subFields.nii.gz'
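In case it helps to narrow this down, a check along the lines below would tell which subjects actually have the FreeSurfer hippocampal subfield volumes on disk; my understanding is that the parcHippo node converts those into the missing lh/rh_subFields.nii.gz. The derivatives path and the filename pattern are my assumptions (the exact names and version suffixes depend on the FreeSurfer release bundled in the container):

from pathlib import Path

# Assumed location of the FreeSurfer derivatives written by CMP3; adjust to
# the actual layout of your output_dir.
fs_derivatives = Path("/output_dir/freesurfer-7.1.1")

for subj_dir in sorted(fs_derivatives.glob("sub-*")):
    # The hippocampal subfield module writes volumes such as
    # lh.hippoSfLabels-*.mgz / lh.hippoAmygLabels-*.mgz into mri/ (pattern assumed).
    hits = list((subj_dir / "mri").glob("*hippo*Labels*.mgz"))
    print(f"{subj_dir.name}: {'OK' if hits else 'MISSING hippocampal subfield volumes'}")

If those volumes are missing only for the subjects that fail, that would suggest the FreeSurfer module itself did not finish (e.g. it ran out of time or memory on the cluster node) rather than a problem in the parcellation stage itself.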
Next, if I turn off hippocampal subfield segmentation:
{
"Global": {
"process_type": "anatomical",
"subjects": [
"sub-1009668726",
"sub-1009868412",
"sub-1009919045",
"sub-1010049728",
"sub-1010118079",
"sub-1010161944",
"sub-1010171390",
"sub-1010214886",
"sub-1010382745",
"sub-1010402620",
"sub-1010454417",
"sub-1010702562",
"sub-1010781614",
"sub-1010851881",
"sub-1010876845",
"sub-1010955187",
"sub-1011063108",
"sub-1011132647",
"sub-1011140013",
"sub-1011222518",
"sub-1011234530",
"sub-1011362252",
"sub-1011389975"
],
"version": "v3.1.0",
"subject": "sub-1009668726"
},
"segmentation_stage": {
"seg_tool": "Freesurfer",
"make_isotropic": true,
"isotropic_vox_size": 0.7,
"isotropic_interpolation": "nearest",
"brain_mask_extraction_tool": "Freesurfer",
"use_fsl_brain_mask": false,
"use_existing_freesurfer_data": false,
"freesurfer_args": "-qcache",
"number_of_threads": 1
},
"parcellation_stage": {
"pipeline_mode": "Diffusion",
"parcellation_scheme": "Lausanne2018",
"include_thalamic_nuclei_parcellation": true,
"ants_precision_type": "float",
"segment_hippocampal_subfields":
false, "segment_brainstem":
true, "atlas_info": {}
},
"Multi-processing": {
"number_of_cores": 1
}
}
Then more subjects complete processing, although some still do not, and the pypeline.log error message now points to the brainstem segmentation output being missing:
250109-14:17:34,174 nipype.interface INFO:
Start running CombineParcellations interface...
250109-14:17:34,245 nipype.workflow INFO:
[Node] Finished "parcCombiner", elapsed time 0.069102s.
250109-14:17:34,246 nipype.workflow WARNING:
Storing result file without outputs
250109-14:17:34,247 nipype.workflow WARNING:
[Node] Error on "anatomical_pipeline.parcellation_stage.parcCombiner" (/output_dir/nipype-1.8.0/sub-1009668726/ses-1/anatomical_pipeline/parcellation_stage/parcCombiner)
250109-14:17:35,895 nipype.workflow ERROR:
Node parcCombiner failed to run on host lh05g21.
250109-14:17:35,899 nipype.workflow ERROR:
Saving crash info to /hpc/users/kilicb01/crash-20250109-141735-kilicb01-parcCombiner-a46b6421-dbe7-4ea9-9c07-5df3b8d1da41.txt
Traceback (most recent call last):
File "/opt/conda/envs/py37cmp-core/lib/python3.7/site-packages/nipype/pipeline/plugins/multiproc.py", line 67, in run_node
result["result"] = node.run(updatehash=updatehash)
File "/opt/conda/envs/py37cmp-core/lib/python3.7/site-packages/nipype/pipeline/engine/nodes.py", line 524, in run
result = self._run_interface(execute=True)
File "/opt/conda/envs/py37cmp-core/lib/python3.7/site-packages/nipype/pipeline/engine/nodes.py", line 642, in _run_interface
return self._run_command(execute)
File "/opt/conda/envs/py37cmp-core/lib/python3.7/site-packages/nipype/pipeline/engine/nodes.py", line 751, in _run_command
f"Exception raised while executing Node {self.name}.\n\n{result.runtime.traceback}"
nipype.pipeline.engine.nodes.NodeExecutionError: Exception raised while executing Node parcCombiner.
Traceback (most recent call last):
File "/opt/conda/envs/py37cmp-core/lib/python3.7/site-packages/nibabel/loadsave.py", line 42, in load
stat_result = os.stat(filename)
FileNotFoundError: [Errno 2] No such file or directory: '/output_dir/nipype-1.8.0/sub-1009668726/ses-1/anatomical_pipeline/parcellation_stage/parcBrainStem/brainstem.nii.gz'
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/opt/conda/envs/py37cmp-core/lib/python3.7/site-packages/nipype/interfaces/base/core.py", line 398, in run
runtime = self._run_interface(runtime)
File "/opt/conda/envs/py37cmp-core/lib/python3.7/site-packages/cmtklib/parcellation.py", line 650, in _run_interface
img_stem = ni.load(self.inputs.brainstem_structures)
File "/opt/conda/envs/py37cmp-core/lib/python3.7/site-packages/nibabel/loadsave.py", line 44, in load
raise FileNotFoundError(f"No such file or no access: '{filename}'")
FileNotFoundError: No such file or no access: '/output_dir/nipype-1.8.0/sub-1009668726/ses-1/anatomical_pipeline/parcellation_stage/parcBrainStem/brainstem.nii.gz'
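For completeness, the same kind of check for the brainstem module (again, the derivatives path and the brainstemSsLabels* pattern are my assumptions about the FreeSurfer output naming; the version suffix varies by release):

from pathlib import Path

# Assumed FreeSurfer derivatives location; adjust to your output_dir layout.
fs_derivatives = Path("/output_dir/freesurfer-7.1.1")

for subj_dir in sorted(fs_derivatives.glob("sub-*")):
    # The brainstem substructures module writes brainstemSsLabels*.mgz into mri/.
    hits = list((subj_dir / "mri").glob("brainstemSsLabels*.mgz"))
    print(f"{subj_dir.name}: {'OK' if hits else 'MISSING brainstem volumes'}")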
Finally, if I turn off both the hippocampal subfield and brainstem segmentations by setting them to false, all of the subjects complete processing. Is there a reason for this behavior, and how can it be fixed?
Thank you
Ülgen