I ended up writing a function and wrapping it in a separate workflow. I’m afraid I’m not sure how to post nicely formatted code here, but the function is:
def conj_stat_maps(map1, map2, out_file=None):
    """
    Create a minimum-statistic conjunction of two statistical maps.

    For positive voxels the smaller of the two values is kept; for
    negative voxels the one closer to zero is kept — i.e. the result
    retains the weaker effect present in BOTH maps (typically applied to
    outputs of EstimateContrast or Threshold from the SPM interface).

    Args:
        map1 (str): filename of the first stat map in the conjunction
        map2 (str): filename of the second stat map in the conjunction
    Optional:
        out_file (str): output filename. If None (default), creates
            'conj_map.nii' in the current directory
    Returns:
        out_file (str): output filename (absolute path)
    """
    # Imports are kept inside the function body so it can be shipped as a
    # nipype Function node (only the function source is serialized).
    import nilearn.image as nli
    import os.path

    if out_file is None:
        out_file = 'conj_map.nii'

    # Positive part: element-wise minimum of the positive voxels;
    # negative part: element-wise maximum (closest to zero) of the
    # negative voxels. Exactly one term is nonzero per voxel.
    formula = ('np.minimum(img1*(img1>0), img2*(img2>0)) + '
               'np.maximum(img1*(img1<0), img2*(img2<0))')
    conjunction_img = nli.math_img(formula, img1=map1, img2=map2)
    conjunction_img.to_filename(out_file)
    return os.path.abspath(out_file)
And then it’s wrapped as a workflow here:
def conj_maps(nipype_params, l2_subdir, con_list, map_name, extent_threshold=0,
              matlab_path='', wf_name=None):
    """
    Creates nipype workflow for conjunction of statistical maps.

    Args:
        nipype_params (dict): dict with the following fields:
            wd (str): path to nipype work dir
            sinkdir (str): path to datasink base dir
        l2_subdir (str): path to directory with level2 stat map outputs
            (relative to nipype sinkdir)
        con_list (list of str): list of two contrasts to use for conjunction
            (e.g. ['con_0001', 'con_0002'])
        map_name (str): stat map name, will typically be spmT_0001.nii for
            unthresholded map and spmT_0001_thr.nii for thresholded
    Optional:
        extent_threshold (int): min number of voxels for cluster thresholding
            after conjunction. If 0 (default), won't run cluster thresholding.
            Should only be used when thresholded maps are provided.
        matlab_path (str): path to system matlab. Only relevant if
            extent_threshold>0
        wf_name (str): the name to use for the workflow. If None (default),
            the workflow name will be the function name
    Returns:
        nipype workflow for creating a conjunction of level2 maps
    """
    # Create the nipype work dir if needed (makedirs also creates any
    # missing parents, which plain os.mkdir would fail on).
    if not os.path.exists(nipype_params['wd']):
        os.makedirs(nipype_params['wd'], exist_ok=True)
    # If no workflow name was given - default to this function's name
    if wf_name is None:
        wf_name = sys._getframe().f_code.co_name
    con1 = con_list[0]
    con2 = con_list[1]
    # selectfiles grabs the two level-2 maps (one per contrast) from the
    # datasink base directory
    selectfiles = Node(SelectFiles({'map1': l2_subdir + f'/{con1}/' + map_name,
                                    'map2': l2_subdir + f'/{con2}/' + map_name},
                                   base_directory=nipype_params['sinkdir'],
                                   sort_filelist=True), name="selectfiles")
    # Wrap conj_stat_maps as a Function node so it runs inside the workflow
    conjmaps = Node(Function(input_names=['map1', 'map2'],
                             output_names=['out_file'],
                             function=conj_stat_maps),
                    name='conjmaps')
    datasink = Node(DataSink(base_directory=nipype_params['sinkdir'],
                             container=''),
                    name="datasink")
    # Initiation of the conjunction workflow
    wf = Workflow(name=wf_name)
    wf.base_dir = nipype_params['wd']
    # Connect up the workflow components
    wf.connect([(selectfiles, conjmaps, [('map1', 'map1'), ('map2', 'map2')]),
                (conjmaps, datasink, [('out_file', 'group.' + wf_name + '.@conj')])])
    # If a cluster threshold was provided, add a node for cluster thresholding
    if extent_threshold > 0:
        # BUG FIX: previously the threshold was hard-coded to 10, silently
        # ignoring the extent_threshold argument. Pass the parameter through.
        clusterthresh = Node(niu.ClusterThresh(matlab_path=matlab_path,
                                               extent_threshold=extent_threshold),
                             name='clusterthresh')
        wf.connect([(conjmaps, clusterthresh, [('out_file', 'in_file')]),
                    (clusterthresh, datasink,
                     [('out_file', 'group.' + wf_name + '.@conj_thr'),
                      ('cluster_label_file',
                       'group.' + wf_name + '.@conj_label')])])
    return wf
At some point this will all be on github, but I hope it helps for now