forked from miykael/nipype-beginner-s-guide
-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy path: secondlevelpipeline.py
213 lines (166 loc) · 7.92 KB
/
secondlevelpipeline.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
"""
Import modules
"""
import nipype.interfaces.freesurfer as fs # freesurfer
import nipype.interfaces.io as nio # i/o routines
import nipype.interfaces.spm as spm # spm
import nipype.interfaces.utility as util # utility
import nipype.pipeline.engine as pe # pypeline engine
"""
Define experiment specific parameters
"""
#to better access the parent folder of the experiment
experiment_dir = '~SOMEPATH/experiment'
#tell FreeSurfer where the recon-all output is at
freesurfer_dir = experiment_dir + '/freesurfer_data'
fs.FSCommand.set_default_subjects_dir(freesurfer_dir)
#list of subjectnames
subjects = ['subject1', 'subject2', 'subject3']
#second level analysis pipeline specific components
nameOfLevel2Out = 'level2_output'
numberOfContrasts = 5 #number of contrasts you specified in the first level analysis
contrast_ids = range(1,numberOfContrasts+1) #to create a list with value [1,2,3,4,5]
"""
Second level analysis on the volume
===================================
"""
"""
Grab the data
"""
#Node: DataGrabber - to collect all the con images for each contrast
l2volSource = pe.Node(nio.DataGrabber(infields=['con']), name="l2volSource")
path2normcons = experiment_dir + '/results/level1_output/subject*/normcons/ants_con_%04d.nii'
l2volSource.inputs.template = path2normcons
l2volSource.iterables = [('con',contrast_ids)] # iterate over all contrast images
"""
Define nodes
"""
#Node: OneSampleTTest - to perform an one sample t-test analysis on the volume
oneSampleTTestVolDes = pe.Node(interface=spm.OneSampleTTestDesign(),
name="oneSampleTTestVolDes")
#Node: EstimateModel - to estimate the model
l2estimate = pe.Node(interface=spm.EstimateModel(), name="l2estimate")
l2estimate.inputs.estimation_method = {'Classical' : 1}
#Node: EstimateContrast - to estimate the contrast (in this example just one)
l2conestimate = pe.Node(interface = spm.EstimateContrast(), name="l2conestimate")
cont1 = ('Group','T', ['mean'],[1])
l2conestimate.inputs.contrasts = [cont1]
l2conestimate.inputs.group_contrast = True
#Node: Threshold - to threshold the estimated contrast
l2threshold = pe.Node(interface = spm.Threshold(), name="l2threshold")
l2threshold.inputs.contrast_index = 1
l2threshold.inputs.use_fwe_correction = False
l2threshold.inputs.use_topo_fdr = True
l2threshold.inputs.extent_threshold = 1
#voxel threshold
l2threshold.inputs.extent_fdr_p_threshold = 0.05
#cluster threshold (value is in -ln()): 1.301 = 0.05; 2 = 0.01; 3 = 0.001,
l2threshold.inputs.height_threshold = 3
##Node: MultipleRegressionDesign - to perform a multiple regression analysis
#multipleRegDes = pe.Node(interface=spm.MultipleRegressionDesign(),
# name="multipleRegDes")
##regressor1 and regressor2 for 3 subjects
#multipleRegDes.inputs.covariates = [dict(vector=[-0.30,0.52,1.75],
# name='nameOfRegressor1'),
# dict(vector=[1.55,-1.80,0.77],
# name='nameOfRegressor2')]
"""
Establish a second level volume pipeline
"""
#Create 2-level vol pipeline and connect up all components
l2volflow = pe.Workflow(name="l2volflow")
l2volflow.base_dir = experiment_dir + '/results/workingdir_l2vol'
l2volflow.connect([(l2volSource,oneSampleTTestVolDes,[('outfiles','in_files')]),
(oneSampleTTestVolDes,l2estimate,[('spm_mat_file','spm_mat_file')]),
(l2estimate,l2conestimate,[('spm_mat_file','spm_mat_file'),
('beta_images','beta_images'),
('residual_image','residual_image')
]),
(l2conestimate,l2threshold,[('spm_mat_file','spm_mat_file'),
('spmT_images','stat_image'),
]),
])
"""
Second level analysis on the surface
====================================
"""
"""
Grab the data
"""
#Node: IdentityInterface - to iterate over contrasts and hemispheres
l2surfinputnode = pe.Node(interface=util.IdentityInterface(fields=['contrasts','hemi']),
name='l2surfinputnode')
l2surfinputnode.iterables = [('contrasts', contrast_ids),
('hemi', ['lh','rh'])]
#Node: DataGrabber - to collect contrast images and registration files
l2surfSource = pe.Node(interface=nio.DataGrabber(infields=['con_id'],
outfields=['con','reg']),
name='l2surfSource')
l2surfSource.inputs.base_directory = experiment_dir + '/results/level1_output/'
l2surfSource.inputs.template = '*'
l2surfSource.inputs.field_template = dict(con='surf_contrasts/_subject_id_*/con_%04d.img',
reg='bbregister/_subject_id_*/*.dat')
l2surfSource.inputs.template_args = dict(con=[['con_id']],reg=[[]])
"""
Define nodes
"""
#Node: Merge - to merge contrast images and registration files
merge = pe.Node(interface=util.Merge(2, axis='hstack'),name='merge')
#function to create a list of all subjects and the location of their specific files
def ordersubjects(files, subj_list):
    """Reorder *files* to follow the subject order given in *subj_list*.

    Each path in *files* is expected to contain a '/_subject_id_<name>/'
    component. All files matching a subject are collected, in subj_list
    order; files matching no subject are dropped.

    Parameters
    ----------
    files : list of str
        File paths as collected by the DataGrabber (arbitrary order).
    subj_list : list of str
        Subject names in the desired output order.

    Returns
    -------
    list of str

    Note: the original inner loop ended with a no-op ``continue`` after the
    append; all matches per subject were (and still are) collected -- the
    dead statement is simply gone.
    """
    return [subj_file
            for subject in subj_list
            for subj_file in files
            if '/_subject_id_%s/' % subject in subj_file]
#Node: MRISPreproc - to concatenate contrast images projected to fsaverage
concat = pe.Node(interface=fs.MRISPreproc(), name='concat')
concat.inputs.target = 'fsaverage'  # common surface template all subjects map onto
concat.inputs.fwhm = 5 #the smoothing of the surface data happens here (FWHM in mm)
#function that transforms a given list into tuples
def list2tuple(listoflist):
    """Convert each inner list of *listoflist* into a tuple."""
    return list(map(tuple, listoflist))
#Node: OneSampleTTest - to perform a one sample t-test on the surface
# (FreeSurfer implementation, applied to the concatenated surface data)
oneSampleTTestSurfDes = pe.Node(interface=fs.OneSampleTTest(),
name='oneSampleTTestSurfDes')
"""
Establish a second level surface pipeline
"""
#Create 2-level surf pipeline and connect up all components
l2surfflow = pe.Workflow(name='l2surfflow')
l2surfflow.base_dir = experiment_dir + '/results/workingdir_l2surf'
l2surfflow.connect([(l2surfinputnode,l2surfSource,[('contrasts','con_id')]),
(l2surfinputnode,concat,[('hemi','hemi')]),
(l2surfSource,merge,[(('con', ordersubjects, subjects),'in1'),
(('reg', ordersubjects, subjects),'in2')]),
(merge,concat,[(('out', list2tuple),'vol_measure_file')]),
(concat,oneSampleTTestSurfDes,[('out_file','in_file')]),
])
"""
Store results in a common Datasink
==================================
"""
#Node: Datasink - Create a datasink node to store important outputs
l2datasink = pe.Node(interface=nio.DataSink(), name="l2datasink")
l2datasink.inputs.base_directory = experiment_dir + '/results'
l2datasink.inputs.container = nameOfLevel2Out
#integration of the datasink into the volume analysis pipeline
l2volflow.connect([(l2conestimate,l2datasink,[('spm_mat_file','l2vol_contrasts.@spm_mat'),
('spmT_images','l2vol_contrasts.@T'),
('con_images','l2vol_contrasts.@con'),
]),
(l2threshold,l2datasink,[('thresholded_map',
'vol_contrasts_thresh.@threshold')]),
])
#integration of the datasink into the surface analysis pipeline
l2surfflow.connect([(oneSampleTTestSurfDes,l2datasink,[('sig_file',
'l2surf_contrasts.@sig_file')])])
"""
Run pipeline
"""
l2volflow.write_graph(graph2use='flat')
l2volflow.run(plugin='MultiProc', plugin_args={'n_procs' : 2})
l2surfflow.write_graph(graph2use='flat')
l2surfflow.run(plugin='MultiProc', plugin_args={'n_procs' : 2})