#!/usr/bin/env python3
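"""
QSMxT segment: QSM and T1 segmentation pipeline.

Segments T1-weighted images using FastSurfer and registers the segmentations
to the QSM (magnitude) space, producing segmentations in both T1 and QSM space.

Example invocation (paths and subject IDs are hypothetical):
    python3 run_3_segment.py bids/ segmentations/ --subjects sub-01 --num_threads 4
"""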
from nipype.pipeline.engine import Workflow, Node
from nipype.interfaces.io import DataSink
from nipype.interfaces.ants.registration import RegistrationSynQuick
from nipype.interfaces.ants.resampling import ApplyTransforms
from interfaces import nipype_interface_fastsurfer as fastsurfer
from interfaces import nipype_interface_mgz2nii as mgz2nii
import time
import glob
import os
import argparse
import psutil

def init_workflow():
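    # discover subject folders matching --subject_pattern, optionally filtered by --subjects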
subjects = [
os.path.split(path)[1]
for path in glob.glob(os.path.join(args.bids_dir, args.subject_pattern))
if not args.subjects or os.path.split(path)[1] in args.subjects
]
wf = Workflow("workflow_segmentation", base_dir=args.work_dir)
wf.add_nodes([
init_subject_workflow(subject)
for subject in subjects
])
return wf

def init_subject_workflow(subject):
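    # discover session folders matching --session_pattern, optionally filtered by --sessions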
sessions = [
os.path.split(path)[1]
for path in glob.glob(os.path.join(args.bids_dir, subject, args.session_pattern))
if not args.sessions or os.path.split(path)[1] in args.sessions
]
wf = Workflow(subject, base_dir=os.path.join(args.work_dir, "workflow_segmentation"))
wf.add_nodes([
init_session_workflow(subject, session)
for session in sessions
])
return wf

def init_session_workflow(subject, session):
    # identify all runs - ensure that we only look at runs where both T1 and magnitude exist
    def get_run_id(path):
        # extract the run identifier between 'run-' and the following '_' in the filename
        fname = os.path.split(path)[1]
        start = fname.find('run-') + 4
        return fname[start:fname.find('_', start)]
    magnitude_runs = sorted(set(
        get_run_id(path)
        for path in glob.glob(os.path.join(args.bids_dir, args.magnitude_pattern.replace("{run}", "").format(subject=subject, session=session)))
    ))
    t1w_runs = sorted(set(
        get_run_id(path)
        for path in glob.glob(os.path.join(args.bids_dir, args.t1_pattern.replace("{run}", "").format(subject=subject, session=session)))
    ))
    if len(t1w_runs) != len(magnitude_runs):
        print(f"QSMxT: WARNING: Number of T1w and magnitude runs do not match for {subject}/{session}")
        time.sleep(3)
    # proceed with the run identifiers found for the T1w images
    runs = [f'run-{x}' for x in t1w_runs]
wf = Workflow(session, base_dir=os.path.join(args.work_dir, "workflow_segmentation", subject, session))
wf.add_nodes([
init_run_workflow(subject, session, run)
for run in runs
])
return wf

def init_run_workflow(subject, session, run):
wf = Workflow(run, base_dir=os.path.join(args.work_dir, "workflow_segmentation", subject, session, run))
# get relevant files from this run
t1_pattern = os.path.join(args.bids_dir, args.t1_pattern.format(subject=subject, session=session, run=run))
mag_pattern = os.path.join(args.bids_dir, args.magnitude_pattern.format(subject=subject, session=session, run=run))
t1_files = sorted(glob.glob(t1_pattern))
mag_files = sorted(glob.glob(mag_pattern))
    if not t1_files:
        print(f"QSMxT: No T1w files matching pattern: {t1_pattern}")
        exit(1)
    if not mag_files:
        print(f"QSMxT: No magnitude files matching pattern: {mag_pattern}")
        exit(1)
    if len(t1_files) > 1:
        print(f"QSMxT: Warning: Multiple T1w files matching pattern {t1_pattern}")
    if len(mag_files) > 1:
        print(f"QSMxT: Warning: Multiple magnitude files matching pattern {mag_pattern}")
    # use the first matching file from each set
    t1_file = t1_files[0]
    mag_file = mag_files[0]
# register t1 to magnitude
n_registration = Node(
interface=RegistrationSynQuick(
#num_threads=1,
fixed_image=mag_file,
moving_image=t1_file
),
# relevant outputs: out_matrix
name='register_t1_to_qsm'
)
# segment t1
n_fastsurfer = Node(
interface=fastsurfer.FastSurferInterface(
in_file=t1_file,
num_threads=args.num_threads
),
name='segment_t1'
)
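    # PBS resource requests for the FastSurfer node; applied only when running via --pbs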
n_fastsurfer.plugin_args = {
'qsub_args': f'-A {args.qsub_account_string} -l walltime=03:00:00 -l select=1:ncpus={args.num_threads}:mem=20gb:vmem=20gb',
'overwrite': True
}
# convert segmentation to nii
n_fastsurfer_aseg_nii = Node(
interface=mgz2nii.Mgz2NiiInterface(),
name='fastsurfer_aseg_nii',
)
wf.connect([
(n_fastsurfer, n_fastsurfer_aseg_nii, [('out_file', 'in_file')])
])
# apply transforms to segmentation
n_transform_segmentation = Node(
interface=ApplyTransforms(
dimension=3,
#output_image=,
reference_image=mag_file,
interpolation="NearestNeighbor"
),
name='transform_segmentation_to_qsm'
)
wf.connect([
(n_fastsurfer_aseg_nii, n_transform_segmentation, [('out_file', 'input_image')]),
(n_registration, n_transform_segmentation, [('out_matrix', 'transforms')])
])
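    # sink both segmentations (T1 space and QSM space) into the output directory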
n_datasink = Node(
interface=DataSink(
base_directory=args.out_dir
#container=out_dir
),
name='datasink'
)
wf.connect([
(n_fastsurfer_aseg_nii, n_datasink, [('out_file', 't1_segmentations')]),
(n_transform_segmentation, n_datasink, [('output_image', 'qsm_segmentations')])
])
return wf

if __name__ == "__main__":
parser = argparse.ArgumentParser(
description="QSMxT segment: QSM and T1 segmentation pipeline. Segments T1-weighted images and registers " +
"these to the QSM space to produce segmentations for both T1 and QSM.",
formatter_class=argparse.ArgumentDefaultsHelpFormatter
)
parser.add_argument(
'bids_dir',
help='Input data folder that can be created using run_1_dicomToBids.py. Can also use a ' +
'custom folder containing subject folders and NIFTI files or a BIDS folder with a ' +
'different structure, as long as --subject_pattern, --session_pattern, ' +
'--t1_pattern and --magnitude_pattern are also specified.'
)
parser.add_argument(
'out_dir',
help='Output segmentation directory; will be created if it does not exist.'
)
parser.add_argument(
'--work_dir',
default=None,
        help='NiPype working directory; defaults to \'out_dir\'.'
)
parser.add_argument(
'--subject_pattern',
default='sub*',
help='Pattern used to match subject folders in bids_dir'
)
parser.add_argument(
'--session_pattern',
default='ses*',
help='Pattern used to match session folders in subject folders'
)
parser.add_argument(
'--t1_pattern',
default='{subject}/{session}/anat/*{run}*T1w*nii*',
help='Pattern to match t1 files within the BIDS directory.'
)
parser.add_argument(
'--magnitude_pattern',
default='{subject}/{session}/anat/*{run}*magnitude*nii*',
help='Pattern to match magnitude files within the BIDS directory.'
)
parser.add_argument(
'--subjects',
default=None,
nargs='*',
help='List of subject folders to process; by default all subjects are processed.'
)
parser.add_argument(
'--sessions',
default=None,
nargs='*',
help='List of session folders to process; by default all sessions are processed.'
)
parser.add_argument(
'--num_threads',
type=int,
default=1,
help='The number of threads (MultiProc) or CPUs (PBS) used for each running instance ' +
'of FastSurfer'
)
parser.add_argument(
'--n_procs',
type=int,
default=None,
        help='Number of processes to run concurrently. By default, we use the number of ' +
             'available CPUs, provided there are 11 GB of memory available for each.'
)
parser.add_argument(
'--pbs',
default=None,
dest='qsub_account_string',
help='Run the pipeline via PBS and use the argument as the QSUB account string.'
)
parser.add_argument(
'--debug',
action='store_true',
help='Enables some NiPype settings for debugging.'
)
args = parser.parse_args()
# ensure directories are complete and absolute
if not args.work_dir: args.work_dir = args.out_dir
args.bids_dir = os.path.abspath(args.bids_dir)
args.work_dir = os.path.abspath(args.work_dir)
args.out_dir = os.path.abspath(args.out_dir)
# this script's directory
this_dir = os.path.dirname(os.path.abspath(__file__))
# misc environment variables
os.environ["SUBJECTS_DIR"] = "." # needed for reconall
os.environ["FSLOUTPUTTYPE"] = "NIFTI_GZ"
os.environ["FASTSURFER_HOME"] = "/opt/FastSurfer"
# PATH environment variable
os.environ["PATH"] += os.pathsep + os.path.join(this_dir, "scripts")
os.environ["PATH"] += os.pathsep + os.path.abspath("/opt/FastSurfer/")
# PYTHONPATH environment variable
if "PYTHONPATH" in os.environ: os.environ["PYTHONPATH"] += os.pathsep + this_dir
else: os.environ["PYTHONPATH"] = this_dir
# don't remove outputs
from nipype import config
config.set('execution', 'remove_unnecessary_outputs', 'false')
# debugging options
if args.debug:
config.enable_debug_mode()
config.set('execution', 'stop_on_first_crash', 'true')
config.set('execution', 'keep_inputs', 'true')
config.set('logging', 'workflow_level', 'DEBUG')
config.set('logging', 'interface_level', 'DEBUG')
config.set('logging', 'utils_level', 'DEBUG')
wf = init_workflow()
os.makedirs(os.path.abspath(args.work_dir), exist_ok=True)
os.makedirs(os.path.abspath(args.out_dir), exist_ok=True)
    # get the number of available CPUs (NCPUS is set by PBS; otherwise use os.cpu_count())
n_cpus = int(os.environ["NCPUS"]) if "NCPUS" in os.environ else int(os.cpu_count())
# set number of concurrent processes to run depending on
# available CPUs and RAM (max 1 per 11 GB of available RAM)
if not args.n_procs:
available_ram_gb = psutil.virtual_memory().available / 1e9
args.n_procs = min(int(available_ram_gb / 11), n_cpus)
# run workflow
if args.qsub_account_string:
wf.run(
plugin='PBSGraph',
plugin_args={
'qsub_args': f'-A {args.qsub_account_string} -l walltime=00:50:00 -l select=1:ncpus=1:mem=5gb'
}
)
else:
wf.run(
plugin='MultiProc',
plugin_args={
'n_procs': args.n_procs
}
)