import os
from glob import glob
from itertools import product
sub_list, ses_list, layout = get_subs(data_dir)
assert len(sub_list) == len(glob(data_dir + '/sub-*'))
#assert len(get_subs(data_dir)[1]) == len(glob(data_dir + '/sub-*' + '/ses-*'))
all_sub_ses_combos = set(product(sub_list, ses_list))
missing_list = []
for tup in all_sub_ses_combos:
    sub_ses = os.path.join(data_dir, 'sub-' + tup[0], 'ses-' + tup[1])
    if not os.path.exists(sub_ses):
        missing_list.append(tup)
filtered_sub_ses_list = list(all_sub_ses_combos - set(missing_list))
assert len(filtered_sub_ses_list) == len(glob(data_dir + '/sub-*' + '/ses-*'))
sub_iter, ses_iter = filter_workflow(data_dir, sub_list, ses_list, exclude_list=[()])
assert len(sub_iter) == len(ses_iter)
assert len(filtered_sub_ses_list) == len(sub_iter) == len(ses_iter)
for ind, sub in enumerate(sub_iter):
    assert (sub, ses_iter[ind]) in filtered_sub_ses_list
Get a specific subject's and session's gradient files associated with a given DWI file:
#usage
sub_grad_files = get_sub_gradfiles('./testing/BIDS_dir/sub-11042/ses-01/dwi/sub-11042_ses-01_dwi.nii.gz')
sub_grad_files
sub_grad_files = get_sub_gradfiles('./testing/BIDS_dir/sub-11042/ses-01/dwi/sub-11042_ses-01_dwi.nii.gz')
assert len(sub_grad_files) == 2
dwi = layout.get(
subject=sub_list[1],
datatype="dwi",
extension=[".nii", ".nii.gz"],
return_type="file",
)
assert len(dwi) == len(layout.get_sessions())
#usage
anat = layout.get(
subject=sub_list[0],
datatype = 'anat',
#session='002',
extension=[".nii.gz"],
return_type="filename",
)
print(anat)
TRtime, pe_dir = BIDS_metadata(dwi[0], bids_dir = data_dir)
print('Total readout time is {}'.format(TRtime))
print('Phase encoding direction is {}'.format(pe_dir))
reorient = fsl.utils.Reorient2Std()
reorient.inputs.in_file = '/Users/xxie/sample_data/ben/SZ/anat/sub-11016_ses-1_T1w.nii.gz'
reorient.inputs.out_file = '/Users/xxie/sample_data/ben/SZ/anat/sub-11016_ses-1_reoriented.nii.gz'
reorient.inputs.args = '-m /Users/xxie/sample_data/ben/SZ/anat/mnitransformation.mat'  # -m saves the reorientation matrix
print(reorient.cmdline)
Let's test if fslreorient2std actually runs:
reorient.run()
# look at results:
!tree /Users/xxie/sample_data/ben/SZ/anat/
#usage
catgrad = GradCat()
catgrad.inputs.grad1 = '/Users/xxie/sample_data/derivatives/pipetography/_subject_id_MINN294/mrtrix_image1/mapflow/_mrtrix_image10/raw_dwi1.b'
catgrad.inputs.grad2 = '/Users/xxie/sample_data/derivatives/pipetography/_subject_id_MINN294/mrtrix_image2/mapflow/_mrtrix_image20/raw_dwi2.b'
catgrad.inputs.out_file = '/Users/xxie/sample_data/derivatives/raw_dwi.b'
catgrad.run()
#usage
rfov = fsl.utils.RobustFOV() # first reduce FOV of T1 image
rfov.inputs.in_file = anat[0]
rfov.inputs.out_transform = 'outputs/roi2full.mat'
rfov.inputs.out_roi = 'outputs/robustfov.nii.gz'
rfov.run() # run RobustFOV
inv_xfm = fsl.utils.ConvertXFM() # inverse transformation matrix
inv_xfm.inputs.in_file = 'outputs/roi2full.mat'
inv_xfm.inputs.invert_xfm = True
inv_xfm.inputs.out_file = 'outputs/full2roi.mat'
inv_xfm.run()
temp_ref = os.path.expandvars('$FSLDIR/data/standard/MNI152_T1_1mm.nii.gz')
flirt = fsl.preprocess.FLIRT()
flirt.inputs.in_file = 'outputs/robustfov.nii.gz'
flirt.inputs.reference = temp_ref
flirt.inputs.interp = 'spline'
flirt.inputs.out_matrix_file = 'outputs/roi2std.mat'
flirt.inputs.out_file = 'outputs/acpc_mni.nii.gz'
flirt.run() # align acpc mni
concat = fsl.utils.ConvertXFM() # concatenate xfm
concat.inputs.in_file2 = 'outputs/roi2std.mat'
concat.inputs.in_file = 'outputs/full2roi.mat'
concat.inputs.concat_xfm = True
concat.inputs.out_file = 'outputs/full2std.mat'
concat.run()
affr = fslaff2rigid()
affr.inputs.in_file = 'outputs/full2std.mat'
affr.inputs.out_file = 'outputs/outputmatrix.mat'
affr.run()
warp = fsl.preprocess.ApplyWarp() # apply warp finally
warp.inputs.in_file = anat[0]
warp.inputs.relwarp = True
warp.inputs.interp = 'spline'
warp.inputs.ref_file = temp_ref
warp.inputs.out_file = 'outputs/acpc_t1.nii.gz'
warp.inputs.premat = 'outputs/outputmatrix.mat'
warp.run()
# if we visualize both the MNI template and aligned T1, they should be in the same coordinate system
template_img = nb.load(temp_ref)
template_data = template_img.get_fdata()
img = new_img_like(template_img, template_data, affine=template_img.affine, copy_header=True)
_ = plotting.plot_img(img)
acpc_img = nb.load('outputs/acpc_t1.nii.gz')
acpc_data = acpc_img.get_fdata()
t1_img = new_img_like(acpc_img, acpc_data, affine=acpc_img.affine, copy_header=True)
_ = plotting.plot_img(t1_img)
#usage
# get session 2's dwi image:
dwi = layout.get(
subject=sub_list[0],
datatype="dwi",
session='002',
    extension=[".nii", ".nii.gz"],
return_type='filename'
)
print(dwi)
mconvert = Convert()
mconvert.inputs.in_file = dwi[0]
mconvert.inputs.grad_fsl = bfiles_fsl  # (bvecs, bvals) pair for this DWI
mconvert.inputs.out_file = 'outputs/raw_dwi.mif'
mconvert.inputs.export_grad = True
mconvert.inputs.out_bfile = 'outputs/raw_dwi.b'
mconvert.inputs.force = True
mconvert.run()
# check out the new bfile in mrtrix3 format:
!head -n 5 outputs/raw_dwi.b
#usage
createMask = BrainMask()
createMask.inputs.in_file = 'outputs/raw_dwi.mif'
createMask.inputs.out_file = 'outputs/brain_dwi_mask.mif'
createMask.run()
# visualize mask:
!mrconvert outputs/brain_dwi_mask.mif outputs/brain_dwi_mask.nii.gz
b0mask = nb.load('outputs/brain_dwi_mask.nii.gz') # display dwi
mask_data = b0mask.get_fdata()
img = new_img_like(b0mask, mask_data, affine=b0mask.affine, copy_header=True)
_ = plotting.plot_img(img)
#usage
gradcheck = GradCheck()
gradcheck.inputs.mask_file = 'outputs/brain_dwi_mask.mif'
gradcheck.inputs.in_file = 'outputs/raw_dwi.mif'
gradcheck.inputs.export_grad = True
gradcheck.inputs.out_bfile = 'outputs/corrected.b'
gradcheck.inputs.grad_file = 'outputs/raw_dwi.b'
gradcheck.inputs.force = True
gradcheck.cmdline
gradcheck.run()
# create new dwi image with corrected gradients:
mconvert = Convert()
mconvert.inputs.in_file = 'outputs/raw_dwi.mif'
mconvert.inputs.grad_file = 'outputs/corrected.b'
mconvert.inputs.out_file = 'outputs/cdwi.mif'
mconvert.inputs.force = True
mconvert.run()
# we should have the following mrtrix3 image files going into the preprocessing stages
!tree outputs -L 2 | grep .mif
#usage:
# we will work with .nii.gz format for simpler visualization
denoise = dwidenoise()
denoise.inputs.out_file = "outputs/denoised.nii.gz"
denoise.inputs.noise = "outputs/noise.nii.gz"
denoise.inputs.in_file = "outputs/cdwi.mif" # the first session's image
denoise.inputs.force = True
denoise.run()
# visualize:
!mrconvert outputs/cdwi.mif outputs/cdwi.nii.gz -force
orig_dwi = nb.load('outputs/cdwi.nii.gz') # display dwi
orig_data = orig_dwi.get_fdata()[:, :, :, 0]
orig_img = new_img_like(orig_dwi, orig_data, affine=orig_dwi.affine, copy_header=True)
_ = plotting.plot_img(orig_img)
_ = plotting.plot_img(denoise.inputs.noise)
denoised_dwi = nb.load(denoise.inputs.out_file)
denoised_data = denoised_dwi.get_fdata()[:, :, :, 0]
denoised_img = new_img_like(
denoised_dwi, denoised_data, affine=denoised_dwi.affine, copy_header=True
)
_ = plotting.plot_img(denoised_img)
#from fastcore.test import *
#
#test_eq(os.path.exists(denoise.inputs.noise), True)
#test_eq(os.path.exists(denoise.inputs.out_file), True)
#usage
DeGibbs = MRDeGibbs()
DeGibbs.inputs.in_file = 'outputs/denoised.nii.gz'
DeGibbs.inputs.out_file = 'outputs/degibbs.nii.gz'
DeGibbs.run()
# visualize:
degibbs_dwi = nb.load(DeGibbs.inputs.out_file)
degibbs_data = degibbs_dwi.get_fdata()[:, :, :, 0]
degibbs_img = new_img_like(
degibbs_dwi, degibbs_data, affine=degibbs_dwi.affine, copy_header=True
)
_ = plotting.plot_img(degibbs_img)
DWI Distortion correction
dwipreproc was recently renamed to dwifslpreproc, and dwipreproc doesn't exist in Nipype's mrtrix3 interface, so we create the following command-line inputs ourselves:
#usage
# this example is for basic DWI acquisition
# all image volumes are acquired in a single protocol with fixed phase encoding:
preproc = dwipreproc()
preproc.inputs.in_file = 'outputs/degibbs.nii.gz'
preproc.inputs.rpe_options = "-rpe_none"
preproc.inputs.eddy_options = '"--slm=linear --repol"'
preproc.inputs.out_file = "outputs/preproc.nii.gz"
preproc.inputs.grad_file = gradcheck.inputs.out_bfile
preproc.inputs.force = True
# PyBIDS can also extract metadata:
preproc.inputs.RO_time = layout.get_metadata(dwi[0])['TotalReadoutTime']
preproc.inputs.pe_dir = layout.get_metadata(dwi[0])['PhaseEncodingDirection']
print(preproc.cmdline)
preproc.run()
# visualize:
preproc_dwi = nb.load(preproc.inputs.out_file)
preproc_data = preproc_dwi.get_fdata()[:, :, :, 0]
preproc_img = new_img_like(
preproc_dwi, preproc_data, affine=preproc_dwi.affine, copy_header=True
)
_ = plotting.plot_img(preproc_img)
Bias field correction with ANTS:
There are 3 important parameters:
- ants.b: default is [100,3], where the values stand for [initial mesh resolution in mm, spline order]. Equivalent to ANTS' N4BiasFieldCorrection option -b. This value is optimised for human adult data and needs to be adjusted for rodent data.
- ants.c: default is [1000,0.0], i.e. [numberOfIterations, convergenceThreshold]. Equivalent to ANTS' N4BiasFieldCorrection option -c.
- ants.s: default is 4, the shrink factor applied to spatial dimensions. Equivalent to N4BiasFieldCorrection option -s.
The current Nipype interface doesn't support the additional command-line arguments for ANTS in the correct order, and its flags for certain inputs are outdated, so we create our own:
#usage
N4BiasCorrect = BiasCorrect()
N4BiasCorrect.inputs.use_ants = True
N4BiasCorrect.inputs.in_file = preproc.inputs.out_file
N4BiasCorrect.inputs.out_file = 'outputs/dwi_bias.nii.gz'
N4BiasCorrect.inputs.bias = 'outputs/biasfield.nii.gz'
N4BiasCorrect.inputs.grad_file = gradcheck.inputs.out_bfile
# We use the default values provided by brainlife.io as a test:
N4BiasCorrect.inputs.args = "-ants.b [150,3] -ants.c [200x200, 1e-6] -ants.s 2"
print(N4BiasCorrect.cmdline)
N4BiasCorrect.run()
# visualize the output bias field:
bias_dwi = nb.load(N4BiasCorrect.inputs.bias)
bias_data = bias_dwi.get_fdata()  # bias field is a 3D volume
bias_img = new_img_like(
bias_dwi, bias_data, affine=bias_dwi.affine, copy_header=True
)
_ = plotting.plot_img(bias_img)
#usage
# generate new gradient after bias correct
grad_info = MRInfo()
grad_info.inputs.in_file = N4BiasCorrect.inputs.out_file
grad_info.inputs.grad_file = gradcheck.inputs.out_bfile
grad_info.inputs.export_grad = True
grad_info.inputs.out_bfile = 'outputs/RCNtmp.b'
grad_info.inputs.force = True
print(grad_info.cmdline)
grad_info.run()
# check:
!head -n 5 outputs/RCNtmp.b
#usage
CheckNoise = CheckNIZ()
CheckNoise.inputs.out_file = 'outputs/lownoisemap.nii.gz'
CheckNoise.inputs.isfinite = 'outputs/noise.nii.gz'
CheckNoise.inputs.cond_if = 'outputs/noise.nii.gz'
CheckNoise.inputs.force = True
CheckNoise.inputs.quiet = True
CheckNoise.inputs.nthreads = 6
print(CheckNoise.cmdline)
CheckNoise.run()
# visualize:
lownoise_dwi = nb.load(CheckNoise.inputs.out_file)
lownoise_data = lownoise_dwi.get_fdata()  # noise map is a 3D volume
lownoise_img = new_img_like(
lownoise_dwi, lownoise_data, affine=lownoise_dwi.affine, copy_header=True
)
_ = plotting.plot_img(lownoise_img)
#usage
RCNRemoval = RicianNoise()
RCNRemoval.inputs.in_file = N4BiasCorrect.inputs.out_file
RCNRemoval.inputs.power = 2
RCNRemoval.inputs.lownoisemap = CheckNoise.inputs.out_file
RCNRemoval.inputs.denoise = 2
RCNRemoval.inputs.out_file = 'outputs/RCN_dwi.mif'
RCNRemoval.inputs.force=True
print(RCNRemoval.cmdline)
RCNRemoval.run()
# We need to do noise comparison with the DWI's instead of noise files:
CheckDWI = CheckNIZ()
CheckDWI.inputs.out_file = 'outputs/RCN_tmp.mif'
CheckDWI.inputs.isfinite = 'outputs/RCN_dwi.mif'
CheckDWI.inputs.cond_if = 'outputs/RCN_dwi.mif'
CheckDWI.inputs.force = True
print(CheckDWI.cmdline)
CheckDWI.run()
# Create rician noise removed image:
RCN_convert = Convert()
RCN_convert.inputs.in_file = CheckDWI.inputs.out_file
RCN_convert.inputs.grad_file = grad_info.inputs.out_bfile
RCN_convert.inputs.out_file = 'outputs/rician_corrected_dwi.mif'
RCN_convert.inputs.force = True
RCN_convert.run()
print(RCN_convert.cmdline)
# visualize:
!mrconvert outputs/rician_corrected_dwi.mif outputs/rician_corrected_dwi.nii.gz -force
rician_dwi = nb.load('outputs/rician_corrected_dwi.nii.gz')
rician_data = rician_dwi.get_fdata()[:,:,:,2]  # try a non-b0 volume
rician_img = new_img_like(
rician_dwi, rician_data, affine=rician_dwi.affine, copy_header=True
)
_ = plotting.plot_img(rician_img)
#usage
createMask = BrainMask()
createMask.inputs.in_file = 'outputs/rician_corrected_dwi.mif'
createMask.inputs.out_file = 'outputs/dwi_mask.mif'
createMask.run()
# visualize mask:
!mrconvert outputs/dwi_mask.mif outputs/dwi_mask.nii.gz
mask = nb.load('outputs/dwi_mask.nii.gz') # display dwi
mask_data = mask.get_fdata()
img = new_img_like(mask, mask_data, affine=mask.affine, copy_header=True)
_ = plotting.plot_img(img)
Then we create an FA-based WM mask. We first use the Nipype-provided dwi2tensor and tensor2metric interfaces, then create a threshold interface that's unavailable in Nipype:
#usage
makeDTI = FitTensor()
makeDTI.inputs.in_file = 'outputs/rician_corrected_dwi.mif'
makeDTI.inputs.in_mask = 'outputs/dwi_mask.mif'
makeDTI.inputs.out_file = 'outputs/dti.mif'
print(makeDTI.cmdline)
makeDTI.run()
GetFA = TensorMetrics()
GetFA.inputs.in_file = 'outputs/dti.mif'
GetFA.inputs.out_fa = 'outputs/fa.mif'
print(GetFA.cmdline)
GetFA.run()
WMThresh = MRThreshold()
WMThresh.inputs.in_file = 'outputs/fa.mif'
WMThresh.inputs.opt_abs = 0.5
WMThresh.inputs.out_file = 'outputs/wm.mif'
print(WMThresh.cmdline)
WMThresh.run()
#visualize WM image:
!mrconvert outputs/wm.mif outputs/wm.nii.gz
wm = nb.load('outputs/wm.nii.gz') # display dwi
wm_data = wm.get_fdata()
img = new_img_like(wm, wm_data, affine=wm.affine, copy_header=True)
_ = plotting.plot_img(img)
Now we can normalize the white matter signal:
#usage
normalize = DWINormalize()
normalize.inputs.opt_intensity = 1000
normalize.inputs.in_file = 'outputs/rician_corrected_dwi.mif'
normalize.inputs.out_file = 'outputs/dwi_norm.mif'
normalize.inputs.mask_file = 'outputs/wm.mif'
normalize.inputs.force=True
print(normalize.cmdline)
normalize.run()
#visualize
!mrconvert outputs/dwi_norm.mif outputs/dwi_norm.nii.gz -force
norm = nb.load('outputs/dwi_norm.nii.gz') # display dwi
norm_data = norm.get_fdata()[:,:,:,2] # try non-B0 volume
img = new_img_like(norm, norm_data, affine=norm.affine, copy_header=True)
_ = plotting.plot_img(img)
#usage
# extract B0 volume
B0Extract = DWIExtract()
B0Extract.inputs.bzero = True
B0Extract.inputs.in_file = 'outputs/dwi_norm.mif' # intensity normalized dwi volume
B0Extract.inputs.out_file = 'outputs/b0_volumes.mif'
B0Extract.run()
# find mean B0 volume
B0Mean = MRMath()
B0Mean.inputs.operation = 'mean'
B0Mean.inputs.in_file = 'outputs/b0_volumes.mif'
B0Mean.inputs.axis = 3
B0Mean.inputs.out_file = 'outputs/b0_dwi.mif'
B0Mean.run()
# create DWI B0 mask
B0Mask = BrainMask()
B0Mask.inputs.in_file = 'outputs/dwi_norm.mif'
B0Mask.inputs.out_file = 'outputs/dwi_norm_mask.mif'
B0Mask.run()
# do a simple conversion to nifti format for FSL input
!mrconvert outputs/b0_dwi.mif outputs/b0_dwi.nii.gz -force
!mrconvert outputs/dwi_norm_mask.mif outputs/dwi_norm_mask.nii.gz -force
# apply mask to B0 volume
FSL_mask = fsl.ApplyMask()
FSL_mask.inputs.in_file = 'outputs/b0_dwi.nii.gz'
FSL_mask.inputs.mask_file = 'outputs/dwi_norm_mask.nii.gz'
FSL_mask.inputs.out_file = 'outputs/b0_dwi_brain.nii.gz'
FSL_mask.run()
#usage:
# visualize B0 DWI:
b0dwi = nb.load('outputs/b0_dwi_brain.nii.gz') # display dwi
b0dwi_data = b0dwi.get_fdata()
img = new_img_like(b0dwi, b0dwi_data, affine=b0dwi.affine, copy_header=True)
_ = plotting.plot_img(img)
#usage
# first we extract the brain with FSL:
FSL_BET = fsl.preprocess.BET()
FSL_BET.inputs.in_file = 'outputs/acpc_t1.nii.gz'
FSL_BET.inputs.out_file = 'outputs/acpc_t1_brain'
FSL_BET.inputs.args = '-R -B -m'
print(FSL_BET.cmdline)
FSL_BET.run()
# perform alignment of DWI to ANAT, get transformation matrix with EPIREG
EPI_REG = fsl.epi.EpiReg()
EPI_REG.inputs.epi = 'outputs/b0_dwi_brain.nii.gz'
EPI_REG.inputs.t1_head = 'outputs/acpc_t1.nii.gz'
EPI_REG.inputs.t1_brain = 'outputs/acpc_t1_brain.nii.gz'
EPI_REG.inputs.out_base = 'outputs/dwi2acpc'
print(EPI_REG.cmdline)
EPI_REG.run()
# apply transformation in mrtrix:
acpc_xfm = TransConvert()
acpc_xfm.inputs.flirt_xfm = 'outputs/dwi2acpc.mat'
acpc_xfm.inputs.flirt_in = 'outputs/b0_dwi_brain.nii.gz'
acpc_xfm.inputs.flirt_ref = 'outputs/acpc_t1_brain.nii.gz'
acpc_xfm.inputs.flirt=True
acpc_xfm.inputs.out_file = 'outputs/dwi2acpc_xfm.mat'
acpc_xfm.inputs.force=True
print(acpc_xfm.cmdline)
acpc_xfm.run()
Apply_xfm = MRTransform()
Apply_xfm.inputs.linear_xfm = 'outputs/dwi2acpc_xfm.mat'
Apply_xfm.inputs.in_file = 'outputs/dwi_norm.mif'
Apply_xfm.inputs.out_file = 'outputs/dwi_acpc.mif'
print(Apply_xfm.cmdline)
Apply_xfm.run()
!mrconvert outputs/dwi_acpc.mif outputs/dwi_acpc.nii.gz -force
# visualize, our dwi should be in same space as anat/mni152:
adwi = nb.load('outputs/dwi_acpc.nii.gz') # display dwi
adwi_data = adwi.get_fdata()[:,:,:,3]
adwi_img = new_img_like(adwi, adwi_data, affine=adwi.affine, copy_header=True)
_ = plotting.plot_img(adwi_img)
anatbrain = nb.load('outputs/acpc_t1_brain.nii.gz')
anatbrain_data=anatbrain.get_fdata()
ab_img = new_img_like(anatbrain, anatbrain_data, affine=anatbrain.affine, copy_header=True)
_ = plotting.plot_img(ab_img)
#usage
get_wmmask = WMBinarize()
get_wmmask.inputs.in_file = 'outputs/freesurfer/recon_all/mri/aseg.mgz'
get_wmmask.inputs.out_file = 'outputs/fs_wm_mask.nii.gz'
get_wmmask.inputs.all_wm = True
print(get_wmmask.cmdline)
#usage
temp_ref = os.path.expandvars('$FSLDIR/data/standard/MNI152_T1_1mm.nii.gz')
reslice = MRRegrid()
reslice.inputs.in_file = 'outputs/dwi_acpc.mif'
reslice.inputs.regrid = temp_ref
reslice.inputs.out_file = 'outputs/dwi_acpc_1mm.mif'
print(reslice.cmdline)
reslice.run()
#usage
# extract B0 volume
mniB0Extract = DWIExtract()
mniB0Extract.inputs.bzero = True
mniB0Extract.inputs.in_file = 'outputs/dwi_acpc_1mm.mif' # intensity normalized dwi volume
mniB0Extract.inputs.out_file = 'outputs/dwi_acpc_1mm_b0.mif'
mniB0Extract.run()
# find mean B0 volume
mniB0Mean = MRMath()
mniB0Mean.inputs.operation = 'mean'
mniB0Mean.inputs.in_file = 'outputs/dwi_acpc_1mm_b0.mif'
mniB0Mean.inputs.axis = 3
mniB0Mean.inputs.out_file = 'outputs/dwi_acpc_1mm_b0_mean.mif'
mniB0Mean.run()
# create DWI mask in MNI output space
B0Mask = BrainMask()
B0Mask.inputs.in_file = 'outputs/dwi_acpc_1mm.mif'
B0Mask.inputs.out_file = 'outputs/dwi_acpc_1mm_mask.mif'
B0Mask.run()
# convert B0 volume and DWI mask to nifti format
# do a simple conversion here, will be replaced with Nodes in pipeline
!mrconvert outputs/dwi_acpc_1mm_b0_mean.mif outputs/dwi_acpc_1mm_b0_mean.nii.gz -force
!mrconvert outputs/dwi_acpc_1mm_mask.mif outputs/dwi_acpc_1mm_mask.nii.gz -force
# apply mask to B0 volume to create B0 brain images
FSL_mask = fsl.ApplyMask()
FSL_mask.inputs.in_file = 'outputs/dwi_acpc_1mm_b0_mean.nii.gz'
FSL_mask.inputs.mask_file = 'outputs/dwi_acpc_1mm_mask.nii.gz'
FSL_mask.inputs.out_file = 'outputs/dwi_acpc_1mm_brain.nii.gz'
FSL_mask.run()
# Produce final images and gradients:
ConvertOutput = Convert()
ConvertOutput.inputs.in_file = 'outputs/dwi_acpc_1mm.mif'
ConvertOutput.inputs.out_file = 'outputs/dwi_acpc_1mm.nii.gz'
ConvertOutput.inputs.export_grad = True
ConvertOutput.inputs.out_bfile = 'outputs/dwi_acpc_1mm.b'
ConvertOutput.inputs.export_fslgrad = True
ConvertOutput.inputs.out_fslgrad = ('outputs/dwi_acpc.bvecs','outputs/dwi_acpc.bvals')
ConvertOutput.inputs.export_json = True
ConvertOutput.inputs.out_json = 'outputs/dwi_acpc_1mm.json'
ConvertOutput.inputs.force=True
print(ConvertOutput.cmdline)
ConvertOutput.run()
Now we have all the preprocessing done! Time for tracts!
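The SIFT2 example below assumes a white matter FOD image (outputs/wmfod.mif), a 5-tissue-type image for ACT (outputs/5tt.mif), and a tractogram from a previous tracking run. As a rough, hypothetical sketch of where the FOD and 5TT inputs could come from using Nipype's stock mrtrix3 interfaces (file names and parameters here are assumptions, not outputs of this notebook):
#example (hypothetical provenance of wmfod.mif and 5tt.mif)
from nipype.interfaces.mrtrix3 import Generate5tt, ResponseSD, EstimateFOD
# 5ttgen: tissue segmentation of the ACPC-aligned T1 for anatomically constrained tractography
gen5tt = Generate5tt()
gen5tt.inputs.algorithm = 'fsl'
gen5tt.inputs.in_file = 'outputs/acpc_t1.nii.gz'
gen5tt.inputs.out_file = 'outputs/5tt.mif'
gen5tt.run()
# dwi2response: estimate the single-fibre response function
resp = ResponseSD()
resp.inputs.algorithm = 'tournier'
resp.inputs.in_file = 'outputs/dwi_acpc_1mm.mif'
resp.inputs.in_mask = 'outputs/dwi_acpc_1mm_mask.mif'
resp.inputs.wm_file = 'outputs/wm_response.txt'
resp.run()
# dwi2fod: compute white matter fibre orientation distributions
fod = EstimateFOD()
fod.inputs.algorithm = 'csd'
fod.inputs.in_file = 'outputs/dwi_acpc_1mm.mif'
fod.inputs.mask_file = 'outputs/dwi_acpc_1mm_mask.mif'
fod.inputs.wm_txt = 'outputs/wm_response.txt'
fod.inputs.wm_odf = 'outputs/wmfod.mif'
fod.run()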
#example
sift2 = tckSIFT2()
sift2.inputs.in_file = '/Users/xxie/sample_data/dwipreproc/cuda_tracking/_session_id_002_subject_id_01/sub-01_ses-002_gmwmi2wm.tck'
sift2.inputs.in_fod = 'outputs/wmfod.mif'
sift2.inputs.act = 'outputs/5tt.mif'
sift2.inputs.fd_scale_gm = True
sift2.inputs.out_file = 'outputs/sift2.txt'
sift2.inputs.force=True
print(sift2.cmdline)
sift2.run()
Tweak atlas labels with lookup tables: mrtrix3's labelconvert, plus labelsgmfix if using a FreeSurfer atlas. Use the Nipype interface for LabelConvert, and point it at the mrtrix3 installation's /share/mrtrix3/labelconvert/ folder, where the target connectome lookup tables for output images reside (a minimal usage sketch is shown below the list). Available files include:
- aal.txt
- aal2.txt
- fs_default.txt
- hcpmmp1_ordered.txt
- hcpmmp1_original.txt
- lpba40.txt
- fs2lobes_cinginc_convert.txt
- fs2lobes_cinginc_labels.txt
- fs2lobes_cingsep_convert.txt
- fs2lobes_cingsep_labels.txt
Please refer to https://mrtrix.readthedocs.io/en/latest/quantitative_structural_connectivity/labelconvert_tutorial.html#labelconvert-tutorial for detailed explanation.
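A minimal usage sketch with Nipype's LabelConvert interface, assuming a FreeSurfer recon-all parcellation and a typical mrtrix3 install location (both paths are assumptions to adjust for your system):
#example (hypothetical paths)
import os
from nipype.interfaces.mrtrix3 import LabelConvert
label_convert = LabelConvert()
label_convert.inputs.in_file = 'outputs/freesurfer/recon_all/mri/aparc+aseg.mgz'  # assumed recon-all parcellation
label_convert.inputs.in_lut = os.path.expandvars('$FREESURFER_HOME/FreeSurferColorLUT.txt')
label_convert.inputs.in_config = '/usr/local/mrtrix3/share/mrtrix3/labelconvert/fs_default.txt'  # adjust to your mrtrix3 path
label_convert.inputs.out_file = 'outputs/atlas_parcels.mif'
print(label_convert.cmdline)
label_convert.run()
After registration to DWI space, a relabelled parcellation like this is what feeds the in_parc input of the connectome examples below.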
#example
connectivity = MakeConnectome()
connectivity.inputs.in_file = '/Users/xxie/sample_data/dwipreproc/cuda_tracking/_session_id_002_subject_id_01/sub-01_ses-002_gmwmi2wm.tck'
connectivity.inputs.in_parc = 'outputs/atlas_in_dwi_syn_int.nii.gz'
connectivity.inputs.out_file = 'outputs/connectome.csv'
connectivity.inputs.in_weights = 'outputs/sift2.txt'
connectivity.inputs.symmetric = True
connectivity.inputs.zero_diag = True
connectivity.inputs.force = True
print(connectivity.cmdline)
connectivity.run()
Distance adjacency matrix:
#example
distance = MakeConnectome()
distance.inputs.in_file = '/Users/xxie/sample_data/dwipreproc/cuda_tracking/_session_id_002_subject_id_01/sub-01_ses-002_gmwmi2wm.tck'
distance.inputs.in_parc = 'outputs/atlas_in_dwi_syn_int.nii.gz'
distance.inputs.out_file = 'outputs/distances.csv'
distance.inputs.scale_length = True
distance.inputs.stat_edge = 'mean'
distance.inputs.symmetric = True
distance.inputs.zero_diag = True
distance.inputs.force = True
print(distance.cmdline)
distance.run()
Visualize:
#example
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
# load matrices:
CM = pd.read_csv('outputs/connectome.csv', header = None)
DM = pd.read_csv('outputs/distances.csv', header = None)
# Visualize:
fig, (ax1, ax2) = plt.subplots(1,2, sharey = True, figsize = (12,12))
# log1p computes log(1 + x), which tolerates the zeroed diagonal
cdk = ax1.imshow(np.log1p(CM), cmap=plt.get_cmap('inferno'), interpolation='nearest')
ax1.set_title('DK86 Connectivity Matrix')
cbar=fig.colorbar(cdk, ax=ax1, shrink=0.4)
cbar.set_label('Log Scale Streamline Counts')
ddk=ax2.imshow(DM, interpolation = 'nearest')
ax2.set_title('DK86 Distance Adjacency Matrix')
dbar=fig.colorbar(ddk, ax=ax2, shrink=0.4)
dbar.set_label('Mean Streamline Length (mm)')