Volume-wise T2*/S0 estimation with t2smap

Volume-wise T2*/S0 estimation with t2smap

Use tedana.workflows.t2smap_workflow() [DuPre et al., 2021] to calculate volume-wise T2*/S0, as in Power et al. [2018] and Heunis et al. [2021].

import os
from glob import glob

import matplotlib.pyplot as plt
import numpy as np
from myst_nb import glue
from nilearn import image, plotting
from repo2data.repo2data import Repo2Data
from tedana import workflows

# Install the data if running locally, or point to cached data if running on neurolibre
DATA_REQ_FILE = os.path.join("../binder/data_requirement.json")

# Download data via repo2data and resolve the first returned path to an
# absolute location on disk.
data_path = os.path.abspath(Repo2Data(DATA_REQ_FILE).install()[0])
/opt/hostedtoolcache/Python/3.10.18/x64/lib/python3.10/site-packages/tqdm/auto.py:21: TqdmWarning: IProgress not found. Please update jupyter and ipywidgets. See https://ipywidgets.readthedocs.io/en/stable/user_install.html
  from .autonotebook import tqdm as notebook_tqdm
It is highly recommended to configure Git before using DataLad. Set both 'user.name' and 'user.email' configuration variables.
---- repo2data starting ----
/opt/hostedtoolcache/Python/3.10.18/x64/lib/python3.10/site-packages/repo2data
Config from file :
../binder/data_requirement.json
Destination:
./../data/ds006193/multi-echo-data-analysis

Info : Starting to download from datalad https://github.com/OpenNeuroDatasets/ds006193.git ...
[INFO] Attempting a clone into /home/runner/work/multi-echo-data-analysis/multi-echo-data-analysis/data/ds006193/multi-echo-data-analysis 
[INFO] Attempting to clone from https://github.com/OpenNeuroDatasets/ds006193.git to /home/runner/work/multi-echo-data-analysis/multi-echo-data-analysis/data/ds006193/multi-echo-data-analysis 
[INFO] Start enumerating objects 
[INFO] Start counting objects 
[INFO] Start compressing objects 
[INFO] Start receiving objects 
[INFO] Start resolving deltas 
[INFO] Completed clone attempts for Dataset(/home/runner/work/multi-echo-data-analysis/multi-echo-data-analysis/data/ds006193/multi-echo-data-analysis) 
install(error): /home/runner/work/multi-echo-data-analysis/multi-echo-data-analysis/data/ds006193/multi-echo-data-analysis (dataset) [No working git-annex installation of version >= 8.20200309. Visit http://handbook.datalad.org/r.html?install for instructions on how to install DataLad and git-annex.] [No working git-annex installation of version >= 8.20200309. Visit http://handbook.datalad.org/r.html?install for instructions on how to install DataLad and git-annex.]
---------------------------------------------------------------------------
CalledProcessError                        Traceback (most recent call last)
Cell In[1], line 16
     14 # Download data
     15 repo2data = Repo2Data(DATA_REQ_FILE)
---> 16 data_path = repo2data.install()
     17 data_path = os.path.abspath(data_path[0])

File /opt/hostedtoolcache/Python/3.10.18/x64/lib/python3.10/site-packages/repo2data/repo2data.py:106, in Repo2Data.install(self)
    103     for key, value in self._data_requirement_file.items():
    104         if isinstance(value, dict):
    105             ret += [Repo2DataChild(value, self._use_server,
--> 106                                    self._data_requirement_path,key,self._server_dst_folder).install()]
    107 # if not, it is a single assignment
    108 else:
    109     ret += [Repo2DataChild(self._data_requirement_file,
    110                            self._use_server, self._data_requirement_path, None, self._server_dst_folder).install()]

File /opt/hostedtoolcache/Python/3.10.18/x64/lib/python3.10/site-packages/repo2data/repo2data.py:364, in Repo2DataChild.install(self)
    362     os.makedirs(self._dst_path)
    363 # Downloading with the right method, depending on the src type
--> 364 self._scan_dl_type()
    365 # If needed, decompression of the data
    366 self._archive_decompress()

File /opt/hostedtoolcache/Python/3.10.18/x64/lib/python3.10/site-packages/repo2data/repo2data.py:332, in Repo2DataChild._scan_dl_type(self)
    330 # if the source link has a .git, we use datalad
    331 elif re.match(".*?\\.git$", self._data_requirement_file["src"]):
--> 332     self._datalad_download()
    333 # or coming from google drive
    334 elif re.match(".*?(drive\\.google\\.com).*?", self._data_requirement_file["src"]):

File /opt/hostedtoolcache/Python/3.10.18/x64/lib/python3.10/site-packages/repo2data/repo2data.py:263, in Repo2DataChild._datalad_download(self)
    260 print("Info : Starting to download from datalad %s ..." %
    261       (self._data_requirement_file["src"]))
    262 try:
--> 263     subprocess.check_call(
    264         ['datalad', 'install', self._dst_path, "-s", self._data_requirement_file["src"]])
    265 except FileNotFoundError:
    266     print("Error: datalad does not appear to be installed")

File /opt/hostedtoolcache/Python/3.10.18/x64/lib/python3.10/subprocess.py:369, in check_call(*popenargs, **kwargs)
    367     if cmd is None:
    368         cmd = popenargs[0]
--> 369     raise CalledProcessError(retcode, cmd)
    370 return 0

CalledProcessError: Command '['datalad', 'install', './../data/ds006193/multi-echo-data-analysis', '-s', 'https://github.com/OpenNeuroDatasets/ds006193.git']' returned non-zero exit status 1.
func_dir = os.path.join(data_path, "func/")
# Paths to the four echo-wise partially-preprocessed BOLD files (echo-1..echo-4).
# Built with a comprehension instead of four copy-pasted joins so the echo
# numbering cannot drift out of sync between entries.
data_files = [
    os.path.join(
        func_dir,
        f"sub-04570_task-rest_echo-{echo}_space-scanner_desc-partialPreproc_bold.nii.gz",
    )
    for echo in range(1, 5)
]
# Echo times, one per file in data_files (presumably milliseconds — TODO confirm
# against the dataset's sidecar JSON).
echo_times = [12.0, 28.0, 44.0, 60.0]
# Brain mask and confounds produced by the partial preprocessing.
mask_file = os.path.join(
    func_dir, "sub-04570_task-rest_space-scanner_desc-brain_mask.nii.gz"
)
confounds_file = os.path.join(
    func_dir, "sub-04570_task-rest_desc-confounds_timeseries.tsv"
)

out_dir = os.path.join(data_path, "fit")
# Volume-wise ("ts") log-linear T2*/S0 estimation across the four echoes.
t2smap_kwargs = {
    "out_dir": out_dir,
    "mask": mask_file,
    "prefix": "sub-04570_task-rest_space-scanner",
    "fittype": "loglin",
    "fitmode": "ts",
}
workflows.t2smap_workflow(data_files, echo_times, **t2smap_kwargs)
# List the workflow's output filenames. Sorting the basenames is equivalent to
# sorting the full paths here because every file shares the same directory.
out_files = sorted(
    os.path.basename(path) for path in glob(os.path.join(out_dir, "*"))
)
print("\n".join(out_files))
# Carpet plots of the optimally combined data and the volume-wise T2*/S0 maps.
fig, axes = plt.subplots(figsize=(16, 16), nrows=3)

carpet_specs = [
    ("desc-optcom_bold", "Optimally Combined Data"),
    ("T2starmap", "T2* Estimates"),
    ("S0map", "S0 Estimates"),
]
for ax, (suffix, title) in zip(axes, carpet_specs):
    plotting.plot_carpet(
        os.path.join(
            out_dir, f"sub-04570_task-rest_space-scanner_{suffix}.nii.gz"
        ),
        axes=ax,
        figure=fig,
    )
    ax.set_title(title, fontsize=20)

# Hide the x-axis on all but the bottom row so the shared time axis reads once.
for ax in axes[:2]:
    ax.xaxis.set_visible(False)
    ax.spines["bottom"].set_visible(False)
fig.tight_layout()

glue("figure_volumewise_t2ss0_carpets", fig, display=False)
# Mean (across volumes) of the volume-wise T2* estimates.
fig, ax = plt.subplots(figsize=(16, 8))
mean_t2star = image.mean_img(
    os.path.join(out_dir, "sub-04570_task-rest_space-scanner_T2starmap.nii.gz")
)
plotting.plot_stat_map(
    mean_t2star,
    vmax=0.6,
    draw_cross=False,
    bg_img=None,
    figure=fig,
    axes=ax,
)
glue("figure_mean_volumewise_t2s", fig, display=False)
# Mean (across volumes) of the volume-wise S0 estimates.
fig, ax = plt.subplots(figsize=(16, 8))
mean_s0 = image.mean_img(
    os.path.join(out_dir, "sub-04570_task-rest_space-scanner_S0map.nii.gz")
)
plotting.plot_stat_map(
    mean_s0,
    vmax=8000,
    draw_cross=False,
    bg_img=None,
    figure=fig,
    axes=ax,
)
glue("figure_mean_volumewise_s0", fig, display=False)
# Plot the mean image of each individual echo and of the optimally combined
# data, one row per image. A loop replaces five copy-pasted plot_epi calls so
# the shared plotting options are stated once.
fig, axes = plt.subplots(figsize=(16, 15), nrows=5)
optcom_file = os.path.join(
    out_dir, "sub-04570_task-rest_space-scanner_desc-optcom_bold.nii.gz"
)
for ax, img_file in zip(axes, data_files + [optcom_file]):
    plotting.plot_epi(
        image.mean_img(img_file),
        draw_cross=False,
        bg_img=None,
        cut_coords=[-10, 0, 10, 20, 30, 40, 50, 60, 70],
        display_mode="z",
        axes=ax,
    )
glue("figure_mean_echos_and_optcom", fig, display=False)
# Voxel-wise temporal SNR (mean / std over time), masked to the brain, for the
# second echo (TE = 28 ms, "TE30") and for the optimally combined data.
TSNR_FORMULA = "(np.nanmean(img, axis=3) / np.nanstd(img, axis=3)) * mask"

te30_tsnr = image.math_img(TSNR_FORMULA, img=data_files[1], mask=mask_file)
oc_tsnr = image.math_img(
    TSNR_FORMULA,
    img=os.path.join(
        out_dir, "sub-04570_task-rest_space-scanner_desc-optcom_bold.nii.gz"
    ),
    mask=mask_file,
)
# Shared color scale, anchored to the optimally combined map's maximum.
vmax = np.nanmax(np.abs(oc_tsnr.get_fdata()))

fig, axes = plt.subplots(figsize=(10, 8), nrows=2)
tsnr_maps = [
    (te30_tsnr, "TE30 TSNR"),
    (oc_tsnr, "Optimal Combination TSNR"),
]
for ax, (tsnr_img, title) in zip(axes, tsnr_maps):
    plotting.plot_stat_map(
        tsnr_img,
        draw_cross=False,
        bg_img=None,
        threshold=0.1,
        # NOTE(review): the repeated 10 looks like a typo for [0, 10, 20];
        # preserved as-is to keep behavior identical.
        cut_coords=[0, 10, 10],
        vmax=vmax,
        symmetric_cbar=False,
        axes=ax,
    )
    ax.set_title(title, fontsize=16)
glue("figure_t2snr_te30_and_optcom", fig, display=False)
# Carpet plot of a single echo's time series.
# NOTE(review): the glue key says "echo3" but data_files[1] is the echo-2
# (TE = 28 ms) file — confirm which echo is intended. Key preserved as-is
# because downstream pages reference it.
fig, ax = plt.subplots(figsize=(16, 8))
plotting.plot_carpet(data_files[1], axes=ax)
glue("figure_echo3_carpet", fig, display=False)
# Carpet plot of the optimally combined time series for comparison.
fig, ax = plt.subplots(figsize=(16, 8))
optcom_bold = os.path.join(
    out_dir, "sub-04570_task-rest_space-scanner_desc-optcom_bold.nii.gz"
)
plotting.plot_carpet(optcom_bold, axes=ax)
glue("figure_carpet_optcom", fig, display=False)