Commit

Add tools for fMRI analysis

hrayrhar committed Dec 4, 2018
1 parent 341805f commit cb0a195

Showing 2 changed files with 209 additions and 0 deletions.
96 changes: 96 additions & 0 deletions experiments/fmri/fmri.py
@@ -0,0 +1,96 @@
from __future__ import print_function
from __future__ import division

import nibabel as nib
import numpy as np
from mpl_toolkits.mplot3d import Axes3D  # noqa: F401 -- registers the '3d' projection used below


def compute_variance_of_cluster(clusters, cluster_index, coords):
    """ Mean squared distance of the cluster's voxel coordinates from their centroid. """
    filtered = coords[clusters == cluster_index]
    return ((filtered - filtered.mean(axis=0)) ** 2).sum(axis=1).mean(axis=0)


def plot_least_varying(plt, clusters, coords, left, right):
    """ 3D scatter plot of the spatially most compact clusters (ranks left..right-1 by variance). """
n_clusters = np.max(clusters) + 1
variances = [compute_variance_of_cluster(clusters, k, coords) for k in range(n_clusters)]
order = np.argsort(variances)
fig = plt.figure(figsize=(6, 6))
ax = fig.gca(projection='3d')
for k in range(left, right):
print(variances[order[k]])
index = (clusters == order[k])
filtered = coords[index]
ax.scatter(filtered[:, 0], filtered[:, 1], filtered[:, 2], s=5, alpha=0.1)


def plot_most_important(plt, clusters, importance, coords, left, right, mode='absolute'):
    """ 3D scatter plot of the most important clusters (ranks left..right-1 by importance). """
    a = np.array(importance).copy()
    if mode == 'relative':
        # Normalize each cluster's importance by its size.
        n_clusters = np.max(clusters) + 1
        for j in range(n_clusters):
            cnt = np.sum(clusters == j)
            a[j] /= (cnt + 1e-4)

order = np.argsort(-a)
fig = plt.figure(figsize=(6, 6))
ax = fig.gca(projection='3d')
for k in range(left, right):
print(a[order[k]])
index = (clusters == order[k])
filtered = coords[index]
ax.scatter(filtered[:, 0], filtered[:, 1], filtered[:, 2], s=5, alpha=0.1)


def plot_biggest(plt, clusters, coords, left, right):
    """ 3D scatter plot of the largest clusters by voxel count (ranks left..right-1). """
n_clusters = np.max(clusters) + 1
cnt = [0] * n_clusters
for j in range(n_clusters):
cnt[j] = np.sum(clusters == j)
order = np.argsort(-np.array(cnt))
fig = plt.figure(figsize=(6, 6))
ax = fig.gca(projection='3d')
for k in range(left, right):
print(cnt[order[k]])
index = (clusters == order[k])
filtered = coords[index]
ax.scatter(filtered[:, 0], filtered[:, 1], filtered[:, 2], s=5, alpha=0.1)
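

def _example_scatter_plots():
    """ Illustrative usage sketch for the three scatter helpers above (not part of any
    analysis pipeline). Assumes a matplotlib version where Figure.gca accepts a
    projection argument; the clusters, coordinates and importance scores are synthetic.
    """
    import matplotlib.pyplot as plt

    n_voxels, n_clusters = 1000, 10
    coords = np.random.uniform(0, 50, size=(n_voxels, 3))        # fake voxel coordinates
    clusters = np.random.randint(0, n_clusters, size=n_voxels)   # fake hard assignments
    importance = np.random.rand(n_clusters)                      # fake cluster scores

    plot_least_varying(plt, clusters, coords, left=0, right=3)
    plot_most_important(plt, clusters, importance, coords, left=0, right=3, mode='relative')
    plot_biggest(plt, clusters, coords, left=0, right=3)
    plt.show()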


def plot_clusters_probabilistic(plotting, prob_clusters, coords, source_img):
    """ Plot a probabilistic atlas.
    :param plotting: nilearn.plotting
    :param prob_clusters: (n_clusters, n_voxels)
    :param coords: (n_voxels, 3)
    :param source_img: 4D image whose shape and affine define the output grid
    :return:
    """
    X, Y, Z, T = source_img.shape
    n_clusters = prob_clusters.shape[0]
    a = np.zeros((X, Y, Z, n_clusters))  # one 3D probability map per cluster
    for j in range(n_clusters):
        for i in range(prob_clusters.shape[1]):
            x = int(coords[i, 0])
            y = int(coords[i, 1])
            z = int(coords[i, 2])
            a[x, y, z, j] = prob_clusters[j, i]
    atlas = nib.Nifti1Image(a, affine=source_img.affine)
    plotting.plot_prob_atlas(atlas, bg_img=False)


def plot_clusters(plotting, clusters, coords, source_img, output_file=None, figure=None):
    """ Plot a hard cluster assignment as an ROI atlas.
    :param plotting: nilearn.plotting
    :param clusters: (n_voxels,)
    :param coords: (n_voxels, 3)
    :param source_img: 4D image whose shape and affine define the output grid
    :param output_file: if given, the plot is saved to this path
    :param figure: figure param to be passed to plotting.plot_roi function
    :return: the display object returned by plotting.plot_roi
    """
X, Y, Z, T = source_img.shape
a = np.zeros((X, Y, Z))
for i in range(clusters.shape[0]):
x = int(coords[i, 0])
y = int(coords[i, 1])
z = int(coords[i, 2])
a[x, y, z] = clusters[i]
img = nib.Nifti1Image(a, affine=source_img.affine)
return plotting.plot_roi(img, output_file=output_file, figure=figure)
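

def _example_plot_clusters():
    """ Illustrative usage sketch for plot_clusters (not a real analysis). Assumes nilearn
    is installed; the 4D source image, the voxel coordinates and the hard assignments are
    synthetic, and coords are taken to be voxel indices into source_img. The output file
    name is an arbitrary example choice.
    """
    from nilearn import plotting

    X, Y, Z, T = 20, 20, 20, 5
    source_img = nib.Nifti1Image(np.random.rand(X, Y, Z, T), affine=np.eye(4))

    # Every voxel of the grid, with a random label in 1..5 (0 would be background).
    coords = np.array([[x, y, z] for x in range(X) for y in range(Y) for z in range(Z)])
    clusters = np.random.randint(1, 6, size=coords.shape[0])

    plot_clusters(plotting, clusters, coords, source_img, output_file='clusters.png')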
113 changes: 113 additions & 0 deletions experiments/utils.py
@@ -85,3 +85,116 @@ def plot_for_next_timestep(plt, data, covs, title="Negative log-likelihood of es
plt.show()
print("NLL for next time step = {}".format(np.mean(nll)))
return np.mean(nll)


""" Helper functions for working with large low-rank plus diagonal matrices
"""


def diag_from_left(A, d):
""" diag(d) A """
m, n = A.shape
X = np.zeros_like(A)
for i in range(m):
X[i, :] = A[i, :] * d[i]
return X


def diag_from_right(A, d):
""" A diag(d) """
m, n = A.shape
X = np.zeros_like(A)
for i in range(n):
X[:, i] = A[:, i] * d[i]
return X
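

def _check_diag_helpers(m=4, n=6, seed=0):
    """ Illustrative check that the two helpers above match plain NumPy broadcasting
    (row scaling for diag_from_left, column scaling for diag_from_right). Sizes and
    the seed are arbitrary example choices.
    """
    rng = np.random.RandomState(seed)
    A = rng.normal(size=(m, n))
    d_m, d_n = rng.normal(size=m), rng.normal(size=n)
    assert np.allclose(diag_from_left(A, d_m), d_m[:, np.newaxis] * A)
    assert np.allclose(diag_from_right(A, d_n), A * d_n[np.newaxis, :])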


def inverse(A, d):
    """ Compute the inverse of A^T A + diag(d) via the Woodbury identity, in
    O(m^2 n + m^3) time, i.e. faster than n^2 when m << n.
    A - (m, n)
    d - (n,)
    Return: V, d_inv such that the inverse equals diag(d_inv) - V^T V
    """
m, n = A.shape
d_inv = 1 / d
M = np.eye(m) + np.dot(diag_from_right(A, d_inv), A.T) # m^2 * n
M_inv = np.linalg.inv(M) # m^3
R = np.linalg.cholesky(M_inv) # m^3
V = diag_from_left(np.dot(A.T, R), d_inv).T # m^2 * n
return V, d_inv
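

def _check_inverse_small(m=5, n=200, seed=0):
    """ Illustrative sanity check for `inverse` on a small random problem: compares
    diag(d_inv) - V^T V against a directly computed n x n inverse. Sizes and the seed
    are arbitrary example choices.
    """
    rng = np.random.RandomState(seed)
    A = rng.normal(size=(m, n))
    d = rng.uniform(1.0, 2.0, size=n)  # strictly positive diagonal
    V, d_inv = inverse(A, d)
    direct = np.linalg.inv(np.dot(A.T, A) + np.diag(d))
    woodbury = np.diag(d_inv) - np.dot(V.T, V)
    print("max abs error: {}".format(np.abs(direct - woodbury).max()))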


def compute_inverses(A):
    """ Given the low-rank parts of correlation matrices Sigma_t = diag(d_t) + A_t^T A_t,
    with d_t chosen so that diag(Sigma_t) = 1, compute a low-rank plus diagonal
    representation of the inverse correlation matrices.
    Returns: B, d_inv such that Sigma^{-1}_t = diag(d_inv_t) - B_t^T B_t
    """
    nt = len(A)
    d = [None] * nt
    d_inv = [None] * nt
    B = [None] * nt
    for t in range(nt):
        d[t] = 1 - (A[t] ** 2).sum(axis=0)  # complete the unit diagonal

    for t in range(nt):
        B[t], d_inv[t] = inverse(A[t], d[t])

    return B, d_inv
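

def _check_compute_inverses_small(nt=3, m=5, n=200, seed=0):
    """ Illustrative sanity check for `compute_inverses`: for random low-rank parts,
    scaled so the implied diagonal stays positive, verify that
    (diag(d_t) + A_t^T A_t) (diag(d_inv_t) - B_t^T B_t) is close to the identity.
    Sizes and the scaling factor are arbitrary example choices.
    """
    rng = np.random.RandomState(seed)
    A = [0.1 * rng.normal(size=(m, n)) for _ in range(nt)]
    B, d_inv = compute_inverses(A)
    for t in range(nt):
        d = 1 - (A[t] ** 2).sum(axis=0)
        sigma = np.diag(d) + np.dot(A[t].T, A[t])
        sigma_inv = np.diag(d_inv[t]) - np.dot(B[t].T, B[t])
        err = np.abs(np.dot(sigma, sigma_inv) - np.eye(n)).max()
        print("t = {}, max abs error: {}".format(t, err))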


def estimate_diff_norm(A_1, d_1, A_2, d_2, n_iters=300):
    """ Estimates ||(diag(d_1) - A_1^T A_1) - (diag(d_2) - A_2^T A_2)||_2 using the power method.
    """
    ret = 0
    x = np.random.normal(size=(A_1.shape[1], 1))
    for iteration in range(n_iters):
        # Apply the symmetric difference matrix to x without ever forming it explicitly.
        x = -np.dot(A_1.T, np.dot(A_1, x)) + np.dot(A_2.T, np.dot(A_2, x)) + (d_1 - d_2).reshape((-1, 1)) * x
        s = np.linalg.norm(x, ord=2)
        x = x / s
        ret = s
        # print("\tIteration: {}, ret: {}".format(iteration, ret), end='\r')
    return ret
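

def _check_estimate_diff_norm(m=5, n=100, seed=0):
    """ Illustrative sanity check for `estimate_diff_norm`: compare the power-method
    estimate against the exact spectral norm of the densely formed difference matrix.
    Sizes and the seed are arbitrary example choices.
    """
    rng = np.random.RandomState(seed)
    A_1, A_2 = rng.normal(size=(m, n)), rng.normal(size=(m, n))
    d_1, d_2 = rng.uniform(size=n), rng.uniform(size=n)
    diff = (np.diag(d_1) - np.dot(A_1.T, A_1)) - (np.diag(d_2) - np.dot(A_2.T, A_2))
    exact = np.linalg.norm(diff, ord=2)
    estimate = estimate_diff_norm(A_1, d_1, A_2, d_2)
    print("exact: {}, power-method estimate: {}".format(exact, estimate))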


def compute_diff_norms(A):
    """ Given the low-rank parts of correlation matrices, compute the spectral norm of the
    difference between inverse correlation matrices at neighboring time steps.
    """
B, d_inv = compute_inverses(A)
nt = len(A)
diffs = [None] * (nt-1)
for t in range(nt - 1):
print("Estimating norm of difference at time step: {}".format(t))
diffs[t] = estimate_diff_norm(B[t], d_inv[t], B[t + 1], d_inv[t + 1])
return diffs


def compute_diff_norm_fro(A, d_1, B, d_2):
    """ Computes ||(diag(d_1) - A^T A) - (diag(d_2) - B^T B)||_F analytically,
    without forming the n x n matrices.
    """
# First compute || B^T B - A^T A ||^2_F
_, sigma_a, _ = np.linalg.svd(A, full_matrices=False)
_, sigma_b, _ = np.linalg.svd(B, full_matrices=False)
low_rank_norm = np.sum(sigma_a ** 4) - 2 * np.trace(np.dot(np.dot(B, A.T), np.dot(A, B.T))) + np.sum(sigma_b ** 4)

# Let X = (B^T B - A^T A), D = diag(d_1 - d_2). Compute ||X + D||^2_F
d = d_1 - d_2
ret = low_rank_norm + (np.linalg.norm(d) ** 2) + 2 * np.inner(np.sum(B ** 2 - A ** 2, axis=0), d)
return np.sqrt(ret)
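

def _check_compute_diff_norm_fro(m=5, n=100, seed=0):
    """ Illustrative sanity check for `compute_diff_norm_fro`: compare the analytic value
    against the Frobenius norm of the explicitly formed difference matrix. Sizes and the
    seed are arbitrary example choices.
    """
    rng = np.random.RandomState(seed)
    A, B = rng.normal(size=(m, n)), rng.normal(size=(m, n))
    d_1, d_2 = rng.uniform(size=n), rng.uniform(size=n)
    diff = (np.diag(d_1) - np.dot(A.T, A)) - (np.diag(d_2) - np.dot(B.T, B))
    explicit = np.linalg.norm(diff, ord='fro')
    analytic = compute_diff_norm_fro(A, d_1, B, d_2)
    print("explicit: {}, analytic: {}".format(explicit, analytic))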


def compute_diff_norms_fro(A):
    """ Given the low-rank parts of correlation matrices, compute the Frobenius norm of the
    difference between inverse correlation matrices at neighboring time steps.
    """
B, d_inv = compute_inverses(A)
nt = len(A)
diffs = [None] * (nt-1)
for t in range(nt - 1):
print("Calculating Frobenius norm of difference at time step: {}".format(t))
diffs[t] = compute_diff_norm_fro(B[t], d_inv[t], B[t + 1], d_inv[t + 1])
return diffs
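

def _example_diff_norms(nt=4, m=5, n=200, seed=0):
    """ Illustrative end-to-end usage of compute_diff_norms and compute_diff_norms_fro on
    random low-rank parts, scaled so the implied diagonals stay positive. All sizes are
    arbitrary example choices.
    """
    rng = np.random.RandomState(seed)
    A = [0.1 * rng.normal(size=(m, n)) for _ in range(nt)]
    spectral = compute_diff_norms(A)
    frobenius = compute_diff_norms_fro(A)
    for t in range(nt - 1):
        print("t = {}: spectral = {:.4f}, Frobenius = {:.4f}".format(t, spectral[t], frobenius[t]))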
