Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Fixed a bunch of codacy issues. #52

Merged
merged 3 commits into from
Apr 2, 2020
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 1 addition & 3 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -2,8 +2,6 @@

[![Codacy Badge](https://api.codacy.com/project/badge/Grade/963ddefc14884a289b800497e74c4e45)](https://app.codacy.com/gh/courtois-neuromod/dypac?utm_source=github.com&utm_medium=referral&utm_content=courtois-neuromod/dypac&utm_campaign=Badge_Grade_Dashboard) [![CircleCI](https://circleci.com/gh/courtois-neuromod/dypac.svg?style=svg)](https://circleci.com/gh/courtois-neuromod/dypac) [![codecov](https://codecov.io/gh/courtois-neuromod/dypac/branch/master/graph/badge.svg)](https://codecov.io/gh/courtois-neuromod/dypac)



Detecting stable dynamic parcellation in fMRI data on the full brain.

The algorithm is a simple two level clustering, one on sliding time windows, and one on indicator functions of parcels aggregated over many windows. Optionally the approach can be iterated over several runs.
The algorithm is a simple two level clustering, one on sliding time windows, and one on indicator functions of parcels aggregated over many windows. Optionally the approach can be iterated over several runs.
4 changes: 1 addition & 3 deletions dypac/__init__.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,4 @@
"""
Dynamic Parcel Aggregation with Clustering (dypac)
"""
"""Dynamic Parcel Aggregation with Clustering (dypac)."""
from .dypac import dypac
from .bascpp import replicate_clusters, find_states, stab_maps
__all__ = ['dypac', 'replicate_clusters', 'find_states', 'stab_maps']
6 changes: 3 additions & 3 deletions dypac/bascpp.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@
# License: BSD 3 clause
from tqdm import tqdm

from scipy.sparse import csr_matrix, vstack, find
from scipy.sparse import csr_matrix, find
import numpy as np

from sklearn.cluster import k_means
Expand Down Expand Up @@ -61,7 +61,7 @@ def _start_window(n_time, n_replications, subsample_size):
def _trim_states(onehot, states, n_states, verbose, threshold_sim):
"""Trim the states clusters to exclude outliers."""
for ss in tqdm(range(n_states), disable=not verbose, desc="Trimming states"):
[ix, iy, val] = find(onehot[states == ss, :])
ix, iy, _ = find(onehot[states == ss, :])
size_onehot = np.array(onehot[states == ss, :].sum(axis=1)).flatten()
ref_cluster = np.array(onehot[states == ss, :].mean(dtype="float", axis=0))
avg_stab = np.bincount(ix, weights=ref_cluster[0,iy].flatten())
Expand Down Expand Up @@ -120,7 +120,7 @@ def replicate_clusters(
samp = scale(samp, axis=1)
if embedding.shape[0] > 0:
samp = np.concatenate([samp, embedding], axis=1)
cent, part[rr, :], inert = k_means(
_, part[rr, :], _ = k_means(
samp,
n_clusters=n_clusters,
init="k-means++",
Expand Down
21 changes: 13 additions & 8 deletions dypac/dypac.py
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,8 @@


class dypac(BaseDecomposition):
"""Perform Stable Dynamic Cluster Analysis.
"""
Perform Stable Dynamic Cluster Analysis.

Parameters
----------
Expand Down Expand Up @@ -130,7 +131,6 @@ class dypac(BaseDecomposition):
See http://nilearn.github.io/manipulating_images/input_output.html
The mask of the data. If no mask was given at masker creation, contains
the automatically computed mask.

"""

def __init__(
Expand Down Expand Up @@ -160,6 +160,7 @@ def __init__(
memory_level=0,
verbose=1,
):
"""Set up default attributes for the class."""
# All those settings are taken from nilearn BaseDecomposition
self.random_state = random_state
self.mask = mask
Expand Down Expand Up @@ -190,6 +191,7 @@ def __init__(
self.threshold_sim = threshold_sim

def _check_components_(self):
"""Check for presence of estimated components."""
if not hasattr(self, "components_"):
raise ValueError(
"Object has no components_ attribute. "
Expand All @@ -198,7 +200,8 @@ def _check_components_(self):
)

def fit(self, imgs, confounds=None):
"""Compute the mask and the dynamic parcels across datasets
"""
Compute the mask and the dynamic parcels across datasets.

Parameters
----------
Expand All @@ -217,7 +220,6 @@ def fit(self, imgs, confounds=None):
self: object
Returns the instance itself. Contains attributes listed
at the object level.

"""
# Base fit for decomposition estimators : compute the embedded masker
if isinstance(imgs, str):
Expand Down Expand Up @@ -306,9 +308,12 @@ def _mask_and_reduce(self, imgs, confounds=None):
stable dynamic parcels from a list of 4D fMRI datasets.

Returns
------
-------
stab_maps: ndarray
Concatenation of dynamic parcels across all datasets.
stability maps of each state.

dwell_time: ndarray
dwell time of each state.
"""

for ind, img, confound in zip(range(len(imgs)), imgs, confounds):
Expand Down Expand Up @@ -353,7 +358,7 @@ def _mask_and_reduce(self, imgs, confounds=None):
return stab_maps, dwell_time

def transform_sparse(self, img, confound=None):
"""Transform a 4D dataset in a component space"""
"""Transform a 4D dataset in a component space."""
self._check_components_()
this_data = self.masker_.transform(img, confound)
del img
Expand All @@ -363,6 +368,6 @@ def transform_sparse(self, img, confound=None):
return reg.coef_

def inverse_transform_sparse(self, weights):
"""Transform component weights as a 4D dataset"""
"""Transform component weights as a 4D dataset."""
self._check_components_()
self.masker_.inverse_transform(weights * self.components_)