Merge pull request #11 from fish-quant/release/v0.1.0
v0.1.0
Henley13 authored May 15, 2020
2 parents 48bf067 + 47342cf commit eb3a092
Showing 43 changed files with 14,549 additions and 0 deletions.
20 changes: 20 additions & 0 deletions .gitignore
@@ -0,0 +1,20 @@
# Dot files
.idea/
.DS_Store

# Packaging related files
MANIFEST
build/
dist/
big_fish.egg-info/

# Notebooks
notebooks/old
notebooks/.ipynb_checkpoints

# Data
data/input/*
data/output/*

# Cache
__pycache__/
Empty file added MANIFEST.in
2 changes: 2 additions & 0 deletions Makefile
@@ -0,0 +1,2 @@
init:
	pip install -r requirements.txt
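
A usage note, not part of the commit: running make init from the repository root installs the pinned dependencies, equivalent to calling pip install -r requirements.txt directly.

    make init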
17 changes: 17 additions & 0 deletions bigfish/classification/__init__.py
@@ -0,0 +1,17 @@
# -*- coding: utf-8 -*-

"""
The bigfish.classification module includes models to classify the
localization patterns of RNAs.
"""

# from .squeezenet import SqueezeNet0
from .features import get_features, get_features_name

# ### Load models ###

_features = ["get_features", "get_features_name"]

# _squeezenet = ["SqueezeNet0"]

__all__ = _features
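
For orientation, a minimal import sketch (not part of the commit) showing the public names this __init__ exposes; the signatures of both functions live in bigfish/classification/features.py, added elsewhere in this diff:

    # The two names placed in __all__ form the module's public interface.
    from bigfish.classification import get_features, get_features_name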
101 changes: 101 additions & 0 deletions bigfish/classification/base.py
@@ -0,0 +1,101 @@
# -*- coding: utf-8 -*-

"""
General classes and methods to use the models.
"""

from abc import ABCMeta, abstractmethod

from tensorflow.python.keras.optimizers import (Adam, Adadelta, Adagrad,
Adamax, SGD)


# ### General models ###

class BaseModel(metaclass=ABCMeta):
    """Abstract base class for the classification models.

    Subclasses must implement each fit/predict/evaluate method below, in
    both an in-memory and a generator-based variant.
    """

    def __init__(self):
        pass

@abstractmethod
def fit(self, train_data, train_label, validation_data, validation_label,
batch_size, nb_epochs):
pass

@abstractmethod
def fit_generator(self, train_generator, validation_generator, nb_epochs,
nb_workers=1, multiprocessing=False):
pass

@abstractmethod
def predict(self, data, return_probability=False):
pass

@abstractmethod
def predict_generator(self, generator, return_probability=False,
nb_workers=1, multiprocessing=False):
pass

@abstractmethod
def predict_probability(self, data):
pass

@abstractmethod
def predict_probability_generator(self, generator,
nb_workers=1, multiprocessing=False):
pass

@abstractmethod
def evaluate(self, data, label):
pass

@abstractmethod
def evaluate_generator(self, generator, nb_workers=1,
multiprocessing=False):
pass
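
# Illustrative sketch, not part of this commit: ABCMeta prevents direct
# instantiation, so BaseModel() raises TypeError until every abstract
# method above is overridden. A minimal concrete skeleton would look like:
#
#     class MyModel(BaseModel):
#         def fit(self, train_data, train_label, validation_data,
#                 validation_label, batch_size, nb_epochs):
#             ...  # build and train the network
#         def fit_generator(self, train_generator, validation_generator,
#                           nb_epochs, nb_workers=1, multiprocessing=False):
#             ...
#         def predict(self, data, return_probability=False):
#             ...
#         def predict_generator(self, generator, return_probability=False,
#                               nb_workers=1, multiprocessing=False):
#             ...
#         def predict_probability(self, data):
#             ...
#         def predict_probability_generator(self, generator, nb_workers=1,
#                                           multiprocessing=False):
#             ...
#         def evaluate(self, data, label):
#             ...
#         def evaluate_generator(self, generator, nb_workers=1,
#                                multiprocessing=False):
#             ...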


# ### Optimizer ###

def get_optimizer(optimizer_name="adam", **kwargs):
    """Instantiate the optimizer.

    Parameters
    ----------
    optimizer_name : str
        Name of the optimizer to use ('adam', 'adadelta', 'adagrad',
        'adamax' or 'sgd').
    **kwargs
        Additional keyword arguments forwarded to the optimizer class.

    Returns
    -------
    optimizer : tf.keras.optimizers.Optimizer
        Optimizer instance used in the model.
    """
# TODO use tensorflow optimizer
if optimizer_name == "adam":
optimizer = Adam(**kwargs)
elif optimizer_name == "adadelta":
optimizer = Adadelta(**kwargs)
elif optimizer_name == "adagrad":
optimizer = Adagrad(**kwargs)
elif optimizer_name == "adamax":
optimizer = Adamax(**kwargs)
elif optimizer_name == "sgd":
optimizer = SGD(**kwargs)
else:
        raise ValueError("Instead of {0}, optimizer must be chosen among "
                         "['adam', 'adadelta', 'adagrad', 'adamax', 'sgd']."
                         .format(optimizer_name))

return optimizer
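
# Usage sketch (illustrative, not part of this commit): keyword arguments
# are forwarded verbatim to the chosen optimizer class, so hyperparameter
# names must match the installed TensorFlow/Keras version.
#
#     optimizer = get_optimizer("adam")               # default settings
#     optimizer = get_optimizer("sgd", momentum=0.9)  # kwargs pass through
#     optimizer = get_optimizer("rmsprop")            # raises ValueError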



