refine models
Germey committed Nov 16, 2019
1 parent c33b59d commit 04aa9cb
Showing 3 changed files with 70 additions and 180 deletions.
81 changes: 33 additions & 48 deletions models/basic_cnn.py
@@ -1,68 +1,53 @@
-from model_zoo.model import BaseModel
+from model_zoo import Model
 import tensorflow as tf
 
 
-class BasicCNNModel(BaseModel):
+class BasicCNNModel(Model):
     """
     Basic CNN Model
     """
 
-    def __init__(self, config):
-        """
-        init layers
-        :param config:
-        """
-        super(BasicCNNModel, self).__init__(config)
-        self.bn1 = tf.keras.layers.BatchNormalization()
-        self.conv1 = tf.keras.layers.Conv2D(32, (2, 2), padding='same', activation='relu',
-                                            kernel_initializer='random_uniform')
-        self.pool1 = tf.keras.layers.MaxPool2D(padding='same')
-        self.dropout1 = tf.keras.layers.Dropout(0.5)
-        self.conv2 = tf.keras.layers.Conv2D(32, (2, 2), padding='same', activation='relu',
-                                            kernel_initializer='random_uniform')
-        self.pool2 = tf.keras.layers.MaxPool2D(padding='same')
-        self.dropout2 = tf.keras.layers.Dropout(0.5)
-        self.flatten1 = tf.keras.layers.Flatten()
-        self.dense1 = tf.keras.layers.Dense(128, activation='relu', kernel_initializer='random_uniform')
-        self.dense2 = tf.keras.layers.Dense(10, activation='softmax')
-
-    def call(self, inputs, training=False, mask=None):
+    def inputs(self):
         """
-        build model
-        :param inputs: inputs image
-        :param training:
-        :param mask:
+        Define inputs.
         :return:
         """
-        o = self.bn1(inputs)
-        o = self.conv1(o)
-        o = self.pool1(o)
-        o = self.dropout1(o) if training else o
-        o = self.conv2(o)
-        o = self.pool2(o)
-        o = self.dropout2(o) if training else o
-        o = self.flatten1(o)
-        o = self.dense1(o)
-        o = self.dense2(o)
-        return o
-
-    def get_optimizer(self):
-        """
-        build optimizer
+        return tf.keras.Input(shape=(28, 28, 1))
+
+    def outputs(self, inputs):
+        """
+        Define outputs.
+        """
+        x = tf.keras.layers.BatchNormalization()(inputs)
+        x = tf.keras.layers.Conv2D(32, (2, 2), padding='same', activation='relu',
+                                   kernel_initializer='random_uniform')(x)
+        x = tf.keras.layers.MaxPool2D(padding='same')(x)
+        x = tf.keras.layers.Dropout(0.5)(x)
+        x = tf.keras.layers.Conv2D(32, (2, 2), padding='same', activation='relu',
+                                   kernel_initializer='random_uniform')(x)
+        x = tf.keras.layers.MaxPool2D(padding='same')(x)
+        x = tf.keras.layers.Dropout(0.5)(x)
+        x = tf.keras.layers.Flatten()(x)
+        x = tf.keras.layers.Dense(128, activation='relu', kernel_initializer='random_uniform')(x)
+        return tf.keras.layers.Dense(10, activation='softmax')(x)
+
+    def optimizer(self):
+        """
+        build optimizer.
         :return:
         """
         return tf.keras.optimizers.Adam(lr=self.config.get('learning_rate'))
 
-    def get_loss(self):
+    def loss(self):
         """
-        define loss
+        define loss.
         :return:
         """
         return 'categorical_crossentropy'
 
-    def get_metrics(self):
+    def metrics(self):
         """
-        define metrics
+        define metrics.
         :return:
         """
         return ['accuracy']
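Note: the refactor drops the subclassed call()/get_*() API in favour of declarative inputs()/outputs()/optimizer()/loss()/metrics() hooks. As a rough sketch of how such hooks fit together (an assumption about what model_zoo does internally, not part of this commit), they can be assembled into an ordinary compiled tf.keras model:

import tensorflow as tf

def build_keras_model(model):
    # call the hooks defined by the refactored model classes above
    inputs = model.inputs()
    outputs = model.outputs(inputs)
    keras_model = tf.keras.Model(inputs=inputs, outputs=outputs)
    keras_model.compile(optimizer=model.optimizer(),
                        loss=model.loss(),
                        metrics=model.metrics())
    return keras_model

The helper name build_keras_model is hypothetical; model_zoo's actual wiring and training loop may differ. The sketch is only meant to make the shape of the new API concrete.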
167 changes: 36 additions & 131 deletions models/vgg19.py
@@ -1,149 +1,54 @@
-from model_zoo.model import BaseModel
+from tensorflow_core.python.keras.applications.vgg19 import VGG19
+
+from model_zoo import Model
 import tensorflow as tf
 
 
-class VGG19Model(BaseModel):
+class VGG19Model(Model):
     """
     This model gets no good results, deprecated.
     """
 
-    def __init__(self, config):
-        super(VGG19Model, self).__init__(config)
-        self.num_features = 64
-        # layer1
-        self.conv11 = tf.keras.layers.Conv2D(filters=self.num_features, kernel_size=(3, 3), activation='relu',
-                                             padding='same')
-        self.conv12 = tf.keras.layers.Conv2D(filters=self.num_features, kernel_size=(3, 3), activation='relu',
-                                             padding='same')
-        self.bn1 = tf.keras.layers.BatchNormalization()
-        self.pool1 = tf.keras.layers.MaxPool2D(pool_size=(2, 2), strides=(2, 2))
-        self.drop1 = tf.keras.layers.Dropout(rate=0.5)
-
-        # layer2
-        self.conv21 = tf.keras.layers.Conv2D(filters=2 * self.num_features, kernel_size=(3, 3), activation='relu',
-                                             padding='same')
-        self.conv22 = tf.keras.layers.Conv2D(filters=2 * self.num_features, kernel_size=(3, 3), activation='relu',
-                                             padding='same')
-        self.bn2 = tf.keras.layers.BatchNormalization()
-        self.pool2 = tf.keras.layers.MaxPool2D(pool_size=(2, 2), strides=(2, 2))
-        self.drop2 = tf.keras.layers.Dropout(rate=0.5)
-
-        # layer3
-        self.conv31 = tf.keras.layers.Conv2D(filters=2 * 2 * self.num_features, kernel_size=(3, 3), activation='relu',
-                                             padding='same')
-        self.conv32 = tf.keras.layers.Conv2D(filters=2 * 2 * self.num_features, kernel_size=(3, 3), activation='relu',
-                                             padding='same')
-        self.conv33 = tf.keras.layers.Conv2D(filters=2 * 2 * self.num_features, kernel_size=(3, 3), activation='relu',
-                                             padding='same')
-        self.bn3 = tf.keras.layers.BatchNormalization()
-        self.pool3 = tf.keras.layers.MaxPool2D(pool_size=(2, 2), strides=(2, 2))
-        self.drop3 = tf.keras.layers.Dropout(rate=0.5)
-
-        # layer4
-        self.conv41 = tf.keras.layers.Conv2D(filters=2 * 2 * 2 * self.num_features, kernel_size=(3, 3),
-                                             activation='relu',
-                                             padding='same')
-        self.conv42 = tf.keras.layers.Conv2D(filters=2 * 2 * 2 * self.num_features, kernel_size=(3, 3),
-                                             activation='relu',
-                                             padding='same')
-        self.conv43 = tf.keras.layers.Conv2D(filters=2 * 2 * 2 * self.num_features, kernel_size=(3, 3),
-                                             activation='relu',
-                                             padding='same')
-        self.bn4 = tf.keras.layers.BatchNormalization()
-        self.pool4 = tf.keras.layers.MaxPool2D(pool_size=(2, 2), strides=(2, 2))
-        self.drop4 = tf.keras.layers.Dropout(rate=0.5)
-
-        # layer5
-        self.conv51 = tf.keras.layers.Conv2D(filters=2 * 2 * 2 * self.num_features, kernel_size=(3, 3),
-                                             activation='relu',
-                                             padding='same')
-        self.conv52 = tf.keras.layers.Conv2D(filters=2 * 2 * 2 * self.num_features, kernel_size=(3, 3),
-                                             activation='relu',
-                                             padding='same')
-        self.conv53 = tf.keras.layers.Conv2D(filters=2 * 2 * 2 * self.num_features, kernel_size=(3, 3),
-                                             activation='relu',
-                                             padding='same')
-        self.bn5 = tf.keras.layers.BatchNormalization()
-        self.pool5 = tf.keras.layers.MaxPool2D(pool_size=(2, 2), strides=(2, 2))
-        self.drop5 = tf.keras.layers.Dropout(rate=0.5)
-
-        # flatten
-        self.flatten = tf.keras.layers.Flatten()
-
-        # dense
-        self.dense1 = tf.keras.layers.Dense(2 * 2 * 2 * self.num_features, activation='relu')
-        self.drop5 = tf.keras.layers.Dropout(0.5)
-        self.dense2 = tf.keras.layers.Dense(2 * 2 * self.num_features, activation='relu')
-        self.drop6 = tf.keras.layers.Dropout(0.5)
-        self.dense3 = tf.keras.layers.Dense(2 * self.num_features, activation='relu')
-        self.drop7 = tf.keras.layers.Dropout(0.5)
-
-        self.dense4 = tf.keras.layers.Dense(10, activation='softmax')
-
-    def call(self, inputs, training=None, mask=None):
-        # layer1
-        x = self.conv11(inputs)
-        x = self.conv12(x)
-        x = self.bn1(x, training=training)
-        x = self.pool1(x)
-        x = self.drop1(x, training=training)
-        # layer2
-        x = self.conv21(x)
-        x = self.conv22(x)
-        x = self.bn2(x, training=training)
-        x = self.pool2(x)
-        x = self.drop2(x, training=training)
-        # # layer3
-        x = self.conv31(x)
-        x = self.conv32(x)
-        x = self.conv33(x)
-        x = self.bn3(x, training=training)
-        x = self.pool3(x)
-        x = self.drop3(x, training=training)
-        # # layer4
-        x = self.conv41(x)
-        x = self.conv42(x)
-        x = self.conv43(x)
-        x = self.bn4(x, training=training)
-        x = self.pool4(x)
-        x = self.drop4(x, training=training)
-        # layer5
-        x = self.conv51(x)
-        x = self.conv52(x)
-        x = self.conv53(x)
-        x = self.bn5(x, training=training)
-        x = self.pool5(x)
-        x = self.drop5(x, training=training)
-
-        # flatten
-        x = self.flatten(x)
-        # dense
-        x = self.dense1(x)
-        x = self.drop5(x, training=training)
-        x = self.dense2(x)
-        x = self.drop6(x, training=training)
-        x = self.dense3(x)
-        x = self.drop7(x, training=training)
-        x = self.dense4(x)
-        return x
-
-    def get_optimizer(self):
-        """
-        build optimizer
+    def __init__(self, **kwargs):
+        """
+        Init base model.
+        """
+        self.base_model = VGG19()
+        super(VGG19Model, self).__init__(**kwargs)
+
+    def inputs(self):
+        """
+        Define inputs.
+        :return:
+        """
+        return self.base_model.input
+
+    def outputs(self, inputs):
+        """
+        Define outputs.
+        :param inputs:
+        :return:
+        """
+        x = self.base_model.output
+        return tf.keras.layers.Dense(10, activation='softmax')(x)
+
+    def optimizer(self):
+        """
+        Build optimizer.
         :return:
         """
         return tf.keras.optimizers.Adam(lr=self.config.get('learning_rate'))
 
-    def get_loss(self):
+    def loss(self):
         """
-        define loss
+        Define loss.
         :return:
         """
         return 'categorical_crossentropy'
 
-    def get_metrics(self):
+    def metrics(self):
         """
-        define metrics
+        Define metrics.
         :return:
         """
         return ['accuracy']
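Note: the rewritten VGG19Model imports VGG19 from the private tensorflow_core.python.keras.applications path and attaches the 10-class softmax directly to base_model.output, which with the default VGG19() arguments is the 1000-way ImageNet prediction layer. A hedged alternative sketch (not what this commit does) that uses only the public API and puts the new head on pooled convolutional features instead:

import tensorflow as tf

# drop the ImageNet top and pool the convolutional features
base_model = tf.keras.applications.VGG19(include_top=False, weights='imagenet',
                                         input_shape=(224, 224, 3), pooling='avg')
# 10-way classification head on the pooled VGG19 features
outputs = tf.keras.layers.Dense(10, activation='softmax')(base_model.output)
model = tf.keras.Model(inputs=base_model.input, outputs=outputs)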
2 changes: 1 addition & 1 deletion train.py
@@ -14,7 +14,7 @@ class Trainer(BaseTrainer):
     Train Image Classification Model.
     """
 
-    def prepare_data(self):
+    def data(self):
         """
         Prepare fashion mnist data.
         :return:
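Note: the renamed data() hook prepares Fashion-MNIST, but its body is collapsed in this view. A hedged sketch of what such a hook could look like (the exact return format BaseTrainer expects is an assumption):

import tensorflow as tf

def data(self):
    # load Fashion-MNIST and shape it for the (28, 28, 1) input and
    # categorical_crossentropy loss used by the models above
    (x_train, y_train), (x_eval, y_eval) = tf.keras.datasets.fashion_mnist.load_data()
    x_train = x_train[..., None].astype('float32') / 255.0
    x_eval = x_eval[..., None].astype('float32') / 255.0
    y_train = tf.keras.utils.to_categorical(y_train, 10)
    y_eval = tf.keras.utils.to_categorical(y_eval, 10)
    return (x_train, y_train), (x_eval, y_eval)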
