forked from intoli/intoli-article-materials
-
Notifications
You must be signed in to change notification settings - Fork 0
/
utils.py
135 lines (110 loc) · 4.43 KB
/
utils.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
import keras
from keras.models import Sequential
from keras.layers import Conv2D, MaxPooling2D, Dense, Dropout, Flatten
from keras import backend as K
from matplotlib import pyplot as plt
from matplotlib import rcParamsDefault
def grid_axes_it(n_plots, n_cols=3, enumerate=False, fig=None):
    """
    Iterate through Axes objects on a grid with n_cols columns and as many
    rows as needed to accommodate n_plots many plots.

    Args:
        n_plots: Number of plots to plot onto figure.
        n_cols: Number of columns to divide the figure into.
        enumerate: Unused; kept only for backward compatibility with
            existing callers. NOTE(review): shadows the builtin `enumerate`.
        fig: Optional figure reference. When omitted, a new figure is
            created, scaled from the default figsize to fit the grid.

    Yields:
        n_plots many Axes objects on a grid.
    """
    # Ceiling division. The original used true division (`/`), which on
    # Python 3 yields a float and makes plt.subplot() fail.
    n_rows = n_plots // n_cols + int(n_plots % n_cols > 0)
    if fig is None:
        default_figsize = rcParamsDefault['figure.figsize']
        fig = plt.figure(figsize=(
            default_figsize[0] * n_cols,
            default_figsize[1] * n_rows,
        ))
    # Subplot indices are 1-based.
    for i in range(1, n_plots + 1):
        yield plt.subplot(n_rows, n_cols, i)
def create_mlp_model(
    n_hidden_layers,
    dim_layer,
    input_shape,
    n_classes,
    kernel_initializer,
    bias_initializer,
    activation,
):
    """Create Multi-Layer Perceptron with given parameters.

    Builds a Sequential stack: an input Dense layer (no activation),
    n_hidden_layers many activated Dense layers of width dim_layer, and a
    softmax output layer of width n_classes. All layers share the given
    kernel and bias initializers.
    """
    init_kwargs = dict(
        kernel_initializer=kernel_initializer,
        bias_initializer=bias_initializer,
    )
    model = Sequential()
    # Input projection layer (intentionally no activation here).
    model.add(Dense(dim_layer, input_shape=input_shape, **init_kwargs))
    # Hidden layers.
    for _ in range(n_hidden_layers):
        model.add(Dense(dim_layer, activation=activation, **init_kwargs))
    # Classification head.
    model.add(Dense(n_classes, activation='softmax', **init_kwargs))
    return model
def create_cnn_model(input_shape, num_classes, kernel_initializer='glorot_uniform',
                     bias_initializer='zeros'):
    """Create CNN model similar to
    https://github.com/keras-team/keras/blob/master/examples/mnist_cnn.py."""
    # Every weighted layer shares the same initializers.
    init_kwargs = dict(
        kernel_initializer=kernel_initializer,
        bias_initializer=bias_initializer,
    )
    model = Sequential()
    # Two convolutional feature-extraction layers.
    model.add(Conv2D(32, kernel_size=(3, 3), activation='relu',
                     input_shape=input_shape, **init_kwargs))
    model.add(Conv2D(64, (3, 3), activation='relu', **init_kwargs))
    # Downsample, regularize, and flatten for the dense head.
    model.add(MaxPooling2D(pool_size=(2, 2)))
    model.add(Dropout(0.25))
    model.add(Flatten())
    model.add(Dense(128, activation='relu', **init_kwargs))
    model.add(Dropout(0.5))
    # Softmax classification output.
    model.add(Dense(num_classes, activation='softmax', **init_kwargs))
    return model
def compile_model(model):
    """Compile `model` in place with categorical cross-entropy loss, the
    RMSprop optimizer, and an accuracy metric; return the same model so
    calls can be chained."""
    model.compile(
        loss=keras.losses.categorical_crossentropy,
        optimizer=keras.optimizers.RMSprop(),
        metrics=['accuracy'],
    )
    return model
def get_init_id(init):
    """
    Returns string ID summarizing initialization scheme and its parameters.

    Args:
        init: Instance of some initializer from keras.initializers.

    Returns:
        A string of the form 'name|key-value__key-value...'. The 'seed'
        config entry is excluded so IDs are stable across seeded runs.
    """
    # Initializer reprs typically look like
    # "<keras.initializers.Name object at 0x...>"; pull the class name out
    # of the dotted path. If the repr has fewer than three dot-separated
    # parts, fall back to a flattened form of the first token. The original
    # used a bare `except:`; only IndexError can occur here, so catch that.
    try:
        init_name = str(init).split('.')[2].split(' ')[0]
    except IndexError:
        init_name = str(init).split(' ')[0].replace('.', '_')
    # 'seed' is deliberately omitted from the parameter summary.
    param_list = [
        '{k}-{v}'.format(k=k, v=v)
        for k, v in init.get_config().items()
        if k != 'seed'
    ]
    init_params = '__'.join(param_list)
    return '|'.join([init_name, init_params])
def get_activations(model, x, mode=0.0):
    """Extract per-layer activations with given model and input vector x.

    Args:
        model: A Keras model.
        x: Input batch fed to the model.
        mode: Keras learning phase — 0.0 for test mode, 1.0 for train mode
            (affects layers like Dropout).

    Returns:
        List of output values, one per layer of the model.
    """
    outputs = [layer.output for layer in model.layers]
    # The backend function is invoked below with TWO values, [x, mode], so
    # it must declare the learning-phase placeholder as a second input; the
    # original built it with [model.input] only, which cannot accept the
    # mode value at call time.
    activations = K.function([model.input, K.learning_phase()], outputs)
    output_elts = activations([x, mode])
    return output_elts
class LossHistory(keras.callbacks.Callback):
    """A custom keras callback for recording losses during network training.

    Collects per-batch training loss plus per-epoch training and validation
    losses; histories are reset at the start of each training run.
    """

    # The original used mutable default arguments (logs={}); default to
    # None instead, matching the Keras callback convention.
    def on_train_begin(self, logs=None):
        self.losses = []            # training loss after every batch
        self.epoch_losses = []      # training loss after every epoch
        self.epoch_val_losses = []  # validation loss after every epoch

    def on_batch_end(self, batch, logs=None):
        logs = logs or {}
        self.losses.append(logs.get('loss'))

    def on_epoch_end(self, epoch, logs=None):
        logs = logs or {}
        self.epoch_losses.append(logs.get('loss'))
        self.epoch_val_losses.append(logs.get('val_loss'))