import tensorflow as tf


def create_dnn(seed=42, input_shape=(2381,), mc=False):
"""
This function compiles and returns a Keras model.
"""
initializer = tf.keras.initializers.GlorotNormal(seed=seed)
# model = tf.keras.models.Sequential()
inputs = tf.keras.Input(shape=input_shape)
x = tf.keras.layers.Dense(2381, activation='elu', kernel_initializer=initializer)(inputs)
x = tf.keras.layers.LayerNormalization() (x)
x = tf.keras.layers.Dropout(0.3)(x, training=mc)
x = tf.keras.layers.Dense(1024, activation='elu', kernel_initializer=initializer)(x)
x = tf.keras.layers.LayerNormalization() (x)
x = tf.keras.layers.Dropout(0.3)(x, training=mc)
x = tf.keras.layers.Dense(512, activation='elu', kernel_initializer=initializer)(x)
x = tf.keras.layers.LayerNormalization() (x)
x = tf.keras.layers.Dropout(0.3)(x, training=mc)
x = tf.keras.layers.Dense(128, activation='elu', kernel_initializer=initializer)(x)
x = tf.keras.layers.LayerNormalization() (x)
x = tf.keras.layers.Dropout(0.3)(x, training=mc)
outputs = tf.keras.layers.Dense(1, activation="sigmoid")(x)
model = tf.keras.Model(inputs, outputs)
bce = tf.keras.losses.BinaryCrossentropy(from_logits=False)
# optim = tf.keras.optimizers.RMSprop(lr=1e-3, momentum=0.9)
optim = tf.keras.optimizers.Adam()
model.compile(optimizer=optim, loss=bce,
metrics=['accuracy'])
# metrics=['accuracy',
# tf.keras.metrics.SensitivityAtSpecificity(0.99, name="TPR_01")])
return model
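

# Usage sketch (illustrative, not part of the original file): with mc=True the
# dropout layers remain active during predict(), so repeated forward passes
# over the same batch can be averaged for a Monte Carlo dropout uncertainty
# estimate. The batch below is dummy data, assumed only for demonstration.
def _demo_mc_dropout(n_passes=10):
    import numpy as np

    model = create_dnn(mc=True)
    x = np.random.rand(4, 2381).astype("float32")  # hypothetical feature batch
    preds = np.stack([model.predict(x, verbose=0) for _ in range(n_passes)])
    return preds.mean(axis=0), preds.std(axis=0)   # per-sample mean and spread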


def create_dnn2(seed=42, mc=False):
    """
    Build and compile a two-input Keras model. The network's raw output is
    added to the scalar second input before the sigmoid, so the model learns
    a residual adjustment on top of that input.
    """
    initializer = tf.keras.initializers.GlorotNormal(seed=seed)
    input1 = tf.keras.Input(shape=(2381,))
    input2 = tf.keras.Input(shape=(1,))
    x = tf.keras.layers.concatenate([input1, input2])
    # Four Dense blocks, each followed by layer normalization and dropout.
    x = tf.keras.layers.Dense(2382, activation='elu', kernel_initializer=initializer)(x)
    x = tf.keras.layers.LayerNormalization()(x)
    x = tf.keras.layers.Dropout(0.3)(x, training=mc)
    x = tf.keras.layers.Dense(1024, activation='elu', kernel_initializer=initializer)(x)
    x = tf.keras.layers.LayerNormalization()(x)
    x = tf.keras.layers.Dropout(0.3)(x, training=mc)
    x = tf.keras.layers.Dense(512, activation='elu', kernel_initializer=initializer)(x)
    x = tf.keras.layers.LayerNormalization()(x)
    x = tf.keras.layers.Dropout(0.3)(x, training=mc)
    x = tf.keras.layers.Dense(128, activation='elu', kernel_initializer=initializer)(x)
    x = tf.keras.layers.LayerNormalization()(x)
    x = tf.keras.layers.Dropout(0.3)(x, training=mc)
    outputs = tf.keras.layers.Dense(1)(x)  # raw logit, no activation

    out = tf.add(outputs, input2)  # residual connection to the second input
    # out = tf.clip_by_value(out, 0, 1)
    out = tf.sigmoid(out)
    model = tf.keras.Model([input1, input2], out)
    bce = tf.keras.losses.BinaryCrossentropy(from_logits=False)
    # mae = tf.keras.losses.MeanAbsoluteError()
    # mse = tf.keras.losses.MeanSquaredError()
    optim = tf.keras.optimizers.Adam()
    model.compile(optimizer=optim, loss=bce,
                  metrics=['accuracy'])
    # metrics=['accuracy',
    #          tf.keras.metrics.SensitivityAtSpecificity(0.99, name="TPR_01")])
    return model
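

# Usage sketch (illustrative assumption): create_dnn2 expects two inputs, a
# 2381-dim feature vector and a scalar that is added to the raw logit before
# the sigmoid. The data below is hypothetical, for demonstration only.
def _demo_dnn2():
    import numpy as np

    model = create_dnn2()
    x1 = np.random.rand(4, 2381).astype("float32")  # hypothetical features
    x2 = np.random.rand(4, 1).astype("float32")     # hypothetical scalar input
    return model.predict([x1, x2], verbose=0)


if __name__ == "__main__":
    mean, std = _demo_mc_dropout()
    print("MC dropout mean/std:", mean.ravel(), std.ravel())
    print("create_dnn2 predictions:", _demo_dnn2().ravel())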