-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathmodels.py
67 lines (51 loc) · 2.37 KB
/
models.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
import tensorflow as tf
from tensorflow import keras
import os
import tempfile
import sklearn
from sklearn.metrics import confusion_matrix
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import StandardScaler
# Halt training once validation PR-AUC stops improving for 10 epochs,
# and roll back to the best weights observed during the run.
early_stopping = tf.keras.callbacks.EarlyStopping(
    monitor='val_pr_auc',
    patience=10,
    mode='max',
    verbose=1,
    restore_best_weights=True,
)

# Shared metric list for all model builders below: area under the
# precision-recall curve, exposed as 'pr_auc' (the quantity the
# early-stopping callback monitors as 'val_pr_auc').
metrics = [keras.metrics.AUC(curve='PR', name='pr_auc')]
def make_model_baseline(output_bias=None, input_dim=None):
    """Build and compile a one-hidden-layer baseline binary classifier.

    Args:
        output_bias: Optional float used to initialize the output unit's
            bias (useful for class-imbalance priors). ``None`` keeps the
            layer's default bias initializer.
        input_dim: Number of input features. Defaults to
            ``train_features.shape[-1]`` (a module-level global defined
            elsewhere) to stay backward compatible with existing callers.

    Returns:
        A compiled ``keras.Sequential`` model with a sigmoid output,
        binary cross-entropy loss, and the shared PR-AUC metric.
    """
    if output_bias is not None:
        output_bias = tf.keras.initializers.Constant(output_bias)
    if input_dim is None:
        # NOTE(review): relies on a global `train_features` defined
        # elsewhere in the project — confirm it exists at call time.
        input_dim = train_features.shape[-1]
    model = keras.Sequential([
        keras.layers.Dense(32,
                           activation='relu',
                           input_shape=(input_dim,)),
        keras.layers.Dense(1,
                           activation='sigmoid',
                           bias_initializer=output_bias),
    ])
    model.compile(
        # `lr` is deprecated/removed in TF 2.x Keras optimizers;
        # the supported keyword is `learning_rate`.
        optimizer=keras.optimizers.Adam(learning_rate=1e-3),
        loss=keras.losses.BinaryCrossentropy(),
        metrics=metrics)
    return model
def make_model_baseline_batchnorm_drouput(output_bias=None, input_dim=None):
    """Build and compile the baseline classifier with BatchNorm + Dropout.

    Same architecture as ``make_model_baseline`` plus batch normalization
    and a dropout layer (rate 0.7) after the hidden layer for
    regularization.

    NOTE(review): "drouput" in the function name is a typo for "dropout";
    kept as-is to avoid breaking existing callers.

    Args:
        output_bias: Optional float used to initialize the output unit's
            bias (useful for class-imbalance priors). ``None`` keeps the
            layer's default bias initializer.
        input_dim: Number of input features. Defaults to
            ``train_features.shape[-1]`` (a module-level global defined
            elsewhere) to stay backward compatible with existing callers.

    Returns:
        A compiled ``keras.Sequential`` model with a sigmoid output,
        binary cross-entropy loss, and the shared PR-AUC metric.
    """
    if output_bias is not None:
        output_bias = tf.keras.initializers.Constant(output_bias)
    if input_dim is None:
        # NOTE(review): relies on a global `train_features` defined
        # elsewhere in the project — confirm it exists at call time.
        input_dim = train_features.shape[-1]
    model = keras.Sequential([
        keras.layers.Dense(32,
                           activation='relu',
                           input_shape=(input_dim,)),
        keras.layers.BatchNormalization(),
        keras.layers.Dropout(0.7),
        keras.layers.Dense(1,
                           activation='sigmoid',
                           bias_initializer=output_bias),
    ])
    model.compile(
        # `lr` is deprecated/removed in TF 2.x Keras optimizers;
        # the supported keyword is `learning_rate`.
        optimizer=keras.optimizers.Adam(learning_rate=1e-3),
        loss=keras.losses.BinaryCrossentropy(),
        metrics=metrics)
    return model