This repository has been archived by the owner on Jun 4, 2023. It is now read-only.
-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathAlgorithm.py
57 lines (48 loc) · 1.64 KB
/
Algorithm.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
#!/usr/bin/env python3
from keras.models import Sequential
import matplotlib.pyplot as plt
from keras.layers import TimeDistributed, Dense, Dropout,Activation
from keras.layers import Embedding
from keras.layers import LSTM
from keras.optimizers import RMSprop, Adam
import numpy as np
def one_layer_lstm(max_len, inp, hidden, outp):
    """Build and compile a single-hidden-layer LSTM sequence labeller.

    Args:
        max_len: number of timesteps in each input sequence.
        inp: number of input features per timestep.
        hidden: number of LSTM units in the hidden layer.
        outp: number of output classes predicted at each timestep.

    Returns:
        A compiled Keras ``Sequential`` model that emits a softmax
        distribution over ``outp`` classes for every timestep.
    """
    # BUG FIX: this function previously contained unresolved git merge
    # conflict markers (<<<<<<< / ======= / >>>>>>>), which made the file
    # a SyntaxError. The parameterized HEAD resolution is kept; the stale
    # branch's hard-coded layer sizes and extra 1-unit LSTM are dropped.
    model = Sequential()
    layers = {'input': inp, 'hidden': hidden, 'output': outp}
    model.add(LSTM(layers['hidden'],
                   input_shape=(max_len, layers['input']),
                   return_sequences=True))  # keep per-timestep outputs
    # Apply the same Dense classifier independently at every timestep.
    model.add(TimeDistributed(Dense(layers['output'])))
    model.add(Activation("softmax"))
    # NOTE(review): lr=0.1 is unusually high for Adam (default 0.001) —
    # confirm this is intentional before training.
    optimizer = Adam(lr=0.1)
    model.compile(loss="categorical_crossentropy",
                  optimizer=optimizer,
                  metrics=['acc'])
    model.summary()
    return model
def lstm(max_len):
    """Build and compile a stacked two-layer LSTM binary classifier.

    Args:
        max_len: number of timesteps in each input sequence; inputs are
            expected to have 48 features per timestep.

    Returns:
        A compiled Keras ``Sequential`` model producing a single
        probability in [0, 1] per sequence.
    """
    model = Sequential()
    layers = {'input': 48, 'hidden1': 64, 'hidden2': 128, 'output': 1}
    model.add(LSTM(layers['hidden1'],
                   input_shape=(max_len, layers['input']),
                   return_sequences=True))  # feed full sequence to next LSTM
    model.add(Dropout(0.5))
    model.add(LSTM(layers['hidden2'],
                   return_sequences=False))  # collapse to one vector per sequence
    model.add(Dropout(0.5))
    model.add(Dense(layers['output']))
    # BUG FIX: softmax over a single unit always outputs exactly 1.0,
    # making binary_crossentropy degenerate and the model untrainable.
    # A one-unit binary classifier must use sigmoid.
    model.add(Activation("sigmoid"))
    model.compile(loss="binary_crossentropy",
                  optimizer="adam",
                  metrics=['acc'])
    return model