models.py
import tensorflow as tf
import numpy as np

# Project-local helpers (TFRecord utilities, data loading, BERT embeddings,
# and the training loop).
from tfRecordTools import *
from dataLoader import *
from bertEmbeddings import *
from train import *

def makeFeedForwardModel(HPARAMS):
    """Builds a simple two-layer feed-forward model over integer word ids.

    Returns a `tf.keras.Model` that maps a batch of padded token-id sequences
    of length `HPARAMS.max_seq_length` to a single logit per example.
    """
    inputs = tf.keras.Input(
        shape=(HPARAMS.max_seq_length,), dtype='int64', name='words')
    embedding_layer = tf.keras.layers.Embedding(HPARAMS.vocab_size, 16)(inputs)
    pooling_layer = tf.keras.layers.GlobalAveragePooling1D()(embedding_layer)
    dense_layer = tf.keras.layers.Dense(16, activation='relu')(pooling_layer)
    outputs = tf.keras.layers.Dense(1)(dense_layer)
    return tf.keras.Model(inputs=inputs, outputs=outputs)
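
# A minimal, illustrative call (not part of the original file): the model
# expects integer word ids of shape (batch_size, HPARAMS.max_seq_length)
# and returns one logit per example, e.g.:
#
#   dummy_batch = np.zeros((2, HPARAMS.max_seq_length), dtype='int64')
#   logits = makeFeedForwardModel(HPARAMS)(dummy_batch)  # shape (2, 1)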

def makeBilstmModel(HPARAMS):
    """Builds a bi-directional LSTM model over integer word ids.

    Returns a `tf.keras.Model` that maps a batch of padded token-id sequences
    of length `HPARAMS.max_seq_length` to a single logit per example.
    """
    inputs = tf.keras.Input(
        shape=(HPARAMS.max_seq_length,), dtype='int64', name='words')
    embedding_layer = tf.keras.layers.Embedding(
        HPARAMS.vocab_size, HPARAMS.num_embedding_dims)(inputs)
    lstm_layer = tf.keras.layers.Bidirectional(
        tf.keras.layers.LSTM(HPARAMS.num_lstm_dims))(embedding_layer)
    dense_layer = tf.keras.layers.Dense(
        HPARAMS.num_fc_units, activation='relu')(lstm_layer)
    outputs = tf.keras.layers.Dense(1)(dense_layer)
    return tf.keras.Model(inputs=inputs, outputs=outputs)
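

if __name__ == '__main__':
    # Hedged smoke-test sketch, not from the original file. It assumes
    # HPARAMS is a simple attribute container exposing the hyperparameter
    # names the builders above read; the project's real HPARAMS object is
    # defined elsewhere, and these values are illustrative only.
    import types

    demo_hparams = types.SimpleNamespace(
        max_seq_length=256,
        vocab_size=10000,
        num_embedding_dims=16,
        num_lstm_dims=64,
        num_fc_units=64)

    for build_fn in (makeFeedForwardModel, makeBilstmModel):
        model = build_fn(demo_hparams)
        # Both builders end in Dense(1) with no activation, so the outputs
        # are logits; pair them with a from_logits=True loss when compiling.
        model.compile(
            optimizer='adam',
            loss=tf.keras.losses.BinaryCrossentropy(from_logits=True))
        model.summary()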