-
Notifications
You must be signed in to change notification settings - Fork 27
/
Copy pathFNN.py
53 lines (42 loc) · 1.64 KB
/
FNN.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
import tensorflow as tf
from core.features import FeatureMetas, Features
from core.blocks import DNN
# Sentinel distinguishing "use the built-in default l2(1e-5) regularizer"
# from an explicit None (no regularization) passed by the caller.
_DEFAULT_REG = object()


def FNN(
        feature_metas,
        embedding_initializer='glorot_uniform',
        embedding_regularizer=_DEFAULT_REG,
        fixed_embedding_dim=None,
        dnn_hidden_units=(128, 64, 1),
        dnn_activations=('relu', 'relu', None),
        dnn_use_bias=True,
        dnn_use_bn=False,
        dnn_dropout=0,
        dnn_kernel_initializers='glorot_uniform',
        dnn_bias_initializers='zeros',
        dnn_kernel_regularizers=_DEFAULT_REG,
        dnn_bias_regularizers=None,
        name='FNN'):
    """Build an FNN-style binary-classification model.

    All features are embedded, concatenated into one dense vector, pushed
    through a DNN tower, and squashed with a sigmoid to yield a score.

    Args:
        feature_metas: ``FeatureMetas`` describing the model's input features.
        embedding_initializer: Initializer for the embedding tables.
        embedding_regularizer: Regularizer applied to embeddings. Defaults to
            ``l2(1e-5)``; pass ``None`` explicitly to disable.
        fixed_embedding_dim: If set, forces every embedding to this dimension.
        dnn_hidden_units: Units per DNN layer; the last entry is the output
            layer (1 for a single score).
        dnn_activations: Activation per DNN layer (``None`` = linear).
        dnn_use_bias: Whether the DNN layers use bias terms.
        dnn_use_bn: Whether the DNN layers use batch normalization.
        dnn_dropout: Dropout rate applied inside the DNN.
        dnn_kernel_initializers: Kernel initializer(s) for the DNN layers.
        dnn_bias_initializers: Bias initializer(s) for the DNN layers.
        dnn_kernel_regularizers: Kernel regularizer(s) for the DNN layers.
            Defaults to ``l2(1e-5)``; pass ``None`` explicitly to disable.
        dnn_bias_regularizers: Bias regularizer(s) for the DNN layers.
        name: Name scope under which the model's ops are created.

    Returns:
        A ``tf.keras.Model`` mapping the feature inputs to a sigmoid score.

    Raises:
        AssertionError: If ``feature_metas`` is not a ``FeatureMetas``.
    """
    # Create fresh default regularizers per call. Evaluating them in the
    # `def` line (as before) runs once at import time and shares a single
    # regularizer instance across every model this function ever builds —
    # the classic mutable-default-argument pitfall. A sentinel preserves
    # the meaning of an explicit None (= no regularization).
    if embedding_regularizer is _DEFAULT_REG:
        embedding_regularizer = tf.keras.regularizers.l2(1e-5)
    if dnn_kernel_regularizers is _DEFAULT_REG:
        dnn_kernel_regularizers = tf.keras.regularizers.l2(1e-5)

    assert isinstance(feature_metas, FeatureMetas)

    with tf.name_scope(name):
        features = Features(metas=feature_metas)

        # One flat dense vector: every feature embedded then concatenated.
        inputs = features.gen_concated_feature(
            embedding_group='embedding',
            fixed_embedding_dim=fixed_embedding_dim,
            embedding_initializer=embedding_initializer,
            embedding_regularizer=embedding_regularizer,
            slots_filter=None
        )

        # DNN tower; its final layer produces the raw logit.
        output = DNN(
            units=dnn_hidden_units,
            use_bias=dnn_use_bias,
            activations=dnn_activations,
            use_bn=dnn_use_bn,
            dropout=dnn_dropout,
            kernel_initializers=dnn_kernel_initializers,
            bias_initializers=dnn_bias_initializers,
            kernel_regularizers=dnn_kernel_regularizers,
            bias_regularizers=dnn_bias_regularizers
        )(inputs)

        output = tf.keras.activations.sigmoid(output)

        model = tf.keras.Model(inputs=features.get_inputs_list(), outputs=output)

    return model