New model AFM
张浩天 committed Jul 29, 2019
1 parent 444ce74 commit 5abdbc3
Showing 6 changed files with 138 additions and 9 deletions.
60 changes: 59 additions & 1 deletion core/blocks.py
@@ -3,6 +3,12 @@
import tensorflow as tf
from tensorflow.python.keras import layers

from tensorflow.python.keras.initializers import (Zeros, glorot_normal,
glorot_uniform)
from tensorflow.python.keras.regularizers import l2
from tensorflow.python.keras import backend as K
import itertools


def get_activation(activation):
if activation is None:
@@ -151,7 +157,7 @@ def __init__(self, **kwargs):

super(InnerProduct, self).__init__(**kwargs)

-def call(self, inputs, **kwargs):
+def call(self, inputs, concat=True, **kwargs):

inner_products_list = list()

@@ -290,3 +296,55 @@ def call(self, inputs, hidden_width=(128, 64), require_logit=True, **kwargs):
logit = tf.matmul(finals, kernel)

return logit


class AttentionBasedPoolingLayer(tf.keras.Model):

def __init__(self,
attention_factor=4,
kernel_initializer='glorot_uniform',
kernel_regularizer=tf.keras.regularizers.l2(1e-5),
bias_initializer='zeros',
bias_regularizer=None,
**kwargs):

super(AttentionBasedPoolingLayer, self).__init__(**kwargs)

self.attention_factor = attention_factor
self.kernel_initializer = kernel_initializer
self.kernel_regularizer = kernel_regularizer
self.bias_initializer = bias_initializer
self.bias_regularizer = bias_regularizer

self.att_layer = layers.Dense(
units=self.attention_factor,
activation='relu',
use_bias=True,
kernel_initializer=self.kernel_initializer,
kernel_regularizer=self.kernel_regularizer,
bias_initializer=self.bias_initializer,
bias_regularizer=self.bias_regularizer
)
self.att_proj_layer = layers.Dense(
units=1,
activation=None,
use_bias=False,
kernel_initializer=self.kernel_initializer
)

    def call(self, inputs, **kwargs):

        interactions = list()

        # Element-wise (Hadamard) product for every pair of input embeddings.
        for i in range(len(inputs) - 1):
            for j in range(i + 1, len(inputs)):
                interactions.append(tf.multiply(inputs[i], inputs[j]))

        # Stack to (batch, num_pairs, embedding_dim) and score each pair with the attention MLP.
        interactions = tf.stack(interactions, axis=1)
        att_weight = self.att_layer(interactions)
        att_weight = self.att_proj_layer(att_weight)

        # Normalise the scores over the pair axis and return the attention-weighted sum.
        att_weight = layers.Softmax(axis=1)(att_weight)
        output = tf.reduce_sum(interactions * att_weight, axis=1)

        return output
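
A minimal usage sketch, not part of the commit, assuming TensorFlow 2.x eager execution; the field count, batch size, and embedding dimension are illustrative only. It shows the call pattern the layer expects: a list of same-dimension field embeddings, as models/AFM.py passes further down.

import tensorflow as tf
from core.blocks import AttentionBasedPoolingLayer

# Hypothetical shapes: 4 feature fields, batch size 32, embedding dim 8.
field_embeddings = [tf.random.normal((32, 8)) for _ in range(4)]

pooled = AttentionBasedPoolingLayer(attention_factor=4)(field_embeddings)
# 4 fields give 6 pairwise interactions; the attention-weighted sum
# collapses them back to a single tensor of shape (32, 8).
print(pooled.shape)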
71 changes: 71 additions & 0 deletions models/AFM.py
@@ -0,0 +1,71 @@
import tensorflow as tf

from core.features import FeatureMetas, Features, group_embedded_by_dim
from core.blocks import DNN, AttentionBasedPoolingLayer


def AFM(
feature_metas,
linear_slots,
fm_slots,
embedding_initializer='glorot_uniform',
embedding_regularizer=tf.keras.regularizers.l2(1e-5),
fm_fixed_embedding_dim=None,
linear_use_bias=True,
linear_kernel_initializer=tf.keras.initializers.RandomNormal(stddev=1e-4, seed=1024),
linear_kernel_regularizer=tf.keras.regularizers.l2(1e-5),
dnn_hidden_units=(128, 64, 1),
dnn_activations=('relu', 'relu', None),
dnn_use_bias=True,
dnn_use_bn=False,
dnn_dropout=0,
dnn_kernel_initializers='glorot_uniform',
dnn_bias_initializers='zeros',
dnn_kernel_regularizers=tf.keras.regularizers.l2(1e-5),
dnn_bias_regularizers=None,
name='AFM'):

assert isinstance(feature_metas, FeatureMetas)

with tf.name_scope(name):

features = Features(metas=feature_metas)

# Linear Part
with tf.name_scope('Linear'):
linear_output = features.get_linear_logit(use_bias=linear_use_bias,
kernel_initializer=linear_kernel_initializer,
kernel_regularizer=linear_kernel_regularizer,
embedding_group='dot_embedding',
slots_filter=linear_slots)

# Interaction
with tf.name_scope('Interaction'):
fm_embedded_dict = features.get_embedded_dict(group_name='embedding',
fixed_embedding_dim=fm_fixed_embedding_dim,
embedding_initializer=embedding_initializer,
embedding_regularizer=embedding_regularizer,
slots_filter=fm_slots)
fm_dim_groups = group_embedded_by_dim(fm_embedded_dict)
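            # Attention-pool each group of same-dimension embeddings; groups with a single field have no pairwise interactions and are skipped.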
fms = [AttentionBasedPoolingLayer()(group)
for group in fm_dim_groups.values() if len(group) > 1]
dnn_inputs = tf.concat(fms, axis=1)
dnn_output = DNN(
units=dnn_hidden_units,
use_bias=dnn_use_bias,
activations=dnn_activations,
use_bn=dnn_use_bn,
dropout=dnn_dropout,
kernel_initializers=dnn_kernel_initializers,
bias_initializers=dnn_bias_initializers,
kernel_regularizers=dnn_kernel_regularizers,
bias_regularizers=dnn_bias_regularizers
)(dnn_inputs)

# Output
output = tf.add_n([linear_output, dnn_output])
output = tf.keras.activations.sigmoid(output)

model = tf.keras.Model(inputs=features.get_inputs_list(), outputs=output)

return model
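
As a reading aid, the forward pass wired up above can be written as follows; the symbols e_i, W, b, h, x, and p are mine and do not name identifiers in the code:

\[
a_{ij} = \operatorname{softmax}_{(i,j)}\big(h^{\top}\,\mathrm{ReLU}(W\,(e_i \odot e_j) + b)\big),
\qquad
p = \sum_{i<j} a_{ij}\,(e_i \odot e_j),
\qquad
\hat{y} = \sigma\big(\mathrm{linear}(x) + \mathrm{DNN}(p)\big).
\]

When the embeddings fall into several dimension groups, one pooled vector p is computed per group and the vectors are concatenated before the DNN. This differs from the original AFM formulation, which projects the pooled interactions with a single weight vector rather than a DNN.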
4 changes: 2 additions & 2 deletions models/DeepFM.py
@@ -13,8 +13,8 @@ def DeepFM(
embedding_regularizer=tf.keras.regularizers.l2(1e-5),
fm_fixed_embedding_dim=None,
linear_use_bias=True,
-linear_kernel_initializer='glorot_uniform',
-linear_kernel_regularizer=None,
+linear_kernel_initializer=tf.keras.initializers.RandomNormal(stddev=1e-4, seed=1024),
+linear_kernel_regularizer=tf.keras.regularizers.l2(1e-5),
dnn_hidden_units=(128, 64, 1),
dnn_activations=('relu', 'relu', None),
dnn_use_bias=True,
4 changes: 2 additions & 2 deletions models/NFM.py
@@ -12,8 +12,8 @@ def NFM(
embedding_regularizer=tf.keras.regularizers.l2(1e-5),
fm_fixed_embedding_dim=None,
linear_use_bias=True,
-linear_kernel_initializer='glorot_uniform',
-linear_kernel_regularizer=None,
+linear_kernel_initializer=tf.keras.initializers.RandomNormal(stddev=1e-4, seed=1024),
+linear_kernel_regularizer=tf.keras.regularizers.l2(1e-5),
dnn_hidden_units=(128, 64, 1),
dnn_activations=('relu', 'relu', None),
dnn_use_bias=True,
4 changes: 2 additions & 2 deletions models/WideAndDeep.py
@@ -11,8 +11,8 @@ def WideAndDeep(
embedding_initializer=tf.keras.initializers.RandomNormal(mean=0.0, stddev=1e-4),
embedding_regularizer=tf.keras.regularizers.l2(1e-5),
wide_use_bias=True,
-wide_kernel_initializer='glorot_uniform',
-wide_kernel_regularizer=None,
+wide_kernel_initializer=tf.keras.initializers.RandomNormal(stddev=1e-4, seed=1024),
+wide_kernel_regularizer=tf.keras.regularizers.l2(1e-5),
deep_fixed_embedding_dim=None,
deep_hidden_units=(128, 64, 1),
deep_activations=('relu', 'relu', None),
4 changes: 2 additions & 2 deletions models/xDeepFM.py
@@ -15,8 +15,8 @@ def xDeepFM(
fm_kernel_initializer='glorot_uniform',
fm_kernel_regularizer=None,
linear_use_bias=True,
-linear_kernel_initializer='glorot_uniform',
-linear_kernel_regularizer=None,
+linear_kernel_initializer=tf.keras.initializers.RandomNormal(stddev=1e-4, seed=1024),
+linear_kernel_regularizer=tf.keras.regularizers.l2(1e-5),
dnn_hidden_units=(128, 64, 1),
dnn_activations=('relu', 'relu', None),
dnn_use_bias=True,
