Issue closed by wangych6 2 years ago
def call(self, inputs):
    """Forward pass of the NFM model.

    Args:
        inputs: integer tensor of sparse-feature indices;
            assumed shape (batch, field_num) — TODO confirm against caller.

    Returns:
        Sigmoid output of the final dense layer; presumably shape
        (batch, 1) if ``self.dense`` has one unit — verify.
    """
    sparse_inputs = inputs
    # Embedding layer: one embedding table per sparse field.
    sparse_embed = [self.embed_layers['embed_{}'.format(i)](sparse_inputs[:, i])
                    for i in range(sparse_inputs.shape[1])]
    # Stack per-field embeddings along a new field axis.
    # tf.stack(..., axis=1) is the idiomatic one-op equivalent of the
    # original convert_to_tensor + transpose([1, 0, 2]).
    sparse_embed = tf.stack(sparse_embed, axis=1)  # (None, field_num, embed_dim)
    # Bi-Interaction (pooling) layer: 0.5 * ((sum_i x_i)^2 - sum_i x_i^2)
    # over the field axis — this IS the pooling step from the paper.
    sparse_embed = 0.5 * (tf.square(tf.reduce_sum(sparse_embed, axis=1))
                          - tf.reduce_sum(tf.square(sparse_embed), axis=1))  # (None, embed_dim)
    # NOTE(review): the original code had a (disabled) concat of dense
    # inputs here; kept disabled to preserve behavior.
    # x = tf.concat([dense_inputs, sparse_embed], axis=-1)
    x = sparse_embed
    # BatchNormalization; training flag is controlled by self.bn_use.
    x = self.bn(x, training=self.bn_use)
    # Hidden (DNN) layers.
    x = self.dnn_network(x)
    outputs = tf.nn.sigmoid(self.dense(x))
    return outputs
Nothing is missing — the pooling layer, after the algebraic transformation, is exactly this line:
sparse_embed = 0.5 * (tf.pow(tf.reduce_sum(sparse_embed, axis=1), 2) - tf.reduce_sum(tf.pow(sparse_embed, 2), axis=1)) # (None, embed_dim)
def call(self, inputs):
Inputs layer