[Open] BlackcOVER opened this issue 4 years ago
FM.py中 elif self.loss_type == 'log_loss': self.out = tf.sigmoid(self.out) if self.lambda_bilinear > 0: self.loss = tf.contrib.losses.log_loss(self.out, self.train_labels, weight=1.0, epsilon=1e-07, scope=None) + tf.contrib.layers.l2_regularizer(self.lamda_bilinear)(self.weights['feature_embeddings']) # regulizer else: self.loss = tf.contrib.losses.log_loss(self.out, self.train_labels, weight=1.0, epsilon=1e-07, scope=None)
上面 FM.py 片段中变量名不一致:条件判断 `if self.lambda_bilinear > 0:` 使用了 `self.lambda_bilinear`,而同一段代码中正则项却写作 `tf.contrib.layers.l2_regularizer(self.lamda_bilinear)`。类中实际定义的属性是 `self.lamda_bilinear`(缺少 b 的拼写),因此条件判断处的 `self.lambda_bilinear` 有误,会抛出 AttributeError,应改为 `self.lamda_bilinear` 以保持一致。
FM.py中 elif self.loss_type == 'log_loss': self.out = tf.sigmoid(self.out) if self.lambda_bilinear > 0: self.loss = tf.contrib.losses.log_loss(self.out, self.train_labels, weight=1.0, epsilon=1e-07, scope=None) + tf.contrib.layers.l2_regularizer(self.lamda_bilinear)(self.weights['feature_embeddings']) # regulizer else: self.loss = tf.contrib.losses.log_loss(self.out, self.train_labels, weight=1.0, epsilon=1e-07, scope=None)
(重复报告,同上)上面片段中条件判断 `if self.lambda_bilinear > 0:` 使用了 `self.lambda_bilinear`,而正则项写作 `tf.contrib.layers.l2_regularizer(self.lamda_bilinear)`,两处变量名不一致。类中定义的属性是 `self.lamda_bilinear`,故条件判断处应改为 `self.lamda_bilinear`。