Extracting /MNIST_data\train-images-idx3-ubyte.gz
Extracting /MNIST_data\train-labels-idx1-ubyte.gz
Extracting /MNIST_data\t10k-images-idx3-ubyte.gz
Extracting /MNIST_data\t10k-labels-idx1-ubyte.gz

ValueError                                Traceback (most recent call last)
in <module>()
      1 if __name__ == '__main__':
----> 2     main()

in main()
     18
     19     with tf.name_scope('train'):
---> 20         train_step = tf.train.AdamOptimizer(0.001).minimize(cross_entropy)
     21
     22     with tf.name_scope('accuracy'):

D:\Anaconda3\envs\tensorflow\lib\site-packages\tensorflow\python\training\optimizer.py in minimize(self, loss, global_step, var_list, gate_gradients, aggregation_method, colocate_gradients_with_ops, name, grad_loss)
    320         "No gradients provided for any variable, check your graph for ops"
    321         " that do not support gradients, between variables %s and loss %s." %
--> 322         ([str(v) for _, v in grads_and_vars], loss))
    323
    324     return self.apply_gradients(grads_and_vars, global_step=global_step,

ValueError: No gradients provided for any variable, check your graph for ops that do not support gradients, between variables ["", "", "", "", "", "", "", "", "", "", "", ""] and loss Tensor("cross_entropy_2/Mean:0", shape=(), dtype=float32).
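In graph-mode TensorFlow 1.x, this error usually means the loss tensor handed to minimize() is not connected to any trainable tf.Variable through differentiable ops, for example because cross_entropy was built only from placeholders or from a non-differentiable op such as tf.argmax. A minimal sketch of a setup where minimize() does find gradients (the names x, y_, W, b, and logits are hypothetical, not taken from the original main()):

import tensorflow as tf

x = tf.placeholder(tf.float32, [None, 784])
y_ = tf.placeholder(tf.float32, [None, 10])

W = tf.Variable(tf.zeros([784, 10]))
b = tf.Variable(tf.zeros([10]))
logits = tf.matmul(x, W) + b          # output depends on the variables

with tf.name_scope('cross_entropy'):
    # the loss is built from `logits`, so a gradient path to W and b exists
    cross_entropy = tf.reduce_mean(
        tf.nn.softmax_cross_entropy_with_logits(labels=y_, logits=logits))

with tf.name_scope('train'):
    train_step = tf.train.AdamOptimizer(0.001).minimize(cross_entropy)

If cross_entropy in main() is instead computed from placeholders alone, or from something like tf.argmax(logits, 1), there is no differentiable path from the loss back to the variables and minimize() raises exactly this ValueError.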