import tensorflow as tf
from tensorflow.keras.layers import Dense
from tensorflow.keras.backend import categorical_crossentropy
import time

(x_train, y_train), (x_test, y_test) = tf.keras.datasets.mnist.load_data()

net = tf.keras.models.Sequential([
    tf.keras.layers.Input(shape=(784,)),
    Dense(128, activation='relu'),
    Dense(128, activation='relu'),
    Dense(10, activation='softmax'),
])

def ll(targets, preds):
    return tf.reduce_mean(categorical_crossentropy(targets, preds))
adam = tf.keras.optimizers.Adam(learning_rate=1e-3)
net.compile(loss=ll, optimizer=adam)

# a fixed batch of 50 MNIST images, flattened to length-784 vectors
z = tf.cast(x_train[0:50], 'float32')
y = tf.one_hot(y_train[0:50], 10)
z_f = tf.reshape(z, (-1, 784))
t0 = time.time()
for i in range(1000):
    net.train_on_batch(z_f, y)
t = time.time() - t0
print(t)
On my computer, the time is 5.807 seconds with TF 2.
I also ran this other code:
import tensorflow as tf
from tensorflow.python.keras.layers import Dense
from tensorflow.python.keras.backend import categorical_crossentropy
from tensorflow.examples.tutorials.mnist import input_data
import time

sess = tf.Session()

img = tf.placeholder(tf.float32, shape=(None, 784))
x = Dense(128, activation='relu')(img)  # fully connected layer with 128 units and ReLU activation
x = Dense(128, activation='relu')(x)
preds = Dense(10, activation='softmax')(x)  # output layer with 10 units and a softmax activation

labels = tf.placeholder(tf.float32, shape=(None, 10))
loss = tf.reduce_mean(categorical_crossentropy(labels, preds))
mnist_data = input_data.read_data_sets('MNIST_data', one_hot=True)
train_step = tf.train.GradientDescentOptimizer(0.5).minimize(loss)

init_op = tf.global_variables_initializer()
sess.run(init_op)

t0 = time.time()
with sess.as_default():
    for i in range(1000):
        batch = mnist_data.train.next_batch(50)
        train_step.run(feed_dict={img: batch[0], labels: batch[1]})
t = time.time() - t0
print(t)
Then the time is 3.658 seconds with tf-1.14.0.
How can I change my first code to make it as fast as the second?
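One direction I am guessing at (not sure it is the right fix): if the gap comes from per-call eager overhead in train_on_batch, a custom training step compiled with tf.function might close it, since the forward and backward pass then run as a graph, similar to the TF 1 session. The name train_step_fn below is my own, not part of the Keras API; this is just a minimal sketch under that assumption:

import tensorflow as tf
from tensorflow.keras.layers import Dense
import time

(x_train, y_train), _ = tf.keras.datasets.mnist.load_data()

net = tf.keras.models.Sequential([
    tf.keras.layers.Input(shape=(784,)),
    Dense(128, activation='relu'),
    Dense(128, activation='relu'),
    Dense(10, activation='softmax'),
])
adam = tf.keras.optimizers.Adam(learning_rate=1e-3)

# same fixed batch of 50 images as in the first code
z_f = tf.reshape(tf.cast(x_train[0:50], 'float32'), (-1, 784))
y = tf.one_hot(y_train[0:50], 10)

@tf.function  # compile the step into a graph instead of running it eagerly
def train_step_fn(inputs, targets):  # hypothetical name, not a Keras API
    with tf.GradientTape() as tape:
        preds = net(inputs, training=True)
        loss = tf.reduce_mean(
            tf.keras.losses.categorical_crossentropy(targets, preds))
    grads = tape.gradient(loss, net.trainable_variables)
    adam.apply_gradients(zip(grads, net.trainable_variables))
    return loss

t0 = time.time()
for i in range(1000):
    train_step_fn(z_f, y)
print(time.time() - t0)

Is this the right direction, or is there a way to keep train_on_batch and still avoid the overhead?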