When I changed the image size of the dataset to 512x256 (to avoid running out of memory), I encountered the following problem:
File "/home/jathy/anaconda3/envs/generative-compression/lib/python3.6/site-packages/tensorflow/python/client/session.py", line 1322, in _do_call
return fn(*args)
File "/home/jathy/anaconda3/envs/generative-compression/lib/python3.6/site-packages/tensorflow/python/client/session.py", line 1307, in _run_fn
options, feed_dict, fetch_list, target_list, run_metadata)
File "/home/jathy/anaconda3/envs/generative-compression/lib/python3.6/site-packages/tensorflow/python/client/session.py", line 1409, in _call_tf_sessionrun
run_metadata)
tensorflow.python.framework.errors_impl.InvalidArgumentError: _MklConcatOp : Dimensions of inputs should match: shape[0][1]= 16 vs. shape[1][1] = 32
[[Node: generator/concat = _MklConcatV2[N=2, T=DT_FLOAT, Tidx=DT_INT32, _kernel="MklOp", _device="/job:localhost/replica:0/task:0/device:CPU:0"](generator/quantizer_image/Round, generator/noise_generator/conv_out/conv2d/BiasAdd, generator/quantizer_image/ArgMin/dimension, DMT/_164, generator/noise_generator/conv_out/conv2d/BiasAdd:2, DMT/_165)]]
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "train.py", line 119, in <module>
main()
File "train.py", line 116, in main
train(config_train, args)
File "train.py", line 70, in train
start_time, epoch, args.name, G_loss_best, D_loss_best)
File "/home/jathy/PycharmProjects/pythonProject/generative-compression/utils.py", line 78, in run_diagnostics
G_loss, D_loss, summary = sess.run([model.G_loss, model.D_loss, model.merge_op], feed_dict=feed_dict_test)
File "/home/jathy/anaconda3/envs/generative-compression/lib/python3.6/site-packages/tensorflow/python/client/session.py", line 900, in run
run_metadata_ptr)
File "/home/jathy/anaconda3/envs/generative-compression/lib/python3.6/site-packages/tensorflow/python/client/session.py", line 1135, in _run
feed_dict_tensor, options, run_metadata)
File "/home/jathy/anaconda3/envs/generative-compression/lib/python3.6/site-packages/tensorflow/python/client/session.py", line 1316, in _do_run
run_metadata)
File "/home/jathy/anaconda3/envs/generative-compression/lib/python3.6/site-packages/tensorflow/python/client/session.py", line 1335, in _do_call
raise type(e)(node_def, op, message)
tensorflow.python.framework.errors_impl.InvalidArgumentError: _MklConcatOp : Dimensions of inputs should match: shape[0][1]= 16 vs. shape[1][1] = 32
[[Node: generator/concat = _MklConcatV2[N=2, T=DT_FLOAT, Tidx=DT_INT32, _kernel="MklOp", _device="/job:localhost/replica:0/task:0/device:CPU:0"](generator/quantizer_image/Round, generator/noise_generator/conv_out/conv2d/BiasAdd, generator/quantizer_image/ArgMin/dimension, DMT/_164, generator/noise_generator/conv_out/conv2d/BiasAdd:2, DMT/_165)]]
Caused by op 'generator/concat', defined at:
File "train.py", line 119, in <module>
main()
File "train.py", line 116, in main
train(config_train, args)
File "train.py", line 34, in train
gan = Model(config, paths, name=args.name, dataset=args.dataset)
File "/home/jathy/PycharmProjects/pythonProject/generative-compression/model.py", line 77, in __init__
self.z = tf.concat([self.w_hat, Gv], axis=-1)
File "/home/jathy/anaconda3/envs/generative-compression/lib/python3.6/site-packages/tensorflow/python/ops/array_ops.py", line 1189, in concat
return gen_array_ops.concat_v2(values=values, axis=axis, name=name)
File "/home/jathy/anaconda3/envs/generative-compression/lib/python3.6/site-packages/tensorflow/python/ops/gen_array_ops.py", line 953, in concat_v2
"ConcatV2", values=values, axis=axis, name=name)
File "/home/jathy/anaconda3/envs/generative-compression/lib/python3.6/site-packages/tensorflow/python/framework/op_def_library.py", line 787, in _apply_op_helper
op_def=op_def)
File "/home/jathy/anaconda3/envs/generative-compression/lib/python3.6/site-packages/tensorflow/python/framework/ops.py", line 3392, in create_op
op_def=op_def)
File "/home/jathy/anaconda3/envs/generative-compression/lib/python3.6/site-packages/tensorflow/python/framework/ops.py", line 1718, in __init__
self._traceback = self._graph._extract_stack() # pylint: disable=protected-access
InvalidArgumentError (see above for traceback): _MklConcatOp : Dimensions of inputs should match: shape[0][1]= 16 vs. shape[1][1] = 32
[[Node: generator/concat = _MklConcatV2[N=2, T=DT_FLOAT, Tidx=DT_INT32, _kernel="MklOp", _device="/job:localhost/replica:0/task:0/device:CPU:0"](generator/quantizer_image/Round, generator/noise_generator/conv_out/conv2d/BiasAdd, generator/quantizer_image/ArgMin/dimension, DMT/_164, generator/noise_generator/conv_out/conv2d/BiasAdd:2, DMT/_165)]]
Is there any part of the code that needs to be modified?
To restate: after changing the dataset image size to 512x256 (to avoid running out of memory), I encountered the problem shown in the traceback above.
Is there any part of the code that needs to be modified?