Hi , I am coding the 8-11 about 'gradient',but I have some bug.
code:
from keras.applications import inception_v3
from keras import backend as K
import tensorflow as tf
K.set_learning_phase(0)
model = inception_v3.InceptionV3(weights='imagenet',include_top=False)
#设置deepdream配置
layer_contributions = {
'mixed2':0.2,
'mixed3':3.,
'mixed4':2.,
'mixed5':1.5,
}
#定义需要最大化损失
layer_dict = dict([(layer.name,layer) for layer in model.layers])
loss = K.variable(0.)
for layer_name in layer_contributions:
coeff = layer_contributions[layer_name]
activation = layer_dict[layer_name].output
scaling = K.prod(K.cast(K.shape(activation),'float32'))
loss = loss + coeff * K.sum(K.square(activation[:, 2 :-2, 2: -2, :])) / scaling
#梯度上升过程
dream = model.input
with tf.GradientTape() as gtape:
grads = gtape.gradient(loss,dream)[0]
# grads = K.gradients(loss,dream)[0]
grads /= K.maximum(K.mean(K.abs(grads)),1e-7)
outputs = [loss,grads]
fetch_loss_and_grads = K.function([dream],outputs)
def eval_loss_and_grads(x):
outs = fetch_loss_and_grads([x])
loss_value = out[0]
grad_value =outs[1]
return loss_value,grad_values
def gradient_ascent(x,iterations,step,max_loss=None):
for i in range(iterations):
loss_value,gra_values = eval_loss_and_grads(x)
if max_loss is not None and loss_value > max_loss:
break
print('...Loss value at',i,':',loss_value)
x += step * grad_values
return x
bug information:
AttributeError Traceback (most recent call last)
~\AppData\Local\Temp/ipykernel_40076/4006197052.py in <module>
27
28 with tf.GradientTape() as gtape:
---> 29 grads = gtape.gradient(loss,dream)[0]
30
31 # grads = K.gradients(loss,dream)[0]
E:\Users\48067\Anaconda3\envs\Tensorflow-Keras-GPU\lib\site-packages\tensorflow\python\eager\backprop.py in gradient(self, target, sources, output_gradients, unconnected_gradients)
1078 output_gradients=output_gradients,
1079 sources_raw=flat_sources_raw,
-> 1080 unconnected_gradients=unconnected_gradients)
1081
1082 if not self._persistent:
E:\Users\48067\Anaconda3\envs\Tensorflow-Keras-GPU\lib\site-packages\tensorflow\python\eager\imperative_grad.py in imperative_grad(tape, target, sources, output_gradients, sources_raw, unconnected_gradients)
75 output_gradients,
76 sources_raw,
---> 77 compat.as_str(unconnected_gradients.value))
AttributeError: 'KerasTensor' object has no attribute '_id
And My tensorflow and keras version is :
tensorflow-gpu 2.5.0
keras 2.5.0
looking forwoard to your reply. Thank you very much!
Hi , I am coding the 8-11 about 'gradient',but I have some bug.
code:
bug information:
And My tensorflow and keras version is :
looking forwoard to your reply. Thank you very much!