Hello,
I was trying to implement Vanilla Gradient Core API for my deep learning model and the tutorial did not work for me. I was implementing it on Google Colab.
This is my code -
I am using a Densenet201 model. Following is the traceback that I got -
---------------------------------------------------------------------------
AttributeError Traceback (most recent call last)
<ipython-input-34-c2b31b45db83> in <module>()
5 class_index = 1
6 explainer = VanillaGradients()
----> 7 grid = explainer.explain(data, model, 1)
8 plt.imshow(grid)
8 frames
/usr/local/lib/python3.6/dist-packages/tf_explain/core/vanilla_gradients.py in explain(self, validation_data, model, class_index)
33 images, _ = validation_data
34
---> 35 gradients = self.compute_gradients(images, model, class_index)
36
37 grayscale_gradients = transform_to_normalized_grayscale(
/usr/local/lib/python3.6/dist-packages/tensorflow/python/eager/def_function.py in __call__(self, *args, **kwds)
578 xla_context.Exit()
579 else:
--> 580 result = self._call(*args, **kwds)
581
582 if tracing_count == self._get_tracing_count():
/usr/local/lib/python3.6/dist-packages/tensorflow/python/eager/def_function.py in _call(self, *args, **kwds)
616 # In this case we have not created variables on the first call. So we can
617 # run the first trace but we should fail if variables are created.
--> 618 results = self._stateful_fn(*args, **kwds)
619 if self._created_variables:
620 raise ValueError("Creating variables on a non-first call to a function"
/usr/local/lib/python3.6/dist-packages/tensorflow/python/eager/function.py in __call__(self, *args, **kwargs)
2417 """Calls a graph function specialized to the inputs."""
2418 with self._lock:
-> 2419 graph_function, args, kwargs = self._maybe_define_function(args, kwargs)
2420 return graph_function._filtered_call(args, kwargs) # pylint: disable=protected-access
2421
/usr/local/lib/python3.6/dist-packages/tensorflow/python/eager/function.py in _maybe_define_function(self, args, kwargs)
2775
2776 self._function_cache.missed.add(call_context_key)
-> 2777 graph_function = self._create_graph_function(args, kwargs)
2778 self._function_cache.primary[cache_key] = graph_function
2779 return graph_function, args, kwargs
/usr/local/lib/python3.6/dist-packages/tensorflow/python/eager/function.py in _create_graph_function(self, args, kwargs, override_flat_arg_shapes)
2665 arg_names=arg_names,
2666 override_flat_arg_shapes=override_flat_arg_shapes,
-> 2667 capture_by_value=self._capture_by_value),
2668 self._function_attributes,
2669 # Tell the ConcreteFunction to clean up its graph once it goes out of
/usr/local/lib/python3.6/dist-packages/tensorflow/python/framework/func_graph.py in func_graph_from_py_func(name, python_func, args, kwargs, signature, func_graph, autograph, autograph_options, add_control_dependencies, arg_names, op_return_value, collections, capture_by_value, override_flat_arg_shapes)
979 _, original_func = tf_decorator.unwrap(python_func)
980
--> 981 func_outputs = python_func(*func_args, **func_kwargs)
982
983 # invariant: `func_outputs` contains only Tensors, CompositeTensors,
/usr/local/lib/python3.6/dist-packages/tensorflow/python/eager/def_function.py in wrapped_fn(*args, **kwds)
439 # __wrapped__ allows AutoGraph to swap in a converted function. We give
440 # the function a weak reference to itself to avoid a reference cycle.
--> 441 return weak_wrapped_fn().__wrapped__(*args, **kwds)
442 weak_wrapped_fn = weakref.ref(wrapped_fn)
443
/usr/local/lib/python3.6/dist-packages/tensorflow/python/framework/func_graph.py in wrapper(*args, **kwargs)
966 except Exception as e: # pylint:disable=broad-except
967 if hasattr(e, "ag_error_metadata"):
--> 968 raise e.ag_error_metadata.to_exception(e)
969 else:
970 raise
AttributeError: in user code:
/usr/local/lib/python3.6/dist-packages/tf_explain/core/vanilla_gradients.py:61 compute_gradients *
expected_output = tf.one_hot([class_index] * images.shape[0], num_classes)
AttributeError: 'list' object has no attribute 'shape'
Upon checking vanilla_gradients.py, the function compute_gradients(images, model, class_index) expects the first argument to be -
images (numpy.ndarray): 4D-Tensor of images with shape (batch_size, H, W, 3)
Hello, I was trying to implement Vanilla Gradient Core API for my deep learning model and the tutorial did not work for me. I was implementing it on Google Colab. This is my code -
I am using a Densenet201 model. Following is the traceback that I got -
Upon checking
vanilla_gradients.py
, the function compute_gradients(images, model, class_index)
expects the first argument to be - images (numpy.ndarray): 4D-Tensor of images with shape (batch_size, H, W, 3)
Could you please help me solve this issue? It looks like I am passing a Python list where a numpy.ndarray is expected — should I convert my validation data with np.asarray before calling explainer.explain?