hiranumn / IntegratedGradients

Python/Keras implementation of integrated gradients, as presented in "Axiomatic Attribution for Deep Networks", for explaining any model defined in the Keras framework.
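For context, typical usage looks roughly like this (a minimal sketch following the README's interface; `integrated_gradients` and `explain` are the wrapper class and method this repository exposes):

```python
# Minimal usage sketch based on the README; the toy model and random
# sample are illustrative, not part of the library.
import numpy as np
from keras.models import Sequential
from keras.layers import Dense
from IntegratedGradients import integrated_gradients

# Toy single-input model; any compiled Keras model is wrapped the same way.
model = Sequential([Dense(1, input_dim=4, activation='sigmoid')])
model.compile(loss='binary_crossentropy', optimizer='adam')

ig = integrated_gradients(model)
attributions = ig.explain(np.random.rand(4))  # one attribution per input feature
```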

Issue with multiple input #3

Open LucasSilvaFerreira opened 6 years ago

LucasSilvaFerreira commented 6 years ago

Hi, I'm still receiving an error; the error log and model are enclosed below.

Thank you


```
MissingInputError                         Traceback (most recent call last)
<ipython-input> in <module>()
----> 1 plot_roc(t_valores, t_grupos, t_conservation, 'model_used_to_paper.hdf5')

<ipython-input> in plot_roc(v_teste, c_teste, t_conservation, model_file)
    109
    110
--> 111     integrated_gradients(model=merged_model)

/home/lucas/PycharmProjects/MEGS_introns/deeplearning/IntegratedGradients.py in __init__(self, model, outchannels, verbose)
     77
     78         # This takes a lot of time for a big model with many tasks.
---> 79         # So lets pring the progress.
     80         if verbose:
     81             sys.stdout.write('\r')

/home/lucas/miniconda2/lib/python2.7/site-packages/keras/backend/theano_backend.pyc in function(inputs, outputs, updates, **kwargs)
    680             msg = "Invalid argument '%s' passed to K.function" % key
    681             raise ValueError(msg)
--> 682     return Function(inputs, outputs, updates=updates, **kwargs)
    683
    684

/home/lucas/miniconda2/lib/python2.7/site-packages/keras/backend/theano_backend.pyc in __init__(self, inputs, outputs, updates, **kwargs)
    666                                         allow_input_downcast=True,
    667                                         on_unused_input='ignore',
--> 668                                         **kwargs)
    669
    670     def __call__(self, inputs):

/home/lucas/.local/lib/python2.7/site-packages/theano/compile/function.pyc in function(inputs, outputs, mode, updates, givens, no_default_updates, accept_inplace, name, rebuild_strict, allow_input_downcast, profile, on_unused_input)
    324                    on_unused_input=on_unused_input,
    325                    profile=profile,
--> 326                    output_keys=output_keys)
    327     # We need to add the flag check_aliased inputs if we have any mutable or
    328     # borrowed used defined inputs

/home/lucas/.local/lib/python2.7/site-packages/theano/compile/pfunc.pyc in pfunc(params, outputs, mode, updates, givens, no_default_updates, accept_inplace, name, rebuild_strict, allow_input_downcast, profile, on_unused_input, output_keys)
    484                          accept_inplace=accept_inplace, name=name,
    485                          profile=profile, on_unused_input=on_unused_input,
--> 486                          output_keys=output_keys)
    487
    488

/home/lucas/.local/lib/python2.7/site-packages/theano/compile/function_module.pyc in orig_function(inputs, outputs, mode, accept_inplace, name, profile, on_unused_input, output_keys)
   1792                    profile=profile,
   1793                    on_unused_input=on_unused_input,
-> 1794                    output_keys=output_keys).create(
   1795             defaults)
   1796

/home/lucas/.local/lib/python2.7/site-packages/theano/compile/function_module.pyc in __init__(self, inputs, outputs, mode, accept_inplace, function_builder, profile, on_unused_input, fgraph, output_keys)
   1444             # OUTPUT VARIABLES)
   1445             fgraph, additional_outputs = std_fgraph(inputs, outputs,
-> 1446                                                     accept_inplace)
   1447             fgraph.profile = profile
   1448         else:

/home/lucas/.local/lib/python2.7/site-packages/theano/compile/function_module.pyc in std_fgraph(input_specs, output_specs, accept_inplace)
    175
    176     fgraph = gof.fg.FunctionGraph(orig_inputs, orig_outputs,
--> 177                                   update_mapping=update_mapping)
    178
    179     for node in fgraph.apply_nodes:

/home/lucas/.local/lib/python2.7/site-packages/theano/gof/fg.pyc in __init__(self, inputs, outputs, features, clone, update_mapping)
    178
    179         for output in outputs:
--> 180             self.__import_r__(output, reason="init")
    181         for i, output in enumerate(outputs):
    182             output.clients.append(('output', i))

/home/lucas/.local/lib/python2.7/site-packages/theano/gof/fg.pyc in __import_r__(self, variable, reason)
    349         # Imports the owners of the variables
    350         if variable.owner and variable.owner not in self.apply_nodes:
--> 351             self.__import__(variable.owner, reason=reason)
    352         elif (variable.owner is None and
    353               not isinstance(variable, graph.Constant) and

/home/lucas/.local/lib/python2.7/site-packages/theano/gof/fg.pyc in __import__(self, apply_node, check, reason)
    395                                      % (node.inputs.index(r), str(node)))
    396                     error_msg += get_variable_trace_string(r)
--> 397                     raise MissingInputError(error_msg, variable=r)
    398
    399         for node in new_nodes:

MissingInputError: Input 0 of the graph (indices start from 0), used to compute InplaceDimShuffle{0,1,2,x}(convolution1d_input_34), was not provided and not given a value. Use the Theano flag exception_verbosity='high', for more information on this error.
Backtrace when that variable is created:
  File "/home/lucas/miniconda2/lib/python2.7/site-packages/IPython/core/interactiveshell.py", line 2881, in run_code
    exec(code_obj, self.user_global_ns, self.user_ns)
  File "<ipython-input>", line 1, in <module>
    plot_roc( t_valores, t_grupos, t_conservation, 'model_used_to_paper.hdf5')
  File "<ipython-input>", line 16, in plot_roc
    model_up_conservation.add(Convolution1D(100, filter_first_layer_size, input_shape=(SIZE_SEQUENCE, 1), name='up_conservation_intron_layer'))
  File "/home/lucas/miniconda2/lib/python2.7/site-packages/keras/models.py", line 276, in add
    layer.create_input_layer(batch_input_shape, input_dtype)
  File "/home/lucas/miniconda2/lib/python2.7/site-packages/keras/engine/topology.py", line 367, in create_input_layer
    dtype=input_dtype, name=name)
  File "/home/lucas/miniconda2/lib/python2.7/site-packages/keras/engine/topology.py", line 1085, in Input
    input_tensor=tensor)
  File "/home/lucas/miniconda2/lib/python2.7/site-packages/keras/engine/topology.py", line 1008, in __init__
    name=self.name)
  File "/home/lucas/miniconda2/lib/python2.7/site-packages/keras/backend/theano_backend.py", line 54, in placeholder
    x = T.TensorType(dtype, broadcast)(name)
Backtrace when the variable is created:
  (identical to the backtrace above)
```

```python
def plot_roc(v_teste, c_teste, t_conservation, model_file):
    filter_first_layer_size = 6

    model_up = Sequential()
    model_up.add(Convolution1D(20, filter_first_layer_size, input_shape=(SIZE_SEQUENCE, 4), name='up_intron_layer'))
    model_up.add(Activation('relu'))
    #model_up.add(MaxPooling1D(3))
    model_up.add(Dropout(0.5))
    model_up.add(Convolution1D(100, 3))
    model_up.add(Activation('relu'))
    model_up.add(MaxPooling1D(2))
    #model_up.add(Dropout(0.1))

    model_up_conservation = Sequential()
    model_up_conservation.add(Convolution1D(100, filter_first_layer_size, input_shape=(SIZE_SEQUENCE, 1), name='up_conservation_intron_layer'))
    model_up_conservation.add(Activation('relu'))

    merged_conservation_up = Sequential()
    merged_conservation_up.add(Merge([model_up, model_up_conservation], mode='concat', concat_axis=1))

    model_down = Sequential()
    model_down.add(Convolution1D(20, filter_first_layer_size, input_shape=(SIZE_SEQUENCE, 4), name='down_intron_layer'))
    model_down.add(Activation('relu'))
    #model_down.add(MaxPooling1D(3))
    model_down.add(Dropout(0.5))
    model_down.add(Convolution1D(100, 3))
    model_down.add(Activation('relu'))
    model_down.add(MaxPooling1D(2))
    #model_down.add(Dropout(0.1))

    model_down_conservation = Sequential()
    model_down_conservation.add(Convolution1D(100, filter_first_layer_size, input_shape=(SIZE_SEQUENCE, 1), name='down_conservation_intron_layer'))
    model_down_conservation.add(Activation('relu'))

    merged_conservation_down = Sequential()
    merged_conservation_down.add(Merge([model_down, model_down_conservation], mode='concat', concat_axis=1))

    merged_model = Sequential()
    merged_model.add(Merge([merged_conservation_up, merged_conservation_down], mode='concat', concat_axis=1))
    #print merged_model.summary()
    #model.add(MaxPooling2D(pool_size=(2, 2)))
    merged_model.add(Flatten())
    #model.add(Dense(12, input_dim=[4, 200], init='uniform', activation='relu'))
    #model.add(Dense(8, init='uniform', activation='relu'))
    #model.add(Dense(1, init='uniform', activation='sigmoid'))
    merged_model.add(Dense(100))
    merged_model.add(Activation('relu'))
    merged_model.add(Dropout(0.5))
    merged_model.add(Dense(100))
    merged_model.add(Activation('relu'))
    #merged_model.add(Dropout(0.75))
    merged_model.add(Dense(output_dim=1))
    merged_model.add(Activation('sigmoid'))

    conser_up_teste = np.array([v for v_u in t_conservation for v in v_u[0]])
    print conser_up_teste.shape
    conser_up_teste = conser_up_teste.reshape(len(conser_up_teste) / SIZE_SEQUENCE, SIZE_SEQUENCE, 1)
    print 'reshaped', conser_up_teste.shape

    conser_down_teste = np.array([v for v_u in t_conservation for v in v_u[1]])
    print conser_down_teste.shape
    conser_down_teste = conser_down_teste.reshape(len(conser_down_teste) / SIZE_SEQUENCE, SIZE_SEQUENCE, 1)
    print 'reshaped', conser_down_teste.shape
    print conser_down_teste[0].shape
    print 'saindo'
    print np.array([v_down[SIZE_SEQUENCE:] for v_down in v_teste]).shape

    merged_model.load_weights(model_file)
    merged_model.compile(loss='binary_crossentropy', optimizer='adam', metrics=['accuracy'])

    scores = merged_model.evaluate(
        [np.array([v_up[:SIZE_SEQUENCE] for v_up in v_teste]),
         conser_up_teste,
         np.array([v_down[SIZE_SEQUENCE:] for v_down in v_teste]),
         conser_down_teste],
        np.array([c_ts for c_ts in c_teste]),
        verbose=0)
    print("%s: %.2f%%" % (merged_model.metrics_names[1], scores[1] * 100))

    print('Predicting on test data')
    y_score = merged_model.predict(
        [np.array([v_up[:SIZE_SEQUENCE] for v_up in v_teste]),
         conser_up_teste,
         np.array([v_down[SIZE_SEQUENCE:] for v_down in v_teste]),
         conser_down_teste])
    #y_score = model.predict(X_test)

    print('Generating results')
    generate_results(np.array([c_ts for c_ts in c_teste]), y_score[:, 0])

    integrated_gradients(model=merged_model)
```
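For readers hitting the same error: the `MissingInputError` says the compiled Theano function depends on the placeholder `convolution1d_input_34` (one of the conservation-branch inputs) but was never handed it. That is the classic symptom of compiling `K.function` from a single input tensor while the merged graph actually depends on four. A minimal sketch of the multi-input-safe pattern, with `model` standing in for the merged model above (an assumption about the failure mode, not the library's actual code):

```python
# Hedged sketch: every placeholder the merged graph depends on must be
# passed to K.function, or Theano raises MissingInputError.
import keras.backend as K

# Gradient of output channel 0 w.r.t. EVERY input tensor of the model.
grads = K.gradients(K.sum(model.output[:, 0]), model.inputs)

# model.inputs is the full list of placeholders; compiling against a
# single model.input would leave the conservation-branch inputs missing.
get_gradients = K.function(model.inputs, grads)
```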
hiranumn commented 6 years ago

Does it work with tensorflow backend?
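For anyone trying this suggestion: the Keras backend has to be selected before `keras` is first imported, either in `~/.keras/keras.json` or via the `KERAS_BACKEND` environment variable. A quick sanity check:

```python
# Set the backend before importing keras: either put
# {"backend": "tensorflow"} in ~/.keras/keras.json (persistent), or
# export KERAS_BACKEND per process, as below.
import os
os.environ["KERAS_BACKEND"] = "tensorflow"

import keras.backend as K
print(K.backend())  # expect "tensorflow"
```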

LucasSilvaFerreira commented 6 years ago

Hi, thanks for helping. I changed the backend and received this error:

```
AttributeError                            Traceback (most recent call last)
<ipython-input> in <module>()
----> 1 plot_roc(t_valores, t_grupos, t_conservation, 'model_used_to_paper.hdf5')

<ipython-input> in plot_roc(v_teste, c_teste, t_conservation, model_file)
    109
    110
--> 111     integrated_gradients(model=merged_model)

/home/lucas/PycharmProjects/MEGS_introns/deeplearning/IntegratedGradients.py in __init__(self, model, outchannels, verbose)
     55             elif K.backend() == "theano":
     56                 self.outchannels = range(model1.output._keras_shape[1])
---> 57         else:
     58             if verbose:
     59                 print "Evaluated output channels (0-based index):",

AttributeError: 'Tensor' object has no attribute 'shape'
```
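The trace points at the branch that reads the model's output shape: on this TensorFlow/Keras combination the output tensor evidently has no `.shape` attribute (older `tf.Tensor` objects expose `get_shape()` instead, and Keras tensors carry `_keras_shape`). A backend-agnostic sketch of the same lookup, assuming `K.int_shape` exists in the installed Keras version (it is not the library's own code):

```python
# Hedged workaround sketch: K.int_shape returns the static shape as a
# tuple on both Theano and TensorFlow backends.
import keras.backend as K

n_channels = K.int_shape(model.output)[1]   # e.g. 1 for the sigmoid head
outchannels = range(n_channels)
```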
hiranumn commented 6 years ago

I cannot seem to reproduce the error. Did anyone else encounter this?
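To help narrow it down, a hypothetical minimal reproduction of the multi-input case (names like `a` and `b` are illustrative; written against the Keras 2 functional API, whereas on Keras 1.x the equivalents are `merge(..., mode='concat')` and `Model(input=..., output=...)`):

```python
# Hypothetical two-input model passed to integrated_gradients; whether
# explain() accepts the list-of-arrays form is exactly what this issue
# is probing.
import numpy as np
from keras.models import Model
from keras.layers import Input, Dense, concatenate
from IntegratedGradients import integrated_gradients

a = Input(shape=(8,))
b = Input(shape=(4,))
merged = concatenate([Dense(16, activation='relu')(a),
                      Dense(16, activation='relu')(b)])
out = Dense(1, activation='sigmoid')(merged)

model = Model(inputs=[a, b], outputs=out)
model.compile(loss='binary_crossentropy', optimizer='adam')

ig = integrated_gradients(model)                      # the call under test
ig.explain([np.random.rand(8), np.random.rand(4)])    # multi-input sample
```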