sandun herath

Reputation: 13

SHAP DeepExplainer gives an error for model with 1D CNN as a first layer

Initially I tried KernelExplainer, but it would not work because my dataset has shape (no. of samples, 186, 1); from a few forums I gathered that KernelExplainer does not support inputs with more than 2 dimensions (correct me if I am wrong).
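As an aside, one workaround I have seen suggested for KernelExplainer is to flatten each sample and reshape it back inside a prediction wrapper. A rough sketch (the wrapper name and the sample counts are just placeholders; I have not verified this on my data):

import numpy as np
import shap

# Flatten each (186, 1) sample into a plain 186-vector so KernelExplainer
# sees 2D data, and restore the 3D shape inside the prediction wrapper.
background = x_train[:50].reshape(50, -1)

def predict_flat(x_2d):
    return cnn.predict(x_2d.reshape(-1, x_train.shape[1], 1))

ke = shap.KernelExplainer(predict_flat, background)
ke_values = ke.shap_values(x_test[:5].reshape(5, -1), nsamples=100)

So, I switched to DeepExplainer. My model is: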

from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import (Conv1D, BatchNormalization, Dense,
                                     Dropout, Flatten, MaxPooling1D)

cnn = Sequential()
cnn.add(Conv1D(128, 3, input_shape=(x_train.shape[1], 1), activation='relu'))
cnn.add(BatchNormalization())
cnn.add(MaxPooling1D(pool_size=2))
cnn.add(Conv1D(64, 3, activation='relu'))
cnn.add(BatchNormalization())
cnn.add(MaxPooling1D(pool_size=2))
cnn.add(Conv1D(64, 2, activation='relu'))
cnn.add(BatchNormalization())
cnn.add(MaxPooling1D(pool_size=2))
cnn.add(Conv1D(64, 2, activation='relu'))
cnn.add(Flatten())
cnn.add(Dense(128, activation='relu'))
cnn.add(Dropout(0.5))
cnn.add(Dense(64, activation='relu'))
cnn.add(Dense(5, activation='softmax'))  # 5-class softmax output

and when I run this code

import numpy as np
import shap

# 50 random training samples as the background distribution
data = x_train[np.random.choice(x_train.shape[0], 50, replace=False)]

S1 = shap.DeepExplainer((cnn.layers[0].input, cnn.layers[-1].output), data)
ShapValues = S1.shap_values(x_test[1:20], check_additivity=False)

I received a new error: LookupError: gradient registry has no entry for: shap_AddV2

-------------------------------------------------------------------------------------
StagingError                              Traceback (most recent call last)
<ipython-input-110-e8372a4a287b> in <module>
      6 #calculating SHAP Values
      7 #Return approximate SHAP values for the model applied to the data given by X.
----> 8 ShapValues = S1.shap_values(x_test[1:20], check_additivity=False)

/opt/conda/lib/python3.7/site-packages/shap/explainers/_deep/__init__.py in shap_values(self, X, ranked_outputs, output_rank_order, check_additivity)
    122             were chosen as "top".
    123         """
--> 124         return self.explainer.shap_values(X, ranked_outputs, output_rank_order, check_additivity=check_additivity)

/opt/conda/lib/python3.7/site-packages/shap/explainers/_deep/deep_tf.py in shap_values(self, X, ranked_outputs, output_rank_order, check_additivity)
    306                 # run attribution computation graph
    307                 feature_ind = model_output_ranks[j,i]
--> 308                 sample_phis = self.run(self.phi_symbolic(feature_ind), self.model_inputs, joint_input)
    309 
    310                 # assign the attributions to the right part of the output arrays

/opt/conda/lib/python3.7/site-packages/shap/explainers/_deep/deep_tf.py in run(self, out, model_inputs, X)
    363 
    364                 return final_out
--> 365             return self.execute_with_overridden_gradients(anon)
    366 
    367     def custom_grad(self, op, *grads):

/opt/conda/lib/python3.7/site-packages/shap/explainers/_deep/deep_tf.py in execute_with_overridden_gradients(self, f)
    399         # define the computation graph for the attribution values using a custom gradient-like computation
    400         try:
--> 401             out = f()
    402         finally:
    403             # reinstate the backpropagatable check

/opt/conda/lib/python3.7/site-packages/shap/explainers/_deep/deep_tf.py in anon()
    359                     v = tf.constant(data, dtype=self.model_inputs[i].dtype)
    360                     inputs.append(v)
--> 361                 final_out = out(inputs)
    362                 tf_execute.record_gradient = tf_backprop._record_gradient
    363 

/opt/conda/lib/python3.7/site-packages/tensorflow/python/eager/def_function.py in __call__(self, *args, **kwds)
    826     tracing_count = self.experimental_get_tracing_count()
    827     with trace.Trace(self._name) as tm:
--> 828       result = self._call(*args, **kwds)
    829       compiler = "xla" if self._experimental_compile else "nonXla"
    830       new_tracing_count = self.experimental_get_tracing_count()

/opt/conda/lib/python3.7/site-packages/tensorflow/python/eager/def_function.py in _call(self, *args, **kwds)
    869       # This is the first call of __call__, so we have to initialize.
    870       initializers = []
--> 871       self._initialize(args, kwds, add_initializers_to=initializers)
    872     finally:
    873       # At this point we know that the initialization is complete (or less

/opt/conda/lib/python3.7/site-packages/tensorflow/python/eager/def_function.py in _initialize(self, args, kwds, add_initializers_to)
    724     self._concrete_stateful_fn = (
    725         self._stateful_fn._get_concrete_function_internal_garbage_collected(  # pylint: disable=protected-access
--> 726             *args, **kwds))
    727 
    728     def invalid_creator_scope(*unused_args, **unused_kwds):

/opt/conda/lib/python3.7/site-packages/tensorflow/python/eager/function.py in _get_concrete_function_internal_garbage_collected(self, *args, **kwargs)
   2967       args, kwargs = None, None
   2968     with self._lock:
-> 2969       graph_function, _ = self._maybe_define_function(args, kwargs)
   2970     return graph_function
   2971 

/opt/conda/lib/python3.7/site-packages/tensorflow/python/eager/function.py in _maybe_define_function(self, args, kwargs)
   3359 
   3360           self._function_cache.missed.add(call_context_key)
-> 3361           graph_function = self._create_graph_function(args, kwargs)
   3362           self._function_cache.primary[cache_key] = graph_function
   3363 

/opt/conda/lib/python3.7/site-packages/tensorflow/python/eager/function.py in _create_graph_function(self, args, kwargs, override_flat_arg_shapes)
   3204             arg_names=arg_names,
   3205             override_flat_arg_shapes=override_flat_arg_shapes,
-> 3206             capture_by_value=self._capture_by_value),
   3207         self._function_attributes,
   3208         function_spec=self.function_spec,

/opt/conda/lib/python3.7/site-packages/tensorflow/python/framework/func_graph.py in func_graph_from_py_func(name, python_func, args, kwargs, signature, func_graph, autograph, autograph_options, add_control_dependencies, arg_names, op_return_value, collections, capture_by_value, override_flat_arg_shapes)
    988         _, original_func = tf_decorator.unwrap(python_func)
    989 
--> 990       func_outputs = python_func(*func_args, **func_kwargs)
    991 
    992       # invariant: `func_outputs` contains only Tensors, CompositeTensors,

/opt/conda/lib/python3.7/site-packages/tensorflow/python/eager/def_function.py in wrapped_fn(*args, **kwds)
    632             xla_context.Exit()
    633         else:
--> 634           out = weak_wrapped_fn().__wrapped__(*args, **kwds)
    635         return out
    636 

/opt/conda/lib/python3.7/site-packages/tensorflow/python/framework/func_graph.py in wrapper(*args, **kwargs)
    975           except Exception as e:  # pylint:disable=broad-except
    976             if hasattr(e, "ag_error_metadata"):
--> 977               raise e.ag_error_metadata.to_exception(e)
    978             else:
    979               raise

StagingError: in user code:

    /opt/conda/lib/python3.7/site-packages/shap/explainers/_deep/deep_tf.py:248 grad_graph  *
        x_grad = tape.gradient(out, shap_rAnD)
    /opt/conda/lib/python3.7/site-packages/tensorflow/python/eager/backprop.py:1086 gradient  **
        unconnected_gradients=unconnected_gradients)
    /opt/conda/lib/python3.7/site-packages/tensorflow/python/eager/imperative_grad.py:77 imperative_grad
        compat.as_str(unconnected_gradients.value))
    /opt/conda/lib/python3.7/site-packages/tensorflow/python/eager/backprop.py:151 _gradient_function
        grad_fn = ops._gradient_registry.lookup(op_name)  # pylint: disable=protected-access
    /opt/conda/lib/python3.7/site-packages/tensorflow/python/framework/registry.py:99 lookup
        "%s registry has no entry for: %s" % (self._name, name))
LookupError: gradient registry has no entry for: shap_AddV2
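
While searching, I came across a workaround suggested in SHAP issue threads: register AddV2 (the TF2 name for the Add op) with the passthrough handler that SHAP already defines, before building the explainer. I have not confirmed this is correct, but the sketch is:

import shap

# Suggested workaround (untested): AddV2 is TF2's name for Add, so route it
# through SHAP's existing passthrough gradient handler.
shap.explainers._deep.deep_tf.op_handlers["AddV2"] = \
    shap.explainers._deep.deep_tf.passthrough

S1 = shap.DeepExplainer((cnn.layers[0].input, cnn.layers[-1].output), data)
ShapValues = S1.shap_values(x_test[1:20], check_additivity=False)

Another suggestion I saw is to call tf.compat.v1.disable_v2_behavior() before building the model, but I am unsure of its side effects.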

DeepExplainer worked as expected with images, but for this 1D case it is hard to figure out what is going wrong. I tried to find a similar tutorial on applying SHAP interpretability to 1D CNNs but couldn't. I would like help with selecting the proper explainer for this task.
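For example, would GradientExplainer be the right alternative here? My understanding is that it is based on expected gradients and avoids DeepExplainer's per-op gradient handlers, so I was considering something like this (again just a sketch, not verified):

import shap

# GradientExplainer (expected gradients) as a possible alternative to
# DeepExplainer for this Keras 1D CNN, using the same background data.
ge = shap.GradientExplainer(cnn, data)
GradValues = ge.shap_values(x_test[1:20])

Thank you.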

Upvotes: 1

Views: 162

Answers (0)
