
Merge branch 'develop' into r0.5
ashahba committed Jun 23, 2023
2 parents 9b74b91 + d767ac8 commit ba89871
Showing 11 changed files with 77 additions and 83 deletions.
4 changes: 2 additions & 2 deletions explainer/attributions/attributions.py
@@ -215,11 +215,11 @@ def __init__(self,
self.shap_values = self.explainer.shap_values(self.targets, nsamples=nsamples)
self.info_panel = force_plot_info_panel

- def visualize(self) -> None:
+ def visualize(self):
'''
Display the force plot of the target example(s)
'''
- self.force_plot(self.explainer.expected_value, self.shap_values[0], self.targets)
+ return self.force_plot(self.explainer.expected_value, self.shap_values[0], self.targets)


class PartitionExplainer(FeatureAttributions):
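Why this change matters: shap's force plot is an object that Jupyter renders when it is the value of a cell, so returning it from visualize() lets the plot display inline and gives callers a handle on it. A minimal sketch, assuming a kernel-explainer wrapper named ke as constructed elsewhere in these notebooks:

# Hypothetical usage of the updated API: visualize() now returns the
# shap force plot instead of None, so the returned object becomes the
# notebook cell's output and is rendered inline.
plot = ke.visualize()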

@@ -7,6 +7,19 @@
"# Explaining Custom CNN CIFAR-10 Classification Using the Attributions Explainer"
]
},
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "# temp workaround for captum bug\n",
+ "# issue on captum: https://github.com/pytorch/captum/issues/1114\n",
+ "# Note: this matplotlib version is not guaranteed to work with any other application of intel-xai\n",
+ "\n",
+ "! pip install -U matplotlib==3.6.3 --no-deps"
+ ]
+ },
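The new cell above works around pytorch/captum#1114 by pinning matplotlib 3.6.3; --no-deps keeps pip from disturbing the rest of the environment. A defensive check one could run after the install, as a sketch rather than part of the committed notebook:

# Hypothetical guard: fail fast if the captum-workaround pin did not take effect.
import matplotlib

assert matplotlib.__version__ == "3.6.3", (
    "captum workaround expects matplotlib 3.6.3, found " + matplotlib.__version__
)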
{
"cell_type": "code",
"execution_count": null,
@@ -206,6 +219,15 @@
"net.eval()"
]
},
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "import matplotlib"
+ ]
+ },
{
"cell_type": "code",
"execution_count": null,
@@ -231,13 +253,6 @@
"attributions.smoothgrad(net).visualize(input,labels[ind],original_image,\"Smooth Grad\")\n",
"attributions.featureablation(net).visualize(input,labels[ind],original_image,\"Feature Ablation\")"
]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": []
}
],
"metadata": {
@@ -256,7 +271,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.9.6"
"version": "3.9.16"
},
"vscode": {
"interpreter": {

@@ -240,13 +240,6 @@
"ke = attributions.kernel_explainer(model, X_train.iloc[1:101, :], X_train.iloc[0, :])\n",
"ke.visualize()"
]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "metadata": {},
- "outputs": [],
- "source": []
}
],
"metadata": {
@@ -265,7 +258,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.9.6"
"version": "3.9.16"
},
"vscode": {
"interpreter": {

@@ -272,7 +272,7 @@
"source": [
"# instatiate gradient explainer object\n",
"# run the deep explainer\n",
"grViz = attributions.gradient_explainer(model, X_test[:100], X_test[matches[:6]], 2, classes)\n",
"grViz = attributions.gradient_explainer(model, X_test[:100], X_test[matches[:6]], classes, 2)\n",
"grViz.visualize()"
]
},
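The one-line change above reorders the trailing positional arguments so the class-name list comes before the integer. Parameter names are not visible in this diff, so the annotations below are assumptions; only the argument order is confirmed by the change:

# Sketch of the corrected call; comments give assumed meanings, not
# signatures taken from the library.
grViz = attributions.gradient_explainer(
    model,                # model under explanation
    X_test[:100],         # background sample
    X_test[matches[:6]],  # examples to explain
    classes,              # class names now come before the integer
    2,                    # integer argument (assumed: number of ranked outputs)
)
grViz.visualize()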
@@ -309,7 +309,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.9.6"
"version": "3.9.16"
}
},
"nbformat": 4,

@@ -16,7 +16,9 @@
"cell_type": "code",
"execution_count": null,
"id": "ad8a9723-5dbe-44eb-9baa-eca188e435f2",
"metadata": {},
"metadata": {
"scrolled": true
},
"outputs": [],
"source": [
"import numpy as np\n",
@@ -705,29 +707,29 @@
"from explainer import cam\n",
"final_image_dim = (224, 224)\n",
"targetLayer = viz_model._model.layer4\n",
"xgradcam = cam.xgradcam(viz_model._model, targetLayer, \n",
"xgc = cam.x_gradcam(viz_model._model, targetLayer, \n",
" label_map_func('Normal'), \n",
" images[0],\n",
" final_image_dim,\n",
" 'cpu')\n",
"\n",
"xgradcam.visualize()\n",
"xgc.visualize()\n",
"\n",
"xgradcam = cam.xgradcam(viz_model._model, targetLayer, \n",
"xgc = cam.x_gradcam(viz_model._model, targetLayer, \n",
" label_map_func('Normal'), \n",
" images[1],\n",
" final_image_dim,\n",
" 'cpu')\n",
"\n",
"xgradcam.visualize()\n",
"xgc.visualize()\n",
"\n",
"xgradcam = cam.xgradcam(viz_model._model, targetLayer, \n",
"xgc = cam.x_gradcam(viz_model._model, targetLayer, \n",
" label_map_func('Normal'), \n",
" images[2],\n",
" final_image_dim,\n",
" 'cpu')\n",
"\n",
"xgradcam.visualize()"
"xgc.visualize()"
]
},
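The three calls above differ only in the image index; an equivalent loop, using only names that appear in the diff, keeps the cell shorter:

# Sketch: the same three XGrad-CAM visualizations for the 'Normal' class in a loop.
for image in images[:3]:
    xgc = cam.x_gradcam(viz_model._model, targetLayer,
                        label_map_func('Normal'),
                        image, final_image_dim, 'cpu')
    xgc.visualize()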
{
@@ -777,30 +779,29 @@
"\n",
"final_image_dim = (224, 224)\n",
"targetLayer = viz_model._model.layer4\n",
"#targetLayer = \"_IPEXConv2d-169\"\n",
"xgradcam = cam.xgradcam(viz_model._model, targetLayer, \n",
"xgc = cam.x_gradcam(viz_model._model, targetLayer, \n",
" label_map_func('Benign'), \n",
" images[0],\n",
" final_image_dim,\n",
" 'cpu')\n",
"\n",
"xgradcam.visualize()\n",
"xgc.visualize()\n",
"\n",
"xgradcam = cam.xgradcam(viz_model._model, targetLayer, \n",
"xgc = cam.x_gradcam(viz_model._model, targetLayer, \n",
" label_map_func('Benign'), \n",
" images[1],\n",
" final_image_dim,\n",
" 'cpu')\n",
"\n",
"xgradcam.visualize()\n",
"xgc.visualize()\n",
"\n",
"xgradcam = cam.xgradcam(viz_model._model, targetLayer, \n",
"xgc = cam.x_gradcam(viz_model._model, targetLayer, \n",
" label_map_func('Benign'), \n",
" images[2],\n",
" final_image_dim,\n",
" 'cpu')\n",
"\n",
"xgradcam.visualize()"
"xgc.visualize()"
]
},
{
@@ -1121,7 +1122,7 @@
"source": [
"import transformers\n",
"transformers.set_seed(1)\n",
"nlp_history = nlp_model.train(train_nlp_dataset, output_dir, epochs=3, use_trainer=True)"
"nlp_history = nlp_model.train(train_nlp_dataset, output_dir, epochs=3, use_trainer=True, seed=1)"
]
},
{
@@ -1281,7 +1282,7 @@
"outputs": [],
"source": [
"from explainer import attributions\n",
"partition_explainer = attributions.partition_explainer(f, r\"\\W+\", test_nlp_dataset.class_names)(np.array(mal_classified_as_ben_text))\n",
"partition_explainer = attributions.partition_text_explainer(f, test_nlp_dataset.class_names, np.array(mal_classified_as_ben_text), r\"\\W+\")\n",
"partition_explainer.visualize()"
]
},
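Across the notebooks, this commit replaces the curried partition_explainer(f, pattern, class_names)(text) form with a single partition_text_explainer call that takes the prediction function, the class names, the text batch, and the token-split pattern, in that order. A side-by-side sketch, where texts and class_names are placeholders for the notebook's own variables and f is the prediction function defined earlier in each notebook:

import numpy as np
from explainer import attributions

texts = ["example document one", "example document two"]  # placeholder inputs
class_names = ["benign", "malignant"]                     # placeholder labels

# Old, removed API (two steps: construct, then call with the batch):
# explainer = attributions.partition_explainer(f, r"\W+", class_names)(np.array(texts))

# New API (one step; the regex masker pattern is now the last argument):
explainer = attributions.partition_text_explainer(f, class_names, np.array(texts), r"\W+")
explainer.visualize()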
@@ -1305,8 +1306,8 @@
"outputs": [],
"source": [
"from intel_extension_for_transformers.optimization.trainer import NLPTrainer\n",
"from intel_extension_for_transformers import objectives, OptimizedModel, QuantizationConfig\n",
"from intel_extension_for_transformers import metrics as nlptk_metrics"
"from intel_extension_for_transformers.optimization import objectives, OptimizedModel, QuantizationConfig\n",
"from intel_extension_for_transformers.optimization import metrics as nlptk_metrics"
]
},
{
@@ -1389,7 +1390,7 @@
"metadata": {},
"outputs": [],
"source": [
"quantized_model.save(os.path.join(output_dir, 'quantized_BERT'))\n",
"quantizer.save_model(os.path.join(output_dir, 'quantized_BERT'))\n",
"nlp_model._model.config.save_pretrained(os.path.join(output_dir, 'quantized_BERT'))"
]
},
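Saving now goes through the quantizer object rather than the model. Reloading is typically done with OptimizedModel, which the notebook imports above; the load call below is an assumption for illustration, not something shown in this diff:

import os

save_path = os.path.join(output_dir, 'quantized_BERT')
quantizer.save_model(save_path)                      # new save API from this commit
nlp_model._model.config.save_pretrained(save_path)   # keep the config beside the weights

# Assumed reload path (signature not confirmed by this diff):
# model = OptimizedModel.from_pretrained(save_path)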
@@ -1504,7 +1505,7 @@
"viz_weight = test_viz_metrics[1]\n",
"\n",
"# final weight of nlp is its overall validation accuracy\n",
"nlp_weight = eval_acc\n",
"nlp_weight = test_nlp_metrics['eval_accuracy']\n",
"\n",
"def convert_nomenclature(df_pid):\n",
" return 'P' + df_pid[:-1] + '_' + df_pid[-1]\n",
@@ -1597,7 +1598,7 @@
"oh_y_pred = np.eye(n_values)[y_pred]\n",
"y_true = [label_map_func(i) for i in label]\n",
"\n",
"ensemble_cm = metrics.confusion_matrix(oh_y_true, oh_y_pred, test_nlp_dataset.class_names)\n",
"ensemble_cm = metrics.confusion_matrix(y_true, oh_y_pred, test_nlp_dataset.class_names)\n",
"ensemble_cm.visualize()\n",
"print(ensemble_cm.report)"
]
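The fix passes the plain index list y_true, built a few lines earlier via label_map_func, instead of the undefined one-hot oh_y_true, while predictions stay one-hot. The expected shapes, as a small self-contained sketch with illustrative values:

import numpy as np

n_values = 3                          # number of classes (illustrative)
y_pred = np.array([0, 2, 1])          # predicted class indices
oh_y_pred = np.eye(n_values)[y_pred]  # one-hot matrix, shape (3, 3)
y_true = [0, 2, 2]                    # plain indices, as in the fixed call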
@@ -1640,42 +1641,40 @@
"final_image_dim = (224, 224)\n",
"targetLayer = viz_model._model.layer4\n",
"#targetLayer = \"_IPEXConv2d-169\"\n",
"xgradcam = cam.xgradcam(viz_model._model, targetLayer, \n",
"xgc = cam.x_gradcam(viz_model._model, targetLayer, \n",
" label_map_func('Malignant'), \n",
" images[0],\n",
" final_image_dim,\n",
" 'cpu')\n",
"\n",
"xgradcam.visualize()\n",
"xgc.visualize()\n",
"\n",
"xgradcam = cam.xgradcam(viz_model._model, targetLayer, \n",
"xgc = cam.x_gradcam(viz_model._model, targetLayer, \n",
" label_map_func('Malignant'), \n",
" images[1],\n",
" final_image_dim,\n",
" 'cpu')\n",
"\n",
"xgradcam.visualize()\n",
"xgc.visualize()\n",
"\n",
"xgradcam = cam.xgradcam(viz_model._model, targetLayer, \n",
"xgc = cam.x_gradcam(viz_model._model, targetLayer, \n",
" label_map_func('Malignant'), \n",
" images[2],\n",
" final_image_dim,\n",
" 'cpu')\n",
"\n",
"xgradcam.visualize()\n",
"xgc.visualize()\n",
"\n",
"xgradcam = cam.xgradcam(viz_model._model, targetLayer, \n",
"xgc = cam.x_gradcam(viz_model._model, targetLayer, \n",
" label_map_func('Malignant'), \n",
" images[3],\n",
" final_image_dim,\n",
" 'cpu')\n",
"\n",
"xgradcam.visualize()\n",
"xgc.visualize()\n",
"text_for_shap = np.expand_dims(np.array(ensemble_results.iloc[image_idx]['text']), axis=0)\n",
"ensemble_partition_explainer = attributions.partition_explainer(f, r\"\\W+\", test_nlp_dataset.class_names)\\\n",
" (text_for_shap)\n",
"ensemble_partition_explainer.visualize()\n",
"\n"
"ensemble_partition_explainer = attributions.partition_text_explainer(f, test_nlp_dataset.class_names, text_for_shap, r\"\\W+\")\n",
"ensemble_partition_explainer.visualize()"
]
},
{
4 changes: 1 addition & 3 deletions notebooks/explainer/multimodal_cancer_detection/README.md

@@ -37,11 +37,9 @@ The `dataset_utils.py` holds the supporting functions that prepare the image and

To run `Multimodal_Cancer_Detection.ipynb`, install the following dependencies:
1. [Intel® Explainable AI](https://github.com/IntelAI/intel-xai-tools)
- 2. [Intel® Transfer Learning Tool](https://github.com/IntelAI/transfer-learning)
+ 2. `pip install intel-transfer-learning-tool==0.5`
3. `pip install intel-extension-for-transformers`
4. `pip install scikit-image`
5. `pip install jupyterlab`
6. `pip install jupyter-dash`
7. `pip install nltk`
8. `pip install docx2txt`
9. `pip install openpyxl`

@@ -16,7 +16,6 @@ This notebook demonstrates how to use the attributions explainer API to explain

To run `partitionexplainer.ipynb`, install the following dependencies:
1. [Intel® Explainable AI](https://github.com/IntelAI/intel-xai-tools)
- 2. pip install jupyter-dash

## References


@@ -273,7 +273,7 @@
" preds = model.predict(X_batch)\n",
" return preds\n",
"\n",
"partition_explainer = attributions.partition_explainer(make_predictions, r\"\\W+\", selected_categories)(X_batch_text)"
"partition_explainer = attributions.partition_text_explainer(make_predictions, selected_categories, X_batch_text, r\"\\W+\")"
]
},
{
@@ -468,7 +468,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.9.6"
"version": "3.9.16"
}
},
"nbformat": 4,

@@ -490,6 +490,7 @@
"\n",
" # If training_args are given, we use the `Trainer` API to train the model\n",
" if self.training_args:\n",
" self.model.train()\n",
" self.trainer = Trainer(model=self.model,\n",
" args=self.training_args,\n",
" train_dataset=self.train_ds,\n",
@@ -534,6 +535,7 @@
" def evaluate(self, batch_size=16):\n",
" \n",
" if self.trainer:\n",
" self.model.eval()\n",
" metrics = self.trainer.evaluate()\n",
" for key in metrics.keys():\n",
" print(\"{}: {}\".format(key, metrics[key]))\n",
@@ -872,8 +874,7 @@
"from explainer import attributions\n",
"# Get shap values\n",
"text_for_shap = dataset.dataset['test'][:10]['text']\n",
"partition_explainer = attributions.partition_explainer(f, r\"\\W+\", dataset.class_labels.names)\n",
"partition_explainer(text_for_shap)"
"partition_explainer = attributions.partition_text_explainer(f, dataset.class_labels.names, text_for_shap, r\"\\W+\", )"
]
},
{

@@ -20,7 +20,10 @@ The notebook performs the following steps:

## Running the notebook

To run the notebook, follow the instructions to setup the [PyTorch notebook environment](/notebooks#pytorch-environment).

+ To run `PyTorch_Text_Classifier_fine_tuning_with_Attributions.ipynb`, install the following dependencies:
+ 1. [Intel® Explainable AI](https://github.com/IntelAI/intel-xai-tools)
+ 2. `pip install intel-transfer-learning-tool==0.5`

## References
