Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

visualize confusion matrix #81

Merged
merged 11 commits into from
Jun 30, 2021
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion MANIFEST.in
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
include settings.ini
include pyproject.toml
include LICENSE
include CONTRIBUTING.md
include README.md
Expand Down
2 changes: 1 addition & 1 deletion chitra/__init__.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
"""A Deep Learning Computer Vision Utility library"""

__version__ = "0.1.0b1"
__version__ = "0.1.0b2"

from .image import Chitra
Empty file.
64 changes: 64 additions & 0 deletions chitra/visualization/metrics.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,64 @@
"""https://www.kaggle.com/grfiv4/plot-a-confusion-matrix"""
from itertools import product
from typing import List, Optional, Union

import matplotlib.pyplot as plt
import numpy as np
from sklearn.metrics import confusion_matrix

from chitra.logging import logger


def detect_multilabel(labels: Union[List, np.ndarray]) -> bool:
    """Return ``True`` when *labels* contains more than two distinct classes.

    Args:
        labels: Sequence of class labels (``list``, ``tuple`` or ``np.ndarray``).

    Returns:
        ``True`` if more than two unique labels are present (multi-label /
        multi-class data), ``False`` for binary labels.

    Raises:
        UserWarning: if ``labels`` is not a list, tuple or ``np.ndarray``.
            NOTE(review): ``TypeError`` would be more conventional here, but
            callers (and the test suite) catch ``UserWarning``, so the
            exception type is part of the public contract and is kept.
    """
    if not isinstance(labels, (np.ndarray, list, tuple)):
        raise UserWarning(
            f"expected labels of type list, tuple or np.ndarray but got {type(labels)}"
        )
    return len(np.unique(labels)) > 2


def cm_accuracy(cm: np.ndarray) -> float:
    """Overall accuracy from a confusion matrix: trace / total count.

    Args:
        cm: Square confusion matrix of non-negative counts.

    Returns:
        Fraction of correctly classified samples (diagonal sum over total).
        Returns ``0.0`` for an all-zero matrix instead of dividing by zero
        (the original raised a NumPy warning and returned ``nan``).
    """
    total = float(np.sum(cm))
    if total == 0:
        return 0.0
    return np.trace(cm) / total


def plot_confusion_matrix(y_pred: Union[np.ndarray, List],
                          y_true: Union[np.ndarray, List],
                          display_labels=None,
                          include_values: bool = True,
                          title: str = 'Confusion Matrix',
                          cmap: Optional[str] = None):
    """Plot a confusion matrix of ``y_pred`` against ``y_true`` with matplotlib.

    Args:
        y_pred: Predicted labels.
        y_true: Ground-truth labels.
        display_labels: Tick labels for the classes. Defaults to the unique
            values of ``y_true``. NOTE(review): if ``y_pred`` contains a class
            absent from ``y_true``, ``confusion_matrix`` grows larger than
            ``display_labels`` — pass explicit labels in that case.
        include_values: Write the cell counts onto the plot.
        title: Figure title.
        cmap: Matplotlib colormap name; defaults to ``'Blues'``.

    Returns:
        None. Shows the figure as a side effect via ``plt.show()``.
    """
    if detect_multilabel(y_true):
        logger.warning("You might want to use multi-label version!")

    if display_labels is None:
        display_labels = np.unique(y_true)

    n_classes = len(display_labels)
    tick_marks = np.arange(n_classes)

    if cmap is None:
        cmap = plt.get_cmap('Blues')

    # sklearn convention: rows are true labels, columns are predictions.
    cm = confusion_matrix(y_true, y_pred)
    accuracy = cm_accuracy(cm)
    error = 1 - accuracy

    plt.imshow(cm, cmap=cmap)

    if include_values:
        # BUGFIX: plt.text takes (x, y) = (column, row). The original called
        # plt.text(i, j, cm[i, j]), which rendered the matrix transposed.
        for i, j in product(range(n_classes), range(n_classes)):
            plt.text(j, i, "{:,}".format(cm[i, j]))

    plt.xticks(tick_marks, display_labels, rotation=45)
    plt.yticks(tick_marks, display_labels)
    plt.title(title)
    plt.xlabel(
        f'Predicted Label\nAccuracy={accuracy:0.4f}; Error={error:0.4f}')
    plt.ylabel('True Label')

    plt.show()
1 change: 0 additions & 1 deletion docs/.gitignore

This file was deleted.

42 changes: 0 additions & 42 deletions docs/sitemap.xml

This file was deleted.

15 changes: 15 additions & 0 deletions docs/source/visualization/metrics.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,15 @@
# Visualizing Metrics

## Plot Confusion Matrix

```python
from chitra.visualization.metrics import plot_confusion_matrix

y_pred = [1, 1, 0, 1]
y_true = [0, 1, 0, 1]
display_labels = ('class A', 'class B')

plot_confusion_matrix(y_pred, y_true, display_labels=display_labels)
```

![Preview](./preview.png)
Binary file added docs/source/visualization/preview.png
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
File renamed without changes
File renamed without changes
3 changes: 3 additions & 0 deletions examples/examples/model_server.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
from chitra.serve import create_api


def identity_model(x):
    """Placeholder model: echoes its input back unchanged."""
    return x


# Serve the placeholder model as a question-answering API and start it
# immediately (run=True blocks and launches the server).
app = create_api(identity_model, run=True, api_type='question-ans')
7 changes: 7 additions & 0 deletions examples/examples/visualization/confusion_matrix.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,7 @@
from chitra.visualization.metrics import plot_confusion_matrix

# Toy binary-classification run: one false positive in four samples.
predictions = [1, 1, 0, 1]
ground_truth = [0, 1, 0, 1]
class_names = ('class A', 'class B')

plot_confusion_matrix(predictions, ground_truth, display_labels=class_names)
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
4 changes: 0 additions & 4 deletions nbs/examples/model_server.py

This file was deleted.

14 changes: 4 additions & 10 deletions pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -30,7 +30,8 @@ requires = [
"matplotlib",
"pillow",
"imgaug >=0.4.0",
"requests >=2.24.0,<3.0.0"
"requests >=2.24.0,<3.0.0",
"scikit-learn",
]
description-file = "README.md"
requires-python = ">=3.7"
Expand All @@ -40,17 +41,10 @@ Documentation = "https://chitra.readthedocs.io/en/latest"

[tool.flit.metadata.requires-extra]
all = [
# Converter
"onnx",
"onnx2pytorch",
"tf2onnx",
# Server
"onnx", "onnx2pytorch", "tf2onnx",
"fastapi", "uvicorn", "pydantic",
# tf
"tensorflow-serving-api",
# torch
"grpc",
"torch"
"torch",
]

[tool.isort]
Expand Down
27 changes: 27 additions & 0 deletions tests/test_visualization_metrics.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,27 @@
import numpy as np
import pytest

from chitra.visualization.metrics import cm_accuracy
from chitra.visualization.metrics import detect_multilabel
from chitra.visualization.metrics import plot_confusion_matrix


def test_detect_multilabel():
    """Unsupported input types raise UserWarning; >2 classes is multi-label."""
    with pytest.raises(UserWarning):
        detect_multilabel({"label1": "this will raise UserWarning"})

    assert detect_multilabel([1, 2, 3, 4]), "four classes should be multi-label"
    assert not detect_multilabel([0, 1, 1, 0]), "binary labels are not"


def test_cm_accuracy():
    """Diagonal sum (1 + 2) over total count (6) gives 0.5."""
    matrix = np.asarray([[1, 2], [1, 2]])
    assert cm_accuracy(matrix) == 0.5


def test_plot_confusion_matrix():
    """Plotting runs end-to-end and, like other pyplot helpers, returns None."""
    preds = [1, 1, 0, 1]
    truth = [0, 1, 0, 1]
    names = ('watermark', 'non watermark')

    result = plot_confusion_matrix(preds, truth, display_labels=names)
    assert result is None