Prepare Lite for Core Migration #783

Merged (8 commits) on Oct 9, 2024
18 changes: 16 additions & 2 deletions .github/workflows/lite-tests-and-coverage.yml
@@ -36,7 +36,7 @@ jobs:
- name: run object detection tests and report coverage
run: |
pip install -e ".[test]"
COVERAGE_FILE=.coverage.detection python -m coverage run --omit "tests/*" -m pytest -v tests/detection/
COVERAGE_FILE=.coverage.detection python -m coverage run --omit "tests/*" -m pytest -v tests/object_detection/
python -m coverage combine
python -m coverage report -m
python -m coverage json
@@ -50,7 +50,7 @@ jobs:
- name: run semantic segmentation tests and report coverage
run: |
pip install -e ".[test]"
COVERAGE_FILE=.coverage.segmentation python -m coverage run --omit "tests/*" -m pytest -v tests/segmentation/
COVERAGE_FILE=.coverage.segmentation python -m coverage run --omit "tests/*" -m pytest -v tests/semantic_segmentation/
python -m coverage combine
python -m coverage report -m
python -m coverage json
@@ -61,3 +61,17 @@ jobs:
exit 1
fi
working-directory: ./lite
# - name: run nlp generation tests and report coverage
# run: |
# pip install -e ".[test]"
# COVERAGE_FILE=.coverage.generation python -m coverage run --omit "tests/*" -m pytest -v tests/text_generation
# python -m coverage combine
# python -m coverage report -m
# python -m coverage json
# export TOTAL=$(python -c "import json;print(json.load(open('coverage.json'))['totals']['percent_covered_display'])")
# echo "total=$TOTAL" >> $GITHUB_ENV
# if (( $TOTAL < 90 )); then
# echo "Coverage is below 90%"
# exit 1
# fi
# working-directory: ./lite
140 changes: 139 additions & 1 deletion lite/README.md
@@ -1 +1,139 @@
# valor-lite: Compute classification, object detection, and segmentation metrics locally.
# valor-lite: Fast, local machine learning evaluation.

valor-lite is a lightweight, numpy-based library designed for fast and seamless evaluation of machine learning models. It is optimized for environments where quick, responsive evaluations are essential, whether as part of a larger service or embedded within user-facing tools.

valor-lite is maintained by Striveworks, a cutting-edge MLOps company based in Austin, Texas. If you'd like to learn more or have questions, we invite you to connect with us on [Slack](https://striveworks-public.slack.com/join/shared_invite/zt-1a0jx768y-2J1fffN~b4fXYM8GecvOhA#/shared-invite/email) or explore our [GitHub repository](https://github.com/striveworks/valor).

For additional details, be sure to check out our user [documentation](https://striveworks.github.io/valor/). We're excited to support you in making the most of Valor!

## Usage

### Classification

```python
from valor_lite.classification import DataLoader, Classification, MetricType

classifications = [
    Classification(
        uid="uid0",
        groundtruth="dog",
        predictions=["dog", "cat", "bird"],
        scores=[0.75, 0.2, 0.05],
    ),
    Classification(
        uid="uid1",
        groundtruth="cat",
        predictions=["dog", "cat", "bird"],
        scores=[0.41, 0.39, 0.1],
    ),
]

loader = DataLoader()
loader.add_data(classifications)
evaluator = loader.finalize()

metrics = evaluator.evaluate()

assert metrics[MetricType.Precision][0].to_dict() == {
    'type': 'Precision',
    'value': [0.5],
    'parameters': {
        'score_thresholds': [0.0],
        'hardmax': True,
        'label': 'dog'
    }
}
```
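
Each metric family returned by `evaluate()` is a list, so per-label results can be inspected in a loop. The following is a rough sketch rather than part of this PR, and it assumes one `Precision` entry per label, as suggested by the `label` field in the output above:

```python
# Sketch only: print the label and value of each precision metric.
for metric in metrics[MetricType.Precision]:
    result = metric.to_dict()
    print(result["parameters"]["label"], result["value"])
```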

### Object Detection

```python
from valor_lite.object_detection import DataLoader, Detection, BoundingBox, MetricType

detections = [
    Detection(
        uid="uid0",
        groundtruths=[
            BoundingBox(
                xmin=0, xmax=10,
                ymin=0, ymax=10,
                labels=["dog"]
            ),
            BoundingBox(
                xmin=20, xmax=30,
                ymin=20, ymax=30,
                labels=["cat"]
            ),
        ],
        predictions=[
            BoundingBox(
                xmin=1, xmax=11,
                ymin=1, ymax=11,
                labels=["dog", "cat", "bird"],
                scores=[0.85, 0.1, 0.05]
            ),
            BoundingBox(
                xmin=21, xmax=31,
                ymin=21, ymax=31,
                labels=["dog", "cat", "bird"],
                scores=[0.34, 0.33, 0.33]
            ),
        ],
    ),
]

loader = DataLoader()
loader.add_bounding_boxes(detections)
evaluator = loader.finalize()

metrics = evaluator.evaluate()

assert metrics[MetricType.Precision][0].to_dict() == {
    'type': 'Precision',
    'value': 0.5,
    'parameters': {
        'iou_threshold': 0.5,
        'score_threshold': 0.5,
        'label': 'dog'
    }
}
```
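
For a side-by-side view of many detection metrics, the metric dictionaries can also be tabulated with pandas (already used in the repository's object-detection example notebook). This is a sketch under the assumption that each metric exposes `to_dict()` with `value` and `parameters` keys, as in the assertion above:

```python
import pandas as pd

# Sketch only: one row per precision metric, with its parameters spread into columns.
rows = [m.to_dict() for m in metrics[MetricType.Precision]]
df = pd.DataFrame([{"value": r["value"], **r["parameters"]} for r in rows])
print(df)
```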

### Semantic Segmentation

```python
import numpy as np
from valor_lite.semantic_segmentation import DataLoader, Segmentation, Bitmask, MetricType

segmentations = [
    Segmentation(
        uid="uid0",
        groundtruths=[
            Bitmask(
                mask=np.random.randint(2, size=(10,10), dtype=np.bool_),
                label="sky",
            ),
            Bitmask(
                mask=np.random.randint(2, size=(10,10), dtype=np.bool_),
                label="ground",
            )
        ],
        predictions=[
            Bitmask(
                mask=np.random.randint(2, size=(10,10), dtype=np.bool_),
                label="sky",
            ),
            Bitmask(
                mask=np.random.randint(2, size=(10,10), dtype=np.bool_),
                label="ground",
            )
        ]
    ),
]

loader = DataLoader()
loader.add_data(segmentations)
evaluator = loader.finalize()

metrics = evaluator.evaluate()

print(metrics[MetricType.Precision][0])
```
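
As with the other tasks, results can be flattened into plain dictionaries for logging or serialization. A rough sketch, assuming `evaluate()` returns a mapping from `MetricType` to lists of metric objects that expose `to_dict()` like the examples above:

```python
import json

# Sketch only: collect every metric from every family into one JSON-serializable list.
all_metrics = [m.to_dict() for family in metrics.values() for m in family]
print(json.dumps(all_metrics, indent=2))
```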
2 changes: 1 addition & 1 deletion lite/benchmarks/benchmark_objdet.py
@@ -8,7 +8,7 @@

import requests
from tqdm import tqdm
from valor_lite.detection import DataLoader, MetricType
from valor_lite.object_detection import DataLoader, MetricType


class AnnotationType(str, Enum):
2 changes: 1 addition & 1 deletion lite/examples/object-detection.ipynb
@@ -44,7 +44,7 @@
"import pandas as pd\n",
"import matplotlib.pyplot as plt\n",
"\n",
"from valor_lite.detection import DataLoader, MetricType"
"from valor_lite.object_detection import DataLoader, MetricType"
]
},
{
2 changes: 1 addition & 1 deletion lite/pyproject.toml
@@ -1,7 +1,7 @@
[project]
name = "valor-lite"
dynamic = ["version"]
description = "Compute valor metrics directly in your client."
description = "Compute valor metrics locally."
readme = "README.md"
requires-python = ">=3.10"
license = { file = "LICENSE" }
@@ -1,7 +1,12 @@
import numpy as np
import pytest
from shapely.geometry import Polygon as ShapelyPolygon
from valor_lite.detection import Bitmask, BoundingBox, Detection, Polygon
from valor_lite.object_detection import (
Bitmask,
BoundingBox,
Detection,
Polygon,
)


@pytest.fixture
@@ -1,5 +1,5 @@
import numpy as np
from valor_lite.detection import (
from valor_lite.object_detection import (
DataLoader,
Detection,
MetricType,
@@ -1,5 +1,5 @@
import numpy as np
from valor_lite.detection import (
from valor_lite.object_detection import (
DataLoader,
Detection,
MetricType,
@@ -1,6 +1,11 @@
import numpy as np
from valor_lite.detection import DataLoader, Detection, Evaluator, MetricType
from valor_lite.detection.computation import compute_confusion_matrix
from valor_lite.object_detection import (
DataLoader,
Detection,
Evaluator,
MetricType,
)
from valor_lite.object_detection.computation import compute_confusion_matrix


def test_confusion_matrix_no_data():
@@ -1,4 +1,4 @@
from valor_lite.detection import DataLoader, Detection, MetricType
from valor_lite.object_detection import DataLoader, Detection, MetricType


def test_counts_metrics_first_class(
@@ -3,7 +3,7 @@
import numpy as np
import pytest
from shapely.geometry import Polygon as ShapelyPolygon
from valor_lite.detection import (
from valor_lite.object_detection import (
Bitmask,
BoundingBox,
DataLoader,
@@ -1,4 +1,4 @@
from valor_lite.detection import DataLoader, Detection, MetricType
from valor_lite.object_detection import DataLoader, Detection, MetricType


def test_metadata_using_torch_metrics_example(
@@ -2,7 +2,12 @@

import numpy as np
import pytest
from valor_lite.detection import BoundingBox, DataLoader, Detection, MetricType
from valor_lite.object_detection import (
BoundingBox,
DataLoader,
Detection,
MetricType,
)


@pytest.fixture
@@ -1,6 +1,6 @@
import numpy as np
from shapely.geometry import Polygon as ShapelyPolygon
from valor_lite.detection.computation import (
from valor_lite.object_detection.computation import (
compute_bbox_iou,
compute_bitmask_iou,
compute_polygon_iou,
@@ -1,5 +1,5 @@
import numpy as np
from valor_lite.detection import (
from valor_lite.object_detection import (
DataLoader,
Detection,
MetricType,
@@ -1,4 +1,4 @@
from valor_lite.detection import DataLoader, Detection, MetricType
from valor_lite.object_detection import DataLoader, Detection, MetricType


def test_precision_metrics_first_class(
@@ -1,4 +1,4 @@
from valor_lite.detection import DataLoader, Detection, MetricType
from valor_lite.object_detection import DataLoader, Detection, MetricType


def test_recall_metrics_first_class(
@@ -1,7 +1,12 @@
import numpy as np
import pytest
from shapely.geometry import Polygon as ShapelyPolygon
from valor_lite.detection import Bitmask, BoundingBox, Detection, Polygon
from valor_lite.object_detection import (
Bitmask,
BoundingBox,
Detection,
Polygon,
)


def test_BoundingBox():
@@ -1,6 +1,6 @@
from random import choice, uniform

from valor_lite.detection import BoundingBox, DataLoader, Detection
from valor_lite.object_detection import BoundingBox, DataLoader, Detection


def _generate_random_detections(
@@ -1,6 +1,6 @@
import numpy as np
import pytest
from valor_lite.segmentation import Bitmask, Segmentation
from valor_lite.semantic_segmentation import Bitmask, Segmentation


def _generate_boolean_mask(
@@ -1,4 +1,4 @@
from valor_lite.segmentation import (
from valor_lite.semantic_segmentation import (
Accuracy,
DataLoader,
MetricType,
@@ -1,6 +1,6 @@
import numpy as np
import pytest
from valor_lite.segmentation import Bitmask, Segmentation
from valor_lite.semantic_segmentation import Bitmask, Segmentation


def test_bitmask():
@@ -1,4 +1,8 @@
from valor_lite.segmentation import DataLoader, MetricType, Segmentation
from valor_lite.semantic_segmentation import (
DataLoader,
MetricType,
Segmentation,
)


def test_confusion_matrix_basic_segmentations(
@@ -1,5 +1,5 @@
import pytest
from valor_lite.segmentation import DataLoader
from valor_lite.semantic_segmentation import DataLoader


def test_no_data():
@@ -1,4 +1,4 @@
from valor_lite.segmentation import DataLoader, Segmentation
from valor_lite.semantic_segmentation import DataLoader, Segmentation


def test_metadata_using_large_random_segmentations(
@@ -1,4 +1,9 @@
from valor_lite.segmentation import F1, DataLoader, MetricType, Segmentation
from valor_lite.semantic_segmentation import (
F1,
DataLoader,
MetricType,
Segmentation,
)


def test_f1_basic_segmentations(basic_segmentations: list[Segmentation]):
@@ -1,5 +1,5 @@
import numpy as np
from valor_lite.segmentation import DataLoader, Segmentation
from valor_lite.semantic_segmentation import DataLoader, Segmentation


def test_filtering(segmentations_from_boxes: list[Segmentation]):
@@ -1,4 +1,4 @@
from valor_lite.segmentation import (
from valor_lite.semantic_segmentation import (
DataLoader,
IoU,
MetricType,