+
{% with docstring_sections = function.docstring.parsed %}
{% include "docstring.html" with context %}
{% endwith %}
diff --git a/rerun_py/mkdocs.yml b/rerun_py/mkdocs.yml
index 9122a734af4b..69ff4b800f00 100644
--- a/rerun_py/mkdocs.yml
+++ b/rerun_py/mkdocs.yml
@@ -2,6 +2,7 @@
# Top-level config for mkdocs
# See: https://www.mkdocs.org/user-guide/configuration/
site_name: Rerun Python APIs
+site_url: https://ref.rerun.io/docs/python/latest
repo_url: https://github.com/rerun-io/rerun/
# Use the material theme
diff --git a/rerun_py/pyproject.toml b/rerun_py/pyproject.toml
index d60ea1b9f109..c8e8474948f4 100644
--- a/rerun_py/pyproject.toml
+++ b/rerun_py/pyproject.toml
@@ -11,7 +11,13 @@ classifiers = [
"Topic :: Scientific/Engineering :: Artificial Intelligence",
"Topic :: Scientific/Engineering :: Visualization",
]
-dependencies = ["deprecated", "numpy>=1.23", "pyarrow==10.0.1"]
+dependencies = [
+ # Must match list in `.github/workflows/reusable_build_and_test_wheels.yml`
+ "deprecated",
+ "numpy>=1.23",
+ "pillow>=9.5.0,<10", # Used for JPEG encoding
+ "pyarrow==10.0.1",
+]
description = "The Rerun Logging SDK"
keywords = ["computer-vision", "logging", "rerun"]
name = "rerun-sdk"
@@ -51,10 +57,10 @@ extend-exclude = [
"examples/python/objectron/proto/objectron/proto.py",
# Copied from https://github.com/huggingface/diffusers/blob/main/src/diffusers/pipelines/stable_diffusion/pipeline_stable_diffusion_depth2img.py
- "examples/python/stable_diffusion/huggingface_pipeline.py",
+ "examples/python/depth_guided_stable_diffusion/huggingface_pipeline.py",
# Copied from https://github.com/colmap/colmap/blob/bf3e19140f491c3042bfd85b7192ef7d249808ec/scripts/python/read_write_model.py
- "examples/python/colmap/read_write_model.py",
+ "examples/python/structure_from_motion/read_write_model.py",
]
ignore = [
# Missing docstring in public function - TODO(emilk): enable for SDK but not for examples
@@ -86,6 +92,9 @@ select = [
[tool.ruff.flake8-tidy-imports]
ban-relative-imports = "all"
+[tool.ruff.isort]
+required-imports = ["from __future__ import annotations"]
+
[tool.maturin]
# We use a python package from inside the rerun_sdk folder to avoid conflicting
# with the other `rerun` pypi package. The rerun_sdk.pth adds this to the pythonpath
diff --git a/rerun_py/requirements-doc.txt b/rerun_py/requirements-doc.txt
index 3683da922b9d..33413e43a40a 100644
--- a/rerun_py/requirements-doc.txt
+++ b/rerun_py/requirements-doc.txt
@@ -3,7 +3,7 @@ mkdocs-gen-files
mkdocs-literate-nav
mkdocs-material
mkdocs-material-extensions
-mkdocs-redirects
+git+https://github.com/rerun-io/mkdocs-redirects.git@v1.3.1 # forked mkdocs-redirects with https://github.com/rerun-io/mkdocs-redirects/commit/d367a0847928438b66f73508e49852be1190409b
mkdocstrings
mkdocstrings-python
mike
diff --git a/rerun_py/requirements-lint.txt b/rerun_py/requirements-lint.txt
index 1811d3ef6afa..96c997692321 100644
--- a/rerun_py/requirements-lint.txt
+++ b/rerun_py/requirements-lint.txt
@@ -6,4 +6,4 @@ pip-check-reqs==2.4.3 # Checks for missing deps in requirements.txt files
pyupgrade==2.37.3
ruff==0.0.251
types-Deprecated==1.2.9
-types-requests==2.28.10
+types-requests>=2.31,<3
diff --git a/rerun_py/rerun/__init__.py b/rerun_py/rerun/__init__.py
index 4e72d42e2cc4..feedd2553ce0 100644
--- a/rerun_py/rerun/__init__.py
+++ b/rerun_py/rerun/__init__.py
@@ -11,6 +11,8 @@
real rerun module by adding it to the path and then
replacing our own module content with it.
"""
+from __future__ import annotations
+
import pathlib
import sys
diff --git a/rerun_py/rerun_demo/__init__.py b/rerun_py/rerun_demo/__init__.py
index 0726ded049ac..56f4ec4658d2 100644
--- a/rerun_py/rerun_demo/__init__.py
+++ b/rerun_py/rerun_demo/__init__.py
@@ -11,6 +11,8 @@
real rerun module by adding it to the path and then
replacing our own module content with it.
"""
+from __future__ import annotations
+
import pathlib
import sys
diff --git a/rerun_py/rerun_sdk/rerun/__init__.py b/rerun_py/rerun_sdk/rerun/__init__.py
index cd5c88b3d789..fcb098e1a858 100644
--- a/rerun_py/rerun_sdk/rerun/__init__.py
+++ b/rerun_py/rerun_sdk/rerun/__init__.py
@@ -1,14 +1,23 @@
"""The Rerun Python SDK, which is a wrapper around the re_sdk crate."""
+from __future__ import annotations
import atexit
import logging
import sys
from inspect import getmembers, isfunction
-from typing import Optional
import rerun_bindings as bindings # type: ignore[attr-defined]
from rerun import experimental
+from rerun.components.transform3d import (
+ Quaternion,
+ Rigid3D,
+ RotationAxisAngle,
+ Scale3D,
+ Translation3D,
+ TranslationAndMat3,
+ TranslationRotationScale3D,
+)
from rerun.log.annotation import AnnotationInfo, ClassDescription, log_annotation_context
from rerun.log.arrow import log_arrow
from rerun.log.bounding_box import log_obb
@@ -24,7 +33,13 @@
from rerun.log.scalar import log_scalar
from rerun.log.tensor import log_tensor
from rerun.log.text import LoggingHandler, LogLevel, log_text_entry
-from rerun.log.transform import log_rigid3, log_unknown_transform, log_view_coordinates
+from rerun.log.transform import (
+ log_disconnected_space,
+ log_rigid3,
+ log_transform3d,
+ log_unknown_transform,
+ log_view_coordinates,
+)
from rerun.recording_stream import (
RecordingStream,
get_application_id,
@@ -36,13 +51,13 @@
set_global_data_recording,
set_thread_local_data_recording,
)
-
-# --- Init RecordingStream class ---
from rerun.recording_stream import _patch as recording_stream_patch
from rerun.script_helpers import script_add_args, script_setup, script_teardown
from rerun.sinks import connect, disconnect, memory_recording, save, serve, spawn
from rerun.time import reset_time, set_time_nanos, set_time_seconds, set_time_sequence
+# --- Init RecordingStream class ---
+
# Inject all relevant methods into the `RecordingStream` class.
# We need to do this from here to avoid circular import issues.
recording_stream_patch(
@@ -85,13 +100,14 @@
"log_arrow",
"log_cleared",
"log_depth_image",
+ "log_disconnected_space",
"log_extension_components",
- "log_image",
"log_image_file",
+ "log_image",
"log_line_segments",
"log_line_strip",
- "log_mesh",
"log_mesh_file",
+ "log_mesh",
"log_meshes",
"log_obb",
"log_path",
@@ -105,6 +121,7 @@
"log_segmentation_image",
"log_tensor",
"log_text_entry",
+ "log_transform3d",
"log_unknown_transform",
"log_view_coordinates",
# classes
@@ -122,6 +139,17 @@
"script_add_args",
"script_setup",
"script_teardown",
+ # Transform helpers
+ "Quaternion",
+ "Rigid3D",
+ "RotationAxisAngle",
+ "Scale3D",
+ "Transform3D",
+ "Transform3DArray",
+ "Transform3DType",
+ "Translation3D",
+ "TranslationAndMat3",
+ "TranslationRotationScale3D",
]
@@ -135,7 +163,7 @@
def init(
application_id: str,
- recording_id: Optional[str] = None,
+ recording_id: str | None = None,
spawn: bool = False,
default_enabled: bool = True,
strict: bool = False,
@@ -196,7 +224,7 @@ def init(
def new_recording(
application_id: str,
- recording_id: Optional[str] = None,
+ recording_id: str | None = None,
make_default: bool = False,
make_thread_default: bool = False,
spawn: bool = False,
diff --git a/rerun_py/rerun_sdk/rerun/__main__.py b/rerun_py/rerun_sdk/rerun/__main__.py
index 8213af58ec28..3af923bf37bf 100644
--- a/rerun_py/rerun_sdk/rerun/__main__.py
+++ b/rerun_py/rerun_sdk/rerun/__main__.py
@@ -1,8 +1,9 @@
"""See `python3 -m rerun --help`."""
+from __future__ import annotations
import sys
-from rerun import bindings, unregister_shutdown # type: ignore[attr-defined]
+from rerun import bindings, unregister_shutdown
def main() -> None:
diff --git a/rerun_py/rerun_sdk/rerun/color_conversion.py b/rerun_py/rerun_sdk/rerun/color_conversion.py
index acebb0560b7d..8d0cf599d363 100644
--- a/rerun_py/rerun_sdk/rerun/color_conversion.py
+++ b/rerun_py/rerun_sdk/rerun/color_conversion.py
@@ -1,5 +1,5 @@
"""Color conversion utilities."""
-from typing import Union
+from __future__ import annotations
import numpy as np
import numpy.typing as npt
@@ -33,7 +33,7 @@ def u8_array_to_rgba(arr: npt.NDArray[np.uint8]) -> npt.NDArray[np.uint32]:
return arr # type: ignore[return-value]
-def linear_to_gamma_u8_value(linear: npt.NDArray[Union[np.float32, np.float64]]) -> npt.NDArray[np.uint8]:
+def linear_to_gamma_u8_value(linear: npt.NDArray[np.float32 | np.float64]) -> npt.NDArray[np.uint8]:
"""
Transform color values from linear [0.0, 1.0] to gamma encoded [0, 255].
@@ -73,7 +73,7 @@ def linear_to_gamma_u8_value(linear: npt.NDArray[Union[np.float32, np.float64]])
return gamma.astype(np.uint8)
-def linear_to_gamma_u8_pixel(linear: npt.NDArray[Union[np.float32, np.float64]]) -> npt.NDArray[np.uint8]:
+def linear_to_gamma_u8_pixel(linear: npt.NDArray[np.float32 | np.float64]) -> npt.NDArray[np.uint8]:
"""
Transform color pixels from linear [0, 1] to gamma encoded [0, 255].
diff --git a/rerun_py/rerun_sdk/rerun/components/__init__.py b/rerun_py/rerun_sdk/rerun/components/__init__.py
index b81ebf5b8060..983b41c21bfd 100644
--- a/rerun_py/rerun_sdk/rerun/components/__init__.py
+++ b/rerun_py/rerun_sdk/rerun/components/__init__.py
@@ -1,5 +1,4 @@
"""The components package defines Python wrapper types for common registered Rerun components."""
-
from __future__ import annotations
from typing import Any, Final, Type, cast
@@ -16,6 +15,7 @@
"draw_order",
"experimental",
"label",
+ "pinhole",
"point",
"quaternion",
"radius",
@@ -65,23 +65,42 @@ def __arrow_ext_class__(self: type[pa.ExtensionType]) -> type[pa.ExtensionArray]
return cast(Type[pa.ExtensionType], component_type)
+def union_discriminant_type(data_type: pa.DenseUnionType, discriminant: str) -> pa.DataType:
+ """Return the data type of the given discriminant."""
+ return next(f.type for f in list(data_type) if f.name == discriminant)
+
+
def build_dense_union(data_type: pa.DenseUnionType, discriminant: str, child: pa.Array) -> pa.UnionArray:
"""
Build a dense UnionArray given the `data_type`, a discriminant, and the child value array.
If the discriminant string doesn't match any possible value, a `ValueError` is raised.
+
+ WARNING: Because of #705, each new union component needs to be handled in `array_to_rust` on the native side.
"""
try:
idx = [f.name for f in list(data_type)].index(discriminant)
type_ids = pa.array([idx] * len(child), type=pa.int8())
value_offsets = pa.array(range(len(child)), type=pa.int32())
+
children = [pa.nulls(0, type=f.type) for f in list(data_type)]
- children[idx] = child.cast(data_type[idx].type)
+ try:
+ children[idx] = child.cast(data_type[idx].type, safe=False)
+ except pa.ArrowInvalid:
+ # Since we're having issues with nullability in union types (see below),
+ # the cast sometimes fails but can be skipped.
+ children[idx] = child
+
return pa.Array.from_buffers(
type=data_type,
length=len(child),
buffers=[None, type_ids.buffers()[1], value_offsets.buffers()[1]],
children=children,
- ).cast(data_type)
+ )
+ # Cast doesn't work for non-flat unions it seems - we're getting issues about the nullability of union variants.
+ # It's pointless anyways since on the native side we have to cast the field types
+ # See https://github.com/rerun-io/rerun/issues/795
+ # .cast(data_type)
+
except ValueError as e:
raise ValueError(e.args)
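
To make the two union helpers above easier to review, here is a small sketch of how they compose, using a toy dense-union type (the `Degrees`/`Radians` union mirrors the angle union used by the transform component further down; assumes the SDK from this branch is importable):

    import pyarrow as pa

    from rerun.components import build_dense_union, union_discriminant_type

    # A toy dense union with two float32 variants.
    angle_type = pa.dense_union([pa.field("Degrees", pa.float32()), pa.field("Radians", pa.float32())])

    # Look up the storage type behind one discriminant...
    assert union_discriminant_type(angle_type, "Radians") == pa.float32()

    # ...and build a single-element union array holding the "Degrees" variant.
    degrees = pa.array([90.0], type=pa.float32())
    angle = build_dense_union(angle_type, "Degrees", degrees)
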
diff --git a/rerun_py/rerun_sdk/rerun/components/disconnected_space.py b/rerun_py/rerun_sdk/rerun/components/disconnected_space.py
new file mode 100644
index 000000000000..1ce505c81bfc
--- /dev/null
+++ b/rerun_py/rerun_sdk/rerun/components/disconnected_space.py
@@ -0,0 +1,29 @@
+from __future__ import annotations
+
+import pyarrow as pa
+
+from rerun.components import REGISTERED_COMPONENT_NAMES, ComponentTypeFactory
+
+__all__ = [
+ "DisconnectedSpaceArray",
+ "DisconnectedSpaceType",
+]
+
+
+class DisconnectedSpaceArray(pa.ExtensionArray): # type: ignore[misc]
+ @classmethod
+ def single(cls) -> DisconnectedSpaceArray:
+ """Build a `DisconnectedSpaceArray` with a single element."""
+
+ storage = pa.array([False], type=DisconnectedSpaceType.storage_type)
+
+ # TODO(clement) enable extension type wrapper
+ # return cast(DisconnectedSpaceArray, pa.ExtensionArray.from_storage(DisconnectedSpaceType(), storage))
+ return storage # type: ignore[no-any-return]
+
+
+DisconnectedSpaceType = ComponentTypeFactory(
+ "DisconnectedSpaceType", DisconnectedSpaceArray, REGISTERED_COMPONENT_NAMES["rerun.disconnected_space"]
+)
+
+pa.register_extension_type(DisconnectedSpaceType())
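
A short sketch of how the new component is used, assuming the SDK from this branch; `log_disconnected_space` is the user-facing wrapper exported in `rerun/__init__.py` above, called here with just an entity path like the other `log_*` functions:

    import rerun as rr

    from rerun.components.disconnected_space import DisconnectedSpaceArray

    # Low-level: a single-element component array (currently the raw storage array,
    # see the TODO above about the extension-type wrapper).
    arr = DisconnectedSpaceArray.single()

    # High-level: mark a subtree as having no transform relation to its parent.
    rr.init("disconnected_space_example")
    rr.log_disconnected_space("world/detached_subtree")
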
diff --git a/rerun_py/rerun_sdk/rerun/components/pinhole.py b/rerun_py/rerun_sdk/rerun/components/pinhole.py
new file mode 100644
index 000000000000..dc1a423cc8e3
--- /dev/null
+++ b/rerun_py/rerun_sdk/rerun/components/pinhole.py
@@ -0,0 +1,50 @@
+from __future__ import annotations
+
+from dataclasses import dataclass
+
+import numpy as np
+import numpy.typing as npt
+import pyarrow as pa
+
+from rerun.components import REGISTERED_COMPONENT_NAMES, ComponentTypeFactory
+from rerun.log import _normalize_matrix3
+
+__all__ = [
+ "PinholeArray",
+ "PinholeType",
+]
+
+
+@dataclass
+class Pinhole:
+ """Camera perspective projection (a.k.a. intrinsics)."""
+
+ # Row-major intrinsics matrix for projecting from camera space to image space.
+ image_from_cam: npt.ArrayLike
+
+ # Pixel resolution (usually integers) of child image space. Width and height.
+ resolution: npt.ArrayLike | None
+
+
+class PinholeArray(pa.ExtensionArray): # type: ignore[misc]
+ def from_pinhole(pinhole: Pinhole) -> PinholeArray:
+ """Build a `PinholeArray` from a single pinhole."""
+
+ image_from_cam = _normalize_matrix3(pinhole.image_from_cam)
+ resolution = None if pinhole.resolution is None else np.array(pinhole.resolution, dtype=np.float32).flatten()
+ storage = pa.StructArray.from_arrays(
+ [
+ pa.FixedSizeListArray.from_arrays(image_from_cam, type=PinholeType.storage_type["image_from_cam"].type),
+ pa.FixedSizeListArray.from_arrays(resolution, type=PinholeType.storage_type["resolution"].type),
+ ],
+ fields=list(PinholeType.storage_type),
+ )
+
+ # TODO(clement) enable extension type wrapper
+ # return cast(PinholeArray, pa.ExtensionArray.from_storage(PinholeType(), storage))
+ return storage # type: ignore[no-any-return]
+
+
+PinholeType = ComponentTypeFactory("PinholeType", PinholeArray, REGISTERED_COMPONENT_NAMES["rerun.pinhole"])
+
+pa.register_extension_type(PinholeType())
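
For reference, a minimal sketch of building the new component by hand (assumes the SDK from this branch; the intrinsics values are arbitrary). The user-facing entry point remains `log_pinhole`, whose updated implementation is in `camera.py` below:

    from rerun.components.pinhole import Pinhole, PinholeArray

    # Row-major 3x3 intrinsics: focal lengths on the diagonal, principal point in the last column.
    intrinsics = [
        [500.0, 0.0, 320.0],
        [0.0, 500.0, 240.0],
        [0.0, 0.0, 1.0],
    ]

    pinhole = Pinhole(image_from_cam=intrinsics, resolution=[640, 480])
    arr = PinholeArray.from_pinhole(pinhole)  # single-element struct array, ready to log
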
diff --git a/rerun_py/rerun_sdk/rerun/components/quaternion.py b/rerun_py/rerun_sdk/rerun/components/quaternion.py
index f04091139447..443b63e9abcf 100644
--- a/rerun_py/rerun_sdk/rerun/components/quaternion.py
+++ b/rerun_py/rerun_sdk/rerun/components/quaternion.py
@@ -6,10 +6,24 @@
from rerun.components import REGISTERED_COMPONENT_NAMES, ComponentTypeFactory
-__all__ = [
- "QuaternionArray",
- "QuaternionType",
-]
+__all__ = ["QuaternionArray", "QuaternionType", "Quaternion"]
+
+
+# @dataclass # not a dataclass in order to force explicit xyzw call
+class Quaternion:
+ """3D rotation expressed via a Quaternion."""
+
+ xyzw: npt.ArrayLike
+ """
+ Quaternion given as a 4-element array of floats in the order (x, y, z, w).
+ """
+ # TODO(andreas): Other representations.
+
+ def __init__(self, *, xyzw: npt.ArrayLike):
+ self.xyzw = xyzw
+
+ def __array__(self) -> npt.NDArray[np.float32]:
+ return np.asarray(self.xyzw, dtype=np.float32)
class QuaternionArray(pa.ExtensionArray): # type: ignore[misc]
diff --git a/rerun_py/rerun_sdk/rerun/components/rect2d.py b/rerun_py/rerun_sdk/rerun/components/rect2d.py
index a0d521e5f6ca..55fb918a4bb4 100644
--- a/rerun_py/rerun_sdk/rerun/components/rect2d.py
+++ b/rerun_py/rerun_sdk/rerun/components/rect2d.py
@@ -6,11 +6,7 @@
import numpy.typing as npt
import pyarrow as pa
-from rerun.components import (
- REGISTERED_COMPONENT_NAMES,
- ComponentTypeFactory,
- build_dense_union,
-)
+from rerun.components import REGISTERED_COMPONENT_NAMES, ComponentTypeFactory, build_dense_union
__all__ = [
"Rect2DArray",
diff --git a/rerun_py/rerun_sdk/rerun/components/tensor.py b/rerun_py/rerun_sdk/rerun/components/tensor.py
index b5d50cd2933e..3ccc8ee70c02 100644
--- a/rerun_py/rerun_sdk/rerun/components/tensor.py
+++ b/rerun_py/rerun_sdk/rerun/components/tensor.py
@@ -8,11 +8,7 @@
import pyarrow as pa
from rerun import bindings
-from rerun.components import (
- REGISTERED_COMPONENT_NAMES,
- ComponentTypeFactory,
- build_dense_union,
-)
+from rerun.components import REGISTERED_COMPONENT_NAMES, ComponentTypeFactory, build_dense_union
__all__ = [
"TensorArray",
diff --git a/rerun_py/rerun_sdk/rerun/components/transform3d.py b/rerun_py/rerun_sdk/rerun/components/transform3d.py
new file mode 100644
index 000000000000..701169e0f350
--- /dev/null
+++ b/rerun_py/rerun_sdk/rerun/components/transform3d.py
@@ -0,0 +1,272 @@
+from __future__ import annotations
+
+from dataclasses import dataclass
+
+import numpy as np
+import numpy.typing as npt
+import pyarrow as pa
+
+from rerun.components import (
+ REGISTERED_COMPONENT_NAMES,
+ ComponentTypeFactory,
+ build_dense_union,
+ union_discriminant_type,
+)
+from rerun.components.quaternion import Quaternion
+from rerun.components.vec import Vec3DType
+from rerun.log import _normalize_matrix3
+
+__all__ = [
+ "Quaternion",
+ "Rigid3D",
+ "RotationAxisAngle",
+ "Scale3D",
+ "Transform3D",
+ "Transform3DArray",
+ "Transform3DType",
+ "Translation3D",
+ "TranslationAndMat3",
+ "TranslationRotationScale3D",
+]
+
+
+@dataclass
+class Transform3D:
+ """An affine transform between two 3D spaces, represented in a given direction."""
+
+ transform: TranslationAndMat3 | TranslationRotationScale3D
+ """Representation of a 3D transform."""
+
+ from_parent: bool = False
+ """
+ If True, the transform maps from the parent space to the child space.
+ Otherwise, the transform maps from the child space to the parent space.
+ """
+
+
+@dataclass
+class TranslationAndMat3:
+ """Representation of a affine transform via a 3x3 translation matrix paired with a translation."""
+
+ translation: npt.ArrayLike | Translation3D | None = None
+ """3D translation vector, applied after the matrix. Uses (0, 0, 0) if not set."""
+
+ matrix: npt.ArrayLike | None = None
+ """The row-major 3x3 matrix for scale, rotation & skew matrix. Uses identity if not set."""
+
+
+@dataclass
+class Rigid3D:
+ """Representation of a rigid transform via separate translation & rotation."""
+
+ translation: Translation3D | npt.ArrayLike | None = None
+ """3D translation vector, applied last."""
+
+ rotation: Quaternion | RotationAxisAngle | None = None
+ """3D rotation, represented as a quaternion or axis + angle, applied second."""
+
+
+@dataclass
+class TranslationRotationScale3D:
+ """Representation of an affine transform via separate translation, rotation & scale."""
+
+ translation: Translation3D | npt.ArrayLike | None = None
+ """3D translation vector, applied last."""
+
+ rotation: Quaternion | RotationAxisAngle | None = None
+ """3D rotation, represented as a quaternion or axis + angle, applied second."""
+
+ scale: Scale3D | npt.ArrayLike | float | None = None
+ """3D scaling either a 3D vector, scalar or None. Applied first."""
+
+
+@dataclass
+class Translation3D:
+ """3D translation expressed as a vector."""
+
+ translation: npt.ArrayLike
+
+
+@dataclass
+class Scale3D:
+ """3D scale expressed as either a uniform scale or a vector."""
+
+ scale: npt.ArrayLike | float
+
+
+@dataclass
+class RotationAxisAngle:
+ """3D rotation expressed via a rotation axis and angle."""
+
+ axis: npt.ArrayLike
+ """
+ Axis to rotate around.
+
+ This is not required to be normalized.
+ If normalization fails (typically because the vector is length zero), the rotation is silently ignored.
+ """
+
+ degrees: float | None = None
+ """3D rotation angle in degrees. Only one of `degrees` or `radians` should be set."""
+
+ radians: float | None = None
+ """3D rotation angle in radians. Only one of `degrees` or `radians` should be set."""
+
+
+def optional_translation_to_arrow(translation: npt.ArrayLike | Translation3D | None) -> pa.UnionArray:
+ # "unpack" rr.Translation3D first.
+ if isinstance(translation, Translation3D):
+ translation = translation.translation
+
+ if translation is None:
+ return pa.nulls(1, type=Vec3DType.storage_type)
+
+ np_translation = np.array(translation, dtype=np.float32).flatten()
+ if np_translation.size != 3:
+ raise ValueError(f"Expected three dimensional translation vector, shape was instead {np_translation.shape}")
+ return pa.FixedSizeListArray.from_arrays(np_translation, type=Vec3DType.storage_type)
+
+
+def build_struct_array_from_translation_mat3(
+ translation_mat3: TranslationAndMat3, type: pa.StructType
+) -> pa.StructArray:
+ translation = optional_translation_to_arrow(translation_mat3.translation)
+ matrix = pa.FixedSizeListArray.from_arrays(_normalize_matrix3(translation_mat3.matrix), type=type["matrix"].type)
+
+ return pa.StructArray.from_arrays(
+ [
+ translation,
+ matrix,
+ ],
+ fields=list(type),
+ )
+
+
+def build_struct_array_from_axis_angle_rotation(
+ rotation: RotationAxisAngle, axis_angle_type: pa.StructType
+) -> pa.StructArray:
+ if rotation.degrees is None and rotation.radians is None:
+ raise ValueError("RotationAxisAngle must have either degrees or radians set")
+ if rotation.degrees is not None and rotation.radians is not None:
+ raise ValueError("RotationAxisAngle must have either degrees or radians set, not both")
+
+ axis = np.array(rotation.axis, dtype=np.float32).flatten()
+ axis = pa.FixedSizeListArray.from_arrays(axis, type=axis_angle_type["axis"].type)
+
+ if rotation.degrees is not None:
+ angle = pa.array([rotation.degrees], type=pa.float32())
+ angle_variant = "Degrees"
+ else:
+ angle = pa.array([rotation.radians], type=pa.float32())
+ angle_variant = "Radians"
+ angle = build_dense_union(axis_angle_type["angle"].type, angle_variant, angle)
+
+ return pa.StructArray.from_arrays(
+ [
+ axis,
+ angle,
+ ],
+ fields=list(axis_angle_type),
+ )
+
+
+def build_union_array_from_rotation(
+ rotation: Quaternion | RotationAxisAngle | None, type: pa.DenseUnionType
+) -> pa.UnionArray:
+ if rotation is None:
+ return pa.nulls(1, type=type)
+ elif isinstance(rotation, RotationAxisAngle):
+ rotation_discriminant = "AxisAngle"
+ axis_angle_type = union_discriminant_type(type, rotation_discriminant)
+ stored_rotation = build_struct_array_from_axis_angle_rotation(rotation, axis_angle_type)
+ elif isinstance(rotation, Quaternion):
+ rotation_discriminant = "Quaternion"
+ np_rotation = np.array(rotation.xyzw, dtype=np.float32).flatten()
+ stored_rotation = pa.FixedSizeListArray.from_arrays(
+ np_rotation, type=union_discriminant_type(type, rotation_discriminant)
+ )
+ else:
+ raise ValueError(
+ f"Unknown 3d rotation representation: {rotation}. " + "Expected `RotationAxisAngle`/`Quaternion` or `None`."
+ )
+
+ return build_dense_union(type, rotation_discriminant, stored_rotation)
+
+
+def build_union_array_from_scale(
+ scale: Scale3D | npt.ArrayLike | float | None, type: pa.DenseUnionType
+) -> pa.UnionArray:
+ # "unpack" rr.Scale3D first.
+ if isinstance(scale, Scale3D):
+ scale = scale.scale
+
+ if scale is None:
+ return pa.nulls(1, type=type)
+ elif np.isscalar(scale):
+ scale_discriminant = "Uniform"
+ scale = pa.array([scale], type=pa.float32())
+ else:
+ scale_discriminant = "ThreeD"
+ scale = np.array(scale, dtype=np.float32).flatten()
+ if len(scale) != 3:
+ raise ValueError(f"Scale vector must have 3 elements, got {len(scale)}")
+ scale = pa.FixedSizeListArray.from_arrays(scale, type=union_discriminant_type(type, scale_discriminant))
+
+ return build_dense_union(type, scale_discriminant, scale)
+
+
+def build_struct_array_from_translation_rotation_scale(
+ transform: TranslationRotationScale3D, type: pa.StructType
+) -> pa.StructArray:
+ translation = optional_translation_to_arrow(transform.translation)
+ rotation = build_union_array_from_rotation(transform.rotation, type["rotation"].type)
+ scale = build_union_array_from_scale(transform.scale, type["scale"].type)
+
+ return pa.StructArray.from_arrays(
+ [
+ translation,
+ rotation,
+ scale,
+ ],
+ fields=list(type),
+ )
+
+
+class Transform3DArray(pa.ExtensionArray): # type: ignore[misc]
+ def from_transform(transform: Transform3D) -> Transform3DArray:
+ """Build a `Transform3DArray` from a single transform."""
+
+ transform_repr_union_type = Transform3DType.storage_type[0].type
+
+ if isinstance(transform.transform, TranslationAndMat3):
+ discriminant_affine3d = "TranslationAndMat3"
+ repr_type = union_discriminant_type(transform_repr_union_type, discriminant_affine3d)
+ transform_repr = build_struct_array_from_translation_mat3(transform.transform, repr_type)
+ elif isinstance(transform.transform, TranslationRotationScale3D):
+ discriminant_affine3d = "TranslationRotationScale"
+ repr_type = union_discriminant_type(transform_repr_union_type, discriminant_affine3d)
+ transform_repr = build_struct_array_from_translation_rotation_scale(transform.transform, repr_type)
+ else:
+ raise ValueError(
+ f"Unknown transform 3d representation: {transform.transform} "
+ + " Expected `TranslationAndMat3` or `TranslationRotationScale3D`."
+ )
+
+ storage = pa.StructArray.from_arrays(
+ [
+ build_dense_union(transform_repr_union_type, discriminant_affine3d, transform_repr),
+ pa.array([transform.from_parent], type=Transform3DType.storage_type[1].type),
+ ],
+ fields=list(Transform3DType.storage_type),
+ )
+
+ # TODO(clement) enable extension type wrapper
+ # return cast(Transform3DArray, pa.ExtensionArray.from_storage(Transform3DType(), storage))
+ return storage # type: ignore[no-any-return]
+
+
+Transform3DType = ComponentTypeFactory(
+ "Transform3DType", Transform3DArray, REGISTERED_COMPONENT_NAMES["rerun.transform3d"]
+)
+
+pa.register_extension_type(Transform3DType())
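
To make the new transform types concrete, here is a minimal sketch of building one and serializing it (assumes the SDK from this branch; the values are arbitrary). The higher-level `log_transform3d` imported in `rerun/__init__.py` above is the intended user-facing wrapper around this machinery:

    from rerun.components.transform3d import (
        RotationAxisAngle,
        Transform3D,
        Transform3DArray,
        TranslationRotationScale3D,
    )

    # A transform that scales by 2, rotates 90 degrees around +Z, then translates along +X.
    transform = Transform3D(
        TranslationRotationScale3D(
            translation=[1.0, 0.0, 0.0],
            rotation=RotationAxisAngle(axis=[0.0, 0.0, 1.0], degrees=90.0),
            scale=2.0,
        ),
        from_parent=False,
    )

    # Serialize into the single-element Arrow array that gets sent to the viewer.
    arr = Transform3DArray.from_transform(transform)
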
diff --git a/rerun_py/rerun_sdk/rerun/experimental.py b/rerun_py/rerun_sdk/rerun/experimental.py
index 4d0a03f50dfa..34827d7037df 100644
--- a/rerun_py/rerun_sdk/rerun/experimental.py
+++ b/rerun_py/rerun_sdk/rerun/experimental.py
@@ -4,6 +4,7 @@
These features are not yet stable and may change in future releases without
going through the normal deprecation cycle.
"""
+from __future__ import annotations
from rerun.log.experimental.text import log_text_box
diff --git a/rerun_py/rerun_sdk/rerun/log/__init__.py b/rerun_py/rerun_sdk/rerun/log/__init__.py
index 1c681218940a..4388a09810b7 100644
--- a/rerun_py/rerun_sdk/rerun/log/__init__.py
+++ b/rerun_py/rerun_sdk/rerun/log/__init__.py
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
from typing import Optional, Sequence, Union
import numpy as np
@@ -11,11 +13,11 @@
OptionalKeyPointIds = Optional[Union[int, npt.ArrayLike]]
-def _to_sequence(array: Optional[npt.ArrayLike]) -> Optional[Sequence[float]]:
+def _to_sequence(array: npt.ArrayLike | None) -> Sequence[float] | None:
return np.require(array, float).tolist() # type: ignore[no-any-return]
-def _normalize_colors(colors: Optional[Union[Color, Colors]] = None) -> npt.NDArray[np.uint8]:
+def _normalize_colors(colors: Color | Colors | None = None) -> npt.NDArray[np.uint8]:
"""
Normalize flexible colors arrays.
@@ -47,7 +49,7 @@ def _normalize_ids(class_ids: OptionalClassIds = None) -> npt.NDArray[np.uint16]
return np.atleast_1d(np.array(class_ids, dtype=np.uint16, copy=False))
-def _normalize_radii(radii: Optional[npt.ArrayLike] = None) -> npt.NDArray[np.float32]:
+def _normalize_radii(radii: npt.ArrayLike | None = None) -> npt.NDArray[np.float32]:
"""Normalize flexible radii arrays."""
if radii is None:
return np.array((), dtype=np.float32)
@@ -55,8 +57,17 @@ def _normalize_radii(radii: Optional[npt.ArrayLike] = None) -> npt.NDArray[np.fl
return np.atleast_1d(np.array(radii, dtype=np.float32, copy=False))
-def _normalize_labels(labels: Optional[Union[str, Sequence[str]]]) -> Sequence[str]:
+def _normalize_labels(labels: str | Sequence[str] | None) -> Sequence[str]:
if labels is None:
return []
else:
return labels
+
+
+def _normalize_matrix3(matrix: npt.ArrayLike | None) -> npt.ArrayLike:
+ matrix = np.eye(3) if matrix is None else matrix
+ matrix = np.array(matrix, dtype=np.float32, order="F")
+ if matrix.shape != (3, 3):
+ raise ValueError(f"Expected 3x3 matrix, shape was instead {matrix.shape}")
+ # Rerun is column major internally, tell numpy to use Fortran order which is just that.
+ return matrix.flatten(order="F")
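
Because the column-major flattening is easy to get backwards, a small worked example of what `_normalize_matrix3` returns (a sketch assuming the SDK from this branch):

    from rerun.log import _normalize_matrix3

    # Input is row-major; the result is the same matrix flattened column by column.
    flat = _normalize_matrix3([[1, 2, 3], [4, 5, 6], [7, 8, 9]])
    # -> array([1., 4., 7., 2., 5., 8., 3., 6., 9.], dtype=float32)
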
diff --git a/rerun_py/rerun_sdk/rerun/log/annotation.py b/rerun_py/rerun_sdk/rerun/log/annotation.py
index e55ec0723630..3d204f9062aa 100644
--- a/rerun_py/rerun_sdk/rerun/log/annotation.py
+++ b/rerun_py/rerun_sdk/rerun/log/annotation.py
@@ -1,5 +1,7 @@
+from __future__ import annotations
+
from dataclasses import dataclass
-from typing import Iterable, Optional, Sequence, Tuple, Union
+from typing import Iterable, Sequence, Tuple, Union
from rerun import bindings
from rerun.log import Color, _normalize_colors
@@ -25,10 +27,10 @@ class AnnotationInfo:
id: int = 0
"""The id of the class or key-point to annotate"""
- label: Optional[str] = None
+ label: str | None = None
"""The label that will be shown in the UI"""
- color: Optional[Color] = None
+ color: Color | None = None
"""The color that will be applied to the annotated entity"""
@@ -53,13 +55,13 @@ class ClassDescription:
Keypoints in turn may be connected to each other by connections (typically used for skeleton edges).
"""
- info: Optional[AnnotationInfoLike] = None
+ info: AnnotationInfoLike | None = None
"""The annotation info for the class"""
- keypoint_annotations: Optional[Iterable[AnnotationInfoLike]] = None
+ keypoint_annotations: Iterable[AnnotationInfoLike] | None = None
"""The annotation infos for the all key-points"""
- keypoint_connections: Optional[Iterable[Union[int, Tuple[int, int]]]] = None
+ keypoint_connections: Iterable[int | tuple[int, int]] | None = None
"""The connections between key-points"""
@@ -77,10 +79,10 @@ def coerce_class_descriptor_like(arg: ClassDescriptionLike) -> ClassDescription:
@log_decorator
def log_annotation_context(
entity_path: str,
- class_descriptions: Union[ClassDescriptionLike, Iterable[ClassDescriptionLike]],
+ class_descriptions: ClassDescriptionLike | Iterable[ClassDescriptionLike],
*,
timeless: bool = True,
- recording: Optional[RecordingStream] = None,
+ recording: RecordingStream | None = None,
) -> None:
"""
Log an annotation context made up of a collection of [ClassDescription][rerun.log.annotation.ClassDescription]s.
@@ -131,7 +133,7 @@ def log_annotation_context(
# Convert back to fixed tuple for easy pyo3 conversion
# This is pretty messy but will likely go away / be refactored with pending data-model changes.
- def info_to_tuple(info: Optional[AnnotationInfoLike]) -> Tuple[int, Optional[str], Optional[Sequence[int]]]:
+ def info_to_tuple(info: AnnotationInfoLike | None) -> tuple[int, str | None, Sequence[int] | None]:
if info is None:
return (0, None, None)
info = coerce_annotation_info(info)
@@ -139,7 +141,7 @@ def info_to_tuple(info: Optional[AnnotationInfoLike]) -> Tuple[int, Optional[str
return (info.id, info.label, color)
def keypoint_connections_to_flat_list(
- keypoint_connections: Optional[Iterable[Union[int, Tuple[int, int]]]]
+ keypoint_connections: Iterable[int | tuple[int, int]] | None
) -> Sequence[int]:
if keypoint_connections is None:
return []
diff --git a/rerun_py/rerun_sdk/rerun/log/arrow.py b/rerun_py/rerun_sdk/rerun/log/arrow.py
index 570261077345..ca01b302a4e9 100644
--- a/rerun_py/rerun_sdk/rerun/log/arrow.py
+++ b/rerun_py/rerun_sdk/rerun/log/arrow.py
@@ -1,4 +1,6 @@
-from typing import Any, Dict, Optional
+from __future__ import annotations
+
+from typing import Any
import numpy as np
import numpy.typing as npt
@@ -22,15 +24,15 @@
@log_decorator
def log_arrow(
entity_path: str,
- origin: Optional[npt.ArrayLike],
- vector: Optional[npt.ArrayLike] = None,
+ origin: npt.ArrayLike | None,
+ vector: npt.ArrayLike | None = None,
*,
- color: Optional[Color] = None,
- label: Optional[str] = None,
- width_scale: Optional[float] = None,
- ext: Optional[Dict[str, Any]] = None,
+ color: Color | None = None,
+ label: str | None = None,
+ width_scale: float | None = None,
+ ext: dict[str, Any] | None = None,
timeless: bool = False,
- recording: Optional[RecordingStream] = None,
+ recording: RecordingStream | None = None,
) -> None:
"""
Log a 3D arrow.
@@ -66,8 +68,8 @@ def log_arrow(
"""
- instanced: Dict[str, Any] = {}
- splats: Dict[str, Any] = {}
+ instanced: dict[str, Any] = {}
+ splats: dict[str, Any] = {}
if origin is not None:
if vector is None:
diff --git a/rerun_py/rerun_sdk/rerun/log/bounding_box.py b/rerun_py/rerun_sdk/rerun/log/bounding_box.py
index 2bc1b3eba163..bbb17943cc89 100644
--- a/rerun_py/rerun_sdk/rerun/log/bounding_box.py
+++ b/rerun_py/rerun_sdk/rerun/log/bounding_box.py
@@ -1,4 +1,6 @@
-from typing import Any, Dict, Optional
+from __future__ import annotations
+
+from typing import Any
import numpy as np
import numpy.typing as npt
@@ -26,16 +28,16 @@
def log_obb(
entity_path: str,
*,
- half_size: Optional[npt.ArrayLike],
- position: Optional[npt.ArrayLike] = None,
- rotation_q: Optional[npt.ArrayLike] = None,
- color: Optional[Color] = None,
- stroke_width: Optional[float] = None,
- label: Optional[str] = None,
- class_id: Optional[int] = None,
- ext: Optional[Dict[str, Any]] = None,
+ half_size: npt.ArrayLike | None,
+ position: npt.ArrayLike | None = None,
+ rotation_q: npt.ArrayLike | None = None,
+ color: Color | None = None,
+ stroke_width: float | None = None,
+ label: str | None = None,
+ class_id: int | None = None,
+ ext: dict[str, Any] | None = None,
timeless: bool = False,
- recording: Optional[RecordingStream] = None,
+ recording: RecordingStream | None = None,
) -> None:
"""
Log a 3D Oriented Bounding Box, or OBB.
@@ -76,8 +78,8 @@ def log_obb(
"""
recording = RecordingStream.to_native(recording)
- instanced: Dict[str, Any] = {}
- splats: Dict[str, Any] = {}
+ instanced: dict[str, Any] = {}
+ splats: dict[str, Any] = {}
if half_size is not None:
half_size = np.require(half_size, dtype="float32")
diff --git a/rerun_py/rerun_sdk/rerun/log/camera.py b/rerun_py/rerun_sdk/rerun/log/camera.py
index abc685946111..0a3b306cb669 100644
--- a/rerun_py/rerun_sdk/rerun/log/camera.py
+++ b/rerun_py/rerun_sdk/rerun/log/camera.py
@@ -1,9 +1,9 @@
-from typing import Optional
+from __future__ import annotations
-import numpy as np
import numpy.typing as npt
from rerun import bindings
+from rerun.components.pinhole import Pinhole, PinholeArray
from rerun.log.log_decorator import log_decorator
from rerun.recording_stream import RecordingStream
@@ -20,7 +20,7 @@ def log_pinhole(
width: int,
height: int,
timeless: bool = False,
- recording: Optional[RecordingStream] = None,
+ recording: RecordingStream | None = None,
) -> None:
"""
Log a perspective camera model.
@@ -69,11 +69,5 @@ def log_pinhole(
"""
- # Transform arrow handling happens inside the python bridge
- bindings.log_pinhole(
- entity_path,
- resolution=[width, height],
- child_from_parent=np.asarray(child_from_parent).T.tolist(),
- timeless=timeless,
- recording=recording,
- )
+ instanced = {"rerun.pinhole": PinholeArray.from_pinhole(Pinhole(child_from_parent, [width, height]))}
+ bindings.log_arrow_msg(entity_path, components=instanced, timeless=timeless, recording=recording)
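
The call-site shape of `log_pinhole` is unchanged by this rewrite; a minimal usage sketch (assumes the SDK from this branch, arbitrary intrinsics):

    import rerun as rr

    rr.init("pinhole_example")
    rr.log_pinhole(
        "world/camera/image",
        child_from_parent=[
            [500.0, 0.0, 320.0],
            [0.0, 500.0, 240.0],
            [0.0, 0.0, 1.0],
        ],
        width=640,
        height=480,
    )
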
diff --git a/rerun_py/rerun_sdk/rerun/log/clear.py b/rerun_py/rerun_sdk/rerun/log/clear.py
index 4a0ab3ddc13d..ff94e55d5ab8 100644
--- a/rerun_py/rerun_sdk/rerun/log/clear.py
+++ b/rerun_py/rerun_sdk/rerun/log/clear.py
@@ -1,4 +1,4 @@
-from typing import Optional
+from __future__ import annotations
from rerun import bindings
from rerun.recording_stream import RecordingStream
@@ -8,7 +8,7 @@ def log_cleared(
entity_path: str,
*,
recursive: bool = False,
- recording: Optional[RecordingStream] = None,
+ recording: RecordingStream | None = None,
) -> None:
"""
Indicate that an entity at a given path should no longer be displayed.
diff --git a/rerun_py/rerun_sdk/rerun/log/error_utils.py b/rerun_py/rerun_sdk/rerun/log/error_utils.py
index 4a4366235e50..07eb9feec92e 100644
--- a/rerun_py/rerun_sdk/rerun/log/error_utils.py
+++ b/rerun_py/rerun_sdk/rerun/log/error_utils.py
@@ -1,6 +1,7 @@
+from __future__ import annotations
+
import inspect
import logging
-from typing import Optional
import rerun
from rerun.log.text_internal import LogLevel, log_text_entry_internal
@@ -20,7 +21,7 @@ def _build_warning_context_string(skip_first: int) -> str:
def _send_warning(
message: str,
depth_to_user_code: int,
- recording: Optional[RecordingStream] = None,
+ recording: RecordingStream | None = None,
) -> None:
"""
Sends a warning about the usage of the Rerun SDK.
diff --git a/rerun_py/rerun_sdk/rerun/log/experimental/text.py b/rerun_py/rerun_sdk/rerun/log/experimental/text.py
index 6cc1fc16a627..1e75ecbcb38b 100644
--- a/rerun_py/rerun_sdk/rerun/log/experimental/text.py
+++ b/rerun_py/rerun_sdk/rerun/log/experimental/text.py
@@ -1,20 +1,23 @@
+from __future__ import annotations
+
import logging
-from typing import Any, Dict, Optional
+from typing import Any
-# Fully qualified to avoid circular import
import rerun.log.extension_components
from rerun import bindings
from rerun.components.experimental.text_box import TextBoxArray
from rerun.components.instance import InstanceArray
from rerun.log.log_decorator import log_decorator
+# Fully qualified to avoid circular import
+
@log_decorator
def log_text_box(
entity_path: str,
text: str,
*,
- ext: Optional[Dict[str, Any]] = None,
+ ext: dict[str, Any] | None = None,
timeless: bool = False,
) -> None:
"""
@@ -34,13 +37,13 @@ def log_text_box(
Whether the text-box should be timeless.
"""
- instanced: Dict[str, Any] = {}
- splats: Dict[str, Any] = {}
+ instanced: dict[str, Any] = {}
+ splats: dict[str, Any] = {}
if text:
instanced["rerun.text_box"] = TextBoxArray.from_bodies([(text,)])
else:
- logging.warning(f"Null text entry in log_text_entry('{entity_path}') will be dropped.")
+ logging.warning(f"Null text entry in log_text_entry('{entity_path}') will be dropped.")
if ext:
rerun.log.extension_components._add_extension_components(instanced, splats, ext, None)
diff --git a/rerun_py/rerun_sdk/rerun/log/extension_components.py b/rerun_py/rerun_sdk/rerun/log/extension_components.py
index c68ebcd3cd61..1580bdd95799 100644
--- a/rerun_py/rerun_sdk/rerun/log/extension_components.py
+++ b/rerun_py/rerun_sdk/rerun/log/extension_components.py
@@ -1,16 +1,19 @@
-from typing import Any, Dict, Optional, Sequence
+from __future__ import annotations
+
+from typing import Any, Sequence
import numpy as np
import numpy.typing as npt
import pyarrow as pa
-# Fully qualified to avoid circular import
import rerun.log.error_utils
from rerun import bindings
from rerun.components.instance import InstanceArray
from rerun.log.log_decorator import log_decorator
from rerun.recording_stream import RecordingStream
+# Fully qualified to avoid circular import
+
__all__ = [
"_add_extension_components",
"log_extension_components",
@@ -18,14 +21,14 @@
EXT_PREFIX = "ext."
-EXT_COMPONENT_TYPES: Dict[str, Any] = {}
+EXT_COMPONENT_TYPES: dict[str, Any] = {}
def _add_extension_components(
- instanced: Dict[str, Any],
- splats: Dict[str, Any],
- ext: Dict[str, Any],
- identifiers: Optional[npt.NDArray[np.uint64]],
+ instanced: dict[str, Any],
+ splats: dict[str, Any],
+ ext: dict[str, Any],
+ identifiers: npt.NDArray[np.uint64] | None,
) -> None:
for name, value in ext.items():
# Don't log empty components
@@ -66,11 +69,11 @@ def _add_extension_components(
@log_decorator
def log_extension_components(
entity_path: str,
- ext: Dict[str, Any],
+ ext: dict[str, Any],
*,
- identifiers: Optional[Sequence[int]] = None,
+ identifiers: Sequence[int] | None = None,
timeless: bool = False,
- recording: Optional[RecordingStream] = None,
+ recording: RecordingStream | None = None,
) -> None:
"""
Log an arbitrary collection of extension components.
@@ -126,8 +129,8 @@ def log_extension_components(
except ValueError:
rerun.log.error_utils._send_warning("Only integer identifiers supported", 1)
- instanced: Dict[str, Any] = {}
- splats: Dict[str, Any] = {}
+ instanced: dict[str, Any] = {}
+ splats: dict[str, Any] = {}
if len(identifiers_np):
instanced["rerun.instance_key"] = InstanceArray.from_numpy(identifiers_np)
diff --git a/rerun_py/rerun_sdk/rerun/log/file.py b/rerun_py/rerun_sdk/rerun/log/file.py
index a916bb6726bf..056acda18f9e 100644
--- a/rerun_py/rerun_sdk/rerun/log/file.py
+++ b/rerun_py/rerun_sdk/rerun/log/file.py
@@ -1,7 +1,8 @@
+from __future__ import annotations
+
from dataclasses import dataclass
from enum import Enum
from pathlib import Path
-from typing import Optional
import numpy as np
import numpy.typing as npt
@@ -49,11 +50,11 @@ def log_mesh_file(
entity_path: str,
mesh_format: MeshFormat,
*,
- mesh_bytes: Optional[bytes] = None,
- mesh_path: Optional[Path] = None,
- transform: Optional[npt.ArrayLike] = None,
+ mesh_bytes: bytes | None = None,
+ mesh_path: Path | None = None,
+ transform: npt.ArrayLike | None = None,
timeless: bool = False,
- recording: Optional[RecordingStream] = None,
+ recording: RecordingStream | None = None,
) -> None:
"""
Log the contents of a mesh file (.gltf, .glb, .obj, …).
@@ -116,11 +117,11 @@ def log_mesh_file(
def log_image_file(
entity_path: str,
*,
- img_bytes: Optional[bytes] = None,
- img_path: Optional[Path] = None,
- img_format: Optional[ImageFormat] = None,
+ img_bytes: bytes | None = None,
+ img_path: Path | None = None,
+ img_format: ImageFormat | None = None,
timeless: bool = False,
- recording: Optional[RecordingStream] = None,
+ recording: RecordingStream | None = None,
) -> None:
"""
Log an image file given its contents or path on disk.
diff --git a/rerun_py/rerun_sdk/rerun/log/image.py b/rerun_py/rerun_sdk/rerun/log/image.py
index 696b62f8f142..41f794a3b5b9 100644
--- a/rerun_py/rerun_sdk/rerun/log/image.py
+++ b/rerun_py/rerun_sdk/rerun/log/image.py
@@ -1,10 +1,15 @@
-from typing import Any, Dict, Optional
+from __future__ import annotations
+
+from io import BytesIO
+from typing import Any
import numpy as np
import numpy.typing as npt
+from PIL import Image
from rerun import bindings
from rerun.log.error_utils import _send_warning
+from rerun.log.file import ImageFormat, log_image_file
from rerun.log.log_decorator import log_decorator
from rerun.log.tensor import Tensor, _log_tensor, _to_numpy
from rerun.recording_stream import RecordingStream
@@ -21,10 +26,11 @@ def log_image(
entity_path: str,
image: Tensor,
*,
- draw_order: Optional[float] = None,
- ext: Optional[Dict[str, Any]] = None,
+ draw_order: float | None = None,
+ ext: dict[str, Any] | None = None,
timeless: bool = False,
- recording: Optional[RecordingStream] = None,
+ recording: RecordingStream | None = None,
+ jpeg_quality: int | None = None,
) -> None:
"""
Log a gray or color image.
@@ -57,6 +63,14 @@ def log_image(
Specifies the [`rerun.RecordingStream`][] to use.
If left unspecified, defaults to the current active data recording, if there is one.
See also: [`rerun.init`][], [`rerun.set_global_data_recording`][].
+ jpeg_quality:
+ If set, encode the image as a JPEG to save storage space.
+ Higher quality = larger file size.
+ A quality of 95 still saves a lot of space, but is visually very similar.
+ JPEG compression works best for photographs.
+ Only RGB images are supported.
+ Note that compressing to JPEG costs a bit of CPU time, both when logging
+ and later when viewing them.
"""
@@ -88,6 +102,23 @@ def log_image(
if interpretable_as_image and num_non_empty_dims != len(shape):
image = np.squeeze(image)
+ if jpeg_quality is not None:
+ # TODO(emilk): encode JPEG in background thread instead
+
+ if image.dtype not in ["uint8", "sint32", "float32"]:
+ # Convert to a format supported by Image.fromarray
+ image = image.astype("float32")
+
+ pil_image = Image.fromarray(image)
+ output = BytesIO()
+ pil_image.save(output, format="JPEG", quality=jpeg_quality)
+ jpeg_bytes = output.getvalue()
+ output.close()
+
+ # TODO(emilk): pass draw_order too
+ log_image_file(entity_path=entity_path, img_bytes=jpeg_bytes, img_format=ImageFormat.JPEG, timeless=timeless)
+ return
+
_log_tensor(entity_path, image, draw_order=draw_order, ext=ext, timeless=timeless, recording=recording)
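
A short usage sketch for the new `jpeg_quality` parameter (assumes the SDK from this branch and an RGB `uint8` image; the values are arbitrary):

    import numpy as np
    import rerun as rr

    rr.init("jpeg_example")

    # With jpeg_quality set, the image is encoded with Pillow and logged via
    # log_image_file instead of as a raw tensor.
    image = (np.random.rand(480, 640, 3) * 255).astype(np.uint8)
    rr.log_image("camera/rgb", image, jpeg_quality=95)
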
@@ -96,11 +127,11 @@ def log_depth_image(
entity_path: str,
image: Tensor,
*,
- draw_order: Optional[float] = None,
- meter: Optional[float] = None,
- ext: Optional[Dict[str, Any]] = None,
+ draw_order: float | None = None,
+ meter: float | None = None,
+ ext: dict[str, Any] | None = None,
timeless: bool = False,
- recording: Optional[RecordingStream] = None,
+ recording: RecordingStream | None = None,
) -> None:
"""
Log a depth image.
@@ -175,10 +206,10 @@ def log_segmentation_image(
entity_path: str,
image: npt.ArrayLike,
*,
- draw_order: Optional[float] = None,
- ext: Optional[Dict[str, Any]] = None,
+ draw_order: float | None = None,
+ ext: dict[str, Any] | None = None,
timeless: bool = False,
- recording: Optional[RecordingStream] = None,
+ recording: RecordingStream | None = None,
) -> None:
"""
Log an image made up of integer class-ids.
diff --git a/rerun_py/rerun_sdk/rerun/log/lines.py b/rerun_py/rerun_sdk/rerun/log/lines.py
index a8df9bf4eac3..888369a98789 100644
--- a/rerun_py/rerun_sdk/rerun/log/lines.py
+++ b/rerun_py/rerun_sdk/rerun/log/lines.py
@@ -1,4 +1,6 @@
-from typing import Any, Dict, Optional
+from __future__ import annotations
+
+from typing import Any
import numpy as np
import numpy.typing as npt
@@ -25,13 +27,13 @@
@deprecated(version="0.2.0", reason="Use log_line_strip instead")
def log_path(
entity_path: str,
- positions: Optional[npt.ArrayLike],
+ positions: npt.ArrayLike | None,
*,
- stroke_width: Optional[float] = None,
- color: Optional[Color] = None,
- ext: Optional[Dict[str, Any]] = None,
+ stroke_width: float | None = None,
+ color: Color | None = None,
+ ext: dict[str, Any] | None = None,
timeless: bool = False,
- recording: Optional[RecordingStream] = None,
+ recording: RecordingStream | None = None,
) -> None:
log_line_strip(
entity_path, positions, stroke_width=stroke_width, color=color, ext=ext, timeless=timeless, recording=recording
@@ -41,14 +43,14 @@ def log_path(
@log_decorator
def log_line_strip(
entity_path: str,
- positions: Optional[npt.ArrayLike],
+ positions: npt.ArrayLike | None,
*,
- stroke_width: Optional[float] = None,
- color: Optional[Color] = None,
- draw_order: Optional[float] = None,
- ext: Optional[Dict[str, Any]] = None,
+ stroke_width: float | None = None,
+ color: Color | None = None,
+ draw_order: float | None = None,
+ ext: dict[str, Any] | None = None,
timeless: bool = False,
- recording: Optional[RecordingStream] = None,
+ recording: RecordingStream | None = None,
) -> None:
r"""
Log a line strip through 2D or 3D space.
@@ -92,8 +94,8 @@ def log_line_strip(
if positions is not None:
positions = np.require(positions, dtype="float32")
- instanced: Dict[str, Any] = {}
- splats: Dict[str, Any] = {}
+ instanced: dict[str, Any] = {}
+ splats: dict[str, Any] = {}
if positions is not None:
if positions.shape[1] == 2:
@@ -132,12 +134,12 @@ def log_line_segments(
entity_path: str,
positions: npt.ArrayLike,
*,
- stroke_width: Optional[float] = None,
- color: Optional[Color] = None,
- draw_order: Optional[float] = None,
- ext: Optional[Dict[str, Any]] = None,
+ stroke_width: float | None = None,
+ color: Color | None = None,
+ draw_order: float | None = None,
+ ext: dict[str, Any] | None = None,
timeless: bool = False,
- recording: Optional[RecordingStream] = None,
+ recording: RecordingStream | None = None,
) -> None:
r"""
Log many 2D or 3D line segments.
@@ -181,8 +183,8 @@ def log_line_segments(
positions = np.require([], dtype="float32")
positions = np.require(positions, dtype="float32")
- instanced: Dict[str, Any] = {}
- splats: Dict[str, Any] = {}
+ instanced: dict[str, Any] = {}
+ splats: dict[str, Any] = {}
if positions is not None:
# If not a multiple of 2, drop the last row
diff --git a/rerun_py/rerun_sdk/rerun/log/log_decorator.py b/rerun_py/rerun_sdk/rerun/log/log_decorator.py
index 0d1db629c5f0..330b018471e2 100644
--- a/rerun_py/rerun_sdk/rerun/log/log_decorator.py
+++ b/rerun_py/rerun_sdk/rerun/log/log_decorator.py
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
import functools
import logging
import traceback
diff --git a/rerun_py/rerun_sdk/rerun/log/mesh.py b/rerun_py/rerun_sdk/rerun/log/mesh.py
index ccf78f61f51a..d24cf3ad8127 100644
--- a/rerun_py/rerun_sdk/rerun/log/mesh.py
+++ b/rerun_py/rerun_sdk/rerun/log/mesh.py
@@ -1,13 +1,12 @@
-from typing import Any, Optional, Sequence
+from __future__ import annotations
+
+from typing import Any, Sequence
import numpy as np
import numpy.typing as npt
from rerun import bindings
-from rerun.log import (
- Colors,
- _normalize_colors,
-)
+from rerun.log import Colors, _normalize_colors
from rerun.log.log_decorator import log_decorator
from rerun.recording_stream import RecordingStream
@@ -22,12 +21,12 @@ def log_mesh(
entity_path: str,
positions: Any,
*,
- indices: Optional[Any] = None,
- normals: Optional[Any] = None,
- albedo_factor: Optional[Any] = None,
- vertex_colors: Optional[Colors] = None,
+ indices: Any | None = None,
+ normals: Any | None = None,
+ albedo_factor: Any | None = None,
+ vertex_colors: Colors | None = None,
timeless: bool = False,
- recording: Optional[RecordingStream] = None,
+ recording: RecordingStream | None = None,
) -> None:
"""
Log a raw 3D mesh by specifying its vertex positions, and optionally indices, normals and albedo factor.
@@ -113,12 +112,12 @@ def log_meshes(
entity_path: str,
position_buffers: Sequence[npt.ArrayLike],
*,
- vertex_color_buffers: Sequence[Optional[Colors]],
- index_buffers: Sequence[Optional[npt.ArrayLike]],
- normal_buffers: Sequence[Optional[npt.ArrayLike]],
- albedo_factors: Sequence[Optional[npt.ArrayLike]],
+ vertex_color_buffers: Sequence[Colors | None],
+ index_buffers: Sequence[npt.ArrayLike | None],
+ normal_buffers: Sequence[npt.ArrayLike | None],
+ albedo_factors: Sequence[npt.ArrayLike | None],
timeless: bool = False,
- recording: Optional[RecordingStream] = None,
+ recording: RecordingStream | None = None,
) -> None:
"""
Log multiple raw 3D meshes by specifying their different buffers and albedo factors.
diff --git a/rerun_py/rerun_sdk/rerun/log/points.py b/rerun_py/rerun_sdk/rerun/log/points.py
index cde9ff86c715..c71bdf5bb6af 100644
--- a/rerun_py/rerun_sdk/rerun/log/points.py
+++ b/rerun_py/rerun_sdk/rerun/log/points.py
@@ -1,4 +1,6 @@
-from typing import Any, Dict, Optional, Sequence, Union
+from __future__ import annotations
+
+from typing import Any, Sequence
import numpy as np
import numpy.typing as npt
@@ -35,17 +37,17 @@
@log_decorator
def log_point(
entity_path: str,
- position: Optional[npt.ArrayLike] = None,
+ position: npt.ArrayLike | None = None,
*,
- radius: Optional[float] = None,
- color: Optional[Color] = None,
- label: Optional[str] = None,
- class_id: Optional[int] = None,
- keypoint_id: Optional[int] = None,
- draw_order: Optional[float] = None,
- ext: Optional[Dict[str, Any]] = None,
+ radius: float | None = None,
+ color: Color | None = None,
+ label: str | None = None,
+ class_id: int | None = None,
+ keypoint_id: int | None = None,
+ draw_order: float | None = None,
+ ext: dict[str, Any] | None = None,
timeless: bool = False,
- recording: Optional[RecordingStream] = None,
+ recording: RecordingStream | None = None,
) -> None:
"""
Log a 2D or 3D point, with a position and optional color, radii, label, etc.
@@ -103,8 +105,8 @@ def log_point(
if position is not None:
position = np.require(position, dtype="float32")
- instanced: Dict[str, Any] = {}
- splats: Dict[str, Any] = {}
+ instanced: dict[str, Any] = {}
+ splats: dict[str, Any] = {}
if position is not None:
if position.size == 2:
@@ -147,18 +149,18 @@ def log_point(
@log_decorator
def log_points(
entity_path: str,
- positions: Optional[npt.ArrayLike] = None,
+ positions: npt.ArrayLike | None = None,
*,
- identifiers: Optional[npt.ArrayLike] = None,
- colors: Optional[Union[Color, Colors]] = None,
- radii: Optional[npt.ArrayLike] = None,
- labels: Optional[Sequence[str]] = None,
+ identifiers: npt.ArrayLike | None = None,
+ colors: Color | Colors | None = None,
+ radii: npt.ArrayLike | None = None,
+ labels: Sequence[str] | None = None,
class_ids: OptionalClassIds = None,
keypoint_ids: OptionalKeyPointIds = None,
- draw_order: Optional[float] = None,
- ext: Optional[Dict[str, Any]] = None,
+ draw_order: float | None = None,
+ ext: dict[str, Any] | None = None,
timeless: bool = False,
- recording: Optional[RecordingStream] = None,
+ recording: RecordingStream | None = None,
) -> None:
"""
Log 2D or 3D points, with positions and optional colors, radii, labels, etc.
diff --git a/rerun_py/rerun_sdk/rerun/log/rects.py b/rerun_py/rerun_sdk/rerun/log/rects.py
index 8b3987c8f581..7d6fcce35e57 100644
--- a/rerun_py/rerun_sdk/rerun/log/rects.py
+++ b/rerun_py/rerun_sdk/rerun/log/rects.py
@@ -1,4 +1,6 @@
-from typing import Any, Dict, Optional, Sequence, Union
+from __future__ import annotations
+
+from typing import Any, Sequence
import numpy as np
import numpy.typing as npt
@@ -10,14 +12,7 @@
from rerun.components.instance import InstanceArray
from rerun.components.label import LabelArray
from rerun.components.rect2d import Rect2DArray, RectFormat
-from rerun.log import (
- Color,
- Colors,
- OptionalClassIds,
- _normalize_colors,
- _normalize_ids,
- _normalize_labels,
-)
+from rerun.log import Color, Colors, OptionalClassIds, _normalize_colors, _normalize_ids, _normalize_labels
from rerun.log.error_utils import _send_warning
from rerun.log.extension_components import _add_extension_components
from rerun.log.log_decorator import log_decorator
@@ -33,16 +28,16 @@
@log_decorator
def log_rect(
entity_path: str,
- rect: Optional[npt.ArrayLike],
+ rect: npt.ArrayLike | None,
*,
rect_format: RectFormat = RectFormat.XYWH,
- color: Optional[Color] = None,
- label: Optional[str] = None,
- class_id: Optional[int] = None,
- draw_order: Optional[float] = None,
- ext: Optional[Dict[str, Any]] = None,
+ color: Color | None = None,
+ label: str | None = None,
+ class_id: int | None = None,
+ draw_order: float | None = None,
+ ext: dict[str, Any] | None = None,
timeless: bool = False,
- recording: Optional[RecordingStream] = None,
+ recording: RecordingStream | None = None,
) -> None:
"""
Log a 2D rectangle.
@@ -85,8 +80,8 @@ def log_rect(
rects = np.zeros((0, 4), dtype="float32")
assert type(rects) is np.ndarray
- instanced: Dict[str, Any] = {}
- splats: Dict[str, Any] = {}
+ instanced: dict[str, Any] = {}
+ splats: dict[str, Any] = {}
instanced["rerun.rect2d"] = Rect2DArray.from_numpy_and_format(rects, rect_format)
@@ -129,17 +124,17 @@ def log_rect(
@log_decorator
def log_rects(
entity_path: str,
- rects: Optional[npt.ArrayLike],
+ rects: npt.ArrayLike | None,
*,
rect_format: RectFormat = RectFormat.XYWH,
- identifiers: Optional[Sequence[int]] = None,
- colors: Optional[Union[Color, Colors]] = None,
- labels: Optional[Sequence[str]] = None,
+ identifiers: Sequence[int] | None = None,
+ colors: Color | Colors | None = None,
+ labels: Sequence[str] | None = None,
class_ids: OptionalClassIds = None,
- draw_order: Optional[float] = None,
- ext: Optional[Dict[str, Any]] = None,
+ draw_order: float | None = None,
+ ext: dict[str, Any] | None = None,
timeless: bool = False,
- recording: Optional[RecordingStream] = None,
+ recording: RecordingStream | None = None,
) -> None:
"""
Log multiple 2D rectangles.
diff --git a/rerun_py/rerun_sdk/rerun/log/scalar.py b/rerun_py/rerun_sdk/rerun/log/scalar.py
index 74bd5cf11ba0..6e4d9f909d5e 100644
--- a/rerun_py/rerun_sdk/rerun/log/scalar.py
+++ b/rerun_py/rerun_sdk/rerun/log/scalar.py
@@ -1,4 +1,6 @@
-from typing import Any, Dict, Optional
+from __future__ import annotations
+
+from typing import Any
import numpy as np
@@ -23,12 +25,12 @@ def log_scalar(
entity_path: str,
scalar: float,
*,
- label: Optional[str] = None,
- color: Optional[Color] = None,
- radius: Optional[float] = None,
- scattered: Optional[bool] = None,
- ext: Optional[Dict[str, Any]] = None,
- recording: Optional[RecordingStream] = None,
+ label: str | None = None,
+ color: Color | None = None,
+ radius: float | None = None,
+ scattered: bool | None = None,
+ ext: dict[str, Any] | None = None,
+ recording: RecordingStream | None = None,
) -> None:
"""
Log a double-precision scalar that will be visualized as a timeseries plot.
@@ -120,8 +122,8 @@ def log_scalar(
"""
recording = RecordingStream.to_native(recording)
- instanced: Dict[str, Any] = {}
- splats: Dict[str, Any] = {}
+ instanced: dict[str, Any] = {}
+ splats: dict[str, Any] = {}
instanced["rerun.scalar"] = ScalarArray.from_numpy(np.array([scalar]))
diff --git a/rerun_py/rerun_sdk/rerun/log/tensor.py b/rerun_py/rerun_sdk/rerun/log/tensor.py
index 89f5e9136572..9ea4087d8b11 100644
--- a/rerun_py/rerun_sdk/rerun/log/tensor.py
+++ b/rerun_py/rerun_sdk/rerun/log/tensor.py
@@ -1,4 +1,6 @@
-from typing import Any, Dict, Iterable, Optional, Protocol, Union
+from __future__ import annotations
+
+from typing import Any, Iterable, Protocol, Union
import numpy as np
import numpy.typing as npt
@@ -45,11 +47,11 @@ def log_tensor(
entity_path: str,
tensor: npt.ArrayLike,
*,
- names: Optional[Iterable[Optional[str]]] = None,
- meter: Optional[float] = None,
- ext: Optional[Dict[str, Any]] = None,
+ names: Iterable[str | None] | None = None,
+ meter: float | None = None,
+ ext: dict[str, Any] | None = None,
timeless: bool = False,
- recording: Optional[RecordingStream] = None,
+ recording: RecordingStream | None = None,
) -> None:
"""
Log an n-dimensional tensor.
@@ -88,13 +90,13 @@ def log_tensor(
def _log_tensor(
entity_path: str,
tensor: npt.NDArray[Any],
- draw_order: Optional[float] = None,
- names: Optional[Iterable[Optional[str]]] = None,
- meter: Optional[float] = None,
+ draw_order: float | None = None,
+ names: Iterable[str | None] | None = None,
+ meter: float | None = None,
meaning: bindings.TensorDataMeaning = None,
- ext: Optional[Dict[str, Any]] = None,
+ ext: dict[str, Any] | None = None,
timeless: bool = False,
- recording: Optional[RecordingStream] = None,
+ recording: RecordingStream | None = None,
) -> None:
"""Log a general tensor, perhaps with named dimensions."""
@@ -134,8 +136,8 @@ def _log_tensor(
)
return
- instanced: Dict[str, Any] = {}
- splats: Dict[str, Any] = {}
+ instanced: dict[str, Any] = {}
+ splats: dict[str, Any] = {}
instanced["rerun.tensor"] = TensorArray.from_numpy(tensor, names, meaning, meter)
diff --git a/rerun_py/rerun_sdk/rerun/log/text.py b/rerun_py/rerun_sdk/rerun/log/text.py
index 83e735102a2f..2cc333ba6240 100644
--- a/rerun_py/rerun_sdk/rerun/log/text.py
+++ b/rerun_py/rerun_sdk/rerun/log/text.py
@@ -1,7 +1,8 @@
+from __future__ import annotations
+
import logging
-from typing import Any, Dict, Final, Optional
+from typing import Any, Final
-# Fully qualified to avoid circular import
import rerun.log.extension_components
from rerun import bindings
from rerun.components.color import ColorRGBAArray
@@ -12,6 +13,8 @@
from rerun.log.text_internal import LogLevel
from rerun.recording_stream import RecordingStream
+# Fully qualified to avoid circular import
+
__all__ = [
"LogLevel",
"LoggingHandler",
@@ -51,7 +54,7 @@ class LoggingHandler(logging.Handler):
logging.DEBUG: LogLevel.DEBUG,
}
- def __init__(self, root_entity_path: Optional[str] = None):
+ def __init__(self, root_entity_path: str | None = None):
logging.Handler.__init__(self)
self.root_entity_path = root_entity_path
@@ -72,11 +75,11 @@ def log_text_entry(
entity_path: str,
text: str,
*,
- level: Optional[str] = LogLevel.INFO,
- color: Optional[Color] = None,
- ext: Optional[Dict[str, Any]] = None,
+ level: str | None = LogLevel.INFO,
+ color: Color | None = None,
+ ext: dict[str, Any] | None = None,
timeless: bool = False,
- recording: Optional[RecordingStream] = None,
+ recording: RecordingStream | None = None,
) -> None:
"""
Log a text entry, with optional level.
@@ -106,8 +109,8 @@ def log_text_entry(
recording = RecordingStream.to_native(recording)
- instanced: Dict[str, Any] = {}
- splats: Dict[str, Any] = {}
+ instanced: dict[str, Any] = {}
+ splats: dict[str, Any] = {}
if text:
instanced["rerun.text_entry"] = TextEntryArray.from_bodies_and_levels([(text, level)])
diff --git a/rerun_py/rerun_sdk/rerun/log/text_internal.py b/rerun_py/rerun_sdk/rerun/log/text_internal.py
index da7ab108eaf1..620bef756671 100644
--- a/rerun_py/rerun_sdk/rerun/log/text_internal.py
+++ b/rerun_py/rerun_sdk/rerun/log/text_internal.py
@@ -1,8 +1,9 @@
+from __future__ import annotations
+
import logging
from dataclasses import dataclass
-from typing import Any, Dict, Final, Optional
+from typing import Any, Final
-# Fully qualified to avoid circular import
from rerun import bindings
from rerun.components.color import ColorRGBAArray
from rerun.components.instance import InstanceArray
@@ -10,6 +11,8 @@
from rerun.log import Color, _normalize_colors
from rerun.recording_stream import RecordingStream
+# Fully qualified to avoid circular import
+
__all__ = [
"LogLevel",
"log_text_entry_internal",
@@ -48,10 +51,10 @@ def log_text_entry_internal(
entity_path: str,
text: str,
*,
- level: Optional[str] = LogLevel.INFO,
- color: Optional[Color] = None,
+ level: str | None = LogLevel.INFO,
+ color: Color | None = None,
timeless: bool = False,
- recording: Optional[RecordingStream] = None,
+ recording: RecordingStream | None = None,
) -> None:
"""
Internal API to log a text entry, with optional level.
@@ -81,8 +84,8 @@ def log_text_entry_internal(
"""
recording = RecordingStream.to_native(recording)
- instanced: Dict[str, Any] = {}
- splats: Dict[str, Any] = {}
+ instanced: dict[str, Any] = {}
+ splats: dict[str, Any] = {}
if text:
instanced["rerun.text_entry"] = TextEntryArray.from_bodies_and_levels([(text, level)])
diff --git a/rerun_py/rerun_sdk/rerun/log/transform.py b/rerun_py/rerun_sdk/rerun/log/transform.py
index 53971a492b13..8988f6a9d0ee 100644
--- a/rerun_py/rerun_sdk/rerun/log/transform.py
+++ b/rerun_py/rerun_sdk/rerun/log/transform.py
@@ -3,12 +3,26 @@
Learn more about transforms [in the manual](https://www.rerun.io/docs/concepts/spaces-and-transforms)
"""
-from typing import Optional, Tuple
+from __future__ import annotations
+
+from typing import Any
import numpy.typing as npt
+from deprecated import deprecated
from rerun import bindings
-from rerun.log import _to_sequence
+from rerun.components.disconnected_space import DisconnectedSpaceArray
+from rerun.components.quaternion import Quaternion
+from rerun.components.transform3d import (
+ Rigid3D,
+ RotationAxisAngle,
+ Scale3D,
+ Transform3D,
+ Transform3DArray,
+ Translation3D,
+ TranslationAndMat3,
+ TranslationRotationScale3D,
+)
from rerun.log.error_utils import _send_warning
from rerun.log.log_decorator import log_decorator
from rerun.recording_stream import RecordingStream
@@ -16,7 +30,9 @@
__all__ = [
"log_view_coordinates",
"log_unknown_transform",
+ "log_disconnected_space",
"log_rigid3",
+ "log_transform3d",
]
@@ -26,9 +42,9 @@ def log_view_coordinates(
*,
xyz: str = "",
up: str = "",
- right_handed: Optional[bool] = None,
+ right_handed: bool | None = None,
timeless: bool = False,
- recording: Optional[RecordingStream] = None,
+ recording: RecordingStream | None = None,
) -> None:
"""
Log the view coordinates for an entity.
@@ -112,11 +128,12 @@ def log_view_coordinates(
)
+@deprecated(version="0.7.0", reason="Use log_disconnected_space instead.")
@log_decorator
def log_unknown_transform(
entity_path: str,
timeless: bool = False,
- recording: Optional[RecordingStream] = None,
+ recording: RecordingStream | None = None,
) -> None:
"""
Log that this entity is NOT in the same space as the parent, but you do not (yet) know how they relate.
@@ -136,22 +153,148 @@ def log_unknown_transform(
"""
recording = RecordingStream.to_native(recording)
- bindings.log_unknown_transform(
- entity_path,
- timeless=timeless,
- recording=recording,
+ instanced: dict[str, Any] = {}
+ instanced["rerun.disconnected_transform"] = DisconnectedSpaceArray.single()
+ bindings.log_arrow_msg(entity_path, components=instanced, timeless=timeless, recording=recording)
+
+
+@log_decorator
+def log_disconnected_space(
+ entity_path: str,
+ timeless: bool = False,
+ recording: RecordingStream | None = None,
+) -> None:
+ """
+ Log that this entity is NOT in the same space as the parent.
+
+ This is useful for specifying that a subgraph is independent of the rest of the scene.
+ If a transform or pinhole is logged on the same path, this component will be ignored.
+
+ Parameters
+ ----------
+ entity_path:
+ The path of the affected entity.
+
+ timeless:
+ Log the data as timeless.
+
+ recording:
+ Specifies the [`rerun.RecordingStream`][] to use.
+ If left unspecified, defaults to the current active data recording, if there is one.
+ See also: [`rerun.init`][], [`rerun.set_global_data_recording`][].
+ """
+ recording = RecordingStream.to_native(recording)
+
+ instanced: dict[str, Any] = {}
+ instanced["rerun.disconnected_transform"] = DisconnectedSpaceArray.single()
+ bindings.log_arrow_msg(entity_path, components=instanced, timeless=timeless, recording=recording)
+
+
+@log_decorator
+def log_transform3d(
+ entity_path: str,
+ transform: (
+ TranslationAndMat3
+ | TranslationRotationScale3D
+ | RotationAxisAngle
+ | Translation3D
+ | Scale3D
+ | Quaternion
+ | Rigid3D
+ ),
+ *,
+ from_parent: bool = False,
+ timeless: bool = False,
+ recording: RecordingStream | None = None,
+) -> None:
+ """
+ Log an (affine) 3D transform between this entity and the parent.
+
+ If `from_parent` is set to `True`, the transformation is from the parent to the space of the entity_path,
+ otherwise it is from the child to the parent.
+
+    Note that new transforms replace the previous ones: if you call this function several times on the same path,
+    each new transform will replace the previous one and will not combine with it.
+
+ Examples
+ --------
+ ```
+ # Log translation only.
+ rr.log_transform3d("transform_test/translation", rr.Translation3D((2, 1, 3)))
+
+ # Log scale along the x axis only.
+ rr.log_transform3d("transform_test/x_scaled", rr.Scale3D((3, 1, 1)))
+
+ # Log a rotation around the z axis.
+ rr.log_transform3d("transform_test/z_rotated_object", rr.RotationAxisAngle((0, 0, 1), degrees=20))
+
+ # Log scale followed by translation along the Y-axis.
+ rr.log_transform3d(
+ "transform_test/scaled_and_translated_object", rr.TranslationRotationScale3D([0.0, 1.0, 0.0], scale=2)
)
+ # Log translation + rotation, also called a rigid transform.
+ rr.log_transform3d("transform_test/rigid3", rr.Rigid3D([1, 2, 3], rr.RotationAxisAngle((0, 1, 0), radians=1.57)))
+
+ # Log translation, rotation & scale all at once.
+ rr.log_transform3d(
+ "transform_test/transformed",
+ rr.TranslationRotationScale3D(
+ translation=[0, 1, 5],
+ rotation=rr.RotationAxisAngle((0, 0, 1), degrees=20),
+ scale=2,
+ ),
+ )
+ ```
+
+ Parameters
+ ----------
+ entity_path:
+ Path of the *child* space in the space hierarchy.
+ transform:
+ Instance of a rerun data class that describes a three dimensional transform.
+ One of:
+ * `TranslationAndMat3`
+ * `TranslationRotationScale3D`
+ * `Rigid3D`
+ * `RotationAxisAngle`
+ * `Translation3D`
+ * `Quaternion`
+ * `Scale3D`
+ from_parent:
+ If True, the transform is from the parent to the child, otherwise it is from the child to the parent.
+ timeless:
+ If true, the transform will be timeless (default: False).
+ recording:
+ Specifies the [`rerun.RecordingStream`][] to use.
+ If left unspecified, defaults to the current active data recording, if there is one.
+ See also: [`rerun.init`][], [`rerun.set_global_data_recording`][].
+
+ """
+ # Convert additionally supported types to TranslationRotationScale3D
+ if isinstance(transform, RotationAxisAngle) or isinstance(transform, Quaternion):
+ transform = TranslationRotationScale3D(rotation=transform)
+ elif isinstance(transform, Translation3D):
+ transform = TranslationRotationScale3D(translation=transform)
+ elif isinstance(transform, Scale3D):
+ transform = TranslationRotationScale3D(scale=transform)
+ elif isinstance(transform, Rigid3D):
+ transform = TranslationRotationScale3D(rotation=transform.rotation, translation=transform.translation)
+
+ instanced = {"rerun.transform3d": Transform3DArray.from_transform(Transform3D(transform, from_parent))}
+ bindings.log_arrow_msg(entity_path, components=instanced, timeless=timeless, recording=recording)
+
+@deprecated(version="0.7.0", reason="Use log_transform3d instead and, if xyz was set, use log_view_coordinates.")
@log_decorator
def log_rigid3(
entity_path: str,
*,
- parent_from_child: Optional[Tuple[npt.ArrayLike, npt.ArrayLike]] = None,
- child_from_parent: Optional[Tuple[npt.ArrayLike, npt.ArrayLike]] = None,
+ parent_from_child: tuple[npt.ArrayLike, npt.ArrayLike] | None = None,
+ child_from_parent: tuple[npt.ArrayLike, npt.ArrayLike] | None = None,
xyz: str = "",
timeless: bool = False,
- recording: Optional[RecordingStream] = None,
+ recording: RecordingStream | None = None,
) -> None:
"""
Log a proper rigid 3D transform between this entity and the parent.
@@ -202,22 +345,23 @@ def log_rigid3(
raise TypeError("Set either parent_from_child or child_from_parent, but not both.")
if parent_from_child:
- (t, q) = parent_from_child
- bindings.log_rigid3(
+ rotation = None
+ if parent_from_child[1] is not None:
+ rotation = Quaternion(xyzw=parent_from_child[1])
+ log_transform3d(
entity_path,
- parent_from_child=True,
- rotation_q=_to_sequence(q),
- translation=_to_sequence(t),
+ Rigid3D(translation=parent_from_child[0], rotation=rotation),
timeless=timeless,
recording=recording,
)
elif child_from_parent:
- (t, q) = child_from_parent
- bindings.log_rigid3(
+ rotation = None
+ if child_from_parent[1] is not None:
+ rotation = Quaternion(xyzw=child_from_parent[1])
+ log_transform3d(
entity_path,
- parent_from_child=False,
- rotation_q=_to_sequence(q),
- translation=_to_sequence(t),
+ Rigid3D(translation=child_from_parent[0], rotation=rotation),
+ from_parent=True,
timeless=timeless,
recording=recording,
)
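Taken together, the deprecations above amount to the following migration. The entity paths and values are illustrative only; the data classes (`Rigid3D`, `Quaternion`, …) are the ones imported at the top of this file and exposed on the package root, as the docstring examples above show.

```python
import rerun as rr

# Before (deprecated in 0.7.0): positional (translation, quaternion-xyzw) tuples.
rr.log_rigid3("world/camera", parent_from_child=([1.0, 2.0, 3.0], [0.0, 0.0, 0.0, 1.0]))
rr.log_unknown_transform("world/detached")

# After: explicit transform data classes and a dedicated disconnected-space call.
rr.log_transform3d(
    "world/camera",
    rr.Rigid3D(translation=[1.0, 2.0, 3.0], rotation=rr.Quaternion(xyzw=[0.0, 0.0, 0.0, 1.0])),
)
rr.log_disconnected_space("world/detached")
```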
diff --git a/rerun_py/rerun_sdk/rerun/recording.py b/rerun_py/rerun_sdk/rerun/recording.py
index cdc2cf42aa25..a99a228b42de 100644
--- a/rerun_py/rerun_sdk/rerun/recording.py
+++ b/rerun_py/rerun_sdk/rerun/recording.py
@@ -1,10 +1,11 @@
"""Helper functions for directly working with recordings."""
+from __future__ import annotations
import base64
import logging
import random
import string
-from typing import Any, Optional
+from typing import Any
from rerun import bindings
@@ -21,7 +22,7 @@ def as_html(
self,
width: int = DEFAULT_WIDTH,
height: int = DEFAULT_HEIGHT,
- app_url: Optional[str] = None,
+ app_url: str | None = None,
timeout_ms: int = DEFAULT_TIMEOUT,
) -> str:
"""
@@ -95,7 +96,7 @@ def show(
self,
width: int = DEFAULT_WIDTH,
height: int = DEFAULT_HEIGHT,
- app_url: Optional[str] = None,
+ app_url: str | None = None,
timeout_ms: int = DEFAULT_TIMEOUT,
) -> Any:
"""
diff --git a/rerun_py/rerun_sdk/rerun/recording_stream.py b/rerun_py/rerun_sdk/rerun/recording_stream.py
index bb769496dc47..fa731ce34c6e 100644
--- a/rerun_py/rerun_sdk/rerun/recording_stream.py
+++ b/rerun_py/rerun_sdk/rerun/recording_stream.py
@@ -1,4 +1,4 @@
-from typing import Optional
+from __future__ import annotations
from rerun import bindings
@@ -67,7 +67,7 @@ class RecordingStream:
def __init__(self, inner: bindings.PyRecordingStream) -> None:
self.inner = inner
- self._prev: Optional["RecordingStream"] = None
+ self._prev: RecordingStream | None = None
def __enter__(self): # type: ignore[no-untyped-def]
self._prev = set_thread_local_data_recording(self)
@@ -77,7 +77,7 @@ def __exit__(self, type, value, traceback): # type: ignore[no-untyped-def]
self._prev = set_thread_local_data_recording(self._prev) # type: ignore[arg-type]
# NOTE: The type is a string because we cannot reference `RecordingStream` yet at this point.
- def to_native(self: Optional["RecordingStream"]) -> Optional[bindings.PyRecordingStream]:
+ def to_native(self: RecordingStream | None) -> bindings.PyRecordingStream | None:
return self.inner if self is not None else None
def __del__(self): # type: ignore[no-untyped-def]
@@ -115,7 +115,7 @@ def wrapper(self, *args: Any, **kwargs: Any) -> Any: # type: ignore[no-untyped-
def is_enabled(
- recording: Optional[RecordingStream] = None,
+ recording: RecordingStream | None = None,
) -> bool:
"""
Is this Rerun recording enabled.
@@ -131,8 +131,8 @@ def is_enabled(
def get_application_id(
- recording: Optional[RecordingStream] = None,
-) -> Optional[str]:
+ recording: RecordingStream | None = None,
+) -> str | None:
"""
Get the application ID that this recording is associated with, if any.
@@ -154,8 +154,8 @@ def get_application_id(
def get_recording_id(
- recording: Optional[RecordingStream] = None,
-) -> Optional[str]:
+ recording: RecordingStream | None = None,
+) -> str | None:
"""
Get the recording ID that this recording is logging to, as a UUIDv4, if any.
@@ -191,8 +191,8 @@ def get_recording_id(
def get_data_recording(
- recording: Optional[RecordingStream] = None,
-) -> Optional[RecordingStream]:
+ recording: RecordingStream | None = None,
+) -> RecordingStream | None:
"""
Returns the most appropriate recording to log data to, in the current context, if any.
@@ -217,7 +217,7 @@ def get_data_recording(
return RecordingStream(result) if result is not None else None
-def get_global_data_recording() -> Optional[RecordingStream]:
+def get_global_data_recording() -> RecordingStream | None:
"""
Returns the currently active global recording, if any.
@@ -230,7 +230,7 @@ def get_global_data_recording() -> Optional[RecordingStream]:
return RecordingStream(result) if result is not None else None
-def set_global_data_recording(recording: RecordingStream) -> Optional[RecordingStream]:
+def set_global_data_recording(recording: RecordingStream) -> RecordingStream | None:
"""
Replaces the currently active global recording with the specified one.
@@ -243,7 +243,7 @@ def set_global_data_recording(recording: RecordingStream) -> Optional[RecordingS
return RecordingStream(result) if result is not None else None
-def get_thread_local_data_recording() -> Optional[RecordingStream]:
+def get_thread_local_data_recording() -> RecordingStream | None:
"""
Returns the currently active thread-local recording, if any.
@@ -256,7 +256,7 @@ def get_thread_local_data_recording() -> Optional[RecordingStream]:
return RecordingStream(result) if result is not None else None
-def set_thread_local_data_recording(recording: RecordingStream) -> Optional[RecordingStream]:
+def set_thread_local_data_recording(recording: RecordingStream) -> RecordingStream | None:
"""
Replaces the currently active thread-local recording with the specified one.
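A small sketch of how the recording-stream helpers above fit together. It assumes these functions are re-exported from the package root and that `rr.init` has created a global recording; entity paths and values are made up.

```python
import rerun as rr

rr.init("recording_stream_example")

rec = rr.get_global_data_recording()  # the stream created by rr.init, if any
if rec is not None:
    # Pass the stream explicitly to a log call…
    rr.log_scalar("metrics/loss", 0.25, recording=rec)
    # …or make it the thread-local default for a block (see __enter__/__exit__ above).
    with rec:
        rr.log_scalar("metrics/accuracy", 0.9)
```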
diff --git a/rerun_py/rerun_sdk/rerun/script_helpers.py b/rerun_py/rerun_sdk/rerun/script_helpers.py
index b8cbdbf1b443..253963de8647 100644
--- a/rerun_py/rerun_sdk/rerun/script_helpers.py
+++ b/rerun_py/rerun_sdk/rerun/script_helpers.py
@@ -18,6 +18,8 @@
```
"""
+from __future__ import annotations
+
from argparse import ArgumentParser, Namespace
import rerun as rr
diff --git a/rerun_py/rerun_sdk/rerun/sinks.py b/rerun_py/rerun_sdk/rerun/sinks.py
index ad854e38d1ac..c00ad0fc53f6 100644
--- a/rerun_py/rerun_sdk/rerun/sinks.py
+++ b/rerun_py/rerun_sdk/rerun/sinks.py
@@ -1,5 +1,6 @@
+from __future__ import annotations
+
import logging
-from typing import Optional
import rerun_bindings as bindings # type: ignore[attr-defined]
@@ -9,7 +10,7 @@
# --- Sinks ---
-def connect(addr: Optional[str] = None, recording: Optional[RecordingStream] = None) -> None:
+def connect(addr: str | None = None, recording: RecordingStream | None = None) -> None:
"""
Connect to a remote Rerun Viewer on the given ip:port.
@@ -34,7 +35,7 @@ def connect(addr: Optional[str] = None, recording: Optional[RecordingStream] = N
_connect = connect # we need this because Python scoping is horrible
-def save(path: str, recording: Optional[RecordingStream] = None) -> None:
+def save(path: str, recording: RecordingStream | None = None) -> None:
"""
Stream all log-data to a file.
@@ -57,7 +58,7 @@ def save(path: str, recording: Optional[RecordingStream] = None) -> None:
bindings.save(path=path, recording=recording)
-def disconnect(recording: Optional[RecordingStream] = None) -> None:
+def disconnect(recording: RecordingStream | None = None) -> None:
"""
Closes all TCP connections, servers, and files.
@@ -77,7 +78,7 @@ def disconnect(recording: Optional[RecordingStream] = None) -> None:
bindings.disconnect(recording=recording)
-def memory_recording(recording: Optional[RecordingStream] = None) -> MemoryRecording:
+def memory_recording(recording: RecordingStream | None = None) -> MemoryRecording:
"""
Streams all log-data to a memory buffer.
@@ -103,9 +104,9 @@ def memory_recording(recording: Optional[RecordingStream] = None) -> MemoryRecor
def serve(
open_browser: bool = True,
- web_port: Optional[int] = None,
- ws_port: Optional[int] = None,
- recording: Optional[RecordingStream] = None,
+ web_port: int | None = None,
+ ws_port: int | None = None,
+ recording: RecordingStream | None = None,
) -> None:
"""
Serve log-data over WebSockets and serve a Rerun web viewer over HTTP.
@@ -134,7 +135,7 @@ def serve(
bindings.serve(open_browser, web_port, ws_port, recording=recording)
-def spawn(port: int = 9876, connect: bool = True, recording: Optional[RecordingStream] = None) -> None:
+def spawn(port: int = 9876, connect: bool = True, recording: RecordingStream | None = None) -> None:
"""
Spawn a Rerun Viewer, listening on the given port.
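An illustrative sequence using the sinks above; the address, file name, and application id are made up.

```python
import rerun as rr

rr.init("sinks_example")

rr.connect("127.0.0.1:9876")   # stream to an already-running viewer…
# rr.save("recording.rrd")     # …or write everything to an .rrd file instead
# rr.serve(open_browser=True)  # …or host a web viewer + WebSocket server

rr.log_text_entry("status", "hello")
rr.disconnect()                # flush and close TCP connections, servers and files
```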
diff --git a/rerun_py/rerun_sdk/rerun/time.py b/rerun_py/rerun_sdk/rerun/time.py
index 4c2370d3581a..9aa6a57d2a3e 100644
--- a/rerun_py/rerun_sdk/rerun/time.py
+++ b/rerun_py/rerun_sdk/rerun/time.py
@@ -1,4 +1,4 @@
-from typing import Optional
+from __future__ import annotations
import rerun_bindings as bindings # type: ignore[attr-defined]
@@ -7,7 +7,7 @@
# --- Time ---
-def set_time_sequence(timeline: str, sequence: Optional[int], recording: Optional[RecordingStream] = None) -> None:
+def set_time_sequence(timeline: str, sequence: int | None, recording: RecordingStream | None = None) -> None:
"""
Set the current time for this thread as an integer sequence.
@@ -36,7 +36,7 @@ def set_time_sequence(timeline: str, sequence: Optional[int], recording: Optiona
bindings.set_time_sequence(timeline, sequence, recording=recording)
-def set_time_seconds(timeline: str, seconds: Optional[float], recording: Optional[RecordingStream] = None) -> None:
+def set_time_seconds(timeline: str, seconds: float | None, recording: RecordingStream | None = None) -> None:
"""
Set the current time for this thread in seconds.
@@ -73,7 +73,7 @@ def set_time_seconds(timeline: str, seconds: Optional[float], recording: Optiona
bindings.set_time_seconds(timeline, seconds, recording=recording)
-def set_time_nanos(timeline: str, nanos: Optional[int], recording: Optional[RecordingStream] = None) -> None:
+def set_time_nanos(timeline: str, nanos: int | None, recording: RecordingStream | None = None) -> None:
"""
Set the current time for this thread.
@@ -111,7 +111,7 @@ def set_time_nanos(timeline: str, nanos: Optional[int], recording: Optional[Reco
bindings.set_time_nanos(timeline, nanos, recording=recording)
-def reset_time(recording: Optional[RecordingStream] = None) -> None:
+def reset_time(recording: RecordingStream | None = None) -> None:
"""
Clear all timeline information on this thread.
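A short sketch of the timeline helpers above; timeline and entity names are made up.

```python
import rerun as rr

rr.init("time_example", spawn=True)

for frame in range(10):
    rr.set_time_sequence("frame", frame)           # integer sequence timeline
    rr.set_time_seconds("sim_time", frame / 30.0)  # floating-point seconds timeline
    rr.log_scalar("signal", float(frame))

rr.reset_time()  # later logs on this thread carry no timeline information
```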
diff --git a/rerun_py/rerun_sdk/rerun_demo/__init__.py b/rerun_py/rerun_sdk/rerun_demo/__init__.py
index 9f07b733fc92..64e1b3805f76 100644
--- a/rerun_py/rerun_sdk/rerun_demo/__init__.py
+++ b/rerun_py/rerun_sdk/rerun_demo/__init__.py
@@ -21,4 +21,6 @@
cannot carry any dependencies beyond those of rerun itself. This generally limits
demos to only using the standard library and numpy for data generation.
"""
+from __future__ import annotations
+
__all__ = ["data", "turbo", "util"]
diff --git a/rerun_py/rerun_sdk/rerun_demo/__main__.py b/rerun_py/rerun_sdk/rerun_demo/__main__.py
index dcb58f23d1ef..c8831a7cfe6b 100644
--- a/rerun_py/rerun_sdk/rerun_demo/__main__.py
+++ b/rerun_py/rerun_sdk/rerun_demo/__main__.py
@@ -1,4 +1,5 @@
"""Demo program which loads an rrd file built into the package."""
+from __future__ import annotations
import argparse
import pathlib
@@ -25,7 +26,7 @@ def run_cube(args: argparse.Namespace):
rr.script_teardown(args)
-def run_colmap(args):
+def run_structure_from_motion(args):
from rerun import bindings, unregister_shutdown # type: ignore[attr-defined]
serve_opts = []
@@ -46,7 +47,7 @@ def run_colmap(args):
rrd_file = pathlib.Path(__file__).parent.joinpath("colmap_fiat.rrd").resolve()
if not rrd_file.exists():
- print("No demo file found at {}. Package was built without demo support".format(rrd_file), file=sys.stderr)
+ print(f"No demo file found at {rrd_file}. Package was built without demo support", file=sys.stderr)
exit(1)
else:
exit(bindings.main([sys.argv[0], str(rrd_file)] + serve_opts))
@@ -66,7 +67,7 @@ def main() -> None:
)
group.add_argument(
- "--colmap",
+ "--structure-from-motion",
action="store_true",
help="Run the COLMAP data demo",
)
@@ -75,14 +76,14 @@ def main() -> None:
args = parser.parse_args()
- if not any([args.cube, args.colmap]):
+ if not any([args.cube, args.structure_from_motion]):
args.cube = True
if args.cube:
run_cube(args)
- elif args.colmap:
- run_colmap(args)
+ elif args.structure_from_motion:
+ run_structure_from_motion(args)
if __name__ == "__main__":
diff --git a/rerun_py/rerun_sdk/rerun_demo/data.py b/rerun_py/rerun_sdk/rerun_demo/data.py
index 24d6ec894e2b..20c7c5f90332 100644
--- a/rerun_py/rerun_sdk/rerun_demo/data.py
+++ b/rerun_py/rerun_sdk/rerun_demo/data.py
@@ -1,4 +1,5 @@
"""Simple data to be used for Rerun demos."""
+from __future__ import annotations
from collections import namedtuple
from math import cos, sin, tau
diff --git a/rerun_py/rerun_sdk/rerun_demo/turbo.py b/rerun_py/rerun_sdk/rerun_demo/turbo.py
index 9e454e8b2194..cc77461ca838 100644
--- a/rerun_py/rerun_sdk/rerun_demo/turbo.py
+++ b/rerun_py/rerun_sdk/rerun_demo/turbo.py
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
import numpy as np
turbo_colormap_data = np.array(
diff --git a/rerun_py/rerun_sdk/rerun_demo/util.py b/rerun_py/rerun_sdk/rerun_demo/util.py
index 76496e1a4135..03f5172ddecb 100644
--- a/rerun_py/rerun_sdk/rerun_demo/util.py
+++ b/rerun_py/rerun_sdk/rerun_demo/util.py
@@ -1,4 +1,5 @@
"""Simpe utilities to be used for Rerun demos."""
+from __future__ import annotations
import numpy as np
diff --git a/rerun_py/src/arrow.rs b/rerun_py/src/arrow.rs
index def3f5558683..2be65a7a681e 100644
--- a/rerun_py/src/arrow.rs
+++ b/rerun_py/src/arrow.rs
@@ -39,12 +39,14 @@ fn array_to_rust(arrow_array: &PyAny, name: Option<&str>) -> PyResult<(Box<dyn Array>, Field)> {
        if name == <…>::name() {
            field.data_type = <…>::data_type();
        } else if name == <…>::name() {
            field.data_type = <…>::data_type();
+        } else if name == <…>::name() {
+            field.data_type = <…>::data_type();
        }
    }
diff --git a/rerun_py/src/python_bridge.rs b/rerun_py/src/python_bridge.rs
index da18e9155e11..abe7b6ec2949 100644
--- a/rerun_py/src/python_bridge.rs
+++ b/rerun_py/src/python_bridge.rs
@@ -22,10 +22,10 @@ use rerun::{
pub use rerun::{
components::{
AnnotationContext, AnnotationInfo, Arrow3D, Box3D, ClassDescription, ClassId, ColorRGBA,
- DrawOrder, EncodedMesh3D, InstanceKey, KeypointId, Label, LineStrip2D, LineStrip3D, Mat3x3,
- Mesh3D, MeshFormat, MeshId, Pinhole, Point2D, Point3D, Quaternion, Radius, RawMesh3D,
- Rect2D, Rigid3, Scalar, ScalarPlotProps, Size3D, Tensor, TensorData, TensorDimension,
- TensorId, TextEntry, Transform, Vec2D, Vec3D, Vec4D, ViewCoordinates,
+ DisconnectedSpace, DrawOrder, EncodedMesh3D, InstanceKey, KeypointId, Label, LineStrip2D,
+ LineStrip3D, Mat3x3, Mesh3D, MeshFormat, MeshId, Pinhole, Point2D, Point3D, Quaternion,
+ Radius, RawMesh3D, Rect2D, Scalar, ScalarPlotProps, Size3D, Tensor, TensorData,
+ TensorDimension, TensorId, TextEntry, Transform3D, Vec2D, Vec3D, Vec4D, ViewCoordinates,
},
coordinates::{Axis3, Handedness, Sign, SignedAxis3},
};
@@ -151,9 +151,6 @@ fn rerun_bindings(_py: Python<'_>, m: &PyModule) -> PyResult<()> {
m.add_function(wrap_pyfunction!(log_image_file, m)?)?;
m.add_function(wrap_pyfunction!(log_mesh_file, m)?)?;
m.add_function(wrap_pyfunction!(log_meshes, m)?)?;
- m.add_function(wrap_pyfunction!(log_pinhole, m)?)?;
- m.add_function(wrap_pyfunction!(log_rigid3, m)?)?;
- m.add_function(wrap_pyfunction!(log_unknown_transform, m)?)?;
m.add_function(wrap_pyfunction!(log_view_coordinates_up_handedness, m)?)?;
m.add_function(wrap_pyfunction!(log_view_coordinates_xyz, m)?)?;
@@ -589,91 +586,6 @@ fn reset_time(recording: Option<&PyRecordingStream>) {
recording.reset_time();
}
-// --- Log transforms ---
-
-#[pyfunction]
-fn log_unknown_transform(
- entity_path: &str,
- timeless: bool,
- recording: Option<&PyRecordingStream>,
-) -> PyResult<()> {
- let transform = re_log_types::Transform::Unknown;
- log_transform(entity_path, transform, timeless, recording)
-}
-
-#[pyfunction]
-fn log_rigid3(
- entity_path: &str,
- parent_from_child: bool,
- rotation_q: re_log_types::Quaternion,
- translation: [f32; 3],
- timeless: bool,
- recording: Option<&PyRecordingStream>,
-) -> PyResult<()> {
- let rotation = glam::Quat::from_slice(&rotation_q);
- let translation = glam::Vec3::from_slice(&translation);
- let transform = macaw::IsoTransform::from_rotation_translation(rotation, translation);
-
- let transform = if parent_from_child {
- re_log_types::Rigid3::new_parent_from_child(transform)
- } else {
- re_log_types::Rigid3::new_child_from_parent(transform)
- };
-
- let transform = re_log_types::Transform::Rigid3(transform);
-
- log_transform(entity_path, transform, timeless, recording)
-}
-
-#[pyfunction]
-fn log_pinhole(
- entity_path: &str,
- resolution: [f32; 2],
- child_from_parent: [[f32; 3]; 3],
- timeless: bool,
- recording: Option<&PyRecordingStream>,
-) -> PyResult<()> {
- let transform = re_log_types::Transform::Pinhole(re_log_types::Pinhole {
- image_from_cam: child_from_parent.into(),
- resolution: Some(resolution.into()),
- });
-
- log_transform(entity_path, transform, timeless, recording)
-}
-
-fn log_transform(
- entity_path: &str,
- transform: re_log_types::Transform,
- timeless: bool,
- recording: Option<&PyRecordingStream>,
-) -> PyResult<()> {
- let Some(recording) = get_data_recording(recording) else { return Ok(()); };
-
- let entity_path = parse_entity_path(entity_path)?;
- if entity_path.is_root() {
- return Err(PyTypeError::new_err("Transforms are between a child entity and its parent, so the root cannot have a transform"));
- }
- let time_point = time(timeless, &recording);
-
- // We currently log arrow transforms from inside the bridge because we are
- // using glam and macaw to potentially do matrix-inversion as part of the
- // logging pipeline. Implementing these data-transforms consistently on the
- // python side will take a bit of additional work and testing to ensure we aren't
- // introducing new numerical issues.
-
- let row = DataRow::from_cells1(
- RowId::random(),
- entity_path,
- time_point,
- 1,
- [transform].as_slice(),
- );
-
- recording.record_row(row);
-
- Ok(())
-}
-
// --- Log view coordinates ---
#[pyfunction]
diff --git a/rerun_py/tests/unit/api_tests.py b/rerun_py/tests/unit/api_tests.py
deleted file mode 100644
index 6753dbf30e42..000000000000
--- a/rerun_py/tests/unit/api_tests.py
+++ /dev/null
@@ -1,8 +0,0 @@
-import rerun as rr
-from rerun.log.text import LogLevel
-
-
-def test_text() -> None:
- rr.log_text_entry("path", "text", level=None)
- rr.log_text_entry("path", "text", level=LogLevel.INFO)
- rr.log_text_entry("path", None, level=LogLevel.INFO) # type: ignore[arg-type]
diff --git a/rerun_py/tests/unit/test_color_conversion.py b/rerun_py/tests/unit/test_color_conversion.py
index cf8c3599ddba..ff0e743e4de0 100644
--- a/rerun_py/tests/unit/test_color_conversion.py
+++ b/rerun_py/tests/unit/test_color_conversion.py
@@ -1,4 +1,6 @@
"""Test for color_conversion module."""
+from __future__ import annotations
+
import numpy as np
from rerun.color_conversion import linear_to_gamma_u8_pixel, linear_to_gamma_u8_value
diff --git a/scripts/build_demo_app.py b/scripts/build_demo_app.py
index bf25a0af2178..68b996137d48 100755
--- a/scripts/build_demo_app.py
+++ b/scripts/build_demo_app.py
@@ -1,6 +1,7 @@
#!/usr/bin/env python3
"""Build `demo.rerun.io`."""
+from __future__ import annotations
import argparse
import http.server
@@ -10,7 +11,6 @@
import subprocess
import threading
from functools import partial
-from typing import List
from jinja2 import Template
@@ -22,7 +22,7 @@ def __init__(
title: str,
description: str,
commit: str,
- build_args: List[str],
+ build_args: list[str],
):
self.path = os.path.join("examples/python", name, "main.py")
self.name = name
@@ -37,26 +37,29 @@ def save(self) -> None:
in_path = os.path.abspath(self.path)
out_dir = f"{BASE_PATH}/examples/{self.name}"
- logging.info(f"Running {in_path}, outputting to {out_dir}")
os.makedirs(out_dir, exist_ok=True)
+ rrd_path = os.path.join(out_dir, "data.rrd")
+ logging.info(f"Running {self.name}, outputting to {rrd_path}")
+
+ args = [
+ "python3",
+ in_path,
+ f"--save={rrd_path}",
+ ]
+
subprocess.run(
- [
- "python3",
- in_path,
- "--num-frames=30",
- "--steps=200",
- f"--save={out_dir}/data.rrd",
- ]
- + self.build_args,
+ args + self.build_args,
check=True,
)
+ print(f"{rrd_path}: {os.path.getsize(rrd_path) / 1e6:.1f} MB")
+
def supports_save(self) -> bool:
with open(self.path) as f:
return "script_add_args" in f.read()
-def copy_static_assets(examples: List[Example]) -> None:
+def copy_static_assets(examples: list[Example]) -> None:
# copy root
src = os.path.join(SCRIPT_PATH, "demo_assets/static")
dst = BASE_PATH
@@ -80,7 +83,7 @@ def build_wasm() -> None:
subprocess.run(["cargo", "r", "-p", "re_build_web_viewer", "--", "--release"])
-def copy_wasm(examples: List[Example]) -> None:
+def copy_wasm(examples: list[Example]) -> None:
files = ["re_viewer_bg.wasm", "re_viewer.js"]
for example in examples:
for file in files:
@@ -90,7 +93,7 @@ def copy_wasm(examples: List[Example]) -> None:
)
-def collect_examples() -> List[Example]:
+def collect_examples() -> list[Example]:
commit = os.environ.get("COMMIT_HASH") or "main"
logging.info(f"Commit hash: {commit}")
examples = []
@@ -100,22 +103,25 @@ def collect_examples() -> List[Example]:
title=EXAMPLES[name]["title"],
description=EXAMPLES[name]["description"],
commit=commit,
- build_args=EXAMPLES[name]["build_args"].split(" "),
+ build_args=EXAMPLES[name]["build_args"],
)
- if example.supports_save():
- examples.append(example)
+ assert example.supports_save(), f'Example "{name}" does not support saving'
+ examples.append(example)
+
return examples
-def save_examples_rrd(examples: List[Example]) -> None:
- logging.info("\nSaving examples as .rrd")
+def save_examples_rrd(examples: list[Example]) -> None:
+ logging.info("\nSaving examples as .rrd…")
+ print("")
for example in examples:
example.save()
+ print("")
-def render_examples(examples: List[Example]) -> None:
- logging.info("\nRendering examples")
+def render_examples(examples: list[Example]) -> None:
+ logging.info("Rendering examples")
template_path = os.path.join(SCRIPT_PATH, "demo_assets/templates/example.html")
with open(template_path) as f:
@@ -155,14 +161,14 @@ def main() -> None:
)
parser.add_argument("--skip-wasm-build", action="store_true", help="Skip the web viewer Wasm build")
- args, unknown = parser.parse_known_args()
- for arg in unknown:
- logging.warning(f"unknown arg: {arg}")
+ args = parser.parse_args()
if not args.skip_wasm_build:
build_wasm()
+ shutil.rmtree(f"{BASE_PATH}/examples", ignore_errors=True)
examples = collect_examples()
+ assert len(examples) > 0, "No examples found"
save_examples_rrd(examples)
render_examples(examples)
copy_static_assets(examples)
@@ -186,30 +192,17 @@ def main() -> None:
SCRIPT_PATH = os.path.dirname(os.path.relpath(__file__))
# When adding examples, add their requirements to `requirements-web-demo.txt`
EXAMPLES = {
- "api_demo": {
- "title": "API Demo",
+ "arkit_scenes": {
+ "title": "ARKit Scenes",
"description": """
- This is a swiss-army-knife example showing the usage of most of the Rerun SDK APIs.
- The data logged is static and meaningless.
+ Visualizes the