Add support for GLB files in LFS tracking, enhance find_cute_box_with_image.ipynb with new functions for 3D coordinate extraction, and introduce interactive_example.py for marker processing workflow.
.gitattributes (vendored): 1 line changed

@@ -1,2 +1,3 @@
 *.parquet filter=lfs diff=lfs merge=lfs -text
 *.pdf filter=lfs diff=lfs merge=lfs -text
+*.glb filter=lfs diff=lfs merge=lfs -text
find_cute_box_with_image.ipynb

@@ -2,30 +2,34 @@
  "cells": [
   {
    "cell_type": "code",
-   "execution_count": 1,
+   "execution_count": 33,
    "metadata": {},
    "outputs": [],
    "source": [
     "from datetime import datetime\n",
     "from pathlib import Path\n",
-    "from typing import Any, Final, TypeAlias, cast, TypedDict\n",
+    "from typing import Any, Final, Optional, TypeAlias, TypedDict, Union, cast\n",
+    "from dataclasses import dataclass\n",
     "\n",
     "import cv2\n",
     "import numpy as np\n",
+    "import orjson\n",
+    "import trimesh\n",
+    "from beartype import beartype\n",
     "from cv2 import aruco\n",
     "from cv2.typing import MatLike\n",
+    "from jaxtyping import Float, Int, Num, jaxtyped\n",
     "from loguru import logger\n",
     "from matplotlib import pyplot as plt\n",
     "from numpy.typing import ArrayLike\n",
     "from numpy.typing import NDArray as NDArrayT\n",
-    "import orjson\n",
     "\n",
     "NDArray: TypeAlias = np.ndarray"
    ]
   },
   {
    "cell_type": "code",
-   "execution_count": 2,
+   "execution_count": 34,
    "metadata": {},
    "outputs": [],
    "source": [
@@ -38,7 +42,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 3,
+   "execution_count": 35,
    "metadata": {},
    "outputs": [],
    "source": [
@@ -50,7 +54,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 4,
+   "execution_count": 36,
    "metadata": {},
    "outputs": [],
    "source": [
@@ -62,7 +66,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 5,
+   "execution_count": 37,
    "metadata": {},
    "outputs": [],
    "source": [
@@ -84,7 +88,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 6,
+   "execution_count": 38,
    "metadata": {},
    "outputs": [],
    "source": [
@@ -117,14 +121,16 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 11,
+   "execution_count": null,
    "metadata": {},
    "outputs": [],
    "source": [
-    "class Marker(TypedDict):\n",
+    "@jaxtyped(typechecker=beartype)\n",
+    "@dataclass\n",
+    "class Marker:\n",
     "    id: int\n",
-    "    center: NDArray\n",
-    "    corners: NDArray\n",
+    "    center: Num[NDArray, \"2\"]\n",
+    "    corners: Num[NDArray, \"4 2\"]\n",
     "\n",
     "\n",
     "output_markers: list[Marker] = []\n",
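The Marker TypedDict becomes a shape-annotated dataclass. A minimal standalone sketch of what that change buys, assuming jaxtyping's documented support for checking a decorated dataclass's fields when an instance is constructed:

from dataclasses import dataclass
from typing import TypeAlias

import numpy as np
from beartype import beartype
from jaxtyping import Num, jaxtyped

NDArray: TypeAlias = np.ndarray


@jaxtyped(typechecker=beartype)
@dataclass
class Marker:
    id: int
    center: Num[NDArray, "2"]      # one (u, v) point
    corners: Num[NDArray, "4 2"]   # four (u, v) corner points


# Passes: shapes match the annotations.
ok = Marker(id=7, center=np.zeros(2), corners=np.zeros((4, 2)))
# Would raise at construction time: center has shape (3,), not (2,).
# bad = Marker(id=7, center=np.zeros(3), corners=np.zeros((4, 2)))

Compared with the previous TypedDict, mismatched array shapes now fail loudly when a marker is built rather than later in the pipeline.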
@@ -155,14 +161,14 @@
     "for m, i in zip(markers, ids):\n",
     "    center = np.mean(m, axis=0).astype(int)  # type: ignore\n",
     "    output_markers.append(\n",
-    "        {\n",
-    "            \"id\": i[0],\n",
-    "            \"center\": flip_y(normalize_point(center), 1),\n",
-    "            \"corners\": np.array([flip_y(normalize_point(c), 1) for c in m]),\n",
-    "        }\n",
+    "        Marker(\n",
+    "            id=int(i[0]),\n",
+    "            center=flip_y(normalize_point(center), 1),\n",
+    "            corners=np.array([flip_y(normalize_point(c), 1) for c in m]),\n",
+    "        )\n",
     "    )\n",
     "\n",
-    "with open(\"output/aruco_3d_coords.json\", \"wb\") as f:\n",
+    "with open(\"output/aruco_2d_uv_coords_normalized.json\", \"wb\") as f:\n",
     "    f.write(orjson.dumps(output_markers, option=orjson.OPT_SERIALIZE_NUMPY))"
    ]
   },
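The loop above relies on normalize_point and flip_y, which are defined in earlier notebook cells that this diff does not touch. A hypothetical sketch of what such helpers typically look like (the names match the notebook; the bodies and the example image size are assumptions):

import numpy as np
from numpy.typing import NDArray


def normalize_point(point: NDArray, width: int = 1920, height: int = 1080) -> NDArray:
    # Assumed behavior: scale a pixel (x, y) into the [0, 1] x [0, 1] unit square.
    return np.array([point[0] / width, point[1] / height], dtype=np.float64)


def flip_y(point: NDArray, y_max: float) -> NDArray:
    # Assumed behavior: mirror the vertical axis so v grows upward, as in texture UV space.
    return np.array([point[0], y_max - point[1]], dtype=np.float64)

With those semantics, the renamed output file, output/aruco_2d_uv_coords_normalized.json, holds normalized UV coordinates rather than raw pixel positions, which is what the new filename suggests.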
@@ -186,17 +192,10 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 10,
+   "execution_count": 43,
    "metadata": {},
    "outputs": [],
    "source": [
-    "from typing import Optional, Union\n",
-    "\n",
-    "import numpy as np\n",
-    "import trimesh\n",
-    "from jaxtyping import Float, Int, Num, jaxtyped\n",
-    "from beartype import beartype\n",
-    "\n",
     "@jaxtyped(typechecker=beartype)\n",
     "def interpolate_uvs_to_3d(\n",
     "    uv_points: Num[NDArray, \"N 2\"],\n",
@@ -284,6 +283,22 @@
     "        epsilon=epsilon,\n",
     "    )"
    ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 46,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "m = trimesh.load_mesh(\"sample/standard_box.glb\")\n",
+    "def marker_to_3d_coords(marker: Marker, mesh: trimesh.Trimesh):\n",
+    "    uv_points = marker.corners\n",
+    "    return interpolate_uvs_to_3d_trimesh(uv_points, mesh)\n",
+    "\n",
+    "results = [marker_to_3d_coords(marker, m) for marker in output_markers]\n",
+    "id_to_3d_coords = {marker.id: result for marker, result in zip(output_markers, results)}\n",
+    "# note that the glb is Y up (but in blender it's Z up)"
+   ]
   }
  ],
  "metadata": {
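The new cell loads sample/standard_box.glb and maps each marker's UV corners onto the mesh with interpolate_uvs_to_3d_trimesh, whose definition lives in earlier cells and is not part of this diff. A rough sketch of the usual technique behind such a function (illustrative names, not the notebook's exact implementation): find the UV-space triangle containing each point, compute the point's barycentric coordinates there, and apply the same weights to that triangle's 3D vertices.

import numpy as np
import trimesh


def uv_to_3d(uv_points: np.ndarray, mesh: trimesh.Trimesh) -> np.ndarray:
    uv = np.asarray(mesh.visual.uv)       # per-vertex UVs, shape (V, 2)
    faces = mesh.faces                    # triangle vertex indices, shape (F, 3)
    xyz_tris = mesh.vertices[faces]       # triangles in 3D, shape (F, 3, 3)
    # Lift UV triangles to z=0 so trimesh's barycentric helper (which expects 3D) applies.
    uv_tris = np.dstack([uv[faces], np.zeros((len(faces), 3))])  # shape (F, 3, 3)

    out = np.zeros((len(uv_points), 3))
    for i, (u, v) in enumerate(np.asarray(uv_points, dtype=float)):
        points = np.tile([u, v, 0.0], (len(faces), 1))
        bary = trimesh.triangles.points_to_barycentric(uv_tris, points)
        # The containing UV triangle is the one whose barycentric weights all lie in [0, 1].
        inside = np.all((bary > -1e-8) & (bary < 1 + 1e-8), axis=1)
        face = int(np.argmax(inside))
        # Reuse the weights on the 3D triangle to get the corresponding surface point.
        out[i] = (bary[face][:, None] * xyz_tris[face]).sum(axis=0)
    return out

As the added comment notes, the GLB follows the glTF convention (Y up), whereas the same asset in Blender is Z up, so downstream consumers may need an axis swap.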
interactive_example.py (new file): 18 lines added

@@ -0,0 +1,18 @@
+# %%
+import numpy as np
+
+
+# %%
+
+# %% [markdown]
+# # Extract the 3D coordinates of the ArUco markers from the image
+#
+# 1. Load the image
+# 2. Detect the ArUco markers
+# 3. Get the 3D coordinates of the markers
+# 4. Save the 3D coordinates to a file
+
+
+# %%
+
+# %%
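The new interactive_example.py is only a scaffold: empty # %% cells plus the four-step outline above. A hedged sketch of how those cells might be filled in using the OpenCV 4.7+ ArUco detector API; the image path, dictionary choice, and output filename are assumptions, and step 3 here stops at 2D pixel centers since the UV-to-3D lifting lives in the notebook:

# %%
import cv2
import numpy as np
import orjson
from cv2 import aruco

# %%
# 1. Load the image (path is an assumed example).
image = cv2.imread("sample/box_photo.jpg")
gray = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)

# 2. Detect the ArUco markers (the dictionary choice is an assumption).
detector = aruco.ArucoDetector(
    aruco.getPredefinedDictionary(aruco.DICT_4X4_50),
    aruco.DetectorParameters(),
)
corners, ids, _rejected = detector.detectMarkers(gray)

# 3. Get the marker coordinates (2D centers; the notebook lifts the corners to 3D).
centers = {} if ids is None else {
    int(i[0]): np.mean(c.reshape(4, 2), axis=0) for c, i in zip(corners, ids)
}

# %%
# 4. Save the coordinates to a file (output path is an assumed example).
with open("output/marker_centers.json", "wb") as f:
    f.write(orjson.dumps(
        {str(k): v for k, v in centers.items()},
        option=orjson.OPT_SERIALIZE_NUMPY,
    ))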
sample/standard_box.glb (LFS, new file): BIN
Binary file not shown.