From 755d65f7d72ea6f56584066dd6610c14b1d25801 Mon Sep 17 00:00:00 2001 From: crosstyan Date: Fri, 11 Jul 2025 10:17:24 +0800 Subject: [PATCH] I'm trying to reconstruct a multi-person scene using epipolar line. The code is not yet completed. --- rebuild_by_epipolar_line.ipynb | 752 ++++++++++++++++++++++ rebuild_by_epipolar_line.py | 1062 ++++++++++++++++++++++++++++++++ 2 files changed, 1814 insertions(+) create mode 100644 rebuild_by_epipolar_line.ipynb create mode 100644 rebuild_by_epipolar_line.py diff --git a/rebuild_by_epipolar_line.ipynb b/rebuild_by_epipolar_line.ipynb new file mode 100644 index 0000000..47c4eab --- /dev/null +++ b/rebuild_by_epipolar_line.ipynb @@ -0,0 +1,752 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": 1, + "id": "65c62d87", + "metadata": {}, + "outputs": [], + "source": [ + "from _collections_abc import dict_values\n", + "from math import isnan\n", + "from pathlib import Path\n", + "from re import L\n", + "import awkward as ak\n", + "from typing import (\n", + " Any,\n", + " Generator,\n", + " Iterable,\n", + " Optional,\n", + " Sequence,\n", + " TypeAlias,\n", + " TypedDict,\n", + " cast,\n", + " TypeVar,\n", + ")\n", + "from datetime import datetime, timedelta\n", + "from jaxtyping import Array, Float, Num, jaxtyped\n", + "import numpy as np\n", + "from cv2 import undistortPoints\n", + "from app.camera import Camera, CameraParams, Detection\n", + "import jax.numpy as jnp\n", + "from beartype import beartype\n", + "from scipy.spatial.transform import Rotation as R\n", + "from app.camera import (\n", + " Camera,\n", + " CameraID,\n", + " CameraParams,\n", + " Detection,\n", + " calculate_affinity_matrix_by_epipolar_constraint,\n", + " classify_by_camera,\n", + ")\n", + "from copy import copy as shallow_copy\n", + "import jax\n", + "from beartype.typing import Mapping, Sequence\n", + "from itertools import chain\n", + "import orjson\n", + "from app.visualize.whole_body import visualize_whole_body\n", + "from 
NDArray: TypeAlias = np.ndarray
DetectionGenerator: TypeAlias = Generator[Detection, None, None]

DELTA_T_MIN = timedelta(milliseconds=1)
# (original note: "all types" — the shared type definitions live below)

T = TypeVar("T")


def unwrap(val: Optional[T]) -> T:
    """Return *val* unchanged, raising ``ValueError`` if it is ``None``."""
    if val is None:
        raise ValueError("None")
    return val


class KeypointDataset(TypedDict):
    """One frame of 2-D pose detections from a single camera."""
    frame_index: int
    boxes: Num[NDArray, "N 4"]
    kps: Num[NDArray, "N J 2"]
    kps_scores: Num[NDArray, "N J"]


class Resolution(TypedDict):
    width: int
    height: int


class Intrinsic(TypedDict):
    """Camera intrinsics."""
    # K
    camera_matrix: Num[Array, "3 3"]
    # distortion coefficients; usually 5
    distortion_coefficients: Num[Array, "N"]


class Extrinsic(TypedDict):
    rvec: Num[NDArray, "3"]
    tvec: Num[NDArray, "3"]


class ExternalCameraParams(TypedDict):
    name: str
    port: int
    intrinsic: Intrinsic
    extrinsic: Extrinsic
    resolution: Resolution


def get_camera_params(camera_path: Path) -> ak.Array:
    """Load the intrinsic/extrinsic parameters of every camera position."""
    return ak.from_parquet(camera_path / "camera_params.parquet")


def get_camera_detect(
    detect_path: Path, camera_port: list[int], camera_dataset: ak.Array
) -> dict[int, ak.Array]:
    """Load the 2-D detection data of every requested camera port.

    Ports present in ``camera_dataset`` but not listed in ``camera_port``
    are skipped; requested ports absent from the dataset are silently
    missing from the result, exactly as before.
    """
    return {
        int(port): ak.from_parquet(detect_path / f"{port}.parquet")
        for port in ak.to_numpy(camera_dataset["port"])
        if port in camera_port
    }


# Path of the camera intrinsics/extrinsics
CAMERA_PATH = Path("/home/admin/Documents/2025_4_17/camera_params")
# Intrinsics/extrinsics of every camera position
AK_CAMERA_DATASET: ak.Array = get_camera_params(CAMERA_PATH)
# Path of the 2-D detection data
DATASET_PATH = Path("/home/admin/Documents/2025_4_17/detect_result/many_people_01/")
# 2-D detection data of the selected camera ports
camera_port = [5607, 5608, 5609]
KEYPOINT_DATASET = get_camera_detect(DATASET_PATH, camera_port, AK_CAMERA_DATASET)
# Quick shape sanity-check on one camera's raw arrays.
kps_5607 = np.array(KEYPOINT_DATASET[5607]["kps"])
display(kps_5607.shape)
kps_5607_scores = np.array(KEYPOINT_DATASET[5607]["kps_scores"])
display(kps_5607_scores.shape)


# --- pack all of the 2-D detection data into Detection batches ---------------


@jaxtyped(typechecker=beartype)
def undistort_points(
    points: Num[NDArray, "M 2"],
    camera_matrix: Num[NDArray, "3 3"],
    dist_coeffs: Num[NDArray, "N"],
) -> Num[NDArray, "M 2"]:
    """Undistort 2-D points; passing ``P=K`` keeps the result in pixel space."""
    K = camera_matrix
    dist = dist_coeffs
    res = undistortPoints(points, K, dist, P=K)  # type: ignore
    return res.reshape(-1, 2)


@jaxtyped(typechecker=beartype)
def to_transformation_matrix(
    rvec: Num[NDArray, "3"], tvec: Num[NDArray, "3"]
) -> Num[NDArray, "4 4"]:
    """Build a 4x4 homogeneous transform from a rotation vector and translation."""
    res = np.eye(4)
    res[:3, :3] = R.from_rotvec(rvec).as_matrix()
    res[:3, 3] = tvec
    return res


def from_camera_params(camera: ExternalCameraParams) -> Camera:
    """Convert one row of the camera-parameter table into a ``Camera``."""
    rt = jnp.array(
        to_transformation_matrix(
            ak.to_numpy(camera["extrinsic"]["rvec"]),
            ak.to_numpy(camera["extrinsic"]["tvec"]),
        )
    )
    K = jnp.array(camera["intrinsic"]["camera_matrix"]).reshape(3, 3)
    dist_coeffs = jnp.array(camera["intrinsic"]["distortion_coefficients"])
    image_size = jnp.array(
        (camera["resolution"]["width"], camera["resolution"]["height"])
    )
    return Camera(
        id=camera["name"],
        params=CameraParams(
            K=K,
            Rt=rt,
            dist_coeffs=dist_coeffs,
            image_size=image_size,
        ),
    )


def preprocess_keypoint_dataset(
    dataset: Sequence[KeypointDataset],
    camera: Camera,
    fps: float,
    start_timestamp: datetime,
) -> Generator[Detection, None, None]:
    """Yield one undistorted ``Detection`` per person per frame.

    Timestamps are synthesized from ``start_timestamp`` plus
    ``frame_index / fps``, so all cameras must share the same epoch.
    """
    frame_interval_s = 1 / fps
    for el in dataset:
        frame_index = el["frame_index"]
        timestamp = start_timestamp + timedelta(seconds=frame_index * frame_interval_s)
        # `boxes` is currently unused, but it is kept in the zip so that a
        # shorter `boxes` array still truncates iteration exactly as before.
        for kp, kp_score, boxes in zip(el["kps"], el["kps_scores"], el["boxes"]):
            kp = undistort_points(
                np.asarray(kp),
                np.asarray(camera.params.K),
                np.asarray(camera.params.dist_coeffs),
            )
            yield Detection(
                keypoints=jnp.array(kp),
                confidences=jnp.array(kp_score),
                camera=camera,
                timestamp=timestamp,
            )


def sync_batch_gen(
    gens: list[DetectionGenerator], diff: timedelta
) -> Generator[list[Detection], Any, None]:
    """
    Given a list of detection generators, yield batches of detections whose
    timestamps fall inside the same ``diff``-wide window.

    Bug fixed: the original ``except StopIteration: return`` aborted the whole
    sync as soon as ANY one generator ran dry, dropping the in-flight batch,
    the parked out-of-window detections, and the remaining output of every
    other generator; ``finished[]`` was never set, so its flush path was dead
    code.  An exhausted generator is now marked finished and the survivors
    keep batching until all sources are drained.

    Args:
        gens: list of detection generators
        diff: maximum timestamp difference between detections to consider
            them part of the same batch
    """
    from more_itertools import partition

    N = len(gens)
    last_batch_timestamp: Optional[datetime] = None
    current_batch: list[Detection] = []
    paused: list[bool] = [False] * N
    finished: list[bool] = [False] * N
    unmached_detections: list[Detection] = []

    def reset_paused():
        # Un-pause every generator that still has data; exhausted ones stay
        # permanently paused.
        for i in range(N):
            paused[i] = finished[i]

    def rollover() -> list[Detection]:
        # Close the current window: advance its start by `diff`, re-split the
        # parked detections into (still-future, now-in-window), and hand back
        # the finished batch for the caller to yield.
        nonlocal last_batch_timestamp, current_batch, unmached_detections
        batch = current_batch
        reset_paused()
        last_batch_timestamp = unwrap(last_batch_timestamp) + diff
        bad, good = partition(
            lambda x: x.timestamp < unwrap(last_batch_timestamp),
            unmached_detections,
        )
        current_batch = list(good)
        unmached_detections = list(bad)
        return batch

    # a small epsilon to avoid floating point precision issues
    EPS = 1e-6
    diff_esp = diff - timedelta(seconds=EPS)
    while True:
        for i, gen in enumerate(gens):
            if finished[i] or paused[i]:
                if all(finished):
                    # All generators exhausted: flush what is left and exit.
                    if len(current_batch) > 0:
                        yield current_batch
                    return
                continue
            try:
                val = next(gen)
            except StopIteration:
                finished[i] = True
                paused[i] = True
                if all(finished):
                    if len(current_batch) > 0:
                        yield current_batch
                    return
                if all(paused) and last_batch_timestamp is not None:
                    # Every live source is waiting on the window boundary.
                    yield rollover()
                continue
            if last_batch_timestamp is None:
                # First detection overall defines the first window's start.
                last_batch_timestamp = val.timestamp
                current_batch.append(val)
            elif abs(val.timestamp - last_batch_timestamp) >= diff_esp:
                # Outside the current window: park it and pause this source.
                unmached_detections.append(val)
                paused[i] = True
                if all(paused):
                    yield rollover()
            else:
                current_batch.append(val)


def get_batch_detect(
    keypoint_dataset,
    camera_dataset,
    camera_port: list[int],
    FPS: int = 24,
    batch_fps: int = 10,
    start_timestamp: datetime = datetime(2024, 4, 2, 12, 0, 0),
) -> Generator[list[Detection], Any, None]:
    """Build one detection generator per requested port and synchronize them.

    ``start_timestamp`` generalizes the previously hard-coded epoch; its
    default preserves the original behavior for existing callers.
    """
    gen_data = [
        preprocess_keypoint_dataset(
            keypoint_dataset[port],
            from_camera_params(camera_dataset[camera_dataset["port"] == port][0]),
            FPS,
            start_timestamp,
        )
        for port in camera_port
    ]
    return sync_batch_gen(gen_data, timedelta(seconds=1 / batch_fps))
# Pack all of the 2-D detection data into time-synchronized batches
# (one generator per camera port, merged by timestamp window).
sync_gen: Generator[list[Detection], Any, None] = get_batch_detect(
    KEYPOINT_DATASET,
    AK_CAMERA_DATASET,
    camera_port,
    batch_fps=24,
)

# Pull the first synchronized batch; each element is one person's detection
# from one camera within the same time window.
detections = next(sync_gen)
display(detections)
display(len(detections))

# Epipolar constraint — pairwise affinity between detections seen from
# different cameras.  (Original comment "极限约束" is presumably a typo for
# "极线约束", i.e. epipolar constraint.)
# NOTE(review): alpha_2d=3500 is an unexplained tuning constant — confirm its
# meaning against `calculate_affinity_matrix_by_epipolar_constraint`.
from app.camera import calculate_affinity_matrix_by_epipolar_constraint

sorted_detections, affinity_matrix = calculate_affinity_matrix_by_epipolar_constraint(
    detections, alpha_2d=3500
)
# Summarize each sorted detection: timestamp, source camera, keypoint shape.
display(
    [
        {
            "timestamp": str(det.timestamp),
            "camera": det.camera.id,
            "keypoint": det.keypoints.shape,
        }
        for det in sorted_detections
    ]
)
with jnp.printoptions(precision=3, suppress=True):
    display(affinity_matrix)


from app.solver._old import GLPKSolver


def clusters_to_detections(
    clusters: list[list[int]], sorted_detections: list[Detection]
) -> list[list[Detection]]:
    """
    Resolve index clusters back into the detections they refer to.

    Args:
        clusters: list of clusters, each cluster is a list of indices of the
            detections in the `sorted_detections` list
        sorted_detections: list of SORTED detections

    Returns:
        list of clusters, each cluster is a list of detections
    """
    resolved: list[list[Detection]] = []
    for cluster in clusters:
        resolved.append([sorted_detections[index] for index in cluster])
    return resolved


# Solve the assignment: group detections of the same person across cameras.
solver = GLPKSolver()
aff_np = np.asarray(affinity_matrix).astype(np.float64)
clusters, sol_matrix = solver.solve(aff_np)
display(clusters)
display(sol_matrix)
iKSEpwOKMwFmw1q6yEShTQ3hMMQDJ34+Aw3GANVNRAJ20gz6eQ6Mzkt6qXwgJdcgjianOyybcZuixCJ9v57EpHuU7ARkQFveDHkZMMn+2LBpmww+Fvg/LNgV83Re1gsKxZkANy4yWsewljb2YwfZSgqzua09mL2fpTHpgN1lJ29h0h6M5PbKvifzgIWVv/vqX2DItJlSR9js3DhQiZNmkR2djaFhYVcddVVbN++PaFOR0cHc+fOJT8/n6ysLK699lpqa2sT6lRVVTFjxgwyMjIoLCxk3rx5hMPhZDdXRFJAoNlOVZUbTzifM4MTGLlnGl9r+yaT9t2ENz3tiPppuLlixAiu5iru5m4e4AGu4ApaTCtP7lvNTypfZNmWA9hb8ng1uprf1f2ZvVsH882diwmGfORm69+EIv1V0n87X3vtNebOncukSZMIh8PcfffdTJ06lW3btpGZmQnA9773Pf7nf/6H5557Dq/Xy+23384111zD66+/DkAkEmHGjBkUFRXxxhtvUF1dzTe+8Q2cTie/+MUvkt1kERkAnA44vcRiV1U6+eQzlKGMYhTFFJPdkkX6qPcZd7rFR3ssnv1kM+HRO7h60pm89nok4TwjhhQwe/8/kd7Qxiu8yxM8QR11hAmDAfyxev/Kf3IGb/ItvoUrMoNNuSv5YNcBlkb+gM0WObKBItI/mF5WV1dnAPPaa68ZY4xpamoyTqfTPPfcc/E67733ngFMZWWlMcaYF1980dhsNlNTUxOv88gjjxiPx2OCwWCXXtfv9xtiX1MqKioDuDiclrnM82XzA35gltgWmUUsMvOYZ67majOBCaaYYpNGmrGwTK49y9xt/dDcxV3mjOxC89uyH5iSPFvC+Ww2y9ixmaL8rr2+Hbs5I6fUTBhtM5aF8WRifHl9/7moqKR68fv9Pcodvd6f6vfH/vmTl5cHwIYNGwiFQlRUVMTrjB49mqFDh1JZWckFF1xAZWUlY8eOxefzxetMmzaN2267ja1btzJ+/PgjXicYDBIMBuM/BwKB3npLInIKGQNVZjdb2EHh0Hre3dNOZ9gctW5jpIVfsoSv8lXusf+EV3y/ItyUOMo3Go19bzY1x6Zwh4/T+ZKbDSMGR2jt2E2BF/I80NQCHZ1JfIMiklS9Gmyi0Sh33nknF110EWPGjAGgpqYGl8tFTk5OQl2fz0dNTU28zmdDzcH9B/cdzcKFC3nggQeS/A5EpK9FwoatzbHRv7V7oPMEQ+0iRPgzf+bN4N/IqfEztAjqGo+sl50J7R3Q0n7sc7W2w4d7YtPAd9dCexB8uUc/n4j0D726QN/cuXPZsmULzzzzTG++DAALFizA7/fHy+7du3v9NUXk1BpefOTCeYezWTBhFAwqaSArLXLM3pX9jeDNioWWo8lww+BCOG0wfHNGrBTlx3przNE7jESkH+i1YHP77bezbNkyVq1axZAhQ+Lbi4qK6OzspKmpKaF+bW0tRUVF8TqHz5I6+PPBOodzu914PJ6EIiKp5YPdnHD9mKiBrR9DfRNU18eCzrG0tMfqH87pgC9MgHR37NbTyg2x3pqJoyEz/ejHiEj/kPRgY4zh9ttv5/nnn2flypWUlZUl7D/vvPNwOp2sWLEivm379u1UVVVRXl4OQHl5Oe+++y51dXXxOsuXL8fj8XDWWWclu8kiMkBYwNkjjr3fZkGaC/JzYMxpcP6Z8M6Hx64faIEzhx+5PRSG1ZtivTrZGbE1b159G158A6oPnNx7EJFe1qMhx8dx2223Ga/Xa1599VVTXV0dL21tbfE6t956qxk6dKhZuXKlWb9+vSkvLzfl5eXx/eFw2IwZM8ZMnTrVbNq0ybz88stm0KBBZsGCBV1uh2ZFqaikZrFZx95XPgYz/QLMuJGY267B3Hk9xuk4/vkcdozbhRlSeOLXHlp04vOpqKgkp/R0VlTSg82xGvj444/H67S3t5tvf/vbJjc312RkZJirr77aVFdXJ5znk08+MZdddplJT
083BQUF5q677jKhUKjL7VCwUVFJzTK8GJOdcez9nkzM6GGYKy/BDCvCWMcJQgeLzYbJSIuVY9Xx5WEy07p2PhUVlZMvPQ021qdhJOUEAgG8Xm9fN0NEesnZI+C9ncce7+LJhCGFsG1n185ns2BYMezcByUFUFMf2+6wgycrtl+zoUROHb/f36Pxsr06K0pEpLds2wmDcmMDfY+mvQPe+6Tr54uaWKiB2JTynGzw5ce2h8MKNSIDhYKNiAxIxhyael3qi03btttiA32dDji9tOfnPtAEDYHYQOFwJDYzSkQGBj3JTUQGLP+ngaO5LXbrye2KbfPlda+3RkRSh4KNiAx4Tc2JP39S3TftEJG+p1tRIiIikjIUbERERCRlKNiIiIhIylCwERHpIcsGNhu40i0ysvR1KtIfaPCwiAhg+/Sp4Q6nhTPdhmWBJ9+B3WnhzrDwDHJgWRbZ+XYcLgvLZuFKjz1hMxKGi4dksfQ31eyqCfbhuxARBRsRSTk2R2w9G4fThisjFj6ych240y3sLovcYieWBRkeO+4MGw4L3G6LELGQEuqMgoHAgTCREATbouyvCoGBjzaECXcaolEItkZia79HoOwuF1++wMvjL9Qdt20i0rsUbEQkZVw4PpuSi9Jpt6JgQSRk6OyIPXOhpSFCZ3uUaMjg3xGkyMB4f4RR7VGyotDUFmUL8EIUDhiIdPO1azpDXDIqC4c9tqifiPQNBRsRGZDsDrA5rIRt06bksOKDAG+tacYAGR4blnWojj0ImfUhfgqMBkYBjcRCzAbgauCbwE5gKfA6EOpCWywbhDBU7emg1Odm5z7djhLpKwo2ItKn7DaYflkeAU/00DanRXq2Hes4x4VDBocrsca4i7KpPxOyRjoxBtr8Ecynp3U4LG6+ZBALf1HFyx+38zbwDtAOlAFDiQWc84DhwFeB/wV+DLzfhfdhgFfW+Zl8draCjUgfUrARkT7ldtm48sJcfrx0N+Fw7LZRJGRoCxz/fk4kZIh+porLaXF5gZcP9rXx8r/Vx5LGZzjdFqNHpXPbdcXc99hu8hpDXAt4P1NnH7EenNOA8cBUYBIwD3i2C+/lw90dfPXCXN2OEulDCjYi0qfyPA6ySp20NIZpC0RPfMAxOOwWrf4IjrTYjCZjjqzzSXWQDa/6WfDNwTz0cBX/EYzSdFidLCDj0z+nA4OBPYDTYZGTZWd/U/iYbQiGotQ2hHQ7SqQPaeEFEelTLqeF5YSsvK7/O8uyoCAHCnNjxWYDb5aNcKiNguwow0ttDCkEF3AxMBeoMJDXHmXD1hbe2trCFdcWsd8GHSSWA0DVp2U7sBL4APBk2lny3eGcVZZ+RHtsdmK3vAys3dJMxfk5J/ORiMhJULARkT7XWBsmt7jrwebsEVDghTwP5HrAZsGgXBd76iLUVkew0u2c0Q5LhsNM4PvAf3ca/vUPjfyXgdJXDvAlt8WVE70neKVD6v1h7l1axTdmFHLm8MRw40q3xaaAR2K3o8pK3LicxxshJCK9RbeiRKRP+fJcvLelldxiZ5fq223QGICy/EzSOguI1HsocbsY68ihoL6DlsZWfL56Jn1QQ2N7CC/wBrDMDYPPS2fEmjZ+FIXm/9xHYVk6VUPSWL+no0uvXVXTyb/8v73Mmz2YZX9r4G+bmo+oEwwZqmqDjCxNY+vH7d34JEQkGRRsRKRPeTLtVO/uJGuIvUv17ZbFl1wXcKW7nNxhHbzT4qamZDMjckooqz2Lr5eEeHf8Ckr3XkT4wzx2sIZ17GBQejulVpSfW/CRgeqIwV0f4qffHcZ9S3fzSXXXxsQcaAqz5Pd7WXT7MNJcNpa/5T+izppNzUw516NgI9IHdCtKRPpcZ0cUu8PCfoJ/allYfL14KpPqruaOzc/wjb8+T37TaPY2hsjbOYm129uYsf53vBt4l//48L+o56+8zSgu44eMa57HmmGT+UuaxSagFqhqDPHrZ6q5e84QsjO6/nV4oCnMD3+3iy9P8
vKlid4jpqXvqGpnWJFuR4n0BQUbEelTgwtd7K7pJNgexX2CcGEwvGEqWcAC9rGPfVTz84wFTDy7kGjFK/w6bREftexnbYaNWnuIB/mI9TzHm/yER4f+kaixsNkSw8bG7a08/2o9d/3DYBxd6zQCYuHmJ/++hwvGZDF9Si7hjkPTsIIhw+5Pb0eJyKmlYCMifSozzUZre4SWhkiXZkZ1RgPYnbFFYrxZYPM28P/2/hehkW+DFSHcaYhEDC+n2VhFbLG9x4iwfHcVH+RsIRo5ch74XyubaPCH+dYVvuMuCni49mCUXz9dzaSRWUzMTMf2mW/Uv21qZso4TzfOJiLJoGAjIn3KsiyMgabaMDlFJw42u6qhpS3253AYAq3gb7EIhW20B2Pr4LQ2RUjPtRMFfg/UAKcNBvsxemSiBh77Uw1nDk/nS92YKQWxcPPIS7Wcf4GHG2cUxsPN+5+0k+916HaUyCmmYCMifSrf66DeH6apJkRu0YlnRrldMHJobC2bUh9MuwC+dqkdp8Mweji4ndCwL5wwy6owF6pqj78acDBk+Ml/7OH6v8vn9CHdu4UUNoZf/3c1NgtmTR+EzQbhiGHnvmC3zyUiJ0fBRkT6lMtp0RmO0tIQITv/xINcOjqhwQ8Zbjj3jFhY+aTaTiQSob0DykqgcV+IvJJDwaatA0LHXjA4rjEQ5v88tY95s0vwZnVjwA3QGYryxLI6nA6Lef8wGLfLYu3WZiaPye7WeUTk5CjYiEi/EGyPYnda2LqwCIXTAe1BePYVWLMZPJlO9tQFCbTBB1UQOBDGUxALJrnZkJPdtWAD8EFVB//finp+/K0huLt4G8mVbtHZbohE4clldQRaI/xozhDqGkKU+lyku/VVK3Kq6LdNRPqMyxGbpRTsNJgodLYb0jJP/LVU2wBnDo+Njdm5D/Y3uVi5Icy+/bFtHa1RXGk2cr2xHp49dd1r14p1frZ93M4/XuXD6kK28RY6aKqLJadIFJY+X8OOqg6+/bViaupDjD0t4wRnEJFk6fVgs2jRIizL4s4774xv6+joYO7cueTn55OVlcW1115LbW1twnFVVVXMmDGDjIwMCgsLmTdvHuFwF//JJSIDgs0GFrHxKADN9WGyuzAzKmrgvU8g+9O84Mm0E2g59P0QCRlcVmxtnKM9DLMr53/q5f0MynUy7YKcLhxh8dnHiUei8PuX9rNpeyvnjc7k0kndG5AsIj3Xq8Fm3bp1PPbYY5xzzjkJ27/3ve/xl7/8heeee47XXnuNffv2cc0118T3RyIRZsyYQWdnJ2+88QZPPvkkTzzxBPfee29vNldETrnE7pCmmjBeX9cWRI8ayP/0eVFnDHUT7OwkIw0GD4oNLE6LRgg77HR09qxlnWHDr/6wjysuyTvi2VBd9b9rm3jhtQamnOshzaXZUSKnhOklzc3NZuTIkWb58uXmC1/4grnjjjuMMcY0NTUZp9NpnnvuuXjd9957zwCmsrLSGGPMiy++aGw2m6mpqYnXeeSRR4zH4zHBYLBLr+/3+w2xf0KpqKj001KU7zQ/mjMk/nPBUKc5/ypvt86RlYFZ8t2hZkih3bicmDxPbPvYL2WZ4ePSE+peNrfAuNKsbp1/eLHbPPSDMuPLcx6zzriKbDN8XNox9w8rchubre8/bxWVgVT8fn+P8kev9djMnTuXGTNmUFFRkbB9w4YNhEKhhO2jR49m6NChVFZWAlBZWcnYsWPx+XzxOtOmTSMQCLB169bearKInGIuh0XoMwvmtTREyM7r3myk9o7YWjg19VE6Q9AQiG1vqg2RP7hrD9Y8nk+qgzz18gFuu7YI2zE6XdKybHS0Ro95jl01QaLH3i0iSdQrD8F85plnePvtt1m3bt0R+2pqanC5XOTk5CRs9/l81NTUxOt8NtQc3H9w39EEg0GCwUMPsQsEAifzFkTkFIgtznco2HS2RXG6LWx2iB5nzZnPykizE+w0RKImYbt/f4TTJx32FWc4/O5Xl
1S+28zHezs47CXiMnPstDV1scEi0quS3mOze/du7rjjDp566inS0k7dwlQLFy7E6/XGS2lp6Sl7bRHpmSE+F3tqDw2CiX46M8rdhZlRB6Wn2ejojB4xSLi9OUJapg3rM6dq80fI8HavRwjAGKipD3X7OBE59ZIebDZs2EBdXR0TJkzA4XDgcDh47bXX+M1vfoPD4cDn89HZ2UlTU1PCcbW1tRQVFQFQVFR0xCypgz8frHO4BQsW4Pf742X37t3JfmsikmQup0UwlHiPJnAgjCe/653J+V4Hpw1J49ZrfXx1Si5fnZLLjCm5jBmewZAcF2eOSGfwIBdZGTbcdgtPlh27LTbAuCtTuUVkYEn6rahLL72Ud999N2HbnDlzGD16NPPnz6e0tBSn08mKFSu49tprAdi+fTtVVVWUl5cDUF5ezs9//nPq6uooLCwEYPny5Xg8Hs4666yjvq7b7cbtdif77YhIL7KOcl+osSZETpGD2p1dm870wa527lu6m6x0G4ML3fEzTjgjkzE5aeR/OQ+C4M2yM+TMNOpLvHR+ZjxMS1uEjlCsu6czFGV3bSd7aoM4t7ZQDBy8wRQFPgGagANAS4/esYj0tqQHm+zsbMaMGZOwLTMzk/z8/Pj2m266ie9///vk5eXh8Xj4zne+Q3l5ORdccAEAU6dO5ayzzmL27NksXryYmpoafvzjHzN37lyFF5EUMsTn4r2d7Qnb/LVhRkzo+oJ2kSjsqo6Nr9v6ceK5zu1op6k2xCebOwC4+IYc3lvTSnNtON5b48l0xKdiu5w2hvhctLZH+HvgB8CfgGVAPTARyAHOApYCb376OnanRTh0jAE4InJK9crg4RP51a9+hc1m49prryUYDDJt2jQefvjh+H673c6yZcu47bbbKC8vJzMzkxtvvJGf/OQnfdFcEektBr4+rYBxp2cQDUZpqg5ywG1x5umZ7C1sJxQysedItcV6WCJR06UF9+zEelrq94QYNNwVDzYH55EGPxNCOjoTx858uCdWdxdQDtwAXAA8CrwAfEQs3Nz56X//14o9UiHYpmlPIv2BZUxP1uXs/wKBAF6vt6+bISLH4XJa5GU7+NkgF9cZ2N3QiRnnYf00DzveD5Gb68CZbpHuttHZbmhtjRAOG8IRQ/WBTqIGag50Ut8UJhIx7K7rxOcPsygYZTdQM8iOf4aXhicaMMDwyRk8+mGQvfVdm8E0GlgONAN1wE5gI7AG+Bj4CvCsBdO+U8DLjxwgovHFIknj9/vxeDzdPq5PemxERAA6QwZHQ4jRDSFWEPtCOn1FPZM/CPC/rYaMeUN4eYcfm83C6bbIzrbjctpw2C2Kx6RhsyxOS8/El+vCRA3NbRF27myncV8HZ3YYijqiBM5Np+MaLxuBQreNqwod/LU6hDHgjFh8sq2djs6j//vufWA+cDvwDrGeoDzgIuA64D+J9QCJSP+hYCMifaqT2BiWAqAYyAZKdod4ONPG/6lq5ZXHG+LrzxxtFpPdYeE47HEFG4B70ywO5NipKXPj3x5kXUcU27npjKkJc1mWnV/Z4Lqri/jFkt3sqgoeeeJPPQuc+Wm7QsA+4Dzgz8A/AbXAx0o3Iv2Ggo2I9Ck/sQG6NmAEsfAww4L8tigX/bGBNAMdAIajjq8JdxrCh/W4vA6sb4G9ByIM2tNJ5YEwZ9SGKSp08OjHQQrrI3y9wIltpo26xuPfPwoDvwJ+AWwHzgF+D9wG/IZY7824d9p5VuvzifQLvf50bxGR4wkCHxC77fMicBPwqysG8fQ0L3kui0t7sNZMBHiEWAhZVRvmn3wO3gPezLHzQ5vF+0DG5Byq24JdGvTbAPwbcDqxMTb/APwVuAN4N82iuN0wNtqjRY1FJMkUbESkX8nLcZAxLJ0nBjv4+dVeGs/K5O+/nN/t8+wC1gKT9oZYWuLkemCQ184f3RZ3um3sOyOD9g9acXSxp2UD8GtiA4rfJ3Y7ai8wodPgrA7Fp4KLSN9SsBGRfuXyS
/J4/pV6bA6IuGxs2dPBmWXpXHZhTrfP9Qxwdm2I7CInvwUuaYzwRhRsZ2VRVR8krzHCGd043w7gx0Al8BjwB+DpNBt/vDSb/wAau91CEUk2BRsR6TcG5TgoKXCx6f1WwiGDK92iozPK756tZnp5LmNP7/rCfRBbHfhXzVFKMm3ssWD7+jY8TREyxmaT82EL62tDXNjNNgaJ9QRtI3Zbqs6CznTdhBLpLxRsRKTfuPySPP7n9UYiUWhpiJD96TOjGpsjLPn9Xu6aVcLgQa5unXNzp8EEDWe6LNZ77FxT4KKxPcJoj8XGmjD5nNzYmPRsG20BLc4n0l8o2IhIv3Cwt+adHa0ANOwLkVvsjO/fU9fJoif3cv8tpeR5uj6h0xjY3xrhC1k2arx2rrssn2Xr/TQXOti7P8xDnNxaNK40G6EOBRuR/kLBRkT6ha9efKi3BqCxOoQrzSIzxx6v8/4n7fz+xf3ccUMxGWld//qqDkSxDXJgMu10Dnbz0e4OtmXY6GiNEkj2GxGRPqVgIyJ9blCOg8GDDvXWALQ0RggFDTZ74o2i1RsDvP9JOzdf5cNuP/xMR1e9L8Sbg52Uup08vrqJRgzvGehsV0+LSKpRsBGRPnf5xXm8+JneGoiFjsaaoy+e98flB4hGDf94ha9L42OaqkNkDnbyd1/MZW1bJ7ZMG5GIwSjXiKQcBRsR6VMFOQ5KBrnY/JneGoBoBA5UHT3YRA0sfb6WQblOZkzJPeFrtPojnDUqA5MJQRPFk++gpYsPwjyRrDw7zQ1adlikv1CwEZE+9dUpuUf01hzkrw1hO8btpmDI8Ltnq6k438u4kcefBh4KGoZHXaxc7wfAU+jAvz98sk0HID3bTkezgo1If6FgIyJ9JivdhjfLcURvzUGW3SJvsPOo+wCaWiIs/s+9fO/rJZT6jj0N3JthJyfHQW1bLMxk5dqPeZtLRAY2BRsR6TNfmODltbcDR+2tAThQ1UnhsOOvW7PvQIif/sceFnxzCPneo08Dv/CcbLbvbye7INb9k1PkpFW3j0RSkoKNiPSJzHQbZSVu3v3w6L01ANEo+MrcJzzXR3s6eOrl2DTwzPTErzWnw2LiWVn8dUUTeSVOLJtFWpaN9pbkjRw+mXVwRCS5FGxEpE9UnJ/DW9tajtlbc1BBqRNbF9bje31zM1s+auPOG0pwfGZczpll6ezb38mejzvILXbidFuYCERCyYkjnkEOmg+o90ekv1CwEZE+EQ4bLp3kPf5aNAYsm4U7vWtfVc+tqKexOcwtVxdhfToP/IsTvLz4RhOtTREyvHbSsmy0NEUwSepmcaZZdGrlYZF+Q8FGRPrEy5WN2G0WV16Sd8w6BmhtCpOV17WV+IyBf3+hlpxsO1dckkdBjgO3y6L6QCehoMFfF8ZT4NAsJpEUpmAjIn0iEoVfP72PL5137Ona4WCU1sYIuUXHnhl1uM6w4aFna/jCeA+3XlPEmk3NGAMmCvt3dTJ4dBqNNcmZ6i0i/Y+CjYj0mZb2KL/8z71857pifHlHhpemujDRKAkPw+wKf2uEnz++B8uCt99viW+v29XJsLFpNNVqqrdIqlKwEZE+taeuk9+/uJ/5NxTjsVtkAFnAcOCc9ih52zrwdvFW1GfV+8P87D/2EPzMIOEDVZ10tERpaUzerai0DBsdSZxhJSInpwtzDUREetfWjQEe64jwVa+DjoYQdcAm4BMDl+wP0/JJJ5ssqOvmgN/Dq/vrwvhrw4SSNdjXArvLItypCd8i/YWCjYj0uXrgufda+SGwBlgBWMDITkOLP4Lda6ck3aKu7eQChAH8dSEiB+9EWZ/ptLY7sRyHbnlZrnRsaVmHfnZnYM8uOPRzWjb2TC/7bW0Y/hvQgGSR/kDBRkT6nAH+HZgOtANlwBbgvzx2zhybzjmrW/iazaIDQwuw52Req3Ac2V8swlhOLFfaoR3RSGxFwIM/drZjOpoP/RxsI9p84NDPjfvobG9mu+cGIlE7CjYi/
YOCjYj0C1XA08AQ4EOgCbg/EOHFmhB/HOZiooF/XtfGU8BeurHa78GnaNoceIo9VDWcRdvmZ4l2tGA62w/Vi0ZiU6e6xcKEOrt5jIj0JgUbEekXDPBfwA1AGHAC8zsMD/ojZPgjbDo3i1ecURa40skOtvI/mbnYPIPAsrAcbuw5PsDCcrqwZebGThqNYLkzP/1zlOIJ9bR0eok0H4BwEgKJwxlbPCei6eMi/UWvzIrau3cv//AP/0B+fj7p6emMHTuW9evXx/cbY7j33nspLi4mPT2diooKduzYkXCOhoYGZs2ahcfjIScnh5tuuomWlpbDX0pEUkgVsI/YOJsRwFft8MJFmeTsjXB5YALpX/wmvxj/FcpzSyg5/XzsOUXYvT4sdzqh6h2Eqj+g86MNtFY+FytvPEvz/z4SK688hnP/G7QGc5LWXstmB0wPenpEpLckvcemsbGRiy66iC996Uu89NJLDBo0iB07dpCbmxuvs3jxYn7zm9/w5JNPUlZWxj333MO0adPYtm0baWmxe96zZs2iurqa5cuXEwqFmDNnDrfccgt/+MMfkt1kEeknDPAS8G3gQWBmp+ELf2vlbkcGQ0M+vvHqY3xiDD8HWvd/0u2HT2YV59PeknXiiiIycJkkmz9/vpkyZcox90ejUVNUVGSWLFkS39bU1GTcbrd5+umnjTHGbNu2zQBm3bp18TovvfSSsSzL7N27t0vt8Pv9htj3pIqKygArs8D8C5j8NMvcf32O+a0r3VRM+KrxgJkOxtuDc9qdlrl8UYUZdPsTBocrKe20XOkm++9u7fPPS0UlFYvf7+9RDkn6raj//u//ZuLEiXzta1+jsLCQ8ePH82//9m/x/Tt37qSmpoaKior4Nq/Xy+TJk6msrASgsrKSnJwcJk6cGK9TUVGBzWZj7dq1yW6yiPQzfwDWAT8KG9gd4pfAaZ1tnAO8DPh7cM60LBtRWzpal1QktSX9N/zjjz/mkUceYeTIkfz1r3/ltttu47vf/S5PPvkkADU1NQD4fL6E43w+X3xfTU0NhYWFCfsdDgd5eXnxOocLBoMEAoGEIiIDkwH+CNxnoLTTkOlw8FThCNacxDkzPDbaA8l7qjeA5c4kGmxN3glF5KQlPdhEo1EmTJjAL37xC8aPH88tt9zCzTffzKOPPprsl0qwcOFCvF5vvJSWlvbq64lI72uOwp8uyOATl3XS5/IWOmmsTu4zoixXWuKUcRHpc0kPNsXFxZx11lkJ284880yqqqoAKCoqAqC2tjahTm1tbXxfUVERdXV1CfvD4TANDQ3xOodbsGABfr8/Xnbv3p2U9yMifevgDfeTlVvkoGFfCBMOYjnTTnyAiAxISQ82F110Edu3b0/Y9sEHHzBs2DAAysrKKCoqYsWKFfH9gUCAtWvXUl5eDkB5eTlNTU1s2LAhXmflypVEo1EmT5581Nd1u914PJ6EIiJykGeQg9amCKa9BZsro6+bIyK9JOnTvb/3ve9x4YUX8otf/ILrrruOt956i6VLl7J06VIALMvizjvv5Gc/+xkjR46MT/cuKSnhqquuAmI9PNOnT4/fwgqFQtx+++3ccMMNlJSUJLvJIpLiLAtcaTbCjkEY/9HH6YlIiujRXKoT+Mtf/mLGjBlj3G63GT16tFm6dGnC/mg0au655x7j8/mM2+02l156qdm+fXtCnfr6ejNz5kyTlZVlPB6PmTNnjmlubu5yGzTdW0UlBYqF+crtBcaV4zFZX/7HHp/H4bLM9NvyTdaF1xnvFfOM3VuUlPY5h5xt0s6Z2vefk4pKCpaeTve2jEnmHIH+IxAI4PV6+7oZInKSKv4xj7X/E8Wc+fe0rPz3Hp0jO9/OhK94WP/h3+EYNIyW135PJAk9N+4zLoRohOCHWoZCJNn8fn+PhpVoQQcR6ddCHQZn+sl9VeX4HLTU6+nbIp8HCjYikvKy8x0E6vWgSpHPAwUbEUl52fkOmqpDYNkwUT2wUiSVKdiISMrzD
HLQ0hjFluEl2taTBzIcnS0ti2hHc9LOJyInT8FGRPq9k1l32GYHV5pFZ3s0Nu87ifMlbJk5RFubknY+ETl5CjYi0q8FDoTJzrf3+Hhnmo1wyBCJpOQEUBE5jIKNiPRrne1RXCcxKyot00ZrYyS2MoaIpLykrzwsIpJMxtjAlYlld2Ole8AYTGfb8W8pmUMDhHN8DpobNNVb5PNCwUZE+rU9jaNIO2c6rrRiMsuvA8Byuo99gDEJT9x2nWZjz+q/gNUKWAmh56Q53ZhwMHnnE5GTpmAjIv1a24EAkU8+wnLXHFp52HacMTeWDVtaJgeHHO9tv5RQay7Y/bFcE+5MWtts7ixMR2vSziciJ0/BRkT6NRMKYjkO66GJHu/WUiRhppIJKniIfJ5o8LCI9GvR1kZsmTk9Pj7SfABbdkHyGiQi/ZqCjYj0b9Ew2B2xNWh6wATbsNyZSW6UiPRXCjYi0q+ZUBDLZseyJefOebTNj5XR/ScGH8GKfX2a494WE5FTTcFGRPq3g7OYbCf3dWU5XZhwCNMewJaeffLtsjtj45MjoZM/l4gkjYKNiPRvxhDtaMGWdnJhxObOxARbtU6fSIpTsBGR/i8S6vkYm3AnlsOV5AaJSH+lYCMi/V54/64eHxttqceenZ/E1ohIf6ZgIyL9XrSjpecrBhs4ueeDH51ls2FCWnVYpL9RsBGRfi/SVNvvZh/Z0r2YYFtfN0NEDqNgIyL9ngl1wEk+CsGW4SXa5k9Si+iNTiARSQIFGxHp96JtfqI97h0xYIGV7iHa3pzUdolI/6NgIyIDgi09u0czo6IdLViuDCxLX3cinwf6TReRAcHuGQTWcZ7qfSzG9HiquIgMPAo2IjIgWO7Mk1qPxnKmxcbqiEhKU7ARkQHBlp6N5Uzr+fHeQUQDdclrT1Y+kZaGpJ1PRJJDwUZEBgTL4cZKzzqZM4CBaGB/7LbWybbHnYHp1HRvkf4mOY/LFRHpbQ4XtrRsur2ajYlCNIItLQsTDoJlYbnSe6OFItIPJL3HJhKJcM8991BWVkZ6ejqnnXYaP/3pTzHm0KPnjDHce++9FBcXk56eTkVFBTt27Eg4T0NDA7NmzcLj8ZCTk8NNN91ES0tLspsrIgNExF+LLTOn+weaKCYawZadT7Q9kPR2iUj/kvRg88tf/pJHHnmE3/3ud7z33nv88pe/ZPHixfz2t7+N11m8eDG/+c1vePTRR1m7di2ZmZlMmzaNjo5DA/tmzZrF1q1bWb58OcuWLWP16tXccsstyW6uiAwQJtSBLcPb180Qkf7OJNmMGTPMt771rYRt11xzjZk1a5YxxphoNGqKiorMkiVL4vubmpqM2+02Tz/9tDHGmG3bthnArFu3Ll7npZdeMpZlmb1793apHX6/3xB7SoyKisoAL1ZatvFefbfJvHh2j47Prvgn471insHhNo5Bw03G+VefdJtcp00y7tFT+vyzUVFJ1eL3+3uUQ5LeY3PhhReyYsUKPvjgAwA2b97MmjVruOyyywDYuXMnNTU1VFRUxI/xer1MnjyZyspKACorK8nJyWHixInxOhUVFdhsNtauXXvU1w0GgwQCgYQiIqnDhDuxHM6eHWyzx9bAiZzcYxlEpP9L+uDhH/7whwQCAUaPHo3dbicSifDzn/+cWbNmAVBTUwOAz+dLOM7n88X31dTUUFhYmNhQh4O8vLx4ncMtXLiQBx54INlvR0T6g0gIEwpiuTLAstHdJ33HQlHP18ARkYEj6T02zz77LE899RR/+MMfePvtt3nyySf5l3/5F5588slkv1SCBQsW4Pf742X37t29+noicuqYcCeWZcOy23v2WIXWJqKd7fCZSQwny5buSe5DNUUkKZLeYzNv3jx++MMfcsMNNwAwduxYdu3axcKFC7nxxhspKioCoLa2luLi4vhxtbW1nHvuuQAUFRVRV5e4kFY4HKahoSF+/OHcbjdutzvZb0dE+gkTjUA0jOXKwHR0/2GWJhibVRkNt
mK5M0+6PbYML6GaHSeuKCKnVNJ7bNra2rDZEk9rt9uJRmNdx2VlZRQVFbFixYr4/kAgwNq1aykvLwegvLycpqYmNmzYEK+zcuVKotEokydPTnaTRWRAMJhoJNZr07PDY/8JtmFzZySvWSLSryS9x+byyy/n5z//OUOHDuXss89m48aNPPjgg3zrW98CwLIs7rzzTn72s58xcuRIysrKuOeeeygpKeGqq64C4Mwzz2T69OncfPPNPProo4RCIW6//XZuuOEGSkpKkt1kERkgos0HsGUXEG1t6uaRhnC9bk+LfC70aC7VcQQCAXPHHXeYoUOHmrS0NDNixAjzox/9yASDwXidaDRq7rnnHuPz+Yzb7TaXXnqp2b59e8J56uvrzcyZM01WVpbxeDxmzpw5prm5ucvt0HRvFZUUKpbNZE+ba9InfNU4B5/Z7eMzJl9j0sZcagBjuTNNdsUtJ92mjPOvMfaCoX3/2aiopGjp6XRvy5gkjqbrRwKBAF6vt6+bISLJYNnInnobwQ/exJbppWPLym4d7h55AZYrnY6tq7DcmWRdPIvmV5aeVJMyzr+G4MfriRyoOqnziMjR+f1+PB5Pt4/TQzBFZOCIhsHW/TvoNs+g2DRxEUl5+k0XkQEjEjjQoydzaw0bkc8PBRsRGTBMNIxl736PjWXr4Uyq450zLRMTbEv6eUXk5CjYiMjAEY306JaSY9CwI89xkrembK4MTKeCjUh/o2AjIgOGCbZiuTO6H0psh54xZcLB2LOjeqEXR0T6noKNiAwcPZzE2eNF/URkwFGwEZEBw0QjEAl3bzCwzY49b3DsFpSIpDwFGxEZOIzBmCh0awCxBSZKuGFvrzVLRPoPBRsRGViM6d4Tvm222No3Jtp7bRKRfkPBRkQGBhMFm41oSwP2rLwuH2ZzZ2LPzu/FholIf6JgIyL9n4liOtuxuTNjY2W6NaPJ+rQkkWUDy8JENG5HpL9RsBGRgcEAWESDLVjurC4fZrnTiX52vRkDRCMnt2ifzR4LN5HOnp9DRHqFgo2IDCjR5m7eisrIIdra9JkthmiwFSut6+FIRAYOBRsRGXiSfGdJRFKHgo2IDCjRlgZsmd3psfFg2gNJbcMwT5TrBu3gtNyeLRgoIr1HwUZEBpRoZ3vssQpdVJTjIi9cl7DNQytOq2cDfyeWwJ3nR+jcuoKnroFZ54BD36Qi/Ub3H5MrItKnDF29F1WWA3cUr2dd5iAumwhrNsDWOrgvbxmrhob445auv6oFXDIM/v4sWLQG9jZHiUTgV9NhUgkseR32Nvfk/YhIMinYiMiA4LW1Y3eGCbS2YkvLIhY1jn0ryJcJD3wJfra6lZqxmdwQbeQ/r4b3DsDpeZ1EQ/D8e9DZxY6bQZnwxTJYsAJaPp0M9cwWSHdAeSn89Mvw9BZY8TFEdYdKpM+oA1VE+r00Bzw07EUeKG8hzX7ilYcznPCDKfDrN+GDeojaXfz7WyGu+SM0dcDpeXB2IXzzXCjo4l2tulb46WuHQg3EAszjm2Jh6b0DUD4E7vlC188pIsmnYCMi/d74Iti3v5lsV5S7p0Rw280Jnxf19LvwdvWhnw3wYQM8+Aa8vAMa2uCGMXD7+XDzBBjmPfENrqP1xEQNPPQWuOywbi+8XgU/+zJMHqzJWyJ9QcFGRPq9SYPh39+GTxphd2OE80ui2BzOY9ZvC8H6fUffV5ABL7wP/7QMNtdAThrsaIAZZ8D3ymFMYfcDSUc4Fpi+MhLsFjzwKlxzFnz/Qsg8djNFpBco2IhIv/fEptig3zQnPLkJ9gZivS096REp8cQG+da1wuLXoTATZp8DDe3w/22Dwdk9m+XUHob7X4XpIyEvHX68ArbVQba7B40UkR5TsBGRfi8QhGAEWoLgSYPdtQHOLPXw5bLun2uoF3b7Y3+uaYE3dsOSN2I9OXdeEOvpCfXwQeD17fCL1fDtSZDlgpc+jL2GiJw6mhUlIgPG7gCMyIV3g50sftPFz86FK
LBqZ9fPkemMBRCIjbtZtRMuGhobJ3NaXixAnYz9bYkzp0Tk1FKPjYgMGO/Wwsg8MOFO2o2Ln62Gy8+A0QVdP8dj66E5eOjn9w/EQojNig0uTkYgCQQ15VukryjYiMiAUeWH03LB+Gux5/jY3xYbJ/NPE2PryXRFbWvi6jcRA89uif1XRAY+BRsRGTCaOmKL4BlzKIXUtMDdr8QG7/aUMo1I6lCwEZEBw/DpgnsdLdjSs+PbjxtqLAvLskEk1OvtE5G+1+1gs3r1ai6//HJKSkqwLIsXXnghYb8xhnvvvZfi4mLS09OpqKhgx44dCXUaGhqYNWsWHo+HnJwcbrrpJlpaEqcOvPPOO1x88cWkpaVRWlrK4sWLu//uRCTlWMAwWx2n53Sxi8ayg92BCWs0r8jnQbeDTWtrK+PGjeOhhx466v7Fixfzm9/8hkcffZS1a9eSmZnJtGnT6OjoiNeZNWsWW7duZfny5SxbtozVq1dzyy23xPcHAgGmTp3KsGHD2LBhA0uWLOH+++9n6dKlPXiLIpIqCjNh/hS47/wGbsz8G6fl9nWLRKTfMScBMM8//3z852g0aoqKisySJUvi25qamozb7TZPP/20McaYbdu2GcCsW7cuXuell14ylmWZvXv3GmOMefjhh01ubq4JBoPxOvPnzzejRo3qctv8fr8h1nOtoqIywEuaAzNrLGb9LZjffQWTm4Y5PQ+z+O8wbvsJjrc5TPa0uQbL1ufvQ0VFpevF7/f3KJskdYzNzp07qampoaKiIr7N6/UyefJkKisrAaisrCQnJ4eJEyfG61RUVGCz2Vi7dm28ziWXXILL5YrXmTZtGtu3b6exsfGorx0MBgkEAglFRAY2hy325OzFfwe3TYLHN8L3XobGjtjU7E01sUcXiIgclNRgU1NTA4DP50vY7vP54vtqamooLCxM2O9wOMjLy0uoc7RzfPY1Drdw4UK8Xm+8lJaWnvwbEpE+YQFn5MNd5TD1tNgjCu57FR5el7gq8H9tg2A4tgbNsQzKiPKFrF247Ka3my0i/UDKzIpasGABfr8/Xnbv3t3XTRKRHshPj61LM/302DOdhmTHHiq54uNY//RnBSPwp/eOvRjeoEz412lRbs96ma+dqWAj8nmQ1GBTVFQEQG1tbcL22tra+L6ioiLq6uoS9ofDYRoaGhLqHO0cn32Nw7ndbjweT0IRkYGn1Atr98R6YsYUwrzlsadvd9egjFiPT3UL3P2/YaYMQ4ONRT4HkhpsysrKKCoqYsWKFfFtgUCAtWvXUl5eDkB5eTlNTU1s2LAhXmflypVEo1EmT54cr7N69WpCoUPrTixfvpxRo0aRm6tvJpFUtrkGvjwi9ud7VsYW5euuEbkw7yJ4+l1obI89iuGht2BEXnLbKiL9UHdHGzc3N5uNGzeajRs3GsA8+OCDZuPGjWbXrl3GGGMWLVpkcnJyzJ///GfzzjvvmCuvvNKUlZWZ9vb2+DmmT59uxo8fb9auXWvWrFljRo4caWbOnBnf39TUZHw+n5k9e7bZsmWLeeaZZ0xGRoZ57LHHutxOzYpSURm4JT8dY/Xw2DMLME9dgxldgJlUgrnu7L5/PyoqKt0vPZ0V1e1gs2rVqqM24MYbbzTGxKZ833PPPcbn8xm3220uvfRSs3379oRz1NfXm5kzZ5qsrCzj8XjMnDlzTHNzc0KdzZs3mylTphi3220GDx5sFi1a1K12KtioqHz+yugCzH9cGfsvYO64ADPY0/ftUlFR6X7pabCxjPnMQ1dSSCAQwOv19nUzROQUGV0A3zwXHqyEulbwumNTxH+5JvYtKSIDi9/v79F42S4+D1dEpP+aUAzXnQ3/5w3Y3xbbNmlwbLyOQo3I50vKTPcWkc+nsYWxnpnPhhqbBeOL4Q2t+iDyuaMeGxEZ0HY2wT//FfzBQ9tsFrxelbhNRD4fFGxEZEBrOcpDu8NR9daIfF7pVpSIiIikDAUbERERSRkKNiIiIpIyFGxER
EQkZSjYiIiISMpQsBEREZGUoWAjIiIiKUPBRkRERFKGgo2IiIikDAUbERERSRkKNiIiIpIyFGxEREQkZSjYiIiISMpQsBEREZGUoWAjIiIiKUPBRkRERFKGgo2IiIikDAUbERERSRkKNiIiIpIyFGxEREQkZSjYiIiISMpQsBEREZGUoWAjIiIiKaPbwWb16tVcfvnllJSUYFkWL7zwQnxfKBRi/vz5jB07lszMTEpKSvjGN77Bvn37Es7R0NDArFmz8Hg85OTkcNNNN9HS0pJQ55133uHiiy8mLS2N0tJSFi9e3LN3KCIiIp8b3Q42ra2tjBs3joceeuiIfW1tbbz99tvcc889vP322/zpT39i+/btXHHFFQn1Zs2axdatW1m+fDnLli1j9erV3HLLLfH9gUCAqVOnMmzYMDZs2MCSJUu4//77Wbp0aQ/eooiIiHxumJMAmOeff/64dd566y0DmF27dhljjNm2bZsBzLp16+J1XnrpJWNZltm7d68xxpiHH37Y5ObmmmAwGK8zf/58M2rUqC63ze/3G0BFRUVFRUVlABa/39+NRHJIr4+x8fv9WJZFTk4OAJWVleTk5DBx4sR4nYqKCmw2G2vXro3XueSSS3C5XPE606ZNY/v27TQ2NvZ2k0VERGSAcvTmyTs6Opg/fz4zZ87E4/EAUFNTQ2FhYWIjHA7y8vKoqamJ1ykrK0uo4/P54vtyc3OPeK1gMEgwGIz/HAgEkvpeREREpP/rtR6bUCjEddddhzGGRx55pLdeJm7hwoV4vd54KS0t7fXXFBERkf6lV4LNwVCza9culi9fHu+tASgqKqKuri6hfjgcpqGhgaKionid2trahDoHfz5Y53ALFizA7/fHy+7du5P5lkRERGQASHqwORhqduzYwSuvvEJ+fn7C/vLycpqamtiwYUN828qVK4lGo0yePDleZ/Xq1YRCoXid5cuXM2rUqKPehgJwu914PJ6EIiIiIp8z3R1t3NzcbDZu3Gg2btxoAPPggw+ajRs3ml27dpnOzk5zxRVXmCFDhphNmzaZ6urqePnsDKfp06eb8ePHm7Vr15o1a9aYkSNHmpkzZ8b3NzU1GZ/PZ2bPnm22bNlinnnmGZORkWEee+yxLrdTs6JUVFRUVFQGbunprKhuB5tVq1YdtQE33nij2blz5zEbuGrVqvg56uvrzcyZM01WVpbxeDxmzpw5prm5OeF1Nm/ebKZMmWLcbrcZPHiwWbRoUbfaqWCjoqKioqIycEtPg41ljDGkoEAggNfr7etmiIiISA/4/f4eDSvRs6JEREQkZSjYiIiISMpQsBEREZGUoWAjIiIiKUPBRkRERFKGgo2IiIikDAUbERERSRkKNiIiIpIyFGxEREQkZSjYiIiISMpQsBEREZGUoWAjIiIiKUPBRkRERFKGgo2IiIikDAUbERERSRkKNiIiIpIyFGxEREQkZSjYiIiISMpQsBEREZGUoWAjIiIiKUPBRkRERFKGgo2IiIikDAUbERERSRkKNiIiIpIyFGxEREQkZSjYiIiISMpQsBEREZGUoWAjIiIiKaPbwWb16tVcfvnllJSUYFkWL7zwwjHr3nrrrViWxa9//euE7Q0NDcyaNQuPx0NOTg433XQTLS0tCXXeeecdLr74YtLS0igtLWXx4sXdbaqIiIh8znQ72LS2tjJu3Dgeeuih49Z7/vnnefPNNykpKTli36xZs9i6dSvLly9n2bJlrF69mltuuSW+PxAIMHXqVIYNG8aGDRtYsmQJ999/P0uXLu1uc0VEROTzxJwEwDz//PNHbN+zZ48ZPHiw2bJlixk2bJj51a9+Fd+3bds2A5h169bFt7300kvGsiyzd+9eY4wxDz/8sMnNzTXBYDBeZ/78+WbUqFFdbpvf7zeAioqKioqKygAsfr+/+8HEGJP0MTbRaJTZs2czb948zj777CP2V1ZWkpOTw8SJE+PbKioqsNlsrF27Nl7nkksuweVyxetMmzaN7
du309jYmOwmi4iISIpwJPuEv/zlL3E4HHz3u9896v6amhoKCwsTG+FwkJeXR01NTbxOWVlZQh2fzxffl5ube8R5g8EgwWAw/nMgEDip9yEiIiIDT1J7bDZs2MC//uu/8sQTT2BZVjJPfUILFy7E6/XGS2lp6Sl9fREREel7SQ02f/vb36irq2Po0KE4HA4cDge7du3irrvuYvjw4QAUFRVRV1eXcFw4HKahoYGioqJ4ndra2oQ6B38+WOdwCxYswO/3x8vu3buT+dZERERkAEjqrajZs2dTUVGRsG3atGnMnj2bOXPmAFBeXk5TUxMbNmzgvPPOA2DlypVEo1EmT54cr/OjH/2IUCiE0+kEYPny5YwaNeqot6EA3G43brc7mW9HREREBprujjZubm42GzduNBs3bjSAefDBB83GjRvNrl27jlr/8FlRxhgzffp0M378eLN27VqzZs0aM3LkSDNz5sz4/qamJuPz+czs2bPNli1bzDPPPGMyMjLMY4891uV2alaUioqKiorKwC09nRXV7WCzatWqozbgxhtvPGr9owWb+vp6M3PmTJOVlWU8Ho+ZM2eOaW5uTqizefNmM2XKFON2u83gwYPNokWLutVOBRsVFRUVFZWBW3oabCxjjCEFBQIBvF5vXzdDREREesDv9+PxeLp9XMo+KypF85qIiMjnQk//Hk/ZYFNfX9/XTRAREZEeam5u7tFxSV+gr7/Iy8sDoKqqSrek+kggEKC0tJTdu3f3qDtRTp6uQd/TNehb+vz7XnevgTGG5ubmoz5rsitSNtjYbLHOKK/Xq/+Z+5jH49E16GO6Bn1P16Bv6fPve925BifTIZGyt6JERETk80fBRkRERFJGygYbt9vNfffdp9WI+5CuQd/TNeh7ugZ9S59/3zvV1yBl17ERERGRz5+U7bERERGRzx8FGxEREUkZCjYiIiKSMhRsREREJGWkZLB56KGHGD58OGlpaUyePJm33nqrr5uUMu6//34sy0ooo0ePju/v6Ohg7ty55Ofnk5WVxbXXXkttbW3COaqqqpgxYwYZGRkUFhYyb948wuHwqX4rA8bq1au5/PLLKSkpwbIsXnjhhYT9xhjuvfdeiouLSU9Pp6Kigh07diTUaWhoYNasWXg8HnJycrjppptoaWlJqPPOO+9w8cUXk5aWRmlpKYsXL+7ttzZgnOgafPOb3zzi92L69OkJdXQNem7hwoVMmjSJ7OxsCgsLueqqq9i+fXtCnWR997z66qtMmDABt9vN6aefzhNPPNHbb29A6Mo1+OIXv3jE78Gtt96aUOeUXIMePRO8H3vmmWeMy+Uy//f//l+zdetWc/PNN5ucnBxTW1vb101LCffdd585++yzTXV1dbzs378/vv/WW281paWlZsWKFWb9+vXmggsuMBdeeGF8fzgcNmPGjDEVFRVm48aN5sUXXzQFBQVmwYIFffF2BoQXX3zR/OhHPzJ/+tOfDGCef/75hP2LFi0yXq/XvPDCC2bz5s3miiuuMGVlZaa9vT1eZ/r06WbcuHHmzTffNH/729/M6aefbmbOnBnf7/f7jc/nM7NmzTJbtmwxTz/9tElPTzePPfbYqXqb/dqJrsGNN95opk+fnvB70dDQkFBH16Dnpk2bZh5//HGzZcsWs2nTJvOVr3zFDB061LS0tMTrJOO75+OPPzYZGRnm+9//vtm2bZv57W9/a+x2u3n55ZdP6fvtj7pyDb7whS+Ym2++OeH3wO/3x/efqmuQcsHm/PPPN3Pnzo3/HIlETElJiVm4cGEftip13HfffWbcuHFH3dfU1GScTqd57rnn4tvee+89A5jKykpjTOwvCJvNZmpqauJ1HnnkEePxeEwwGOzVtqeCw/9SjUajpqioyCxZsiS+rampybjdbvP0008bY4zZtm2bAcy6devidV566SVjWZbZu3evMcaYhx9+2OTm5iZcg/nz55tRo0b18jsaeI4VbK688spjHqNrkFx1dXUGMK+99
poxJnnfPT/4wQ/M2WefnfBa119/vZk2bVpvv6UB5/BrYEws2Nxxxx3HPOZUXYOUuhXV2dnJhg0bqKioiG+z2WxUVFRQWVnZhy1LLTt27KCkpIQRI0Ywa9YsqqqqANiwYQOhUCjh8x89ejRDhw6Nf/6VlZWMHTsWn88XrzNt2jQCgQBbt249tW8kBezcuZOampqEz9zr9TJ58uSEzzwnJ4eJEyfG61RUVGCz2Vi7dm28ziWXXILL5YrXmTZtGtu3b6exsfEUvZuB7dVXX6WwsJBRo0Zx2223UV9fH9+na5Bcfr8fOPSw42R991RWViac42Ad/f1xpMOvwUFPPfUUBQUFjBkzhgULFtDW1hbfd6quQUo9BPPAgQNEIpGEDw3A5/Px/vvv91GrUsvkyZN54oknGDVqFNXV1TzwwANcfPHFbNmyhZqaGlwuFzk5OQnH+Hw+ampqAKipqTnq9Tm4T7rn4Gd2tM/0s595YWFhwn6Hw0FeXl5CnbKysiPOcXBfbm5ur7Q/VUyfPp1rrrmGsrIyPvroI+6++24uu+wyKisrsdvtugZJFI1GufPOO7nooosYM2YMQNK+e45VJxAI0N7eTnp6em+8pQHnaNcA4Otf/zrDhg2jpKSEd955h/nz57N9+3b+9Kc/AafuGqRUsJHed9lll8X/fM455zB58mSGDRvGs88+q196+dy64YYb4n8eO3Ys55xzDqeddhqvvvoql156aR+2LPXMnTuXLVu2sGbNmr5uyufWsa7BLbfcEv/z2LFjKS4u5tJLL+Wjjz7itNNOO2XtS6lbUQUFBdjt9iNGwtfW1lJUVNRHrUptOTk5nHHGGXz44YcUFRXR2dlJU1NTQp3Pfv5FRUVHvT4H90n3HPzMjvf/fFFREXV1dQn7w+EwDQ0Nui69ZMSIERQUFPDhhx8CugbJcvvtt7Ns2TJWrVrFkCFD4tuT9d1zrDoej0f/cPvUsa7B0UyePBkg4ffgVFyDlAo2LpeL8847jxUrVsS3RaNRVqxYQXl5eR+2LHW1tLTw0UcfUVxczHnnnYfT6Uz4/Ldv305VVVX88y8vL+fdd99N+JJfvnw5Ho+Hs84665S3f6ArKyujqKgo4TMPBAKsXbs24TNvampiw4YN8TorV64kGo3Gv3jKy8tZvXo1oVAoXmf58uWMGjVKt0B6YM+ePdTX11NcXAzoGpwsYwy33347zz//PCtXrjzill2yvnvKy8sTznGwjv7+OPE1OJpNmzYBJPwenJJr0OVhxgPEM888Y9xut3niiSfMtm3bzC233GJycnISRmFLz911113m1VdfNTt37jSvv/66qaioMAUFBaaurs4YE5tyOXToULNy5Uqzfv16U15ebsrLy+PHH5zuN3XqVLNp0ybz8ssvm0GDBmm693E0NzebjRs3mo0bNxrAPPjgg2bjxo1m165dxpjYdO+cnBzz5z//2bzzzjvmyiuvPOp07/Hjx5u1a9eaNWvWmJEjRyZMNW5qajI+n8/Mnj3bbNmyxTzzzDMmIyNDU40/dbxr0NzcbP75n//ZVFZWmp07d5pXXnnFTJgwwYwcOdJ0dHTEz6Fr0HO33Xab8Xq95tVXX02YStzW1havk4zvnoNTjefNm2fee+8989BDD2m696dOdA0+/PBD85Of/MSsX7/e7Ny50/z5z382I0aMMJdcckn8HKfqGqRcsDHGmN/+9rdm6NChxuVymfPPP9+8+eabfd2klHH99deb4uJi43K5zODBg831119vPvzww/j+9vZ28+1vf9vk5uaajIwMc/XVV5vq6uqEc3zyySfmsssuM+np6aagoMDcddddJhQKneq3MmCsWrXKAEeUG2+80RgTm/J9zz33GJ/PZ9xut7n00kvN9u3bE85RX19vZs6cabKysozH4zFz5swxzc3NCXU2b95spkyZYtxutxk8eLBZtGjRqXqL/d7xrkFbW5uZOnWqGTRokHE6nWbYsGHm5ptvPuIfU7oGPXe0zx4wj
z/+eLxOsr57Vq1aZc4991zjcrnMiBEjEl7j8+xE16CqqspccsklJi8vz7jdbnP66aebefPmJaxjY8ypuQbWpw0WERERGfBSaoyNiIiIfL4p2IiIiEjKULARERGRlKFgIyIiIilDwUZERERShoKNiIiIpAwFGxEREUkZCjYiIiKSMhRsREREJGUo2IiIiEjKULARERGRlKFgIyIiIinj/wdZde5AThdBXAAAAABJRU5ErkJggg==", + "text/plain": [ + "
" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "WIDTH = 2560\n", + "HEIGHT = 1440\n", + "\n", + "clusters_detections = clusters_to_detections(clusters, sorted_detections)\n", + "im = np.zeros((HEIGHT, WIDTH, 3), dtype=np.uint8)\n", + "for el in clusters_detections[0]:\n", + " im = visualize_whole_body(np.asarray(el.keypoints), im)\n", + "\n", + "p = plt.imshow(im)\n", + "display(p)" + ] + }, + { + "cell_type": "code", + "execution_count": 91, + "id": "eac843c2", + "metadata": {}, + "outputs": [], + "source": [ + "# im_prime = np.zeros((HEIGHT, WIDTH, 3), dtype=np.uint8)\n", + "# for el in clusters_detections[1]:\n", + "# im_prime = visualize_whole_body(np.asarray(el.keypoints), im_prime)\n", + "\n", + "# p_prime = plt.imshow(im_prime)\n", + "# display(p_prime)" + ] + }, + { + "cell_type": "code", + "execution_count": 92, + "id": "037dcc22", + "metadata": {}, + "outputs": [], + "source": [ + "@jaxtyped(typechecker=beartype)\n", + "def triangulate_one_point_from_multiple_views_linear(\n", + " proj_matrices: Float[Array, \"N 3 4\"],\n", + " points: Num[Array, \"N 2\"],\n", + " confidences: Optional[Float[Array, \"N\"]] = None,\n", + " conf_threshold: float = 0.4, # 0.2\n", + ") -> Float[Array, \"3\"]:\n", + " \"\"\"\n", + " Args:\n", + " proj_matrices: 形状为(N, 3, 4)的投影矩阵序列\n", + " points: 形状为(N, 2)的点坐标序列\n", + " confidences: 形状为(N,)的置信度序列,范围[0.0, 1.0]\n", + " conf_threshold: 置信度阈值,低于该值的观测不参与DLT\n", + " Returns:\n", + " point_3d: 形状为(3,)的三角测量得到的3D点\n", + " \"\"\"\n", + " assert len(proj_matrices) == len(points)\n", + " N = len(proj_matrices)\n", + " # 置信度加权DLT\n", + " if confidences is None:\n", + " weights = jnp.ones(N, dtype=jnp.float32)\n", + " else:\n", + " valid_mask = confidences >= conf_threshold\n", + " weights = jnp.where(valid_mask, confidences, 0.0)\n", + " sum_weights = jnp.sum(weights)\n", + " weights = jnp.where(sum_weights > 0, weights / sum_weights, weights)\n", + "\n", + " A = jnp.zeros((N * 2, 4), 
dtype=jnp.float32)\n", + " for i in range(N):\n", + " x, y = points[i]\n", + " row1 = proj_matrices[i, 2] * x - proj_matrices[i, 0]\n", + " row2 = proj_matrices[i, 2] * y - proj_matrices[i, 1]\n", + " A = A.at[2 * i].set(row1 * weights[i])\n", + " A = A.at[2 * i + 1].set(row2 * weights[i])\n", + "\n", + " _, _, vh = jnp.linalg.svd(A, full_matrices=False)\n", + " point_3d_homo = vh[-1]\n", + " point_3d_homo = jnp.where(point_3d_homo[3] < 0, -point_3d_homo, point_3d_homo)\n", + " is_zero_weight = jnp.sum(weights) == 0\n", + " point_3d = jnp.where(\n", + " is_zero_weight,\n", + " jnp.full((3,), jnp.nan, dtype=jnp.float32),\n", + " jnp.where(\n", + " jnp.abs(point_3d_homo[3]) > 1e-8,\n", + " point_3d_homo[:3] / point_3d_homo[3],\n", + " jnp.full((3,), jnp.nan, dtype=jnp.float32),\n", + " ),\n", + " )\n", + " return point_3d\n", + "\n", + "\n", + "jaxtyped(typechecker=beartype)\n", + "def triangulate_points_from_multiple_views_linear(\n", + " proj_matrices: Float[Array, \"N 3 4\"],\n", + " points: Num[Array, \"N P 2\"],\n", + " confidences: Optional[Float[Array, \"N P\"]] = None,\n", + ") -> Float[Array, \"P 3\"]:\n", + " \"\"\"\n", + " Batch‐triangulate P points observed by N cameras, linearly via SVD.\n", + "\n", + " Args:\n", + " proj_matrices: (N, 3, 4) projection matrices\n", + " points: (N, P, 2) image-coordinates per view\n", + " confidences: (N, P, 1) optional per-view confidences in [0,1]\n", + "\n", + " Returns:\n", + " (P, 3) 3D point for each of the P tracks\n", + " \"\"\"\n", + " N, P, _ = points.shape\n", + " assert proj_matrices.shape[0] == N\n", + "\n", + " if confidences is None:\n", + " conf = jnp.ones((N, P), dtype=jnp.float32)\n", + " else:\n", + " conf = jnp.array(confidences)\n", + "\n", + " vmap_triangulate = jax.vmap(\n", + " triangulate_one_point_from_multiple_views_linear,\n", + " in_axes=(None, 1, 1),\n", + " out_axes=0,\n", + " )\n", + " return vmap_triangulate(proj_matrices, points, conf)\n", + "\n" + ] + }, + { + "cell_type": "code", + 
"execution_count": 93, + "id": "8fc0074d", + "metadata": {}, + "outputs": [], + "source": [ + "def triangle_from_cluster(cluster: list[Detection]) -> Float[Array, \"3\"]:\n", + " proj_matrices = jnp.array([el.camera.params.projection_matrix for el in cluster])\n", + " points = jnp.array([el.keypoints for el in cluster])\n", + " confidences = jnp.array([el.confidences for el in cluster])\n", + " return triangulate_points_from_multiple_views_linear(\n", + " proj_matrices, points, confidences=confidences\n", + " )\n", + "\n", + "\n", + "res = {\n", + " \"a\": [triangle_from_cluster(clusters_detections[0]).tolist()],\n", + " \"b\": [triangle_from_cluster(clusters_detections[1]).tolist()],\n", + "}\n", + "with open(\"samples/Test_YEU.json\", \"wb\") as f:\n", + " f.write(orjson.dumps(res))" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "cvth3pe", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.12.9" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/rebuild_by_epipolar_line.py b/rebuild_by_epipolar_line.py new file mode 100644 index 0000000..99f7510 --- /dev/null +++ b/rebuild_by_epipolar_line.py @@ -0,0 +1,1062 @@ +from math import isnan +from pathlib import Path +from re import L +import awkward as ak +from typing import ( + Any, + Generator, + Optional, + Sequence, + TypeAlias, + TypedDict, + cast, + TypeVar, +) +from datetime import datetime, timedelta +from jaxtyping import Array, Float, Num, jaxtyped +import numpy as np +from cv2 import undistortPoints +from sympy import true +from app import camera +from app.camera import Camera, CameraParams, Detection +import jax.numpy as jnp +from beartype import beartype +from scipy.spatial.transform import Rotation as R +from app.camera import ( + 
# Shared type aliases for this module.
NDArray: TypeAlias = np.ndarray
DetectionGenerator: TypeAlias = Generator[Detection, None, None]

# Smallest timestamp difference considered meaningful when syncing streams.
DELTA_T_MIN = timedelta(milliseconds=1)

T = TypeVar("T")


def unwrap(val: Optional[T]) -> T:
    """Return *val* unchanged, raising ``ValueError`` when it is ``None``."""
    if val is None:
        raise ValueError("None")
    return val


class KeypointDataset(TypedDict):
    """One frame of 2D detections: boxes plus per-person keypoints and scores."""

    frame_index: int
    boxes: Num[NDArray, "N 4"]
    kps: Num[NDArray, "N J 2"]
    kps_scores: Num[NDArray, "N J"]


class Resolution(TypedDict):
    """Image size in pixels."""

    width: int
    height: int


class Intrinsic(TypedDict):
    """Camera intrinsics."""

    camera_matrix: Num[Array, "3 3"]  # the K matrix
    distortion_coefficients: Num[Array, "N"]  # usually 5 coefficients


class Extrinsic(TypedDict):
    """Camera pose as a Rodrigues rotation vector plus a translation vector."""

    rvec: Num[NDArray, "3"]
    tvec: Num[NDArray, "3"]


class ExternalCameraParams(TypedDict):
    """Camera record as stored in the parameter parquet file."""

    name: str
    port: int
    intrinsic: Intrinsic
    extrinsic: Extrinsic
    resolution: Resolution


def get_camera_params(camera_path: Path) -> ak.Array:
    """Load intrinsics and extrinsics for every camera position from parquet."""
    return ak.from_parquet(camera_path / "camera_params.parquet")
def get_camera_detect(
    detect_path: Path, camera_port: list[int], camera_dataset: ak.Array
) -> dict[int, ak.Array]:
    """
    Load the 2D detection data of every requested camera position.

    Args:
        detect_path: directory holding one ``{port}.parquet`` file per camera
        camera_port: camera ports whose detections should be loaded
        camera_dataset: camera-parameter table with a "port" column

    Returns:
        mapping from camera port to its detection records
    """
    wanted = set(camera_port)  # O(1) membership instead of scanning the list
    keypoint_data: dict[int, ak.Array] = {}
    for element_port in ak.to_numpy(camera_dataset["port"]):
        if element_port in wanted:
            keypoint_data[int(element_port)] = ak.from_parquet(
                detect_path / f"{element_port}.parquet"
            )
    return keypoint_data


def get_segment(
    camera_port: list[int], frame_index: list[int], keypoint_data: dict[int, ak.Array]
) -> dict[int, ak.Array]:
    """
    Restrict each camera's 2D detections to the given frames
    (one complete jump segment).

    Note:
        ``keypoint_data`` is filtered in place and also returned.
    """
    frames = set(frame_index)  # avoid an O(len(frame_index)) scan per frame
    for port in camera_port:
        camera_data = keypoint_data[port]
        segment_data = [
            camera_data[index]
            for index, element_frame in enumerate(
                ak.to_numpy(camera_data["frame_index"])
            )
            if element_frame in frames
        ]
        keypoint_data[port] = ak.Array(segment_data)
    return keypoint_data


@jaxtyped(typechecker=beartype)
def undistort_points(
    points: Num[NDArray, "M 2"],
    camera_matrix: Num[NDArray, "3 3"],
    dist_coeffs: Num[NDArray, "N"],
) -> Num[NDArray, "M 2"]:
    """
    Undistort pixel coordinates; re-projecting with ``P=K`` keeps the
    result in pixel space rather than normalized coordinates.
    """
    K = camera_matrix
    res = undistortPoints(points, K, dist_coeffs, P=K)  # type: ignore
    return res.reshape(-1, 2)


@jaxtyped(typechecker=beartype)
def to_transformation_matrix(
    rvec: Num[NDArray, "3"], tvec: Num[NDArray, "3"]
) -> Num[NDArray, "4 4"]:
    """Build a 4x4 homogeneous transform from a Rodrigues vector and translation."""
    res = np.eye(4)
    res[:3, :3] = R.from_rotvec(rvec).as_matrix()
    res[:3, 3] = tvec
    return res


def from_camera_params(camera: ExternalCameraParams) -> Camera:
    """Convert one parquet camera record into the project's ``Camera`` object."""
    rt = jnp.array(
        to_transformation_matrix(
            ak.to_numpy(camera["extrinsic"]["rvec"]),
            ak.to_numpy(camera["extrinsic"]["tvec"]),
        )
    )
    K = jnp.array(camera["intrinsic"]["camera_matrix"]).reshape(3, 3)
    dist_coeffs = jnp.array(camera["intrinsic"]["distortion_coefficients"])
    image_size = jnp.array(
        (camera["resolution"]["width"], camera["resolution"]["height"])
    )
    return Camera(
        id=camera["name"],
        params=CameraParams(
            K=K,
            Rt=rt,
            dist_coeffs=dist_coeffs,
            image_size=image_size,
        ),
    )
def preprocess_keypoint_dataset(
    dataset: Sequence[KeypointDataset],
    camera: Camera,
    fps: float,
    start_timestamp: datetime,
) -> Generator[Detection, None, None]:
    """
    Turn raw per-frame keypoint records into ``Detection`` objects.

    Keypoints are undistorted with the camera intrinsics and each frame
    gets a synthetic timestamp derived from ``start_timestamp`` and ``fps``.
    """
    frame_interval_s = 1 / fps
    for el in dataset:
        frame_index = el["frame_index"]
        timestamp = start_timestamp + timedelta(seconds=frame_index * frame_interval_s)
        for kp, kp_score, _boxes in zip(el["kps"], el["kps_scores"], el["boxes"]):
            kp = undistort_points(
                np.asarray(kp),
                np.asarray(camera.params.K),
                np.asarray(camera.params.dist_coeffs),
            )

            yield Detection(
                keypoints=jnp.array(kp),
                confidences=jnp.array(kp_score),
                camera=camera,
                timestamp=timestamp,
            )


def sync_batch_gen(
    gens: list[DetectionGenerator], diff: timedelta
) -> Generator[list[Detection], Any, None]:
    """
    Merge several per-camera detection streams into time-synchronized batches.

    Args:
        gens: detection generators, one per camera, each ordered by timestamp
        diff: maximum timestamp difference between detections of one batch

    Yields:
        lists of detections whose timestamps fall within one ``diff`` window
    """
    from more_itertools import partition

    N = len(gens)
    last_batch_timestamp: Optional[datetime] = None
    current_batch: list[Detection] = []
    paused: list[bool] = [False] * N
    finished: list[bool] = [False] * N
    unmatched_detections: list[Detection] = []

    def reset_paused() -> None:
        # Un-pause every stream that can still produce detections;
        # exhausted streams stay permanently paused.
        for i in range(N):
            paused[i] = finished[i]

    EPS = 1e-6
    # A small epsilon guards against floating point jitter in timestamps.
    diff_eps = diff - timedelta(seconds=EPS)
    while True:
        for i, gen in enumerate(gens):
            if finished[i] or paused[i]:
                if all(finished):
                    # Every stream is exhausted: flush what is left and stop.
                    if len(current_batch) > 0:
                        yield current_batch
                    return
                continue
            try:
                val = next(gen)
            except StopIteration:
                # BUG FIX: a single exhausted stream used to abort the whole
                # merge (dropping the pending batch). Mark the stream as
                # finished and keep draining the others instead.
                finished[i] = True
                paused[i] = True
                continue
            if last_batch_timestamp is None:
                last_batch_timestamp = val.timestamp
                current_batch.append(val)
            elif abs(val.timestamp - last_batch_timestamp) >= diff_eps:
                # Detection belongs to a later window: pause this stream.
                unmatched_detections.append(val)
                paused[i] = True
                if all(paused):
                    yield current_batch
                    reset_paused()
                    last_batch_timestamp = last_batch_timestamp + diff
                    # Detections older than the new window boundary seed the
                    # next batch; newer ones stay parked for a later window.
                    bad, good = partition(
                        lambda x: x.timestamp < unwrap(last_batch_timestamp),
                        unmatched_detections,
                    )
                    current_batch = list(good)
                    unmatched_detections = list(bad)
            else:
                current_batch.append(val)


def get_batch_detect(
    keypoint_dataset,
    camera_dataset,
    camera_port: list[int],
    FPS: int = 24,
    batch_fps: int = 10,
) -> Generator[list[Detection], Any, None]:
    """
    Build one detection generator per camera and merge them into synced batches.

    Args:
        keypoint_dataset: mapping from camera port to its keypoint records
        camera_dataset: awkward array of camera parameters (indexed by "port")
        camera_port: camera ports to include
        FPS: capture frame rate used to derive per-frame timestamps
        batch_fps: rate at which synchronized batches are emitted
    """
    gen_data = [
        preprocess_keypoint_dataset(
            keypoint_dataset[port],
            from_camera_params(camera_dataset[camera_dataset["port"] == port][0]),
            FPS,
            datetime(2024, 4, 2, 12, 0, 0),  # arbitrary fixed epoch for offline data
        )
        for port in camera_port
    ]

    return sync_batch_gen(gen_data, timedelta(seconds=1 / batch_fps))


@jaxtyped(typechecker=beartype)
def triangulate_one_point_from_multiple_views_linear(
    proj_matrices: Float[Array, "N 3 4"],
    points: Num[Array, "N 2"],
    confidences: Optional[Float[Array, "N"]] = None,
    conf_threshold: float = 0.4,  # 0.2
) -> Float[Array, "3"]:
    """
    Triangulate a single 3D point from N views via confidence-weighted DLT.

    Args:
        proj_matrices: (N, 3, 4) projection matrix per view
        points: (N, 2) observed image coordinates per view
        confidences: (N,) per-view confidences in [0.0, 1.0]
        conf_threshold: observations below this confidence are excluded
            from the DLT system

    Returns:
        (3,) triangulated 3D point; NaN when no observation is usable
    """
    assert len(proj_matrices) == len(points)
    N = len(proj_matrices)
    # Confidence-weighted DLT.
    if confidences is None:
        weights = jnp.ones(N, dtype=jnp.float32)
    else:
        valid_mask = confidences >= conf_threshold
        weights = jnp.where(valid_mask, confidences, 0.0)
        sum_weights = jnp.sum(weights)
        weights = jnp.where(sum_weights > 0, weights / sum_weights, weights)

    A = jnp.zeros((N * 2, 4), dtype=jnp.float32)
    for i in range(N):
        x, y = points[i]
        row1 = proj_matrices[i, 2] * x - proj_matrices[i, 0]
        row2 = proj_matrices[i, 2] * y - proj_matrices[i, 1]
        A = A.at[2 * i].set(row1 * weights[i])
        A = A.at[2 * i + 1].set(row2 * weights[i])

    _, _, vh = jnp.linalg.svd(A, full_matrices=False)
    point_3d_homo = vh[-1]
    # Normalize the sign so the homogeneous scale is non-negative.
    point_3d_homo = jnp.where(point_3d_homo[3] < 0, -point_3d_homo, point_3d_homo)
    is_zero_weight = jnp.sum(weights) == 0
    point_3d = jnp.where(
        is_zero_weight,
        jnp.full((3,), jnp.nan, dtype=jnp.float32),
        jnp.where(
            jnp.abs(point_3d_homo[3]) > 1e-8,
            point_3d_homo[:3] / point_3d_homo[3],
            jnp.full((3,), jnp.nan, dtype=jnp.float32),
        ),
    )
    return point_3d


# BUG FIX: the '@' was missing here, so `jaxtyped(typechecker=beartype)` was a
# dead expression statement and the function below was left unchecked.
@jaxtyped(typechecker=beartype)
def triangulate_points_from_multiple_views_linear(
    proj_matrices: Float[Array, "N 3 4"],
    points: Num[Array, "N P 2"],
    confidences: Optional[Float[Array, "N P"]] = None,
) -> Float[Array, "P 3"]:
    """
    Batch-triangulate P points observed by N cameras, linearly via SVD.

    Args:
        proj_matrices: (N, 3, 4) projection matrices
        points: (N, P, 2) image coordinates per view
        confidences: (N, P) optional per-view confidences in [0, 1]

    Returns:
        (P, 3) 3D point for each of the P tracks
    """
    N, P, _ = points.shape
    assert proj_matrices.shape[0] == N

    if confidences is None:
        conf = jnp.ones((N, P), dtype=jnp.float32)
    else:
        conf = jnp.array(confidences)

    # Map over the P dimension (axis 1) of points/confidences; the projection
    # matrices are shared by every point.
    vmap_triangulate = jax.vmap(
        triangulate_one_point_from_multiple_views_linear,
        in_axes=(None, 1, 1),
        out_axes=0,
    )
    return vmap_triangulate(proj_matrices, points, conf)


def clusters_to_detections(
    clusters: list[list[int]], sorted_detections: list[Detection]
) -> list[list[Detection]]:
    """
    Resolve clusters of indices into clusters of detections.

    Args:
        clusters: each cluster is a list of indices into ``sorted_detections``
        sorted_detections: list of SORTED detections

    Returns:
        list of clusters, each cluster a list of detections
    """
    return [[sorted_detections[i] for i in cluster] for cluster in clusters]
def group_by_cluster_by_camera(
    cluster: Sequence[Detection],
) -> PMap[CameraID, Detection]:
    """
    Group the detections by camera, preserving the latest detection per camera.
    """
    r: dict[CameraID, Detection] = {}
    for el in cluster:
        existing = r.get(el.camera.id)
        # BUG FIX: detections for a camera not yet in the map were never
        # inserted (there was no else-branch), so the result was always empty.
        if existing is None or el.timestamp > existing.timestamp:
            r[el.camera.id] = el
    return pmap(r)


@jaxtyped(typechecker=beartype)
def triangle_from_cluster(
    cluster: Sequence[Detection],
) -> tuple[Float[Array, "N 3"], datetime]:
    """
    Triangulate every keypoint of a cross-view cluster.

    Returns:
        the (J, 3) triangulated keypoints and the newest timestamp among
        the cluster's detections
    """
    proj_matrices = jnp.array([el.camera.params.projection_matrix for el in cluster])
    points = jnp.array([el.keypoints_undistorted for el in cluster])
    confidences = jnp.array([el.confidences for el in cluster])
    latest_timestamp = max(el.timestamp for el in cluster)
    return (
        triangulate_points_from_multiple_views_linear(
            proj_matrices, points, confidences=confidences
        ),
        latest_timestamp,
    )


class GlobalTrackingState:
    """Owns all live trackings and hands out monotonically increasing ids."""

    _last_id: int
    _trackings: dict[int, Tracking]

    def __init__(self):
        self._last_id = 0
        self._trackings = {}

    def __repr__(self) -> str:
        return (
            f"GlobalTrackingState(last_id={self._last_id}, trackings={self._trackings})"
        )

    @property
    def trackings(self) -> dict[int, Tracking]:
        # Shallow copy so callers cannot mutate the internal registry.
        return shallow_copy(self._trackings)

    def add_tracking(self, cluster: Sequence[Detection]) -> Tracking:
        """
        Create a new tracking from a cross-view cluster.

        Raises:
            ValueError: when the cluster has fewer than 2 detections
                (triangulation needs at least two views).
        """
        if len(cluster) < 2:
            raise ValueError(
                "cluster must contain at least 2 detections to form a tracking"
            )
        kps_3d, latest_timestamp = triangle_from_cluster(cluster)
        next_id = self._last_id + 1
        tracking_state = TrackingState(
            keypoints=kps_3d,
            last_active_timestamp=latest_timestamp,
            historical_detections_by_camera=group_by_cluster_by_camera(cluster),
        )
        tracking = Tracking(
            id=next_id,
            state=tracking_state,
            velocity_filter=LastDifferenceVelocityFilter(kps_3d, latest_timestamp),
        )
        self._trackings[next_id] = tracking
        self._last_id = next_id
        return tracking
@jaxtyped(typechecker=beartype)
def triangulate_one_point_from_multiple_views_linear_time_weighted(
    proj_matrices: Float[Array, "N 3 4"],
    points: Num[Array, "N 2"],
    delta_t: Num[Array, "N"],
    lambda_t: float = 10.0,
    confidences: Optional[Float[Array, "N"]] = None,
) -> Float[Array, "3"]:
    """
    Triangulate one 3D point from N views with time-weighted linear least squares.

    Implements the incremental reconstruction of "Cross-View Tracking for
    Multi-Human 3D Pose": each observation i is weighted by
    ``exp(-lambda_t * delta_t[i])`` times the square root of its clipped
    confidence, then the weights are normalized.

    Args:
        proj_matrices: (N, 3, 4) projection matrices
        points: (N, 2) image coordinates
        delta_t: per-view age of the observation, in seconds
        lambda_t: time penalty rate; larger values discount older views faster
        confidences: optional (N,) confidences in [0.0, 1.0]

    Returns:
        (3,) triangulated 3D point; NaN when the system is degenerate
    """
    assert len(proj_matrices) == len(points)
    assert len(delta_t) == len(points)

    num_views = len(proj_matrices)

    # Confidence part of the weights.
    conf_part: Float[Array, "N"]
    if confidences is None:
        conf_part = jnp.ones(num_views, dtype=jnp.float32)
    else:
        conf_part = jnp.sqrt(jnp.clip(confidences, 0, 1))

    weights = jnp.exp(-lambda_t * delta_t) * conf_part
    total = jnp.sum(weights)
    weights = jnp.where(total > 0, weights / total, weights)

    # Build the weighted DLT system in one shot: two interleaved rows per view.
    px = points[:, 0:1]
    py = points[:, 1:2]
    rows_x = (proj_matrices[:, 2] * px - proj_matrices[:, 0]) * weights[:, None]
    rows_y = (proj_matrices[:, 2] * py - proj_matrices[:, 1]) * weights[:, None]
    A = jnp.stack([rows_x, rows_y], axis=1).reshape(num_views * 2, 4)

    _, _, vh = jnp.linalg.svd(A, full_matrices=False)
    homo = vh[-1]
    # Normalize the sign so the homogeneous scale is non-negative.
    homo = jnp.where(homo[3] < 0, -homo, homo)
    nan3 = jnp.full((3,), jnp.nan, dtype=jnp.float32)
    return jnp.where(
        jnp.sum(weights) == 0,
        nan3,
        jnp.where(jnp.abs(homo[3]) > 1e-8, homo[:3] / homo[3], nan3),
    )
triangulate_one_point_from_multiple_views_linear_time_weighted, + in_axes=( + None, + 1, + None, + None, + 1, + ), # proj_matrices and delta_t static, map over points + out_axes=0, # Output has first dimension corresponding to points + ) + + # For each point p, extract the 2D coordinates from all cameras and triangulate + return vmap_triangulate( + proj_matrices, # (N, 3, 4) - static across points + points, # (N, P, 2) - map over dim 1 (P) + delta_t, # (N,) - static across points + lambda_t, # scalar - static + conf, # (N, P) - map over dim 1 (P) + ) + + +DetectionMap: TypeAlias = PMap[CameraID, Detection] + + +def update_tracking( + tracking: Tracking, + detections: Sequence[Detection], + max_delta_t: timedelta = timedelta(milliseconds=100), + lambda_t: float = 10.0, +) -> None: + """ + update the tracking with a new set of detections + + Args: + tracking: the tracking to update + detections: the detections to update the tracking with + max_delta_t: the maximum time difference between the last active timestamp and the latest detection + lambda_t: the lambda value for the time difference + + Note: + the function would mutate the tracking object + """ + last_active_timestamp = tracking.state.last_active_timestamp + latest_timestamp = max(d.timestamp for d in detections) + + d = tracking.state.historical_detections_by_camera + for detection in detections: + d = cast(DetectionMap, d.update({detection.camera.id: detection})) + for camera_id, detection in d.items(): + if detection.timestamp - latest_timestamp > max_delta_t: + d = d.remove(camera_id) + new_detections = d + new_detections_list = list(new_detections.values()) + project_matrices = jnp.stack( + [detection.camera.params.projection_matrix for detection in new_detections_list] + ) + delta_t = jnp.array( + [ + detection.timestamp.timestamp() - last_active_timestamp.timestamp() + for detection in new_detections_list + ] + ) + kps = jnp.stack([detection.keypoints for detection in new_detections_list]) + conf = 
jnp.stack([detection.confidences for detection in new_detections_list])
    kps_3d = triangulate_points_from_multiple_views_linear_time_weighted(
        project_matrices, kps, delta_t, lambda_t, conf
    )
    new_state = TrackingState(
        keypoints=kps_3d,
        last_active_timestamp=latest_timestamp,
        historical_detections_by_camera=new_detections,
    )
    # NOTE(review): `tracking.update(...)` runs before the state swap below —
    # confirm it does not also rewrite `tracking.state`, otherwise the manual
    # assignment makes part of the call redundant.
    tracking.update(kps_3d, latest_timestamp)
    tracking.state = new_state


def filter_camera_port(detections: list[Detection]) -> list[CameraID]:
    """Return the distinct camera IDs present in *detections*.

    The result order is unspecified (it is materialised from a set).
    """
    camera_port = set()
    for detection in detections:
        camera_port.add(detection.camera.id)
    return list(camera_port)


@beartype
def calculate_camera_affinity_matrix_jax(
    trackings: Sequence[Tracking],
    camera_detections: Sequence[Detection],
    w_2d: float,
    alpha_2d: float,
    w_3d: float,
    alpha_3d: float,
    lambda_a: float,
) -> Float[Array, "T D"]:
    """
    Vectorized implementation to compute an affinity matrix between *trackings*
    and *detections* coming from **one** camera.

    Compared with the simple double-for-loop version, this leverages `jax`'s
    broadcasting + `vmap` facilities and avoids Python loops over every
    (tracking, detection) pair. The mathematical definition of the affinity
    is **unchanged**, so the result remains bit-identical to the reference
    implementation used in the tests.
    """

    # ------------------------------------------------------------------
    # Quick validations / early-exit guards
    # ------------------------------------------------------------------
    if len(trackings) == 0 or len(camera_detections) == 0:
        # Return an empty affinity matrix with appropriate shape.
        return jnp.zeros((len(trackings), len(camera_detections)))  # type: ignore[return-value]

    cam = next(iter(camera_detections)).camera
    # Ensure every detection truly belongs to the same camera (guard clause)
    cam_id = cam.id
    if any(det.camera.id != cam_id for det in camera_detections):
        raise ValueError(
            "All detections passed to `calculate_camera_affinity_matrix` must come from one camera."
        )

    # We will rely on a single `Camera` instance (all detections share it).
    # Image size is used purely to normalise pixel distances further below.
    w_img_, h_img_ = cam.params.image_size
    w_img, h_img = float(w_img_), float(h_img_)

    # ------------------------------------------------------------------
    # Gather data into ndarray / DeviceArray batches so that we can compute
    # everything in a single (or a few) fused kernels.
    # ------------------------------------------------------------------

    # === Tracking-side tensors ===
    kps3d_trk: Float[Array, "T J 3"] = jnp.stack(
        [trk.state.keypoints for trk in trackings]
    )  # (T, J, 3)
    J = kps3d_trk.shape[1]
    # === Detection-side tensors ===
    kps2d_det: Float[Array, "D J 2"] = jnp.stack(
        [det.keypoints for det in camera_detections]
    )  # (D, J, 2)

    # ------------------------------------------------------------------
    # Compute Δt matrix – shape (T, D)
    # ------------------------------------------------------------------
    # Epoch timestamps are ~1.7 × 10⁹; storing them in float32 wipes out
    # sub‑second detail (resolution ≈ 200 ms). Keep them in float64 until
    # after subtraction so we preserve Δt‑on‑the‑order‑of‑milliseconds.
+ # --- timestamps ---------- + t0 = min( + chain( + (trk.state.last_active_timestamp for trk in trackings), + (det.timestamp for det in camera_detections), + ) + ).timestamp() # common origin (float) + ts_trk = jnp.array( + [trk.state.last_active_timestamp.timestamp() - t0 for trk in trackings], + dtype=jnp.float32, # now small, ms-scale fits in fp32 + ) + ts_det = jnp.array( + [det.timestamp.timestamp() - t0 for det in camera_detections], + dtype=jnp.float32, + ) + # Δt in seconds, fp32 throughout + delta_t = ts_det[None, :] - ts_trk[:, None] # (T,D) + min_dt_s = float(DELTA_T_MIN.total_seconds()) + delta_t = jnp.clip(delta_t, a_min=min_dt_s, a_max=None) + + # ------------------------------------------------------------------ + # ---------- 2D affinity ------------------------------------------- + # ------------------------------------------------------------------ + # Project each tracking's 3D keypoints onto the image once. + # `Camera.project` works per-sample, so we vmap over the first axis. 

    proj_fn = jax.vmap(cam.project, in_axes=0)  # maps over the keypoint sets
    kps2d_trk_proj: Float[Array, "T J 2"] = proj_fn(kps3d_trk)  # (T, J, 2)

    # Normalise keypoints by image size so absolute units do not bias distance
    norm_trk = kps2d_trk_proj / jnp.array([w_img, h_img])
    norm_det = kps2d_det / jnp.array([w_img, h_img])

    # L2 distance for every (T, D, J)
    # reshape for broadcasting: (T,1,J,2) vs (1,D,J,2)
    diff2d = norm_trk[:, None, :, :] - norm_det[None, :, :, :]
    dist2d: Float[Array, "T D J"] = jnp.linalg.norm(diff2d, axis=-1)

    # Compute per-keypoint 2D affinity.
    # The affinity shrinks with distance (scaled by alpha_2d * Δt) and
    # decays exponentially with the age of the observation.
    delta_t_broadcast = delta_t[:, :, None]  # (T, D, 1)
    affinity_2d = (
        w_2d
        * (1 - dist2d / (alpha_2d * delta_t_broadcast))
        * jnp.exp(-lambda_a * delta_t_broadcast)
    )

    # ------------------------------------------------------------------
    # ---------- 3D affinity -------------------------------------------
    # ------------------------------------------------------------------
    # For each detection pre-compute back-projected 3D points lying on z=0 plane.

    backproj_points_list = [
        det.camera.unproject_points_to_z_plane(det.keypoints, z=0.0)
        for det in camera_detections
    ]  # each (J,3)
    backproj: Float[Array, "D J 3"] = jnp.stack(backproj_points_list)  # (D, J, 3)

    # Trackings without an estimated velocity are treated as stationary.
    zero_velocity = jnp.zeros((J, 3))
    trk_velocities = jnp.stack(
        [
            trk.velocity if trk.velocity is not None else zero_velocity
            for trk in trackings
        ]
    )

    # Constant-velocity prediction of each tracking's pose at each
    # detection's timestamp.
    predicted_pose: Float[Array, "T D J 3"] = (
        kps3d_trk[:, None, :, :]  # (T,1,J,3)
        + trk_velocities[:, None, :, :] * delta_t[:, :, None, None]  # (T,D,1,1)
    )

    # Camera center – shape (3,) -> will broadcast
    cam_center = cam.params.location

    # Compute perpendicular distance using vectorized formula
    # p1 = cam_center (3,)
    # p2 = backproj (D, J, 3)
    # P = predicted_pose (T, D, J, 3)
    # Broadcast plan: v1 = P - p1 → (T, D, J, 3)
    #                 v2 = p2[None, ...]-p1 → (1, D, J, 3)
    # Shapes now line up; no stray singleton axis.
    p1 = cam_center
    p2 = backproj
    P = predicted_pose
    v1 = P - p1
    v2 = p2[None, :, :, :] - p1  # (1, D, J, 3)
    # Point-to-line distance |v1 × v2| / |v2|: how far the predicted 3D
    # keypoint lies from the camera ray through the detected 2D keypoint.
    cross = jnp.cross(v1, v2)  # (T, D, J, 3)
    num = jnp.linalg.norm(cross, axis=-1)  # (T, D, J)
    den = jnp.linalg.norm(v2, axis=-1)  # (1, D, J)
    dist3d: Float[Array, "T D J"] = num / den

    affinity_3d = (
        w_3d * (1 - dist3d / alpha_3d) * jnp.exp(-lambda_a * delta_t_broadcast)
    )

    # ------------------------------------------------------------------
    # Combine and reduce across keypoints → (T, D)
    # ------------------------------------------------------------------
    total_affinity: Float[Array, "T D"] = jnp.sum(affinity_2d + affinity_3d, axis=-1)
    return total_affinity  # type: ignore[return-value]


@beartype
def calculate_affinity_matrix(
    trackings: Sequence[Tracking],
    detections: Sequence[Detection] | Mapping[CameraID, list[Detection]],
    w_2d: float,
    alpha_2d: float,
    w_3d: float,
    alpha_3d: float,
    lambda_a: float,
) -> dict[CameraID, AffinityResult]:
    """
    Calculate the affinity matrix between a set of trackings and detections.

    Args:
        trackings: Sequence of tracking objects
        detections: Sequence of detection objects or a group detections by ID
        w_2d: Weight for 2D affinity
        alpha_2d: Normalization factor for 2D distance
        w_3d: Weight for 3D affinity
        alpha_3d: Normalization factor for 3D distance
        lambda_a: Decay rate for time difference
    Returns:
        A dictionary mapping camera IDs to affinity results.
+ """ + if isinstance(detections, Mapping): + detection_by_camera = detections + else: + detection_by_camera = classify_by_camera(detections) + + res: dict[CameraID, AffinityResult] = {} + for camera_id, camera_detections in detection_by_camera.items(): + affinity_matrix = calculate_camera_affinity_matrix_jax( + trackings, + camera_detections, + w_2d, + alpha_2d, + w_3d, + alpha_3d, + lambda_a, + ) + # row, col + """ + indices_T:表示匹配到检测的tracking的索引(在tracking列表中的下标) + indices_D:表示匹配到tracking的detection的索引(在detections列表中的下标) + """ + indices_T, indices_D = linear_sum_assignment(affinity_matrix) + affinity_result = AffinityResult( + matrix=affinity_matrix, + trackings=trackings, + detections=camera_detections, + indices_T=indices_T, + indices_D=indices_D, + ) + res[camera_id] = affinity_result + return res + + +# 对每一个3d目标进行滑动窗口平滑处理 +def smooth_3d_keypoints( + all_3d_kps: dict[str, list], window_size: int = 5 +) -> dict[str, list]: + # window_size = 5 + kernel = np.ones(window_size) / window_size + smoothed_points = dict() + for item_object_index in all_3d_kps.keys(): + item_object = np.array(all_3d_kps[item_object_index]) + if item_object.shape[0] < window_size: + # 如果数据点少于窗口大小,则直接返回原始数据 + smoothed_points[item_object_index] = item_object.tolist() + continue + + # 对每个关键点的每个坐标轴分别做滑动平均 + item_smoothed = np.zeros_like(item_object) + # 遍历133个关节 + for kp_idx in range(item_object.shape[1]): + # 遍历每个关节的空间三维坐标点 + for axis in range(3): + # 对第i帧的滑动平滑方式 smoothed[i] = (point[i-2] + point[i-1] + point[i] + point[i+1] + point[i+2]) / 5 + item_smoothed[:, kp_idx, axis] = np.convolve( + item_object[:, kp_idx, axis], kernel, mode="same" + ) + smoothed_points[item_object_index] = item_smoothed.tolist() + return smoothed_points + + +# 相机内外参路径 +CAMERA_PATH = Path("/home/admin/Documents/2025_4_17/camera_params") +# 所有机位的相机内外参 +AK_CAMERA_DATASET: ak.Array = get_camera_params(CAMERA_PATH) +# 2d检测数据路径 +DATASET_PATH = Path("/home/admin/Documents/2025_4_17/detect_result/many_people_01/") +# 
the 2D detection data for the specified camera ports
camera_port = [5607, 5608, 5609]
KEYPOINT_DATASET = get_camera_detect(DATASET_PATH, camera_port, AK_CAMERA_DATASET)

# Take a sub-segment of the sequence (disabled for now)
# FRAME_INDEX = [i for i in range(20, 140)]
# KEYPOINT_DATASET = get_segment(camera_port, FRAME_INDEX, KEYPOINT_DATASET)

# Batch all 2D detections into per-timestamp groups
sync_gen: Generator[list[Detection], Any, None] = get_batch_detect(
    KEYPOINT_DATASET,
    AK_CAMERA_DATASET,
    camera_port,
    batch_fps=24,
)


# Image width/height in pixels
WIDTH = 2560
HEIGHT = 1440
# Tracking hyper-parameters (weights / normalisers of the affinity terms)
W_2D = 0.7
ALPHA_2D = 60.0
LAMBDA_A = 5.0
W_3D = 0.3
ALPHA_3D = 0.1
# 3D results: key is the tracking target id, value is all of its 3D poses
all_3d_kps: dict[str, list] = {}
# Currently tracked targets
trackings: list[Tracking] = []
# Global container that owns the tracking targets
global_tracking_state = GlobalTrackingState()
count = 0
while True:
    count += 1
    try:
        # Fetch all camera detections for the next timestamp
        detections = next(sync_gen)
        print(detections)
    except StopIteration:
        # Detection data exhausted: leave the main loop
        print("No more detections.")
        break

    # Affinity matrix between 2D detections via the epipolar constraint
    sorted_detections, affinity_matrix = (
        calculate_affinity_matrix_by_epipolar_constraint(detections, alpha_2d=3500)
    )
    # Cluster the detections (ideally one cluster per person)
    solver = GLPKSolver()
    aff_np = np.asarray(affinity_matrix).astype(np.float64)
    clusters, sol_matrix = solver.solve(aff_np)
    print(f"Clusters: {clusters}")

    # Split the detections by cluster
    clusters_detections = clusters_to_detections(clusters, sorted_detections)

    # Current tracking targets, sorted by id for deterministic ordering
    trackings = sorted(global_tracking_state.trackings.values(), key=lambda x: x.id)

    # ===== tracking initialisation =====
    # If there is no tracking target yet, initialise one per cluster
    # (assumes the first frame already covers every target)
    if len(trackings) == 0:
        # Initialise a tracking target from every sufficiently covered cluster
        for cluster in clusters_detections:
            if len(cluster) < 2:
                continue

            # Require the cluster to be observed by every configured camera
            camera_port_this = filter_camera_port(cluster)
            if len(camera_port_this) < len(camera_port):
                continue

            global_tracking_state.add_tracking(cluster)

        # Keep the first frame's 3D pose, stored in all_3d_kps keyed by id
        for element_tracking in global_tracking_state.trackings.values():
            if str(element_tracking.id) not in all_3d_kps.keys():
                all_3d_kps[str(element_tracking.id)] = [
                    element_tracking.state.keypoints.tolist()
                ]
        # 
skip the rest of this frame and move on to the next one
        continue

    # ===== lost-target handling (not implemented yet) =====

    # ===== update tracking state =====
    # For every cluster, compute the affinity between its 2D detections and
    # the current tracking targets
    for cluster in clusters_detections:
        if len(cluster) == 0:
            continue
        # Per-camera affinity between all trackings and this cluster
        affinities: dict[CameraID, AffinityResult] = calculate_affinity_matrix(
            trackings,
            cluster,
            w_2d=W_2D,
            alpha_2d=ALPHA_2D,
            w_3d=W_3D,
            alpha_3d=ALPHA_3D,
            lambda_a=LAMBDA_A,
        )
        unmatch_detection = []  # NOTE(review): collected nowhere yet (WIP)
        # Walk the tracking targets and update each with its matched detections
        for element_tracking in trackings:
            # 2D detections matched to this tracking target
            tracking_detections = []
            for camera_name in affinities.keys():
                # NOTE(review): .item() assumes the assignment produced exactly
                # one (tracking, detection) pair per camera — this raises once
                # several people are tracked; confirm before scaling up.
                indices_T = affinities[camera_name].indices_T.item()
                indices_S = affinities[camera_name].indices_D.item()
                match_tracking = affinities[camera_name].trackings[indices_T]
                if match_tracking == element_tracking:
                    tracking_detections.append(
                        affinities[camera_name].detections[indices_S]
                    )

            # Only update when enough 2D views are available.
            # NOTE(review): `> 2` requires at least 3 views, although two views
            # already suffice for triangulation — confirm intended threshold.
            if len(tracking_detections) > 2:
                # Update the tracking target
                update_tracking(element_tracking, tracking_detections)
                # Record the updated 3D pose
                all_3d_kps[str(element_tracking.id)].append(
                    element_tracking.state.keypoints.tolist()
                )

    if count == 4:
        break

    # break
# Apply sliding-window smoothing to every 3D target
smoothed_points = smooth_3d_keypoints(all_3d_kps, window_size=5)


# Save the result to a JSON file
with open("samples/Test_YEU.json", "wb") as f:
    f.write(orjson.dumps(smoothed_points))

"""=====代买还没改完,目前存在的问题是时间戳不同步,同一组中存在多帧的数据====="""