Skip to content

Commit f8dc1f8

Browse files
authored
Support Realigning Data Frames and Video Frames (#317)
* Renamed `DynamicProcessor` to `Preprocessor`. (#308)
* Renamed `StaticProcessor` to `Processor`. (#308)
* Bug fixed: missed unit cache. (#308)
* Supported reversing datasets. (#308)
* Code reformatted. (#308)
* Added `realign_visual_data()` but not completed. (#308)
* Bug fixed: dataset may not be loaded. (#308)
* Removed `reverse()` because Pandas does not support it. (#308)
* Removed `reverse()` because Pandas does not support it. (#308)
* Supported specifying required header in the constructor of `Inference`. (#308)
* Code reformatted. (#308)
* Bug fixed: wrong header. (#305) (#308)
* Added `VisualDataRealignmentByLatency`. (#308)
1 parent 731c270 commit f8dc1f8

File tree

5 files changed

+45
-14
lines changed

5 files changed

+45
-14
lines changed
Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
-from leads.data_persistence.analyzer.dynamic import *
 from leads.data_persistence.analyzer.inference import *
 from leads.data_persistence.analyzer.jarvis import *
+from leads.data_persistence.analyzer.preprocess import *
 from leads.data_persistence.analyzer.utils import *

leads/data_persistence/analyzer/inference.py

Lines changed: 35 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -1,25 +1,31 @@
 from abc import ABCMeta as _ABCMeta, abstractmethod as _abstractmethod
-from typing import Any as _Any, override as _override, Generator as _Generator
+from typing import Any as _Any, override as _override, Generator as _Generator, Literal as _Literal
 
 from leads.data_persistence.analyzer.utils import time_invalid, speed_invalid, acceleration_invalid, \
     mileage_invalid, latitude_invalid, longitude_invalid, distance_between
-from leads.data_persistence.core import CSVDataset, DEFAULT_HEADER
+from leads.data_persistence.core import CSVDataset, DEFAULT_HEADER, VISUAL_HEADER_ONLY
 
 
 class Inference(object, metaclass=_ABCMeta):
-    def __init__(self, required_depth: tuple[int, int] = (0, 0)) -> None:
+    def __init__(self, required_depth: tuple[int, int] = (0, 0),
+                 required_header: tuple[str, ...] = DEFAULT_HEADER) -> None:
         """
         Declare the scale of data this inference requires.
         :param required_depth: (-depth backward, depth forward)
+        :param required_header: the necessary header that the dataset must contain for this inference to work
         """
         self._required_depth: tuple[int, int] = required_depth
+        self._required_header: tuple[str, ...] = required_header
 
     def depth(self) -> tuple[int, int]:
         """
         :return: (-depth backward, depth forward)
         """
         return self._required_depth
 
+    def header(self) -> tuple[str, ...]:
+        return self._required_header
+
     @_abstractmethod
     def complete(self, *rows: dict[str, _Any], backward: bool = False) -> dict[str, _Any] | None:
         """
@@ -45,7 +51,7 @@ class SafeSpeedInference(SpeedInferenceBase):
     """
 
     def __init__(self) -> None:
-        super().__init__((0, 0))
+        super().__init__()
 
     @_override
     def complete(self, *rows: dict[str, _Any], backward: bool = False) -> dict[str, _Any] | None:
@@ -111,7 +117,7 @@ class SpeedInferenceByGPSGroundSpeed(SpeedInferenceBase):
     """
 
     def __init__(self) -> None:
-        super().__init__((0, 0))
+        super().__init__()
 
     @_override
     def complete(self, *rows: dict[str, _Any], backward: bool = False) -> dict[str, _Any] | None:
@@ -225,6 +231,27 @@ def complete(self, *rows: dict[str, _Any], backward: bool = False) -> dict[str,
         }
 
 
+class VisualDataRealignmentByLatency(Inference):
+    def __init__(self, *channels: _Literal["front", "left", "right", "rear"]) -> None:
+        super().__init__((0, 1), VISUAL_HEADER_ONLY)
+        self._channels: tuple[_Literal["front", "left", "right", "rear"], ...] = channels if channels else (
+            "front", "left", "right", "rear")
+
+    @_override
+    def complete(self, *rows: dict[str, _Any], backward: bool = False) -> dict[str, _Any] | None:
+        if backward:
+            return None
+        target, base = rows
+        original_target = target.copy()
+        t_0, t = target["t"], base["t"]
+        for channel in self._channels:
+            if (new_latency := t_0 - t + base[f"{channel}_view_latency"]) > 0:
+                continue
+            target[f"{channel}_view_base64"] = base[f"{channel}_view_base64"]
+            target[f"{channel}_view_latency"] = new_latency
+        return None if target == original_target else target
+
+
 class InferredDataset(CSVDataset):
     def __init__(self, file: str, chunk_size: int = 100) -> None:
         super().__init__(file, chunk_size)
@@ -291,8 +318,9 @@ def complete(self, *inferences: Inference, enhanced: bool = False, assume_initia
         :param enhanced: True: use inferred data to infer other data; False: use only raw data to infer other data
         :param assume_initial_zeros: True: reasonably set any missing data in the first row to zero; False: no change
         """
-        if DEFAULT_HEADER in self.read_header():
-            raise KeyError("Your dataset must include the default header")
+        for inference in inferences:
+            if not set(rh := inference.header()).issubset(ah := self.read_header()):
+                raise KeyError(f"Inference {inference} requires header {rh} but the dataset only contains {ah}")
         if assume_initial_zeros:
             self.assume_initial_zeros()
         self._complete(inferences, enhanced, False)

leads/data_persistence/analyzer/dynamic.py renamed to leads/data_persistence/analyzer/preprocess.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -4,7 +4,7 @@
 from .._computational import array as _array, ndarray as _ndarray
 
 
-class DynamicProcessor(object):
+class Preprocessor(object):
     def __init__(self, data_seq: _Sequence[dict[str, _Any]]) -> None:
         self._data_seq: _Sequence[dict[str, _Any]] = data_seq
 
leads/data_persistence/analyzer/static.py renamed to leads/data_persistence/analyzer/processor.py

Lines changed: 6 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -15,7 +15,7 @@
 from .._computational import sqrt as _sqrt
 
 
-class StaticProcessor(object):
+class Processor(object):
     def __init__(self, dataset: CSVDataset) -> None:
         if DEFAULT_HEADER in dataset.read_header():
             raise KeyError("Your dataset must include the default header")
@@ -54,6 +54,7 @@ def __init__(self, dataset: CSVDataset) -> None:
         self._lap_d: list[float] = []
         self._max_lap_x: float | None = None
         self._max_lap_y: float | None = None
+        self._required_time: int = 0
 
     def dataset(self) -> CSVDataset:
         return self._dataset
@@ -119,7 +120,7 @@ def baking_results(self) -> tuple[str, str, str, str, str, str, str, str, str, s
         return (
             f"Baked {self._valid_rows_count} / {self._read_rows_count} ROWS",
             f"Baking Rate: {100 * self._valid_rows_count / self._read_rows_count:.2f}%",
-            f"Skipped Rows: {StaticProcessor._hide_others(self._invalid_rows, 5)}",
+            f"Skipped Rows: {Processor._hide_others(self._invalid_rows, 5)}",
             f"Start Time: {_datetime.fromtimestamp(self._start_time * .001).strftime("%Y-%m-%d %H:%M:%S")}",
             f"End Time: {_datetime.fromtimestamp(self._end_time * .001).strftime("%Y-%m-%d %H:%M:%S")}",
             f"Duration: {format_duration(self._duration * .001)}",
@@ -128,7 +129,7 @@ def baking_results(self) -> tuple[str, str, str, str, str, str, str, str, str, s
             f"v\u2098\u2090\u2093: {self._max_speed:.2f} KM / H",
             f"v\u2090\u1D65\u1D4D: {self._avg_speed:.2f} KM / H",
             f"GPS Hit Rate: {100 * self._gps_valid_count / self._valid_rows_count:.2f}%",
-            f"GPS Skipped Rows: {StaticProcessor._hide_others(self._gps_invalid_rows, 5)}"
+            f"GPS Skipped Rows: {Processor._hide_others(self._gps_invalid_rows, 5)}"
         )
 
     def erase_unit_cache(self) -> None:
@@ -138,6 +139,8 @@ def erase_unit_cache(self) -> None:
         self._lap_x.clear()
         self._lap_y.clear()
         self._lap_d.clear()
+        self._max_lap_x = None
+        self._max_lap_y = None
 
     def foreach(self, do: _Callable[[dict[str, _Any], int], None], skip_invalid_rows: bool = True,
                 skip_gps_invalid_rows: bool = False) -> None:

leads/data_persistence/core.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -186,7 +186,7 @@ def write_frame(self, *data: _Any) -> None:
             frame[self._header[i]] = d = data[i]
             if column := self._columns[i]:
                 column.append(d)
-        _DataFrame(data=frame, index=[self._i]).to_csv(self._file, mode="a", header=False)
+        _DataFrame(frame, [self._i]).to_csv(self._file, mode="a", header=False)
         self._i += 1
 
     def close(self) -> None:
@@ -261,7 +261,7 @@ def close(self) -> None:
 )
 VISUAL_HEADER_ONLY: tuple[str, str, str, str, str, str, str, str] = (
     "front_view_base64", "front_view_latency", "left_view_base64", "left_view_latency", "right_view_base64",
-    "front_view_latency", "rear_view_base64", "rear_view_latency"
+    "right_view_latency", "rear_view_base64", "rear_view_latency"
 )
 VISUAL_HEADER: _VisualHeader = DEFAULT_HEADER + VISUAL_HEADER_ONLY
 VISUAL_HEADER_FULL: _VisualHeaderFull = DEFAULT_HEADER_FULL + VISUAL_HEADER_ONLY

0 commit comments

Comments (0)