Skip to content

Commit

Permalink
Storage: Remove method add_acceleration
Browse files Browse the repository at this point in the history
  • Loading branch information
sanssecours committed Sep 4, 2024
1 parent 19986cb commit 51ee72e
Show file tree
Hide file tree
Showing 2 changed files with 46 additions and 73 deletions.
112 changes: 42 additions & 70 deletions mytoolit/measurement/storage.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@
from datetime import datetime
from pathlib import Path
from types import TracebackType
from typing import Dict, Optional, Sequence, Type, Union
from typing import Dict, Optional, Type, Union

from tables import (
File,
Expand Down Expand Up @@ -216,9 +216,10 @@ def __init__(
Create new data
>>> filepath = Path("test.hdf5")
>>> streaming_data = StreamingData(values=[1, 2, 3], counter=1,
... timestamp=4306978.449)
>>> with Storage(filepath, StreamingConfiguration(first=True)) as data:
... data.add_acceleration(values=[12], counter=1,
... timestamp=4306978.449)
... data.add_streaming_data(streaming_data)
Read from existing file
Expand Down Expand Up @@ -263,62 +264,6 @@ def __init__(
f"incorrect format: {error}"
) from error

def add_acceleration(
    self, values: Sequence[float], counter: int, timestamp: float
) -> None:
    """Append acceleration data

    Parameters
    ----------

    values:
        The acceleration values that should be added

    counter:
        The message counter sent in the package that contained the
        acceleration value

    timestamp:
        The timestamp of the acceleration message in milliseconds

    Example
    -------

    >>> filepath = Path("test.hdf5")
    >>> with Storage(filepath, StreamingConfiguration(first=True)) as data:
    ...     data.add_acceleration(values=[12, 13, 14], counter=1,
    ...                           timestamp=4306978.449)
    >>> filepath.unlink()

    """

    # The first sample defines the reference time; all stored timestamps
    # are relative to it.
    if self.start_time is None:
        self.start_time = timestamp
        self.acceleration.attrs["Start_Time"] = datetime.now().isoformat()

    assert isinstance(self.start_time, (int, float))

    row = self.acceleration.row
    timestamp = (timestamp - self.start_time) * 1000

    if len(self.axes) == 1:
        # Single axis: every value is its own row (all values of the
        # message share timestamp and counter).
        axis = self.axes[0]
        for value in values:
            row["timestamp"] = timestamp
            row["counter"] = counter
            row[axis] = value
            row.append()
    else:
        # Multiple axes: one row per message, one column per axis.
        row["timestamp"] = timestamp
        row["counter"] = counter
        for acceleration_type, value in zip(self.axes, values):
            row[acceleration_type] = value
        row.append()

    # Flush data to disk every few values to keep memory usage in check
    if self.acceleration.nrows % 1000 == 0:
        self.acceleration.flush()

def add_streaming_data(
self,
streaming_data: StreamingData,
Expand Down Expand Up @@ -360,11 +305,36 @@ def add_streaming_data(
"""

self.add_acceleration(
values=streaming_data.values,
timestamp=streaming_data.timestamp * 1000,
counter=streaming_data.counter,
)
values = streaming_data.values
timestamp = streaming_data.timestamp
counter = streaming_data.counter

if self.start_time is None:
self.start_time = timestamp
self.acceleration.attrs["Start_Time"] = datetime.now().isoformat()

assert isinstance(self.start_time, (int, float))

row = self.acceleration.row
timestamp = (timestamp - self.start_time) * 1_000_000

if len(self.axes) == 1:
axis = self.axes[0]
for value in values:
row["timestamp"] = timestamp
row["counter"] = counter
row[axis] = value
row.append()
else:
row["timestamp"] = timestamp
row["counter"] = counter
for accelertation_type, value in zip(self.axes, values):
row[accelertation_type] = value
row.append()

# Flush data to disk every few values to keep memory usage in check
if self.acceleration.nrows % 1000 == 0:
self.acceleration.flush()

def add_acceleration_meta(self, name: str, value: str) -> None:
"""Add acceleration metadata
Expand Down Expand Up @@ -408,13 +378,15 @@ def dataloss(self) -> float:
... with Storage(filepath,
... StreamingConfiguration(first=True)) as storage:
... for counter in range(256):
... storage.add_acceleration(values=[1, 2, 3],
... counter=counter,
... timestamp=counter/10)
... storage.add_streaming_data(
... StreamingData(values=[1, 2, 3],
... counter=counter,
... timestamp=counter/10))
... for counter in range(128, 256):
... storage.add_acceleration(values=[4, 5, 6],
... counter=counter,
... timestamp=(255 + counter)/10)
... storage.add_streaming_data(
... StreamingData(values=[4, 5, 6],
... counter=counter,
... timestamp=(255 + counter)/10))
...
... dataloss = storage.dataloss()
... filepath.unlink()
Expand Down
7 changes: 4 additions & 3 deletions mytoolit/old/cli.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,7 @@
PCAN_RECEIVE_EVENT,
)

from mytoolit.can.streaming import StreamingData
from mytoolit.cmdline.parse import (
add_channel_arguments,
mac_address,
Expand Down Expand Up @@ -998,7 +999,7 @@ def vGetStreamingAccData(self):
self.vGetStreamingAccDataProcess()

def update_acceleration_data(self, data, timestamp_ms):
timestamp = round(timestamp_ms, 3)
timestamp = timestamp_ms / 1000
counter = data[1]

axes = [
Expand All @@ -1023,8 +1024,8 @@ def update_acceleration_data(self, data, timestamp_ms):
for start in range(2, 2 + number_values * 2, 2)
]

self.data.add_acceleration(
values=values, counter=counter, timestamp=timestamp
self.data.add_streaming_data(
StreamingData(values=values, counter=counter, timestamp=timestamp)
)
if self.tAccDataFormat == DataSets[1]:
axis_values = {axis: value for axis, value in zip(axes, values)}
Expand Down

0 comments on commit 51ee72e

Please sign in to comment.