[Add] AR51 readout data (de)serialiser #103

Open · wants to merge 1 commit into base: main
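For context, here is a minimal round-trip sketch of the API this change adds. It is illustrative only (the source name, message id, and payload are made up) and not part of the diff:

import numpy as np

from streaming_data_types.readout_data_ar51 import deserialise_ar51, serialise_ar51

# Serialise a made-up readout and read it straight back.
buffer = serialise_ar51(
    source_name="some_source",
    message_id=42,
    raw_data=np.arange(16, dtype=np.uint8),
)
readout = deserialise_ar51(buffer)
print(readout.source_name, readout.message_id, readout.raw_data)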
11 changes: 7 additions & 4 deletions streaming_data_types/__init__.py
@@ -15,20 +15,21 @@
from streaming_data_types.eventdata_ev43 import deserialise_ev43, serialise_ev43
from streaming_data_types.eventdata_ev44 import deserialise_ev44, serialise_ev44
from streaming_data_types.finished_writing_wrdn import deserialise_wrdn, serialise_wrdn
from streaming_data_types.forwarder_config_update_rf5k import (
deserialise_rf5k,
serialise_rf5k,
)
from streaming_data_types.forwarder_config_update_fc00 import (
deserialise_fc00,
serialise_fc00,
)
from streaming_data_types.forwarder_config_update_rf5k import (
deserialise_rf5k,
serialise_rf5k,
)
from streaming_data_types.histogram_hs00 import deserialise_hs00, serialise_hs00
from streaming_data_types.histogram_hs01 import deserialise_hs01, serialise_hs01
from streaming_data_types.json_json import deserialise_json, serialise_json
from streaming_data_types.logdata_f142 import deserialise_f142, serialise_f142
from streaming_data_types.logdata_f144 import deserialise_f144, serialise_f144
from streaming_data_types.nicos_cache_ns10 import deserialise_ns10, serialise_ns10
from streaming_data_types.readout_data_ar51 import deserialise_ar51, serialise_ar51
from streaming_data_types.run_start_pl72 import deserialise_pl72, serialise_pl72
from streaming_data_types.run_stop_6s4t import deserialise_6s4t, serialise_6s4t
from streaming_data_types.sample_environment_senv import (
@@ -67,6 +68,7 @@
"json": serialise_json,
"ad00": serialise_ad00,
"da00": serialise_da00,
"ar51": serialise_ar51,
}


@@ -97,4 +99,5 @@
"json": deserialise_json,
"ad00": deserialise_ad00,
"da00": deserialise_da00,
"ar51": deserialise_ar51,
}
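With these entries in place, ar51 can also be reached through the generic registries, in the same way as the existing schemas. A small sketch (values are illustrative):

from streaming_data_types import DESERIALISERS, SERIALISERS

# Dispatch by schema id instead of importing the module directly.
payload = SERIALISERS["ar51"]("some_source", 1, bytearray([0, 1, 2]))
decoded = DESERIALISERS["ar51"](payload)
assert decoded.source_name == "some_source" and decoded.message_id == 1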
130 changes: 130 additions & 0 deletions streaming_data_types/fbschemas/readout_data_ar51/RawReadoutMessage.py
@@ -0,0 +1,130 @@
# automatically generated by the FlatBuffers compiler, do not modify

# namespace:

import flatbuffers
from flatbuffers.compat import import_numpy

np = import_numpy()


class RawReadoutMessage(object):
__slots__ = ["_tab"]

@classmethod
def GetRootAs(cls, buf, offset=0):
n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset)
x = RawReadoutMessage()
x.Init(buf, n + offset)
return x

@classmethod
def GetRootAsRawReadoutMessage(cls, buf, offset=0):
"""This method is deprecated. Please switch to GetRootAs."""
return cls.GetRootAs(buf, offset)

@classmethod
def RawReadoutMessageBufferHasIdentifier(cls, buf, offset, size_prefixed=False):
return flatbuffers.util.BufferHasIdentifier(
buf, offset, b"\x61\x72\x35\x31", size_prefixed=size_prefixed
)

# RawReadoutMessage
def Init(self, buf, pos):
self._tab = flatbuffers.table.Table(buf, pos)

# RawReadoutMessage
def SourceName(self):
o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4))
if o != 0:
return self._tab.String(o + self._tab.Pos)
return None

# RawReadoutMessage
def MessageId(self):
o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6))
if o != 0:
return self._tab.Get(flatbuffers.number_types.Int64Flags, o + self._tab.Pos)
return 0

# RawReadoutMessage
def RawData(self, j):
o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8))
if o != 0:
a = self._tab.Vector(o)
return self._tab.Get(
flatbuffers.number_types.Uint8Flags,
a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 1),
)
return 0

# RawReadoutMessage
def RawDataAsNumpy(self):
o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8))
if o != 0:
return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Uint8Flags, o)
return 0

# RawReadoutMessage
def RawDataLength(self):
o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8))
if o != 0:
return self._tab.VectorLen(o)
return 0

# RawReadoutMessage
def RawDataIsNone(self):
o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8))
return o == 0


def RawReadoutMessageStart(builder):
builder.StartObject(3)


def Start(builder):
RawReadoutMessageStart(builder)


def RawReadoutMessageAddSourceName(builder, sourceName):
builder.PrependUOffsetTRelativeSlot(
0, flatbuffers.number_types.UOffsetTFlags.py_type(sourceName), 0
)


def AddSourceName(builder, sourceName):
RawReadoutMessageAddSourceName(builder, sourceName)


def RawReadoutMessageAddMessageId(builder, messageId):
builder.PrependInt64Slot(1, messageId, 0)


def AddMessageId(builder, messageId):
RawReadoutMessageAddMessageId(builder, messageId)


def RawReadoutMessageAddRawData(builder, rawData):
builder.PrependUOffsetTRelativeSlot(
2, flatbuffers.number_types.UOffsetTFlags.py_type(rawData), 0
)


def AddRawData(builder, rawData):
RawReadoutMessageAddRawData(builder, rawData)


def RawReadoutMessageStartRawDataVector(builder, numElems):
return builder.StartVector(1, numElems, 1)


def StartRawDataVector(builder, numElems: int) -> int:
return RawReadoutMessageStartRawDataVector(builder, numElems)


def RawReadoutMessageEnd(builder):
return builder.EndObject()


def End(builder):
return RawReadoutMessageEnd(builder)
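The generated module can also be used directly, for example to check the ar51 file identifier before parsing a buffer. A sketch assuming a buffer produced by serialise_ar51 (added below in streaming_data_types/readout_data_ar51.py):

import streaming_data_types.fbschemas.readout_data_ar51.RawReadoutMessage as RawReadoutMessage
from streaming_data_types.readout_data_ar51 import serialise_ar51

buf = serialise_ar51("some_source", 7, bytearray([1, 2, 3]))

# Verify the 'ar51' identifier, then read fields through the generated accessors.
if RawReadoutMessage.RawReadoutMessage.RawReadoutMessageBufferHasIdentifier(buf, 0):
    msg = RawReadoutMessage.RawReadoutMessage.GetRootAs(buf, 0)
    print(msg.SourceName().decode("utf-8"), msg.MessageId(), msg.RawDataLength())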
Empty file.
66 changes: 66 additions & 0 deletions streaming_data_types/readout_data_ar51.py
@@ -0,0 +1,66 @@
from collections import namedtuple

import flatbuffers
import numpy as np

import streaming_data_types.fbschemas.readout_data_ar51.RawReadoutMessage as RawReadoutMessage
from streaming_data_types.utils import check_schema_identifier

FILE_IDENTIFIER = b"ar51"


RawReadoutData = namedtuple(
"RawReadoutData",
(
"source_name",
"message_id",
"raw_data",
),
)


def deserialise_ar51(buffer):
"""
Deserialize FlatBuffer ar51.

:param buffer: The FlatBuffers buffer.
:return: The deserialized data.
"""
check_schema_identifier(buffer, FILE_IDENTIFIER)

event = RawReadoutMessage.RawReadoutMessage.GetRootAs(buffer, 0)

return RawReadoutData(
event.SourceName().decode("utf-8"),
event.MessageId(),
event.RawDataAsNumpy(),
)


def serialise_ar51(
source_name,
message_id,
raw_data,
):
"""
Serialize data as an ar51 FlatBuffers message.

:param source_name: Name of the source that produced the readout.
:param message_id: Identifier of the message.
:param raw_data: Raw readout data; coerced to an array of unsigned bytes.
:return: The serialised FlatBuffers message as bytes.
"""
builder = flatbuffers.Builder(1024)
builder.ForceDefaults(True)

source = builder.CreateString(source_name)
raw_data_data = builder.CreateNumpyVector(np.asarray(raw_data).astype(np.ubyte))
RawReadoutMessage.RawReadoutMessageStart(builder)
RawReadoutMessage.RawReadoutMessageAddRawData(builder, raw_data_data)
RawReadoutMessage.RawReadoutMessageAddMessageId(builder, message_id)
RawReadoutMessage.RawReadoutMessageAddSourceName(builder, source)

data = RawReadoutMessage.RawReadoutMessageEnd(builder)
builder.Finish(data, file_identifier=FILE_IDENTIFIER)

return bytes(builder.Output())
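Note that raw_data is passed through np.asarray(...).astype(np.ubyte) before being written, so a list, bytearray, or numpy array all work as input. A quick sketch (illustrative values):

import numpy as np

from streaming_data_types.readout_data_ar51 import deserialise_ar51, serialise_ar51

# The same payload supplied in three different container types.
for raw in ([1, 2, 3], bytearray([1, 2, 3]), np.array([1, 2, 3], dtype=np.uint8)):
    buf = serialise_ar51("some_source", 0, raw)
    assert np.array_equal(deserialise_ar51(buf).raw_data, [1, 2, 3])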
80 changes: 80 additions & 0 deletions tests/test_ar51.py
@@ -0,0 +1,80 @@
import numpy as np
import pytest

from streaming_data_types import DESERIALISERS, SERIALISERS
from streaming_data_types.exceptions import WrongSchemaException
from streaming_data_types.readout_data_ar51 import deserialise_ar51, serialise_ar51


class TestSerialisationAR51:
def test_serialises_and_deserialises_ar51_message_correctly(self):
"""
Round-trip to check what we serialise is what we get back.
"""
original_entry = {
"source_name": "some_source",
"message_id": 123456,
"raw_data": bytearray(
[
0,
1,
2,
3,
4,
5,
6,
7,
8,
]
),
}

buf = serialise_ar51(**original_entry)
entry = deserialise_ar51(buf)

assert entry.source_name == original_entry["source_name"]
assert entry.message_id == original_entry["message_id"]
assert np.array_equal(entry.raw_data, original_entry["raw_data"])

def test_serialises_and_deserialises_ar51_message_correctly_for_numpy_arrays(self):
"""
Round-trip to check what we serialise is what we get back.
"""
original_entry = {
"source_name": "some_source",
"message_id": 123456,
"raw_data": np.array([100, 200, 30, 40, 50, 60, 70, 80, 90]),
}

buf = serialise_ar51(**original_entry)
entry = deserialise_ar51(buf)

assert entry.source_name == original_entry["source_name"]
assert entry.message_id == original_entry["message_id"]
assert np.array_equal(entry.raw_data, original_entry["raw_data"])

def test_if_buffer_has_wrong_id_then_throws(self):
original_entry = {
"source_name": "some_source",
"message_id": 123456,
"raw_data": np.array([100, 200, 300, 400, 500, 600, 700, 800, 900]),
}

buf = serialise_ar51(**original_entry)

# Manually introduce error in id.
buf = bytearray(buf)
buf[4:8] = b"1234"

with pytest.raises(WrongSchemaException):
deserialise_ar51(buf)

def test_schema_type_is_in_global_serialisers_list(self):
assert "ar51" in SERIALISERS
assert "ar51" in DESERIALISERS


if __name__ == "__main__":
    pytest.main([__file__])