DO NOT MERGE: add two versions of dram stream writer
rroohhh committed Aug 16, 2021
1 parent 08952bf commit 688bc56
Showing 1 changed file with 67 additions and 3 deletions.
naps/cores/dram_packet_ringbuffer/stream_if.py: 67 additions & 3 deletions
@@ -3,10 +3,9 @@
from naps.cores import AxiReader, AxiWriter, if_none_get_zynq_hp_port, StreamInfo, LastWrapper, StreamTee
from naps import PacketizedStream, BasicStream, stream_transformer, StatusSignal

__all__ = ["DramPacketRingbufferStreamWriter", "DramPacketRingbufferStreamReader"]
__all__ = ["DramPacketRingbufferStreamWriter", "DramPacketRingbufferStreamWriterV2", "DramPacketRingbufferStreamReader"]


class DramPacketRingbufferStreamWriter(Elaboratable):
class DramPacketRingbufferStreamWriterV2(Elaboratable):
    def __init__(
            self,
            input: PacketizedStream,
@@ -70,6 +69,71 @@ def elaborate(self, platform):
        return m



class DramPacketRingbufferStreamWriter(Elaboratable):
    def __init__(
            self,
            input: PacketizedStream,
            max_packet_size, n_buffers, base_address=0x0f80_0000,
            axi=None,
    ):
        self.max_packet_size = max_packet_size
        self.base_address = base_address
        self.n_buffers = n_buffers
        self.axi = axi

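        # Ring layout: n_buffers contiguous slots of max_packet_size bytes each, starting at
        # base_address; buffer_level_list tracks how many bytes of each slot hold valid data.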
        self.buffer_base_list_cpu = [base_address + max_packet_size * i for i in range(n_buffers)]
        self.buffer_base_list = Array(self.buffer_base_list_cpu)
        self.buffer_level_list = Array([Signal(range(max_packet_size), name=f'buffer{i}_level') for i in range(n_buffers)])
        self.current_write_buffer = Signal(range(n_buffers))

        assert hasattr(input, "last")
        self.input = input

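        # Status counters: packets that overflowed a slot and packets written in total.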
        self.overflowed_buffers = StatusSignal(32)
        self.buffers_written = StatusSignal(32)

    def elaborate(self, platform):
        m = Module()

        axi = if_none_get_zynq_hp_port(self.axi, m, platform)
        assert len(self.input.payload) <= axi.data_bits

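        # Split the input into two legs: one carries the payload to the AXI write data channel,
        # the other drives the write address generation below.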
        tee = m.submodules.tee = StreamTee(self.input)

        data_stream = BasicStream(self.input.payload.shape())
        m.d.comb += data_stream.connect_upstream(tee.get_output(), allow_partial=True)

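        # Emit one write address per accepted data beat; latency=0 ties the address stream
        # handshake directly to the tee output.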
        transformer_input = tee.get_output()
        address_stream = BasicStream(axi.write_address.payload.shape())
        address_offset = Signal.like(axi.write_address.payload)
        is_in_overflow = Signal()
        stream_transformer(transformer_input, address_stream, m, latency=0, handle_out_of_band=False)

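        # Per-beat bookkeeping: update the current slot's fill level; on `last`, wrap to the
        # next slot and count the packet as written. If a packet would exceed max_packet_size,
        # the offset stops advancing (excess beats are written at the final offset) and the
        # packet is counted as overflowed exactly once.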
        with m.If(transformer_input.ready & transformer_input.valid):
            m.d.sync += self.buffer_level_list[self.current_write_buffer].eq(address_offset + axi.data_bytes)
            with m.If(transformer_input.last):
                m.d.sync += is_in_overflow.eq(0)
                next_buffer = (self.current_write_buffer + 1) % self.n_buffers
                m.d.sync += address_offset.eq(0)
                m.d.sync += self.current_write_buffer.eq(next_buffer)
                m.d.sync += self.buffers_written.eq(self.buffers_written + 1)
            with m.Else():
                with m.If((address_offset + axi.data_bytes < self.max_packet_size)):
                    m.d.sync += address_offset.eq(address_offset + axi.data_bytes)
                with m.Else():
                    with m.If(~is_in_overflow):
                        m.d.sync += is_in_overflow.eq(1)
                        m.d.sync += self.overflowed_buffers.eq(self.overflowed_buffers + 1)
        m.d.comb += address_stream.payload.eq(address_offset + self.buffer_base_list[self.current_write_buffer])

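        # The AxiWriter pairs address and data beats and performs the actual AXI writes.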
        m.submodules.writer = AxiWriter(address_stream, data_stream, axi)

        m.submodules.input_stream_info = StreamInfo(self.input)

        return m


class DramPacketRingbufferStreamReader(Elaboratable):
    def __init__(self, writer: DramPacketRingbufferStreamWriter, data_width=64, length_fifo_depth=1, axi=None):
        self.writer = writer
