forked from M-Labs/artiq-zynq
cxp GW: update test mode
parent 635c62b8e6
commit 9251946952
@@ -97,10 +97,8 @@ class RX_Pipeline(Module, AutoCSR):
        self.sync += self.ready.status.eq(gtx.rx_ready)

        # DEBUG: init status
        self.txinit_phaligndone = CSRStatus()
        self.rxinit_phaligndone = CSRStatus()
        self.comb += [
            self.txinit_phaligndone.status.eq(gtx.tx_init.Xxphaligndone),
            self.rxinit_phaligndone.status.eq(gtx.rx_init.Xxphaligndone),
        ]
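For reference, the txinit_phaligndone/rxinit_phaligndone registers follow the usual Migen/MiSoC pattern of mirroring an internal status signal into a CSRStatus so software can poll it over the CSR bus. A minimal sketch of that pattern; the module name InitStatus and the done signal are illustrative, not taken from the gateware:

from migen import Module, Signal
from misoc.interconnect.csr import AutoCSR, CSRStatus

class InitStatus(Module, AutoCSR):
    # Illustrative only: expose an internal "done" flag as a read-only CSR.
    def __init__(self):
        self.phaligndone = CSRStatus()   # polled by software over the CSR bus
        self.done = Signal()             # would be driven by the transceiver init FSM

        # combinatorial mirror, same shape as the DEBUG block above
        self.comb += self.phaligndone.status.eq(self.done)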
@@ -340,62 +338,61 @@ class CXP_Frame_Pipeline(Module, AutoCSR):
        cdr = ClockDomainsRenamer("cxp_gtx_rx")

        self.submodules.pixel_pipeline = pixel_pipeline = cdr(Pixel_Pipeline(res_width, count_width))
        # self.submodules.pixel_pipeline = pixel_pipeline = cdr(Pixel_Pipeline(res_width, count_width))

        # RTIO interface
        # # RTIO interface
        n = 0
        cfg = pixel_pipeline.roi.cfg
        for offset, target in enumerate([cfg.x0, cfg.y0, cfg.x1, cfg.y1]):
            roi_boundary = Signal.like(target)
            self.sync.rio += If(self.config.o.stb & (self.config.o.address == 4*n+offset),
                roi_boundary.eq(self.config.o.data))
            self.specials += MultiReg(roi_boundary, target, "cxp_gtx_rx")
        # n = 0
        # cfg = pixel_pipeline.roi.cfg
        # for offset, target in enumerate([cfg.x0, cfg.y0, cfg.x1, cfg.y1]):
        # roi_boundary = Signal.like(target)
        # self.sync.rio += If(self.config.o.stb & (self.config.o.address == 4*n+offset),
        # roi_boundary.eq(self.config.o.data))
        # self.specials += MultiReg(roi_boundary, target, "cxp_gtx_rx")

        roi_out = pixel_pipeline.roi.out
        update = Signal()
        self.submodules.ps = ps = PulseSynchronizer("cxp_gtx_rx", "sys")
        self.sync.cxp_gtx_rx += ps.i.eq(roi_out.update)
        self.sync += update.eq(ps.o)
        # roi_out = pixel_pipeline.roi.out
        # update = Signal()
        # self.submodules.ps = ps = PulseSynchronizer("cxp_gtx_rx", "sys")
        # self.sync.cxp_gtx_rx += ps.i.eq(roi_out.update)
        # self.sync += update.eq(ps.o)

        sentinel = 2**count_width
        count_sys = Signal.like(roi_out.count)
        # count_rx = Signal.like(roi_out.count)
        # self.sync.cxp_gtx_rx += count_rx.eq(roi_out.count),
        # self.specials += MultiReg(count_rx, count_sys),
        # sentinel = 2**count_width
        # count_sys = Signal.like(roi_out.count)
        # # count_rx = Signal.like(roi_out.count)
        # # self.sync.cxp_gtx_rx += count_rx.eq(roi_out.count),
        # # self.specials += MultiReg(count_rx, count_sys),

        self.specials += MultiReg(roi_out.count, count_sys),
        self.sync.rio += [
            self.gate_data.i.stb.eq(update),
            self.gate_data.i.data.eq(count_sys),
        ]
        # self.specials += MultiReg(roi_out.count, count_sys),
        # self.sync.rio += [
        # self.gate_data.i.stb.eq(update),
        # self.gate_data.i.data.eq(count_sys),
        # ]

        # crc_checker = cdr(CXPCRC32_Checker())
        # DEBUG:
        crc_checker = cdr(CXPCRC32_Checker())

        # # TODO: handle full buffer gracefully
        # # TODO: investigate why there is a heartbeat message in the middle of the frame with k27.7 code too???
        # # NOTE: sometimes there are 0xFBFBFBFB K=0b1111
        # # perhaps the buffer is full overflowing and doing strange stuff
        # TODO: handle full buffer gracefully
        # TODO: investigate why there is a heartbeat message in the middle of the frame with k27.7 code too???
        # NOTE: sometimes there are 0xFBFBFBFB K=0b1111
        # perhaps the buffer is full overflowing and doing strange stuff

        # # it should be mem block not "cycle buffer"
        # # self.submodules.dropper = dropper = cdr(DChar_Dropper())
        # buffer = cdr(Buffer(word_layout_dchar)) # crcchecker timinig is bad
        # buffer_cdc_fifo = cdr(Buffer(word_layout_dchar)) # to improve timing
        # cdc_fifo = stream.AsyncFIFO(word_layout_dchar, 2**log2_int(packet_size//word_width))
        # self.submodules += buffer, crc_checker, buffer_cdc_fifo
        # self.submodules += ClockDomainsRenamer({"write": "cxp_gtx_rx", "read": "sys"})(cdc_fifo)
        # it should be mem block not "cycle buffer"
        # self.submodules.dropper = dropper = cdr(DChar_Dropper())
        buffer = cdr(Buffer(word_layout_dchar)) # crcchecker timinig is bad
        buffer_cdc_fifo = cdr(Buffer(word_layout_dchar)) # to improve timing
        cdc_fifo = stream.AsyncFIFO(word_layout_dchar, 2**log2_int(packet_size//word_width))
        self.submodules += buffer, crc_checker, buffer_cdc_fifo
        self.submodules += ClockDomainsRenamer({"write": "cxp_gtx_rx", "read": "sys"})(cdc_fifo)

        # pipeline = [buffer, crc_checker, buffer_cdc_fifo, cdc_fifo]
        # for s, d in zip(pipeline, pipeline[1:]):
        # self.comb += s.source.connect(d.sink)
        # framebuffers.append(pipeline[0])
        pipeline = [buffer, crc_checker, buffer_cdc_fifo, cdc_fifo]
        for s, d in zip(pipeline, pipeline[1:]):
            self.comb += s.source.connect(d.sink)

        # # DEBUG:
        # self.submodules.debug_out = debug_out = RX_Debug_Buffer(word_layout_dchar, 2**log2_int(packet_size//word_width))
        # self.comb += pipeline[-1].source.connect(debug_out.sink)
        self.submodules.debug_out = debug_out = RX_Debug_Buffer(word_layout_dchar, 2**log2_int(packet_size//word_width))
        self.comb += pipeline[-1].source.connect(debug_out.sink)
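For reference, the RTIO path toggled in this hunk crosses two things from the cxp_gtx_rx domain into the sys/rio domain: the single-cycle ROI update strobe through a PulseSynchronizer, and the multi-bit count through MultiReg, while the stream data itself crosses through the AsyncFIFO wrapped in ClockDomainsRenamer. A minimal, self-contained sketch of the strobe-plus-value handshake, with made-up domain and signal names ("rx", count_rx, update_rx) and assuming the count is held stable until the synchronized strobe has been consumed:

from migen import Module, Signal
from migen.genlib.cdc import MultiReg, PulseSynchronizer

class CountCDC(Module):
    # Illustrative only: carry a counter value and its one-cycle "update"
    # strobe from an "rx" clock domain into "sys".
    def __init__(self, width=31):
        self.count_rx = Signal(width)    # driven in the rx domain
        self.update_rx = Signal()        # one-cycle strobe in the rx domain
        self.count_sys = Signal(width)   # synchronized copy in sys
        self.update_sys = Signal()       # one-cycle strobe in sys

        # strobe: PulseSynchronizer converts one rx-domain pulse into one sys-domain pulse
        self.submodules.ps = ps = PulseSynchronizer("rx", "sys")
        self.sync.rx += ps.i.eq(self.update_rx)
        self.sync += self.update_sys.eq(ps.o)

        # value: MultiReg double-flops the bus; this is only safe because the
        # source holds count_rx steady until update_sys has been consumed
        self.specials += MultiReg(self.count_rx, self.count_sys)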
@@ -417,7 +414,9 @@ class CXP_Frame_Pipeline(Module, AutoCSR):
            self.comb += d.source.connect(arbiter.sinks[i])

        self.comb += arbiter.source.connect(broadcaster.sink)
        self.comb += broadcaster.sources[0].connect(pixel_pipeline.sink),
        # self.comb += broadcaster.sources[0].connect(pixel_pipeline.sink),
        # DEBUG
        self.comb += broadcaster.sources[0].connect(pipeline[0].sink),

        # Control interface
        # only the simple topology MASTER:ch0, extension:ch1,2,3 is supported right now
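The routing above is ordinary Migen stream plumbing: each stage's source is connected to the next stage's sink, and broadcaster.sources[0] is pointed at either the pixel pipeline or the debug capture pipeline. A sketch of the chaining idiom behind the pipeline list, using a hypothetical helper name (chain) and placeholder stages:

def chain(stages):
    # Illustrative helper: connect stage[n].source -> stage[n+1].sink for a
    # list of stream modules and return the resulting comb statements.
    stmts = []
    for s, d in zip(stages, stages[1:]):
        stmts += s.source.connect(d.sink)
    return stmts

# Usage inside a Module, with three hypothetical stages a, b, c:
#     self.submodules += a, b, c
#     self.comb += chain([a, b, c])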