From: Gerhard Sittig
Date: Mon, 3 Oct 2022 16:29:46 +0000 (+0200)
Subject: parallel: flush accumulated data when input data is exhausted
X-Git-Url: https://sigrok.org/gitweb/?p=libsigrokdecode.git;a=commitdiff_plain;h=e303fd42917f4e15a06c9da5912344c8566fffb0

parallel: flush accumulated data when input data is exhausted

The "parallel" decoder buffers the currently seen data pattern, and
defers annotation emission until the end position is known. This is
why the last data pattern would not show up in the decoder's output.
See bug #292 and its duplicates for examples and concerns.

Catch the EOFError exception, and flush previously accumulated data.
It remains to be determined whether a warning annotation is due. Most
probably not for "parallel", which merely visualizes data line states.
But other decoders which have the concept of frames shall NOT follow
this "parallel" decoder's naive approach and claim that a frame had
completed although its end condition was never seen. Add a developer
TODO comment to raise awareness.
---

diff --git a/decoders/parallel/pd.py b/decoders/parallel/pd.py
index 96741e7..1e31208 100644
--- a/decoders/parallel/pd.py
+++ b/decoders/parallel/pd.py
@@ -257,7 +257,10 @@ class Decoder(srd.Decoder):
         # This results in robust operation for low-oversampled input.
         in_reset = False
         while True:
-            pins = self.wait(conds)
+            try:
+                pins = self.wait(conds)
+            except EOFError as e:
+                break
             clock_edge = cond_idx_clock is not None and self.matched[cond_idx_clock]
             data_edge = cond_idx_data_0 is not None and [idx for idx in range(cond_idx_data_0, cond_idx_data_N) if self.matched[idx]]
             reset_edge = cond_idx_reset is not None and self.matched[cond_idx_reset]
@@ -275,3 +278,8 @@ class Decoder(srd.Decoder):
             data_bits = data_bits[:num_item_bits]
             item = bitpack(data_bits)
             self.handle_bits(self.samplenum, item, num_item_bits)
+
+        self.handle_bits(self.samplenum, None, num_item_bits)
+        # TODO Determine whether a WARN annotation needs to get emitted.
+        # The decoder has not seen the end of the last accumulated item.
+        # Instead it just ran out of input data.
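
For context, here is a minimal standalone Python sketch of the flush-on-EOF
pattern that the patch applies. It does not use the actual libsigrokdecode
API: the SampleSource class, its wait() method, and the decode() helper are
illustrative stand-ins only, modelling how wait() raising EOFError at the
end of the capture lets the loop exit and emit the still-buffered item.

# Minimal standalone sketch of the flush-on-EOF pattern. SampleSource and
# decode() are hypothetical stand-ins, not the libsigrokdecode API.
class SampleSource:
    """Yields samples, then signals end of input the way srd's wait() does."""
    def __init__(self, samples):
        self._iter = iter(samples)

    def wait(self):
        try:
            return next(self._iter)
        except StopIteration:
            # libsigrokdecode raises EOFError when input data is exhausted.
            raise EOFError('no more input data')

def decode(source):
    pending = None          # last accumulated item, end position not yet known
    emitted = []
    while True:
        try:
            sample = source.wait()
        except EOFError:
            break           # input exhausted, leave the loop and flush below
        if pending is not None:
            emitted.append(pending)   # end position now known, emit the item
        pending = sample              # start accumulating the next item
    # Flush the item that was still pending when the input ran out. A
    # frame-oriented decoder would additionally consider a warning annotation
    # here, since the item's end condition was never actually seen.
    if pending is not None:
        emitted.append(pending)
    return emitted

if __name__ == '__main__':
    print(decode(SampleSource([0x10, 0x20, 0x30])))   # prints [16, 32, 48]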