
Improve throughput by reading data in larger chunks

Once a first byte is received, read whatever bytes are immediately
available in larger chunks before going back to single-byte reads.
This prevents dropped data when the host is sending quickly.
Jim Paris, 3 years ago (branch: master, commit: b3399e6134)
1 changed file with 21 additions and 3 deletions
itm-decode.py

@@ -69,6 +69,7 @@ class ITMParser:
             except TimeoutException as e:
                 # Timeout inside a parser should be reported.
                 print(color("red", "Timeout"))
+                break
             except ResyncException as e:
                 print(color("red", "Resync"))

@@ -153,15 +154,32 @@ def main(argv):
     args = parser.parse_args()
 
     ser = serial.Serial(args.device, args.baudrate)
-    ser.timeout = 1
 
     print(color('green', 'ready'))
 
     def input_stream():
         while True:
+            # Read one byte with a 1s timeout.
+            ser.timeout = 1
             data = ser.read(1)
             if len(data) == 0:
                 # Timeout
                 yield None
-            else:
-                yield data[0]
+                continue
+            yield data[0]
+
+            # Then read as many more as there are immediately
+            # available, and send them. This is more efficient than
+            # reading each individual byte, when they're coming in
+            # fast. Once they stop, we'll go back to the normal
+            # 1 byte read with timeout.
+            ser.timeout = 0
+            while True:
+                data = ser.read(65536)
+                if len(data) == 0:
+                    break
+                for c in data:
+                    yield c
 
     try:
         ITMParser(input_stream()).process()
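
For context, here is a minimal standalone sketch of the read pattern this commit introduces, assuming pyserial, a placeholder device path and baud rate (the real script takes these from command-line arguments), and a trivial consumer standing in for ITMParser:

import serial

ser = serial.Serial("/dev/ttyACM0", 115200)  # placeholder port and baud rate

def input_stream():
    while True:
        # Block for up to 1 second waiting for the first byte.
        ser.timeout = 1
        data = ser.read(1)
        if len(data) == 0:
            # Timed out: yield None so the consumer can see the gap.
            yield None
            continue
        yield data[0]

        # Drain whatever has already arrived without blocking,
        # then fall back to the single-byte read above.
        ser.timeout = 0
        while True:
            data = ser.read(65536)
            if len(data) == 0:
                break
            for c in data:
                yield c

# Trivial consumer in place of ITMParser: print each byte.
for b in input_stream():
    if b is None:
        print("timeout")
    else:
        print(hex(b))

With the timeout set to 0, pyserial's read() returns only the bytes already buffered rather than waiting, so the inner loop never stalls; the per-byte read overhead is paid only while the stream is idle.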

