added Sorter
@@ -4,5 +4,6 @@ from .bits import read_bit
from .bufjson import BufferedJSON
from .filehandler import FileHandler
from .randskip import randskip
from .sorter import Sorter

dap/utils/sorter.py (new file, 75 lines)
@@ -0,0 +1,75 @@
class Sorter:

    def __init__(self, window=1000):
        """
        window: max allowed gap in IDs before missing IDs are skipped
        """
        self.window = window
        self.buffer = {}    # ID -> data, held until it can be emitted in order
        self.max_ID = -1    # highest ID seen so far
        self.next_ID = 0    # smallest ID not yet emitted

    def add(self, ID, data):
        self.buffer[ID] = data
        self.max_ID = max(self.max_ID, ID)

    def flush_ready(self):
        res = self._collect_contiguous_IDs()
        self._skip_over_gap()
        self._drop_old_entries()
        return res

    def _collect_contiguous_IDs(self):
        # Emit buffered items in order for as long as there is no gap.
        res = []
        while self.next_ID in self.buffer:
            res.append(self.buffer.pop(self.next_ID))
            self.next_ID += 1
        return res

    def _skip_over_gap(self):
        # Give up on IDs that lag the newest one by more than the window,
        # e.g. with window=5 and max_ID=11, next_ID jumps to at least 6.
        self.next_ID = max(self.next_ID, self.max_ID - self.window)

    def _drop_old_entries(self):
        # Discard buffered items left behind next_ID after a skip.
        for i in list(self.buffer):
            if i < self.next_ID:
                del self.buffer[i]

if __name__ == "__main__":
    sorter = Sorter(window=5)

    data = [
        0,            # contiguous case
        2,            # gap at 1
        1,            # fills gap, triggers flush: 1, 2
                      # gap between 3 and 7
        8, 9, 10, 11,
        3,            # arrives too late
        12, 13, 14
    ]

    expected = (
        [0],
        [],
        [1, 2],
        [],
        [],
        [],
        [],
        [],
        [],
        [],
        [8, 9, 10, 11, 12, 13, 14]
    )

    for ID, exp in zip(data, expected):
        sorter.add(ID, ID)
        ready = sorter.flush_ready()
        assert exp == ready, f"{exp} != {ready}"
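
The demo happens to end with an empty buffer, but in general the stream can stop while items are still buffered behind a gap that never closes. The committed class has no end-of-stream drain; a minimal sketch of one a caller could add, assuming direct access to the attributes set in __init__ (this helper is not part of the commit):

def drain(sorter):
    # Hypothetical helper: emit everything still buffered, in ID order,
    # once no further items will arrive.
    leftover = [sorter.buffer[i] for i in sorted(sorter.buffer)]
    sorter.buffer.clear()
    sorter.next_ID = sorter.max_ID + 1
    return leftover

For the demo above, drain(sorter) would return [], since every ID through 14 was either emitted or dropped.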