further rework

- dump all cached values and errors every full hour
- finish all streams properly on exit
This commit is contained in:
2025-02-11 10:51:37 +01:00
parent 50f8c349ee
commit ce205f47a2
6 changed files with 386 additions and 296 deletions

View File

@ -4,6 +4,10 @@ import re
from select import select
class StreamDead(Exception):
    """raised when stream is dead

    Signals that a stream has finished for good; the owner catches this
    and removes the stream from its pool.
    """
def parse_uri(uri):
scheme, _, hostport = uri.rpartition('://')
scheme = scheme or 'tcp'
@ -22,6 +26,18 @@ class Base:
select_dict = {}
def short_hostname(host):
    """Return the shortened PSI host name for *host*.

    psi/lin/se special:
    - treat the case where -129129xxxxxx is appended to the host name

    :param host: an IP address or host name resolvable by reverse DNS
    :return: the shortened host name (unchanged when the pattern
        does not match)
    """
    # reverse-resolve to the canonical host name first
    host = socket.gethostbyaddr(host)[0]
    # strip the PSI domain and a possible -129129xxxxxx suffix, keeping
    # any other short suffix (group 2).
    # Fix: dots in '.psi.ch' are now escaped — previously '.' matched any
    # character, so unrelated names could match the pattern by accident.
    # NOTE(review): the alternative '(-[~.]*|)' only matches '-' followed
    # by tildes/dots, which looks suspicious — confirm the intended
    # suffix pattern.
    match = re.match(r'([^.-]+)(?:-129129\d{6}|(-[~.]*|))\.psi\.ch', host)
    if match:
        host = match.group(1) + (match.group(2) or '')
    return host
class Stream(Base):
_last_time = None
dead = False
@ -41,11 +57,13 @@ class Stream(Base):
self.timeout = timeout
self.socket = None
self.cache = {}
self.first_values = time.time()
self.errors = {}
self.start_time = time.time()
self.next_hour = (self.start_time // 3600 + 1) * 3600
self.generator = self.event_generator()
try:
self.connect()
self.init()
self.first_values = 0
except Exception as e:
print(self.uri, repr(e))
raise
@ -56,11 +74,7 @@ class Stream(Base):
self.settimeout(self.timeout)
host, _, port = self.uri.partition(':')
# try to convert uri to host name
host = socket.gethostbyaddr(host)[0]
# psi special: shorten name, in case a computer has several network connections
match = re.match(r'([^.-]+)(?:-129129\d{6}|(-[~.]*|)).psi.ch', host)
if match:
host = match.group(1) + (match.group(2) or '')
host = short_hostname(host)
self.tags['stream'] = f'{host}:{port}'
print(self.uri, '=', self.tags['stream'], 'connected')
self._buffer = []
@ -69,7 +83,7 @@ class Stream(Base):
self._pinged = False
def init(self):
pass
raise NotImplementedError
def ping(self):
    """Probe the connection to keep it alive.

    Abstract: concrete Stream subclasses must implement this.
    """
    raise NotImplementedError
@ -153,9 +167,7 @@ class Stream(Base):
received = self.socket.recv(NETBUFSIZE)
if not received:
raise ConnectionAbortedError('connection closed by other end')
except TimeoutError:
break
except BlockingIOError:
except (TimeoutError, BlockingIOError):
break
except Exception:
self._last_live = now
@ -174,8 +186,55 @@ class Stream(Base):
if len(received) < NETBUFSIZE and self.timeout == 0:
break
def event_generator(self):
    """Return a generator yielding (key, value, error, ts, tags) tuples.

    Abstract: implemented by concrete stream classes. The result is
    stored as ``self.generator`` and consumed by
    ``EventGenerator.get_events``.
    """
    raise NotImplementedError
class EventGenerator:
    """Mixin turning a stream's raw value generator into database events.

    Relies on attributes provided by the host class (see Stream):
    ``cache``, ``errors``, ``tags``, ``generator``, ``start_time``,
    ``next_hour``, ``dead`` and an ``event_generator()`` factory.

    Events are tuples ``(is_value, payload, ts, dbkey, kind, tags)``
    where kind is 'float' for values and 'error' for error markers.
    """

    def get_tags(self, key):
        """get tags for key"""
        raise NotImplementedError

    def finish_events(self, events, end_time):
        # close out every cached key: emit a final null value and an
        # explicit 'END' error marker, both stamped with end_time,
        # emptying the cache as we go
        for key in list(self.cache):
            self.cache.pop(key)
            dbkey = '.'.join(key)
            events.append((True, None, end_time, dbkey, 'float', self.tags))
            events.append((False, 'END', end_time, dbkey, 'error', self.tags))

    def get_events(self, events, maxevents):
        """get available events

        :param events: a list of events to be appended to
        :param maxevents: hint for max number of events to be joined
            there might be more after a full hour or when the stream is dying
        :return: True when maxevents is reached
        :raises StreamDead: when the stream has died (after appending
            its closing events)
        """
        for key, value, error, ts, tags in self.generator:
            # clamp timestamp into [start_time, now]; a falsy ts means "now"
            ts = max(self.start_time, min(ts or INF, time.time()))
            if ts >= self.next_hour:
                # a full hour has passed: dump all cached values and
                # errors once, stamped at the exact hour boundary
                ts_ = (ts // 3600) * 3600
                for key_, value_ in self.cache.items():
                    events.append((True, value_, ts_, '.'.join(key_), 'float', self.get_tags(key_)))
                for key_, error_ in self.errors.items():
                    events.append((False, error_, ts_, '.'.join(key_), 'error', self.get_tags(key_)))
                self.next_hour = ts_ + 3600
            if value != self.cache.get(key, None) or error != self.errors.get(key, None):
                # value or error changed: record the new value, and the
                # new error only when it is set and actually different
                dbkey = '.'.join(key)
                events.append((True, value, ts, dbkey, 'float', tags))
                self.cache[key] = value
                if error and self.errors.get(key) != error:
                    events.append((False, error, ts, dbkey, 'error', tags))
                    self.errors[key] = error
            elif len(events) >= maxevents:
                # nothing new for this key and enough events collected
                return True
        else:
            # generator exhausted normally (no early return):
            # either the stream died, or we restart the generator.
            # NOTE(review): indentation reconstructed — the elif above is
            # taken to pair with the outer 'if', and this 'else' with the
            # 'for'; confirm against the original file.
            if self.dead:
                # self.dead holds the time of death, used as end timestamp
                self.finish_events(events, self.dead)
                raise StreamDead()
            self.generator = self.event_generator()
        return False
class EventStream:
return_on_wait = False
# return_on_wait = True: stop generator when no more streams have buffered content
# note: a stream with buffered content might not be ready to emit any event, because
@ -189,31 +248,38 @@ class EventGenerator:
ready = select(Stream.select_dict, [], [], timeout)[0]
return [Stream.select_dict[f] for f in ready]
def gen(self):
def get_events(self, maxevents=20):
"""return events from all streams
:param maxevents: hint for max number of events to be joined
there might be more after a full hour or when the stream is dying
:return: list of events
wait for at least one event
"""
events = []
while 1:
for stream in self.wait_ready(1):
if not isinstance(stream, Stream):
for streamcls, uri, *args in stream.events():
if uri not in self.streams:
print('ADD STREAM', uri, *args)
print('add stream', uri, *args)
self.streams[uri] = streamcls(uri, *args)
for name, stream in self.streams.items():
if stream.dead:
print('REMOVE STREAM', name)
for key in stream.cache:
yield key, None, 'END', stream.dead, stream.tags
try:
if stream.get_events(events, maxevents):
return events
except StreamDead:
self.streams.pop(name)
break
for key, value, error, ts, tags in stream.events():
ts = max(stream.first_values, min(ts or INF, time.time()))
prev = stream.cache.get(key, None)
if (value, error) != prev:
yield key, value, error, ts, tags
stream.cache[key] = value, error
if self.return_on_wait and not self.wait_ready(0):
return
def __iter__(self):
return self.gen()
if events:
return events
if events:
return events
def finish(self):
    """Shut down every stream and return the final batch of events.

    Each stream is closed, then asked to append its closing events —
    all stamped with the same end time — to one shared list.
    """
    now = time.time()
    final_events = []
    for strm in self.streams.values():
        strm.close()
        strm.finish_events(final_events, now)
    return final_events