use proper logging, add signal handler for graceful termination

2025-05-23 13:52:42 +02:00
parent 04bf09ae5a
commit ef35d18f37
6 changed files with 52 additions and 39 deletions
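
The graceful-termination change in feeder.py below boils down to registering a SIGTERM handler that empties a mutable flag checked by the main loop. A minimal standalone sketch of that pattern (the loop body here is illustrative, not the actual feeder code):

    import signal
    import time

    running = [True]              # truthy while the list is non-empty

    def handler(signum, frame):
        # signal handlers are called with (signum, frame); emptying the
        # list makes the while-condition falsy on the next iteration
        running.clear()

    signal.signal(signal.SIGTERM, handler)

    while running:                # ends after e.g. `kill <pid>` sends SIGTERM
        time.sleep(0.1)           # stand-in for the event-processing loop
    print('gracefully finished')  # cleanup (flush, disconnect) would go here

A plain boolean would not work here without `nonlocal`, since the nested handler could not rebind it; the one-element list sidesteps that.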

View File

@@ -1,4 +1,6 @@
 import sys
+import signal
+import logging
 from os.path import expanduser
 sys.path.append(expanduser('~'))
 import socket
@@ -20,6 +22,9 @@ Usage:
     python feeder.py uri [device] [instrument]
 """
+logging.basicConfig(filename='logfile.log', filemode='w', level=logging.INFO,
+                    format='%(asctime)s %(levelname)s %(message)s')
+

 def main(dbname=None, access='write'):
     # egen = EventStream(ScanReply(), ScanStream(), n=NicosStream('localhost:14002'))
@@ -42,14 +47,21 @@ def main(dbname=None, access='write'):
         port = fm.info.get(ins, {}).get(service, {})
         if port:
             uri = f'{host}:{port}'
-            print('CREATE', uri, ins, cfginfo.get((ins, service)))
+            logging.info('CREATE %s %s %s', uri, ins, cfginfo.get((ins, service)))
             TrySecopConnect(uri)
             db.set_instrument(uri, ins)
     event_map = {'value': db.add_float, 'error': db.add_error, 'stream': db.add_stream}
+    running = [True]
+
+    def handler(signum, frame):
+        running.clear()
+
+    signal.signal(signal.SIGTERM, handler)
+
     try:
-        while 1:
+        while running:
             for kind, *args in egen.get_events():
                 event_map[kind](*args)
             db.flush()
@@ -57,6 +69,7 @@ def main(dbname=None, access='write'):
     for kind, *args in egen.finish():
         event_map[kind](*args)
     db.disconnect()
+    logging.info('gracefully finished')


 if len(sys.argv) >= 3:

View File

@@ -21,6 +21,7 @@
 # *****************************************************************************
 import re
 import time
+import logging
 from datetime import datetime, timezone
 from math import floor, ceil
@@ -192,8 +193,7 @@ class InfluxDBWrapper:
         self.add_new_bucket(self._bucket, access == 'create')
         self._write_buffer = []
         self._alias = {}
-        print('InfluxDBWrapper', self._url, self._org, self._bucket)
-        self.debug = False
+        logging.info('InfluxDBWrapper %s %s %s', self._url, self._org, self._bucket)

     def enable_write_access(self):
         self._write_api_write = self._client.write_api(write_options=SYNCHRONOUS).write
@@ -356,22 +356,21 @@ class InfluxDBWrapper:
         msg.append(f'''|> keep(columns:["{'","'.join(columns + keylist)}"])''')
         msg = '\n'.join(msg)
-        if self.debug:
-            print(msg)
+        logging.debug('MSG %r', msg)
         self.msg = msg
         try:
             reader = self._client.query_api().query_csv(msg)
-        except Exception:
-            print(msg)
+        except Exception as e:
+            logging.exception("error in query: %r", msg)
             raise
-        if self.debug:
-            def readdebug(reader):
-                for row in reader:
-                    print(row)
-                    yield row
-            reader = readdebug(reader)
+        # if self.debug:
+        #     def readdebug(reader):
+        #         for row in reader:
+        #             print(row)
+        #             yield row
+        #     reader = readdebug(reader)
         try:
             row = next(reader)
         except StopIteration:
@@ -416,7 +415,7 @@ class InfluxDBWrapper:
             # consume unused rows
             consumed = sum(1 for _ in rows)
             if consumed:
-                print('skip', consumed, 'rows')
+                logging.info('skip %r rows', consumed)
             if not row:  # reader is at end
                 return
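
The print-to-logging conversion above (and in the files that follow) leans on two logging idioms: %-style lazy formatting, where the format string and its arguments are passed separately so interpolation only happens if the record is actually emitted, and logging.exception(), which logs at ERROR level and appends the current traceback. A minimal sketch under the same basicConfig as in feeder.py (the run_query function and its messages are illustrative, not part of this repository):

    import logging

    logging.basicConfig(filename='logfile.log', filemode='w', level=logging.INFO,
                        format='%(asctime)s %(levelname)s %(message)s')

    def run_query(msg):
        # lazy %-formatting: no f-string; with level=INFO this record is
        # suppressed and %r is never even evaluated
        logging.debug('MSG %r', msg)
        try:
            raise ValueError('simulated query failure')   # stand-in for query_csv(msg)
        except Exception:
            # inside an except block, logging.exception() also records the traceback
            logging.exception('error in query: %r', msg)
            raise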

View File

@@ -1,4 +1,5 @@
 import re
+import logging
 from ast import literal_eval
 from streams import Stream
 from secop import EnumConvert
@@ -154,7 +155,7 @@ class NicosStream(Stream):
             if self._init:
                 raise TimeoutError('timeout receiving initial values')
         except Exception as e:
-            print(self.uri, repr(e))
+            logging.exception('nicos %r', self.uri)
             return
         for ts, devname, param, op, value in sorted([t, d, p, o, v] for (d, p), (o, v, t) in events.items()):
             descr = self.descr.get(devname)

View File

@@ -3,6 +3,7 @@ import os
 import json
 import time
 import socket
+import logging
 from collections import namedtuple
 from select import select
 from streams import Stream, Base, StreamDead
@@ -112,7 +113,7 @@ class SecopStream(Stream):
         except Exception as e:
             # probably other end closed
-            print(self.uri, repr(e))
+            logging.info('%r on %s', e, self.uri)


 SECOP_UDP_PORT = 10767
@@ -154,7 +155,7 @@ class ScanReply(UdpStream):
             sock.sendto(json.dumps(dict(SECoP='discover')).encode('utf-8'),
                         ('255.255.255.255', SECOP_UDP_PORT))
         except OSError as e:
-            print('could not send the broadcast:', e)
+            logging.info('could not send the broadcast: %r', e)
         self.socket = sock
         self.select_read[sock.fileno()] = self
@@ -193,12 +194,11 @@ class TrySecopConnect(Base):
             try:
                 self.socket.sendall(b'*IDN?\n')
                 self.idn_sent = True
-                print('SEND IDN', self.uri)
+                logging.debug('SEND IDN %s', self.uri)
                 self.select_read[self.fno] = self
                 return
             except Exception as e:
-                print('NO CONN TO', self.uri)
-                print(e)
+                logging.info('NO CONN TO %s %r', self.uri, e)
         else:
             reply = b''
             try:
@@ -206,12 +206,12 @@ class TrySecopConnect(Base):
                 if chunk:
                     self.idn += chunk
                     if b'SECoP' in self.idn:
-                        print('CONN TO', self.uri)
+                        logging.info('connected to %s', self.uri)
                         yield SecopStream, self.uri, {'stream': self.uri}
                     if b'\n' not in self.idn:
                         return
-            except Exception as e:
-                print(e)
+            except Exception:
+                logging.exception('receiving')
             self.select_read.pop(self.fno)

View File

@@ -1,4 +1,5 @@
 import time
+import logging
 from pathlib import Path
 from configparser import ConfigParser
 from sehistory.influx import InfluxDBWrapper, abs_range, round_range, Table
@@ -400,7 +401,7 @@ class SEHistory(InfluxDBWrapper):
             if prevts is not None and (previns is None or (ts or ETERNITY) < prevts):
                 ts = prevts + 0.001
         except Exception as e:
-            print(f'Exception in get_instrument {e!r}')
+            logging.warning('Exception in get_instrument: %r', e)
         tags['stream'] = stream
         if flag:
             tags['instrument'] = instrument
@@ -418,7 +419,6 @@
         if flag:
             addtags = {k: v for k, v in table.tags.items()
                        if k not in {'instrument', '_measurement', '_field'}}
-            print(ts, addtags)
             self._add_point('_stream_', 'on', False, ts + 0.001,
                             addtags)
         self.flush()

View File

@@ -1,6 +1,7 @@
 import socket
 import time
 import re
+import logging
 from select import select
@@ -27,10 +28,10 @@ class Base:
     select_write = {}

     def close(self):
-        print('CLOSE BASE')
+        logging.info('CLOSE BASE')

     def finish_events(self, *args):
-        print('FINISH BASE')
+        logging.info('FINISH BASE')


 def short_hostname(host):
@@ -74,7 +75,7 @@ class Stream(Base):
             self.connect()
             self.init(**kwds)
         except Exception as e:
-            print('FAIL', self.uri, repr(e))
+            logging.info('failed connecting %s %r', self.uri, e)
             raise

     def connect(self):
@@ -84,7 +85,7 @@
         host, _, port = self.uri.partition(':')
         # try to convert uri to host name
         self.uri = self.tags['stream'] = f'{short_hostname(host)}:{port}'
-        print(f'{host}:{port}', '=', self.uri, 'connected')
+        logging.info('connected %s:%s = %s', host, port, self.uri)
         self._buffer = []
         self._deadline = INF
         self._next_connect = 0
@@ -104,7 +105,7 @@
         if self.socket is None:
             return
         self.select_read.pop(self.socket.fileno(), None)
-        print(self.uri, 'close socket')
+        logging.info('close socket %s', self.uri)
         try:
             self.socket.shutdown(socket.SHUT_RDWR)
         except socket.error:
@@ -130,10 +131,9 @@
                 self.dead = min(now, self._last_live + 1)
                 return True
             if self._deadline == INF:
-                print(f'error "{e}" connecting to {self.uri} retrying for {self.max_offline} sec')
+                logging.info('error %r connecting to %s retrying for %s sec',
+                             e, self.uri, self.max_offline)
                 self._deadline = now + self.max_offline
-            else:
-                print('.', end='', flush=True)
             self._next_connect = now + 0.5
             return True
         return False
@@ -163,7 +163,7 @@
         now = time.time()
         if now > self._next_ping:
             if self._next_ping == self._ping_deadline:
-                print(self.uri, 'no pong')
+                logging.info('no pong from %s', self.uri)
                 self.close()
                 return
             self.ping()
@@ -280,13 +280,13 @@ class EventStream:
             stream = self.streams.get(uri)
             if stream:
                 stream.tags.update(kwargs)
-                print('update stream', uri, kwargs)
+                logging.info('update stream %s %r', uri, kwargs)
             else:
                 try:
                     self.streams[uri] = stream = streamcls(uri, **kwargs)
-                    print('added stream', uri, kwargs)
+                    logging.info('added stream %s %r', uri, kwargs)
                 except Exception as e:
-                    print('can not connect to', uri, repr(e), streamcls)
+                    logging.warning('can not connect to %s %r %r', uri, e, streamcls)
                     continue
             device = stream.tags.get('device')
             events.append(('stream', kwargs.get('instrument', ''),