add streams.py

was forgotten initially
2025-01-16 12:56:31 +01:00
parent fc679cee21
commit 50f8c349ee
2 changed files with 264 additions and 16 deletions


@@ -21,6 +21,8 @@
# *****************************************************************************
import re
import time
import threading
import queue
from math import floor, ceil, copysign
from datetime import datetime
import pandas as pd
@@ -44,7 +46,11 @@ def negnull2none(v):
class RegExp(str):
    pass
    """indicates that this string should be treated as a regexp
    Usage: RegExp(<pattern>)
    when used in InfluxDBWrapper.query, uses Go regexp syntax!
    """
def wildcard_filter(key, names):
@@ -135,10 +141,11 @@ class InfluxDBWrapper:
    based on nicos.services.cache.database.influxdb
    (work of Konstantin Kholostov <k.kholostov@fz-juelich.de>)
    """
    _update_queue = None
    _write_thread = None

    def __init__(self, url, token, org, bucket, create=False, watch_interval=300):
    def __init__(self, url, token, org, bucket, threaded=False, create=False, watch_interval=300):
        self._watch_interval = watch_interval
        self._update_queue = []
        self._url = url
        self._token = token
        self._org = org
@@ -150,6 +157,34 @@
        self._active_streams = {}
        self.set_time_precision(3)
        self.add_new_bucket(self._bucket, create)
        if threaded:
            self._wait_complete = None
            self._update_queue = queue.Queue(100)
            # background worker thread writing queued points in batches
            self._write_thread = threading.Thread(target=self._write_worker, daemon=True)
            self._write_thread.start()

    def _write(self, point):
        if self._update_queue:
            self._update_queue.put(point)
        else:
            self._write_api_write(bucket=self._bucket, record=[point])

    def _write_worker(self):
        while 1:
            # block for the first point, then drain whatever else is queued
            points = [self._update_queue.get()]
            try:
                while 1:
                    points.append(self._update_queue.get(False))
            except queue.Empty:
                pass
            # None is only used as a wake-up sentinel by flush()
            points = [p for p in points if p is not None]
            if points:
                self._write_api_write(bucket=self._bucket, record=points)
            event = self._wait_complete
            if event:
                self._wait_complete = None
                event.set()

    def flush(self):
        if self._write_thread:
            self._wait_complete = event = threading.Event()
            # wake the worker in case the queue is currently empty
            self._update_queue.put(None)
            event.wait()
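    # Illustrative sketch (names assumed, not from this commit): with
    # threaded=True, points are queued and written in batches by the
    # background thread, and flush() blocks until the queue has been drained.
    # The constructor arguments and the measurement/field names are made up:
    #
    #     db = InfluxDBWrapper(url, token, org, bucket, threaded=True)
    #     db.write_float('temperature', 'value', 1.23, time.time())
    #     db.flush()  # wait until the point has reached the server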
    def set_time_precision(self, digits):
        self.timedig = max(0, min(digits, 9))
@@ -159,10 +194,12 @@
        return round(timevalue.timestamp(), self.timedig)
    def disconnect(self):
        if self._write_thread:
            for _ in range(10):
                self._write_thread.join(1)
        for stream, last in self._active_streams.items():
            self._update_queue.append(Point('_streams_')
                .time(last, write_precision=self._write_precision)
                .field('interval', 0).tag('stream', stream))
            self._write(Point('_streams_')
                .time(last, write_precision=self._write_precision)
                .field('interval', 0).tag('stream', stream))
        self.flush()
        self._client.close()
@@ -199,12 +236,6 @@
        self.get_measurements(meas)
        print('deleted', all)
    def flush(self):
        points = self._update_queue
        if points:
            self._update_queue = []
            self._write_api_write(bucket=self._bucket, record=points)
    def query(self, start=None, stop=None, interval=None, last=False, **tags):
        """Returns queried data as InfluxDB tables
@@ -310,7 +341,7 @@
        if ts > self._deadline:
            dl = ts // self._watch_interval * self._watch_interval
            for stream, last in self._active_streams.items():
                self._update_queue.append(
                self._write(
                    Point('_streams_')
                    .time(datetime.utcfromtimestamp(last), write_precision=self._write_precision)
                    .field('interval', self._watch_interval).tag('stream', stream))
@@ -321,9 +352,7 @@
        point.time(datetime.utcfromtimestamp(ts), write_precision=self._write_precision)
        for key, val in tags.items():
            point.tag(key, val)
        self._update_queue.append(point)
        if len(self._update_queue) > 0:  # 100
            self.flush()
        self._write(point)
    def write_float(self, measurement, field, value, ts, **tags):
        # make sure value is float