Added number of retries.
This commit is contained in:
@@ -8,6 +8,9 @@ from sfdata import SFDataFiles, SFScanInfo
|
||||
|
||||
from partialjson.json_parser import JSONParser
|
||||
|
||||
from loguru import logger
|
||||
|
||||
MAX_STEP_WAIT = 300 # Maximum wait time in seconds for files to appear
|
||||
|
||||
class UnfinishedScanInfo(SFScanInfo):
|
||||
"""
|
||||
@@ -69,18 +72,26 @@ class UnfinishedScanInfo(SFScanInfo):
|
||||
yielded_count = 0
|
||||
|
||||
while True:
|
||||
retries = 0
|
||||
self._parse_partial_json(self.fname)
|
||||
|
||||
# Check if we have new files to yield
|
||||
while self.files and len(self.files) > yielded_count:
|
||||
|
||||
fns = self.files[yielded_count]
|
||||
|
||||
|
||||
if not files_available_on_disk(fns):
|
||||
time.sleep(self.refresh_interval)
|
||||
continue # Wait and recheck
|
||||
retries += 1
|
||||
if (retries * self.refresh_interval) < MAX_STEP_WAIT: # Wait up to 5 minutes for files to appear
|
||||
continue # Wait and recheck
|
||||
else:
|
||||
logger.error(f"Timeout waiting for files to become available for step {yielded_count} {fns}")
|
||||
# we still yield the remaining files to avoid infinite loop, but log an error and leave it to the caller to handle missing data
|
||||
|
||||
yielded_count += 1
|
||||
|
||||
retries = 0
|
||||
|
||||
try:
|
||||
with SFDataFiles(*fns) as data:
|
||||
yield data
|
||||
@@ -117,6 +128,8 @@ def is_finished_scan(fname):
|
||||
|
||||
def files_available_on_disk(fnames):
    """Check if all files for this step are available on disk and contain some data.

    Parameters
    ----------
    fnames : iterable of str or Path
        File names of the step to check.

    Returns
    -------
    bool
        True if every file exists and has a size greater than zero
        (vacuously True for an empty *fnames*), False otherwise.
    """
    # fnames = [fn for fn in fnames if not "PVDATA" in fn] # PVDATA files are not written to disk at the moment!
    # logger.debug(f"Skipping PVDATA files for availability check as a workaround!")
    try:
        # Single stat() per file (EAFP). The previous exists()-then-stat()
        # two-pass version had a TOCTOU race: a file disappearing between
        # the two passes raised FileNotFoundError instead of returning False.
        return all(Path(fn).stat().st_size > 0 for fn in fnames)
    except OSError:
        # Missing (or otherwise unstat-able) file counts as "not available".
        return False
|
||||
Reference in New Issue
Block a user