n_jobs dynamically chosen from the cpu count

2022-10-19 16:57:13 +02:00
parent 8f99c0fa12
commit c1c2472f7d


@@ -6,7 +6,7 @@ logger = logging.getLogger()
 import numpy as np
 from sfdata import SFDataFiles, sfdatafile, SFScanInfo, SFProcFile
 from xraydb import material_mu
-from joblib import Parallel, delayed
+from joblib import Parallel, delayed, cpu_count
 def scan_info(run_number,base_path=None,small_data=True):
     """Returns SFScanInfo object for a given run number.
@@ -68,7 +68,7 @@ def print_run_info(
         break
-def process_run(run_number, rois,detector='JF16T03V01', roi_img=True, calculate =None, only_shots=slice(None), n_jobs=12):
+def process_run(run_number, rois,detector='JF16T03V01', roi_img=True, calculate =None, only_shots=slice(None), n_jobs=cpu_count()):
     """Process rois for a given detector. Save the results small data in the res/small_data/run...
     By default only sum of rois is calculated, [mean,std,img] can be added to the "calculate" optional parameter.
     """