n_jobs dynamically chosen from the cpu count
@@ -6,7 +6,7 @@ logger = logging.getLogger()
 import numpy as np
 from sfdata import SFDataFiles, sfdatafile, SFScanInfo, SFProcFile
 from xraydb import material_mu
-from joblib import Parallel, delayed
+from joblib import Parallel, delayed, cpu_count

 def scan_info(run_number,base_path=None,small_data=True):
     """Returns SFScanInfo object for a given run number.
@@ -68,7 +68,7 @@ def print_run_info(
         break


-def process_run(run_number, rois,detector='JF16T03V01', roi_img=True, calculate =None, only_shots=slice(None), n_jobs=12):
+def process_run(run_number, rois,detector='JF16T03V01', roi_img=True, calculate =None, only_shots=slice(None), n_jobs=cpu_count()):
     """Process rois for a given detector. Save the results small data in the res/small_data/run...
     By default only sum of rois is calculated, [mean,std,img] can be added to the "calculate" optional parameter.
     """
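For reference, the pattern behind this change is joblib's Parallel/delayed fan-out with the worker count taken from joblib.cpu_count(). The sketch below is illustrative only, not the repository's actual implementation; process_shot and shots are hypothetical stand-ins for the real per-shot ROI work done in process_run.

# Illustrative sketch only: an n_jobs value derived from cpu_count()
# feeding joblib's Parallel. `process_shot` and `shots` are hypothetical
# names, not identifiers from this repository.
from joblib import Parallel, delayed, cpu_count

def process_shot(shot):
    # placeholder per-shot work; the real code sums detector ROIs
    return shot * shot

def process_shots(shots, n_jobs=None):
    # resolve the worker count at call time, defaulting to all available cores
    if n_jobs is None:
        n_jobs = cpu_count()
    return Parallel(n_jobs=n_jobs)(delayed(process_shot)(s) for s in shots)

if __name__ == "__main__":
    print(process_shots(range(8)))

One design point worth noting: a default written as n_jobs=cpu_count() is evaluated once, when the module is imported, whereas joblib also accepts n_jobs=-1 to mean "use all cores", which defers that decision to each call.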