start of beamtime p21741

2024-05-07 11:29:52 +02:00
parent f9d036bb9e
commit a65e9070eb
8 changed files with 644 additions and 28 deletions

View File

@@ -23,7 +23,7 @@ N_UNDS = list(range(3, 15 + 1))
 # Cristallina without calibration
 # offset is the difference between PSSS and undulator setpoint
 # sign convention: Undulator - PSSS
-energy_offset = -59 # eV
+energy_offset = -104 # eV
 # move the PSSS motor according to the energy
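For reference, a minimal sketch of how the sign convention stated in the comments could be applied (the helper name and the example numbers are illustrative, not from the repository):

```python
# Sign convention from the comments above: offset = Undulator - PSSS (both in eV, assumed).
energy_offset = -104  # eV

def psss_energy_from_undulator(undulator_setpoint_ev: float) -> float:
    """Illustrative helper: expected PSSS reading for a given undulator setpoint."""
    return undulator_setpoint_ev - energy_offset

# With energy_offset = -104 eV, an undulator setpoint of 8896 eV corresponds to
# an expected PSSS reading of 9000 eV.
print(psss_energy_from_undulator(8896.0))  # -> 9000.0
```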

View File

@@ -9,7 +9,8 @@ from slic.core.acquisition.detcfg import DetectorConfig
 # TODO: JF settings regarding raw conversion, compression, etc.
 detectors = [
-    "JF16T03V01",
+    # "JF16T03V01",
+    "JF17T16V01",
 ]
 # ALLOWED_PARAMS = dict(
@@ -27,8 +28,8 @@ detectors = [
 # )
 detectors_with_config = DetectorConfig(detectors)
-#detectors_with_config["JF16T03V01"]['save_dap_results'] = True
-#detectors_with_config["JF16T03V01"]['remove_raw_files'] = True
+# detectors_with_config["JF16T03V01"]['save_dap_results'] = True
+# detectors_with_config["JF16T03V01"]['remove_raw_files'] = True
 # detectors_with_config["JF16T03V01"]['disabled_modules'] = [0, 1]  # bottom module:0, middle module:1, top module:2
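A hypothetical sketch following the commented-out pattern above, applied to the newly enabled JF17T16V01; whether these particular options are wanted for this detector is an assumption:

```python
# Assumed: the same per-detector keys shown for JF16T03V01 also apply here.
detectors_with_config["JF17T16V01"]['save_dap_results'] = True
detectors_with_config["JF17T16V01"]['remove_raw_files'] = False
# detectors_with_config["JF17T16V01"]['disabled_modules'] = []  # keep all modules active
```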

View File

@@ -175,6 +175,10 @@ from devices.diffractometer import Diffractometer
 diffractometer = Diffractometer("diffractometer")
 
+from devices.dilsc import Dilution
+dilution = Dilution()
+
 ################# Stand setup ##################
 # TODO: requires the stand client, need small howto how to start and configure or let it run all the time
@@ -193,10 +197,15 @@ spreadsheet = Spreadsheet(
         "TRXBASE": diffractometer.trx_base,
         "TRYBASE": diffractometer.try_base,
         "THETA": diffractometer.theta,
         "TWOTHETA": diffractometer.twotheta,
+        "Magnet_X": dilution.x,
+        "Magnet_Y": dilution.y,
+        "Magnet_Z": dilution.z,
+        "DilSc_T_plato": dilution.T_plato,
+        "DilSc_T_pucksensor": dilution.T_pucksensor,
     },
-    placeholders=("comment", "sample"),
+    placeholders=("comment", "sample", "run_usable"),
     host="saresc-vcons-02.psi.ch",
     port=9090,
 )
@@ -231,7 +240,7 @@ instrument = "cristallina"
 # pgroup = "p21238" # Cristallina photon diagnostics p-group with Chris
 # pgroup = "p21224" # SwissMX commisioning 7
-pgroup = "p19150" # Scratch
+# pgroup = "p19150" # Scratch
 # pgroup = "p19152" # Scratch
 # pgroup = "p20840" # Cr beamline commisioning (Jan-Feb 2023)
@@ -243,7 +252,11 @@ pgroup = "p19150" # Scratch
 # pgroup = "p21569" # Dil-Sc / diffractometer / tilted bunch / LiErF4 (/ TmVO4), November 17-
 # pgroup = "p21592" # HVE commissioning
-pgroup = "p21640" # Dil-Sc / diffractometer / LiErF4 : 4 March 2024
+# pgroup = "p21640" # Dil-Sc / diffractometer / LiErF4 : 4 March 2024
+# pgroup = "p21920" # Beamline commissioning 2024
+pgroup = "p21741" # CrQ - DilSc - SAXS LiHoF4
 
 # setup pgroup specific logger
 setup_logging_pgroup(pgroup)
@@ -255,6 +268,7 @@ daq = SFAcquisition(
     default_pvs=pvs,
     default_detectors=detectors,
     rate_multiplicator=1,
+    spreadsheet=spreadsheet,
 )
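The new spreadsheet columns map directly to the adjustables exposed by the Dilution device (see dilsc.py below). A minimal sketch of how those values could be read or set from the console, assuming the setup above has been loaded; the units in the comments are assumptions:

```python
import time

# `dilution` is the Dilution device instantiated above; each attribute is a
# slic Adjustable (MagnetCoil or Thermometer, see devices/dilsc.py).
print(dilution.T_pucksensor.get_current_value())  # puck sensor temperature (K, assumed)
print(dilution.z.get_current_value())             # z-coil field (T, assumed)

# Illustrative ramp of the z coil with a simple polling loop:
dilution.z.set_target_value(0.5)
while dilution.z.is_moving():
    time.sleep(1)
```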

View File

@@ -11,26 +11,60 @@ from frappy import states
 from frappy.datatypes import StatusType
 
-class DilSc(Device):
+class Dilution(Device):
     def __init__(self, **kwargs):
         self.name = 'DilSc'
         ID = self.name
         super().__init__(ID, **kwargs)
-        self.dilsc = SecopClient('dilsc.psi.ch:5000')
+        self.address = 'dilsc.psi.ch:5000'
+        self.dilsc = SecopClient(self.address)
         self.dilsc.connect()
-        self.x = MagnetCoil("Magnet_X", self.dilsc, 'x', limit_low=-0.6, limit_high=0.6)
-        self.y = MagnetCoil("Magnet_Y", self.dilsc, 'y', limit_low=-0.6, limit_high=0.6)
-        self.z = MagnetCoil("Magnet_Z", self.dilsc, 'z', limit_low=-5.2, limit_high=5.2)
+        self.x = MagnetCoil("X", self.dilsc, 'x', limit_low=-0.6, limit_high=0.6)
+        self.y = MagnetCoil("Y", self.dilsc, 'y', limit_low=-0.6, limit_high=0.6)
+        self.z = MagnetCoil("Z", self.dilsc, 'z', limit_low=-5.2, limit_high=5.2)
+        self.T_plato = Thermometer('T_plato', self.dilsc, limit_low=0, limit_high=300)
+        self.T_pucksensor = Thermometer('T_pucksensor', self.dilsc, limit_low=0, limit_high=300)
+
+
+class Thermometer(Adjustable):
+    def __init__(self, name, dilsc_connection, limit_low=-0.0001, limit_high=0.0001):
+        super().__init__(name, limit_low=limit_low, limit_high=limit_high)
+        self.dilsc = dilsc_connection
+
+    def _check_connection(func):
+        def checker(self, *args, **kwargs):
+            if not self.dilsc.online:
+                raise ConnectionError(f'No connection to dilsc at {self.address}')
+            else:
+                return func(self, *args, **kwargs)
+        return checker
+
+    @_check_connection
+    def get_current_value(self):
+        cacheitem = self.dilsc.getParameter(f'{self.name}', 'value', trycache=False)
+        return cacheitem.value
+
+    @_check_connection
+    def set_target_value(self, value):
+        self.dilsc.setParameter(f'{self.name}', 'target', value)
+
+    @_check_connection
+    def is_moving(self):
+        response = self.dilsc.getParameter(f'{self.name}', 'status', trycache=False)
+        return response[0][0] > StatusType.PREPARED
+
 
 class MagnetCoil(Adjustable):
     def __init__(self, name, dilsc_connection, direction, limit_low=-0.0001, limit_high=0.0001):
         # What's with name or ID?
         super().__init__(name, limit_low=-0.0001, limit_high=0.0001)
@@ -41,25 +75,26 @@ class MagnetCoil(Adjustable):
         self.dilsc = dilsc_connection
-        if not self.dilsc.online:
-            raise ConnectionError('No connection to dilsc.')
+
+    def _check_connection(func):
+        def checker(self, *args, **kwargs):
+            if not self.dilsc.online:
+                raise ConnectionError(f'No connection to dilsc at {self.address}')
+            else:
+                return func(self, *args, **kwargs)
+        return checker
 
+    @_check_connection
     def get_current_value(self):
-        if self.dilsc.online:
-            return self.dilsc.getParameter(f'mf{self.direction}', 'value',)
-        else:
-            raise ConnectionError('No connection to dilsc.')
+        cacheitem = self.dilsc.getParameter(f'mf{self.direction}', 'value', trycache=False)
+        return cacheitem.value
 
+    @_check_connection
     def set_target_value(self, value):
-        if self.dilsc.online:
-            self.dilsc.setParameter(f'mf{self.direction}', 'target', value)
-        else:
-            raise ConnectionError('No connection to dilsc.')
+        self.dilsc.setParameter(f'mf{self.direction}', 'target', value)
 
+    @_check_connection
     def is_moving(self):
         response = self.dilsc.getParameter(f'mf{self.direction}', 'status', trycache=False)
         return response[0][0] > StatusType.PREPARED
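The refactor above replaces the repeated inline `if self.dilsc.online:` checks with a small method decorator. A self-contained sketch of the same pattern outside the SECoP context (`FakeClient` is a stand-in for the SecopClient, purely for illustration):

```python
class FakeClient:
    online = False  # stand-in for SecopClient.online


class DemoDevice:
    def __init__(self):
        self.address = "dilsc.psi.ch:5000"
        self.dilsc = FakeClient()

    def _check_connection(func):
        # Wraps a method so it raises early when the client is offline.
        def checker(self, *args, **kwargs):
            if not self.dilsc.online:
                raise ConnectionError(f"No connection to dilsc at {self.address}")
            return func(self, *args, **kwargs)
        return checker

    @_check_connection
    def get_current_value(self):
        return 42


d = DemoDevice()
try:
    d.get_current_value()
except ConnectionError as e:
    print(e)                   # raised while FakeClient.online is False
d.dilsc.online = True
print(d.get_current_value())   # 42
```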

View File

@@ -0,0 +1,19 @@
from slic.core.acquisition import SFAcquisition

from .channels import detectors_with_config, detectors
from .channels import bs_channels_jf_direct_beam, bs_channels_pbps_snapshot
from .channels import pvs_cristallina, pvs_bernina, bs_channels_DCM_Bernina

instrument = "cristallina"
pgroup = "p21569"

acqui_bill = SFAcquisition(
    instrument,
    pgroup,
    default_channels=bs_channels_pbps_snapshot,
    default_pvs=pvs_cristallina,
    default_detectors=detectors,
    rate_multiplicator=1,
)

exp_temp/kb_focusing.py (new file, 319 lines)
View File

@@ -0,0 +1,319 @@
from cam_server import PipelineClient
from cam_server.utils import get_host_port_from_stream_address
from bsread import source, SUB
from epics import PV

import numpy as np
import time
import datetime
from pathlib import Path
import json

from loguru import logger

wait_time_benders = 1.0   # can probably be reduced as we wait for the move to finish
wait_time_aperture = 0.5  # can probably be reduced as we wait for the move to finish


def get_position_from_pipeline(pip_instance_id, data_field_name, n_pulses=1):
    pipeline_client = PipelineClient()
    stream_address = pipeline_client.get_instance_stream(pip_instance_id)
    stream_host, stream_port = get_host_port_from_stream_address(stream_address)

    with source(host=stream_host, port=stream_port, mode=SUB) as input_stream:
        sum_pos = 0
        for i in np.arange(n_pulses):
            input_stream.connect()
            message = input_stream.receive()
            pos = message.data.data[data_field_name].value
            sum_pos = sum_pos + pos
        mean_pos = sum_pos / n_pulses

    return mean_pos


def evaluate_bender_scan():
    """ Evaluation of data is in /sf/cristallina/applications/optic_tools/KBs
    """
    pass


def kbV_focusing_acquire(
    bender_us=[1.0, 1.2, 1.49, 1.54, 1.59],
    bender_ds=[1.1, 1.3, 1.5, 1.6, 1.79, 1.84],
    bender_us_start=1.1,
    bender_ds_start=1.33,
    aperture=[-0.3, 0, 0.3],
    aperture_width=0.15,
    aperture_height=1.2,
    n_pulses=1,
):
    """ Vertical KB mirror focusing acquisition with default parameters.
    """
    return kb_focusing_acquire(
        direction="vertical",
        bender_us=bender_us,
        bender_ds=bender_ds,
        bender_us_start=bender_us_start,
        bender_ds_start=bender_ds_start,
        aperture=aperture,
        aperture_size=aperture_width,
        aperture_size_pendicular=aperture_height,
        n_pulses=n_pulses,
    )


def kbH_focusing_acquire(
    bender_us=[1.55, 1.6, 1.7],
    bender_ds=[1.7, 1.8, 1.9],
    bender_us_start=1.2,  # should be 0.3 below the maximum focus
    bender_ds_start=1.5,  # should be 0.3 below the maximum focus
    aperture=[0.18, 0.48, 0.78],
    aperture_height=0.15,
    aperture_width=1.8,
    n_pulses=1,
):
    """ Horizontal KB mirror focusing acquisition with default parameters.
    """
    return kb_focusing_acquire(
        direction="horizontal",
        bender_us=bender_us,
        bender_ds=bender_ds,
        bender_us_start=bender_us_start,
        bender_ds_start=bender_ds_start,
        aperture=aperture,
        aperture_size=aperture_height,
        aperture_size_pendicular=aperture_width,
        n_pulses=n_pulses,
    )


def kb_focusing_acquire(
    direction="vertical",
    bender_us=[1.49, 1.54, 1.59],
    bender_ds=[1.79, 1.84, 1.89],
    bender_us_start=1.29,
    bender_ds_start=1.59,
    aperture=[-0.3, 0, 0.3],
    aperture_size=0.15,
    aperture_size_pendicular=1.2,
    n_pulses=1,
):
    """ KB mirror focusing acquisition routine for Cristallina.

    TODO: - split this up into separate routines
          - Make inner loop a generator, yielding: bender_us_rb, bender_ds_rb, beam_positions
            Input into live analysis.
    """
    # Benders
    if bender_us_start >= np.min(bender_us) - 0.3:
        bender_us_start = max(0, np.min(bender_us) - 0.3)
    if bender_ds_start >= np.min(bender_ds) - 0.3:
        bender_ds_start = max(0, np.min(bender_ds) - 0.3)

    bender_us = np.sort(bender_us)
    bender_ds = np.sort(bender_ds)

    KBV_NAME = "SAROP31-OKBV153"
    KBH_NAME = "SAROP31-OKBH154"

    if direction == "vertical":
        kb_name = KBV_NAME
    elif direction == "horizontal":
        kb_name = KBH_NAME

    BU = PV(kb_name + ":BU.VAL")
    BD = PV(kb_name + ":BD.VAL")
    # TODO: is the separation necessary?
    BU_RB = PV(kb_name + ":BU.VAL")
    BD_RB = PV(kb_name + ":BD.VAL")

    # Aperture
    aperture = np.sort(aperture)

    APU_NAME = "SAROP31-OAPU149"

    if direction == "vertical":
        APU_CENTER = PV(APU_NAME + ":MOTOR_Y.VAL")
        APU_CENTER_RB = PV(APU_NAME + ":MOTOR_Y.RBV")
        APU_SIZE = PV(APU_NAME + ":MOTOR_H.VAL")
        APU_SIZE_RB = PV(APU_NAME + ":MOTOR_H.RBV")
        APU_SIZE_PERPENDICULAR = PV(APU_NAME + ":MOTOR_W.VAL")
        APU_SIZE_PERPENDICULAR_RB = PV(APU_NAME + ":MOTOR_W.RBV")
        APU_CENTER_PERPENDICULAR = PV(APU_NAME + ":MOTOR_X.VAL")
        APU_CENTER_PERPENDICULAR_RB = PV(APU_NAME + ":MOTOR_X.RBV")

        # Camera field name
        data_field_name = "y_fit_mean"

    elif direction == "horizontal":
        APU_CENTER = PV(APU_NAME + ":MOTOR_X.VAL")
        APU_SIZE = PV(APU_NAME + ":MOTOR_W.VAL")
        APU_CENTER_RB = PV(APU_NAME + ":MOTOR_X.RBV")
        APU_SIZE_RB = PV(APU_NAME + ":MOTOR_W.RBV")
        APU_SIZE_PERPENDICULAR = PV(APU_NAME + ":MOTOR_H.VAL")
        APU_SIZE_PERPENDICULAR_RB = PV(APU_NAME + ":MOTOR_H.RBV")
        APU_CENTER_PERPENDICULAR = PV(APU_NAME + ":MOTOR_Y.VAL")
        APU_CENTER_PERPENDICULAR_RB = PV(APU_NAME + ":MOTOR_Y.RBV")

        # Camera field name
        data_field_name = "x_fit_mean"

    # Camera
    CAMERA_NAME = "SARES30-CAMS156-XE"
    pip_instance_id = CAMERA_NAME + "_sp"

    ### Acquisition start
    apu_center_ref = APU_CENTER_RB.get()
    apu_size_ref = APU_SIZE_RB.get()
    apu_size_perp_ref = APU_SIZE_PERPENDICULAR.get()
    apu_center_perp_ref = APU_CENTER_PERPENDICULAR.get()

    logger.info("BU/BD sent to start")
    BU.put(bender_us_start, wait=False)
    BD.put(bender_ds_start, wait=False)

    APU_SIZE.put(aperture_size, wait=False)
    APU_SIZE_PERPENDICULAR.put(aperture_size_pendicular, wait=False)
    APU_CENTER.put(aperture[0], wait=False)

    BU.put(bender_us_start, wait=True)
    BD.put(bender_ds_start, wait=True)
    time.sleep(wait_time_benders)
    logger.info(f"BU to start: {bender_us_start:6.3f}")
    logger.info(f"BD to start: {bender_ds_start:6.3f}")

    bender_us_rb = []
    bender_ds_rb = []
    beam_positions = []
    bender_scan_data = {}

    datestr, timestr = generate_date_and_time_str()

    for bu in bender_us:
        logger.info("")
        BU.put(bu, wait=False)

        for bd in bender_ds:
            BU.put(bu, wait=False)
            BD.put(bd, wait=False)
            BU.put(bu, wait=True)
            BD.put(bd, wait=True)
            time.sleep(wait_time_benders)
            logger.info(f" BU / BD positions = {bu:6.3f} / {bd:6.3f}")

            bender_us_rb.append(BU_RB.get())
            bender_ds_rb.append(BD_RB.get())

            beam_pos = []
            for apu in aperture:
                APU_CENTER.put(apu, wait=True)
                time.sleep(wait_time_aperture)
                beam_pos_apu = get_position_from_pipeline(pip_instance_id, data_field_name, n_pulses=n_pulses)
                logger.info(f" Aperture position = {apu:6.3f}; Beam position = {beam_pos_apu:6.3f}")
                beam_pos.append(beam_pos_apu)

            time.sleep(wait_time_aperture)
            APU_CENTER.put(aperture[0], wait=False)
            beam_positions.append(beam_pos)

        BD.put(bender_ds_start, wait=True)
        logger.info("")
        logger.info(f"BD to start: {bender_ds_start:6.3f}")
        time.sleep(wait_time_benders)

        # save intermediate data
        bender_scan_data["bender_us"] = bender_us_rb
        bender_scan_data["bender_ds"] = bender_ds_rb
        bender_scan_data["beam_positions"] = beam_positions

        fpath = save_focusing_data(bender_scan_data, direction=direction, timestr=timestr, datestr=datestr)
        out_fpath = convert_focusing_to_bender_data(fpath)

    logger.info(f"BU to start: {bender_us_start:6.3f}")

    APU_SIZE.put(apu_size_ref, wait=False)
    APU_CENTER.put(apu_center_ref, wait=False)
    APU_SIZE_PERPENDICULAR.put(apu_size_perp_ref, wait=False)
    APU_CENTER_PERPENDICULAR.put(apu_center_perp_ref, wait=False)
    BU.put(bender_us_start, wait=False)
    BD.put(bender_ds_start, wait=False)

    logger.info(f"Data saved to: {out_fpath}")
    logger.info("Done")

    return bender_scan_data


def generate_date_and_time_str():
    t = datetime.datetime.now()
    datestr = t.date().isoformat()
    timestr = t.isoformat(timespec='minutes')
    return datestr, timestr


def save_focusing_data(bender_scan_data, direction="unknown", timestr=None, datestr=None, beamline_directory="/sf/cristallina/applications/beamline/snapshots/KBs/"):
    """ Saves bender focusing data to json for analysis in beamline directory.
    """
    bender_scan_data["comment"] = "Cristallina bender focusing data"

    if timestr is None:
        datestr, timestr = generate_date_and_time_str()

    directory = Path(beamline_directory) / datestr
    directory.mkdir(parents=True, exist_ok=True)

    fpath = directory / f"C_{timestr}_{direction}.json"
    with open(fpath, "w") as f:
        json.dump(bender_scan_data, f)

    return fpath


def convert_focusing_to_bender_data(focusing_json_file):
    """ Converts focusing data to text array for further processing.
    """
    fpath = Path(focusing_json_file)
    with open(fpath, "r") as f:
        focusing_data = json.loads(f.read())

    nrows = len(focusing_data['bender_us'])
    arr = np.empty((nrows, 5))
    arr[:, 0] = focusing_data['bender_us']
    arr[:, 1] = focusing_data['bender_ds']
    arr[:, 2:] = focusing_data['beam_positions']

    Diff1 = arr[:, 2] - arr[:, 3]
    Diff2 = arr[:, 4] - arr[:, 3]

    # extend array with difference columns
    arr = np.c_[arr, Diff1]
    arr = np.c_[arr, Diff2]

    out_fpath = fpath.with_suffix(".dat")
    np.savetxt(out_fpath, arr)

    return out_fpath
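The docstring TODO suggests turning the inner loop into a generator that yields readings for live analysis. A rough, hypothetical sketch of that refactor; `move_benders`, `scan_aperture` and `update_live_plot` are placeholders, and the real routine would keep the PV handling from kb_focusing_acquire above:

```python
# Hypothetical generator-style inner loop, as suggested by the TODO above.
def bender_scan_points(bender_us, bender_ds, aperture, n_pulses=1):
    for bu in bender_us:
        for bd in bender_ds:
            # move_benders(bu, bd)  # placeholder for the BU/BD puts and waits
            beam_pos = []
            for apu in aperture:
                # scan_aperture(apu)  # placeholder for the APU_CENTER put
                # beam_pos.append(get_position_from_pipeline(pip_instance_id, data_field_name, n_pulses))
                beam_pos.append(None)  # placeholder reading
            yield bu, bd, beam_pos  # consumed by a live-analysis client

# for bu_rb, bd_rb, positions in bender_scan_points([1.49], [1.79], [-0.3, 0, 0.3]):
#     update_live_plot(bu_rb, bd_rb, positions)  # hypothetical consumer
```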

View File

@@ -0,0 +1,13 @@
import requests
import numpy as np

from loguru import logger


def start_sequence(n: int = 100, pulse_phase: float = np.pi / 8):
    parameters = {"n": n, "pulse_phase": pulse_phase}
    url = "http://oscillations.psi.ch:8000/pulse"

    r = requests.get(url, params=parameters)
    d = r.json()
    return d['pids']
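A usage sketch for the helper above; the endpoint and response format are taken from the function itself, while the logging of the returned pulse IDs is illustrative:

```python
# Illustrative call: request 100 pulses with the default phase and keep the
# returned pulse IDs for later correlation with the DAQ data.
pids = start_sequence(n=100, pulse_phase=np.pi / 8)
logger.info(f"sequence started, got {len(pids)} pulse IDs ({pids[0]}..{pids[-1]})")
```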

View File

@ -0,0 +1,215 @@
{
"cells": [
{
"cell_type": "code",
"execution_count": 9,
"id": "9cf09892-2e9f-4e30-8d30-a98b5405a769",
"metadata": {},
"outputs": [],
"source": [
"#PATH=\"${PATH:+${PATH}:}~/opt/bin\" # appending\n",
"import time"
]
},
{
"cell_type": "code",
"execution_count": 1,
"id": "a131be81-c3bc-4158-bd60-c0fbbd95434b",
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"\u001b]0;⊚slic\u0007"
]
}
],
"source": [
"from slic.core.adjustable import Adjustable, PVAdjustable, DummyAdjustable"
]
},
{
"cell_type": "code",
"execution_count": 21,
"id": "5d9dd927-be4c-445f-9d71-ca7a2e4707c6",
"metadata": {},
"outputs": [],
"source": [
"Theta = DummyAdjustable(ID='THETA', name='Theta', process_time=1)\n",
"TwoTheta = DummyAdjustable(ID='TWOTHETA', name='TwoTheta', process_time=1)"
]
},
{
"cell_type": "code",
"execution_count": 22,
"id": "2b7f0ae3-928f-442d-8901-a46f8f3ed0ec",
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Theta: 0.00, TwoTheta: 0.00\n",
"Theta: 0.40, TwoTheta: 0.80\n",
"Theta: 0.80, TwoTheta: 1.60\n",
"Theta: 1.20, TwoTheta: 2.40\n",
"Theta: 1.60, TwoTheta: 3.20\n",
"Theta: 2.00, TwoTheta: 4.00\n",
"Theta: 2.40, TwoTheta: 4.80\n",
"Theta: 2.80, TwoTheta: 5.60\n",
"Theta: 3.20, TwoTheta: 6.40\n",
"Theta: 3.60, TwoTheta: 7.20\n",
"DummyAdjustable \"Theta\" at 4\n",
"DummyAdjustable \"TwoTheta\" at 8\n"
]
}
],
"source": [
"Theta.mv(4)\n",
"TwoTheta.mv(8)\n",
"\n",
"while any([Theta.is_moving(), TwoTheta.is_moving()]):\n",
" \n",
" print(f\"Theta: {Theta.get_current_value():.2f}, TwoTheta: {TwoTheta.get_current_value():.2f}\")\n",
" time.sleep(0.1)\n"
]
},
{
"cell_type": "code",
"execution_count": 23,
"id": "567d2b53-b8ae-4969-9f12-bd6e23784cb1",
"metadata": {},
"outputs": [],
"source": [
"from slic.devices.general.motor import Motor\n",
"\n",
"newport = Motor(\"SARES30-MOBI1:MOT_5\")"
]
},
{
"cell_type": "code",
"execution_count": 24,
"id": "1f127ceb-4c01-4499-863a-3d7f67a92205",
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"<Popen: returncode: None args: 'caqtdm -macro \"P=SARES30-MOBI1:,M=MOT_5\" mot...>"
]
},
"execution_count": 24,
"metadata": {},
"output_type": "execute_result"
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"CAQTDM_DISPLAY_PATH=/sf/cristallina/config/qt:/sf/op/config/qt:/sf/bd/config/qt:/sf/diag/config/qt:/sf/id/config/qt:/sf/laser/config/qt:/sf/mag/config/qt:/sf/photo/config/qt:/sf/plc/config/qt:/sf/rf/config/qt:/sf/ts/config/qt:/sf/vcs/config/qt:/sf/controls/config/qt:/ioc/modules/qt:/ioc/qt\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
"\n",
"(caQtDM:2000349): dbind-WARNING **: 12:21:21.231: Couldn't register with accessibility bus: Did not receive a reply. Possible causes include: the remote application did not send a reply, the message bus security policy blocked the reply, the reply timeout expired, or the network connection was broken.\n"
]
}
],
"source": [
"newport.gui()\n"
]
},
{
"cell_type": "markdown",
"id": "80d92623-55a0-46c7-a8ab-60fd86be5ab9",
"metadata": {},
"source": [
"# Speed settings\n",
"Speed in units of EGU/s (engineering units/second)\n",
"From the EPICS documentation\n",
"\n",
"````\n",
"Except where specified otherwise, fields associated with the motor position and its derivatives take values in user-specified \"engineering units\", such as degrees; the engineering unit name is contained in the field EGU. Thus, generally, speeds are expressed in EGU's per second. Accelerations, however, are expressed as the number of seconds taken to accelerate to full speed. However, additional fields are provided so that the motor position can be specified in steps and the speed in revolutions per second, and so that the step size can be set by specifying the number of steps per revolution and the number of EGU's per revolution.\n",
"````\n"
]
},
{
"cell_type": "code",
"execution_count": 37,
"id": "3a7d603b-993a-4d8a-948c-d619c5a24445",
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"speed: 12.0 mm/s\n"
]
}
],
"source": [
"#slew_speed Slew speed or velocity .VELO\n",
"#base_speed Base or starting speed .VBAS\n",
"#acceleration Acceleration time (sec) .ACCL\n",
"speed = newport._motor.slew_speed\n",
"# newport._motor.base_speed\n",
"# newport._motor.acceleration\n",
"\n",
"units = newport._motor.EGU\n",
"\n",
"print(f\"speed: {speed:.1f} {units}/s\")"
]
},
{
"cell_type": "markdown",
"id": "864acb61-4ce1-47f9-a33d-c75b33e153ed",
"metadata": {},
"source": [
"So we can read and set the motor speed"
]
},
{
"cell_type": "code",
"execution_count": 38,
"id": "e0a3b805-2b1d-4128-848b-263647a9fb2f",
"metadata": {},
"outputs": [],
"source": [
"newport._motor.slew_speed = 14"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "2c80b15f-79cb-4151-9fb5-9df4cd38304b",
"metadata": {},
"outputs": [],
"source": []
}
],
"metadata": {
"kernelspec": {
"display_name": "Python [conda env:conda-slic]",
"language": "python",
"name": "conda-env-conda-slic-py"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.9.18"
}
},
"nbformat": 4,
"nbformat_minor": 5
}
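The notebook above reads and sets the slew speed through the slic Motor wrapper. For reference, a minimal sketch doing the same through the EPICS motor-record fields named in the notebook comments (.VELO, .VBAS, .ACCL, .EGU), using the same motor prefix; treating these fields as present on this motor is an assumption carried over from the notebook:

```python
from epics import PV

# Standard motor-record fields listed in the notebook:
#   .VELO slew speed, .VBAS base speed, .ACCL acceleration time (s), .EGU units
prefix = "SARES30-MOBI1:MOT_5"
velo = PV(prefix + ".VELO")
egu = PV(prefix + ".EGU")

print(f"speed: {velo.get():.1f} {egu.get()}/s")

# PV-level equivalent of `newport._motor.slew_speed = 14`; left commented out
# here because it changes the real motor settings.
# velo.put(14)
```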