save conf

This commit is contained in:
2025-07-17 16:14:56 +02:00
parent 2c68322c91
commit 284f7a891e
59 changed files with 352 additions and 12 deletions

View File

@@ -1,9 +1,9 @@
{
"Number_of_cycles": 50,
"Number_of_cycles": 500,
"Amplitude_mm": 1,
"Time_in_beam_s": 5,
"Time_out_of_beam_s": 10,
"Exposure_time": 0.0004,
"Img_Processing": 1,
"pixel_size_mu": 0.275
"Img_Processing": 0,
"pixel_size_mu": 1.1
}

View File

@@ -39,6 +39,7 @@ from utils import get_datestr, get_timestr
import ad
import myutility as myu
import cv2
import shutil
workdir = \
os.path.expanduser(rf'C:\Users\berti_r\Python_Projects\StagePerformaceDocu\data\data{get_datestr()}_alignment_tests')
@@ -50,6 +51,9 @@ plc = mfl.plc('5.17.17.136.1.1', 852)
plc.connect()
axis1 = mfl.axis(plc, 1)
#insert try catch later
def get_pixel_size():
    """Return the camera pixel size in micrometres ("pixel_size_mu") from the config."""
    # The config is re-loaded on every call so edits to the file on disk
    # take effect immediately without restarting the measurement script.
    return myu.load_object(config_path).get("pixel_size_mu")
def init_nr_of_cycles():
config = myu.load_object(config_path)
@@ -61,6 +65,8 @@ def init_exposure_time():
def init_image_processing_yes_no():
    """Return the "Img_Processing" flag from the config (defaults to 0 when absent)."""
    # 1 selects the custom __process_img path at the capture site; anything
    # else falls back to the plain centre-of-mass computation.
    return myu.load_object(config_path).get("Img_Processing", 0)
def safe_meas_settings(save_path):
    """Archive the current measurement settings alongside the measurement data.

    Copies the config file at ``config_path`` into ``save_path`` so each
    measurement directory records the exact settings it was acquired with.

    Parameters
    ----------
    save_path : str
        Destination directory (or file path) for the config copy. The
        directory must already exist for ``shutil.copy`` to succeed.

    NOTE(review): call sites invoke this *before* ``os.makedirs(savedir)``;
    the call order there should be swapped so the destination exists.
    """
    # Bug fix: the arguments were swapped — the original copied the
    # (not-yet-created) measurement directory over the config file instead
    # of archiving the config into the measurement directory.
    shutil.copy(config_path, save_path)
def gaussian_2d(coords, amplitude, x0, y0, sigma_x, sigma_y, offset):
x, y = coords
@@ -128,6 +134,7 @@ def run_repeatability_series(
savedir = os.path.join(workdir,
f'{get_timestr()}_repeatibility_{motor_pv_prefix}')
safe_meas_settings(savedir)
savefile = os.path.join(savedir,
f'repeatibility_{motor_pv_prefix}.dat')
os.makedirs(savedir)
@@ -159,7 +166,22 @@ def run_repeatability_series(
start_pos_rbv = 4 #????
meas_pos_rbv = 5 #????
#---------------------------------------------capture------------------------------------------
com_x, com_y = aquire_avg(camera)
x_array = []
y_array = []
for nr_img in range(10):
sleep(0.1)
im = camera.get_image()
sleep(0.1)
if (1 == init_image_processing_yes_no()):
com_x_tmp, com_y_tmp = __process_img(im)
else:
com_x_tmp, com_y_tmp = image_center_of_mass(im, plot=False, verbose=False)
x_array.append(com_x_tmp)
y_array.append(com_y_tmp)
com_x = np.average(x_array)
com_y = np.average(y_array)
data_str = " {:6d} {:18f} {:18f} {:8.3f} {:8.3f} {:14.3f}\n".format(
i, start_pos_rbv, meas_pos_rbv, com_x, com_y, time.time())
@@ -328,7 +350,7 @@ def image_test(motor_on_off=0):
# Create a new figure and axis
fig, ax = plt.subplots()
ax.imshow(im)
ax.set_title(f"Center of Mass @{x:.2f}, {y:.2f}")
ax.set_title(f"Center of Mass @{x*get_pixel_size():.2f}, {y*get_pixel_size():.2f}")
ax.scatter([x], [y], c='red', s=50)
ax.axis('off')
plt.show()
@@ -345,7 +367,7 @@ def camera_thread_function( img_queue):
camera.start()
start_time = time.time()
for i in range(100):
for i in range(10):
img_queue.put((camera.get_image(),time.time()))
stop_time = time.time()
elapsed_time = stop_time - start_time
@@ -361,6 +383,7 @@ def static_test(motor_on_off=0):
savedir = os.path.join(workdir,
f'{get_timestr()}_static_{motor_on_off}')
safe_meas_settings(savedir)
savefile = os.path.join(savedir,
f'static_{motor_on_off}.dat')
os.makedirs(savedir)

View File

@@ -0,0 +1 @@
0 4.000000 5.000000 101.519 172.972 1752757974.096

View File

@@ -0,0 +1,50 @@
0 4.000000 5.000000 101.454 173.133 1752758141.111
1 4.000000 5.000000 101.673 173.077 1752758159.189
2 4.000000 5.000000 100.577 172.981 1752758177.195
3 4.000000 5.000000 101.014 172.910 1752758195.266
4 4.000000 5.000000 101.455 173.074 1752758213.312
5 4.000000 5.000000 101.057 173.065 1752758231.329
6 4.000000 5.000000 101.485 173.020 1752758249.341
7 4.000000 5.000000 101.700 173.063 1752758267.372
8 4.000000 5.000000 101.512 173.075 1752758285.437
9 4.000000 5.000000 101.605 173.223 1752758303.492
10 4.000000 5.000000 101.965 173.162 1752758321.501
11 4.000000 5.000000 102.299 173.142 1752758339.528
12 4.000000 5.000000 101.873 173.142 1752758357.569
13 4.000000 5.000000 101.823 172.996 1752758375.605
14 4.000000 5.000000 101.942 172.953 1752758393.624
15 4.000000 5.000000 101.893 172.895 1752758411.611
16 4.000000 5.000000 101.894 172.932 1752758429.632
17 4.000000 5.000000 102.235 172.788 1752758447.690
18 4.000000 5.000000 102.158 172.769 1752758465.716
19 4.000000 5.000000 101.608 172.562 1752758483.702
20 4.000000 5.000000 101.523 172.496 1752758501.707
21 4.000000 5.000000 101.612 172.602 1752758519.713
22 4.000000 5.000000 101.627 172.491 1752758537.682
23 4.000000 5.000000 101.670 172.415 1752758555.651
24 4.000000 5.000000 101.728 172.319 1752758573.627
25 4.000000 5.000000 102.157 172.250 1752758591.710
26 4.000000 5.000000 102.035 172.325 1752758609.994
27 4.000000 5.000000 102.151 172.089 1752758628.014
28 4.000000 5.000000 101.936 172.048 1752758646.023
29 4.000000 5.000000 102.097 172.066 1752758664.076
30 4.000000 5.000000 102.551 171.946 1752758682.350
31 4.000000 5.000000 102.265 171.850 1752758700.392
32 4.000000 5.000000 102.179 172.031 1752758718.400
33 4.000000 5.000000 102.205 172.001 1752758736.636
34 4.000000 5.000000 102.488 171.891 1752758754.596
35 4.000000 5.000000 102.373 171.817 1752758772.816
36 4.000000 5.000000 101.992 171.735 1752758790.769
37 4.000000 5.000000 101.865 171.697 1752758808.794
38 4.000000 5.000000 102.280 171.700 1752758826.811
39 4.000000 5.000000 102.427 171.533 1752758844.783
40 4.000000 5.000000 102.449 171.462 1752758862.897
41 4.000000 5.000000 102.432 171.340 1752758880.888
42 4.000000 5.000000 101.798 171.300 1752758899.091
43 4.000000 5.000000 101.118 171.156 1752758917.287
44 4.000000 5.000000 101.724 170.839 1752758935.313
45 4.000000 5.000000 102.012 170.879 1752758953.363
46 4.000000 5.000000 102.281 170.488 1752758971.446
47 4.000000 5.000000 102.079 170.657 1752758989.591
48 4.000000 5.000000 102.303 170.552 1752759007.690
49 4.000000 5.000000 101.703 170.506 1752759025.814

View File

@@ -98,6 +98,54 @@
}
],
"execution_count": 1
},
{
"metadata": {
"ExecuteTime": {
"end_time": "2025-07-17T13:46:01.465152Z",
"start_time": "2025-07-17T13:46:01.461007Z"
}
},
"cell_type": "code",
"source": [
"import numpy as np\n",
"\n",
"def pool_average_1d(data, pool_size=10):\n",
" data = np.asarray(data)\n",
" remainder = len(data) % pool_size\n",
"\n",
" if remainder != 0:\n",
" # Truncate the extra values that don't fit into a full block\n",
" data = data[:len(data) - remainder]\n",
"\n",
" # Reshape and average\n",
" pooled = data.reshape(-1, pool_size).mean(axis=1)\n",
" return pooled\n",
"\n",
"# Example usage\n",
"data = np.arange(103) # 1D array with 103 elements\n",
"pooled = pool_average_1d(data, pool_size=10)\n",
"\n",
"print(\"Original:\", data)\n",
"print(\"Pooled:\", pooled)\n"
],
"id": "5824fc6dfbfacfdc",
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Original: [ 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17\n",
" 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35\n",
" 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53\n",
" 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71\n",
" 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89\n",
" 90 91 92 93 94 95 96 97 98 99 100 101 102]\n",
"Pooled: [ 4.5 14.5 24.5 34.5 44.5 54.5 64.5 74.5 84.5 94.5]\n"
]
}
],
"execution_count": 1
}
],
"metadata": {

View File

@@ -17,3 +17,4 @@ timestamp,description,measurement_id,notes
2025-07-16 16:01:07.665920,Tuned PID gains for better damping,meas_20250710_120000,Observed 30% overshoot reduction
2025-07-16 16:07:33.382085,Tuned PID gains for better damping,meas_20250710_120000,Observed 30% overshoot reduction
2025-07-16 17:25:52.318083,Tuned PID gains for better damping,meas_20250710_120000,Observed 30% overshoot reduction
2025-07-17 15:43:44.147022,Tuned PID gains for better damping,meas_20250710_120000,Observed 30% overshoot reduction
1 timestamp description measurement_id notes
17 2025-07-16 16:01:07.665920 Tuned PID gains for better damping meas_20250710_120000 Observed 30% overshoot reduction
18 2025-07-16 16:07:33.382085 Tuned PID gains for better damping meas_20250710_120000 Observed 30% overshoot reduction
19 2025-07-16 17:25:52.318083 Tuned PID gains for better damping meas_20250710_120000 Observed 30% overshoot reduction
20 2025-07-17 15:43:44.147022 Tuned PID gains for better damping meas_20250710_120000 Observed 30% overshoot reduction

View File

@@ -26,7 +26,15 @@
"\n",
    "## Improved image processing for optical position measurement\n",
"\n",
    "I found that the lens flare affects the center of mass considerably. To fix that I implemented a threshold. This introduces an undesirable discretisation effect, since pixels suddenly get added or removed when passing the threshold. Another option would be to run a gradient-descent optimizer at a lower exposure time with some Gaussian blur for preprocessing; this would work well under the assumption that the intensity distribution is Gaussian.\n",
    "| Approach | Result |\n",
    "|-------------------------------------|------------------------------------------------------------------|\n",
    "| Center of mass on raw image | heavily influenced by lens flare |\n",
    "| Threshold -> CM | Not influenced by lens flare but introduced discretisation error |\n",
    "| Gaussian Blur -> Threshold -> CM | Not influenced by lens flare but introduced discretisation error |\n",
    "| Gaussian Blur -> Upscale 3d > TH >CM | Bad |\n",
    "| 5 | Base |\n",
"\n",
"\n",
"\n",
    "| Before | After |\n",
"|:----------------------------------------:|:----------------------------------------:|\n",
@@ -79,6 +87,10 @@
"metadata": {
"jupyter": {
"source_hidden": true
},
"ExecuteTime": {
"end_time": "2025-07-17T14:13:34.725425Z",
"start_time": "2025-07-17T14:13:34.499524Z"
}
},
"source": [
@@ -287,8 +299,195 @@
"\n",
"\n"
],
"outputs": [],
"execution_count": null
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Path exists: C:\\Users\\berti_r\\Python_Projects\\StagePerformaceDocu\\Scripts\n",
"Path exists: C:\\Users\\berti_r\\Python_Projects\\StagePerformaceDocu\\Config\\config.json\n",
"Path exists: C:\\Users\\berti_r\\Python_Projects\\StagePerformaceDocu\\Config\\config.json\n",
"Path exists: C:\\Users\\berti_r\\Python_Projects\\templates\\motion_libs\n",
"Path exists: C:\\Users\\berti_r\\Python_Projects\\StagePerformaceDocu\\Config\\measurement.json\n",
"Constructor for PLC\n",
"Connect to PLC\n",
"is_open()=True\n",
"get_local_address()=None\n",
"read_device_info()=('Plc30 App', <pyads.structs.AdsVersion object at 0x000001E1DC595550>)\n",
"GVL_APP.nAXIS_NUM=3\n",
"Constructor for axis\n"
]
},
{
"data": {
"text/plain": [
"Dropdown(description='Test Type:', options=('Image Test', 'Repeatability Test', 'Static Test'), value='Image T…"
],
"application/vnd.jupyter.widget-view+json": {
"version_major": 2,
"version_minor": 0,
"model_id": "24e6d309a6fa4bf08355a4d9b16c5140"
}
},
"metadata": {},
"output_type": "display_data"
},
{
"data": {
"text/plain": [
"Output()"
],
"application/vnd.jupyter.widget-view+json": {
"version_major": 2,
"version_minor": 0,
"model_id": "c7afe3bdcf394f1d867c3330e0203656"
}
},
"metadata": {},
"output_type": "display_data"
},
{
"data": {
"text/plain": [
"BoundedIntText(value=500, description='Nr of cycles:', max=1000, min=1)"
],
"application/vnd.jupyter.widget-view+json": {
"version_major": 2,
"version_minor": 0,
"model_id": "674c2ad7cc27438dbb50381677c4460b"
}
},
"metadata": {},
"output_type": "display_data"
},
{
"data": {
"text/plain": [
"Output()"
],
"application/vnd.jupyter.widget-view+json": {
"version_major": 2,
"version_minor": 0,
"model_id": "3a7767c1f7c0403a90815507626cb7d4"
}
},
"metadata": {},
"output_type": "display_data"
},
{
"data": {
"text/plain": [
"Text(value='0.0004', description='Exposure [s]:')"
],
"application/vnd.jupyter.widget-view+json": {
"version_major": 2,
"version_minor": 0,
"model_id": "d34235c2d58a42a6a07c6c7c83553e4a"
}
},
"metadata": {},
"output_type": "display_data"
},
{
"data": {
"text/plain": [
"Output()"
],
"application/vnd.jupyter.widget-view+json": {
"version_major": 2,
"version_minor": 0,
"model_id": "c58b963fbf24430e86e5a41e792940e0"
}
},
"metadata": {},
"output_type": "display_data"
},
{
"data": {
"text/plain": [
"ToggleButton(value=False, description='Processing', tooltip='Toggle processing on/off')"
],
"application/vnd.jupyter.widget-view+json": {
"version_major": 2,
"version_minor": 0,
"model_id": "d2935b4152c343c0af0327dbfbc9a318"
}
},
"metadata": {},
"output_type": "display_data"
},
{
"data": {
"text/plain": [
"Output()"
],
"application/vnd.jupyter.widget-view+json": {
"version_major": 2,
"version_minor": 0,
"model_id": "c4007ab71ec94cd682c3eb9a7a089eff"
}
},
"metadata": {},
"output_type": "display_data"
},
{
"data": {
"text/plain": [
"Button(description='Set exposure time', style=ButtonStyle())"
],
"application/vnd.jupyter.widget-view+json": {
"version_major": 2,
"version_minor": 0,
"model_id": "de9fd9d529d4439f86053c7cd04f764a"
}
},
"metadata": {},
"output_type": "display_data"
},
{
"data": {
"text/plain": [
"Output()"
],
"application/vnd.jupyter.widget-view+json": {
"version_major": 2,
"version_minor": 0,
"model_id": "51f5422cd751446ebd8a43237487e3f8"
}
},
"metadata": {},
"output_type": "display_data"
},
{
"data": {
"text/plain": [
"Button(description='Start Measurement', style=ButtonStyle())"
],
"application/vnd.jupyter.widget-view+json": {
"version_major": 2,
"version_minor": 0,
"model_id": "a6aecc0a05de4ccb8836e160d385376d"
}
},
"metadata": {},
"output_type": "display_data"
},
{
"data": {
"text/plain": [
"Output()"
],
"application/vnd.jupyter.widget-view+json": {
"version_major": 2,
"version_minor": 0,
"model_id": "748a252c15184b26bb7202e02156790a"
}
},
"metadata": {},
"output_type": "display_data"
}
],
"execution_count": 1
},
{
"cell_type": "markdown",
@@ -557,10 +756,28 @@
"#mf.analyze_repeatability(axis_data_file_path_1,1.1)\n",
"\n",
"\n",
"x_vals, y_vals, times = myu.load_xy_data(axis_data_file_path_1)\n",
"def pool_average_1d(data, pool_size=10):\n",
" data = np.asarray(data)\n",
" remainder = len(data) % pool_size\n",
"\n",
" if remainder != 0:\n",
" # Truncate the extra values that don't fit into a full block\n",
" data = data[:len(data) - remainder]\n",
"\n",
" # Reshape and average\n",
" pooled = data.reshape(-1, pool_size).mean(axis=1)\n",
" return pooled\n",
"\n",
"x_vals1, y_vals1, times1 = myu.load_xy_data(axis_data_file_path_1)\n",
"times = times1[:10]\n",
"x_vals = pool_average_1d(x_vals1)\n",
"y_vals = pool_average_1d(y_vals1)\n",
"x_vals = x_vals*get_pixel_size()\n",
"y_vals = y_vals*get_pixel_size()\n",
"\n",
"\n",
"\n",
"\n",
"#Calc statistics\n",
"rms_x = np.sqrt(np.mean(np.square(x_vals)))\n",
"rms_y = np.sqrt(np.mean(np.square(y_vals)))\n",
@@ -631,8 +848,8 @@
" line_x.set_data(times[ind_min:ind_max:step], x_vals[ind_min:ind_max:step])\n",
" line_y.set_data(times[ind_min:ind_max:step], y_vals[ind_min:ind_max:step])\n",
"\n",
" ax1.relim()\n",
" ax2.relim()\n",
" #ax1.relim()\n",
" #ax2.relim()\n",
" ax1.autoscale_view()\n",
" ax2.autoscale_view()\n",
" fig_static.canvas.draw_idle()\n",