From e0268119330111987571e142c9d7481c90802016 Mon Sep 17 00:00:00 2001 From: Gobbo Alexandre Date: Mon, 17 Jan 2022 17:06:13 +0100 Subject: [PATCH] Initial commit --- bin/psss_panel | 15 + bin/psss_panel_editor | 13 + config/devices.properties | 16 + config/jcae.properties | 12 + config/setup.properties | 23 + devices/cam_server.properties | 25 + devices/dispatcher.properties | 13 + devices/histo_center.properties | 6 + devices/histo_fwhm.properties | 6 + devices/psss_center_average.properties | 4 + devices/psss_fwhm_average.properties | 4 + devices/psss_spectrum_y_average.properties | 5 + nb/PSSS/build.xml | 73 + nb/PSSS/nbproject/build-impl.xml | 1766 +++++++++++ nb/PSSS/nbproject/genfiles.properties | 8 + nb/PSSS/nbproject/private/config.properties | 0 nb/PSSS/nbproject/private/private.properties | 8 + nb/PSSS/nbproject/private/private.xml | 4 + nb/PSSS/nbproject/project.properties | 99 + nb/PSSS/nbproject/project.xml | 14 + plugins/PSSS$1.class | Bin 0 -> 518 bytes plugins/PSSS$2.class | Bin 0 -> 529 bytes plugins/PSSS$3.class | Bin 0 -> 522 bytes plugins/PSSS$4.class | Bin 0 -> 530 bytes plugins/PSSS$5.class | Bin 0 -> 530 bytes plugins/PSSS$6.class | Bin 0 -> 531 bytes plugins/PSSS$7.class | Bin 0 -> 524 bytes plugins/PSSS$8.class | Bin 0 -> 525 bytes plugins/PSSS$9.class | Bin 0 -> 524 bytes plugins/PSSS.class | Bin 0 -> 19372 bytes plugins/PSSS.form | 715 +++++ plugins/PSSS.java | 760 +++++ script/Lib/builtin_classes.py | 587 ++++ script/Lib/builtin_functions.py | 1895 ++++++++++++ script/Lib/builtin_utils.py | 174 ++ script/Lib/diffutils.py | 1032 +++++++ script/Lib/ijutils.py | 782 +++++ script/Lib/jeputils.py | 239 ++ script/Lib/mathutils.py | 681 +++++ script/Lib/plotutils.py | 119 + script/Lib/rsync.py | 149 + script/Lib/sessions.py | 348 +++ script/Lib/startup.py | 207 ++ script/Lib/startup_c.py | 2836 ++++++++++++++++++ script/Lib/statsutils.py | 191 ++ script/local.py | 7 + script/psss/CameraScan.py | 73 + script/psss/CrystalHeightScan.py | 81 + 
script/psss/EnergyScan.py | 101 + script/psss/psss.py | 188 ++ 50 files changed, 13279 insertions(+) create mode 100755 bin/psss_panel create mode 100755 bin/psss_panel_editor create mode 100755 config/devices.properties create mode 100755 config/jcae.properties create mode 100644 config/setup.properties create mode 100755 devices/cam_server.properties create mode 100755 devices/dispatcher.properties create mode 100755 devices/histo_center.properties create mode 100755 devices/histo_fwhm.properties create mode 100755 devices/psss_center_average.properties create mode 100755 devices/psss_fwhm_average.properties create mode 100755 devices/psss_spectrum_y_average.properties create mode 100644 nb/PSSS/build.xml create mode 100644 nb/PSSS/nbproject/build-impl.xml create mode 100644 nb/PSSS/nbproject/genfiles.properties create mode 100644 nb/PSSS/nbproject/private/config.properties create mode 100644 nb/PSSS/nbproject/private/private.properties create mode 100644 nb/PSSS/nbproject/private/private.xml create mode 100644 nb/PSSS/nbproject/project.properties create mode 100644 nb/PSSS/nbproject/project.xml create mode 100644 plugins/PSSS$1.class create mode 100644 plugins/PSSS$2.class create mode 100644 plugins/PSSS$3.class create mode 100644 plugins/PSSS$4.class create mode 100644 plugins/PSSS$5.class create mode 100644 plugins/PSSS$6.class create mode 100644 plugins/PSSS$7.class create mode 100644 plugins/PSSS$8.class create mode 100644 plugins/PSSS$9.class create mode 100644 plugins/PSSS.class create mode 100755 plugins/PSSS.form create mode 100755 plugins/PSSS.java create mode 100644 script/Lib/builtin_classes.py create mode 100644 script/Lib/builtin_functions.py create mode 100644 script/Lib/builtin_utils.py create mode 100644 script/Lib/diffutils.py create mode 100644 script/Lib/ijutils.py create mode 100644 script/Lib/jeputils.py create mode 100644 script/Lib/mathutils.py create mode 100644 script/Lib/plotutils.py create mode 100644 script/Lib/rsync.py create mode 
100644 script/Lib/sessions.py create mode 100644 script/Lib/startup.py create mode 100644 script/Lib/startup_c.py create mode 100644 script/Lib/statsutils.py create mode 100755 script/local.py create mode 100755 script/psss/CameraScan.py create mode 100755 script/psss/CrystalHeightScan.py create mode 100755 script/psss/EnergyScan.py create mode 100755 script/psss/psss.py diff --git a/bin/psss_panel b/bin/psss_panel new file mode 100755 index 0000000..008fafe --- /dev/null +++ b/bin/psss_panel @@ -0,0 +1,15 @@ +#!/bin/bash + + +/opt/gfa/pshell/pshell \ + -version=v1_17 \ + -py3 \ + -m=/opt/gfa/pshell/apps/psss_panel \ + -z \ + -nbcf=true \ + -laf=dark \ + -d \ + -pini=true \ + -clog=WARNING \ + -sbar \ + $@ diff --git a/bin/psss_panel_editor b/bin/psss_panel_editor new file mode 100755 index 0000000..0d6ce67 --- /dev/null +++ b/bin/psss_panel_editor @@ -0,0 +1,13 @@ +#!/bin/bash + + +/opt/gfa/pshell/pshell \ + -py3 \ + -version=v1_17 \ + -z \ + -nbcf=true \ + -laf=dark \ + -pini=true \ + -setp=/opt/gfa/pshell/apps/psss_panel/config/setup.properties \ + -p=PSSS.java \ + $@ diff --git a/config/devices.properties b/config/devices.properties new file mode 100755 index 0000000..1a9eafc --- /dev/null +++ b/config/devices.properties @@ -0,0 +1,16 @@ +psss_fwhm_avg=ch.psi.pshell.epics.ChannelDouble|SARFE10-PSSS059:REL-E-SPREAD|||true +dispatcher=ch.psi.pshell.bs.Dispatcher|https://dispatcher-api.psi.ch/sf||| +cam_server=ch.psi.pshell.bs.PipelineServer|sf-daqsync-01:8889||| +energy_machine=ch.psi.pshell.epics.ChannelDouble|SARFE10-PBPG050:ENERGY|||true +psss_energy=ch.psi.pshell.epics.ChannelDouble|SARFE10-PSSS059:ENERGY|||true +psss_spectrum_x=ch.psi.pshell.epics.ChannelDoubleArray|SARFE10-PSSS059:SPECTRUM_X -1 -3|||true +psss_spectrum_y=ch.psi.pshell.epics.ChannelDoubleArray|SARFE10-PSSS059:SPECTRUM_Y -1 -3|||true +psss_center=ch.psi.pshell.epics.ChannelDouble|SARFE10-PSSS059:SPECTRUM_CENTER|||true 
+psss_fwhm=ch.psi.pshell.epics.ChannelDouble|SARFE10-PSSS059:SPECTRUM_FWHM|||true +psss_roi_min=ch.psi.pshell.epics.ChannelInteger|SARFE10-PSSS059:SPC_ROI_YMIN|||true +psss_roi_max=ch.psi.pshell.epics.ChannelInteger|SARFE10-PSSS059:SPC_ROI_YMAX|||true +histo_center=ch.psi.pshell.device.HistogramGenerator|psss_center|||true +histo_fwhm=ch.psi.pshell.device.HistogramGenerator|psss_fwhm|||true +psss_spectrum_y_average=ch.psi.pshell.device.ArrayAverager|psss_spectrum_y|||true +psss_center_average=ch.psi.pshell.device.Averager|psss_center|||true +psss_fwhm_average=ch.psi.pshell.device.Averager|psss_fwhm|||true diff --git a/config/jcae.properties b/config/jcae.properties new file mode 100755 index 0000000..0e02d88 --- /dev/null +++ b/config/jcae.properties @@ -0,0 +1,12 @@ +#Mon Feb 22 20:35:07 CET 2021 +ch.psi.jcae.ContextFactory.addressList= +ch.psi.jcae.ContextFactory.serverPort= +ch.psi.jcae.ContextFactory.maxArrayBytes=50000000 +ch.psi.jcae.ChannelFactory.retries=1 +ch.psi.jcae.ChannelFactory.timeout=1250 +ch.psi.jcae.impl.DefaultChannelService.retries=4 +ch.psi.jcae.impl.DefaultChannelService.timeout=1000 +ch.psi.jcae.ContextFactory.autoAddressList=true +ch.psi.jcae.ContextFactory.useShellVariables=true +ch.psi.jcae.ContextFactory.addLocalBroadcastInterfaces=false +ch.psi.jcae.ContextFactory.maxSendArrayBytes=1000000 diff --git a/config/setup.properties b/config/setup.properties new file mode 100644 index 0000000..7d4ce8d --- /dev/null +++ b/config/setup.properties @@ -0,0 +1,23 @@ +#Tue Jul 06 13:13:12 CEST 2021 +scriptPath=/opt/gfa/pshell/apps/psss_panel/script +sessionsPath={outp}/sessions +pluginsPath=/opt/gfa/pshell/apps/psss_panel/plugins +configFileDevices=/opt/gfa/pshell/apps/psss_panel/config/devices.properties +consoleSessionsPath={sessions}/console +libraryPath={script}; {script}/Lib +contextPath={outp}/context +configFilePlugins={config}/plugins.properties +extensionsPath={home}/extensions +configPath={home}/config 
+configFileSessions={config}/sessions.properties +userSessionsPath={sessions}/user +dataPath={outp}/data +devicesPath=/opt/gfa/pshell/apps/psss_panel/devices +configFileVariables={config}/variables.properties +configFileSettings={config}/settings.properties +wwwPath={home}/www +logPath={outp}/log +imagesPath={outp}/images +configFile={config}/config.properties +scriptType=py +configFileTasks={config}/tasks.properties diff --git a/devices/cam_server.properties b/devices/cam_server.properties new file mode 100755 index 0000000..7f7cedb --- /dev/null +++ b/devices/cam_server.properties @@ -0,0 +1,25 @@ +#Mon Jan 17 16:00:40 CET 2022 +spatialCalOffsetY=-50.02549719530852 +spatialCalOffsetX=-50.01953888237593 +colormapLogarithmic=false +scale=1.0 +grayscale=false +spatialCalScaleX=-1.0 +spatialCalScaleY=-1.0 +colormapMax=255.0 +rescaleOffset=0.0 +roiWidth=-1 +colormap=Temperature +invert=false +colormapMin=0.0 +rotationCrop=false +rotation=0.0 +rescaleFactor=1.0 +spatialCalUnits=mm +flipVertically=false +roiHeight=-1 +flipHorizontally=false +colormapAutomatic=true +roiY=0 +roiX=0 +transpose=false diff --git a/devices/dispatcher.properties b/devices/dispatcher.properties new file mode 100755 index 0000000..9891517 --- /dev/null +++ b/devices/dispatcher.properties @@ -0,0 +1,13 @@ +#Wed Jun 03 18:45:57 CEST 2020 +sendStrategy=complete_all +dropIncomplete=false +keepListeningOnStop=false +disableCompression=false +parallelHandlerProcessing=true +sendBuildChannelConfig=at_startup +sendAwaitFirstMessage=false +socketType=DEFAULT +validationInconsistency=keep_as_is +byteBufferAllocator=false +mappingIncomplete=fill_null +sendSyncTimeout=0 diff --git a/devices/histo_center.properties b/devices/histo_center.properties new file mode 100755 index 0000000..dfccc99 --- /dev/null +++ b/devices/histo_center.properties @@ -0,0 +1,6 @@ +#Tue Jun 15 11:03:15 CEST 2021 +bins=1000 +min=11400.0 +max=11200.0 +precision=-1 +numberOfSamples=10000 diff --git a/devices/histo_fwhm.properties 
b/devices/histo_fwhm.properties new file mode 100755 index 0000000..da35091 --- /dev/null +++ b/devices/histo_fwhm.properties @@ -0,0 +1,6 @@ +#Fri Apr 30 07:45:13 CEST 2021 +bins=1000 +min=0.0 +max=40.0 +precision=-1 +numberOfSamples=10000 diff --git a/devices/psss_center_average.properties b/devices/psss_center_average.properties new file mode 100755 index 0000000..6a20991 --- /dev/null +++ b/devices/psss_center_average.properties @@ -0,0 +1,4 @@ +#Mon May 03 09:21:35 CEST 2021 +measures=10 +precision=-1 +interval=-1 diff --git a/devices/psss_fwhm_average.properties b/devices/psss_fwhm_average.properties new file mode 100755 index 0000000..6aeec64 --- /dev/null +++ b/devices/psss_fwhm_average.properties @@ -0,0 +1,4 @@ +#Mon May 03 09:21:52 CEST 2021 +measures=10 +precision=-1 +interval=-1 diff --git a/devices/psss_spectrum_y_average.properties b/devices/psss_spectrum_y_average.properties new file mode 100755 index 0000000..2c2e72a --- /dev/null +++ b/devices/psss_spectrum_y_average.properties @@ -0,0 +1,5 @@ +#Mon May 03 09:15:04 CEST 2021 +measures=10 +precision=-1 +interval=-1 +integrate=false diff --git a/nb/PSSS/build.xml b/nb/PSSS/build.xml new file mode 100644 index 0000000..e754a54 --- /dev/null +++ b/nb/PSSS/build.xml @@ -0,0 +1,73 @@ + + + + + + + + + + + Builds, tests, and runs the project PSSS. 
+ + + diff --git a/nb/PSSS/nbproject/build-impl.xml b/nb/PSSS/nbproject/build-impl.xml new file mode 100644 index 0000000..0773138 --- /dev/null +++ b/nb/PSSS/nbproject/build-impl.xml @@ -0,0 +1,1766 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Must set src.plugins.dir + Must set src.script.dir + Must set build.dir + Must set dist.dir + Must set build.classes.dir + Must set dist.javadoc.dir + Must set build.test.classes.dir + Must set build.test.results.dir + Must set build.classes.excludes + Must set dist.jar + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Must set javac.includes + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + No tests executed. 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Must set JVM to use for profiling in profiler.info.jvm + Must set profiler agent JVM arguments in profiler.info.jvmargs.agent + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Must select some files in the IDE or set javac.includes + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + To run this application from the command line without Ant, try: + + java -jar "${dist.jar.resolved}" + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Must select one file in the IDE or set run.class + + + + Must select one file in the IDE or set run.class + + + + + + + + + + + + + + + + + + + + + + + Must select one file in the IDE or set debug.class + + + + + Must select one file in the IDE or set debug.class + + + + + Must set fix.includes + + + + + + + + + + This target only works when run from inside the 
NetBeans IDE. + + + + + + + + + Must select one file in the IDE or set profile.class + This target only works when run from inside the NetBeans IDE. + + + + + + + + + This target only works when run from inside the NetBeans IDE. + + + + + + + + + + + + + This target only works when run from inside the NetBeans IDE. + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Must select one file in the IDE or set run.class + + + + + + Must select some files in the IDE or set test.includes + + + + + Must select one file in the IDE or set run.class + + + + + Must select one file in the IDE or set applet.url + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Must select some files in the IDE or set javac.includes + + + + + + + + + + + + + + + + + + + + + + Some tests failed; see details above. + + + + + + + + + Must select some files in the IDE or set test.includes + + + + Some tests failed; see details above. + + + + Must select some files in the IDE or set test.class + Must select some method in the IDE or set test.method + + + + Some tests failed; see details above. 
+ + + + + Must select one file in the IDE or set test.class + + + + Must select one file in the IDE or set test.class + Must select some method in the IDE or set test.method + + + + + + + + + + + + + + + Must select one file in the IDE or set applet.url + + + + + + + + + Must select one file in the IDE or set applet.url + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/nb/PSSS/nbproject/genfiles.properties b/nb/PSSS/nbproject/genfiles.properties new file mode 100644 index 0000000..5b41107 --- /dev/null +++ b/nb/PSSS/nbproject/genfiles.properties @@ -0,0 +1,8 @@ +build.xml.data.CRC32=cb48e78e +build.xml.script.CRC32=f58963e5 +build.xml.stylesheet.CRC32=f85dc8f2@1.97.0.48 +# This file is used by a NetBeans-based IDE to track changes in generated files such as build-impl.xml. +# Do not edit this file. You may delete it but then the IDE will never regenerate such files for you. +nbproject/build-impl.xml.data.CRC32=cb48e78e +nbproject/build-impl.xml.script.CRC32=1f638965 +nbproject/build-impl.xml.stylesheet.CRC32=d549e5cc@1.97.0.48 diff --git a/nb/PSSS/nbproject/private/config.properties b/nb/PSSS/nbproject/private/config.properties new file mode 100644 index 0000000..e69de29 diff --git a/nb/PSSS/nbproject/private/private.properties b/nb/PSSS/nbproject/private/private.properties new file mode 100644 index 0000000..ecf6f54 --- /dev/null +++ b/nb/PSSS/nbproject/private/private.properties @@ -0,0 +1,8 @@ +compile.on.save=true +do.depend=false +do.jar=true +do.jlink=false +javac.debug=true +javadoc.preview=true +jlink.strip=false +user.properties.file=/afs/psi.ch/user/g/gobbo_a/.netbeans/12.2/build.properties diff --git a/nb/PSSS/nbproject/private/private.xml b/nb/PSSS/nbproject/private/private.xml new file mode 100644 index 0000000..4750962 --- /dev/null +++ b/nb/PSSS/nbproject/private/private.xml @@ -0,0 +1,4 @@ + + + + diff --git a/nb/PSSS/nbproject/project.properties 
b/nb/PSSS/nbproject/project.properties new file mode 100644 index 0000000..06a5e91 --- /dev/null +++ b/nb/PSSS/nbproject/project.properties @@ -0,0 +1,99 @@ +annotation.processing.enabled=true +annotation.processing.enabled.in.editor=false +annotation.processing.processors.list= +annotation.processing.run.all.processors=true +annotation.processing.source.output=${build.generated.sources.dir}/ap-source-output +application.title=PSSS +application.vendor=gobbo_a +build.classes.dir=${build.dir}/classes +build.classes.excludes=**/*.java,**/*.form +# This directory is removed when the project is cleaned: +build.dir=build +build.generated.dir=${build.dir}/generated +build.generated.sources.dir=${build.dir}/generated-sources +# Only compile against the classpath explicitly listed here: +build.sysclasspath=ignore +build.test.classes.dir=${build.dir}/test/classes +build.test.results.dir=${build.dir}/test/results +# Uncomment to specify the preferred debugger connection transport: +#debug.transport=dt_socket +debug.classpath=\ + ${run.classpath} +debug.modulepath=\ + ${run.modulepath} +debug.test.classpath=\ + ${run.test.classpath} +debug.test.modulepath=\ + ${run.test.modulepath} +# Files in build.classes.dir which should be excluded from distribution jar +dist.archive.excludes= +# This directory is removed when the project is cleaned: +dist.dir=dist +dist.jar=${dist.dir}/PSSS.jar +dist.javadoc.dir=${dist.dir}/javadoc +dist.jlink.dir=${dist.dir}/jlink +dist.jlink.output=${dist.jlink.dir}/PSSS +endorsed.classpath= +excludes= +file.reference.pshell-latest.jar=../../../../head/pshell-latest.jar +file.reference.psss_panel-plugins=../../plugins +file.reference.psss_panel-script=../../script +includes=**/*.java +jar.compress=false +javac.classpath=\ + ${file.reference.pshell-latest.jar} +# Space-separated list of extra javac options +javac.compilerargs= +javac.deprecation=false +javac.external.vm=true +javac.modulepath= +javac.processormodulepath= +javac.processorpath=\ + 
${javac.classpath} +javac.source=11 +javac.target=11 +javac.test.classpath=\ + ${javac.classpath}:\ + ${build.classes.dir} +javac.test.modulepath=\ + ${javac.modulepath} +javac.test.processorpath=\ + ${javac.test.classpath} +javadoc.additionalparam= +javadoc.author=false +javadoc.encoding=${source.encoding} +javadoc.html5=false +javadoc.noindex=false +javadoc.nonavbar=false +javadoc.notree=false +javadoc.private=false +javadoc.splitindex=true +javadoc.use=true +javadoc.version=false +javadoc.windowtitle= +# The jlink additional root modules to resolve +jlink.additionalmodules= +# The jlink additional command line parameters +jlink.additionalparam= +jlink.launcher=true +jlink.launcher.name=PSSS +meta.inf.dir=${src.dir}/META-INF +mkdist.disabled=true +platform.active=default_platform +run.classpath=\ + ${javac.classpath}:\ + ${build.classes.dir} +# Space-separated list of JVM arguments used when running the project. +# You may also define separate properties like run-sys-prop.name=value instead of -Dname=value. 
+# To set system properties for unit tests define test-sys-prop.name=value: +run.jvmargs= +run.modulepath=\ + ${javac.modulepath} +run.test.classpath=\ + ${javac.test.classpath}:\ + ${build.test.classes.dir} +run.test.modulepath=\ + ${javac.test.modulepath} +source.encoding=UTF-8 +src.plugins.dir=${file.reference.psss_panel-plugins} +src.script.dir=${file.reference.psss_panel-script} diff --git a/nb/PSSS/nbproject/project.xml b/nb/PSSS/nbproject/project.xml new file mode 100644 index 0000000..9b87f23 --- /dev/null +++ b/nb/PSSS/nbproject/project.xml @@ -0,0 +1,14 @@ + + + org.netbeans.modules.java.j2seproject + + + PSSS + + + + + + + + diff --git a/plugins/PSSS$1.class b/plugins/PSSS$1.class new file mode 100644 index 0000000000000000000000000000000000000000..71bd3a3e02298d909ffd0de5f5be1a27a0062270 GIT binary patch literal 518 zcmZ`$+e*Vg6r63VF)_Ba-tVO-)Cxx51jU!4DAWr?d{5h@O{p8{u9x2uynqjWK+#_k zXSJX&26E2X$(cE`C;iXYHvl^*y2xP4!w{xD6fomp*1^1k1%YAHj`ivmi`sb@hPy2G zVilWx0k`}EtzQacs!3A{ktFk;R!tyl zS5Qr^t`kMmR7;nw@tle3;h+1W$h{7MCLr>8>x}D8@pa)59c5MG*&<$L;|Zejg46>~ zbjwHpx>=LLTo%E{oC$+3%zs52gl|}O(BnQ<_v%81SY?c0_K?Q_zW|j EFC3R=p#T5? literal 0 HcmV?d00001 diff --git a/plugins/PSSS$2.class b/plugins/PSSS$2.class new file mode 100644 index 0000000000000000000000000000000000000000..8b40abc67a49f63aabdb6209ed8a5e95d3bdde3e GIT binary patch literal 529 zcmZ`$+e*Vg5ItL~F)`M9ziW{SwSp0!1VxadDAWr?d{5h@O{p8{uGQZXynqjWfbXJT zBc81VeKC-k*~y&CPTsy=UIA<)=OT?s5B-?(FotOdGY;k)%nJ;dMy&IjEXwC$81Asx zja6*+1l+<;Ywc1XT}o=X4Hx90(~2+ zdLTPenc(N^fvtBenHMHE?W#PCErDxgH!O(s2ddgiG?6FLG?JQw1s{u85*R#{+8ia? 
z_;4`j!^M)1Wn^gA;`3W{a;%hWms*k5QVV1)WzeFp;H=V=RU?r5&ssUwMiR_>S~h`< zT|p_ix=s{LldZ2{jnC=p*oy3TWIL+M@XyteihHObAV_`2}e9b&b@v+a1r z^+$-}Gg9|F(KsVfjPH!Y$n@q{d-HDb0b>0LeZS*gm*|5cpOhUTG=(finbZ8g@0YkOX8MzOx<31{f zKX9R()->f*<0CC*_2ne;`B|U^Ox<=*SCH7L@Ix5~;_i~Gp&S?}*(jsJkm>SRoy%C+ zFp#lfqGF?pEZNod^#ca`LI@tULpP3j%#fR!2widKTzMnzD~9sFzB)myI91Y+T8bg7 z=WomLy%gk_q>>Hodd*WUPe$rzn%{v@qqj|ijDY2G+8sT4g0=~Z*m*)*bk~g4+C3phL!f;T1VMwq zzEsj2Fu2w4)aDgKs;RqzVY)4qIJ+HnM0CkJL!05Dky>4d=vqf3(Pb!B+XH^b>-^r- z#hp;5emJJIEN)maFVMG9D2}8RaBbUdTR~zi6%KVQRqs@ozV13$^RbQsL*`7x=2*wZ zhl7j{7X=?hWXZ0rFYhw=CrXK^IplFHVusw<#E`3A{k$^}pG{Mo=XUX{8vl zcK)Uw-DpLQNh(>jt_uUr$L6mayjdgo<2d_g-6**LM6J} zj91%w2CKay@klpvPA63nEYUfI><_>4!*^>>U|TOp{?R=v#LCF93bmakfdq1xp_rl% Rp2gfalgBbe+df5+))#liYPbLZ literal 0 HcmV?d00001 diff --git a/plugins/PSSS$5.class b/plugins/PSSS$5.class new file mode 100644 index 0000000000000000000000000000000000000000..bf4a464eb15f5db70b885009bb84e574a635944b GIT binary patch literal 530 zcmZuu+e!m55IyO|?XGLB^?qq9QlZvMeGvpjkWx{oT3GQt-NrVhO=LIfcL`p=2S31% z5+_yk#XvGAhnX{Hl8?`~cL4iXb&MK(iGD!2y9o zsiZk#a4X-`#x+B#t~-KZvMH6gxEr=bbj8~P%ka=hty&_w(a}(J7)q69pWpKue=s$1 zFO;brk5U>IA6PLj(05TNPNWrZZQC7NL1GOR4s;4vV#GUA*ZELHJP5h^p6YjStkN5&L59F`IqQ<1K0(`sN9;JEHM(2I zt8PAnRbP>Kq#HS>ld1?->6}3JCx87X->p7@ZN4D+m+o01wuKDarnZwLkU$Po6jSuU R)0i1Cc`Q=2?NbzKeF0|AYOnwR literal 0 HcmV?d00001 diff --git a/plugins/PSSS$6.class b/plugins/PSSS$6.class new file mode 100644 index 0000000000000000000000000000000000000000..50706c2967cb3f58ce514a32f07c4259297844d4 GIT binary patch literal 531 zcmZuu+e!m55Iw2Y?XGKE?{_PrP%Bsw6+scCC<^rgE54`O*haRAY@*_430}YlKfsR? zCsp*tKr$zXnKNgSkI%Pv0Na@Nk;Q0$UW^48!nlVC4^tkd8TxEnn#DB&_46o-b_ncB zCG8%AU;d_+FB!5m-4qOkx>Vxyw$l*l1#cvd;jxukos0BJrybE`D3$Bi{Ek=oy{(En zp=@=(n{wdrjuQ(aeVfMOP&xtMwOw@;BzB
    sAEC&IRM)5A=NSl7@6r40$(y zO?PgzBF7}vQ+BRPDQ`+$OCOBoNfdMSJ=X8!n5ReX1vvsM7M)Fc`wVR#0kQpr7U=Gn zptAA|R(VC{k#1z2PO2ifPUisfKlw{P`F`aIY~=+#f9Zh}VjIY@O=?>pfeeZmq?n~2 R9>z$Q8Nwt**FH;;))zUTYhwTa literal 0 HcmV?d00001 diff --git a/plugins/PSSS$7.class b/plugins/PSSS$7.class new file mode 100644 index 0000000000000000000000000000000000000000..6735e7657fdf1cad65984c747edc255eb14973b2 GIT binary patch literal 524 zcmZuuYfAz_6g}gkb+=8;UiL603``;WR$v4nMuDld(f3s+UD27?9qo4oJ)jT$fPPeT zN2R_rFmulw?mhS1nXm7UPXH&#nTTM`LKN#3QrIw%Hn3%2n_)rqy&!)?px$yEr$XS= z6P`L_Fbgy4;GQ8;m0iw|sC$Ap#-ld(@7(rKGj2zDQna}LApH^VGGq(&fje=_?vpC> z2^Xq-K251u7p7`mcYZng)!qZl&(`l@?@;#5gPYAJ@e zp1&$bk5Z6hk_s2J>vUT=%Jo%E%;xzW89930D98#}GO7L1qlajlu!vnGv`2T%SjEyS zSn(a9XS$JRI;o0alg=f?=lT2de6#ohR(eDDFWu5Y>;^G*OKlS*5JD2m6eIMtD_ET} NDQr^I?IRRv{Q$ktXw?7! literal 0 HcmV?d00001 diff --git a/plugins/PSSS$8.class b/plugins/PSSS$8.class new file mode 100644 index 0000000000000000000000000000000000000000..682d69405fd4d67f11d445919b35888b16bfb6f8 GIT binary patch literal 525 zcmZuu+e!m55It$D+g;bX)@#*PkqWhf^|4S9q$mpY0xQ0!+o+K?k=;~3Nc3Is0zUWw zev~+AMPCdgb8?tDb0+!xdj9}W$BK&#W;~2w)NvS0aJuAc>kY$h5c7g;_2rx-8Lw`Gk P=>d~Rfue1np-Af+n^|fx literal 0 HcmV?d00001 diff --git a/plugins/PSSS$9.class b/plugins/PSSS$9.class new file mode 100644 index 0000000000000000000000000000000000000000..14e14ee8933e032f7cca652950bf55141a698c55 GIT binary patch literal 524 zcmZuuT}uK%6g}fd>u#Hxec6X8VPFc`V_^g#MuC~I(fg{CuINncj`q8PKA?wwKz}Ie zA-bbdFAdDxGlzT6J$L5)`}GaLA#x@nSg{brs)ZER45STg7}#W(Q#~)p9}uWF9mlB< zIQE36P8iI>l-j#th*V{VGbHMs;EmC+#rNUUDAM`?J~?X9 literal 0 HcmV?d00001 diff --git a/plugins/PSSS.class b/plugins/PSSS.class new file mode 100644 index 0000000000000000000000000000000000000000..54b4ad51b10d2df4b572a8c7957e3399e5d494e0 GIT binary patch literal 19372 zcmcIs34ByV(yyvFnKv_e6GAv6%Av>+jvyW=Q6NCVA(u%211b*5Kt^(zOt@4I4-U`O zb-fkERXp%QGaz`N>#@4(daSx$>w2)>y6t1Ev0ddD0|z|ZgZ`x5D{uCA`GuCA`G 
z?vC%-y*nQvqN9ybA@0uW0=z!R-FQO?$wr0F5Ap?kVTkhiq7YxqmuT@T0{z*lMY)d9XHz}G73I+L$g)D44_iBJ&%OmEOwD9K;zrk;Y_$_`rkA&Y1 z@Lv`EUN*naA83;g^T^L$qDxsh9DXLYV-)%EW+_XpZXA5Iv&RjWRwUlT4W$ zqQ|92K&F^7HAGKJ4^xg0(bK%nKklKvw$=K_y|Tdt`rA3_@TZwFUDYuoATt9p3)EZf z%7B~@kg|Z3n^F;?H_#P^Fk7Xcqq5F5(Qi9M1mTV_aaI2kai; zU;=WoDW`_-}HCDQB8;7LzrlzCPMCp*GUo9BpRGo=`l!q^x+s3}j4s(@u*ljTF{K z>K7N5$D3mHi;I}T6B_E9(ONJ%s68XQYC^O= zt_m~l`lc032e;we2<;SB#Ok7|rm3Y((J0uNr%j72jMhT>t{Ku#wb395xCA3oAc#He z$P{>Zk{OUL+Z67IB<{#0bW{==m4uE?LdPg%53Rj$o-$klyF(jicW6U)hc<)`J2k5I zFqhDLc~t~cm+N4fR>1md9h5^A_w-V`u3sJX)xGyr3RGNW!e;}jy0tB zd_bn2%d|decDW9DhFCT#U93P;w7xppq_f?(%^a(lt09-U6qvwZb7N^!L!E8N-81_x zZ;a`J)me6lz|sZPzC+E8ugtINKjehLUIgS@jw!1!f`P}F3nhxM14|X_to-w)aN!(e`9 zr?7DwX? z8k?J&7es8&1HU;4tgn%4q2pAn;y4sbu9NFcxxtc6a-$_T$!4rZ-N_fPSWp+Ks)6T0 zu#zcIv@urIY_Az?J`)>SFv*tOEWgcQ@8NHeTTR(w$!&7G_Hl>BC-6)Qu?!^bl9Hxm zc~!JgSDGbv%2reEvgB^L$CP_5xlguP+@BA&#WKH@h zU4k1dK8Sl`wX5=MWeiGA=FqGw8j(2%RSiwiLIj!d=<;}f zH@BB;Qjv%OJH%V^qP)b^Lv`C!&sEut!oOQ~z`dZ#j=wE=Q~87(+#)-|&#)l-JTNV? 
zqM-$D=oER&lDFj@Q~qMfyYg2{-jfe3`B465%14%bEPu1)6ZzDX&n@{|!%aYhYJG+O7xdM;&7-CZD%{}GtdaVyG(F` z*}jvbvBfp9|dh2*_L71iZa6P;w!WT^Wr4S$T2!$5{z7Q z;P`&kOXnox4m5!Cj2}B4B3neXPWSs!h@N?>9DTtSj6BQeqSO8_qbr72TpL?lUk5Y8 zR2tnZqq|&haV@;O(L?w3&bB4kVcz6Oa}7d0jJ~XR#-x%3rDZdxTSiYKA5-sODzGlH z$}37{m4oq&+0z%4Po7x;xV!F$g|39ojOjF6+{mZH*qqw5V^q*#H?y?Vt96uXH!=<| zdRfMS#z6t2H@vse$1?gF{Vd~PZcsmtRf!HW{ZC7*&~}p0zgv+Og5&U;ZW8T6Qqwr3 zokO#Of?3$y`WG)p@Qk?B?*VWx?aoha$#K>&5hiF8JCe(kwjK3g?~vf$R@emE7<_tk z(AMD_K)7#vcWg!J!J>OhqJclW8y%dpP-Vd~#TmX7Fkc<9_|ibKoK z_h26%Le094+;fWs29s(V7Dj64L~x7=LR}N1OJh|K=ID+_)25SZo3_xt6^T_jEnTha zwCOpbL#=v&9%yq_f6>J_^3I7h$Lxb&ugsCy^We3>B0vM%Szr+inmADv?29gq)OLJu zo7Z7r9*89`t#AmN;;ldXdTR>8jm7q544Ae=XD))B~cN;V@T{fp0 zqUlslYglZLuF!Tq$vUi0Xw;lrTsC9Mj7i>sz5^n(x1)$Q zRc_A)7&1+8O{sksMQLhvS42=O zVL}sX9mUNn>apn@*MU)cCSprh)lgs6(u6x2>}l9KYNI$?*G5ZQ;w??lqL9(wIJC{m zXn!d5CO>T;E(({f#sCZ?(%4vwfnd$5AN9_TcK?f7>g}_5;rLjo3u0^oQnR-|0pTBU z3sS9@7*o4V90e?YgjK;k@(zLyao__oqa_T zPASmdTaCsR$PmCu-!+UG9f;F@0)LUx3J?Wldb0HM3=Y-JH4V$sPBJ~yMM&appB?lp z1Jiy|)L&J^;%XqebyVP71!&L|{HPz*YCBfO zW}V5=4X5o&n^PF1j=!IQhytp2g?D1E`gjCq>Ly2aP&%O@Q?E2C2RA)Zk84ARvR`_D zkQQtAPXGS)P_T+K!ny zliyK?8QjomH>cC`=;AAW+Hi)=z606jWDJ$gsC819%nww<11>TIIFuI&8m%5MK$b2OxO|9EGwB- zTv393Y-q-G?w=V{p#z-N6kQaBudcQSI5%?+e}==pC$o(%(bxD6b){=2L*{g=Y0u=) zbX}#BW*86FAO>bvn+7i4vRuoVoH@Ak_BOnqnakjh(Tw}$qiM@x>i=Du_KNZAfYm~W zs~`F$D$RpuncUD6Td9Wbt+6&&-yLX59o_ii@{(y&W|X8Nf{erBFHon(uX7Ne#j27b z=!oz-!8F)ab^7+`Y$|PfgGfX7Kl-*)pu2!`yX`sn^E0L+D@$9Jwxefo_QDs@JnVdP*GMq1#^JlvZ_Oq z+~?v-c3QMwgK4X>Ky9pkNxA)&ls{<7{C!PCWu|fej34=BWA+RpbNU@gY$NevM@+!< zOgYjGIi&zM0Q2TCov7{bgR^HOo$ZqU4ex#G|Cf54Q~$5iyKi%9JGfk&c06mHg7?+s z8f80wBoIKAd%WuEorBVK?H-lvTRlW<9*ItoGxuJIf^()uY^i1K%%T_Tq|KrmyXrFP zhUL=soq`e2W`w`qthZ-@V-3XDo!k!9dPBN>hm>&cIQQ5WfVyToy0g$zxoHfujNu0E zRL{nhKdmdEhqj7yw$-8zQhoKQ+VeO&ebKWf(?U7we3{rek% zX&i;4MY}YFA0+V23A1iz$DgMi2BYnGkk&B@O3 zo}@JT-eu=_3$t^)li4{Q$m|@CId+b>LOaJRqn+dJ($4YhV&{0Fuyedm*g0M&>>O_s 
zc8*60JI9lRo#R2m&hZ>!=XeCMbG(DtIbK2R91kIOj<+E@$BT=d>>N)zc8(_-JI8B`o#Xw|&hg@E=Xe!E-W_v?$7rOdq7)?| zyvfj!1>1=W@}!lFf&}>r9-ypkv2?{u+V5$__M%f9n?n6W72-4nUB0Y$F)DwTS z*PC*v4;@H-set;?5p*!97IKx-C2ycCGC=)ZL1*LR1ceLoab%F)Ha6aYMhy3kY_K*wXG)9^c;7C;M4G?UJvS@xJN zBWtEf^*@2wFu9w_i}4=`P<+BK$2Yd;)2MuY5grQVREy%ob8JTn_k+32YfW;Ty3Itj}*Lj7qW46X`BR!zlJ3+mI*=@L4D>TER! z$#>8pj@9^hNm6Vbow|>9qiX^V?_;hKVi=t&VNgM@Ryw|**AAMtgQoAqRFVomxBU`P zrakC@q|679ADE?Z9m{}UiJIkD9xJd|&amY(N#@x$P5O5aZnp+;HS9Pm zNB^r)#LJpXS_AofpcptbZziZ`JxC`QG`C;~7H}E#U+(EPIHO=#f@V8PJqRAI!nIdN z!1E|f>!8qIq2^-vzDr=cmwE!8YEnfO$nobsPOsheU?xIXm(LgMqB)h@X>Kc>2(S|1 zyjGeI@FajIx6&y9^8qes#b9>PsQ@Fb)EVGHfK{zj4KNCDQ7fFf6z+M1d zW-)-LfnZ51g;Lns6j%qa9s~`oG!9@Rz|&jl*c3Q61&#se@@-0i%_%Sr(3PwZpet`n z3S62Z7?}c>r3jX%z!fQil>l8KM+0h&fi5BRbsEcI6;@7 zn3ead1YHWCI`6Us{Te{OU37V+&zD>6+fKhp&=n5qN^Nshg04ofN8U9Fx)wmMyz2n$ zqU$SBIAuHCFims#j7>w}p>IskO@KG2feE@96x9j(?KZk4LARpNFL$rx?oH5^1l@*W zXX(sOc3%Bcg6=>mOT>W_bf?oIjajaxzX{q3!d|(1J99jZPmd(x4zx84>_S`7LxS!~ z7Y*`&;oT4vJRsIR$UK3xU9Mk$(bVl0)jgy$J)ROUcdya0gdoLO(nujS31Vz2aa-I&J^dw(T;-=XUiWcubl2a+s zHOw#{roU7g+JAYa#`R0fD|J6yUS8=(wxT>i@1_wIIz%fSqGb-zNSi2axabWfci~}N zy6`X}+Y{)*a~6Aw1iDcYXl0)y7`&2r!|sR+#;+va$Xya=GX9E+(a?{hhXm;gX~w^J z-g^mpAHbQOJ)G&8+*yu+ebANmbfu3vC5>~HN4d%~#nlewCv7R$c$8~At6STVah~mw zo$Z<8IUSi|QQl{cDHiRb&(##aNSWdpFvZb2duQm$XH&i}-{+X47B^$i8D-F|O1G8O z>BeTaP=%yZoYfJsz7kK7*VMpdUD(_pzJV))K@6`uYrf^>ol(+Jv{=csh^ z%u1Q(d5+<&cMNZ%V|W+XhL=XQ&Y`-{p*r88+Tc)iqzBjZFhDgK46Uu{L02vf)TA^} z(7tJ)2Bq;7kY=7KKG1~Hcsgijo}-|tw&r79 zMHKmij=`I5zzAnlGYEV%L#U+LI7EK0CgVa+F?XeSV~^F|Hs^NLUnhn|TZF~`b;4rs zOBK)u3b!~lUgukqwBtPG|LAO6v+S7O9bT;tPcPtV^8-4(Kz_g-9*Vk7>M|7!%q;0F zi)`t)IFV(lk7d^r|=u!S1?dBKgF@A#{ z=MU+3{1rXH-{XH#&Z4Izm!6hh^m`dV&&rYXqKu)JWfJ{K=FzJXqqn4q-jOrtU0F|m zl}qS7xt!jYtLOvSL?6n{v`22GzsX(nk=#!o%cJy(JPzE8^r^f}pUJzR*@OSj_;dO~ z{!U-Yf9Na2q`w;>`qJo0UmLyX3!@+X!{|>R7z61&qk#Tp6w)`w;q-6gDEihoj=nP{ z(tnIu^u19I_$2zlsG|QGr_+zdD%xwT1-zbqGA_pdtalx=aXX8#74SW1zYV-vIm>vQ zP2&X)7_V{Ac#A{E9?mvCXUq7W!^V$n`G|u)gFE>Goa@Wu&c1xk^9|%Kz5?XKfFHr# 
zeaCT6Un%GNW^*6ksod9B!~J}x^TEC~Jjize5B6Qo1-_ekgzs)X%(tCK`kvyWeShN7 zzR&qs-#2`mZ!eF{GI?B9E|1R|#1pfQn zVb*tCmGuM4KXJ9+&r!d{i~K!#vA-9}eYnOyfMfole477gUg9r8c?{3@7jvzDB3Js$ zxz0a_=lUzT(Z7nD{5Nyle;c>>xAIc|J-p1H;N|{@d4>NOUg>`h@XLIL{|!FV|5rZC z|2ePne+Bp(UhV&{zSr?uvlE|f<^k@>=a_x?Typ@QXO6`8362Cjme-r5yuqBp8_j9F z)SSt2vy3k^7x2Yq4PRoO&X<}S`Pb%Ue7SioUt!+QSD6p-)#j6Yt@$QjZ+^fxn4j~F z0p`tt5Z@fg=i37V_>Mp!-x)ZHw+4#%uD}?4P@@CN$W!;b|%MxUSY6T!~-qQoeCOJWQ^6P&=$221%5!5REqa5g_5jPMJ=YQU%Q zAA?Q&VsHgMC2=0V9NYl-Vtyrf1->(JJ--^fonH%X1-y-45ANhY2eD#<&-0tX7XiP? zZw24uw}T(@JHd~@|5Nn!1-?D;J-;6^@Z|}M_k;%EqZ5Vvap-7#bfOI3nwZaDgsS?0+dT_ic#lcZDjRFr2*ZuT7MoPCny zW!FfT>?J7Ii;>+TU9*=9XP+zGve)xZ+2>2o?0e;a?APSL?04j#?DwU2_8#ez{e|?+ z{+IN#NDj6P>1ze$5UaEFw|dHv+H?$PlYihFYh} zFsoLETMd9$NTIbxMp);_Vb(h7ZEchTtqbJ{>uNd5x>ZJ5_sG#!LXNQ>k zj0 z_z_YZK3t9ukCSQPDKb4gOJ;;;%gpd8GAq1TP6#(kS$L(Chu2C)c!SIiUm-Db48MJmHvWnTCJnIC>wP6|IECx@SvQ^GIFg7E8dYWOb_34bUH!+)2m@V})x z{G&v3NEYQ7GC#*Jl{rC)<>bkdoIX;UGf3)k3Z*{h2x-U}CyhC!a(YgMH08`ic^>Lc zmUvFJwB$6((wtSYEN3ms=gP{QOXbX*Yvrt*n`BkaZL&J&9$AyK4REWRo%4{Klk=3E zoAV6d=VTqOeDQxix);qeaM7$6{oM-ejg;AUqxaCll6w9KDUocXu5!M8ecXd&7M?fs zc|z=Gjuty-@LtNxT;9i{kzTXs6eQT#Lc!?;xP@}>s(rZb_Z|Sc;~JghVth#AQZlh? z1>{#0mdm_*>Peu~d+Oy$#^-^pGM>GQeU;leYx>YGtkK}Q1e?xHIS?A9Y)D50fjB=M z5dz}EB%+`TXAezq+gC*S4dl53-$A*O3b2zF$<;JXuAvIK79y?qS&`04@^?jAosLkE z)}|v=q;rx8h!pNfq#Gd8CWv$+M7jwgZH7oUL!{qAq?JjLE_bzbYMPdEIH@01SYfg& zSGtku2$gPGIzpvenM6RkPMOl#k7Tk1(%nW~F!Vg?*2d5up*(qvx*`1PF1x9hJnk9# zY&G;;(LbAUS5S$&f_e^p3M(|hT@u{&8p z<`=nn1^ + +
    + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    diff --git a/plugins/PSSS.java b/plugins/PSSS.java new file mode 100755 index 0000000..df4c4ba --- /dev/null +++ b/plugins/PSSS.java @@ -0,0 +1,760 @@ +import ch.psi.pshell.bs.PipelineServer; +import ch.psi.pshell.core.Context; +import ch.psi.pshell.epics.ChannelDouble; +import ch.psi.pshell.imaging.RendererMode; +import ch.psi.pshell.plot.Plot; +import ch.psi.pshell.plot.LinePlotJFree; +import ch.psi.pshell.ui.Panel; +import ch.psi.utils.Convert; +import ch.psi.utils.State; +import ch.psi.utils.Str; +import java.awt.CardLayout; +import java.util.HashMap; +import java.util.Map; +import java.util.logging.Level; +import java.util.logging.Logger; + + + +/** + * + */ +public class PSSS extends Panel { + + final String CAMERA_NAME = "SARFE10-PSSS059"; + PipelineServer pipelineServer; + volatile boolean updatingPlot; + volatile boolean updatingImage; + + public PSSS() { + initComponents(); + plot.getAxis(Plot.AxisId.X).setLabel(null); + plot.getAxis(Plot.AxisId.Y).setLabel(null); + renderer.setMode(RendererMode.Stretch); + } + + //Overridable callbacks + @Override + public void onInitialize(int runCount) { + startTimer(1000); + + try { + setGlobalVar("PSSS_PLOT", plot); + setGlobalVar("HISTORY_PLOT", history); + setGlobalVar("PSSS_RENDERER", renderer); + + pipelineServer = (PipelineServer) getDevice("cam_server"); + ((LinePlotJFree)histogramGeneratorPanelCenter.getPlot()).setLegendVisible(true); + ((LinePlotJFree)histogramGeneratorFwhm.getPlot()).setLegendVisible(true); + histogramGeneratorPanelCenter.getPlot().getAxis(Plot.AxisId.Y).setRange(0, 100); + histogramGeneratorFwhm.getPlot().getAxis(Plot.AxisId.Y).setRange(0, 100); + //setImageEnabled(true); + tabStateChanged(null); + + spinnerAverage.setValue(( (Number) eval("get_psss_averaging()", true)).intValue()); + + try{ + Double energy = (((ChannelDouble)getDevice("energy_machine")).take(-1)); + energy=Convert.roundDouble(energy, 0); + spFromEn.setValue(energy-150); + spToEn.setValue(energy+150); + } catch 
(Exception ex) { + getLogger().log(Level.WARNING, null, ex); + } + + + } catch (Exception ex) { + getLogger().log(Level.WARNING, null, ex); + } + } + + @Override + public void onStateChange(State state, State former) { + this.btStartCr.setEnabled(state == State.Ready); + this.btStartEn.setEnabled(state == State.Ready); + this.btStartCam.setEnabled(state == State.Ready); + radioEnergyScan.setEnabled(state == State.Ready); + radioCrystalScan.setEnabled(state == State.Ready); + radioCameraScan.setEnabled(state == State.Ready); + this.btAbort.setEnabled(state.isRunning()); + } + + @Override + public void onExecutedFile(String fileName, Object result) { + } + + @Override + public void onTimer() { + try { + if (!updatingPlot){ + updatingPlot = true; + //evalAsync("plot_psss(PSSS_PLOT, HISTORY_PLOT, " + spinnerAverage.getValue() + ")", true).handle((ret,ex)->{ + evalAsync("plot_psss(PSSS_PLOT, HISTORY_PLOT)", true).handle((ret,ex)->{ + updatingPlot = false; + return ret; + }); + } + if (isImageEnabled()){ + if (!updatingImage){ + updatingImage = true; + evalAsync("update_psss_image(PSSS_RENDERER)", true).handle((ret,ex)->{ + updatingImage = false; + return ret; + }); + } + } + } catch (Exception ex) { + getLogger().log(Level.WARNING, null, ex); + } + } + + //Callback to perform update - in event thread + @Override + protected void doUpdate() { + } + + void setImageEnabled(boolean enabled){ + try{ + imageEnabled = enabled; + evalAsync("enable_psss_image(" + Str.capitalizeFirst(String.valueOf(enabled)) + ", PSSS_RENDERER)", true); + } catch (Exception ex) { + getLogger().log(Level.WARNING, null, ex); + } + } + volatile boolean imageEnabled; + + boolean isImageEnabled(){ + return imageEnabled; + } + + + void runScan(String name, Map args){ + try { + args.put("PLOT", plotScan); + this.runAsync(name, args).handle((ret,ex)->{ + if (ex!=null){ + if (!getContext().isAborted()){ + showException((Exception)ex); + } + } + return ret; + }); + } catch (Context.ContextStateException 
ex) { + showException(ex); + } + } + + @SuppressWarnings("unchecked") + // //GEN-BEGIN:initComponents + private void initComponents() { + + buttonGroup1 = new javax.swing.ButtonGroup(); + tab = new javax.swing.JTabbedPane(); + jPanel1 = new javax.swing.JPanel(); + plot = new ch.psi.pshell.plot.LinePlotJFree(); + history = new ch.psi.pshell.plot.TimePlotJFree(); + jLabel1 = new javax.swing.JLabel(); + spinnerAverage = new javax.swing.JSpinner(); + histogramGeneratorPanelCenter = new ch.psi.pshell.swing.HistogramGeneratorPanel(); + histogramGeneratorFwhm = new ch.psi.pshell.swing.HistogramGeneratorPanel(); + jPanel4 = new javax.swing.JPanel(); + renderer = new ch.psi.pshell.imaging.Renderer(); + jPanel3 = new javax.swing.JPanel(); + jPanel2 = new javax.swing.JPanel(); + btAbort = new javax.swing.JButton(); + radioEnergyScan = new javax.swing.JRadioButton(); + radioCameraScan = new javax.swing.JRadioButton(); + radioCrystalScan = new javax.swing.JRadioButton(); + jPanel5 = new javax.swing.JPanel(); + panelScan = new javax.swing.JPanel(); + panelEnergyScan = new javax.swing.JPanel(); + spFromEn = new javax.swing.JSpinner(); + jLabel6 = new javax.swing.JLabel(); + jLabel7 = new javax.swing.JLabel(); + spToEn = new javax.swing.JSpinner(); + spStepsEn = new javax.swing.JSpinner(); + jLabel8 = new javax.swing.JLabel(); + jLabel9 = new javax.swing.JLabel(); + spShotsEn = new javax.swing.JSpinner(); + btStartEn = new javax.swing.JButton(); + panelCameraScan = new javax.swing.JPanel(); + jLabel10 = new javax.swing.JLabel(); + spFromCam = new javax.swing.JSpinner(); + jLabel11 = new javax.swing.JLabel(); + spToCam = new javax.swing.JSpinner(); + jLabel12 = new javax.swing.JLabel(); + spStepsCam = new javax.swing.JSpinner(); + jLabel13 = new javax.swing.JLabel(); + spShotsCam = new javax.swing.JSpinner(); + btStartCam = new javax.swing.JButton(); + panelCrystalScan = new javax.swing.JPanel(); + jLabel2 = new javax.swing.JLabel(); + spFromCr = new javax.swing.JSpinner(); + 
jLabel3 = new javax.swing.JLabel(); + spToCr = new javax.swing.JSpinner(); + jLabel4 = new javax.swing.JLabel(); + spStepsCr = new javax.swing.JSpinner(); + jLabel5 = new javax.swing.JLabel(); + spShotsCr = new javax.swing.JSpinner(); + btStartCr = new javax.swing.JButton(); + plotScan = new ch.psi.pshell.plot.LinePlotJFree(); + + tab.addChangeListener(new javax.swing.event.ChangeListener() { + public void stateChanged(javax.swing.event.ChangeEvent evt) { + tabStateChanged(evt); + } + }); + + plot.setTitle(""); + + jLabel1.setText("Average:"); + + spinnerAverage.setModel(new javax.swing.SpinnerNumberModel(1, 1, 100, 1)); + spinnerAverage.addChangeListener(new javax.swing.event.ChangeListener() { + public void stateChanged(javax.swing.event.ChangeEvent evt) { + spinnerAverageStateChanged(evt); + } + }); + + histogramGeneratorPanelCenter.setDeviceName("histo_center"); + + histogramGeneratorFwhm.setDeviceName("histo_fwhm"); + + javax.swing.GroupLayout jPanel1Layout = new javax.swing.GroupLayout(jPanel1); + jPanel1.setLayout(jPanel1Layout); + jPanel1Layout.setHorizontalGroup( + jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) + .addGroup(jPanel1Layout.createSequentialGroup() + .addGroup(jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) + .addGroup(jPanel1Layout.createSequentialGroup() + .addContainerGap() + .addComponent(jLabel1) + .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) + .addComponent(spinnerAverage, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)) + .addComponent(history, javax.swing.GroupLayout.DEFAULT_SIZE, 453, Short.MAX_VALUE) + .addComponent(plot, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)) + .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) + .addGroup(jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) + 
.addGroup(jPanel1Layout.createSequentialGroup() + .addGap(12, 12, 12) + .addComponent(histogramGeneratorFwhm, javax.swing.GroupLayout.PREFERRED_SIZE, 372, javax.swing.GroupLayout.PREFERRED_SIZE)) + .addComponent(histogramGeneratorPanelCenter, javax.swing.GroupLayout.PREFERRED_SIZE, 372, javax.swing.GroupLayout.PREFERRED_SIZE)) + .addGap(0, 0, 0)) + ); + jPanel1Layout.setVerticalGroup( + jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) + .addGroup(jPanel1Layout.createSequentialGroup() + .addContainerGap() + .addGroup(jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE) + .addComponent(jLabel1) + .addComponent(spinnerAverage, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)) + .addGap(6, 6, 6) + .addGroup(jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) + .addComponent(histogramGeneratorPanelCenter, javax.swing.GroupLayout.DEFAULT_SIZE, 194, Short.MAX_VALUE) + .addComponent(plot, javax.swing.GroupLayout.PREFERRED_SIZE, 0, Short.MAX_VALUE)) + .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) + .addGroup(jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) + .addComponent(history, javax.swing.GroupLayout.DEFAULT_SIZE, 194, Short.MAX_VALUE) + .addComponent(histogramGeneratorFwhm, javax.swing.GroupLayout.PREFERRED_SIZE, 0, Short.MAX_VALUE))) + ); + + tab.addTab("Spectrum", jPanel1); + + javax.swing.GroupLayout jPanel4Layout = new javax.swing.GroupLayout(jPanel4); + jPanel4.setLayout(jPanel4Layout); + jPanel4Layout.setHorizontalGroup( + jPanel4Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) + .addGap(0, 843, Short.MAX_VALUE) + .addGroup(jPanel4Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) + .addComponent(renderer, javax.swing.GroupLayout.DEFAULT_SIZE, 843, Short.MAX_VALUE)) + ); + jPanel4Layout.setVerticalGroup( + 
jPanel4Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) + .addGap(0, 432, Short.MAX_VALUE) + .addGroup(jPanel4Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) + .addComponent(renderer, javax.swing.GroupLayout.DEFAULT_SIZE, 432, Short.MAX_VALUE)) + ); + + tab.addTab("Camera", jPanel4); + + btAbort.setText("Abort"); + btAbort.addActionListener(new java.awt.event.ActionListener() { + public void actionPerformed(java.awt.event.ActionEvent evt) { + btAbortActionPerformed(evt); + } + }); + + buttonGroup1.add(radioEnergyScan); + radioEnergyScan.setSelected(true); + radioEnergyScan.setText("Energy Scan"); + radioEnergyScan.addActionListener(new java.awt.event.ActionListener() { + public void actionPerformed(java.awt.event.ActionEvent evt) { + radioEnergyScanActionPerformed(evt); + } + }); + + buttonGroup1.add(radioCameraScan); + radioCameraScan.setText("Camera Scan"); + radioCameraScan.addActionListener(new java.awt.event.ActionListener() { + public void actionPerformed(java.awt.event.ActionEvent evt) { + radioCameraScanActionPerformed(evt); + } + }); + + buttonGroup1.add(radioCrystalScan); + radioCrystalScan.setText("Crystal Height Scan"); + radioCrystalScan.addActionListener(new java.awt.event.ActionListener() { + public void actionPerformed(java.awt.event.ActionEvent evt) { + radioCrystalScanActionPerformed(evt); + } + }); + + panelScan.setLayout(new java.awt.CardLayout()); + + spFromEn.setModel(new javax.swing.SpinnerNumberModel(7200.0d, 1.0d, 20000.0d, 10.0d)); + + jLabel6.setHorizontalAlignment(javax.swing.SwingConstants.TRAILING); + jLabel6.setText("Range From:"); + + jLabel7.setHorizontalAlignment(javax.swing.SwingConstants.TRAILING); + jLabel7.setText("Range To:"); + + spToEn.setModel(new javax.swing.SpinnerNumberModel(7340.0d, 1.0d, 20000.0d, 10.0d)); + + spStepsEn.setModel(new javax.swing.SpinnerNumberModel(20, 1, 1000, 1)); + + jLabel8.setHorizontalAlignment(javax.swing.SwingConstants.TRAILING); + 
jLabel8.setText("Steps:"); + + jLabel9.setHorizontalAlignment(javax.swing.SwingConstants.TRAILING); + jLabel9.setText("Num Shots:"); + + spShotsEn.setModel(new javax.swing.SpinnerNumberModel(100, 1, 1000, 1)); + + btStartEn.setText("Start Energy Scan"); + btStartEn.addActionListener(new java.awt.event.ActionListener() { + public void actionPerformed(java.awt.event.ActionEvent evt) { + btStartEnActionPerformed(evt); + } + }); + + javax.swing.GroupLayout panelEnergyScanLayout = new javax.swing.GroupLayout(panelEnergyScan); + panelEnergyScan.setLayout(panelEnergyScanLayout); + panelEnergyScanLayout.setHorizontalGroup( + panelEnergyScanLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) + .addGroup(panelEnergyScanLayout.createSequentialGroup() + .addContainerGap(javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE) + .addGroup(panelEnergyScanLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) + .addGroup(panelEnergyScanLayout.createSequentialGroup() + .addComponent(jLabel7) + .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) + .addComponent(spToEn, javax.swing.GroupLayout.PREFERRED_SIZE, 80, javax.swing.GroupLayout.PREFERRED_SIZE)) + .addGroup(panelEnergyScanLayout.createSequentialGroup() + .addComponent(jLabel6) + .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) + .addComponent(spFromEn, javax.swing.GroupLayout.PREFERRED_SIZE, 80, javax.swing.GroupLayout.PREFERRED_SIZE)) + .addGroup(panelEnergyScanLayout.createSequentialGroup() + .addComponent(jLabel8) + .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) + .addComponent(spStepsEn, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)) + .addGroup(panelEnergyScanLayout.createSequentialGroup() + .addComponent(jLabel9) + .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) + .addComponent(spShotsEn, javax.swing.GroupLayout.PREFERRED_SIZE, 
javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE))) + .addContainerGap()) + .addComponent(btStartEn, javax.swing.GroupLayout.Alignment.TRAILING, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE) + ); + + panelEnergyScanLayout.linkSize(javax.swing.SwingConstants.HORIZONTAL, new java.awt.Component[] {jLabel6, jLabel7, jLabel8, jLabel9}); + + panelEnergyScanLayout.linkSize(javax.swing.SwingConstants.HORIZONTAL, new java.awt.Component[] {spFromEn, spShotsEn, spStepsEn, spToEn}); + + panelEnergyScanLayout.setVerticalGroup( + panelEnergyScanLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) + .addGroup(panelEnergyScanLayout.createSequentialGroup() + .addContainerGap() + .addGroup(panelEnergyScanLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE) + .addComponent(jLabel6) + .addComponent(spFromEn, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)) + .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) + .addGroup(panelEnergyScanLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE) + .addComponent(jLabel7) + .addComponent(spToEn, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)) + .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) + .addGroup(panelEnergyScanLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE) + .addComponent(jLabel8) + .addComponent(spStepsEn, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)) + .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) + .addGroup(panelEnergyScanLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE) + .addComponent(jLabel9) + .addComponent(spShotsEn, javax.swing.GroupLayout.PREFERRED_SIZE, 
javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)) + .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE) + .addComponent(btStartEn) + .addContainerGap()) + ); + + panelScan.add(panelEnergyScan, "energy"); + + jLabel10.setHorizontalAlignment(javax.swing.SwingConstants.TRAILING); + jLabel10.setText("Range From:"); + + spFromCam.setModel(new javax.swing.SpinnerNumberModel(-17.0d, -30.0d, 30.0d, 1.0d)); + + jLabel11.setHorizontalAlignment(javax.swing.SwingConstants.TRAILING); + jLabel11.setText("Range To:"); + + spToCam.setModel(new javax.swing.SpinnerNumberModel(-11.0d, -30.0d, 30.0d, 1.0d)); + + jLabel12.setHorizontalAlignment(javax.swing.SwingConstants.TRAILING); + jLabel12.setText("Steps:"); + + spStepsCam.setModel(new javax.swing.SpinnerNumberModel(20, 1, 1000, 1)); + + jLabel13.setHorizontalAlignment(javax.swing.SwingConstants.TRAILING); + jLabel13.setText("Num Shots:"); + + spShotsCam.setModel(new javax.swing.SpinnerNumberModel(100, 1, 1000, 1)); + + btStartCam.setText("Start Camera Scan"); + btStartCam.addActionListener(new java.awt.event.ActionListener() { + public void actionPerformed(java.awt.event.ActionEvent evt) { + btStartCamActionPerformed(evt); + } + }); + + javax.swing.GroupLayout panelCameraScanLayout = new javax.swing.GroupLayout(panelCameraScan); + panelCameraScan.setLayout(panelCameraScanLayout); + panelCameraScanLayout.setHorizontalGroup( + panelCameraScanLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) + .addGroup(panelCameraScanLayout.createSequentialGroup() + .addContainerGap() + .addGroup(panelCameraScanLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) + .addGroup(panelCameraScanLayout.createSequentialGroup() + .addComponent(jLabel11) + .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) + .addComponent(spToCam, javax.swing.GroupLayout.PREFERRED_SIZE, 80, 
javax.swing.GroupLayout.PREFERRED_SIZE)) + .addGroup(panelCameraScanLayout.createSequentialGroup() + .addComponent(jLabel10) + .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) + .addComponent(spFromCam, javax.swing.GroupLayout.PREFERRED_SIZE, 80, javax.swing.GroupLayout.PREFERRED_SIZE)) + .addGroup(panelCameraScanLayout.createSequentialGroup() + .addComponent(jLabel12) + .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) + .addComponent(spStepsCam, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)) + .addGroup(panelCameraScanLayout.createSequentialGroup() + .addComponent(jLabel13) + .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) + .addComponent(spShotsCam, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE))) + .addContainerGap(javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)) + .addComponent(btStartCam, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE) + ); + + panelCameraScanLayout.linkSize(javax.swing.SwingConstants.HORIZONTAL, new java.awt.Component[] {jLabel10, jLabel11, jLabel12, jLabel13}); + + panelCameraScanLayout.linkSize(javax.swing.SwingConstants.HORIZONTAL, new java.awt.Component[] {spFromCam, spShotsCam, spStepsCam, spToCam}); + + panelCameraScanLayout.setVerticalGroup( + panelCameraScanLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) + .addGroup(panelCameraScanLayout.createSequentialGroup() + .addContainerGap() + .addGroup(panelCameraScanLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE) + .addComponent(jLabel10) + .addComponent(spFromCam, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)) + .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) + 
.addGroup(panelCameraScanLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE) + .addComponent(jLabel11) + .addComponent(spToCam, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)) + .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) + .addGroup(panelCameraScanLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE) + .addComponent(jLabel12) + .addComponent(spStepsCam, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)) + .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) + .addGroup(panelCameraScanLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE) + .addComponent(jLabel13) + .addComponent(spShotsCam, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)) + .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE) + .addComponent(btStartCam) + .addContainerGap()) + ); + + panelScan.add(panelCameraScan, "camera"); + + jLabel2.setHorizontalAlignment(javax.swing.SwingConstants.TRAILING); + jLabel2.setText("Range From:"); + + spFromCr.setModel(new javax.swing.SpinnerNumberModel(-0.8d, -10.0d, 10.0d, 0.1d)); + + jLabel3.setHorizontalAlignment(javax.swing.SwingConstants.TRAILING); + jLabel3.setText("Range To:"); + + spToCr.setModel(new javax.swing.SpinnerNumberModel(-1.7d, -10.0d, 10.0d, 0.1d)); + + jLabel4.setHorizontalAlignment(javax.swing.SwingConstants.TRAILING); + jLabel4.setText("Steps:"); + + spStepsCr.setModel(new javax.swing.SpinnerNumberModel(20, 1, 1000, 1)); + + jLabel5.setHorizontalAlignment(javax.swing.SwingConstants.TRAILING); + jLabel5.setText("Num Shots:"); + + spShotsCr.setModel(new javax.swing.SpinnerNumberModel(100, 1, 1000, 1)); + + btStartCr.setText("Start Crystal Height Scan"); + 
btStartCr.addActionListener(new java.awt.event.ActionListener() { + public void actionPerformed(java.awt.event.ActionEvent evt) { + btStartCrActionPerformed(evt); + } + }); + + javax.swing.GroupLayout panelCrystalScanLayout = new javax.swing.GroupLayout(panelCrystalScan); + panelCrystalScan.setLayout(panelCrystalScanLayout); + panelCrystalScanLayout.setHorizontalGroup( + panelCrystalScanLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) + .addGroup(panelCrystalScanLayout.createSequentialGroup() + .addContainerGap() + .addGroup(panelCrystalScanLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) + .addGroup(panelCrystalScanLayout.createSequentialGroup() + .addComponent(jLabel3) + .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) + .addComponent(spToCr, javax.swing.GroupLayout.PREFERRED_SIZE, 80, javax.swing.GroupLayout.PREFERRED_SIZE)) + .addGroup(panelCrystalScanLayout.createSequentialGroup() + .addComponent(jLabel2) + .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) + .addComponent(spFromCr, javax.swing.GroupLayout.PREFERRED_SIZE, 80, javax.swing.GroupLayout.PREFERRED_SIZE)) + .addGroup(panelCrystalScanLayout.createSequentialGroup() + .addComponent(jLabel4) + .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) + .addComponent(spStepsCr, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)) + .addGroup(panelCrystalScanLayout.createSequentialGroup() + .addComponent(jLabel5) + .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) + .addComponent(spShotsCr, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE))) + .addContainerGap(javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)) + .addComponent(btStartCr, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE) + ); + + 
panelCrystalScanLayout.linkSize(javax.swing.SwingConstants.HORIZONTAL, new java.awt.Component[] {jLabel2, jLabel3, jLabel4, jLabel5}); + + panelCrystalScanLayout.linkSize(javax.swing.SwingConstants.HORIZONTAL, new java.awt.Component[] {spFromCr, spShotsCr, spStepsCr, spToCr}); + + panelCrystalScanLayout.setVerticalGroup( + panelCrystalScanLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) + .addGroup(panelCrystalScanLayout.createSequentialGroup() + .addContainerGap() + .addGroup(panelCrystalScanLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE) + .addComponent(jLabel2) + .addComponent(spFromCr, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)) + .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) + .addGroup(panelCrystalScanLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE) + .addComponent(jLabel3) + .addComponent(spToCr, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)) + .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) + .addGroup(panelCrystalScanLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE) + .addComponent(jLabel4) + .addComponent(spStepsCr, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)) + .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) + .addGroup(panelCrystalScanLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE) + .addComponent(jLabel5) + .addComponent(spShotsCr, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)) + .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE) + .addComponent(btStartCr) + .addContainerGap()) + ); + + panelScan.add(panelCrystalScan, 
"crystal"); + + javax.swing.GroupLayout jPanel5Layout = new javax.swing.GroupLayout(jPanel5); + jPanel5.setLayout(jPanel5Layout); + jPanel5Layout.setHorizontalGroup( + jPanel5Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) + .addGap(0, 0, Short.MAX_VALUE) + .addGroup(jPanel5Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) + .addGroup(jPanel5Layout.createSequentialGroup() + .addGap(0, 0, Short.MAX_VALUE) + .addComponent(panelScan, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE) + .addGap(0, 0, 0))) + ); + jPanel5Layout.setVerticalGroup( + jPanel5Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) + .addGap(0, 0, Short.MAX_VALUE) + .addGroup(jPanel5Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) + .addGroup(jPanel5Layout.createSequentialGroup() + .addGap(0, 0, Short.MAX_VALUE) + .addComponent(panelScan, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE) + .addGap(0, 0, 0))) + ); + + javax.swing.GroupLayout jPanel2Layout = new javax.swing.GroupLayout(jPanel2); + jPanel2.setLayout(jPanel2Layout); + jPanel2Layout.setHorizontalGroup( + jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) + .addGroup(jPanel2Layout.createSequentialGroup() + .addGap(12, 12, 12) + .addGroup(jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING, false) + .addComponent(jPanel5, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE) + .addComponent(radioCrystalScan) + .addComponent(radioCameraScan) + .addComponent(radioEnergyScan) + .addComponent(btAbort, javax.swing.GroupLayout.DEFAULT_SIZE, 213, Short.MAX_VALUE)) + .addContainerGap()) + ); + jPanel2Layout.setVerticalGroup( + jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) + .addGroup(javax.swing.GroupLayout.Alignment.TRAILING, 
jPanel2Layout.createSequentialGroup() + .addGap(20, 20, 20) + .addComponent(radioEnergyScan) + .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED) + .addComponent(radioCameraScan) + .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED) + .addComponent(radioCrystalScan) + .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED) + .addComponent(jPanel5, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE) + .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED) + .addComponent(btAbort) + .addContainerGap()) + ); + + plotScan.setTitle(""); + + javax.swing.GroupLayout jPanel3Layout = new javax.swing.GroupLayout(jPanel3); + jPanel3.setLayout(jPanel3Layout); + jPanel3Layout.setHorizontalGroup( + jPanel3Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) + .addGroup(jPanel3Layout.createSequentialGroup() + .addComponent(jPanel2, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE) + .addGap(0, 0, 0) + .addComponent(plotScan, javax.swing.GroupLayout.DEFAULT_SIZE, 614, Short.MAX_VALUE)) + ); + jPanel3Layout.setVerticalGroup( + jPanel3Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) + .addComponent(jPanel2, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE) + .addComponent(plotScan, javax.swing.GroupLayout.DEFAULT_SIZE, 432, Short.MAX_VALUE) + ); + + tab.addTab("Alignment", jPanel3); + + javax.swing.GroupLayout layout = new javax.swing.GroupLayout(this); + this.setLayout(layout); + layout.setHorizontalGroup( + layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) + .addComponent(tab) + ); + layout.setVerticalGroup( + layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) + .addComponent(tab, javax.swing.GroupLayout.Alignment.TRAILING) + ); + }// //GEN-END:initComponents + + private void 
tabStateChanged(javax.swing.event.ChangeEvent evt) {//GEN-FIRST:event_tabStateChanged + setImageEnabled(tab.getSelectedIndex()==1); + }//GEN-LAST:event_tabStateChanged + + private void btAbortActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_btAbortActionPerformed + try { + abort(); + } catch (Exception ex) { + showException(ex); + } + }//GEN-LAST:event_btAbortActionPerformed + + private void btStartCrActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_btStartCrActionPerformed + Map args = new HashMap(); + args.put("RANGE_FROM", spFromCr.getValue()); + args.put("RANGE_TO", spToCr.getValue()); + args.put("STEPS", spStepsCr.getValue()); + args.put("NUM_SHOTS", spShotsCr.getValue()); + runScan("psss/CrystalHeightScan",args); + }//GEN-LAST:event_btStartCrActionPerformed + + private void btStartCamActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_btStartCamActionPerformed + Map args = new HashMap(); + args.put("RANGE_FROM", spFromCam.getValue()); + args.put("RANGE_TO", spToCam.getValue()); + args.put("STEPS", spStepsCam.getValue()); + args.put("NUM_SHOTS", spShotsCam.getValue()); + runScan("psss/CameraScan",args); + }//GEN-LAST:event_btStartCamActionPerformed + + private void btStartEnActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_btStartEnActionPerformed + Map args = new HashMap(); + args.put("RANGE_OFF", null); + args.put("RANGE_FROM", spFromEn.getValue()); + args.put("RANGE_TO", spToEn.getValue()); + args.put("STEPS", spStepsEn.getValue()); + args.put("NUM_SHOTS", spShotsEn.getValue()); + runScan("psss/EnergyScan",args); + }//GEN-LAST:event_btStartEnActionPerformed + + private void radioEnergyScanActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_radioEnergyScanActionPerformed + ((CardLayout)panelScan.getLayout()).show(panelScan, "energy"); + }//GEN-LAST:event_radioEnergyScanActionPerformed + + private void radioCameraScanActionPerformed(java.awt.event.ActionEvent evt) 
{//GEN-FIRST:event_radioCameraScanActionPerformed + ((CardLayout)panelScan.getLayout()).show(panelScan, "camera"); + }//GEN-LAST:event_radioCameraScanActionPerformed + + private void radioCrystalScanActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_radioCrystalScanActionPerformed + ((CardLayout)panelScan.getLayout()).show(panelScan, "crystal"); + }//GEN-LAST:event_radioCrystalScanActionPerformed + + private void spinnerAverageStateChanged(javax.swing.event.ChangeEvent evt) {//GEN-FIRST:event_spinnerAverageStateChanged + try { + eval("set_psss_averaging(" + spinnerAverage.getValue() + ")", true); + } catch (Exception ex) { + showException(ex); + } + }//GEN-LAST:event_spinnerAverageStateChanged + + // Variables declaration - do not modify//GEN-BEGIN:variables + private javax.swing.JButton btAbort; + private javax.swing.JButton btStartCam; + private javax.swing.JButton btStartCr; + private javax.swing.JButton btStartEn; + private javax.swing.ButtonGroup buttonGroup1; + private ch.psi.pshell.swing.HistogramGeneratorPanel histogramGeneratorFwhm; + private ch.psi.pshell.swing.HistogramGeneratorPanel histogramGeneratorPanelCenter; + private ch.psi.pshell.plot.TimePlotJFree history; + private javax.swing.JLabel jLabel1; + private javax.swing.JLabel jLabel10; + private javax.swing.JLabel jLabel11; + private javax.swing.JLabel jLabel12; + private javax.swing.JLabel jLabel13; + private javax.swing.JLabel jLabel2; + private javax.swing.JLabel jLabel3; + private javax.swing.JLabel jLabel4; + private javax.swing.JLabel jLabel5; + private javax.swing.JLabel jLabel6; + private javax.swing.JLabel jLabel7; + private javax.swing.JLabel jLabel8; + private javax.swing.JLabel jLabel9; + private javax.swing.JPanel jPanel1; + private javax.swing.JPanel jPanel2; + private javax.swing.JPanel jPanel3; + private javax.swing.JPanel jPanel4; + private javax.swing.JPanel jPanel5; + private javax.swing.JPanel panelCameraScan; + private javax.swing.JPanel panelCrystalScan; + 
private javax.swing.JPanel panelEnergyScan; + private javax.swing.JPanel panelScan; + private ch.psi.pshell.plot.LinePlotJFree plot; + private ch.psi.pshell.plot.LinePlotJFree plotScan; + private javax.swing.JRadioButton radioCameraScan; + private javax.swing.JRadioButton radioCrystalScan; + private javax.swing.JRadioButton radioEnergyScan; + private ch.psi.pshell.imaging.Renderer renderer; + private javax.swing.JSpinner spFromCam; + private javax.swing.JSpinner spFromCr; + private javax.swing.JSpinner spFromEn; + private javax.swing.JSpinner spShotsCam; + private javax.swing.JSpinner spShotsCr; + private javax.swing.JSpinner spShotsEn; + private javax.swing.JSpinner spStepsCam; + private javax.swing.JSpinner spStepsCr; + private javax.swing.JSpinner spStepsEn; + private javax.swing.JSpinner spToCam; + private javax.swing.JSpinner spToCr; + private javax.swing.JSpinner spToEn; + private javax.swing.JSpinner spinnerAverage; + private javax.swing.JTabbedPane tab; + // End of variables declaration//GEN-END:variables +} diff --git a/script/Lib/builtin_classes.py b/script/Lib/builtin_classes.py new file mode 100644 index 0000000..9408d6f --- /dev/null +++ b/script/Lib/builtin_classes.py @@ -0,0 +1,587 @@ +from builtin_utils import * + +import ch.psi.utils.Threading as Threading +import ch.psi.utils.State as State +import ch.psi.utils.Convert as Convert +import ch.psi.utils.Str as Str +import ch.psi.utils.Sys as Sys +import ch.psi.utils.Arr as Arr +import ch.psi.utils.IO as IO +import ch.psi.utils.Chrono as Chrono +import ch.psi.utils.Folder as Folder +import ch.psi.utils.Histogram as Histogram +import ch.psi.utils.History as History +import ch.psi.utils.Condition as Condition +import ch.psi.utils.ArrayProperties as ArrayProperties +import ch.psi.utils.Audio as Audio +import ch.psi.utils.BitMask as BitMask +import ch.psi.utils.Config as Config +import ch.psi.utils.Inventory as Inventory +import ch.psi.utils.DataAPI as DataAPI +import ch.psi.utils.DispatcherAPI as 
DispatcherAPI +import ch.psi.utils.EpicsBootInfoAPI as EpicsBootInfoAPI +import ch.psi.utils.Mail as Mail +import ch.psi.utils.Posix as Posix +import ch.psi.utils.ProcessFactory as ProcessFactory +import ch.psi.utils.Range as Range +import ch.psi.utils.Reflection as Reflection +import ch.psi.utils.Serializer as Serializer +import ch.psi.utils.Windows as Windows +import ch.psi.utils.NumberComparator as NumberComparator + + +import ch.psi.pshell.core.CommandSource as CommandSource +import ch.psi.pshell.core.ContextAdapter as ContextListener +import ch.psi.pshell.core.Context +import ch.psi.pshell.core.InlineDevice as InlineDevice + +import ch.psi.pshell.data.DataSlice as DataSlice +import ch.psi.pshell.data.PlotDescriptor as PlotDescriptor +import ch.psi.pshell.data.Table as Table +import ch.psi.pshell.data.Provider as Provider +import ch.psi.pshell.data.ProviderHDF5 as ProviderHDF5 +import ch.psi.pshell.data.ProviderText as ProviderText +import ch.psi.pshell.data.ProviderCSV as ProviderCSV +import ch.psi.pshell.data.ProviderFDA as ProviderFDA +import ch.psi.pshell.data.Converter as DataConverter +import ch.psi.pshell.data.Layout as Layout +import ch.psi.pshell.data.LayoutBase as LayoutBase +import ch.psi.pshell.data.LayoutDefault as LayoutDefault +import ch.psi.pshell.data.LayoutTable as LayoutTable +import ch.psi.pshell.data.LayoutFDA as LayoutFDA +import ch.psi.pshell.data.LayoutSF as LayoutSF + +import ch.psi.pshell.device.Device as Device +import ch.psi.pshell.device.DeviceBase as DeviceBase +import ch.psi.pshell.device.GenericDevice as GenericDevice +import ch.psi.pshell.device.DeviceConfig as DeviceConfig +import ch.psi.pshell.device.PositionerConfig as PositionerConfig +import ch.psi.pshell.device.RegisterConfig as RegisterConfig +import ch.psi.pshell.device.ReadonlyProcessVariableConfig as ReadonlyProcessVariableConfig +import ch.psi.pshell.device.ProcessVariableConfig as ProcessVariableConfig +import ch.psi.pshell.device.MotorConfig as MotorConfig +import 
ch.psi.pshell.device.Register as Register +import ch.psi.pshell.device.RegisterBase as RegisterBase +import ch.psi.pshell.device.ProcessVariableBase as ProcessVariableBase +import ch.psi.pshell.device.ControlledVariableBase as ControlledVariableBase +import ch.psi.pshell.device.PositionerBase as PositionerBase +import ch.psi.pshell.device.MasterPositioner as MasterPositioner +import ch.psi.pshell.device.MotorBase as MotorBase +import ch.psi.pshell.device.DiscretePositionerBase as DiscretePositionerBase +import ch.psi.pshell.device.MotorGroupBase as MotorGroupBase +import ch.psi.pshell.device.MotorGroupDiscretePositioner as MotorGroupDiscretePositioner +import ch.psi.pshell.device.ReadonlyRegisterBase as ReadonlyRegisterBase +import ch.psi.pshell.device.ReadonlyAsyncRegisterBase as ReadonlyAsyncRegisterBase +import ch.psi.pshell.device.Register as Register +import ch.psi.pshell.device.Register.RegisterArray as RegisterArray +import ch.psi.pshell.device.Register.RegisterNumber as RegisterNumber +import ch.psi.pshell.device.Register.RegisterBoolean as RegisterBoolean +import ch.psi.pshell.device.RegisterCache as RegisterCache +import ch.psi.pshell.device.ReadonlyRegister.ReadonlyRegisterArray as ReadonlyRegisterArray +import ch.psi.pshell.device.ReadonlyRegister.ReadonlyRegisterMatrix as ReadonlyRegisterMatrix +import ch.psi.pshell.device.DummyPositioner as DummyPositioner +import ch.psi.pshell.device.DummyMotor as DummyMotor +import ch.psi.pshell.device.DummyRegister as DummyRegister +import ch.psi.pshell.device.Timestamp as Timestamp +import ch.psi.pshell.device.Interlock as Interlock +import ch.psi.pshell.device.Readable as Readable +import ch.psi.pshell.device.Readable.ReadableArray as ReadableArray +import ch.psi.pshell.device.Readable.ReadableMatrix as ReadableMatrix +import ch.psi.pshell.device.Readable.ReadableCalibratedArray as ReadableCalibratedArray +import ch.psi.pshell.device.Readable.ReadableCalibratedMatrix as ReadableCalibratedMatrix +import 
ch.psi.pshell.device.ArrayCalibration as ArrayCalibration +import ch.psi.pshell.device.MatrixCalibration as MatrixCalibration +import ch.psi.pshell.device.Writable as Writable +import ch.psi.pshell.device.Writable.WritableArray as WritableArray +import ch.psi.pshell.device.Stoppable as Stoppable +import ch.psi.pshell.device.Averager as Averager +import ch.psi.pshell.device.ArrayAverager as ArrayAverager +import ch.psi.pshell.device.Delta as Delta +import ch.psi.pshell.device.DeviceAdapter as DeviceListener +import ch.psi.pshell.device.ReadbackDeviceAdapter as ReadbackDeviceListener +import ch.psi.pshell.device.MotorAdapter as MotorListener +import ch.psi.pshell.device.MoveMode as MoveMode +import ch.psi.pshell.device.SettlingCondition as SettlingCondition +import ch.psi.pshell.device.HistogramGenerator as HistogramGenerator + +import ch.psi.pshell.epics.Epics as Epics +import ch.psi.pshell.epics.EpicsScan as EpicsScan +import ch.psi.pshell.epics.ChannelSettlingCondition as ChannelSettlingCondition +import ch.psi.pshell.epics.AreaDetector as AreaDetector +import ch.psi.pshell.epics.BinaryPositioner as BinaryPositioner +import ch.psi.pshell.epics.ChannelByte as ChannelByte +import ch.psi.pshell.epics.ChannelByteArray as ChannelByteArray +import ch.psi.pshell.epics.ChannelByteMatrix as ChannelByteMatrix +import ch.psi.pshell.epics.ChannelDouble as ChannelDouble +import ch.psi.pshell.epics.ChannelDoubleArray as ChannelDoubleArray +import ch.psi.pshell.epics.ChannelDoubleMatrix as ChannelDoubleMatrix +import ch.psi.pshell.epics.ChannelFloat as ChannelFloat +import ch.psi.pshell.epics.ChannelFloatArray as ChannelFloatArray +import ch.psi.pshell.epics.ChannelFloatMatrix as ChannelFloatMatrix +import ch.psi.pshell.epics.ChannelInteger as ChannelInteger +import ch.psi.pshell.epics.ChannelIntegerArray as ChannelIntegerArray +import ch.psi.pshell.epics.ChannelIntegerMatrix as ChannelIntegerMatrix +import ch.psi.pshell.epics.ChannelShort as ChannelShort +import 
ch.psi.pshell.epics.ChannelShortArray as ChannelShortArray +import ch.psi.pshell.epics.ChannelShortMatrix as ChannelShortMatrix +import ch.psi.pshell.epics.ChannelString as ChannelString +import ch.psi.pshell.epics.ControlledVariable as ControlledVariable +import ch.psi.pshell.epics.DiscretePositioner as DiscretePositioner +import ch.psi.pshell.epics.GenericChannel as GenericChannel +import ch.psi.pshell.epics.GenericArray as GenericArray +import ch.psi.pshell.epics.GenericMatrix as GenericMatrix +import ch.psi.pshell.epics.Manipulator as Manipulator +import ch.psi.pshell.epics.Motor as EpicsMotor +import ch.psi.pshell.epics.Positioner as Positioner +import ch.psi.pshell.epics.ProcessVariable as ProcessVariable +import ch.psi.pshell.epics.ReadonlyProcessVariable as ReadonlyProcessVariable +import ch.psi.pshell.epics.Scaler as Scaler +import ch.psi.pshell.epics.Scienta as Scienta +import ch.psi.pshell.epics.Slit as Slit +import ch.psi.pshell.epics.AreaDetectorSource as AreaDetectorSource +import ch.psi.pshell.epics.ArraySource as ArraySource +import ch.psi.pshell.epics.ByteArraySource as ByteArraySource +import ch.psi.pshell.epics.PsiCamera as PsiCamera +import ch.psi.pshell.epics.CAS as CAS + +import ch.psi.pshell.serial.SerialPortDevice as SerialPortDevice +import ch.psi.pshell.serial.TcpDevice as TcpDevice +import ch.psi.pshell.serial.UdpDevice as UdpDevice +import ch.psi.pshell.serial.SerialPortDeviceConfig as SerialPortDeviceConfig +import ch.psi.pshell.serial.SocketDeviceConfig as SocketDeviceConfig + +import ch.psi.pshell.modbus.ModbusTCP as ModbusTCP +import ch.psi.pshell.modbus.ModbusUDP as ModbusUDP +import ch.psi.pshell.modbus.ModbusSerial as ModbusSerial +import ch.psi.pshell.modbus.AnalogInput as ModbusAI +import ch.psi.pshell.modbus.AnalogInputArray as ModbusMAI +import ch.psi.pshell.modbus.AnalogOutput as ModbusAO +import ch.psi.pshell.modbus.AnalogOutputArray as ModbusMAO +import ch.psi.pshell.modbus.DigitalInput as ModbusDO +import 
ch.psi.pshell.modbus.DigitalInputArray as ModbusMDI +import ch.psi.pshell.modbus.DigitalOutput as ModbusDO +import ch.psi.pshell.modbus.DigitalOutputArray as ModbusMDO +import ch.psi.pshell.modbus.Register as ModbusReg +import ch.psi.pshell.modbus.ReadonlyProcessVariable as ModbusROPV +import ch.psi.pshell.modbus.ProcessVariable as ModbusPV +import ch.psi.pshell.modbus.ControlledVariable as ModbusCB +import ch.psi.pshell.modbus.ModbusDeviceConfig as ModbusDeviceConfig + +import ch.psi.pshell.imaging.Source as Source +import ch.psi.pshell.imaging.SourceBase as SourceBase +import ch.psi.pshell.imaging.DirectSource as DirectSource +import ch.psi.pshell.imaging.RegisterArraySource as RegisterArraySource +import ch.psi.pshell.imaging.RegisterMatrixSource as RegisterMatrixSource +import ch.psi.pshell.imaging.ImageListener as ImageListener +import ch.psi.pshell.imaging.ImageMeasurement as ImageMeasurement +import ch.psi.pshell.imaging.CameraSource as CameraSource +import ch.psi.pshell.imaging.ColormapAdapter as ColormapAdapter +import ch.psi.pshell.imaging.FileSource as FileSource +import ch.psi.pshell.imaging.MjpegSource as MjpegSource +import ch.psi.pshell.imaging.Webcam as Webcam +import ch.psi.pshell.imaging.Filter as Filter +import ch.psi.pshell.imaging.Utils as ImagingUtils +import ch.psi.pshell.imaging.Overlay as Overlay +import ch.psi.pshell.imaging.Overlays as Overlays +import ch.psi.pshell.imaging.Pen as Pen +import ch.psi.pshell.imaging.Data as Data +import ch.psi.pshell.imaging.Colormap as Colormap +import ch.psi.pshell.imaging.Renderer as Renderer + + +import ch.psi.pshell.plot.Plot as Plot +import ch.psi.pshell.plot.Plot.AxisId as AxisId +import ch.psi.pshell.plot.LinePlot.Style as LinePlotStyle +import ch.psi.pshell.plot.RangeSelectionPlot as RangeSelectionPlot +import ch.psi.pshell.plot.RangeSelectionPlot.RangeSelectionPlotListener as RangeSelectionPlotListener +import ch.psi.pshell.plot.LinePlot as LinePlot +import ch.psi.pshell.plot.MatrixPlot as 
MatrixPlot +import ch.psi.pshell.plot.TimePlot as TimePlot +import ch.psi.pshell.plot.SlicePlot as SlicePlot + +import ch.psi.pshell.plot.LinePlotJFree as LinePlotJFree +import ch.psi.pshell.plot.MatrixPlotJFree as MatrixPlotJFree +import ch.psi.pshell.plot.TimePlotJFree as TimePlotJFree +import ch.psi.pshell.plot.SlicePlotDefault as SlicePlotDefault + +import ch.psi.pshell.plot.LinePlotSeries as LinePlotSeries +import ch.psi.pshell.plot.LinePlotErrorSeries as LinePlotErrorSeries +import ch.psi.pshell.plot.MatrixPlotSeries as MatrixPlotSeries +import ch.psi.pshell.plot.TimePlotSeries as TimePlotSeries +import ch.psi.pshell.plot.SlicePlotSeries as SlicePlotSeries + +import ch.psi.pshell.scan.ScanBase as ScanBase +import ch.psi.pshell.scan.LineScan +import ch.psi.pshell.scan.ContinuousScan +import ch.psi.pshell.scan.AreaScan +import ch.psi.pshell.scan.VectorScan +import ch.psi.pshell.scan.ManualScan +import ch.psi.pshell.scan.HardwareScan +import ch.psi.pshell.scan.RegionScan +import ch.psi.pshell.scan.TimeScan +import ch.psi.pshell.scan.MonitorScan +import ch.psi.pshell.scan.BinarySearch +import ch.psi.pshell.scan.HillClimbingSearch +import ch.psi.pshell.scan.ScanResult +import ch.psi.pshell.scan.Otf as Otf +import ch.psi.pshell.scan.ScanCallbacks as ScanCallbacks + +import ch.psi.pshell.crlogic.CrlogicPositioner as CrlogicPositioner +import ch.psi.pshell.crlogic.CrlogicSensor as CrlogicSensor + +import ch.psi.pshell.scan.ScanAbortedException as ScanAbortedException + +import ch.psi.pshell.bs.BsScan +import ch.psi.pshell.bs.Stream as Stream +import ch.psi.pshell.bs.Provider as Provider +import ch.psi.pshell.bs.Dispatcher as Dispatcher +import ch.psi.pshell.bs.Scalar as Scalar +import ch.psi.pshell.bs.Waveform as Waveform +import ch.psi.pshell.bs.Matrix as Matrix +import ch.psi.pshell.bs.StreamCamera as StreamCamera +import ch.psi.pshell.bs.CameraServer as CameraServer +import ch.psi.pshell.bs.PipelineServer as PipelineServer +import ch.psi.pshell.bs.ProviderConfig 
as ProviderConfig +import ch.psi.pshell.bs.StreamConfig as StreamConfig +import ch.psi.pshell.bs.ScalarConfig as ScalarConfig +import ch.psi.pshell.bs.WaveformConfig as WaveformConfig +import ch.psi.pshell.bs.MatrixConfig as MatrixConfig + +import ch.psi.pshell.detector.DetectorConfig as DetectorConfig + +import ch.psi.pshell.ui.App as App + +import ch.psi.pshell.scripting.ViewPreference as Preference +import ch.psi.pshell.scripting.ScriptUtils as ScriptUtils +import ch.psi.pshell.scripting.ScriptType as ScriptType + +from ch.psi.pshell.device.Record import * +from javax.swing.SwingUtilities import invokeLater, invokeAndWait + +import org.jfree.ui.RectangleAnchor as RectangleAnchor +import org.jfree.ui.TextAnchor as TextAnchor + + +def string_to_obj(o): + if is_string(o): + o=str(o) + if "://" in o: + return InlineDevice(o) + ret = get_context().getInterpreterVariable(o) + if ret is None: + try: + return get_context().scriptManager.evalBackground(o).result + except: + return None + return ret + elif is_list(o): + ret = [] + for i in o: + ret.append(string_to_obj(i)) + return ret + return o + +def json_to_obj(o): + if is_string(o): + import json + return json.loads(o) + elif is_list(o): + ret = [] + for i in o: + ret.append(json_to_obj(i)) + return ret + return o + +################################################################################################### +#Scan classes +################################################################################################### + +def __no_args(f): + ret = f.func_code.co_argcount + return (ret-1) if type(f)==PyMethod else ret + +def __before_readout(scan, pos): + try: + if scan.before_read != None: + args = __no_args(scan.before_read) + if args==0: scan.before_read() + elif args==1: scan.before_read(pos.tolist()) + elif args==2: scan.before_read(pos.tolist(), scan) + except AttributeError: + pass + +def __after_readout(scan, record): + try: + if scan.after_read != None: + args = __no_args(scan.after_read) + if 
args==0: scan.after_read() + elif args==1: scan.after_read(record) + elif args==2: scan.after_read(record, scan) + except AttributeError: + pass + +def __before_pass(scan, num_pass): + try: + if scan.before_pass != None: + args = __no_args(scan.before_pass) + if args==0:scan.before_pass() + elif args==1:scan.before_pass(num_pass) + elif args==2:scan.before_pass(num_pass, scan) + except AttributeError: + pass + +def __after_pass(scan, num_pass): + try: + if scan.after_pass != None: + args = __no_args(scan.after_pass) + if args==0:scan.after_pass() + elif args==1:scan.after_pass(num_pass) + elif args==2:scan.after_pass(num_pass, scan) + except AttributeError: + pass + +def __before_region(scan, num_region): + try: + if scan.before_region != None: + args = __no_args(scan.before_region) + if args==0:scan.before_region() + elif args==1:scan.before_region(num_region) + elif args==2:scan.before_region(num_region, scan) + except AttributeError: + pass + +class LineScan(ch.psi.pshell.scan.LineScan): + def onBeforeReadout(self, pos): __before_readout(self, pos) + def onAfterReadout(self, rec): __after_readout(self, rec) + def onBeforePass(self, num): __before_pass(self, num) + def onAfterPass(self, num): __after_pass(self, num) + +class AreaScan(ch.psi.pshell.scan.AreaScan): + def onBeforeReadout(self, pos): __before_readout(self, pos) + def onAfterReadout(self, rec): __after_readout(self, rec) + def onBeforePass(self, num): __before_pass(self, num) + def onAfterPass(self, num): __after_pass(self, num) + +class RegionScan(ch.psi.pshell.scan.RegionScan): + def onBeforeReadout(self, pos): __before_readout(self, pos) + def onAfterReadout(self, rec): __after_readout(self, rec) + def onBeforePass(self, num): __before_pass(self, num) + def onAfterPass(self, num): __after_pass(self, num) + def onBeforeRegion(self, num): __before_region(self,num) + +class VectorScan(ch.psi.pshell.scan.VectorScan): + def onBeforeReadout(self, pos): __before_readout(self, pos) + def 
onAfterReadout(self, rec): __after_readout(self, rec) + def onBeforePass(self, num): __before_pass(self, num) + def onAfterPass(self, num): __after_pass(self, num) + +class ContinuousScan(ch.psi.pshell.scan.ContinuousScan): + def onBeforeReadout(self, pos): __before_readout(self, pos) + def onAfterReadout(self, rec): __after_readout(self, rec) + def onBeforePass(self, num): __before_pass(self, num) + def onAfterPass(self, num): __after_pass(self, num) + +class TimeScan(ch.psi.pshell.scan.TimeScan): + def onBeforeReadout(self, pos): __before_readout(self, pos) + def onAfterReadout(self, rec): __after_readout(self, rec) + def onBeforePass(self, num): __before_pass(self, num) + def onAfterPass(self, num): __after_pass(self, num) + +class MonitorScan(ch.psi.pshell.scan.MonitorScan): + def onBeforeReadout(self, pos): __before_readout(self, pos) + def onAfterReadout(self, rec): __after_readout(self, rec) + def onBeforePass(self, num): __before_pass(self, num) + def onAfterPass(self, num): __after_pass(self, num) + +class BsScan(ch.psi.pshell.bs.BsScan): + def onBeforeReadout(self, pos): __before_readout(self, pos) + def onAfterReadout(self, rec): __after_readout(self, rec) + def onBeforePass(self, num): __before_pass(self, num) + def onAfterPass(self, num): __after_pass(self, num) + +class ManualScan (ch.psi.pshell.scan.ManualScan): + def __init__(self, writables, readables, start = None, end = None, steps = None, relative = False, dimensions = None, **pars): + ch.psi.pshell.scan.ManualScan.__init__(self, writables, readables, start, end, steps, relative) + self._dimensions = dimensions + processScanPars(self, pars) + + def append(self,setpoints, positions, values, timestamps=None): + ch.psi.pshell.scan.ManualScan.append(self, to_array(setpoints), to_array(positions), to_array(values), None if (timestamps is None) else to_array(timestamps)) + + def getDimensions(self): + if self._dimensions == None: + return ch.psi.pshell.scan.ManualScan.getDimensions(self) + else: + 
return self._dimensions + +class BinarySearch(ch.psi.pshell.scan.BinarySearch): + def onBeforeReadout(self, pos): __before_readout(self, pos) + def onAfterReadout(self, rec): __after_readout(self, rec) + +class HillClimbingSearch(ch.psi.pshell.scan.HillClimbingSearch): + def onBeforeReadout(self, pos): __before_readout(self, pos) + def onAfterReadout(self, rec): __after_readout(self, rec) + +def processScanPars(scan, pars): + scan.before_read = pars.pop("before_read",None) + scan.after_read = pars.pop("after_read",None) + scan.before_pass = pars.pop("before_pass",None) + scan.after_pass = pars.pop("after_pass",None) + scan.before_region= pars.pop("before_region",None) + scan.setPlotTitle(pars.pop("title",None)) + scan.setHidden(pars.pop("hidden",False)) + scan.setSettleTimeout (pars.pop("settle_timeout",ScanBase.getScansSettleTimeout())) + scan.setUseWritableReadback (pars.pop("use_readback",ScanBase.getScansUseWritableReadback())) + scan.setInitialMove(pars.pop("initial_move",ScanBase.getScansTriggerInitialMove())) + scan.setParallelPositioning(pars.pop("parallel_positioning",ScanBase.getScansParallelPositioning())) + scan.setAbortOnReadableError(pars.pop("abort_on_error",ScanBase.getAbortScansOnReadableError())) + scan.setRestorePosition (pars.pop("restore_position",ScanBase.getRestorePositionOnRelativeScans())) + scan.setCheckPositions(pars.pop("check_positions",ScanBase.getScansCheckPositions())) + scan.setMonitors(to_list(string_to_obj(pars.pop("monitors",None)))) + scan.setSnaps(to_list(string_to_obj(pars.pop("snaps",None)))) + scan.setDiags(to_list(string_to_obj(pars.pop("diags",None)))) + scan.setMeta(pars.pop("meta",None)) + get_context().setCommandPars(scan, pars) + + + +################################################################################################### +#Simple EPICS Channel abstraction +################################################################################################### + +def create_channel(name, type=None, size=None): + 
return Epics.newChannel(name, Epics.getChannelType(type), size) + +#Not using finalizer: closing channels in garbage collection generate errors +class Channel(java.beans.PropertyChangeListener, Writable, Readable, DeviceBase): + def __init__(self, channel_name, type = None, size = None, callback=None, alias = None, monitored=None, name = None): + """ Create an object that encapsulates an Epics PV connection. + Args: + channel_name(str):name of the channel + type(str, optional): type of PV. By default gets the PV standard field type. + Scalar values: 'b', 'i', 'l', 'd', 's'. + Array values: '[b', '[i,', '[l', '[d', '[s'. + size(int, optional): the size of the channel + callback(function, optional): The monitor callback. + alias(str): name to be used on scans. + """ + super(DeviceBase, self).__init__(name if (name is not None) else channel_name.replace(":","_").replace(".","_")) + self.channel = create_channel(channel_name, type, size) + self.callback = callback + self._alias = alias + if monitored is not None:self.setMonitored(monitored) + self.initialize() + + def get_channel_name(self): + """Return the name of the channel. + """ + return self.channel.name + + def get_size(self): + """Return the size of the channel. + """ + return self.channel.size + + def set_size(self, size): + """Set the size of the channel. + """ + self.channel.size = size + + def is_connected(self): + """Return True if channel is connected. + """ + return self.channel.connected + + def doSetMonitored(self, value): + self.channel.monitored = value + if (value): + self.channel.addPropertyChangeListener(self) + else: + self.channel.removePropertyChangeListener(self) + + + def is_monitored(self): + """Return True if channel is monitored + """ + return self.channel.monitored + + def set_monitored(self, value): + """Set a channel monitor to trigger the callback function defined in the constructor. 
+ """ + self.setMonitored(value) + + def propertyChange(self, pce): + if pce.getPropertyName() == "value": + value=pce.getNewValue() + self.setCache(value, None) + if self.callback is not None: + self.callback(value) + + def put(self, value, timeout=None): + """Write to channel and wait value change. In the case of a timeout throws a TimeoutException. + Args: + value(obj): value to be written + timeout(float, optional): timeout in seconds. If none waits forever. + """ + if (timeout==None): + self.channel.setValue(value) + else: + self.channel.setValueAsync(value).get(int(timeout*1000), java.util.concurrent.TimeUnit.MILLISECONDS) + self.setCache(value, None) + + def putq(self, value): + """Write to channel and don't wait. + """ + self.channel.setValueNoWait(value) + + def get(self, force = False): + """Get channel value. + """ + ret = self.channel.getValue(force) + self.setCache(ret, None) + return ret + + def wait_for_value(self, value, timeout=None, comparator=None): + """Wait channel to reach a value, using a given comparator. In the case of a timeout throws a TimeoutException. + Args: + value(obj): value to be verified. + timeout(float, optional): timeout in seconds. If None waits forever. + comparator (java.util.Comparator, optional). If None, uses Object.equals. + """ + if comparator is None: + if timeout is None: + self.channel.waitForValue(value) + else: + self.channel.waitForValue(value, int(timeout*1000)) + self.setCache(value, None) + else: + if timeout is None: + self.channel.waitForValue(value, comparator) + else: + self.channel.waitForValue(value, comparator, int(timeout*1000)) + + def doUpdate(self): + self.get() + + def close(self): + """Close the channel. 
+ """ + Epics.closeChannel(self.channel) + + def setAlias(self, alias): + self._alias = alias + + def getAlias(self): + return self._alias if self._alias else self.getName() + + def write(self, value): + self.put(value) + + def read(self): + return self.get() + + def __enter__(self): + return self + + def __exit__(self, *args): + self.close() \ No newline at end of file diff --git a/script/Lib/builtin_functions.py b/script/Lib/builtin_functions.py new file mode 100644 index 0000000..204c57b --- /dev/null +++ b/script/Lib/builtin_functions.py @@ -0,0 +1,1895 @@ +from builtin_utils import * +from builtin_classes import * + +################################################################################################### +#Scan commands +################################################################################################### + +def lscan(writables, readables, start, end, steps, latency=0.0, relative=False, passes=1, zigzag=False, **pars): + """Line Scan: positioners change together, linearly from start to end positions. + + Args: + writables(list of Writable): Positioners set on each step. + readables(list of Readable): Sensors to be sampled on each step. + start(list of float): start positions of writables. + end(list of float): final positions of writables. + steps(int or float or list of float): number of scan steps (int) or step size (float). + relative (bool, optional): if true, start and end positions are relative to current. + latency(float, optional): settling time for each step before readout, defaults to 0.0. + passes(int, optional): number of passes + zigzag(bool, optional): if true writables invert direction on each pass. + pars(keyworded variable length arguments, optional): scan optional named arguments: + - title(str, optional): plotting window name. + - hidden(bool, optional): if true generates no effects on user interface. + - before_read (function(positions, scan), optional): called on each step, before sampling. 
+ - after_read (function(record, scan), optional): called on each step, after sampling. + - before_pass (function(pass_num, scan), optional): called before each pass. + - after_pass (function(pass_num, scan), optional): callback after each pass. + - settle_timeout(int, optional): timeout for each positioner get to position. Default (-1) waits forever. + - initial_move (bool, optional): if true (default) perform move to initial position prior to scan start. + - parallel_positioning (bool, optional): if true (default) all positioners are set in parallel. + - abort_on_error (bool, optional): if true then aborts scan in sensor failures. Default is false. + - restore_position (bool, optional): if true (default) then restore initial position after relative scans. + - check_positions (bool, optional): if true (default) verifies if in correct positions after move finishes. + - monitors (list of Device, optional): device values are saved on every change event during the scan. + - snaps (list of Readable, optional): snapshot device values are saved before the scan. + - diags (list of Readable, optional): diagnostic device values are saved at each scan point. + - meta (dict, optional): scan metadata. + - Aditional arguments defined by set_exec_pars. + + Returns: + ScanResult. + """ + latency_ms=int(latency*1000) + writables=to_list(string_to_obj(writables)) + readables=to_list(string_to_obj(readables)) + start=to_list(start) + end=to_list(end) + if type(steps) is float or is_list(steps): + steps = to_list(steps) + scan = LineScan(writables,readables, start, end , steps, relative, latency_ms, int(passes), zigzag) + processScanPars(scan, pars) + scan.start() + return scan.getResult() + +def vscan(writables, readables, vector, line = False, latency=0.0, relative=False, passes=1, zigzag=False, **pars): + """Vector Scan: positioner values provided in a vector. + + Args: + writables(list of Writable): Positioners set on each step. 
+ readables(list of Readable): Sensors to be sampled on each step. + vector (generator (floats or lists of float) or list of list of float): positioner values. + line (bool, optional): if true, processs as line scan (1d) + relative (bool, optional): if true, start and end positions are relative to current. + latency(float, optional): settling time for each step before readout, defaults to 0.0. + passes(int, optional): number of passes (disregarded if vector is a generator). + zigzag(bool, optional): if true writables invert direction on each pass (disregarded if vector is a generator). + pars(keyworded variable length arguments, optional): scan optional named arguments: + - title(str, optional): plotting window name. + - before_read (function(positions, scan), optional): called on each step, before sampling. + - after_read (function(record, scan), optional): called on each step, after sampling. + - before_pass (function(pass_num, scan), optional): called before each pass. + - after_pass (function(pass_num, scan), optional): callback after each pass. + - settle_timeout(int, optional): timeout for each positioner get to position. Default (-1) waits forever. + - initial_move (bool, optional): if true (default) perform move to initial position prior to scan start. + - parallel_positioning (bool, optional): if true (default) all positioners are set in parallel. + - abort_on_error (bool, optional): if true then aborts scan in sensor failures. Default is false. + - restore_position (bool, optional): if true (default) then restore initial position after relative scans. + - check_positions (bool, optional): if true (default) verifies if in correct positions after move finishes. + - monitors (list of Device, optional): device values are saved on every change event during the scan. + - snaps (list of Readable, optional): snapshot device values are saved before the scan. + - diags (list of Readable, optional): diagnostic device values are saved at each scan point. 
+ - meta (dict, optional): scan metadata. + - Aditional arguments defined by set_exec_pars. + + Returns: + ScanResult. + """ + latency_ms=int(latency*1000) + writables=to_list(string_to_obj(writables)) + readables=to_list(string_to_obj(readables)) + if type (vector) == PyGenerator: + scan = VectorScan(writables,readables, vector, line, relative, latency_ms) + else: + if len(vector) == 0: + vector.append([]) + elif (not is_list(vector[0])) and (not isinstance(vector[0],PyArray)): + vector = [[x,] for x in vector] + vector = to_array(vector, 'd') + scan = VectorScan(writables,readables, vector, line, relative, latency_ms, int(passes), zigzag) + processScanPars(scan, pars) + scan.start() + return scan.getResult() + +def ascan(writables, readables, start, end, steps, latency=0.0, relative=False, passes=1, zigzag=False, **pars): + """Area Scan: multi-dimentional scan, each positioner is a dimention. + + Args: + writables(list of Writable): Positioners set on each step. + readables(list of Readable): Sensors to be sampled on each step. + start(list of float): start positions of writables. + end(list of float): final positions of writables. + steps(list of int or list of float): number of scan steps (int) or step size (float). + latency(float, optional): settling time for each step before readout, defaults to 0.0. + relative (bool, optional): if true, start and end positions are relative to current. + passes(int, optional): number of passes + zigzag (bool, optional): if true writables invert direction on each row. + pars(keyworded variable length arguments, optional): scan optional named arguments: + - title(str, optional): plotting window name. + - before_read (function(positions, scan), optional): called on each step, before sampling. + - after_read (function(record, scan), optional): called on each step, after sampling. + - before_pass (function(pass_num, scan), optional): called before each pass. 
+ - after_pass (function(pass_num, scan), optional): callback after each pass. + - settle_timeout(int, optional): timeout for each positioner get to position. Default (-1) waits forever. + - initial_move (bool, optional): if true (default) perform move to initial position prior to scan start. + - parallel_positioning (bool, optional): if true (default) all positioners are set in parallel. + - abort_on_error (bool, optional): if true then aborts scan in sensor failures. Default is false. + - restore_position (bool, optional): if true (default) then restore initial position after relative scans. + - check_positions (bool, optional): if true (default) verifies if in correct positions after move finishes. + - monitors (list of Device, optional): device values are saved on every change event during the scan. + - snaps (list of Readable, optional): snapshot device values are saved before the scan. + - diags (list of Readable, optional): diagnostic device values are saved at each scan point. + - meta (dict, optional): scan metadata. + - Aditional arguments defined by set_exec_pars. + + Returns: + ScanResult. + """ + latency_ms=int(latency*1000) + writables=to_list(string_to_obj(writables)) + readables=to_list(string_to_obj(readables)) + start=to_list(start) + end=to_list(end) + if is_list(steps): + steps = to_list(steps) + scan = AreaScan(writables,readables, start, end , steps, relative, latency_ms, int(passes), zigzag) + processScanPars(scan, pars) + scan.start() + return scan.getResult() + + +def rscan(writable, readables, regions, latency=0.0, relative=False, passes=1, zigzag=False, **pars): + """Region Scan: positioner scanned linearly, from start to end positions, in multiple regions. + + Args: + writable(Writable): Positioner set on each step, for each region. + readables(list of Readable): Sensors to be sampled on each step. 
+ regions (list of tuples (float,float, int) or (float,float, float)): each tuple define a scan region + (start, stop, steps) or (start, stop, step_size) + relative (bool, optional): if true, start and end positions are relative to current. + latency(float, optional): settling time for each step before readout, defaults to 0.0. + passes(int, optional): number of passes + zigzag(bool, optional): if true writable invert direction on each pass. + pars(keyworded variable length arguments, optional): scan optional named arguments: + - title(str, optional): plotting window name. + - before_read (function(positions, scan), optional): called on each step, before sampling. + - after_read (function(record, scan), optional): called on each step, after sampling. + - before_pass (function(pass_num, scan), optional): called before each pass. + - after_pass (function(pass_num, scan), optional): callback after each pass. + - before_region (function(region_num, scan), optional): callback before entering a region. + - settle_timeout(int, optional): timeout for each positioner get to position. Default (-1) waits forever. + - initial_move (bool, optional): if true (default) perform move to initial position prior to scan start. + - parallel_positioning (bool, optional): if true (default) all positioners are set in parallel. + - abort_on_error (bool, optional): if true then aborts scan in sensor failures. Default is false. + - restore_position (bool, optional): if true (default) then restore initial position after relative scans. + - check_positions (bool, optional): if true (default) verifies if in correct positions after move finishes. + - monitors (list of Device, optional): device values are saved on every change event during the scan. + - snaps (list of Readable, optional): snapshot device values are saved before the scan. + - diags (list of Readable, optional): diagnostic device values are saved at each scan point. + - meta (dict, optional): scan metadata. 
+ - Aditional arguments defined by set_exec_pars. + + Returns: + ScanResult. + """ + start=[] + end=[] + steps=[] + for region in regions: + start.append(region[0]) + end.append(region[1]) + steps.append(region[2]) + latency_ms=int(latency*1000) + writable=string_to_obj(writable) + readables=to_list(string_to_obj(readables)) + start=to_list(start) + end=to_list(end) + steps = to_list(steps) + scan = RegionScan(writable,readables, start, end , steps, relative, latency_ms, int(passes), zigzag) + processScanPars(scan, pars) + scan.start() + return scan.getResult() + +def cscan(writables, readables, start, end, steps, latency=0.0, time=None, relative=False, passes=1, zigzag=False, **pars): + """Continuous Scan: positioner change continuously from start to end position and readables are sampled on the fly. + + Args: + writable(Speedable or list of Motor): A positioner with a getSpeed method or + a list of motors. + readables(list of Readable): Sensors to be sampled on each step. + start(float or list of float): start positions of writables. + end(float or list of float): final positions of writabless. + steps(int or float or list of float): number of scan steps (int) or step size (float). + latency(float, optional): sleep time in each step before readout, defaults to 0.0. + time (float, seconds): if not None then speeds are set according to time. + relative (bool, optional): if true, start and end positions are relative to current. + passes(int, optional): number of passes + pars(keyworded variable length arguments, optional): scan optional named arguments: + - title(str, optional): plotting window name. + - before_read (function(positions, scan), optional): called on each step, before sampling. + - after_read (function(record, scan), optional): called on each step, after sampling. + - before_pass (function(pass_num, scan), optional): called before each pass. + - after_pass (function(pass_num, scan), optional): callback after each pass. 
+ - abort_on_error (bool, optional): if true then aborts scan in sensor failures. Default is false. + - restore_position (bool, optional): if true (default) then restore initial position after relative scans. + - monitors (list of Device, optional): device values are saved on every change event during the scan. + - snaps (list of Readable, optional): snapshot device values are saved before the scan. + - diags (list of Readable, optional): diagnostic device values are saved at each scan point. + - meta (dict, optional): scan metadata. + - Aditional arguments defined by set_exec_pars. + + Returns: + ScanResult. + """ + latency_ms=int(latency*1000) + readables=to_list(string_to_obj(readables)) + writables=to_list(string_to_obj(writables)) + start=to_list(start) + end=to_list(end) + #A single Writable with fixed speed + if time is None: + if is_list(steps): steps=steps[0] + scan = ContinuousScan(writables[0],readables, start[0], end[0] , steps, relative, latency_ms, int(passes), zigzag) + #A set of Writables with speed configurable + else: + if type(steps) is float or is_list(steps): + steps = to_list(steps) + scan = ContinuousScan(writables,readables, start, end , steps, time, relative, latency_ms, int(passes), zigzag) + + processScanPars(scan, pars) + scan.start() + return scan.getResult() + +def hscan(config, writable, readables, start, end, steps, passes=1, zigzag=False, **pars): + """Hardware Scan: values sampled by external hardware and received asynchronously. + + Args: + config(dict): Configuration of the hardware scan. The "class" key provides the implementation class. + Other keys are implementation specific. + writable(Writable): A positioner appropriated to the hardware scan type. + readables(list of Readable): Sensors appropriated to the hardware scan type. + start(float): start positions of writable. + end(float): final positions of writables. + steps(int or float): number of scan steps (int) or step size (float). 
+ passes(int, optional): number of passes + pars(keyworded variable length arguments, optional): scan optional named arguments: + - title(str, optional): plotting window name. + - after_read (function(record, scan), optional): callback on each step, after sampling. + - before_pass (function(pass_num, scan), optional): callback before each scan pass execution. + - after_pass (function(pass_num, scan), optional): callback after each scan pass execution. + - abort_on_error (bool, optional): if true then aborts scan in sensor failures. Default is false. + - monitors (list of Device, optional): device values are saved on every change event during the scan. + - snaps (list of Readable, optional): snapshot device values are saved before the scan. + - meta (dict, optional): scan metadata. + + Returns: + ScanResult. + """ + cls = Class.forName(config["class"]) + class HardwareScan(cls): + def __init__(self, config, writable, readables, start, end, stepSize, passes, zigzag): + cls.__init__(self, config, writable, readables, start, end, stepSize, passes, zigzag) + def onAfterReadout(self, record): + __after_readout(self, record) + def onBeforePass(self, num_pass): + __before_pass(self, num_pass) + def onAfterPass(self, num_pass): + __after_pass(self, num_pass) + + readables=to_list(string_to_obj(readables)) + scan = HardwareScan(config, writable,readables, start, end , steps, int(passes), zigzag) + processScanPars(scan, pars) + scan.start() + return scan.getResult() + +def bscan(stream, records, timeout = None, passes=1, **pars): + """BS Scan: records all values in a beam synchronous stream. + + Args: + stream(Stream): stream object or list of chanel names to build stream from + records(int): number of records to store + timeout(float, optional): maximum scan time in seconds. + passes(int, optional): number of passes + pars(keyworded variable length arguments, optional): scan optional named arguments: + - title(str, optional): plotting window name. 
+ - before_read (function(positions, scan), optional): called on each step, before sampling. + - after_read (function(record, scan), optional): called on each step, after sampling. + - before_pass (function(pass_num, scan), optional): called before each pass. + - after_pass (function(pass_num, scan), optional): callback after each pass. + - monitors (list of Device, optional): device values are saved on every change event during the scan. + - snaps (list of Readable, optional): snapshot device values are saved before the scan. + - diags (list of Readable, optional): diagnostic device values are saved at each scan point. + - meta (dict, optional): scan metadata. + - Aditional arguments defined by set_exec_pars. + + Returns: + ScanResult. + """ + timeout_ms=int(timeout*1000) if ((timeout is not None) and (timeout>=0)) else -1 + if not is_list(stream): + stream=string_to_obj(stream) + scan = BsScan(stream,int(records), timeout_ms, int(passes)) + processScanPars(scan, pars) + scan.start() + return scan.getResult() + +def tscan(readables, points, interval, passes=1, fixed_rate=True, **pars): + """Time Scan: sensors are sampled in fixed time intervals. + + Args: + readables(list of Readable): Sensors to be sampled on each step. + points(int): number of samples. + interval(float): time interval between readouts. Minimum temporization is 0.001s + passes(int, optional): number of passes + fixed_rate(bool, optional): in the case of delays in sampling: + If True tries to preserve to total scan time, accelerating following sampling. + If False preserves the interval between samples, increasing scan time. + pars(keyworded variable length arguments, optional): scan optional named arguments: + - title(str, optional): plotting window name. + - before_read (function(positions, scan), optional): called on each step, before sampling. + - after_read (function(record, scan), optional): called on each step, after sampling. 
+ - before_pass (function(pass_num, scan), optional): called before each pass. + - after_pass (function(pass_num, scan), optional): callback after each pass. + - abort_on_error (bool, optional): if true then aborts scan in sensor failures. Default is false. + - monitors (list of Device, optional): device values are saved on every change event during the scan. + - snaps (list of Readable, optional): snapshot device values are saved before the scan. + - diags (list of Readable, optional): diagnostic device values are saved at each scan point. + - meta (dict, optional): scan metadata. + - Aditional arguments defined by set_exec_pars. + + Returns: + ScanResult. + """ + interval= max(interval, 0.001) #Minimum temporization is 1ms + interval_ms=int(interval*1000) + readables=to_list(string_to_obj(readables)) + scan = TimeScan(readables, points, interval_ms, int(passes), bool(fixed_rate)) + processScanPars(scan, pars) + scan.start() + return scan.getResult() + +def mscan(trigger, readables, points=-1, timeout=None, async=True, take_initial=False, passes=1, **pars): + """Monitor Scan: sensors are sampled when received change event of the trigger device. + + Args: + trigger(Device or list of Device): Source of the sampling triggering. + readables(list of Readable): Sensors to be sampled on each step. + If trigger has cache and is included in readables, it is not read + for each step, but the change event value is used. + points(int, optional): number of samples (-1 for undefined). + timeout(float, optional): maximum scan time in seconds (None for no timeout). + async(bool, optional): if True then records are sampled and stored on event change callback. Enforce + reading only cached values of sensors. + If False, the scan execution loop waits for trigger cache update. Do not make + cache only access, but may loose change events. + take_initial(bool, optional): if True include current values as first record (before first trigger). 
+ passes(int, optional): number of passes + pars(keyworded variable length arguments, optional): scan optional named arguments: + - title(str, optional): plotting window name. + - before_read (function(positions, scan), optional): called on each step, before sampling. + - after_read (function(record, scan), optional): called on each step, after sampling. + - before_pass (function(pass_num, scan), optional): called before each pass. + - after_pass (function(pass_num, scan), optional): callback after each pass. + - abort_on_error (bool, optional): if true then aborts scan in sensor failures. Default is false. + - monitors (list of Device, optional): device values are saved on every change event during the scan. + - snaps (list of Readable, optional): snapshot device values are saved before the scan. + - diags (list of Readable, optional): diagnostic device values are saved at each scan point. + - meta (dict, optional): scan metadata. + - Aditional arguments defined by set_exec_pars. + + Returns: + ScanResult. + """ + timeout_ms=int(timeout*1000) if ((timeout is not None) and (timeout>=0)) else -1 + trigger = string_to_obj(trigger) + readables=to_list(string_to_obj(readables)) + scan = MonitorScan(trigger, readables, points, timeout_ms, async, take_initial, int(passes)) + processScanPars(scan, pars) + scan.start() + return scan.getResult() + +def escan(name, **pars): + """Epics Scan: execute an Epics Scan Record. + + Args: + name(str): Name of scan record. + title(str, optional): plotting window name. + pars(keyworded variable length arguments, optional): scan optional named arguments: + - title(str, optional): plotting window name. + - Aditional arguments defined by set_exec_pars. + + Returns: + ScanResult. 
+ """ + scan = EpicsScan(name) + processScanPars(scan, pars) + scan.start() + return scan.getResult() + + +def bsearch(writables, readable, start, end, steps, maximum = True, strategy = "Normal", latency=0.0, relative=False, **pars): + """Binary search: searches writables in a binary search fashion to find a local maximum for the readable. + + Args: + writables(list of Writable): Positioners set on each step. + readable(Readable): Sensor to be sampled. + start(list of float): start positions of writables. + end(list of float): final positions of writables. + steps(float or list of float): resolution of search for each writable. + maximum (bool , optional): if True (default) search maximum, otherwise minimum. + strategy (str , optional): "Normal": starts search midway to scan range and advance in the best direction. + Uses orthogonal neighborhood (4-neighborhood for 2d) + "Boundary": starts search on scan range. + "FullNeighborhood": Uses complete neighborhood (8-neighborhood for 2d) + + latency(float, optional): settling time for each step before readout, defaults to 0.0. + relative (bool, optional): if true, start and end positions are relative to current. + pars(keyworded variable length arguments, optional): scan optional named arguments: + - title(str, optional): plotting window name. + - before_read (function(positions, scan), optional): called on each step, before sampling. + - after_read (function(record, scan), optional): called on each step, after sampling. + - settle_timeout(int, optional): timeout for each positioner get to position. Default (-1) waits forever. + - parallel_positioning (bool, optional): if true (default) all positioners are set in parallel. + - abort_on_error (bool, optional): if true then aborts scan in sensor failures. Default is false. + - restore_position (bool, optional): if true (default) then restore initial position after relative scans. 
+ - check_positions (bool, optional): if true (default) verifies if in correct positions after move finishes. + - Aditional arguments defined by set_exec_pars. + + Returns: + SearchResult. + """ + latency_ms=int(latency*1000) + writables=to_list(string_to_obj(writables)) + readable=string_to_obj(readable) + start=to_list(start) + end=to_list(end) + steps = to_list(steps) + strategy = BinarySearch.Strategy.valueOf(strategy) + scan = BinarySearch(writables,readable, start, end , steps, maximum, strategy, relative, latency_ms) + processScanPars(scan, pars) + scan.start() + return scan.getResult() + +def hsearch(writables, readable, range_min, range_max, initial_step, resolution, filter=1, maximum=True, latency=0.0, relative=False, **pars): + """Hill Climbing search: searches writables in decreasing steps to find a local maximum for the readable. + Args: + writables(list of Writable): Positioners set on each step. + readable(Readable): Sensor to be sampled. + range_min(list of float): minimum positions of writables. + range_max(list of float): maximum positions of writables. + initial_step(float or list of float):initial step size for for each writable. + resolution(float or list of float): resolution of search for each writable (minimum step size). + filter(int): number of aditional steps to filter noise + maximum (bool , optional): if True (default) search maximum, otherwise minimum. + latency(float, optional): settling time for each step before readout, defaults to 0.0. + relative (bool, optional): if true, start and end positions are relative to current. + pars(keyworded variable length arguments, optional): scan optional named arguments: + - title(str, optional): plotting window name. + - before_read (function(positions, scan), optional): called on each step, before sampling. + - after_read (function(record, scan), optional): called on each step, after sampling. + - settle_timeout(int, optional): timeout for each positioner get to position. 
Default (-1) waits forever. + - parallel_positioning (bool, optional): if true (default) all positioners are set in parallel. + - abort_on_error (bool, optional): if true then aborts scan in sensor failures. Default is false. + - restore_position (bool, optional): if true (default) then restore initial position after relative scans. + - check_positions (bool, optional): if true (default) verifies if in correct positions after move finishes. + - Aditional arguments defined by set_exec_pars. + + Returns: + SearchResult. + """ + latency_ms=int(latency*1000) + writables=to_list(string_to_obj(writables)) + readable=string_to_obj(readable) + range_min=to_list(range_min) + range_max=to_list(range_max) + initial_step = to_list(initial_step) + resolution = to_list(resolution) + scan = HillClimbingSearch(writables,readable, range_min, range_max , initial_step, resolution, filter, maximum, relative, latency_ms) + processScanPars(scan, pars) + scan.start() + return scan.getResult() + + +################################################################################################### +#Data plotting +################################################################################################### + +def plot(data, name = None, xdata = None, ydata=None, title=None): + """Request one or multiple plots of user data (1d, 2d or 3d). + + Args: + data: array or list of values. For multiple plots, list of arrays. + name(str or list of str, optional): plot name. For multiple plots, list of names. + xdata: array or list of values. For multiple plots, list of arrays. + ydata: array or list of values. For multiple plots, list of arrays. + title(str, optional): plotting window name. + + Returns: + List of Plot. 
+ """ + data = json_to_obj(data) + xdata = json_to_obj(xdata) + ydata = json_to_obj(ydata) + if isinstance(data, ch.psi.pshell.data.Table): + if is_list(xdata): + xdata = to_array(xdata, 'd') + return get_context().plot(data,xdata,name,title) + + if isinstance(data, ch.psi.pshell.scan.ScanResult): + return get_context().plot(data,title) + + if (name is not None) and is_list(name): + if len(name)==0: + name=None; + else: + if (data==None): + data = [] + for n in name: + data.append([]) + plots = java.lang.reflect.Array.newInstance(Class.forName("ch.psi.pshell.data.PlotDescriptor"), len(data)) + for i in range (len(data)): + plotName = None if (name is None) else name[i] + x = xdata + if is_list(x) and len(x)>0 and (is_list(x[i]) or isinstance(x[i] , java.util.List) or isinstance(x[i],PyArray)): + x = x[i] + y = ydata + if is_list(y) and len(y)>0 and (is_list(y[i]) or isinstance(y[i] , java.util.List) or isinstance(y[i],PyArray)): + y = y[i] + plots[i] = PlotDescriptor(plotName , to_array(data[i], 'd'), to_array(x, 'd'), to_array(y, 'd')) + return get_context().plot(plots,title) + else: + plot = PlotDescriptor(name, to_array(data, 'd'), to_array(xdata, 'd'), to_array(ydata, 'd')) + return get_context().plot(plot,title) + +def get_plots(title=None): + """Return all current plots in the plotting window given by 'title'. + + Args: + title(str, optional): plotting window name. + + Returns: + List of Plot. + """ + return get_context().getPlots(title) + +def get_plot_snapshots(title = None, file_type = "png", size = None, temp_path = get_context().setup.getContextPath()): + """Returns list with file names of plots snapshots from a plotting context. + + Args: + title(str, optional): plotting window name. + file_type(str, optional): "png", "jpg", "bmp" or "gif" + size(array, optional): [width, height] + temp_path(str, optional): path where the files will be generated. 
+ + Returns: + list of strings + """ + time.sleep(0.1) #Give some time to plot to be finished - it is not sync with acquisition + ret = [] + if size != None: + size = Dimension(size[0], size[1]) + plots = get_plots(title) + for i in range(len(plots)): + p = plots[i] + name = p.getTitle() + if name is None or name == "": + name = str(i) + file_name = os.path.abspath(temp_path + "/" + name + "." + file_type) + p.saveSnapshot(file_name , file_type, size) + ret.append(file_name) + return ret + + +################################################################################################### +#Data access +################################################################################################### + +def load_data(path, index=0, shape=None, root=None): + """Read data from the current persistence context or from data files. + + Args: + path(str): Path to group or dataset relative to the root. + If path is in the format 'root|path', or else if 'root' is defined, then + reads from data file given by root. Otherwise uses current data persistence file. + root(str, optional): data file. + index(int or list, optional): + if integer, data depth (used for 3D datasets returning a 2d matrix) + If a list, specifies the full coordinate for multidimensional datasets. + shape(list, optional): only valid if index is a list, provides the shape of the data array. + In this case return a flattened a one-dimensional array. + + Returns: + Data array + """ + dm=get_context().dataManager + if index is not None and is_list(index): + slice = dm.getData(path, index, shape) if (root==None) else dm.getData(root, path, index, shape) + else: + slice = dm.getData(path, index) if (root==None) else dm.getData(root, path, index) + return slice.sliceData + +def get_attributes(path, root=None): + """Get the attributes from group or dataset. + + Args: + path(str): Path to group or dataset relative to the root. 
+ If path is in the format 'root|path', or else if 'root' is defined, then + reads from data file given by root. Otherwise uses current data persistence file. + root(str, optional): data file. + Returns: + Dictionary + """ + if (root is None): + return get_context().dataManager.getAttributes(path) + return get_context().dataManager.getAttributes(root, path) + +def get_data_info(path, root=None): + """Get information about the group or dataset. + + Args: + path(str): Path to group or dataset relative to the current persistence context root. + If path is in the format 'root|path', or else if 'root' is defined, then + reads from data file given by root. Otherwise uses current data persistence file. + root(str, optional): data file. + Returns: + Dictionary + """ + if (root is None): + return get_context().dataManager.getInfo(path) + return get_context().dataManager.getInfo(root, path) + +def save_dataset(path, data, type='d', unsigned=False, features=None): + """Save data into a dataset within the current persistence context. + + Args: + path(str): Path to dataset relative to the current persistence context root. + type(str, optional): array type - 'd'=double (default), 'b'=byte, 'h'=short, 'i'=int, + 'l'=long, 'f'=float, 'c'=char, 's'=String, 'z'=bool, 'o'=Object + data (array or list): data to be saved + unsigned(boolean, optional): create a dataset of unsigned type. + features(dictionary, optional): See create_dataset. + + Returns: + Dictionary + """ + data = to_array(data, type) + get_context().dataManager.setDataset(path, data, unsigned, features) + +def create_group(path): + """Create an empty dataset within the current persistence context. + + Args: + path(str): Path to group relative to the current persistence context root. + Returns: + None + """ + get_context().dataManager.createGroup(path) + +def create_dataset(path, type, unsigned=False, dimensions=None, features=None): + """Create an empty dataset within the current persistence context. 
+ + Args: + path(str): Path to dataset relative to the current persistence context root. + type(str): array type 'b' = byte, 'h' = short, 'i' = int, 'l' = long, 'f' = float, + 'd' = double, 'c' = char, 's' = String, 'z'=bool, 'o' = Object + unsigned(boolean, optional) + dimensions(tuple of int, optional): a 0 value means variable length in that dimension. + features(dictionary, optional): storage features for the dataset, format specific. + Keys for HDF5: "layout": "compact", "contiguous" or "chunked" + "compression": True, "max" or deflation level from 1 to 9 + "shuffle": Byte shuffle before compressing. + "chunk": tuple, setting the chunk size + Default: No compression, contiguous for fixed size arrays, chunked for variable size, compact for scalars. + Returns: + None + """ + get_context().dataManager.createDataset(path, ScriptUtils.getType(type), unsigned, dimensions, features) + +def create_table(path, names, types=None, lengths=None, features=None): + """Create an empty table (dataset of compound type) within the current persistence context. + + Args: + path(str): Path to dataset relative to the current persistence context root. + names(list of strings): name of each column + types(array of str): 'b' = byte, 'h' = short, 'i' = int, 'l' = long, 'f' = float, + 'd' = double, 'c' = char, 's' = String, 'o' = Object + Note:A '[' prefix on type name indicates an array type. + lengths(list of int): the array length for each columns(0 for scalar types). + features(dictionary, optional): See create_dataset. + Returns: + None + """ + type_classes = [] + if (types is not None): + for i in range (len(types)): + type_classes.append(ScriptUtils.getType(types[i])) + get_context().dataManager.createDataset(path, names, type_classes, lengths, features) + +def append_dataset(path, data, index=None, type='d', shape=None): + """Append data to dataset. + + Args: + path(str): Path to dataset relative to the current persistence context root. 
+ data(number or array or list): name of each column. + index(int or list, optional): if set then add the data in a specific position in the dataset. + If integer is the index in an array (data must be 1 order lower than dataset) + If a list, specifies the full coordinate for multidimensional datasets. + type(str, optional): array type 'b' = byte, 'h' = short, 'i' = int, 'l' = long, 'f' = float, + 'd' = double, 'c' = char, 's' = String, 'o' = Object + default: 'd' (convert data to array of doubles) + shape(list, optional): only valid if index is a list, provides the shape of the data array. + In this case data must be a flattened one-dimensional array. + Returns: + None + """ + data = to_array(data, type) + if index is None: + get_context().dataManager.appendItem(path, data) + else: + if is_list(index): + if shape is None: + shape = [len(index)] + get_context().dataManager.setItem(path, data, index, shape) + else: + get_context().dataManager.setItem(path, data, index) + +def append_table(path, data): + """Append data to a table (dataset of compound type) + + Args: + path(str): Path to dataset relative to the current persistence context root. + data(list): List of valus for each column of the table. + Returns: + None + """ + if is_list(data): + arr = java.lang.reflect.Array.newInstance(Class.forName("java.lang.Object"),len(data)) + for i in range (len(data)): + if is_list(data[i]): + arr[i] = to_array(data[i], 'd') + else: + arr[i] = data[i] + data=arr + get_context().dataManager.appendItem(path, data) + +def flush_data(): + """Flush all data files immediately. + + Args: + None + Returns: + None + """ + get_context().dataManager.flush() + +def set_attribute(path, name, value, unsigned = False): + """Set an attribute to a group or dataset. + + Args: + path(str): Path to dataset relative to the current persistence context root. 
+ name(str): name of the atttribute + value(Object): the attribute value + unsigned(bool, optional): if applies, indicate if value is unsigned. + Returns: + None + """ + if is_list(value): + value = Convert.toStringArray(to_array(value)) + get_context().dataManager.setAttribute(path, name, value, unsigned) + +def log(log, data_file=None): + """Writes a log to the system log and data context - if there is an ongoing scan or script execution. + + Args: + log(str): Log string. + data_file(bool, optional): if true logs to the data file, in addiction to the system logger. + If None(default) appends to data file only if it exists. + + Returns: + None + """ + get_context().scriptingLog(str(log)) + if data_file is None: + data_file = get_exec_pars().open + if data_file: + try: + get_context().dataManager.appendLog(str(log)) + except: + #Do not generate exception if cannot write to data file + pass + +def set_exec_pars(**args): + """ Configures the script execution parameters, overriding the system configuration. + + Args: + args(optional arguments): + name(str): value of the {name} tag. Default is the running script name. + type(str): value of the {type} tag. Default is empty. + This field can be used to store data in sub-folders of standard location. + path(str): If defined provides the full path name for data output root (overriding config)) + The tag {data} can be used to enter a path relative to the standard data folder. + layout(str): Change data layout. + format(str): Change data format. + split(scan or True): Split scan data to another table. If set to True in scan command then split every pass. + depth_dim(int): dimension of 2d-matrixes in 3d datasets. + save(bool): Change option to auto save scan data. + flush(bool): Change option to flush file on each record. + keep(bool): Change option keep scan records in memory. If false do not add records to scan result. + preserve(bool): Change option to preserve device types. If false all values are converted to double. 
+ setpoints(bool): Save the positioner setpoints too. + verbose(bool): Enable options to save additional information (output, script). + compression(obj): True for enabling default compression, int for specifying deflation level. + Device or list of devices for specifying devices to be compressed. + shuffle(obj): True for enabling shuffling before compression. + Device or list of devices for specifying devices to be shuffled. + contiguous(obj): True for setting contiguous datasets for all devices. + Device or list of devices for specifying device datasets to be contiguous. + seq(int): Set next data file sequence number. + open(bool): If true create data output path immediately. If false closes output root, if open. + reset(bool): If true reset the scan counter - the {count} tag and set the timestamp to now. + group(str): Change layout group name for scans + tag(str): Change tag for scan names (affecting group or dataset name, according to layout) + then, then_success, then_exception(str): Sets statement to be executed on the completion of current. + defaults(bool): If true restore the original execution parameters. + + Graphical preferences: + line_plots(list): list of devices with enforced line plots. + range(str or list): "none", "auto", [min_x, max_x] or [min_x, max_x, min_y, max_y] + display(bool): if false disables scan data plotting and printing. + print_scan(bool): Enable/disables scan data printing to console. + plot_disabled(bool): Enable/disable scan plot + plot_layout (str):"Horizontal", "Vertical" or "Grid" + table_disabled(bool): Enable/disable scan table + enabled_plots (list of str or Readable): list of devices (Readables) to be plotted + plot_types(dict): Dictionary - Plot name(Readable or String) : Plot type(String or int) + auto_range(bool): If true automatic range scan plots x-axis. + manual_range(tuple): : Set range (min_x, max_x) or (min_x, max_x, min_y, max_y). None sets fixed range. + manual_range_y(tuple): Set y range (min_y, max_y). 
None sets fixed range. + domain_axis(str): Set the domain axis source: "Time", "Index", or a readable name. Default: first positioner. + status(str): set application status + """ + get_context().setExecutionPars(args) + +def get_exec_pars(): + """ Returns script execution parameters. + + Returns: + ExecutionParameters object. Fields: + name (str): execution name - {name} tag. + type (str): execution type - {type} tag. + path (str): output data root. + seq(int): data file sequence number. + open (bool): true if the output data root has been opened. + layout (str): data output layout. If None then using the configuration. + save (bool): auto save scan data option. + flush (bool): flush file on each record. + index (int): current scan index. + group (str): data group currently used for scan data storage. + if no ongoing scan return "/" if within a script, or else None if a console command. + scanPath (str): dataset or group corresponding to current scan. + scan (Scan): reference to current scan, if any + source (CommandSource): return the source of the script or command. + background (bool): return False if executing in main interpreter thread . + debug (bool): True if executing from statements in editor. + simulation (bool): global simulation flag. 
+ aborted (bool): True if execution has been aborted + """ + return get_context().getExecutionPars() + + +################################################################################################### +#EPICS +################################################################################################### + +def _adjust_channel_value(value, var_type=None): + if (value is None): + return value + if (var_type is not None): + if is_list(value): + var_type = var_type.replace(',','').replace('[','') + ret = [] + for item in value: + ret.append(_adjust_channel_value(item), var_type) + value = ret + else: + var_type = var_type.lower() + if var_type=='b': + value = byte(value) + elif var_type=='i': + value = short(value) + elif var_type=='l': + value = int(value) + elif var_type=='f': + value = float(value) + elif var_type=='d': + value = float(value) + elif var_type=='s': + value = str(value) + + if isinstance(value,tuple): + value = list(value) + if isinstance(value,list): + list_type = type(value[0]) + array_types = { + int: "i", + long: "l", + float:"d", + str:Class.forName("java.lang.String"), + } + array_type = array_types.get(type(value[0]),'d') + array = PyArray(array_type) + array.fromlist(value) + value=array + return value + +def caget(name, type=None, size=None, meta = False ): + """Reads an Epics PV. + + Args: + name(str): PV name + type(str, optional): type of PV. By default gets the PV standard field type. + Scalar values: 'b', 'i', 'l', 'd', 's'. + Array values: '[b', '[i,', '[l', '[d', '[s'. + size (int, optional): for arrays, number of elements to be read. Default read all. + meta (bool, optional): if true gets channel value and metadata (timestamp, severity). 
+ + Returns: + PV value if meta is false, otherwise a dictionary containing PV value and metadata + """ + if meta: + return Epics.getMeta(name, Epics.getChannelType(type), size) + return Epics.get(name, Epics.getChannelType(type), size) + +def cawait(name, value, timeout=None, comparator=None, type=None, size=None): + """Wait for a PV to have a given value. + + Args: + name(str): PV name + value (obj): value to compare to + timeout(float, optional): time in seconds to wait. If None, waits forever. + comparator(java.util.Comparator or float, optional): if None waits for equality. + If a numeric value is provided, waits for channel to be in range. + type(str, optional): type of PV. By default gets the PV standard field type. + Scalar values: 'b', 'i', 'l', 'd', 's'. + Array values: '[b', '[i,', '[l', '[d', '[s'. + size (int, optional): for arrays, number of elements to be read. Default read all. + + Returns: + None + """ + if (timeout is not None): + timeout = int(timeout*1000) + value = _adjust_channel_value(value) + Epics.waitValue(name, value, comparator, timeout, Epics.getChannelType(type), size) + +def caput(name, value, timeout = None): + """Writes to an Epics PV. + + Args: + name(str): PV name + value(scalar, string or array): new PV value. + timeout(int, optional): timeout in seconds to the write. If None waits forever to completion. + + Returns: + None + """ + value=_adjust_channel_value(value) + if (timeout is not None): + timeout = int(timeout*1000) + return Epics.put(name, value, timeout) + +def caputq(name, value): + """Writes to an Epics PV and does not wait. + + Args: + name(str): PV name + value(scalar, string or array): new PV value. + + Returns: + None + """ + value=_adjust_channel_value(value) + return Epics.putq(name, value) + +def camon(name, type=None, size=None, wait = sys.maxint): + """Install a monitor to an Epics PV and print value changes. + + Args: + name(str): PV name + type(str, optional): type of PV. 
By default gets the PV standard field type. + Scalar values: 'b', 'i', 'l', 'd', 's'. + Array values: '[b', '[i,', '[l', '[d', '[s'. + size (int, optional): for arrays, number of elements to be read. Default read all. + wait (int, optional): blocking time for this function. By default blocks forever. + Returns: + None + """ + val = lambda x: x.tolist() if isinstance(x,PyArray) else x + + class MonitorListener(java.beans.PropertyChangeListener): + def propertyChange(self, pce): + print val(pce.getNewValue()) + + channel = create_channel(name, type, size) + print val(channel.getValue()) + channel.setMonitored(True) + channel.addPropertyChangeListener(MonitorListener()) + + try: + time.sleep(wait) + finally: + Epics.closeChannel(channel) + +def create_channel_device(channel_name, type=None, size=None, device_name=None, monitored=False): + """Create a device from an EPICS PV. + + Args: + channel_name(str): PV name + type(str, optional): type of PV. By default gets the PV standard field type. + Scalar values: 'b', 'i', 'l', 'd', 's'. + Array values: '[b', '[i,', '[l', '[d', '[s'. + size (int, optional): for arrays, number of elements to be read. Default read all. + device_name (str, optional): device name (if different from hannel_name. 
+ Returns: + None + """ + dev = Epics.newChannelDevice(channel_name if (device_name is None) else device_name , channel_name, Epics.getChannelType(type)) + if get_context().isSimulation(): + dev.setSimulated() + dev.initialize() + if (size is not None): + dev.setSize(size) + if (monitored): + dev.setMonitored(True) + return dev + + +################################################################################################### +#Concurrent execution +################################################################################################### + +class Callable(java.util.concurrent.Callable): + def __init__(self, method, *args): + self.method = method + self.args = args + self.thread = java.lang.Thread.currentThread() + def call(self): + try: + get_context().startedChildThread(self.thread) + return self.method(*self.args) + finally: + get_context().finishedChildThread(self.thread) + +def fork(*functions): + """Start execution of functions in parallel. + + Args: + *functions(function references) + + Returns: + List of callable + """ + callables = [] + for m in functions: + if is_list(m): + callables.append(Callable(m[0],*m[1])) + else: + callables.append(Callable(m)) + return Threading.fork(callables) + +def join(futures): + """Wait parallel execution of functions. + + Args: + futures(Future or list of Future) : as returned from fork + + Returns: + None +""" + try: + futures=to_list(futures) + return Threading.join(futures) + except java.util.concurrent.ExecutionException, ex: + raise ex.getCause() + +def parallelize(*functions): + """Equivalent to fork + join + + Args: + *functions(function references) + + Returns: + None + """ + futures = fork(*functions) + return join(futures) + +def invoke(f, wait = False): + """ Execute in event thread. 
+ + Args: + f(function reference) + wait (boolean, optional) + """ + if is_list(f): [m, a] = f; f = lambda: m(*a) + invokeAndWait(f) if wait else invokeLater(f) + + +################################################################################################### +#Background task control. +################################################################################################### + +def start_task(script, delay = 0.0, interval = -1): + """Start a background task + + Args: + script(str): Name of the script implementing the task + delay(float, optional): time in seconds for the first execution. + Default starts immediately. + interval(float, optional): time in seconds for between execution. + If negative (default), single-execution. + + Returns: + Task object. + """ + delay_ms=int(delay*1000) + interval_ms=int(interval*1000) if (interval>=0) else int(interval) + return get_context().startTask(script, delay_ms, interval_ms) + +def stop_task(script, force = False): + """Stop a background task + + Args: + script(str): Name of the script implementing the task + force(boolean, optional): interrupt current execution, if running + + Returns: + None + """ + get_context().stopTask(script, force) + + +################################################################################################### +#Versioning +################################################################################################### + +def commit(message, force = False): + """Commit the changes to the repository. + + Args: + message(str): commit message + force(bool, optional): if False, raises exception if no change detected in repo + + Returns: + None + """ + get_context().commit(message, force) + +def diff(): + """Return list of changes in the repository + + Args: + None + + Returns: + None + """ + return get_context().diff() + +def checkout_tag(tag): + """Checkout a tag name. + + Args: + tag(str): tag name. 
+ + Returns: + None + """ + get_context().checkoutTag(tag) + +def checkout_branch(tag): + """Checkout a local branch name. + + Args: + tag(str): branch name. + + Returns: + None + """ + get_context().checkoutLocalBranch(tag) + +def pull_repository(): + """Pull from remote repository. + + """ + get_context().pullFromUpstream() + +def push_repository(all_branches=True, force=False, push_tags=False): + """Push to remote repository. + + Args: + all_branches(boolean, optional): all branches or just current. + force(boolean, optional): force flag. + push_tags(boolean, optional): push tags. + + Returns: + None + """ + get_context().pushToUpstream(all_branches, force, push_tags) + +def cleanup_repository(): + """Performs a repository cleanup. + + Args: + None + + Returns: + None + """ + get_context().cleanupRepository() + + +################################################################################################### +#Device Pool +################################################################################################### + +def get_device(device_name): + """Returns a configured device (or imaging source) by its name. + + Args: + device_name(str): name of the device. + + Returns: + device + """ + return get_context().devicePool.getByName(device_name) + +def add_device(device, force = False): + """Add a device (or imaging source) to the device pool. + + Args: + device(Device or Source) + force(boolean, optional): if true then dispose existing device with same name. + Otherwise will fail in case of name clash. + + Returns: + True if device was added, false if was already in the pool, or exception in case of name clash. + """ + return get_context().devicePool.addDevice(device, force, True) + +def remove_device(device): + """Remove a device (or imaging source) from the device pool. + + Args: + device(Device or Source) + + Returns: + bool: true if device was removed. 
+ """ + device=string_to_obj(device) + return get_context().devicePool.removeDevice(device) + +def set_device_alias(device, alias): + """Deprecated, use "dev.set_alias" instead. Set a device alias to be used in scans (datasets and plots). + + Args: + device(Device) + alias(str): replace device name in scans. + + Returns: + None + """ + device=string_to_obj(device) + device.setAlias(alias) + +def stop(): + """Stop all devices implementing the Stoppable interface. + + Args: + None + + Returns: + None + """ + get_context().stopAll() + +def update(): + """Update all devices. + + Args: + None + + Returns: + None + """ + get_context().updateAll() + +def reinit(dev = None): + """Re-initialize devices. + + Args: + dev(Device, optional): Device to be re-initialized (if None, all devices not yet initialized) + + Returns: + List with devices not initialized. + """ + if dev is not None: + dev=string_to_obj(dev) + return get_context().reinit(dev) + return to_list(get_context().reinit()) + +def create_device(url, parent=None): + """Create a device form a definition string(see InlineDevice) + + Args: + url(str or list of string): the device definition string (or list of strings) + parent(bool, optional): parent device + + Returns: + The created device (or list of devices) + """ + if parent is not None: + parent=string_to_obj(parent) + return InlineDevice.create(url, parent) + + +def create_averager(dev, count, interval=0.0, name = None, monitored = False): + """Creates and initializes and averager for dev. + + Args: + dev(Device): the source device + count(int): number of samples + interval(float, optional): sampling interval(s). If negative sampling is made on data change event. + name(str, optional): sets the name of the device (default is: averager) + monitored (bool, optional): if true then averager processes asynchronously. 
+ + Returns: + Averager device + """ + dev = string_to_obj(dev) + if isinstance(dev, ReadableArray): + av = ArrayAverager(dev, count, int(interval*1000)) if (name is None) else ArrayAverager(name, dev, count, int(interval*1000)) + else: + av = Averager(dev, count, int(interval*1000)) if (name is None) else Averager(name, dev, count, int(interval*1000)) + av.initialize() + if (monitored): + av.monitored = True + return av + +def tweak(dev, step, is2d=False): + """Move one or more positioners in steps using the arrow keys. + Steps are increased/decreased using the shift and control keys. + + Args: + dev(Positioner or List): the device or list of devices to move. + step(float or List): step size or list of step sizes + is2d(bool, optional): if true moves second motor with up/down arrows. + """ + if (get_exec_pars().isBackground()): return + dev,step = to_list(string_to_obj(dev)),to_list(step) + while (True): + key=get_context().waitKey(0) + for i in range(len(dev)): + if not is2d or i==0: + if key == 0x25: dev[i].moveRel(-step[i]) #Left + elif key == 0x27: dev[i].moveRel(step[i]) #Right + if key in (0x10, 0x11): + step[i] = step[i]*2 if key == 0x10 else step[i]/2 + print "Tweak step for " + dev[i].name + " set to: "+str(step[i]) + if is2d and len(dev)>1: + if key == 0x26: dev[1].moveRel(step[1]) #Top + elif key == 0x28: dev[1].moveRel(-step[1]) #Bottom + + +################################################################################################### +#Maths +################################################################################################### + +def arrmul(a, b): + """Multiply 2 series of the same size. + + Args: + + a(subscriptable) + b(subscriptable) + + Returns: + List + """ + return map(mul, a, b) + +def arrdiv(a, b): + """Divide 2 series of the same size. + + Args: + + a(subscriptable) + b(subscriptable) + + Returns: + List + """ + return map(truediv, a, b) + +def arradd(a, b): + """Add 2 series of the same size. 
+ + Args: + + a(subscriptable) + b(subscriptable) + + Returns: + List + """ + return map(add, a, b) + +def arrsub(a, b): + """Subtract 2 series of the same size. + + Args: + + a(subscriptable) + b(subscriptable) + + Returns: + List + """ + return map(sub, a, b) + +def arrabs(a): + """Returns the absolute of all elements in series. + + Args: + + a(subscriptable) + + Returns: + List + """ + return map(abs, a) + +def arroff(a, value = "mean"): + """Subtract offset to all elemets in series. + + Args: + + a(subscriptable) + type(int or str, optional): value to subtract from the array, or "mean" or "min". + + Returns: + List + """ + if value=="mean": + value = mean(a) + elif value=="min": + value = min(a) + return [x-value for x in a] + +def mean(data): + """Calculate the mean of a sequence. + + Args: + data(subscriptable) + + Returns: + Mean of the elements in the object. + """ + return reduce(lambda x, y: x + y, data) / len(data) + +def variance(data): + """Calculate the variance of a sequence. + + Args: + data(subscriptable) + + Returns: + Variance of the elements in the object. + """ + c = mean(data) + ss = sum((x-c)**2 for x in data) + return ss/len(data) + +def stdev(data): + """Calculate the standard deviation of a sequence. + + Args: + data(subscriptable) + + Returns: + Standard deviation of the elements in the object. + """ + return variance(data)**0.5 + + +def center_of_mass(data, x = None): + """Calculate the center of mass of a series, and its rms. + + Args: + + data(subscriptable) + x(list, tuple, array ..., optional): x coordinates + + Returns: + Tuple (com, rms) + """ + if x is None: + x = Arr.indexesDouble(len(data)) + data_sum = sum(data) + if (data_sum==0): + return float('nan') + xmd = arrmul( x, data) + com = sum(xmd) / data_sum + xmd2 = arrmul( x, xmd) + com2 = sum(xmd2) / data_sum + rms = math.sqrt(abs(com2 - com * com)) + return (com, rms) + +def poly(val, coefs): + """Evaluates a polinomial: (coefs[0] + coefs[1]*val + coefs[2]*val^2... 
+ + Args: + val(float): value + coefs (list of loats): polinomial coefficients + Returns: + Evaluated function for val + """ + r = 0 + p = 0 + for c in coefs: + r = r + c * math.pow(val, p) + p = p + 1 + return r + +def histogram(data, range_min = None, range_max = None, bin = 1.0): + """Creates histogram on data. + + Args: + data (tuple, array, List or Array): input data can be multi-dimensional or nested. + range_min (int, optional): minimum histogram value. Default is floor(min(data)) + range_max (int, optional): maximul histogram value. Default is ceil(max(data)) + bin(int or float, optional): if int means number of bins. If float means bin size. Default = 1.0. + Returns: + tuple: (ydata, xdata) + """ + if range_min is None: range_min = math.floor(min(flatten(data))) + if range_max is None: range_max = math.ceil(max(flatten(data))) + if type(bin) is float: + bin_size = bin + n_bin = int(math.ceil(float(range_max - range_min)/bin_size)) + else: + n_bin = bin + bin_size = float(range_max - range_min)/bin + + result = [0] * n_bin + for d in flatten(data): + b = int( float(d - range_min) / bin_size) + if (b >=0) and (b < n_bin): + result[b] = result[b] + 1 + return (result, frange(range_min, range_max, bin_size)) + +def _turn(p, q, r): + return cmp((q[0] - p[0])*(r[1] - p[1]) - (r[0] - p[0])*(q[1] - p[1]), 0) + +def _keep(hull, r): + while len(hull) > 1 and _turn(hull[-2], hull[-1], r) != 1: + hull.pop() + return (not len(hull) or hull[-1] != r) and hull.append(r) or hull + +def convex_hull(point_list=None, x=None, y=None): + """Returns the convex hull from a list of points. Either point_list or x,y is provided. 
+ (Alhorithm taken from http://tomswitzer.net/2010/03/graham-scan/) + Args: + point_list (array of tuples, optional): arrays of the points + x (array of float, optional): array with x coords of points + y (array of float, optional): array with y coords of points + Returns: + Array of points or (x,y) + """ + is_point_list = point_list is not None + if not point_list: + point_list=[] + for i in range(len(x)): + if((x[i] is not None) and (y[i] is not None)): point_list.append((x[i], y[i])) + point_list.sort() + lh,uh = reduce(_keep, point_list, []), reduce(_keep, reversed(point_list), []) + ret = lh.extend(uh[i] for i in xrange(1, len(uh) - 1)) or lh + if not is_point_list: + x, y = [], [] + for i in range(len(ret)): + x.append(ret[i][0]) + y.append(ret[i][1]) + return (x,y) + return ret + +################################################################################################### +#Utilities +################################################################################################### + +def get_setting(name=None): + """Get a persisted script setting value. + + Args: + name (str): name of the setting. + Returns: + String with setting value or None if setting is undefined. + If name is None then returns map with all settings. + """ + return get_context().getSettings() if (name is None) else get_context().getSetting(name) + +def set_setting(name, value): + """Set a persisted script setting value. + + Args: + name (str): name of the setting. + value (obj): value for the setting, converted to string (if None then remove the setting). + Returns: + None. + """ + get_context().setSetting(name, value) + +def exec_cmd(cmd, stderr_raise_ex = True): + """Executes a shell command. If errors happens raises an exception. + + Args: + cmd (str): command process input. If stderr_raise_ex is set then raise exception if stderr is not null. + Returns: + Output of command process. 
+ """ + import subprocess + proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE if stderr_raise_ex else subprocess.STDOUT, shell=True) + (ret, err) = proc.communicate() + if stderr_raise_ex and (err is not None) and err!="": + raise Exception(err) + return ret + +def exec_cpython(script_name, args = [], method_name = None, python_name = "python"): + """Executes an external cpython process. + + Args: + script_name (str): name of the script (can be absolute or relative to script folder). + args(list, optional): arguments to python process (or parameters to method, if not None) + method_name (str, optional): if defined indicates a method to be called. + python_name (str, optional): name of executable + Returns: + Return of python process. + """ + if method_name is None: + script = get_context().scriptManager.library.resolveFile(script_name) + if script is None : + script= os.path.abspath(script_name) + c = python_name + " " + script + " " + if args is not None and (len(args)>0): + for arg in args: + c = c + str(arg) + " " + return exec_cmd(c) + else: + #Calling a method + import json + import tempfile + script = os.path.abspath(get_context().scriptManager.library.resolveFile(script_name)) + with open(get_context().setup.getContextPath()+ "/Temp" + str(java.lang.Thread.currentThread().getId())+".py", "wb") as f: + f.write(("script = '" +script +"'\n").replace('\\', '\\\\')) + f.write("function = '" +method_name +"'\n") + f.write("jsonargs = '" + json.dumps(args) +"'\n") + f.write("""import sys +import json +import os +args =json.loads(jsonargs) +i = script.rfind(os.sep) +module = script[i+1:-3] +sys.path.insert(1,script[:i+1]) +exec ('from ' + module + ' import ' + function + ' as function') +print (json.dumps(function(*args))) +""") + f.close() + ret = exec_cpython(os.path.abspath(f.name), python_name = python_name) + os.remove(f.name) + ret = '\n'+ret[0:-len(os.linesep)] + jsonret = ret[ret.rfind('\n')+1:].strip() + return json.loads(jsonret) 
+ +def bsget(channel, modulo=1, offset=0, timeout = 5.0): + """Reads an values a bsread stream, using the default provider. + + Args: + channel(str or list of str): channel name(s) + module(int, optional): stream modulo + offset(int, optional): stream offset + timeout(float, optional): stream timeout in secs + Returns: + BS value or list of values + """ + channels = to_list(channel) + ret = Stream.readChannels(channels, modulo, offset, int(timeout * 1000)) + if is_string(channel): + return ret[0] + return ret + +def flatten(data): + """Flattens multi-dimentional or nested data. + + Args: + data (tuple, array, List or Array): input data + Returns: + Iterator on the flattened data. + """ + if isinstance(data,PyArray): + if not data.typecode.startswith('['): + return data + + import itertools + return itertools.chain(*data) + +def frange_gen(start, finish, step): + while ((step >= 0.0) and (start <= finish)) or ((step < 0.0) and (start >= finish)): + yield start + start += step + +def frange(start, finish, step, enforce_finish = False, inclusive_finish = False): + """Create a list with a range of float values (a float equivalent to "range"). + + Args: + start(float): start of range. + finish(float): end of range. + step(float): step size. + enforce_finish(boolean, optional): adds the final element even if range was not exact. + inclusive_finish(boolean, optional): if false finish is exclusive (like in "range"). + + Returns: + list + """ + step = float(step) + ret = list(frange_gen(start, finish, step)) + if len(ret) > 0: + if inclusive_finish == False: + if ret[-1]==finish: + del ret[-1] + if enforce_finish and ret[-1]!=finish: + ret.append(finish) + return ret + +def notify(subject, text, attachments = None, to=None): + """Send email message. + + Args: + subject(str): Message subject. + text(str): Message body. + attachments(list of str, optional): list of files to be attached (expansion tokens are allowed). + to (list ofd str, optional): recipients. 
If None uses the recipients defined in mail.properties. + Returns: + None + """ + get_context().notify(subject, text, to_list(attachments), to_list(to)) + +def expand_path(path, timestamp=-1): + """Expand path containing tokens. + + Args: + path(str): path name. + timestamp(int): If not defined(-1), uses now. + Returns: + Expanded path name. + """ + + return get_context().setup.expandPath(path, timestamp) + +################################################################################################### +#UI +################################################################################################### + +def set_status(status): + """Set the application status. + + Args: + status(str): new status. + + Returns: + None + """ + set_preference(Preference.STATUS, status) + +def setup_plotting( enable_plots=None, enable_table=None,plot_list=None, line_plots=None, range=None, domain=None, defaults=None): + if defaults == True: set_preference(Preference.DEFAULTS, True) + if enable_plots is not None: set_preference(Preference.PLOT_DISABLED, not enable_plots) + if enable_table is not None: set_preference(Preference.TABLE_DISABLED, not enable_table) + if plot_list is not None: set_preference(Preference.ENABLED_PLOTS, None if plot_list == "all" else plot_list) + if line_plots is not None: + plots = None + if line_plots != "none": + plots = {} + for p in line_plots: plots[p]=1 + set_preference(Preference.PLOT_TYPES, plots) + if range is not None: + if range == "none": set_preference(Preference.AUTO_RANGE, None) + elif range == "auto": set_preference(Preference.AUTO_RANGE, True) + else: set_preference(Preference.MANUAL_RANGE, range) + if domain is not None: set_preference(Preference.DOMAIN_AXIS, domain) + +def set_preference(preference, value): + """Hints to graphical layer: + + Args: + preference(Preference): Enum of preference types: + PLOT_DISABLED: enable/disable scan plot (True/False) + PLOT_LAYOUT: "Horizontal", "Vertical" or "Grid" + TABLE_DISABLED: enable/disable 
scan table (True/False) + ENABLED_PLOTS: select Readables to be plotted (list of Readable or String (names)) + PLOT_TYPES: Dictionary - Plot name(Readable or String) : Plot type(String or int) + PRINT_SCAN: Print scan records to console + AUTO_RANGE: Automatic range scan plots x-axis + MANUAL_RANGE: Manually set scan plots x-axis + MANUAL_RANGE_Y: Manually set scan plots y-axis + DOMAIN_AXIS: Set the domain axis source: "Time", "Index", or a readable name. + Default(None): first positioner + STATUS: set application status + value(object): preference value + + Returns: + None + """ + value = to_array(value, 'o') #If list then convert to Object array + get_context().setPreference(preference, value) + +def get_string(msg, default = None, alternatives = None, password = False): + """ + Reads a string from UI + Args: + msg(str): display message. + default(str, optional): value displayed when window is shown. + alternatives(list of str, optional): if provided presents a combo box instead of an editing field. + password(boolean, optional): if True hides entered characters. + + Returns: + String entered of null if canceled + """ + if password : + return get_context().getPassword(msg, None) + return get_context().getString(msg, str(default) if (default is not None) else None, alternatives) + +def get_option(msg, type = "YesNoCancel"): + """ + Gets an option from UI + Args: + msg(str): display message. + type(str, optional): 'YesNo','YesNoCancel' or 'OkCancel' + + Returns: + 'Yes', 'No', 'Cancel' + """ + return get_context().getOption(msg, type) + +def show_message(msg, title=None, blocking = True): + """ + Pops a blocking message to UI + + Args: + msg(str): display message. + title(str, optional): dialog title + """ + get_context().showMessage(msg, title, blocking) + +def show_panel(device, title=None): + """ + Show, if exists, the panel relative to this device. + + Args: + device(Device or str or BufferedImage): device + title only apply to BufferedImage objects. 
For devices the title is the device name. + """ + if type(device) is BufferedImage: + device = DirectSource(title, device) + device.initialize() + if is_string(device): + device = get_device(device) + return get_context().showPanel(device) diff --git a/script/Lib/builtin_utils.py b/script/Lib/builtin_utils.py new file mode 100644 index 0000000..899497b --- /dev/null +++ b/script/Lib/builtin_utils.py @@ -0,0 +1,174 @@ +import sys +import time +import math +import os.path +from operator import add, mul, sub, truediv +from time import sleep +from array import array +import jarray + +import java.lang.Class as Class +import java.lang.Object as Object +import java.lang.System as System +import java.beans.PropertyChangeListener +import java.util.concurrent.Callable +import java.util.List +import java.util.ArrayList +import java.lang.reflect.Array +import java.lang.Thread +import java.awt.image.BufferedImage as BufferedImage +import java.awt.Color as Color +import java.awt.Point as Point +import java.awt.Dimension as Dimension +import java.awt.Rectangle as Rectangle +import java.awt.Font as Font +import org.python.core.PyArray as PyArray +import org.python.core.PyFunction as PyFunction +import org.python.core.PyMethod as PyMethod +import org.python.core.PyGenerator as PyGenerator + +import java.lang.Boolean +import java.lang.Integer +import java.lang.Float +import java.lang.Double +import java.lang.Short +import java.lang.Byte +import java.lang.Long +import java.lang.String + +import ch.psi.pshell.core.Context +import ch.psi.pshell.scripting.ScriptUtils as ScriptUtils +import ch.psi.utils.Convert as Convert +import ch.psi.utils.Arr as Arr + + +################################################################################################### +#Type conversion and checking +################################################################################################### + +def to_array(obj, type = None, primitive = True): + """Convert Python list to Java array. 
+ + Args: + obj(list): Original data. + type(str): array type 'b' = byte, 'h' = short, 'i' = int, 'l' = long, 'f' = float, 'd' = double, + 'c' = char, 'z' = boolean, 's' = String, 'o' = Object + Returns: + Java array. + """ + if obj is None: + return None + if type is None: + type = 'o' + enforceArrayType=False + else: + enforceArrayType=True + if type[0] == '[': + type = type[1:] + element_type = ScriptUtils.getPrimitiveType(type) if primitive else ScriptUtils.getType(type) + + def convert_1d_array(obj): + try: + if primitive: + #If primitive, first try converting with jarray.array + return jarray.array(obj,type) + except: + pass + + if type == 'c': + ret = java.lang.reflect.Array.newInstance(element_type,len(obj)) + for i in range(len(obj)): ret[i] = chr(obj[i]) + return ret + if type == 'z': + ret = java.lang.reflect.Array.newInstance(element_type,len(obj)) + for i in range(len(obj)): + ret[i]= True if obj[i] else False + return ret + if type == 'o': + ret = java.lang.reflect.Array.newInstance(element_type,len(obj)) + for i in range(len(obj)): + ret[i]= obj[i] + return ret + if type == "s": + return Convert.toStringArray(obj) + if primitive: + ret = Convert.toPrimitiveArray(obj, element_type) + else: + ret = java.lang.reflect.Array.newInstance(element_type,len(obj)) + for i in range(len(obj)): ret[i] = Convert.toType(obj[i],element_type) + return ret + + if isinstance(obj,PyArray): + if enforceArrayType: + if Arr.getComponentType(obj) != element_type: + rank = Arr.getRank(obj) + if (rank== 1): + obj=convert_1d_array(obj) + elif (rank>1): + pars, aux = [element_type], obj + for i in range(rank): + pars.append(len(aux)) + aux = aux[0] + ret = java.lang.reflect.Array.newInstance(*pars) + for i in range(len(obj)): + ret[i]=to_array(obj[i], type) + obj = ret + elif is_list(obj): + if type=='o': + ret = java.lang.reflect.Array.newInstance(element_type, len(obj)) + for i in range (len(obj)): + if is_list(obj[i]) or isinstance(obj[i],PyArray): + ret[i] = 
to_array(obj[i],type) + else: + ret[i] = obj[i] + obj=ret + elif len(obj)>0 and (is_list(obj[0]) or isinstance(obj[0],PyArray)): + pars, aux = [element_type], obj + while len(aux)>0 and (is_list(aux[0]) or isinstance(aux[0],PyArray)): + pars.append(len(aux)) + aux = aux[0] + pars.append(0) + ret = java.lang.reflect.Array.newInstance(*pars) + for i in range(len(obj)): + ret[i]=to_array(obj[i], type) + obj=ret + else: + obj= convert_1d_array(obj) + return obj + +def to_list(obj): + """Convert an object into a Python List. + + Args: + obj(tuple or array or List): Original data. + + Returns: + List. + """ + if obj is None: + return None + if isinstance(obj,tuple) or isinstance(obj,java.util.List) : + return list(obj) + #if isinstance(obj,PyArray): + # return obj.tolist() + if not isinstance(obj,list): + return [obj,] + return obj + +def is_list(obj): + return isinstance(obj,tuple) or isinstance(obj,list) or isinstance (obj, java.util.List) + +def is_string(obj): + return (type(obj) is str) or (type(obj) is unicode) + +def is_interpreter_thread(): + return java.lang.Thread.currentThread().name == "MainThread" + +################################################################################################### +#Access to context singleton +################################################################################################### +def get_context(): + return ch.psi.pshell.core.Context.getInstance() + + + diff --git a/script/Lib/diffutils.py b/script/Lib/diffutils.py new file mode 100644 index 0000000..a5df179 --- /dev/null +++ b/script/Lib/diffutils.py @@ -0,0 +1,1032 @@ +###################################################################################################\ +# Diffcalc utilities +################################################################################################### + +###################################################################################################\ +# Installing 
+################################################################################################### + +#1- Download from: https://github.com/DiamondLightSource/diffcalc/archive/v2.1.zip +#2- Extract the contents to {script}/Lib/diffcalc +#3- Download http://central.maven.org/maven2/gov/nist/math/jama/1.0.3/jama-1.0.3.jar +# to the extensions folder. +#4- On {script}/Lib/diffcalc/diffcalc/gdasupport/you.py, the line " wl.asynchronousMoveTo(1)" +# must be commented for the energy not to move when the library is loaded. + +###################################################################################################\ +# Library loading and Hardware setup +################################################################################################### + +#1- Create a MotorGroup with the diffractometer motors +# e.g. 'sixc', containing mu, delta, gam, eta, chi, phi motors (gam = nu) +# or 'fivec', containing delta, gam, eta, chi, phi motors +# or 'fourc', containing delta, eta, chi, phi motors +#2- Create positioner to read/set the energy in kEv, e.g. named 'en' +#3- Execute: run("diffutils") +#4- Execute: setup_diff(sixc, en) + + +###################################################################################################\ +# API +################################################################################################### + +# Orientation commands defined in https://github.com/DiamondLightSource/diffcalc#id19 are +# defined heren with identical signatures, and so the constraint commands. +# Motion command names were changed because thge original can collide with other globals: +# hklci, hklca, hklwh, hklget, hklmv and hklsim(hkl). 
+ + +from __future__ import absolute_import +import traceback +import os + + +import Jama.Matrix +diffcalc_path = os.path.abspath(expand_path("{script}/Lib/diffcalc")) +if not diffcalc_path in sys.path: + sys.path.append(diffcalc_path) + +import diffcalc +import math +from diffcalc import settings +from diffcalc.hkl.you.geometry import YouGeometry,SixCircle, FiveCircle, FourCircle, YouPosition +from diffcalc.hardware import HardwareAdapter +from diffcalc.ub.persistence import UBCalculationJSONPersister, UbCalculationNonPersister +from diffcalc.gdasupport.minigda.scannable import ScannableBase, ScannableGroup +#from diffcalc.gdasupport.minigda import command +import diffcalc.hkl.you.calc as you_calc + + +import ch.psi.pshell.device.PositionerConfig as PositionerConfig +import ch.psi.pshell.device.RegisterConfig as RegisterConfig +import ch.psi.pshell.device.Register as Register + +_difcalc_names = {} + +# +# Disable error handling designed for interactive use +#diffcalc.util.DEBUG = True +# Disable console bold charcters +diffcalc.util.COLOURISE_TERMINAL_OUTPUT = False + +################################################################################################### +# Device mapping to difcalc +################################################################################################### +class PositionerScannable(ScannableBase): + def __init__(self, positioner, name = None): + self.positioner = positioner + self.name = positioner.name if name is None else name + self.inputNames = [self.name] + self.outputFormat = ['% 6.4f'] + self.level = 3 + + def isBusy(self): + return self.positioner.state == State.Busy + + def waitWhileBusy(self): + self.positioner.waitReady(-1) + + def asynchronousMoveTo(self, new_position): + #print "Moving " , self.name, " to: ", new_position + self.positioner.moveAsync(float(new_position), -1) + + def getPosition(self): + return self.positioner.getPosition() + +def _get_diffcalc_axis_names(): + 
nu_name=diffcalc.hkl.you.constraints.NUNAME + return ("mu", "delta", nu_name, "eta", "chi", "phi") + +class PositionerScannableGroup(ScannableGroup): + def __init__(self, name, motors, diffcalc_axis_names=None): + self.name = name + global _difcalc_names + _difcalc_names = {} + positioners = [] + if diffcalc_axis_names is None: + if len(motors) == 6: diffcalc_axis_names = _get_diffcalc_axis_names() + elif len(motors) == 5: diffcalc_axis_names = ("delta", "gam", "eta", "chi", " phi") + elif len(motors) == 4: diffcalc_axis_names = ("delta", "eta", "chi", " phi") + self.diffcalc_axis_names = diffcalc_axis_names + for i in range(len(motors)): + _difcalc_names[motors[i]] = diffcalc_axis_names[i] + exec('self.' + diffcalc_axis_names[i] + ' = PositionerScannable(' + motors[i].name + ', "' +diffcalc_axis_names[i] + '")') + exec('positioners.append(self.' + diffcalc_axis_names[i] + ')' ) + #for m in motors: + # exec('self.' + m.name + ' = PositionerScannable(' + m.name + ', "' + m.name + '")') + # exec('positioners.append(self.' 
+ m.name + ')' ) + ScannableGroup.__init__(self, self.name, positioners) + +class MotorGroupScannable(PositionerScannableGroup): + def __init__(self, motor_group, diffcalc_axis_names=None, simultaneous_move=False): + self.simultaneous_move = simultaneous_move + self.motor_group = motor_group + PositionerScannableGroup.__init__(self, motor_group.name, motor_group.motors, diffcalc_axis_names) + self.motor_group.restoreSpeedAfterMove = self.simultaneous_move + + #Make sync moves (default implementation trigger each motor individually) + def asynchronousMoveTo(self, position): + if self.simultaneous_move: + position = [(float('nan') if v is None else v) for v in position] + self.motor_group.write(position) + else: + PositionerScannableGroup.asynchronousMoveTo(self, position) + + +class ScannableAdapter(HardwareAdapter): + def __init__(self, diffractometer, energy, energy_multiplier_to_kev=1): + self.diffractometer = diffractometer + self.energy = energy + self.energy_multiplier_to_kev = energy_multiplier_to_kev + input_names = diffractometer.getInputNames() + HardwareAdapter.__init__(self, input_names) + + #Returns the current physical POSITIONS + def get_position(self): + """ + pos = getDiffractometerPosition() -- returns the current physical + diffractometer position as a list in degrees + """ + return self.diffractometer.getPosition() + + #returns energy in kEv + def get_energy(self): + """energy = get_energy() -- returns energy in kEv (NOT eV!) 
""" + multiplier = self.energy_multiplier_to_kev + energy = self.energy.getPosition() * multiplier + if energy is None: + raise DiffcalcException("Energy has not been set") + return energy + + def get_motor(self,name): + global _motor_group + global _difcalc_names + for m in _difcalc_names.keys(): + if _difcalc_names[m] == name: + return m + for m in _motor_group.motors: + if m.name == name: + return m + raise Exception("Invalid axis name: " + str(name)) + + + def get_lower_limit(self, name): + '''returns lower limits by axis name. Limit may be None if not set + ''' + m = self.get_motor(name) + ret = m.getMinValue() + if ret == float("NaN"): ret = None + return ret + + def get_upper_limit(self, name): + '''returns upper limit by axis name. Limit may be None if not set + ''' + m = self.get_motor(name) + ret = m.getMaxValue() + if ret == float("NaN"): ret = None + return ret + + def set_lower_limit(self, name, value): + """value may be None to remove limit""" + if value is None: value = float("NaN") + m = self.get_motor(name) + m.config.minValue =value + + def set_upper_limit(self, name, value): + """value may be None to remove limit""" + if value is None: value = float("NaN") + m = self.get_motor(name) + m.config.maxValue =value + + def is_axis_value_within_limits(self, axis_name, value): + m = self.get_motor(axis_name) + upper = self.get_upper_limit(axis_name) + lower = self.get_lower_limit(axis_name) + if (upper is None) or (math.isnan(upper)): upper = sys.float_info.max + if (lower is None) or (math.isnan(lower)): lower = -sys.float_info.max + return lower <= value <= upper + + @property + def name(self): + return self.diffractometer.getName() + +class MotorGroupAdapter(ScannableAdapter): + def __init__(self, diffractometer, energy, energy_multiplier_to_kev=1, diffcalc_axis_names=None, simultaneous_move=False): + self.diffractometer = MotorGroupScannable(diffractometer, diffcalc_axis_names, simultaneous_move) + self.energy = PositionerScannable(energy) + 
self.energy.level = 3 + ScannableAdapter.__init__(self, self.diffractometer, self.energy, energy_multiplier_to_kev) + +class Wavelength(RegisterBase): + def doRead(self): + try: + return get_wavelength().getPosition() + except: + return None + + def doWrite(self, val): + get_wavelength().asynchronousMoveTo(val) + + +################################################################################################### +# HKL Pseudo-devices +################################################################################################### +class HklPositoner (PositionerBase): + def __init__(self, name, index, hkl_group): + PositionerBase.__init__(self, name, PositionerConfig()) + self.setParent(hkl_group) + self.index = index + + def isReady(self): + return PositionerBase.isReady(self) and self.getParent().isReady() + + def doRead(self): + return self.getParent()._setpoint[self.index] + + def doWrite(self, value): + #print "Setting " , self.getName(), "to: ", value + pos = [None, None, None] + pos[self.index] = value + self.getParent().write(pos) + + def doReadReadback(self): + if java.lang.Thread.currentThread() != self.getParent()._updating_thread: + self.getParent().update() + return self.getParent()._readback[self.index] + +class HklGroup(RegisterBase, Register.RegisterArray): + def __init__(self, name): + RegisterBase.__init__(self, name, RegisterConfig()) + self.hkl=get_hkl() + self.h, self.k, self.l = HklPositoner("h", 0, self), HklPositoner("k", 1, self), HklPositoner("l", 2, self) + add_device(self.h, True) + add_device(self.k, True) + add_device(self.l, True) + self._setpoint = self.doRead() + self._updating = False + + def getSize(self): + return 3 + + def doRead(self): + try: + self._readback = self.hkl.getPosition() + self._updating_thread = java.lang.Thread.currentThread() + self.h.update() + self.k.update() + self.l.update() + except: + #traceback.print_exc() + self._readback = (None, None, None) + finally: + self._updating_thread = None + return 
self._readback + + def doWrite(self, pos): + self._setpoint = None if (pos is None) else [(None if v is None else float(v)) for v in pos] + #print "Moving to: " + str(pos) + self.hkl.asynchronousMoveTo(pos) + + def sim(self, pos): + return self.hkl.simulateMoveTo(pos) + +################################################################################################### +# System setup +################################################################################################### +you = None +dc, ub, hardware, hkl = None, None, None, None +_motor_group = None +def setup_diff(diffractometer= None, energy= None, diffcalc_axis_names = None, geometry=None, persist_ub=True, simultaneous_move=False): + """ + configure diffractometer. Display configuration if no parameter is given + diffractometer: Diffraction motor group + energy: Positioner having energy in kev + geometry: YouGeometry extension. If none, uses default + diffcalc_axis_names: if None use defaults: + - mu, delta, gam, eta, chi, phi (six circle) + - delta, gam, eta, chi, phi (ficve circle) + - delta, eta, chi, phi (four circle) + """ + global you, dc, ub, hardware, hkl, _motor_group + if diffractometer is not None: + _motor_group = diffractometer + you = None + if geometry is not None: + settings.geometry = geometry + elif diffcalc_axis_names is not None: + class CustomGeometry(YouGeometry): + def __init__(self): + self.all_axis_names = _get_diffcalc_axis_names() + self.my_axis_names = diffcalc_axis_names + fixed_constraints = {} + for axis in self.all_axis_names: + if not axis in self.my_axis_names: + fixed_constraints[axis] = 0 + YouGeometry.__init__(self, diffractometer.name, fixed_constraints) + def physical_angles_to_internal_position(self, physical_angle_tuple): + pos=[] + index = 0 + for axis in self.all_axis_names: + pos.append(physical_angle_tuple[index] if (axis in self.my_axis_names) else 0) + index = index+1 + pos.append("DEG")#units + return YouPosition(*pos) + def 
internal_position_to_physical_angles(self, internal_position): + pos = internal_position.clone() + pos.changeToDegrees() + pos = pos.totuple() + ret = [] + for i in range (len(self.all_axis_names)): + if self.all_axis_names[i] in self.my_axis_names: + ret.append(pos[i]) + return tuple(ret) + settings.geometry = CustomGeometry() + elif len(diffractometer.motors) == 6: + settings.geometry = SixCircle() + elif len(diffractometer.motors) == 5: + settings.geometry = FiveCircle() + elif len(diffractometer.motors) == 4: + settings.geometry = FourCircle() + else: + raise Exception("Invalid motor group") + settings.hardware = MotorGroupAdapter(diffractometer, energy, 1, diffcalc_axis_names, simultaneous_move) + + if persist_ub: + settings.persistence_path = os.path.abspath(expand_path("{config}/diffcalc")) + if not os.path.exists(settings.persistence_path): + os.makedirs(settings.persistence_path) + print "UB calculations persistence path: " + settings.persistence_path + settings.ubcalc_persister = UBCalculationJSONPersister(settings.persistence_path) + else: + print "UB calculations are not persisteds" + settings.ubcalc_persister = UbCalculationNonPersister() + settings.axes_scannable_group = settings.hardware.diffractometer + settings.energy_scannable = settings.hardware.energy + settings.ubcalc_strategy = you_calc.YouUbCalcStrategy() + settings.angles_to_hkl_function = you_calc.youAnglesToHkl + from diffcalc.gdasupport import you + reload(you) + + # These must be imported AFTER the settings have been configured + from diffcalc.dc import dcyou as dc + from diffcalc.ub import ub + from diffcalc import hardware + from diffcalc.hkl.you import hkl + + add_device(HklGroup("hkl_group"), True) + add_device(Wavelength("wavelength", 6), True) + hkl_group.polling = 250 + wavelength.polling = 250 + + if settings.hardware is not None: + print "Diffractometer defined with:" + print " \t" + "Motor group: " + str(settings.hardware.diffractometer.name) + print " \t" + "Energy: " + 
str(settings.hardware.energy.name) + print "\nDiffcalc axis names:" + for m in _difcalc_names.keys(): + print " \t Motor " + m.name + " = Axis " + _difcalc_names[m] + else: + print "Diffractometer is not defined\n" + print + +def setup_axis(motor = None, min=None, max=None, cut=None): + """ + configure axis range and cut. + displays ranges if motor is None + """ + if motor is not None: + name = get_axis_name(motor) + if min is not None: hardware.setmin(name, min) + if max is not None: hardware.setmax(name, max) + if cut is not None: hardware.setcut(name, cut) + else: + print "Axis range configuration:" + hardware.hardware() + print + +################################################################################################### +# Acceess functions +################################################################################################### +def get_diff(): + return settings.hardware.diffractometer + +def get_energy(): + return settings.hardware.energy + +def get_adapter(): + return settings.hardware + +def get_motor_group(): + return _motor_group + +def get_wavelength(): + return you.wl + +def get_hkl(): + return you.hkl + +def get_axis_name(motor): + if is_string(motor): + motor = get_adapter().get_motor(motor) + return _difcalc_names[motor] + +################################################################################################### +# Orientation Commands +################################################################################################### + + +# State + +def newub(name): + """ + start a new ub calculation name + """ + try: + rmub(name) + except: + pass + try: + return ub.newub(name) + finally: + save_exp_context() +def loadub(name_or_num): + """ + load an existing ub calculation + """ + try: + return ub.loadub(name_or_num) + finally: + save_exp_context() + +def lastub(): + """ + load the last used ub calculation + """ + try: + return ub.lastub() + finally: + save_exp_context() + +def listub(): + """ + list the ub 
calculations available to load + """ + return ub.listub() + +def rmub(name_or_num): + """ + remove existing ub calculation + """ + return ub.rmub(name_or_num) + +def saveubas(name): + """ + save the ub calculation with a new name + """ + try: + return ub.saveubas(name) + finally: + save_exp_context() + +# Lattice + +def setlat(name=None, *args): + """ + set lattice parameters (Angstroms and Deg) + setlat -- interactively enter lattice parameters (Angstroms and Deg) + setlat name a -- assumes cubic + setlat name a b -- assumes tetragonal + setlat name a b c -- assumes ortho + setlat name a b c gamma -- assumes mon/hex with gam not equal to 90 + setlat name a b c alpha beta gamma -- arbitrary + """ + return ub.setlat(name, *args) + +def c2th(hkl, en=None): + """ + calculate two-theta angle for reflection + """ + return ub.c2th(hkl, en) + +def hklangle(hkl1, hkl2): + """ + calculate angle between [h1 k1 l1] and [h2 k2 l2] crystal planes + """ + return ub.hklangle(hkl1, hkl2) + + +# Reference (surface) + +def setnphi(xyz = None): + """ + sets or displays (xyz=None) n_phi reference + """ + return ub.setnphi(xyz) + + +def setnhkl(hkl = None): + """ + sets or displays (hkl=None) n_hkl reference + """ + return ub.setnhkl(hkl) + +# Reflections + +def showref(): + """ + shows full reflection list + """ + return ub.showref() + +def addref(*args): + """ + Add reflection + addref -- add reflection interactively + addref [h k l] {'tag'} -- add reflection with current position and energy + addref [h k l] (p1, .., pN) energy {'tag'} -- add arbitrary reflection + """ + return ub.addref(*args) + +def editref(idx): + """ + interactively edit a reflection (idx is tag or index numbered from 1) + """ + return ub.editref(idx) + +def delref(idx): + """ + deletes a reflection (idx is tag or index numbered from 1) + """ + return ub.delref(idx) + + +def clearref(): + """ + deletes all the reflections + """ + return ub.clearref() + +def swapref(idx1=None, idx2=None): + """ + swaps two 
reflections + swapref -- swaps first two reflections used for calculating U matrix + swapref {num1 | 'tag1'} {num2 | 'tag2'} -- swaps two reflections + """ + return ub.swapref(idx1, idx2) + + +# Crystal Orientations + +def showorient(): + """ + shows full list of crystal orientations + """ + #TODO: Workaround of bug on Diffcalc (str_lines needs parameter) + if ub.ubcalc._state.orientlist: + print '\n'.join(ub.ubcalc._state.orientlist.str_lines(None)) + return + return ub.showorient() + +def addorient(*args): + """ + addorient -- add crystal orientation interactively + addorient [h k l] [x y z] {'tag'} -- add crystal orientation in laboratory frame + """ + return ub.addorient(*args) + +def editorient(idx): + """ + interactively edit a crystal orientation (idx is tag or index numbered from 1) + """ + return ub.editorient(tag_or_num) + +def delorient(idx): + """ + deletes a crystal orientation (idx is tag or index numbered from 1) + """ + return ub.delorient(tag_or_num) + +def clearorient(): + """ + deletes all the crystal orientations + """ + return ub.clearorient() + +def swaporient(idx1=None, idx2=None): + """ + swaps two swaporient + swaporient -- swaps first two crystal orientations used for calculating U matrix + swaporient {num1 | 'tag1'} {num2 | 'tag2'} -- swaps two crystal orientations + """ + return ub.swaporient(idx1, idx2) + + +# UB Matrix +def showub(): + """ + show the complete state of the ub calculation + NOT A DIFFCALC COMMAND + """ + return ub.ub() + +def checkub(): + """ + show calculated and entered hkl values for reflections + """ + return ub.checkub() + +def setu(U=None): + """ + manually set U matrix + setu -- set U matrix interactively + setu [[..][..][..]] -- manually set U matrix + """ + return ub.setu(U) + +def setub(UB=None): + """ + manually set UB matrix + setub -- set UB matrix interactively + setub [[..][..][..]] -- manually set UB matrix + """ + return ub.setub(UB) + +def getub(): + """ + returns current UB matrix + NOT A DIFFCALC 
COMMAND + """ + return None if ub.ubcalc._UB is None else ub.ubcalc._UB.tolist() + +def calcub(idx1=None, idx2=None): + """ + (re)calculate u matrix + calcub -- (re)calculate U matrix from the first two reflections and/or orientations. + calcub idx1 idx2 -- (re)calculate U matrix from reflections and/or orientations referred by indices and/or tags idx1 and idx2. + """ + return ub.calcub(idx1, idx2) + +def trialub(idx=1): + """ + (re)calculate u matrix using one reflection only + Use indice or tags idx1. Default: use first reflection. + """ + return ub.trialub(idx) + +def refineub(*args): + """ + refine unit cell dimensions and U matrix to match diffractometer angles for a given hkl value + refineub -- interactively + refineub [h k l] {pos} + """ + return ub.refineub(*args) + +def fitub(*args): + """ + fitub ref1, ref2, ref3... -- fit UB matrix to match list of provided reference reflections. + """ + return ub.fitub(*args) + +def addmiscut(angle, xyz=None): + """ + apply miscut to U matrix using a specified miscut angle in degrees and a rotation axis (default: [0 1 0]) + """ + return ub.addmiscut(angle, xyz) + +def setmiscut(angle, xyz=None): + """ + manually set U matrix using a specified miscut angle in degrees and a rotation axis (default: [0 1 0]) + """ + return ub.setmiscut(angle, xyz) + + + +################################################################################################### +# Motion Commands +################################################################################################### + +#Constraints + +def con(*args): + """ + list or set available constraints and values + con -- list available constraints and values + con {val} -- constrains and optionally sets one constraint + con {val} {val} {val} -- clears and then fully constrains + """ + try: + ret = hkl.con(*args) + finally: + save_exp_context() + return ret + +def uncon(name): + """ + remove constraint + """ + try: + ret = hkl.uncon(name) + finally: + save_exp_context() + return 
ret + + +# HKL +def allhkl(_hkl, wavelength=None): + """ + print all hkl solutions ignoring limits + """ + return hkl.allhkl(_hkl, wavelength) + + +#Hardware + +def setmin(axis, val=None): + """ + set lower limits used by auto sector code (nan to clear) + """ + name = get_axis_name(axis) + try: + hardware.setmin(name, val) + finally: + save_exp_context() + +def setmax(axis, val=None): + """ + set upper limits used by auto sector code (nan to clear) + """ + name = get_axis_name(axis) + try: + return hardware.setmax(name, val) + finally: + save_exp_context() + +def setcut(axis, val): + """ + sets cut angle + """ + name = get_axis_name(axis) + try: + return hardware.setcut(name, val) + finally: + save_exp_context() + +################################################################################################### +# Motion commands: not standard Diffcalc names +################################################################################################### + + +def hklci(positions, energy=None): + """ + converts positions of motors to reciprocal space coordinates (H K L) + """ + return dc.angles_to_hkl(positions, energy) + +def hklca(hkl, energy=None): + """ + converts reciprocal space coordinates (H K L) to positions of motors. + """ + return dc.hkl_to_angles(hkl[0], hkl[1], hkl[2], energy) + +def hklwh(): + """ + prints the current reciprocal space coordinates (H K L) and positions of motors. 
+ """ + hkl = hklget() + print "HKL: " + str(hkl) + for m in _difcalc_names.keys(): + print _difcalc_names[m] + " [" + m.name + "] :" + str(m.take()) + +def hklget(): + """ + get current hkl position + """ + return hkl_group.read() + +def hklmv(hkl): + """ + move to hkl position + """ + hkl_group.write(hkl) + +def hklsim(hkl): + """ + simulates moving diffractometer + """ + return hkl_group.sim(hkl) + + +################################################################################################### +# HKL Combined Scan +################################################################################################### +def hklscan(vector, readables,latency = 0.0, passes = 1, **pars): + """ + HKL Scan: + + Args: + vector(list of lists): HKL values to be scanned + readables(list of Readable): Sensors to be sampled on each step. + latency(float, optional): settling time for each step before readout, defaults to 0.0. + passes(int, optional): number of passes + pars(keyworded variable length arguments, optional): scan optional named arguments: + - title(str, optional): plotting window name. + - hidden(bool, optional): if true generates no effects on user interface. + - before_read (function, optional): callback on each step, before sampling. Arguments: positions, scan + - after_read (function, optional): callback on each step, after sampling. Arguments: record, scan. + - before_pass (function, optional): callback before each scan pass execution. Arguments: pass_num, scan. + - after_pass (function, optional): callback after each scan pass execution. Arguments: pass_num, scan. + - Aditional arguments defined by set_exec_pars. + Returns: + ScanResult object. 
+ + """ + readables=to_list(string_to_obj(readables)) + pars["initial_move"] = False + scan = ManualScan([h,k,l], readables ,vector[0], vector[-1], [len(vector)-1] * 3, dimensions = 1) + if not "domain_axis" in pars.keys(): + pars["domain_axis"] = "Index" + processScanPars(scan, pars) + scan.start() + try: + for pos in vector: + #print "Writing ", pos + hkl_group.write(pos) + time.sleep(0.1) #Make sure is busy + get_motor_group().update() + get_motor_group().waitReady(-1) + time.sleep(latency) + hkl_group.update() + if scan.before_read: scan.before_read(pos,scan) + scan.append ([h.take(), k.take(), l.take()], [h.getPosition(), k.getPosition(), l.getPosition()], [readable.read() for readable in readables ]) + if scan.after_read: scan.after_read(scan.currentRecord,scan) + finally: + scan.end() + return scan.result + +def get_constraints(): + constraints={} + from diffcalc.hkl.you.constraints import valueless_constraints + all_constraints=hkl.hklcalc.constraints.all + for name in all_constraints: + if not hkl.hklcalc.constraints.is_constraint_fixed(name): + value = hkl.hklcalc.constraints.get_constraint(name) + if name in valueless_constraints: + constraints[name] = None + elif value is not None: + constraints[name] = value + return constraints + + +def set_constraints(constraints): + for name in constraints.keys(): + try: + value = constraints[name] + if value is None: + con(name) + else: + con(name, value) + except: + print sys.exc_info()[1] + +def get_limits(): + limits={} + for name in settings.hardware.get_axes_names(): + axis = {} + axis["lower_limit"] = settings.hardware.get_lower_limit(name) + axis["upper_limit"] = settings.hardware.get_upper_limit(name) + axis["cut"] = settings.hardware.get_cuts()[name] + limits[name]=axis + return limits + +def set_limits(limits): + for name in limits.keys(): + try: + axis = limits[name] + if axis.get("lower_limit") is not None: setmin (name, axis["lower_limit"]) + if axis.get("upper_limit") is not None: setmax (name, 
axis["upper_limit"]) + if axis.get("cut") is not None: setcut (name, axis["cut"]) + except: + print sys.exc_info()[1] + +def get_exp_context(): + context = {} + try: + context["limits"] = get_limits() + except: + context["limits"] = None + try: + context["constraints"] = get_constraints() + except: + context["constraints"] = None + try: + context["ub"] = ub.ubcalc._state.name + except: + context["ub"] = None + return context + +def set_exp_context(context): + try: + if context.get("limits") is not None: + set_limits(context["limits"]) + except: + print sys.exc_info()[1] + try: + if context.get("constraints") is not None: + set_constraints(context["constraints"]) + except: + print sys.exc_info()[1] + try: + if context.get("ub") is not None: + loadub(str(context["ub"])) + except: + print sys.exc_info()[1] + + +EXPERIMENT_CONTEXT_FILE = expand_path("{context}/diff_exp_context.json") +def save_exp_context(): + """ + Saves experiment context (constraints, ub and hw limits) + """ + try: + c = get_exp_context() + with open(EXPERIMENT_CONTEXT_FILE, 'w') as json_file: + json.dump(c, json_file) + except: + print "Cannot save experiment context: ", sys.exc_info()[1] + +def load_exp_context(): + """ + Loads experiment context (constraints, ub and hw limits) + """ + try: + with open(EXPERIMENT_CONTEXT_FILE) as json_file: + c = json.load(json_file) + set_exp_context(c) + except: + print "Cannot load experiment context: ", sys.exc_info()[1] + + + +################################################################################################### +# Experiment context +################################################################################################### + + +def test_diffcalc(): + print "Start test" + energy.move(20.0) + delta.config.maxSpeed = 50.0 + delta.speed = 50.0 + delta.move(1.0) + + #Setup + setup_diff(sixc, energy) + setup_axis('gam', 0, 179) + setup_axis('delta', 0, 179) + setup_axis('delta', min=0) + setup_axis('phi', cut=-180.0) + setup_axis() + + 
#Orientation + listub() + # Create a new ub calculation and set lattice parameters + newub('test') + setlat('cubic', 1, 1, 1, 90, 90, 90) + # Add 1st reflection (demonstrating the hardware adapter) + settings.hardware.wavelength = 1 + c2th([1, 0, 0]) # energy from hardware + settings.hardware.position = 0, 60, 0, 30, 0, 0 + addref([1, 0, 0])# energy and position from hardware + # Add 2nd reflection (this time without the harware adapter) + c2th([0, 1, 0], 12.39842) + addref([0, 1, 0], [0, 60, 0, 30, 0, 90], 12.39842) + # check the state + showub() + checkub() + + #Constraints + con('qaz', 90) + con('a_eq_b') + con('mu', 0) + con() + + #Motion + print hklci((0., 60., 0., 30., 0., 0.)) + print hklca((1, 0, 0)) + sixc.write([0, 60, 0, 30, 90, 0]) + print "sixc=" , sixc.position + wavelength.write(1.0) + print "wavelength = ", wavelength.read() + lastub() + setu ([[1, 0, 0], [0, 1, 0], [0, 0, 1]]) + showref() + swapref(1,2) + hklwh() + hklsim([0.0,1.0,1.0]) + hklmv([0.0,1.0,1.0]) + + #Scans + lscan(l, [sin], 1.0, 1.5, 0.1) + ascan([k,l], [sin], [1.0, 1.0], [1.2, 1.3], [0.1, 0.1], zigzag=True, parallel_positioning = False) + vector = [[1.0,1.0,1.0], [1.0,1.0,1.1], [1.0,1.0,1.2], [1.0,1.0,1.4]] + hklscan(vector, [sin, arr], 0.9) \ No newline at end of file diff --git a/script/Lib/ijutils.py b/script/Lib/ijutils.py new file mode 100644 index 0000000..4852bd1 --- /dev/null +++ b/script/Lib/ijutils.py @@ -0,0 +1,782 @@ +#################################################################################################### +# Facade to ImageJ functionality +#################################################################################################### + +#More information on: +# Image: https://imagej.nih.gov/ij/docs/guide/146-28.html#toc-Section-28 +# Process: https://imagej.nih.gov/ij/docs/guide/146-29.html#toc-Section-29 +# Analyze: https://imagej.nih.gov/ij/docs/guide/146-30.html#toc-Section-30 + +import ch.psi.utils.Convert as Convert +import 
ch.psi.pshell.imaging.Utils as ImagingUtils +from startup import get_context, expand_path, is_string +import java.awt.image.BufferedImage as BufferedImage +import jarray +import os + +import ij.IJ as IJ +import ij.ImageJ as ImageJ +import ij.WindowManager as WindowManager +import ij.ImagePlus as ImagePlus +import ij.ImageStack as ImageStack +import ij.Prefs as Prefs +import ij.io.FileSaver as FileSaver +import ij.io.Opener as Opener +from ij.gui import Roi + +import ij.process.ImageProcessor as ImageProcessor +import ij.process.ByteProcessor as ByteProcessor +import ij.process.ShortProcessor as ShortProcessor +import ij.process.ColorProcessor as ColorProcessor +import ij.process.FloatProcessor as FloatProcessor +import ij.process.ImageConverter as ImageConverter +import ij.process.AutoThresholder as AutoThresholder +import ij.process.LUT as LUT +import ij.measure.Measurements as Measurements +import ij.measure.ResultsTable as ResultsTable +import ij.plugin.filter.Analyzer as Analyzer +import ij.plugin.filter.GaussianBlur as GaussianBlur +import ij.plugin.filter.Filters as Filters +import ij.plugin.filter.FFTFilter as FFTFilter +import ij.plugin.filter.BackgroundSubtracter as BackgroundSubtracter +import ij.plugin.filter.EDM as EDM +import ij.plugin.filter.Shadows as Shadows +import ij.plugin.filter.UnsharpMask as UnsharpMask +import ij.plugin.filter.MaximumFinder as MaximumFinder +import ij.plugin.filter.EDM as EDM +import ij.plugin.filter.Shadows as Shadows +import ij.plugin.filter.UnsharpMask as UnsharpMask +import ij.plugin.filter.RankFilters as RankFilters +import ij.plugin.filter.Convolver as Convolver +import ij.plugin.filter.ParticleAnalyzer as ParticleAnalyzer + +import ij.plugin.ContrastEnhancer as ContrastEnhancer +import ij.plugin.Thresholder as Thresholder +import ij.plugin.ImageCalculator as ImageCalculator +import ij.plugin.FFT as FFT +import ij.plugin.Concatenator as Concatenator + +#ImageJ customizations +import ch.psi.pshell.imaging.ij.FFTMath as 
FFTMath +import ch.psi.pshell.imaging.ij.FFTFilter as FFTFilter +import ch.psi.pshell.imaging.ij.Binary as Binary +import ch.psi.pshell.imaging.ij.Slicer as Slicer + + +#This eliminates the error messages due to the bug on ij.gui.ImageWindow row 555 (ij is null) +if not "_image_j" in globals().keys(): + _image_j = ImageJ(None, ImageJ.NO_SHOW) + +################################################################################################### +#Image creation, copying & saving +################################################################################################### +def load_image(image, title = None): + """ + image: file name or BufferedImage + """ + if is_string(image): + try: + file = expand_path(image) + except: + pass + try: + image = ImagingUtils.newImage(file) + except: + #try loading from assembly + image = get_context().setup.getAssemblyImage(image) + if title is None: + title = os.path.basename(file) + return ImagePlus("img" if title is None else title, image) + +def load_array(array, width=None, height=None, title = "img"): + """ + array: 1d array if width and height defined , or else 2d array to be flattened. 
+ """ + #2D + if (width==None) and (height==None): + if array.typecode == '[B': proc = ByteProcessor(len(array[0]), len(array), Convert.flatten(array)) + elif array.typecode == '[S': proc = ShortProcessor(len(array[0]), len(array), Convert.flatten(array), None) + elif array.typecode in ['[I','[F', '[D']: proc = FloatProcessor(len(array[0]), len(array), Convert.flatten(array)) + else: raise Exception("Invalid array type") + #1D + else: + if (len(array) > width*height): + array = array[:(width*height)] + if array.typecode == 'b': proc = ByteProcessor(width, height, array) + elif array.typecode == 'h': proc = ShortProcessor(width, height, array, None) + elif array.typecode in ['i','f','d']: proc = FloatProcessor(width, height, array) + else: raise Exception("Invalid array type") + return ImagePlus(title, proc) + +def save_image(ip, path=None, format = None, metadata={}): + """ + Saves image or stack + If parameters omitted, saves image again in same location, with same format. + """ + fs = FileSaver(ip) + + info = "" + for key,val in metadata.items(): + info = info + ("\n" if len(info)>0 else "") + str(key) + ": " + str(val) + ip.setProperty("Info", info) + + if path == None: fs.save() + else: + try: + path = expandPath(path) + except: + pass + if format == "bmp": fs.saveAsBmp(path) + elif format == "fits": fs.saveAsFits(path) + elif format == "gif": fs.saveAsGif(path) + elif format == "jpeg": fs.saveAsJpeg(path) + elif format == "lut": fs.saveAsLut(path) + elif format == "pgm": fs.saveAsPgm(path) + elif format == "png": fs.saveAsPng(path) + elif format == "raw" and ip.getImageStackSize()>1: fs.saveAsRawStack(path) + elif format == "raw": fs.saveAsRaw(path) + elif format == "txt": fs.saveAsText(path) + elif format == "tiff" and ip.getImageStackSize()>1: fs.saveAsTiffStack(path) + elif format == "tiff": fs.saveAsTiff(path) + elif format == "zip": fs.saveAsZip(path) + + +def open_image(path, index=1): + """ + Open file using ij.io,Opener + """ + try: + path = 
expand_path(path) + except: + pass + opener = Opener() + return opener.openImage(path, index) + +def new_image(width, height, image_type="byte", title = "img", fill_color = None): + """ + type = "byte", "short", "color" or "float" + """ + if image_type == "byte": p=ByteProcessor(width, height) + elif image_type == "short": p=ShortProcessor(width, height) + elif image_type == "color": p=ColorProcessor(width, height) + elif image_type == "float": p=FloatProcessor(width, height) + else: raise Exception("Invalid image type " + str(image_type)) + ret = ImagePlus(title, p) + if fill_color is not None: + p.setColor(fill_color) + p.resetRoi() + p.fill() + return ret + +def get_ip_array(ip): + """ + Returns data array of ImagePlus + """ + if type(ip.getProcessor()) == FloatProcessor: + return ip.getProcessor().getFloatArray() + else: + return ip.getProcessor().getIntArray() + + +def sub_image(ip, x, y, width, height): + """ + Returns new ImagePlus + """ + ip.setRoi(x, y, width, height) + p=ip.getProcessor().crop() + return ImagePlus(ip.getTitle() + " subimage", p) + +def copy_image(ip): + return ip.duplicate() + +def copy_image_to(ip_source, ip_dest, x, y): + ip_source.deleteRoi() + ip_source.copy() + ip_dest.setRoi(x, y, ip_source.getWidth(), ip_source.getHeight()) + ip_dest.paste() + ip_dest.changes = False + ip_dest.deleteRoi() + +def pad_image(ip, left=0, right=0, top=0, bottom=0, fill_color = None): + p=ip.getProcessor() + width = p.getWidth() + left + right + height = p.getHeight() + top + bottom + image_type = get_image_type(ip) + ret = new_image(width, height, image_type, ip.getTitle() + " padded", fill_color) + ip.deleteRoi() + ip.copy() + ret.setRoi(left, top, p.getWidth(), p.getHeight()) + ret.paste() + ret.changes = False + ret.deleteRoi() + return ret + +def get_image_type(ip): + """ + Returns: "byte", "short", "color" or "float" + """ + p=ip.getProcessor() + if type(p) == ShortProcessor: return "short" + elif type(p) == ColorProcessor: return "color" + elif 
type(p) == FloatProcessor: return "float" + return "byte" + +################################################################################################### +#Image measurements +################################################################################################### + +def get_measurement(ip, measurement): + """ + Return image measurement: + "Area", "Mean", "StdDev", "Mode", "Min", "Max", "X", "Y", "XM", "YM", "Perim.", "BX", "BY", + "Width", "Height", "Major", "Minor", "Angle", "Circ.", "Feret", "IntDen", "Median", "Skew", + "Kurt", "%Area", "RawIntDen", "Ch", "Slice", "Frame", "FeretX", "FeretY", "FeretAngle", + "MinFeret", "AR", "Round", "Solidity", "MinThr" or "MaxThr" + """ + return IJ.getValue(ip,measurement) + +################################################################################################### +#Image type conversion +################################################################################################### +def grayscale(ip, do_scaling=None, in_place=True): + ip = ip if in_place else ip.duplicate() + ic = ImageConverter(ip) + if do_scaling is not None: + ic.setDoScaling(do_scaling) + ic.convertToGray8() + return ip + +def get_channel(ip, channel): + """ + Return a channel from a color image as a new ImagePlus. 
+ channel: "red", "green","blue", "alpha", "brightness", + """ + proc = ip.getProcessor() + if channel == "red": ret = proc.getChannel(1, None) + elif channel == "green": ret = proc.getChannel(2, None) + elif channel == "blue": ret = proc.getChannel(3, None) + elif channel == "alpha": ret = proc.getChannel(4, None) + elif channel == "brightness": ret = proc.getBrightness() + else: raise Exception("Invalid channel " + str(channel)) + return ImagePlus(ip.getTitle() + " channel: " + channel, ret) + +################################################################################################### +#Thresholder +################################################################################################### +def threshold(ip, min_threshold, max_threshold, in_place=True): + ip = ip if in_place else ip.duplicate() + ip.getProcessor().setThreshold(min_threshold, max_threshold, ImageProcessor.NO_LUT_UPDATE) + WindowManager.setTempCurrentImage(ip) + Thresholder().run("mask") + return ip + +def auto_threshold(ip, dark_background = False, method = AutoThresholder.getMethods()[0], in_place=True): + ip = ip if in_place else ip.duplicate() + ip.getProcessor().setAutoThreshold(method, dark_background , ImageProcessor.NO_LUT_UPDATE) + WindowManager.setTempCurrentImage(ip) + thresholder=Thresholder().run("mask") + return ip + +################################################################################################### +#Binary functions +################################################################################################### +def binary_op(ip, op, dark_background=False, iterations=1, count=1, in_place=True): + """ + op = "erode","dilate", "open","close", "outline", "fill holes", "skeletonize" + """ + ip = ip if in_place else ip.duplicate() + binary = Binary(count, iterations, dark_background ) + binary.setup(op, ip) + binary.run(ip.getProcessor()) + return ip + +def binary_erode(ip, dark_background=False, iterations=1, count=1, in_place=True): + return 
binary_op(ip, "erode", dark_background, iterations, count, in_place) + +def binary_dilate(ip, dark_background=False, iterations=1, count=1, in_place=True): + return binary_op(ip, "dilate", dark_background, iterations, count, in_place) + +def binary_open(ip, dark_background=False, iterations=1, count=1, in_place=True): + return binary_op(ip, "open", dark_background, iterations, count, in_place) + +def binary_close(ip, dark_background=False, iterations=1, count=1, in_place=True): + return binary_op(ip, "close", dark_background, iterations, count) + +def binary_outline(ip, dark_background=False, in_place=True): + return binary_op(ip, "outline", dark_background, in_place=in_place) + +def binary_fill_holes(ip, dark_background=False, in_place=True): + return binary_op(ip, "fill holes", dark_background, in_place=in_place) + +def binary_skeletonize(ip, dark_background=False, in_place=True): + return binary_op(ip, "skeletonize", dark_background, in_place=in_place) + +def analyse_particles(ip, min_size, max_size, fill_holes = True, exclude_edges = True, extra_measurements = 0, \ + print_table = False, output_image = "outlines", minCirc = 0.0, maxCirc = 1.0): + """ + Returns: tuple (ResultsTable results_table, ImagePlus output_image) + output_image = "outlines", "overlay_outlines", "masks", "overlay_masks", "roi_masks" or None + extra_measurements = mask with Measurements.CENTROID, PERIMETER, RECT, MIN_MAX, ELLIPSE, CIRCULARITY, AREA_FRACTION, INTEGRATED_DENSITY, INVERT_Y, FERET, KURTOSIS, MEDIAN, MODE, SKEWNESS, STD_DEV + Measurements is a mask of flags: https://imagej.nih.gov/ij/developer/api/ij/measure/Measurements.html. 
+ Returned ResultsTable hold public fields: https://imagej.nih.gov/ij/developer/api/ij/measure/ResultsTable.html + + """ + rt = ResultsTable() + show_summary = False + options = ParticleAnalyzer.SHOW_RESULTS | ParticleAnalyzer.CLEAR_WORKSHEET + """ + ParticleAnalyzer.SHOW_ROI_MASKS | \ + #ParticleAnalyzer.RECORD_STARTS | \ + #ParticleAnalyzer.ADD_TO_MANAGER | \ + #ParticleAnalyzer.FOUR_CONNECTED | \ + #ParticleAnalyzer.IN_SITU_SHOW | \ + #ParticleAnalyzer.SHOW_NONE | \ + """ + if show_summary: options = options | ParticleAnalyzer.DISPLAY_SUMMARY + if output_image == "outlines": options = options | ParticleAnalyzer.SHOW_OUTLINES + elif output_image == "overlay_outlines": options = options | ParticleAnalyzer.SHOW_OVERLAY_OUTLINES + elif output_image == "masks": options = options | ParticleAnalyzer.SHOW_MASKS + elif output_image == "overlay_masks": options = options | ParticleAnalyzer.SHOW_OVERLAY_MASKS + elif output_image == "roi_masks": options = options | ParticleAnalyzer.SHOW_ROI_MASKS + #ParticleAnalyzer.SHOW_ROI_MASKS + if exclude_edges: options = options | ParticleAnalyzer.EXCLUDE_EDGE_PARTICLES + if fill_holes: options = options | ParticleAnalyzer.INCLUDE_HOLES + measurements = Measurements.AREA | Measurements.MEAN | Measurements.CENTER_OF_MASS | Measurements.RECT + pa = ParticleAnalyzer(options, measurements, rt, min_size, max_size, minCirc, maxCirc) + pa.setHideOutputImage(True) + pa.setResultsTable(rt) + if pa.analyze(ip): + if print_table: + print rt.getColumnHeadings() + for row in range (rt.counter): + print rt.getRowAsString(row) + return (rt, pa.getOutputImage()) + +################################################################################################### +#Image operators +################################################################################################### +def op_image(ip1, ip2, op, float_result=False, in_place=True): + """ + op = "add","subtract", "multiply","divide", "and", "or", "xor", "min", "max", "average", "difference" 
or "copy" + """ + ip1 = ip1 if in_place else ip1.duplicate() + ic = ImageCalculator() + pars = op + if float_result: + op = op + " float" + ic.run(pars, ip1, ip2) + return ip1 + +def op_const(ip, op, val, in_place=True): + """ + op = "add","subtract", "multiply","divide", "and", "or", "xor", "min", "max", "gamma", "set" or "log", "exp", "sqr", "sqrt","abs" + """ + ip = ip if in_place else ip.duplicate() + pr = ip.getProcessor() + if op == 'add': pr.add(val) + elif op == 'sub': pr.subtract(val) + elif op == 'multiply': pr.multiply(val) + elif op == 'divide' and val!=0: pr.multiply(1.0/val) + elif op == 'and': pr.and(val) + elif op == 'or': pr.or(val) + elif op == 'xor': pr.xor(val) + elif op == 'min': pr.min(val);pr.resetMinAndMax() + elif op == 'max': pr.max(val);pr.resetMinAndMax() + elif op == 'gamma' and 0.05 < val < 5.0: pr.gamma(val) + elif op == 'set': pr.set(val) + elif op == 'log': pr.log() + elif op == 'exp': pr.exp() + elif op == 'sqr': pr.sqr() + elif op == 'sqrt': pr.sqrt() + elif op == 'abs': pr.abs();pr.resetMinAndMax() + else: raise Exception("Invalid operation " + str(op)) + return ip + +def op_fft(ip1, ip2, op, do_inverse = True) : + """ + Images must have same sizes, and multiple of 2 height and width. 
+ op = "correlate" (complex conjugate multiply), "convolve" (Fourier domain multiply), "deconvolve" (Fourier domain divide) + """ + if op == "correlate": op_index = 0 + elif op == "convolve": op_index = 1 + elif op == "deconvolve": op_index = 2 + else: raise Exception("Invalid operation " + str(op)) + return FFTMath().doMath(ip1, ip2, op_index, do_inverse) + +def op_rank(ip, op, kernel_radius =1 , dark_outliers = False ,threshold = 50, in_place=True): + """ + op = "mean", "min", "max", "variance", "median", "close_maxima", "open_maxima", "remove_outliers", "remove_nan", "despeckle" + """ + if op == "mean": filter_type = RankFilters.MEAN + elif op == "min": filter_type = RankFilters.MIN + elif op == "max": filter_type = RankFilters.MAX + elif op == "variance": filter_type = RankFilters.VARIANCE + elif op == "median": filter_type = RankFilters.MEDIAN + elif op == "close_maxima": filter_type = RankFilters.CLOSE + elif op == "open_maxima": filter_type = RankFilters.OPEN + elif op == "remove_outliers": filter_type = RankFilters.OUTLIERS + elif op == "remove_nan": filter_type = RankFilters.REMOVE_NAN + elif op == "despeckle": filter_type, kernel_radius = RankFilters.MEDIAN, 1 + else: raise Exception("Invalid operation " + str(op)) + ip = ip if in_place else ip.duplicate() + RankFilters().rank(ip.getProcessor(), kernel_radius, filter_type, RankFilters.DARK_OUTLIERS if dark_outliers else RankFilters.BRIGHT_OUTLIERS ,threshold) + return ip + +def op_edm(ip, op="edm", dark_background=False, in_place=True): + """ + Euclidian distance map & derived operations + op ="edm", "watershed","points", "voronoi" + """ + ip = ip if in_place else ip.duplicate() + pr = ip.getProcessor() + edm=EDM() + Prefs.blackBackground=dark_background + if op=="edm": + #pr.setPixels(0, edm.makeFloatEDM(pr, 0, False)); + #pr.resetMinAndMax(); + if dark_background: + pr.invert() + edm.toEDM(pr) + else: + edm.setup(op, ip) + edm.run(pr) + return ip + +def watershed(ip, dark_background=False, 
in_place=True): + return op_edm(ip, "watershed", dark_background, in_place) + +def ultimate_points(ip, dark_background=False, in_place=True): + return op_edm(ip, "points", dark_background, in_place) + +def veronoi(ip, dark_background=False, in_place=True): + return op_edm(ip, "voronoi", dark_background, in_place) + +def edm(ip, dark_background=False, in_place=True): + return op_edm(ip, "edm", dark_background, in_place) + +def op_filter(ip, op, in_place=True): + """ + This is redundant as just calls processor methods. + op ="invert", "smooth", "sharpen", "edge", "add" + """ + ip = ip if in_place else ip.duplicate() + f = Filters() + f.setup(op, ip ) + f.run(ip.getProcessor()) + return ip + +################################################################################################### +#Other operations +################################################################################################### +def gaussian_blur(ip, sigma_x=3.0, sigma_y=3.0, accuracy = 0.01, in_place=True): + ip = ip if in_place else ip.duplicate() + GaussianBlur().blurGaussian(ip.getProcessor(), sigma_x, sigma_y, accuracy) + return ip + +def find_maxima(ip, tolerance=25, threshold = ImageProcessor.NO_THRESHOLD, output_type=MaximumFinder.IN_TOLERANCE, exclude_on_edges = False, is_edm = False): + """ + Returns new ImagePlus + tolerance: maxima are accepted only if protruding more than this value from the ridge to a higher maximum + threshhold: minimum height of a maximum (uncalibrated); + output_type = SINGLE_POINTS, IN_TOLERANCE or SEGMENTED. No output image is created for output types POINT_SELECTION, LIST and COUNT. 
+ """ + byte_processor = MaximumFinder().findMaxima(ip.getProcessor(), tolerance, threshold, output_type, exclude_on_edges, is_edm) + return ImagePlus(ip.getTitle() + " maxima", byte_processor) + + +def get_maxima_points(ip, tolerance=25, exclude_on_edges = False): + polygon = MaximumFinder().getMaxima(ip.getProcessor(), tolerance, exclude_on_edges) + return (polygon.xpoints, polygon.ypoints) + +def enhance_contrast(ip, equalize_histo = True, saturated_pixels = 0.5, normalize = False, stack_histo = False, in_place=True): + ip = ip if in_place else ip.duplicate() + ce = ContrastEnhancer() + if equalize_histo: + ce.equalize(ip.getProcessor()); + else: + ce.stretchHistogram(ip.getProcessor(), saturated_pixels) + if normalize: + ip.getProcessor().setMinAndMax(0,1.0 if (ip.getProcessor().getBitDepth()==32) else ip.getProcessor().maxValue()) + return ip + +def shadows(ip, op, in_place=True): + """ + op ="north","northeast", "east", "southeast","south", "southwest", "west","northwest" + """ + ip = ip if in_place else ip.duplicate() + shadows= Shadows() + shadows.setup(op, ip) + shadows.run(ip.getProcessor()) + return ip + +def unsharp_mask(ip, sigma, weight, in_place=True): + """ + Float processor + """ + ip = ip if in_place else ip.duplicate() + ip.getProcessor().snapshot() + unsharp=UnsharpMask() + USmask.setup(" ", ip) + USmask.sharpenFloat( ip.getProcessor(),sigma, weight) + return ip + +def subtract_background(ip, radius = 50, create_background=False, dark_background=False, use_paraboloid =True, do_presmooth = True, correctCorners = True, rgb_brightness=False, in_place=True): + ip = ip if in_place else ip.duplicate() + if rgb_brightness: + BackgroundSubtracter().rollingBallBrightnessBackground(ip.getProcessor(), radius, create_background,not dark_background, use_paraboloid, do_presmooth, correctCorners) + else: + BackgroundSubtracter().rollingBallBackground(ip.getProcessor(), radius, create_background, not dark_background, use_paraboloid, do_presmooth, 
correctCorners) + return ip + +################################################################################################### +#FFT +################################################################################################### +def image_fft(ip, show = True): + WindowManager.setTempCurrentImage(ip) + fft = FFT() + fft.run("fft") + #TODO: how to avoid it to be created? + #ret = ImagePlus("FHT of " + ip.getTitle(), WindowManager.getCurrentImage().getProcessor()) + ret = WindowManager.getCurrentImage() + if not show: + WindowManager.getCurrentImage().hide() + return ret + + +def image_ffti(ip, show = True): + WindowManager.setTempCurrentImage(ip) + fft = FFT() + fft.run("inverse") + #WindowManager.getCurrentImage().hide() + #TODO: how to avoid it to be created? + #ret = WindowManager.getCurrentImage() + #WindowManager.getCurrentImage().hide() + #ret = ImagePlus(ip.getTitle() + " ffti", WindowManager.getCurrentImage().getProcessor()) + ret = WindowManager.getCurrentImage() + if not show: + WindowManager.getCurrentImage().hide() + + return ret + +def bandpass_filter(ip, small_dia_px, large_dia_px, suppress_stripes = 0, stripes_tolerance_direction = 5.0, autoscale_after_filtering = False, saturate_if_autoscale = False, display_filter = False, in_place=True): + """ + suppress_stripes = 0 for none, 1 for horizontal, 2 for vertical + """ + ip = ip if in_place else ip.duplicate() + filter= FFTFilter(); + FFTFilter.filterLargeDia = large_dia_px + FFTFilter.filterSmallDia = small_dia_px + FFTFilter.choiceIndex = suppress_stripes + FFTFilter.toleranceDia = stripes_tolerance_direction + FFTFilter.doScalingDia = autoscale_after_filtering + FFTFilter.saturateDia = saturate_if_autoscale + FFTFilter.displayFilter =display_filter + filter.setup(None, ip); + filter.run(ip.getProcessor()) + return ip + +################################################################################################### +#Convolution 
+################################################################################################### + +KERNEL_BLUR = [[0.1111, 0.1111, 0.1111], [0.1111, 0.1111, 0.1111], [0.1111, 0.1111, 0.1111]] +KERNEL_SHARPEN = [[0.0, -0.75, 0.0], [-0.75, 4.0, -0.75], [0.0, -0.75, 0.0]] +KERNEL_SHARPEN_2 = [[-1.0, -1.0, -1.0], [-1.0, 9.0, -1.0], [-1.0, -1.0, -1.0]] +KERNEL_LIGHT = [[0.1, 0.1, 0.1], [0.1, 1.0, 0.1],[0.1, 0.1, 0.1]] +KERNEL_DARK = [[0.01, 0.01, 0.01],[0.01, 0.5, 0.01],[0.01, 0.01, 0.01]] +KERNEL_EDGE_DETECT = [[0.0, -0.75, 0.0], [-0.75, 3.0, -0.75], [0.0, -0.75, 0.0]] +KERNEL_EDGE_DETECT_2 = [[-0.5, -0.5, -0.5], [-0.5, 4.0, -0.5], [-0.5, -0.5, -0.5]] +KERNEL_DIFFERENTIAL_EDGE_DETECT = [[-1.0, 0.0, 1.0], [0.0, 0.0, 0.0], [1.0, 0.0, -1.0]] +KERNEL_PREWITT = [[-2.0, -1.0, 0.0], [-1.0, 0.0, 1.0 ], [0.0, 1.0, 2.0]] +KERNEL_SOBEL = [[2.0, 2.0, 0.0], [2.0, 0.0, -2.0 ], [0.0, -2.0, -2.0]] + + +def convolve(ip, kernel, in_place=True): + """ + kernel: list of lists + """ + ip = ip if in_place else ip.duplicate() + kernel_width = len(kernel) + kernel_height= len(kernel[0]) + kernel = [item for row in kernel for item in row] + #Convolver().convolve(ip.getProcessor(), kernel, kernel_width, kernel_height) + ip.getProcessor().convolve(kernel, kernel_width, kernel_height) + return ip + + +################################################################################################### +#Shortcut to ImageProcessor methods +################################################################################################### +def invert(ip, in_place=True): + ip = ip if in_place else ip.duplicate() + ip.getProcessor().invert() + return ip + +def smooth(ip, in_place=True): + ip = ip if in_place else ip.duplicate() + ip.getProcessor().smooth() + return ip + +def sharpen(ip, in_place=True): + ip = ip if in_place else ip.duplicate() + ip.getProcessor().sharpen() + return ip + +def edges(ip, in_place=True): #Sobel + ip = ip if in_place else ip.duplicate() + ip.getProcessor().findEdges() 
+ return ip + +def noise(ip, sigma = 25.0, in_place=True): + ip = ip if in_place else ip.duplicate() + ip.getProcessor().noise(sigma) + return ip + +def remap(ip, min=None, max=None, in_place=True): + ip = ip if in_place else ip.duplicate() + if min is None or max is None: + stats = get_statistics(ip, Measurements.MIN_MAX) + if min is None: min = stats.min + if max is None: max = stats.max + ip.getProcessor().setMinAndMax(min, max) + return ip + +def set_lut(ip, r, g, b): + """ + r,g and b are lists of 256 integers + """ + r = [x if x<128 else x-256 for x in r] + g = [x if x<128 else x-256 for x in g] + b = [x if x<128 else x-256 for x in b] + ip.setLut(LUT(jarray.array(r,'b'),jarray.array(g,'b'),jarray.array(b,'b'))) + +def resize(ip, width, height): + """ + Returns new ImagePlus + """ + p = ip.getProcessor().resize(width, height) + return ImagePlus(ip.getTitle() + " resized", p) + +def binning(ip, factor): + p=ip.getProcessor().bin(factor) + return ImagePlus(ip.getTitle() + " resized", p) + +def get_histogram(ip, hist_min = 0, hist_max = 0, hist_bins = 256, roi=None): + """ + hist_min, hist_max, hist_bins used only for float images (otherwise fixed to 0,255,256) + roi is list [x,y,w,h] + """ + if roi == None: ip.deleteRoi() + else: ip.setRoi(roi[0],roi[1],roi[2],roi[3]) + image_statistics = ip.getStatistics(0, hist_bins, hist_min, hist_max) + return image_statistics.getHistogram() + + +def get_array(ip): + return ip.getProcessor().getIntArray() + +def get_line(ip, x1, y1, x2, y2): + return ip.getProcessor().getLine(x1, y1, x2, y2) + +def get_pixel_range(ip): + return (ip.getProcessor().getMin(), ip.getProcessor().getMax()) + +def get_num_channels(ip): + return ip.getProcessor().getNChannels() + +def is_binary(ip): + return ip.getProcessor().isBinary() + +def get_pixel(ip, x, y): + return ip.getProcessor().getPixel(x,y) + +def get_pixel_array(ip, x, y): + a = [0]*get_num_channels(ip) + return ip.getProcessor().getPixel(x,y,a) + +def get_pixels(ip): + return 
ip.getProcessor().getPixels() + +def get_width(ip): + return ip.getProcessor().getWidth() + +def get_height(ip): + return ip.getProcessor().getHeight() + +def get_row(ip, y): + a = [0]*get_width(ip) + array = jarray.array(a,'i') + ip.getProcessor().getRow(0, y, array, get_width(ip)) + return array + +def get_col(ip, x): + a = [0]*get_height(ip) + array = jarray.array(a,'i') + ip.getProcessor().getColumn(x, 0, array, get_height(ip)) + return array + +def get_statistics(ip, measurements = None): + """ + Measurements is a mask of flags: https://imagej.nih.gov/ij/developer/api/ij/measure/Measurements.html. + Statistics object hold public fields: https://imagej.nih.gov/ij/developer/api/ij/process/ImageStatistics.html + """ + if measurements is None: + return ip.getStatistics() + else: + return ip.getStatistics(measurements) + +################################################################################################### +#Image stack functions +################################################################################################### +def create_stack(ip_list, duplicate=True, title = None): + stack = Concatenator().concatenate(ip_list, duplicate) + if title is not None: + stack.setTitle(title) + return stack + +def open_stack(path_list, title=None): + """ + Open list of files as a stack using ij.io,Opener + """ + ip_list = [] + for path in path_list: + ip_list.append(open_image(path)) + return create_stack(ip_list, duplicate=False, title = "stack" if title is None else title) + +def reslice(stack, start_at = "Top", vertically = True, flip = True, output_pixel_spacing=1.0, avoid_interpolation = True, title = None): + ss = Slicer() + ss.rotate = vertically + ss.startAt = start_at + ss.flip = flip + ss.nointerpolate = avoid_interpolation + ss.outputZSpacing = output_pixel_spacing + stack = ss.reslice(stack) + if title is not None: + stack.setTitle(title) + return stack + + + +############################################################################### +# 
ImagePlus list operations +############################################################################### + +def integrate_ips(ips, as_float=True): + """ + Integrate list if ImagePlus with the same size. + """ + aux = None + for i in range(len(ips)): + if i==0: + img_type = "float" if as_float else "short" + aux = new_image(ips[i].width, ips[i].height, image_type=img_type, title = "sum", fill_color = None) + op_image(aux, ips[i], "add", float_result=as_float, in_place=True) + return aux + +def average_ips (ips, roi=None, as_float=True): + """ + Average list if ImagePlus with the same size. + """ + aux = integrate_ips(ips, as_float) + op_const(aux, "divide", float(len(ips)), in_place=True) + return aux \ No newline at end of file diff --git a/script/Lib/jeputils.py b/script/Lib/jeputils.py new file mode 100644 index 0000000..9cce6d9 --- /dev/null +++ b/script/Lib/jeputils.py @@ -0,0 +1,239 @@ +################################################################################################### +# Facade to JEP: Embedded Python +################################################################################################### + +#Matplotlib won't work out of the box because it's default backend (Qt) uses signals, which only works in +#the main thread. Ideally should find a fix, in order to mark the running thread as the main. +#As a workaround, one can use the Tk backend: +# +#import matplotlib +#matplotlib.use('TkAgg') + + +#In principle just add JEP jar and library to the extensions folder. 
+# +#Alternatively on Linux: +# Python 2: +# - Add /lib/python3.X/site-packages/jep to LD_LIBRARY_PATH +# - Add /lib/python3.X/site-packages/jep/jep-X.X.X.jar to the class path +# +#Python3: +# - Add JEP library folder to LD_LIBRARY_PATH +# - If using OpenJDK, add also python /lib folder to LD_LIBRARY_PATH +# - Set LD_PRELOAD=/lib/libpython3.5m.so + + +import sys +import os +import jep.Jep +import jep.SharedInterpreter +import jep.NDArray +import java.lang.Thread +import org.python.core.PyArray as PyArray +import java.lang.String as String +import java.util.List +import java.util.Map +import java.util.HashMap +import ch.psi.pshell.scripting.ScriptUtils as ScriptUtils + + +from startup import to_array, get_context, _get_caller, Convert, Arr + +__jep = {} + +def __get_jep(): + t = java.lang.Thread.currentThread() + if not t in __jep: + init_jep() + return __jep[t] + +def __close_jep(): + t = java.lang.Thread.currentThread() + if t in __jep: + __jep[t].close() + +def init_jep(): + #TODO: Should do it but generates errors + #__close_jep() + j = jep.SharedInterpreter() + #Faster, but statements must be complete + j.setInteractive(False) + __jep[java.lang.Thread.currentThread()] = j + j.eval("import sys") + #sys.argv is not present in JEP and may be needed for certain modules (as Tkinter) + j.eval("sys.argv = ['PShell']"); + #Add standard script path to python path + j.eval("sys.path.append('" + get_context().setup.getScriptPath() + "')") + + #Redirect stdout + j.eval("class JepStdout:\n" + + " def write(self, str):\n" + + " self.str += str\n" + + " def clear(self):\n" + + " self.str = ''\n" + + " def flush(self):\n" + + " pass\n") + j.eval("sys.stdout=JepStdout()"); + j.eval("sys.stderr=JepStdout()"); + j.eval("sys.stdout.clear()") + j.eval("sys.stderr.clear()") + + #Import reload on Python 3 + j.eval("try:\n" + + " reload # Python 2.7\n" + + "except NameError:\n" + + " try:\n" + + " from importlib import reload # Python 3.4+\n" + + " except ImportError:\n" + + " from 
imp import reload # Python 3.0 - 3.3\n") + +def __print_stdout(): + j=__get_jep() + output = None + err = None + try: + output = j.getValue("sys.stdout.str") + err = j.getValue("sys.stderr.str") + j.eval("sys.stdout.clear()") + j.eval("sys.stderr.clear()") + except: + pass + if (output is not None) and len(output)>0: + print output + if (err is not None) and len(err)>0: + print >> sys.stderr, err + +def run_jep(script_name, vars = {}): + global __jep + script = get_context().scriptManager.library.resolveFile(script_name) + if script is None : + script= os.path.abspath(script_name) + j=__get_jep() + + for v in vars: + j.set(v, vars[v]) + try: + j.runScript(script) + finally: + __print_stdout() + +def eval_jep(line): + j=__get_jep() + try: + j.eval(line) + finally: + __print_stdout() + +def set_jep(var, value): + j=__get_jep() + j.set(var, value) + +def get_jep(var): + j=__get_jep() + return j.getValue(var) + +def call_jep(module, function, args = [], kwargs = {}, reload=False): + j=__get_jep() + if "/" in module: + script = get_context().scriptManager.library.resolveFile(module) + if "\\" in script: + #Windows paths + module_path = script[0:script.rfind("\\")] + module = script[script.rfind("\\")+1:] + else: + #Linux paths + module_path = script[0:script.rfind("/")] + module = script[script.rfind("/")+1:] + eval_jep("import sys") + eval_jep("sys.path.append('" + module_path + "')") + if module.endswith(".py"): + module = module[0:-3] + + f = module+"_" + function+"_"+str(j.hashCode()) + try: + if reload: + eval_jep("import " + module) + eval_jep("_=reload(" + module+")") + eval_jep("from " + module + " import " + function + " as " + f) + if (kwargs is not None) and (len(kwargs)>0): + #invoke with kwargs only available in JEP>3.8 + hm=java.util.HashMap() + hm.update(kwargs) + #The only way to get the overloaded method... 
+ m = j.getClass().getMethod("invoke", [String, ScriptUtils.getType("[o"), java.util.Map]) + ret = m.invoke(j, [f, to_array(args,'o'), hm]) + else: + ret = j.invoke(f, args) + finally: + __print_stdout() + return ret + +#Converts pythonlist or Java array to numpy array +def to_npa(data, dimensions = None, type = None): + if (not isinstance(data, PyArray)) or (type is not None): + data = to_array(data,'d' if type is None else type) + return jep.NDArray(data, dimensions) + +#recursivelly converts all NumPy arrays to Java arrys +def rec_from_npa(obj): + if isinstance(obj, jep.NDArray): + ret = obj.data + if len(obj.dimensions)>1: + ret=Convert.reshape(ret, obj.dimensions) + return ret + if isinstance(obj, java.util.List) or isinstance(obj,tuple) or isinstance(obj,list): + ret=[] + for i in range(len(obj)): + ret.append(rec_from_npa(obj[i])) + if isinstance(obj,tuple): + return type(ret) + return ret + if isinstance(obj, java.util.Map) or isinstance(obj,dict): + ret = {} if isinstance(obj,dict) else java.util.HashMap() + for k in obj.keys(): + ret[k] = rec_from_npa(obj[k]) + return ret + return obj + +#recursivelly converts all Java arrays to NumPy arrys +def rec_to_npa(obj): + if isinstance(obj, PyArray): + dimensions = Arr.getShape(obj) + if len(dimensions)>1: + obj = Convert.flatten(obj) + return to_npa(obj, dimensions = dimensions) + if isinstance(obj, java.util.List) or isinstance(obj,tuple) or isinstance(obj,list): + ret=[] + for i in range(len(obj)): + ret.append(rec_to_npa(obj[i])) + if isinstance(obj,tuple): + return tuple(ret) + return ret + if isinstance(obj, java.util.Map) or isinstance(obj,dict): + ret = {} if isinstance(obj,dict) else java.util.HashMap() + for k in obj.keys(): + ret[k] = rec_to_npa(obj[k]) + return ret + return obj + +def call_py(module, function, reload_function, *args, **kwargs): + """ + Calls a CPython function recursively crecursively converting Java arrays in arguments to NumPy, + and NumPy arrays in return values to Java arrays. 
+ """ + ret = call_jep(module, function, rec_to_npa(args), rec_to_npa(kwargs), reload=reload_function) + return rec_from_npa(ret) + +def import_py(module, function): + """ + Adds a CPython function to globals, creating a wrapper call to JEP, with + recurvive convertion of Java arrays in arguments to NumPy arrays, + and NumPy arrays in return values to Java arrays. + """ + def jep_wrapper(*args, **kwargs): + reload_function = jep_wrapper.reload + jep_wrapper.reload = False + return call_py(module, function, reload_function, *args, **kwargs) + jep_wrapper.reload=True + _get_caller().f_globals[function] = jep_wrapper + \ No newline at end of file diff --git a/script/Lib/mathutils.py b/script/Lib/mathutils.py new file mode 100644 index 0000000..32d9e72 --- /dev/null +++ b/script/Lib/mathutils.py @@ -0,0 +1,681 @@ +################################################################################################### +# Facade to Apache Commons Math +################################################################################################### + +import sys +import math +import operator + +import java.util.List +import java.lang.reflect.Array +import java.lang.Class as Class +import jarray +import org.python.core.PyArray as PyArray +import ch.psi.utils.Convert as Convert + +import org.apache.commons.math3.util.FastMath as FastMath +import org.apache.commons.math3.util.Pair as Pair +import org.apache.commons.math3.complex.Complex as Complex + +import org.apache.commons.math3.analysis.DifferentiableUnivariateFunction as DifferentiableUnivariateFunction +import org.apache.commons.math3.analysis.function.Gaussian as Gaussian +import org.apache.commons.math3.analysis.function.HarmonicOscillator as HarmonicOscillator +import org.apache.commons.math3.analysis.differentiation.DerivativeStructure as DerivativeStructure +import org.apache.commons.math3.analysis.differentiation.FiniteDifferencesDifferentiator as FiniteDifferencesDifferentiator +import 
org.apache.commons.math3.analysis.integration.SimpsonIntegrator as SimpsonIntegrator +import org.apache.commons.math3.analysis.integration.TrapezoidIntegrator as TrapezoidIntegrator +import org.apache.commons.math3.analysis.integration.RombergIntegrator as RombergIntegrator +import org.apache.commons.math3.analysis.integration.MidPointIntegrator as MidPointIntegrator +import org.apache.commons.math3.analysis.polynomials.PolynomialFunction as PolynomialFunction +import org.apache.commons.math3.analysis.polynomials.PolynomialFunctionLagrangeForm as PolynomialFunctionLagrangeForm +import org.apache.commons.math3.analysis.solvers.LaguerreSolver as LaguerreSolver +import org.apache.commons.math3.analysis.UnivariateFunction as UnivariateFunction +import org.apache.commons.math3.analysis.interpolation.SplineInterpolator as SplineInterpolator +import org.apache.commons.math3.analysis.interpolation.LinearInterpolator as LinearInterpolator +import org.apache.commons.math3.analysis.interpolation.NevilleInterpolator as NevilleInterpolator +import org.apache.commons.math3.analysis.interpolation.LoessInterpolator as LoessInterpolator +import org.apache.commons.math3.analysis.interpolation.DividedDifferenceInterpolator as DividedDifferenceInterpolator +import org.apache.commons.math3.analysis.interpolation.AkimaSplineInterpolator as AkimaSplineInterpolator + +import org.apache.commons.math3.fitting.GaussianCurveFitter as GaussianCurveFitter +import org.apache.commons.math3.fitting.PolynomialCurveFitter as PolynomialCurveFitter +import org.apache.commons.math3.fitting.HarmonicCurveFitter as HarmonicCurveFitter +import org.apache.commons.math3.fitting.WeightedObservedPoint as WeightedObservedPoint +import org.apache.commons.math3.fitting.leastsquares.MultivariateJacobianFunction as MultivariateJacobianFunction +import org.apache.commons.math3.fitting.leastsquares.LeastSquaresBuilder as LeastSquaresBuilder +import 
org.apache.commons.math3.fitting.leastsquares.LevenbergMarquardtOptimizer as LevenbergMarquardtOptimizer +import org.apache.commons.math3.fitting.leastsquares.GaussNewtonOptimizer as GaussNewtonOptimizer + +import org.apache.commons.math3.stat.regression.SimpleRegression as SimpleRegression + +import org.apache.commons.math3.transform.FastFourierTransformer as FastFourierTransformer +import org.apache.commons.math3.transform.DftNormalization as DftNormalization +import org.apache.commons.math3.transform.TransformType as TransformType + +import org.apache.commons.math3.linear.ArrayRealVector as ArrayRealVector +import org.apache.commons.math3.linear.Array2DRowRealMatrix as Array2DRowRealMatrix +import org.apache.commons.math3.linear.MatrixUtils as MatrixUtils + + + +################################################################################################### +#Derivative and interpolation +################################################################################################### + +def get_values(f, xdata): + """Return list of values of a function + + Args: + f(UnivariateFunction): function + xdata(float array or list): Domain values + Returns: + List of doubles + + """ + v = [] + for x in xdata: + v.append(f.value(x)) + return v + +def interpolate(data, xdata = None, interpolation_type = "linear"): + """Interpolate data array or list to a UnivariateFunction + + Args: + data(float array or list): The values to interpolate + xdata(float array or list, optional): Domain values + interpolation_type(str , optional): "linear", "cubic", "akima", "neville", "loess", "newton" + Returns: + UnivariateDifferentiableFunction object + + """ + if xdata is None: + from startup import frange + xdata = frange(0, len(data), 1.0) + else: + #X must be ordered + xy = sorted(zip(xdata,data), key=operator.itemgetter(0)) + xdata, data = zip(*xy) + if len(data) != len(xdata) or len(data)<2: + raise Exception("Dimension mismatch") + + if interpolation_type == "cubic": + i = 
SplineInterpolator() + elif interpolation_type == "linear": + i = LinearInterpolator() + elif interpolation_type == "akima": + i = AkimaSplineInterpolator() + elif interpolation_type == "neville": + i = NevilleInterpolator() + elif interpolation_type == "loess": + i = LoessInterpolator() + elif interpolation_type == "newton": + i = DividedDifferenceInterpolator() + else: + raise Exception("Invalid interpolation type") + from startup import to_array + return i.interpolate(to_array(xdata,'d'), to_array(data,'d')) + +def deriv(f, xdata = None, interpolation_type = "linear"): + """Calculate derivative of UnivariateFunction, array or list. + + Args: + f(UnivariateFunction or array): The function object. If array it is interpolated. + xdata(float array or list, optional): Domain values to process. + interpolation_type(str , optional): "linear", "cubic", "akima", "neville", "loess", "newton" + Returns: + List with the derivative values for xdata + + """ + if not isinstance(f,UnivariateFunction): + if xdata is None: + from startup import frange + xdata = frange(0, len(f), 1.0) + f = interpolate(f, xdata, interpolation_type) + if xdata is None: + if isinstance(f,DifferentiableUnivariateFunction): + return f.derivative() + raise Exception("Domain range not defined") + d = [] + for x in xdata: + xds = DerivativeStructure(1, 2, 0, x) + yds = f.value(xds) + d.append( yds.getPartialDerivative(1)) + return d + +def integrate(f, range = None, xdata = None, interpolation_type = "linear", integrator_type = "simpson"): + """Integrate UnivariateFunction, array or list in an interval. + + Args: + f(UnivariateFunction or array): The function object. If array it is interpolated. + range(list, optional): integration range ([min, max]). + xdata(float array or list, optional): disregarded if f is UnivariateFunction. 
+ interpolation_type(str , optional): "linear", "cubic", "akima", "neville", "loess", "newton" + integrator_type(str , optional): "simpson", "trapezoid", "romberg" or "midpoint" + Returns: + Integrated value (Float) + + """ + if not isinstance(f, UnivariateFunction): + from startup import frange + if xdata is None: + xdata = frange(0, len(f), 1.0) + if range is None: + range = xdata + f = interpolate(f, xdata, interpolation_type) + if range is None: + raise Exception("Domain range not defined") + d = [] + if integrator_type == "simpson": + integrator = SimpsonIntegrator() + elif integrator_type == "trapezoid": + integrator = TrapezoidIntegrator() + elif integrator_type == "romberg": + integrator = RombergIntegrator() + elif integrator_type == "midpoint": + integrator = MidPointIntegrator() + raise Exception("Invalid integrator type") + lower = min(range) + upper = max(range) + return integrator.integrate(MAX_EVALUATIONS, f, lower, upper) + +def trapz(y, xdata=None): + """Integrate an array or list using the composite trapezoidal rule. + + Args: + y(array or list) + xdata(float array or list, optional) + """ + return integrate(y, range = None, xdata = xdata, interpolation_type = "linear", integrator_type = "trapezoid") + +################################################################################################### +#Fitting and peak search +################################################################################################### + +try: + MAX_FLOAT = sys.float_info.max +except: # Python 2.5 + MAX_FLOAT = 1.7976931348623157e+308 + +MAX_ITERATIONS = 1000 +MAX_EVALUATIONS = 1000000 + +def calculate_peaks(function, start_value, end_value = MAX_FLOAT, positive=True): + """Calculate peaks of a DifferentiableUnivariateFunction in a given range by finding the roots of the derivative + + Args: + function(DifferentiableUnivariateFunction): The function object. 
+ start_value(float): start of range + end_value(float, optional): end of range + positive (boolean, optional): True for searching positive peaks, False for negative. + Returns: + List of peaks in the interval + + """ + derivative = function.derivative() + derivative2 = derivative.derivative() + ret = [] + solver = LaguerreSolver() + for complex in solver.solveAllComplex(derivative.coefficients, start_value): + r = complex.real + if start_value < r < end_value: + if (positive and (derivative2.value(r) < 0)) or ( (not positive) and (derivative2.value(r) > 0)): + ret.append(r) + return ret + + +def estimate_peak_indexes(data, xdata = None, threshold = None, min_peak_distance = None, positive = True): + """Estimation of peaks in an array by ordering local maxima according to given criteria. + + Args: + data(float array or list) + xdata(float array or list, optional): if not None must have the same length as data. + threshold(float, optional): if specified filter peaks below this value + min_peak_distance(float, optional): if specified defines minimum distance between two peaks. + if xdata == None, it represents index counts, otherwise in xdata units. + positive (boolean, optional): True for searching positive peaks, False for negative. + Returns: + List of peaks indexes. + """ + peaks = [] + indexes = sorted(range(len(data)),key=lambda x:data[x]) + if positive: + indexes = reversed(indexes) + for index in indexes: + first = (index == 0) + last = (index == (len(data)-1)) + val=data[index] + prev = float('NaN') if first else data[index-1] + next = float('NaN') if last else data[index+1] + + if threshold is not None: + if (positive and (valthreshold)): + break + if ( positive and (first or val>prev ) and (last or val>=next ) ) or ( + (not positive) and (first or vallen(y))): + raise Exception("Invalid data for fit") + +def fit_gaussians(y, x, peak_indexes): + """Fits data on multiple gaussians on the given peak indexes. 
+ + Args: + x(float array or list) + y(float array or list) + peak_indexes(list of int) + Returns: + List of tuples of gaussian parameters: (normalization, mean, sigma) + """ + _assert_valid_for_fit(y,x) + ret = [] + + minimum = min(y) + for peak in peak_indexes: + #Copy data + data = y[:] + #Remover data from other peaks + for p in peak_indexes: + limit = int(round((p+peak)/2)) + if (p > peak): + data[limit : len(y)] =[minimum] * (len(y)-limit) + elif (p < peak): + data[0:limit] = [minimum] *limit + #Build fit point list + values = create_fit_point_list(data, x) + maximum = max(data) + gaussian_fitter = GaussianCurveFitter.create().withStartPoint([(maximum-minimum)/2,x[peak],1.0]).withMaxIterations(MAX_ITERATIONS) + #Fit return parameters: (normalization, mean, sigma) + try: + ret.append(gaussian_fitter.fit(values).tolist()) + except: + ret.append(None) #Fitting error + return ret + + +def create_fit_point_list(y, x, weights = None): + values = [] + for i in sorted(range(len(x)),key=lambda v:x[v]): #Creating list ordered by x, needed for gauss fit + if weights is None: + values.append(WeightedObservedPoint(1.0, x[i], y[i])) + else: + values.append(WeightedObservedPoint(weights[i], x[i], y[i])) + return values + +def fit_polynomial(y, x, order, start_point = None, weights = None): + """Fits data into a polynomial. + + Args: + x(float array or list): observed points x + y(float array or list): observed points y + order(int): if start_point is provided order parameter is disregarded - set to len(start_point)-1. + start_point(optional tuple of float): initial parameters (a0, a1, a2, ...) + weights(optional float array or list): weight for each observed point + Returns: + Tuples of polynomial parameters: (a0, a1, a2, ...) 
+ """ + _assert_valid_for_fit(y,x) + fit_point_list = create_fit_point_list(y, x, weights) + if start_point is None: + polynomial_fitter = PolynomialCurveFitter.create(order).withMaxIterations(MAX_ITERATIONS) + else: + polynomial_fitter = PolynomialCurveFitter.create(0).withStartPoint(start_point).withMaxIterations(MAX_ITERATIONS) + try: + return polynomial_fitter.fit(fit_point_list).tolist() + except: + raise Exception("Fitting failure") + +def fit_gaussian(y, x, start_point = None, weights = None): + """Fits data into a gaussian. + + Args: + x(float array or list): observed points x + y(float array or list): observed points y + start_point(optional tuple of float): initial parameters (normalization, mean, sigma) + If None, use a custom initial estimation. + Set to "default" to force Commons.Math the default (GaussianCurveFitter.ParameterGuesser). + weights(optional float array or list): weight for each observed point + Returns: + Tuples of gaussian parameters: (normalization, mean, sigma) + """ + _assert_valid_for_fit(y,x) + fit_point_list = create_fit_point_list(y, x, weights) + + #If start point not provided, start on peak + if start_point is None: + maximum, minimum = max(y), min(y) + norm = maximum - minimum + mean = x[y.index(maximum)] + sigma = trapz([v-minimum for v in y], x) / (norm*math.sqrt(2*math.pi)) + start_point = (norm, mean, sigma) + elif start_point == "simple": + start_point = [(max(y)-min(y))/2, x[y.index(max(y))], 1.0] + elif start_point == "default": + start_point = GaussianCurveFitter.ParameterGuesser(fit_point_list).guess().tolist() + gaussian_fitter = GaussianCurveFitter.create().withStartPoint(start_point).withMaxIterations(MAX_ITERATIONS) + try: + return gaussian_fitter.fit(fit_point_list).tolist() # (normalization, mean, sigma) + except: + raise Exception("Fitting failure") + +def fit_harmonic(y, x, start_point = None, weights = None): + """Fits data into an harmonic. 
+ + Args: + x(float array or list): observed points x + y(float array or list): observed points y + start_point(optional tuple of float): initial parameters (amplitude, angular_frequency, phase) + weights(optional float array or list): weight for each observed point + Returns: + Tuples of harmonic parameters: (amplitude, angular_frequency, phase) + """ + _assert_valid_for_fit(y,x) + fit_point_list = create_fit_point_list(y, x, weights) + if start_point is None: + harmonic_fitter = HarmonicCurveFitter.create().withMaxIterations(MAX_ITERATIONS) + else: + harmonic_fitter = HarmonicCurveFitter.create().withStartPoint(start_point).withMaxIterations(MAX_ITERATIONS) + try: + return harmonic_fitter.fit(fit_point_list).tolist() # (amplitude, angular_frequency, phase) + except: + raise Exception("Fitting failure") + + +def fit_gaussian_offset(y, x, start_point = None, weights = None): + """Fits data into a gaussian with offset (constant background). + f(x) = a + b * exp(-(pow((x - c), 2) / (2 * pow(d, 2)))) + + Args: + x(float array or list): observed points x + y(float array or list): observed points y + start_point(optional tuple of float): initial parameters (normalization, mean, sigma) + weights(optional float array or list): weight for each observed point + Returns: + Tuples of gaussian parameters: (offset, normalization, mean, sigma) + """ + + # For normalised gauss curve sigma=1/(amp*sqrt(2*pi)) + if start_point is None: + off = min(y) # good enough starting point for offset + com = x[y.index(max(y))] + amp = max(y) - off + sigma = trapz([v-off for v in y], x) / (amp*math.sqrt(2*math.pi)) + start_point = [off, amp, com , sigma] + + class Model(MultivariateJacobianFunction): + def value(self, variables): + value = ArrayRealVector(len(x)) + jacobian = Array2DRowRealMatrix(len(x), 4) + for i in range(len(x)): + (a,b,c,d) = (variables.getEntry(0), variables.getEntry(1), variables.getEntry(2), variables.getEntry(3)) + v = math.exp(-(math.pow((x[i] - c), 2) / (2 * 
math.pow(d, 2)))) + model = a + b * v + value.setEntry(i, model) + jacobian.setEntry(i, 0, 1) # derivative with respect to p0 = a + jacobian.setEntry(i, 1, v) # derivative with respect to p1 = b + v2 = b*v*((x[i] - c)/math.pow(d, 2)) + jacobian.setEntry(i, 2, v2) # derivative with respect to p2 = c + jacobian.setEntry(i, 3, v2*(x[i] - c)/d ) # derivative with respect to p3 = d + return Pair(value, jacobian) + + model = Model() + target = [v for v in y] #the target is to have all points at the positios + (parameters, residuals, rms, evals, iters) = optimize_least_squares(model, target, start_point, weights) + return parameters + + +def fit_gaussian_linear(y, x, start_point = None, weights = None): + """Fits data into a gaussian with linear background. + f(x) = a * x + b + c * exp(-(pow((x - d), 2) / (2 * pow(e, 2)))) + + Args: + x(float array or list): observed points x + y(float array or list): observed points y + start_point(optional tuple of float): initial parameters (normalization, mean, sigma) + weights(optional float array or list): weight for each observed point + Returns: + Tuples of gaussian parameters: (a, b, normalization, mean, sigma) + """ + + # For normalised gauss curve sigma=1/(amp*sqrt(2*pi)) + if start_point is None: + off = min(y) # good enough starting point for offset + com = x[y.index(max(y))] + amp = max(y) - off + sigma = trapz([v-off for v in y], x) / (amp*math.sqrt(2*math.pi)) + start_point = [0, off, amp, com, sigma] + + class Model(MultivariateJacobianFunction): + def value(self, variables): + value = ArrayRealVector(len(x)) + jacobian = Array2DRowRealMatrix(len(x), 5) + for i in range(len(x)): + (a,b,c,d,e) = (variables.getEntry(0), variables.getEntry(1), variables.getEntry(2), variables.getEntry(3), variables.getEntry(4)) + v = math.exp(-(math.pow((x[i] - d), 2) / (2 * math.pow(e, 2)))) + model = a*x[i] + b + c * v + value.setEntry(i, model) + jacobian.setEntry(i, 0, x[i]) # derivative with respect to p0 = a + jacobian.setEntry(i, 1, 
1) # derivative with respect to p1 = b + jacobian.setEntry(i, 2, v) # derivative with respect to p2 = c + v2 = c*v*((x[i] - d)/math.pow(e, 2)) + jacobian.setEntry(i, 3, v2) # derivative with respect to p3 = d + jacobian.setEntry(i, 4, v2*(x[i] - d)/e ) # derivative with respect to p4 = e + return Pair(value, jacobian) + + model = Model() + target = [v for v in y] #the target is to have all points at the positios + (parameters, residuals, rms, evals, iters) = optimize_least_squares(model, target, start_point, weights) + return parameters + +def fit_gaussian_exp_bkg(y, x, start_point = None, weights = None): + """Fits data into a gaussian with exponential background. + f(x) = a * math.exp(-(x/b)) + c * exp(-(pow((x - d), 2) / (2 * pow(e, 2)))) + + Args: + x(float array or list): observed points x + y(float array or list): observed points y + start_point(optional tuple of float): initial parameters (normalization, mean, sigma) + weights(optional float array or list): weight for each observed point + Returns: + Tuples of gaussian parameters: (a,b , normalization, mean, sigma) + """ + + # For normalised gauss curve sigma=1/(amp*sqrt(2*pi)) + if start_point is None: + off = min(y) # good enough starting point for offset + com = x[len(x)/2] + #com = 11.9 + amp = max(y) - off + sigma = trapz([v-off for v in y], x) / (amp*math.sqrt(2*math.pi)) + start_point = [1, 1, amp, com, sigma] + + class Model(MultivariateJacobianFunction): + def value(self, variables): + value = ArrayRealVector(len(x)) + jacobian = Array2DRowRealMatrix(len(x), 5) + for i in range(len(x)): + (a,b,c,d,e) = (variables.getEntry(0), variables.getEntry(1), variables.getEntry(2), variables.getEntry(3), variables.getEntry(4)) + v = math.exp(-(math.pow((x[i] - d), 2) / (2 * math.pow(e, 2)))) + bkg=math.exp(-(x[i]/b)) + model = a*bkg + c * v + value.setEntry(i, model) + jacobian.setEntry(i, 0, bkg) # derivative with respect to p0 = a + jacobian.setEntry(i, 1, a*x[i]*bkg/math.pow(b, 2)) # derivative with respect 
to p1 = b + jacobian.setEntry(i, 2, v) # derivative with respect to p2 = c + v2 = c*v*((x[i] - d)/math.pow(e, 2)) + jacobian.setEntry(i, 3, v2) # derivative with respect to p3 = d + jacobian.setEntry(i, 4, v2*(x[i] - d)/e ) # derivative with respect to p4 = e + return Pair(value, jacobian) + + model = Model() + target = [v for v in y] #the target is to have all points at the positios + (parameters, residuals, rms, evals, iters) = optimize_least_squares(model, target, start_point, weights) + return parameters + +################################################################################################### +#Functions +################################################################################################### + +class GaussianOffset(UnivariateFunction): + def __init__(self, offset, normalization, mean_value, sigma): + self.gaussian = Gaussian(normalization, mean_value, sigma) + self.offset = offset + def value(self,x): + return self.gaussian.value(x) + self.offset + +class GaussianLinear(UnivariateFunction): + def __init__(self, a,b, normalization, mean_value, sigma): + self.gaussian = Gaussian(normalization, mean_value, sigma) + self.a = a + self.b = b + def value(self,x): + return self.gaussian.value(x) + self.a * x + self.b + +class GaussianExpBkg(UnivariateFunction): + def __init__(self, a, b, normalization, mean_value, sigma): + self.gaussian = Gaussian(normalization, mean_value, sigma) + self.a = a + self.b = b + def value(self,x): + return self.gaussian.value(x) + self.a * math.exp(-(x/self.b)) + +################################################################################################### +#Least squares +################################################################################################### + +def optimize_least_squares(model, target, initial, weights): + """Fits a parametric model to a set of observed values by minimizing a cost function. 
+ + Args: + model(MultivariateJacobianFunction): observed points x + target(float array or list): observed data + initial(optional tuple of float): initial guess + weights(optional float array or list): weight for each observed point + Returns: + Tuples of harmonic parameters: (amplitude, angular_frequency, phase) + """ + if isinstance(weights,tuple) or isinstance(weights,list): + weights = MatrixUtils.createRealDiagonalMatrix(weights) + problem = LeastSquaresBuilder().start(initial).model(model).target(target).lazyEvaluation(False).maxEvaluations(MAX_EVALUATIONS).maxIterations(MAX_ITERATIONS).weight(weights).build() + optimizer = LevenbergMarquardtOptimizer() + optimum = optimizer.optimize(problem) + + parameters=optimum.getPoint().toArray().tolist() + residuals = optimum.getResiduals().toArray().tolist() + rms = optimum.getRMS() + evals = optimum.getEvaluations() + iters = optimum.getIterations() + return (parameters, residuals, rms, evals, iters) + + +################################################################################################### +#FFT +################################################################################################### + +def is_power(num, base): + if base<=1: return num == 1 + power = int (math.log (num, base) + 0.5) + return base ** power == num + +def pad_to_power_of_two(data): + if is_power(len(data),2): + return data + pad =(1 << len(data).bit_length()) - len(data) + elem = complex(0,0) if type(data[0]) is complex else [0.0,] + return data + elem * pad + +def get_real(values): + """Returns real part of a complex numbers vector. + Args: + values: List of complex. + Returns: + List of float + """ + ret = [] + for c in values: + ret.append(c.real) + return ret + +def get_imag(values): + """Returns imaginary part of a complex numbers vector. + Args: + values: List of complex. 
+ Returns: + List of float + """ + ret = [] + for c in values: + ret.append(c.imag) + return ret + +def get_modulus(values): + """Returns the modulus of a complex numbers vector. + Args: + values: List of complex. + Returns: + List of float + """ + ret = [] + for c in values: + ret.append(math.hypot(c.imag,c.real)) + return ret + +def get_phase(values): + """Returns the phase of a complex numbers vector. + Args: + values: List of complex. + Returns: + List of float + """ + ret = [] + for c in values: + ret.append(math.atan(c.imag/c.real)) + return ret + +def fft(f): + """Calculates the Fast Fourrier Transform of a vector, padding to the next power of 2 elements. + Args: + values(): List of float or complex + Returns: + List of complex + """ + f = pad_to_power_of_two(f) + if type(f[0]) is complex: + aux = [] + for c in f: + aux.append(Complex(c.real, c.imag)) + f = aux + fftt = FastFourierTransformer(DftNormalization.STANDARD) + ret = [] + for c in fftt.transform(f,TransformType.FORWARD ): + ret.append(complex(c.getReal(),c.getImaginary())) + return ret + +def ffti(f): + """Calculates the Inverse Fast Fourrier Transform of a vector, padding to the next power of 2 elements. 
def plot_function(plot, function, name, range, show_points = True, show_lines = True, color = None):
    """Plot a univariate function sampled over the given x values.

    Fixes the docstring: the original "Returns: Tuples of harmonic
    parameters" was copy-pasted from the fitting helpers; this function
    returns the created plot series.

    Args:
        plot(LinePlot): target plot.
        function(UnivariateFunction): Gaussian, PolynomialFunction, HarmonicOscillator...
        name(str): name of the series.
        range(list or array of floats): x values to plot.
            NOTE: the parameter name shadows the builtin 'range'; kept for API compatibility.
        show_points(bool): draw point markers.
        show_lines(bool): connect points with lines.
        color: series color, or None for the plot default.
    Returns:
        The created series (LinePlotErrorSeries when the plot style is an
        error plot, otherwise LinePlotSeries).
    """
    if plot.style.isError():
        s = LinePlotErrorSeries(name, color)
    else:
        s = LinePlotSeries(name, color)
    plot.addSeries(s)
    s.setPointsVisible(show_points)
    s.setLinesVisible(show_lines)
    for x in range:
        s.appendData(x, function.value(x))
    return s
def plot_rectangle(plot, x1, y1, x2, y2, width = 1, color = None, name = "Rectangle"):
    """Draw a rectangle outline on a line plot, given two opposite corners.

    Args:
        plot(LinePlot): target plot.
        x1, y1, x2, y2: opposite corner coordinates.
        width(int): line width.
        color: series color, or None for the plot default.
        name(str): name of the series.
    Returns:
        The created LinePlotSeries.
    """
    series = LinePlotSeries(name, color)
    plot.addSeries(series)
    series.setLineWidth(width)
    series.setPointsVisible(False)
    # Trace the outline corner by corner, closing back on the first point.
    for px, py in ((x1, y1), (x1, y2), (x2, y2), (x2, y1), (x1, y1)):
        series.appendData(px, py)
    return series
plot.addSeries(s) + s.setLineWidth(width) + s.setPointsVisible(False) + res=float(radius) / 100.0 + epson = 1e-12 + for xp in frange (cx+radius-epson , cx-radius+epson , -res): + yp = math.sqrt(math.pow(radius, 2) - math.pow(xp - cx, 2)) + cy + s.appendData(xp, yp) + for xp in frange (cx-radius+epson , cx+radius-epson, res): + yp = -math.sqrt(math.pow(radius, 2) - math.pow(xp - cx, 2)) + cy + s.appendData(xp, yp) + if s.getCount()>0: + s.appendData(s.getX()[0], s.getY()[0]) + return s \ No newline at end of file diff --git a/script/Lib/rsync.py b/script/Lib/rsync.py new file mode 100644 index 0000000..56eac78 --- /dev/null +++ b/script/Lib/rsync.py @@ -0,0 +1,149 @@ +#################################################################################################### +# Utilities for synchronizing folders with rsync +# On RH7 (not SL6) +# Change permission of the account, otherwise SSH keys are not accepted: +# ~/.ssh from drwxr-S--- to drwx---- +# ~ : from drwxrws--- to drwxr-s--- +#################################################################################################### + +import sys +import os +import os.path +import shutil +import ch.psi.utils.Sys + +from startup import exec_cmd, log + +RSYNC_GENERATE_USER_KEY = True +XTERM = "/opt/X11/bin/xterm" if ch.psi.utils.Sys.getOSFamily().name()=="Mac" else "xterm" + +def rsync(src, dest, key): + #cmd = 'rsync -e "ssh -i ' + key + ' -o LogLevel=quiet" --chmod=ug=rwx --verbose --modify-window=1 --times --recursive ' + src + ' ' + dest + #ret = exec_cmd(cmd) + cmd = 'rsync -e "ssh -i ' + key + '" --chmod=ug=rwx --verbose --modify-window=1 --times --recursive ' + src + ' ' + dest + ret = exec_cmd(cmd, False) + lines = ret.split("\n") + lines = filter(lambda x: x != "", lines) + if len(lines)<3: + print "Invalid return from rsync:\n", ret + raise Exception ("Invalid format") + #files = lines[1:-2] + files = [] + head,tail=os.path.split(src) + for l in lines: + f = os.path.join(head,l) + if os.path.exists(f): + 
def remove_user_key(do_print=True):
    """Delete the generated ssh key pair (~/.ssh/ke and ~/.ssh/ke.pub).

    Args:
        do_print(bool): print the command output (or a success message).
    """
    output = exec_cmd("rm ~/.ssh/ke;" + "rm ~/.ssh/ke.pub", False)
    if do_print:
        # Empty output means both files were removed without complaint.
        print(output if output.strip() else "Success removing ssh keys")

def reset_user_key(do_print=True):
    """Remove any existing generated key pair and create a fresh RSA one.

    Args:
        do_print(bool): print the output of the key removal and generation.
    """
    remove_user_key(do_print)
    output = exec_cmd("ssh-keygen -N '' -f ~/.ssh/ke -t rsa;")
    if do_print:
        print(output)
def is_authorized():
    """Tell whether the ssh key used for rsync user transfers is present.

    Returns:
        bool: True when the key file exists (generated key 'ke' when
        RSYNC_GENERATE_USER_KEY is set, the account 'id_rsa' otherwise).
    """
    key_name = "ke" if RSYNC_GENERATE_USER_KEY else "id_rsa"
    return os.path.isfile(os.path.expanduser("~/.ssh/" + key_name))
def session_start(name, metadata=None):
    """ Starts new session. If a session is open, completes it first.

    (Docstring typo fixed: "os open" -> "is open".)

    Args:
        name(str): Session name.
        metadata(dict): Map of initial metadata parameters.
            If None (default) uses the default metadata definition.

    Returns:
        session id (int)
    """
    # NOTE(review): presumably closes any open data file before switching
    # sessions - confirm the semantics of the 'open' execution parameter.
    set_exec_pars(open=False)
    return _sm().start(name, metadata)
def session_get_name(id=None):
    """ Return the name of a session.

    Args:
        id(int): Session id. Default (None) is the current session.
            NOTE: parameter name shadows the builtin 'id'; kept for API compatibility.

    Returns:
        session name (str)
    """
    if id is None:
        return _sm().getName()
    return _sm().getName(id)


def session_get_state(id=None):
    """ Returns the session state.

    Args:
        id(int): Session id. Default (None) is the current session.

    Returns:
        session state (str)
    """
    if id is None:
        return _sm().getState()
    return _sm().getState(id)
def session_get_root(id=None):
    """ Returns the root data path of a session.

    Args:
        id(int): Session id. Default (None) is the current session.

    Returns:
        str
    """
    if id is None:
        return _sm().getRoot()
    return _sm().getRoot(id)
def session_get_runs(id=None, relative=True):
    """ Return the runs of a session.

    Args:
        id(int): Session id. Default (None) is the current session.
        relative(bool): if True use relative file names (for files under the data root path)

    Returns:
        List of dicts
    """
    if id is None:
        # Boxing the bool - presumably to select the single-Boolean Java
        # overload rather than (id, bool); confirm against the Java API.
        return _sm().getRuns(java.lang.Boolean(relative))
    return _sm().getRuns(id, relative)
def session_ingest_scicat(id, matadata=None):
    """ Ingest a completed session to SciCat.

    Fixes the mutable default argument ({} shared across calls); None is now
    the default and is treated as an empty map. The parameter name 'matadata'
    (sic) is kept so keyword callers keep working.

    Args:
        id(int): Session id.
        matadata(dict): session metadata.

    Returns:
        Tuple (Dataset Name, Dataset ID) in case of success. Otherwise throws an exception.
    """
    if matadata is None:
        matadata = {}
    sciCat = SciCat()
    result = sciCat.ingest(id, matadata)
    print(result.output)
    if not result.success:
        raise Exception("Error ingesting session " + str(id))
    return result.datasetName, result.datasetId
def _getBuiltinFunctionNames(filter = None):
    """Return a Java array with the names of the public built-in functions.

    Args:
        filter(str): optional substring filter on the function name.
            NOTE: parameter name shadows the builtin 'filter'; kept for API compatibility.
    """
    names = [f.func_name for f in _getBuiltinFunctions(filter)]
    return to_array(names)
+ + Returns: + None + """ + if object is None: + print "Built-in functions:" + for f in _getBuiltinFunctionNames(): + print "\t" + f + else: + if type(object) is PyFunction: + print _getFunctionDoc(object) + elif '__doc__' in dir(object): + #The default doc is now shown + import org.python.core.BuiltinDocs.object_doc + if object.__doc__ != org.python.core.BuiltinDocs.object_doc: + print object.__doc__ + +################################################################################################### +#Variable injection +################################################################################################### + +def _get_caller(): + #Not doing inspect.currentframe().f_back because inspect is slow to load + return sys._getframe(1).f_back if hasattr(sys, "_getframe") else None + +def inject(): + """Restore initial globals: re-inject devices and startup variables to the interpreter. + + Args: + None + + Returns: + None + """ + if __name__ == "__main__": + get_context().injectVars() + else: + _get_caller().f_globals.update(get_context().scriptManager.injections) + + +################################################################################################### +#Script evaluation and return values +################################################################################################### + +def run(script_name, args = None, locals = None): + """Run script: can be absolute path, relative, or short name to be search in the path. + Args: + args(Dict ot List): Sets sys.argv (if list) or gobal variables(if dict) to the script. + locals(Dict): If not none sets the locals()for the runing script. + If locals is used then script definitions will not go to global namespace. 
def abort():
    """Abort the execution of ongoing task. It can be called from the script to quit.

    Args:
        None

    Returns:
        None (never returns normally; the calling thread is parked).
    """
    # The abort must run on another thread: this one is the script
    # execution thread that is being aborted.
    fork(get_context().abort)
    while True:
        sleep(10.0)
def get_return():
    """Return the value stored for the current thread by set_return().

    When startup is imported (not __main__), the per-thread result map
    lives in the caller's globals rather than this module's.
    """
    if __name__ == "__main__":
        results = __THREAD_EXEC_RESULT__
    else:
        results = _get_caller().f_globals["__THREAD_EXEC_RESULT__"]
    return results[java.lang.Thread.currentThread()]
def is_array(obj):
    """Best-effort test for a jep PyJArray (a Java array seen from Python).

    Bug fixed: the original compared str(type(obj)) against an empty string,
    which is never true - the class-name literal (presumably
    "<class 'jep.PyJArray'>") was evidently lost. Compare by class name
    instead, which avoids importing PyJArray (see the TODO above).
    TODO confirm against the jep version in use.
    """
    try:
        return type(obj).__name__ == "PyJArray"
    except:
        return False
+ """ + if obj is None: + return None + if type is None: + type = 'o' + enforceArrayType=False + else: + enforceArrayType=True + if type[0] == '[': + type = type[1:] + element_type = ScriptUtils.getPrimitiveType(type) if primitive else ScriptUtils.getType(type) + + def convert_1d_array(obj): + if type == 'c': + ret = reflect.Array.newInstance(element_type,len(obj)) + for i in range(len(obj)): ret[i] = chr(obj[i]) + return ret + if type == 'z': + ret = reflect.Array.newInstance(element_type,len(obj)) + for i in range(len(obj)): + ret[i]= True if obj[i] else False + return ret + if type == 'o': + ret = reflect.Array.newInstance(element_type,len(obj)) + for i in range(len(obj)): + ret[i]= obj[i] + return ret + if type == "s": + return Convert.toStringArray(obj) + if primitive: + ret = Convert.toPrimitiveArray(obj, element_type) + else: + ret = reflect.Array.newInstance(element_type,len(obj)) + for i in range(len(obj)): ret[i] = Convert.toType(obj[i],element_type) + return ret + + if is_array(obj): + if enforceArrayType: + if Arr.getComponentType(obj) != element_type: + rank = Arr.getRank(obj) + if (rank== 1): + obj=convert_1d_array(obj) + elif (rank>1): + pars, aux = [element_type], obj + for i in range(rank): + pars.append(len(aux)) + aux = aux[0] + #TODO: OVERLOADING BUG + #ret = reflect.Array.newInstance(*pars) + ret = Arr.newInstance(*pars) + for i in range(len(obj)): + ret[i]=to_array(obj[i], type) + obj = ret + elif is_list(obj): + if type=='o': + ret = reflect.Array.newInstance(element_type, len(obj)) + for i in range (len(obj)): + if is_list(obj[i]) or is_array(obj[i]): + ret[i] = to_array(obj[i],type) + else: + ret[i] = obj[i] + obj=ret + elif len(obj)>0 and (is_list(obj[0]) or is_array(obj[0])): + pars, aux = [element_type], obj + while len(aux)>0 and (is_list(aux[0]) or is_array(aux[0])): + pars.append(len(aux)) + aux = aux[0] + pars.append(0) + #ret = reflect.Array.newInstance(*pars) + ret = Arr.newInstance(*pars) + for i in range(len(obj)): + 
def np_to_java(obj, dtype=None):
    """Convert a numpy array into a java array, preserving dimensionality.

    Args:
        obj(numpy array): Original data.
        dtype: optional numpy dtype to cast to before conversion.

    Returns:
        Java array, or obj unchanged when it is not a numpy ndarray.
    """
    # Exact type check (not isinstance), matching the original contract.
    if type(obj) != numpy.ndarray:
        return obj
    arr = obj if dtype is None else obj.astype(dtype, copy=False)
    return JepUtils.toJavaArray(arr)

def to_list(obj):
    """Convert an object into a Python List.

    Args:
        obj(tuple or array or List): Original data.

    Returns:
        list (None input is returned as None; scalars are wrapped in a list).
    """
    if obj is None:
        return None
    if isinstance(obj, tuple) or is_java_instance(obj, List):
        return list(obj)
    if isinstance(obj, list):
        return obj
    return [obj]

def is_list(obj):
    """True for Python lists/tuples, jep PyJList proxies and java.util.List."""
    try:
        if obj.__class__.__name__ == "PyJList":
            return True
    except:
        pass
    if isinstance(obj, (tuple, list)):
        return True
    return is_java_instance(obj, List)

def is_string(obj):
    """True only for native Python str objects (exact type check)."""
    return type(obj) is str

def is_main_thread():
    """True when called from the Python main thread."""
    return threading.current_thread() == threading.main_thread()

def is_java_instance(obj, cls):
    """Best-effort check that obj is a Java object of exactly class cls.

    Any failure (e.g. obj is not a Java object at all) yields False.
    """
    try:
        return obj.getClass() == Class.forName(cls.java_name)
    except:
        return False

def get_context():
    """Return the PShell context singleton (main scripting thread only)."""
    if not is_main_thread():
        raise Exception("Application context can only be accessed by the scripting main thread")
    return _core.Context.getInstance()
+from ch.psi.utils import Str as Str +from ch.psi.utils import Sys as Sys +from ch.psi.utils import Arr as Arr +from ch.psi.utils import IO as IO +from ch.psi.utils import Chrono as Chrono +from ch.psi.utils import Folder as Folder +from ch.psi.utils import Histogram as Histogram +from ch.psi.utils import History as History +from ch.psi.utils import Condition as Condition +from ch.psi.utils import ArrayProperties as ArrayProperties +from ch.psi.utils import Audio as Audio +from ch.psi.utils import BitMask as BitMask +from ch.psi.utils import Config as Config +from ch.psi.utils import Inventory as Inventory +from ch.psi.utils import DataAPI as DataAPI +from ch.psi.utils import DispatcherAPI as DispatcherAPI +from ch.psi.utils import EpicsBootInfoAPI as EpicsBootInfoAPI +from ch.psi.utils import Mail as Mail +from ch.psi.utils import Posix as Posix +from ch.psi.utils import ProcessFactory as ProcessFactory +from ch.psi.utils import Range as Range +from ch.psi.utils import Reflection as Reflection +from ch.psi.utils import Serializer as Serializer +from ch.psi.utils import Windows as Windows +from ch.psi.utils import NumberComparator as NumberComparator +from java.util import Iterator as Iterator +from java.util import NoSuchElementException as NoSuchElementException + + +from ch.psi.pshell.core import CommandSource as CommandSource +from ch.psi.pshell.core import ContextAdapter as ContextListener +from ch.psi.pshell.core import Context +from ch.psi.pshell.core import InlineDevice as InlineDevice + +from ch.psi.pshell.data import DataSlice as DataSlice +from ch.psi.pshell.data import PlotDescriptor as PlotDescriptor +from ch.psi.pshell.data import Table as Table +from ch.psi.pshell.data import Provider as Provider +from ch.psi.pshell.data import ProviderHDF5 as ProviderHDF5 +from ch.psi.pshell.data import ProviderText as ProviderText +from ch.psi.pshell.data import ProviderCSV as ProviderCSV +from ch.psi.pshell.data import ProviderFDA as ProviderFDA +from 
ch.psi.pshell.data import Converter as DataConverter +from ch.psi.pshell.data import Layout as Layout +from ch.psi.pshell.data import LayoutBase as LayoutBase +from ch.psi.pshell.data import LayoutDefault as LayoutDefault +from ch.psi.pshell.data import LayoutTable as LayoutTable +from ch.psi.pshell.data import LayoutFDA as LayoutFDA +from ch.psi.pshell.data import LayoutSF as LayoutSF + +from ch.psi.pshell.device import Device as Device +from ch.psi.pshell.device import DeviceBase as DeviceBase +from ch.psi.pshell.device import DeviceConfig as DeviceConfig +from ch.psi.pshell.device import GenericDevice as GenericDevice +from ch.psi.pshell.device import PositionerConfig as PositionerConfig +from ch.psi.pshell.device import RegisterConfig as RegisterConfig +from ch.psi.pshell.device import ReadonlyProcessVariableConfig as ReadonlyProcessVariableConfig +from ch.psi.pshell.device import ProcessVariableConfig as ProcessVariableConfig +from ch.psi.pshell.device import MotorConfig as MotorConfig +from ch.psi.pshell.device import Register as Register +from ch.psi.pshell.device import RegisterBase as RegisterBase +from ch.psi.pshell.device import ProcessVariableBase as ProcessVariableBase +from ch.psi.pshell.device import ControlledVariableBase as ControlledVariableBase +from ch.psi.pshell.device import PositionerBase as PositionerBase +from ch.psi.pshell.device import MasterPositioner as MasterPositioner +from ch.psi.pshell.device import MotorBase as MotorBase +from ch.psi.pshell.device import DiscretePositionerBase as DiscretePositionerBase +from ch.psi.pshell.device import MotorGroupBase as MotorGroupBase +from ch.psi.pshell.device import MotorGroupDiscretePositioner as MotorGroupDiscretePositioner +from ch.psi.pshell.device import ReadonlyRegisterBase as ReadonlyRegisterBase +from ch.psi.pshell.device import ReadonlyAsyncRegisterBase as ReadonlyAsyncRegisterBase +from ch.psi.pshell.device import Register as Register +from ch.psi.pshell.device import Record as Record + 
+RegisterArray = Register.RegisterArray +RegisterNumber = Register.RegisterNumber +RegisterBoolean = Register.RegisterBoolean +from ch.psi.pshell.device import RegisterCache as RegisterCache +from ch.psi.pshell.device import ReadonlyRegister +ReadonlyRegisterArray = ReadonlyRegister.ReadonlyRegisterArray +ReadonlyRegisterMatrix = ReadonlyRegister.ReadonlyRegisterMatrix +from ch.psi.pshell.device import DummyPositioner as DummyPositioner +from ch.psi.pshell.device import DummyMotor as DummyMotor +from ch.psi.pshell.device import DummyRegister as DummyRegister +from ch.psi.pshell.device import Timestamp as Timestamp +from ch.psi.pshell.device import Interlock as Interlock +from ch.psi.pshell.device import Readable as Readable +ReadableArray = Readable.ReadableArray +ReadableMatrix = Readable.ReadableMatrix +ReadableCalibratedArray = Readable.ReadableCalibratedArray +ReadableCalibratedMatrix = Readable.ReadableCalibratedMatrix +from ch.psi.pshell.device import ArrayCalibration as ArrayCalibration +from ch.psi.pshell.device import MatrixCalibration as MatrixCalibration +from ch.psi.pshell.device import Writable as Writable +WritableArray = Writable.WritableArray +from ch.psi.pshell.device import Stoppable as Stoppable +from ch.psi.pshell.device import Averager as Averager +from ch.psi.pshell.device import ArrayAverager as ArrayAverager +from ch.psi.pshell.device import Delta as Delta +from ch.psi.pshell.device import DeviceAdapter as DeviceListener +from ch.psi.pshell.device import ReadbackDeviceAdapter as ReadbackDeviceListener +from ch.psi.pshell.device import MotorAdapter as MotorListener +from ch.psi.pshell.device import MoveMode as MoveMode +from ch.psi.pshell.device import SettlingCondition as SettlingCondition +from ch.psi.pshell.device import HistogramGenerator as HistogramGenerator + +from ch.psi.pshell.epics import Epics as Epics +from ch.psi.pshell.epics import EpicsScan as EpicsScan +from ch.psi.pshell.epics import ChannelSettlingCondition as 
ChannelSettlingCondition +from ch.psi.pshell.epics import AreaDetector as AreaDetector +from ch.psi.pshell.epics import BinaryPositioner as BinaryPositioner +from ch.psi.pshell.epics import ChannelByte as ChannelByte +from ch.psi.pshell.epics import ChannelByteArray as ChannelByteArray +from ch.psi.pshell.epics import ChannelByteMatrix as ChannelByteMatrix +from ch.psi.pshell.epics import ChannelDouble as ChannelDouble +from ch.psi.pshell.epics import ChannelDoubleArray as ChannelDoubleArray +from ch.psi.pshell.epics import ChannelDoubleMatrix as ChannelDoubleMatrix +from ch.psi.pshell.epics import ChannelFloat as ChannelFloat +from ch.psi.pshell.epics import ChannelFloatArray as ChannelFloatArray +from ch.psi.pshell.epics import ChannelFloatMatrix as ChannelFloatMatrix +from ch.psi.pshell.epics import ChannelInteger as ChannelInteger +from ch.psi.pshell.epics import ChannelIntegerArray as ChannelIntegerArray +from ch.psi.pshell.epics import ChannelIntegerMatrix as ChannelIntegerMatrix +from ch.psi.pshell.epics import ChannelShort as ChannelShort +from ch.psi.pshell.epics import ChannelShortArray as ChannelShortArray +from ch.psi.pshell.epics import ChannelShortMatrix as ChannelShortMatrix +from ch.psi.pshell.epics import ChannelString as ChannelString +from ch.psi.pshell.epics import ControlledVariable as ControlledVariable +from ch.psi.pshell.epics import DiscretePositioner as DiscretePositioner +from ch.psi.pshell.epics import GenericChannel as GenericChannel +from ch.psi.pshell.epics import GenericArray as GenericArray +from ch.psi.pshell.epics import GenericMatrix as GenericMatrix +from ch.psi.pshell.epics import Manipulator as Manipulator +from ch.psi.pshell.epics import Motor as EpicsMotor +from ch.psi.pshell.epics import Positioner as Positioner +from ch.psi.pshell.epics import ProcessVariable as ProcessVariable +from ch.psi.pshell.epics import ReadonlyProcessVariable as ReadonlyProcessVariable +from ch.psi.pshell.epics import Scaler as Scaler +from 
ch.psi.pshell.epics import Scienta as Scienta +from ch.psi.pshell.epics import Slit as Slit +from ch.psi.pshell.epics import AreaDetectorSource as AreaDetectorSource +from ch.psi.pshell.epics import ArraySource as ArraySource +from ch.psi.pshell.epics import ByteArraySource as ByteArraySource +from ch.psi.pshell.epics import PsiCamera as PsiCamera +from ch.psi.pshell.epics import CAS as CAS + +from ch.psi.pshell.serial import SerialPortDevice as SerialPortDevice +from ch.psi.pshell.serial import TcpDevice as TcpDevice +from ch.psi.pshell.serial import UdpDevice as UdpDevice +from ch.psi.pshell.serial import SerialPortDeviceConfig as SerialPortDeviceConfig +from ch.psi.pshell.serial import SocketDeviceConfig as SocketDeviceConfig + +from ch.psi.pshell.modbus import ModbusTCP as ModbusTCP +from ch.psi.pshell.modbus import ModbusUDP as ModbusUDP +from ch.psi.pshell.modbus import ModbusSerial as ModbusSerial +from ch.psi.pshell.modbus import AnalogInput as ModbusAI +from ch.psi.pshell.modbus import AnalogInputArray as ModbusMAI +from ch.psi.pshell.modbus import AnalogOutput as ModbusAO +from ch.psi.pshell.modbus import AnalogOutputArray as ModbusMAO +from ch.psi.pshell.modbus import DigitalInput as ModbusDO +from ch.psi.pshell.modbus import DigitalInputArray as ModbusMDI +from ch.psi.pshell.modbus import DigitalOutput as ModbusDO +from ch.psi.pshell.modbus import DigitalOutputArray as ModbusMDO +from ch.psi.pshell.modbus import Register as ModbusReg +from ch.psi.pshell.modbus import ReadonlyProcessVariable as ModbusROPV +from ch.psi.pshell.modbus import ProcessVariable as ModbusPV +from ch.psi.pshell.modbus import ControlledVariable as ModbusCB +from ch.psi.pshell.modbus import ModbusDeviceConfig as ModbusDeviceConfig + +from ch.psi.pshell.imaging import Source as Source +from ch.psi.pshell.imaging import SourceBase as SourceBase +from ch.psi.pshell.imaging import DirectSource as DirectSource +from ch.psi.pshell.imaging import RegisterArraySource as RegisterArraySource 
+from ch.psi.pshell.imaging import RegisterMatrixSource as RegisterMatrixSource +ReadableMatrixSource=RegisterMatrixSource.ReadableMatrixSource +from ch.psi.pshell.imaging import ImageListener as ImageListener +from ch.psi.pshell.imaging import ImageMeasurement as ImageMeasurement +from ch.psi.pshell.imaging import CameraSource as CameraSource +from ch.psi.pshell.imaging import ColormapAdapter as ColormapAdapter +from ch.psi.pshell.imaging import FileSource as FileSource +from ch.psi.pshell.imaging import MjpegSource as MjpegSource +from ch.psi.pshell.imaging import Webcam as Webcam +from ch.psi.pshell.imaging import Filter as Filter +from ch.psi.pshell.imaging import Utils as ImagingUtils +from ch.psi.pshell.imaging import Overlay as Overlay +from ch.psi.pshell.imaging import Overlays as Overlays +from ch.psi.pshell.imaging import Pen as Pen +from ch.psi.pshell.imaging import Data as Data +from ch.psi.pshell.imaging import Colormap as Colormap +from ch.psi.pshell.imaging import Renderer as Renderer + + +from ch.psi.pshell.plot import RangeSelectionPlot as RangeSelectionPlot +RangeSelectionPlotListener= RangeSelectionPlot.RangeSelectionPlotListener +from ch.psi.pshell.plot import LinePlot as LinePlot +from ch.psi.pshell.plot import MatrixPlot as MatrixPlot +from ch.psi.pshell.plot import TimePlot as TimePlot +from ch.psi.pshell.plot import SlicePlot as SlicePlot +from ch.psi.pshell.plot import LinePlotJFree as LinePlotJFree +from ch.psi.pshell.plot import MatrixPlotJFree as MatrixPlotJFree +from ch.psi.pshell.plot import TimePlotJFree as TimePlotJFree +from ch.psi.pshell.plot import SlicePlotDefault as SlicePlotDefault +from ch.psi.pshell.plot import LinePlotSeries as LinePlotSeries +from ch.psi.pshell.plot import LinePlotErrorSeries as LinePlotErrorSeries +from ch.psi.pshell.plot import MatrixPlotSeries as MatrixPlotSeries +from ch.psi.pshell.plot import TimePlotSeries as TimePlotSeries +from ch.psi.pshell.plot import SlicePlotSeries as SlicePlotSeries 
+AxisId=Class.forName("ch.psi.pshell.plot.Plot$AxisId") +LinePlotStyle=Class.forName("ch.psi.pshell.plot.LinePlot$Style") + +from ch.psi.pshell import scan as scans +from ch.psi.pshell.scan import ScanBase as ScanBase +from ch.psi.pshell.scan import ScanResult +from ch.psi.pshell.scan import Otf as Otf +from ch.psi.pshell.scan import ScanAbortedException as ScanAbortedException +from ch.psi.pshell.scan import ScanCallbacks + +from ch.psi.pshell.crlogic import CrlogicPositioner as CrlogicPositioner +from ch.psi.pshell.crlogic import CrlogicSensor as CrlogicSensor + +from ch.psi.pshell.bs import BsScan +from ch.psi.pshell.bs import Stream as Stream +from ch.psi.pshell.bs import Provider as Provider +from ch.psi.pshell.bs import Dispatcher as Dispatcher +from ch.psi.pshell.bs import Scalar as Scalar +from ch.psi.pshell.bs import Waveform as Waveform +from ch.psi.pshell.bs import Matrix as Matrix +from ch.psi.pshell.bs import StreamCamera as StreamCamera +from ch.psi.pshell.bs import CameraServer as CameraServer +from ch.psi.pshell.bs import PipelineServer as PipelineServer +from ch.psi.pshell.bs import ProviderConfig as ProviderConfig +from ch.psi.pshell.bs import StreamConfig as StreamConfig +from ch.psi.pshell.bs import ScalarConfig as ScalarConfig +from ch.psi.pshell.bs import WaveformConfig as WaveformConfig +from ch.psi.pshell.bs import MatrixConfig as MatrixConfig + +from ch.psi.pshell.detector import DetectorConfig as DetectorConfig + +from ch.psi.pshell.ui import App as App + +from ch.psi.pshell.scripting import ViewPreference as Preference +from ch.psi.pshell.scripting import ScriptUtils as ScriptUtils +from ch.psi.pshell.device import Record +from javax.swing import SwingUtilities + + +from org.jfree.ui import RectangleAnchor as RectangleAnchor +from org.jfree.ui import TextAnchor as TextAnchor + + +def string_to_obj(o): + if is_string(o): + o=str(o) + if "://" in o: + return InlineDevice(o) + ret = get_context().getInterpreterVariable(o) + if ret is None: + 
try: + ret = get_context().getScriptManager().evalBackground(o).result + except: + return None + o=ret + elif is_list(o): + ret = [] + for i in o: + ret.append(string_to_obj(i)) + o=ret + proxy_method = getattr(o, "get_proxy", None) + if callable(proxy_method): + return o.get_proxy() + return o + +def json_to_obj(o): + if is_string(o): + import json + return json.loads(o) + elif is_list(o): + ret = [] + for i in o: + ret.append(json_to_obj(i)) + return ret + return o + +################################################################################################### +#Scan device interfaces +################################################################################################### + +class Nameable(): + def __init__(self, name=None, interfaces=[]): + self.name = name + self.proxy=jproxy(self, interfaces) + + def getName(self): + if self.name: + return self.name + return self.__class__.__name__ + + def get_proxy(self): + return self.proxy + +class Writable(Nameable): + __interfaces__=['ch.psi.pshell.device.Writable'] + def __init__(self, name=None): + Nameable.__init__(self, name, Writable.__interfaces__) + def write(self, value): + raise Exception ("Not implemented") + +class Readable(Nameable): + __interfaces__=['ch.psi.pshell.device.Readable'] + def __init__(self, name=None): + Nameable.__init__(self, name, Readable.__interfaces__) + + def read(self): + raise Exception ("Not implemented") + +class ReadableArray(Readable): + __interfaces__=['ch.psi.pshell.device.Readable$ReadableArray'] + def __init__(self, name=None): + Nameable.__init__(self, name, Readable.__interfaces__ + ReadableArray.__interfaces__) + + def read(self): + raise Exception ("Not implemented") + + def getSize(self): + raise Exception ("Not implemented") + +class ReadableCalibratedArray(ReadableArray): + __interfaces__=['ch.psi.pshell.device.Readable$ReadableCalibratedArray'] + def __init__(self, name=None): + Nameable.__init__(self, name, Readable.__interfaces__ + 
ReadableArray.__interfaces__ + ReadableCalibratedArray.__interfaces__) + + def read(self): + raise Exception ("Not implemented") + + def getSize(self): + raise Exception ("Not implemented") + + def getCalibration(self): + raise Exception ("Not implemented") + +class ReadableMatrix(Readable): + __interfaces__=['ch.psi.pshell.device.Readable$ReadableMatrix'] + def __init__(self, name=None): + Nameable.__init__(self, name, Readable.__interfaces__ + ReadableMatrix.__interfaces__) + + def read(self): + raise Exception ("Not implemented") + + def getWidth(self): + raise Exception ("Not implemented") + + def getHeight(self): + raise Exception ("Not implemented") + + +class ReadableCalibratedMatrix(ReadableMatrix): + __interfaces__=['ch.psi.pshell.device.Readable$ReadableCalibratedMatrix'] + def __init__(self, name=None): + Nameable.__init__(self, name, Readable.__interfaces__ + ReadableMatrix.__interfaces__ + ReadableCalibratedMatrix.__interfaces__) + + def read(self): + raise Exception ("Not implemented") + + def getWidth(self): + raise Exception ("Not implemented") + + def getHeight(self): + raise Exception ("Not implemented") + + def getCalibration(self): + raise Exception ("Not implemented") + + +################################################################################################### +#Other Java interfaces +################################################################################################### + + +class GenIterator(Nameable): + __interfaces__ = ['java.util.Iterator'] + def __init__(self, gen): + Nameable.__init__(self, None, GenIterator.__interfaces__) + self.gen = gen + self.cache=self + + def remove(): + pass + + def forEachRemaining(action): + pass + + def hasNext(self): + if self.cache != self: + return True + try: + self.cache=self.gen.__next__() + return True + except: + self.cache = self + return False + + def next(self): + try: + if self.hasNext(): + return self.cache + else: + raise NoSuchElementException() + finally: + self.cache = 
self + +################################################################################################### +#Scan classes +################################################################################################### + +def __no_args(f): + ret = f.func_code.co_argcount + return (ret-1) if type(f)==PyMethod else ret + +def __before_readout(scan, pos): + try: + if scan.before_read != None: + args = __no_args(scan.before_read) + if args==0: scan.before_read() + elif args==1: scan.before_read(pos.tolist()) + elif args==2: scan.before_read(pos.tolist(), scan) + except AttributeError: + pass + +def __after_readout(scan, record): + try: + if scan.after_read != None: + args = __no_args(scan.after_read) + if args==0: scan.after_read() + elif args==1: scan.after_read(record) + elif args==2: scan.after_read(record, scan) + except AttributeError: + pass + +def __before_pass(scan, num_pass): + try: + if scan.before_pass != None: + args = __no_args(scan.before_pass) + if args==0:scan.before_pass() + elif args==1:scan.before_pass(num_pass) + elif args==2:scan.before_pass(num_pass, scan) + except AttributeError: + pass + +def __after_pass(scan, num_pass): + try: + if scan.after_pass != None: + args = __no_args(scan.after_pass) + if args==0:scan.after_pass() + elif args==1:scan.after_pass(num_pass) + elif args==2:scan.after_pass(num_pass, scan) + except AttributeError: + pass + +def __before_region(scan, num_region): + try: + if scan.before_region != None: + args = __no_args(scan.before_region) + if args==0:scan.before_region() + elif args==1:scan.before_region(num_region) + elif args==2:scan.before_region(num_region, scan) + except AttributeError: + pass + +#TODO implement scan callbacks with different method, as cannot extend Java classes +LineScan=scans.LineScan +AreaScan=scans.AreaScan +RegionScan=scans.RegionScan +VectorScan=scans.VectorScan +ContinuousScan=scans.ContinuousScan +TimeScan=scans.TimeScan +MonitorScan=scans.MonitorScan +BsScan=BsScan 
#ManualScan=scans.ManualScan
BinarySearch = scans.BinarySearch
HillClimbingSearch = scans.HillClimbingSearch
# Misspelled alias kept for backward compatibility with existing scripts.
HillClimbingSearcharySearch = HillClimbingSearch

class ManualScan():
    """Scan whose data points are supplied explicitly through append().

    Args:
        writables(list of Writable): positioners (names/devices) of the scan.
        readables(list of Readable): sensors (names/devices) of the scan.
        start(list of float, optional): nominal start positions.
        end(list of float, optional): nominal end positions.
        steps(list of int, optional): nominal number of steps.
        relative(bool, optional): if true positions are relative to current.
        dimensions(int, optional): overrides the scan dimensionality.
        pars: common scan options, handled by processScanPars.
    """
    def __init__(self, writables, readables, start=None, end=None, steps=None, relative=False, dimensions=None, **pars):
        start = to_list(start)
        end = to_list(end)
        steps = to_list(steps)
        self.scan = scans.ManualScan.ManualScanStr(writables, readables, start, end, steps, relative)
        self.dimensions = dimensions
        processScanPars(self.scan, pars)

    def start(self):
        """Begin the scan (opens the data context)."""
        self.scan.start()

    def end(self):
        """Finish the scan (closes the data context)."""
        self.scan.end()

    def append(self, setpoints, positions, values, timestamps=None):
        """Add one record: setpoints, actual positions, sensor values and optional timestamps."""
        self.scan.append(np_to_java(to_array(setpoints)), np_to_java(to_array(positions)), np_to_java(to_array(values)),
                         None if (timestamps is None) else np_to_java(to_array(timestamps)))

    def getDimensions(self):
        """Return the explicit dimensionality, or delegate to the inner scan."""
        # Bug fix: previously read self._dimensions / self._scan, attributes
        # that were never assigned (always raised AttributeError).
        if self.dimensions is None:
            return self.scan.getDimensions()
        return self.dimensions

def _no_args(f):
    """Return the number of explicit (non-self) parameters of a callback."""
    ret = f.__code__.co_argcount
    return (ret - 1) if isinstance(f, types.MethodType) else ret

class Callbacks():
    """ScanCallbacks proxy dispatching to the optional user callbacks in 'pars'.

    Pops before_read/after_read/before_pass/after_pass/before_region from the
    keyword dictionary and invokes each with 0, 1 or 2 arguments depending on
    its declared arity. Callback errors are printed, never propagated into the
    Java scan loop.
    """
    def __init__(self, pars):
        self.pars = pars
        self.before_read = pars.pop("before_read", None)
        self.after_read = pars.pop("after_read", None)
        self.before_pass = pars.pop("before_pass", None)
        self.after_pass = pars.pop("after_pass", None)
        self.before_region = pars.pop("before_region", None)
        self.proxy = jproxy(self, ["ch.psi.pshell.scan.ScanCallbacks"])

    def onBeforeScan(self, scan):
        pass

    def onAfterScan(self, scan):
        pass

    def onBeforeReadout(self, scan, pos):
        try:
            if self.before_read is not None:
                args = _no_args(self.before_read)
                if args == 0: self.before_read()
                elif args == 1: self.before_read(list(pos))
                elif args == 2: self.before_read(list(pos), scan)
        except:
            traceback.print_exc()

    def onAfterReadout(self, scan, record):
        try:
            if self.after_read is not None:
                args = _no_args(self.after_read)
                if args == 0: self.after_read()
                elif args == 1: self.after_read(record)
                elif args == 2: self.after_read(record, scan)
        except:
            traceback.print_exc()

    def onBeforePass(self, scan, num_pass):
        try:
            if self.before_pass is not None:
                args = _no_args(self.before_pass)
                if args == 0: self.before_pass()
                elif args == 1: self.before_pass(num_pass)
                elif args == 2: self.before_pass(num_pass, scan)
        except:
            traceback.print_exc()

    def onAfterPass(self, scan, num_pass):
        try:
            if self.after_pass is not None:
                args = _no_args(self.after_pass)
                if args == 0: self.after_pass()
                elif args == 1: self.after_pass(num_pass)
                elif args == 2: self.after_pass(num_pass, scan)
        except:
            traceback.print_exc()

    def onBeforeRegion(self, scan, num_region):
        try:
            # Consistency fix: 'is not None' like the other callbacks (was '!= None').
            if self.before_region is not None:
                args = _no_args(self.before_region)
                if args == 0: self.before_region()
                elif args == 1: self.before_region(num_region)
                elif args == 2: self.before_region(num_region, scan)
        except:
            traceback.print_exc()

def processScanPars(scan, pars):
    """Apply the common scan keyword arguments to a scan object.

    Pops the recognized options from 'pars' (remaining entries are forwarded
    to the context as command parameters by the caller).
    """
    scan.setCallbacks(Callbacks(pars).proxy)
    scan.setPlotTitle(pars.pop("title", None))
    scan.setHidden(pars.pop("hidden", False))
    scan.setSettleTimeout(pars.pop("settle_timeout", ScanBase.getScansSettleTimeout()))
    scan.setUseWritableReadback(pars.pop("use_readback", ScanBase.getScansUseWritableReadback()))
    scan.setInitialMove(pars.pop("initial_move", ScanBase.getScansTriggerInitialMove()))
    scan.setParallelPositioning(pars.pop("parallel_positioning", ScanBase.getScansParallelPositioning()))
    scan.setAbortOnReadableError(pars.pop("abort_on_error", ScanBase.getAbortScansOnReadableError()))
    scan.setRestorePosition(pars.pop("restore_position", ScanBase.getRestorePositionOnRelativeScans()))
    scan.setCheckPositions(pars.pop("check_positions", ScanBase.getScansCheckPositions()))
    scan.setMonitors(to_list(string_to_obj(pars.pop("monitors", None))))
    scan.setSnaps(to_list(string_to_obj(pars.pop("snaps", None))))
    scan.setDiags(to_list(string_to_obj(pars.pop("diags", None))))
    scan.setMeta(pars.pop("meta", None))
get_context().setCommandPars(scan, pars) + + + +################################################################################################### +#EPICS Channela abstraction +################################################################################################### + +def create_channel(name, type=None, size=None): + return Epics.newChannel(name, Epics.getChannelType(type), size) + + +################################################################################################### +#Help and access to function documentation +################################################################################################### +def _getBuiltinFunctions(filter = None): + ret = [] + for name in globals().keys(): + val = globals()[name] + if isinstance(val, types.FunctionType): + if filter is None or filter in name: + #Only "public" documented functions + if not name.startswith('_') and (val.__doc__ is not None): + ret.append(val) + return to_array(ret) + + +def _getBuiltinFunctionNames(filter = None): + ret = [] + for function in _getBuiltinFunctions(filter): + ret.append(function.__name__) + ret.sort() + return to_array(ret) + +def _getFunctionDoc(function): + if is_string(function): + if function not in globals(): + return + function = globals()[function] + if isinstance(function, types.FunctionType): + if '__doc__' in dir(function): + return function.__name__ + str(inspect.signature(function)) + "\n\n" + function.__doc__ + + +def help(object = None): + """ + Print help message for function or object (if available). + + Args: + object (any, optional): function or object to get help. + If null prints a list of the builtin functions. 
+ + Returns: + None + """ + if object is None: + print ("Built-in functions:") + for f in _getBuiltinFunctionNames(): + print ("\t" + f) + else: + if isinstance(object, types.FunctionType): + print (_getFunctionDoc(object)) + elif '__doc__' in dir(object): + print (object.__doc__) + +################################################################################################### +#Variable injection +################################################################################################### + +def _get_caller(): + #Not doing inspect.currentframe().f_back because inspect is slow to load + return sys._getframe(1).f_back if hasattr(sys, "_getframe") else None + +def inject(): + """Restore initial globals: re-inject devices and startup variables to the interpreter. + + Args: + None + + Returns: + None + """ + if __name__ == "__main__": + g=globals() + else: + g=_get_caller().f_globals + + i = get_context().getScriptManager().getInjections() + for k in i.keySet(): + g[k]=i[k] + + +################################################################################################### +#Script evaluation and return values +################################################################################################### + +def run(script_name, args = None, locals = None): + """Run script: can be absolute path, relative, or short name to be search in the path. + Args: + args(Dict ot List): Sets sys.argv (if list) or gobal variables(if dict) to the script. + locals(Dict): If not none sets the locals()for the runing script. + If locals is used then script definitions will not go to global namespace. 
+ + Returns: + The script return value (if set with set_return) + """ + script = get_context().getScriptManager().getLibrary().resolveFile(script_name) + if script is not None and os.path.isfile(script): + info = get_context().startScriptExecution(script_name, args) + try: + set_return(None) + if args is not None: + if isinstance(args,list) or isinstance(args,tuple): + sys.argv = list(args) + globals()["args"] = sys.argv + else: + for arg in args.keys(): + globals()[arg] = args[arg] + if (locals is None): + exec(open(script).read(), globals()) + else: + exec(open(script).read(), globals(), locals) + ret = get_return() + get_context().finishScriptExecution(info, ret) + return ret + except Exception as ex: + get_context().finishScriptExecution(info, ex) + raise ex + raise IOError("Invalid script: " + str(script_name)) + +def abort(): + """Abort the execution of ongoing task. It can be called from the script to quit. + + Args: + None + + Returns: + None + """ + get_context().abort() + raise KeyboardInterrupt() + +def set_return(value): + """Sets the script return value. This value is returned by the "run" function. + + Args: + value(Object): script return value. 
+ + Returns: + None + """ + global __THREAD_EXEC_RESULT__ + __THREAD_EXEC_RESULT__=value + return value #Used when parsing file + +def get_return(): + if __name__ == "__main__": + global __THREAD_EXEC_RESULT__ + return __THREAD_EXEC_RESULT__ + else: + return _get_caller().f_globals["__THREAD_EXEC_RESULT__"] + + + +################################################################################################### +#Builtin functions +################################################################################################### + + +################################################################################################### +#Scan commands +################################################################################################### + +def lscan(writables, readables, start, end, steps, latency=0.0, relative=False, passes=1, zigzag=False, **pars): + """Line Scan: positioners change together, linearly from start to end positions. + + Args: + writables(list of Writable): Positioners set on each step. + readables(list of Readable): Sensors to be sampled on each step. + start(list of float): start positions of writables. + end(list of float): final positions of writables. + steps(int or float or list of float): number of scan steps (int) or step size (float). + relative (bool, optional): if true, start and end positions are relative to current. + latency(float, optional): settling time for each step before readout, defaults to 0.0. + passes(int, optional): number of passes + zigzag(bool, optional): if true writables invert direction on each pass. + pars(keyworded variable length arguments, optional): scan optional named arguments: + - title(str, optional): plotting window name. + - hidden(bool, optional): if true generates no effects on user interface. + - before_read (function(positions, scan), optional): called on each step, before sampling. + - after_read (function(record, scan), optional): called on each step, after sampling. 
+ - before_pass (function(pass_num, scan), optional): called before each pass. + - after_pass (function(pass_num, scan), optional): callback after each pass. + - settle_timeout(int, optional): timeout for each positioner get to position. Default (-1) waits forever. + - initial_move (bool, optional): if true (default) perform move to initial position prior to scan start. + - parallel_positioning (bool, optional): if true (default) all positioners are set in parallel. + - abort_on_error (bool, optional): if true then aborts scan in sensor failures. Default is false. + - restore_position (bool, optional): if true (default) then restore initial position after relative scans. + - check_positions (bool, optional): if true (default) verifies if in correct positions after move finishes. + - monitors (list of Device, optional): device values are saved on every change event during the scan. + - snaps (list of Readable, optional): snapshot device values are saved before the scan. + - diags (list of Readable, optional): diagnostic device values are saved at each scan point. + - meta (dict, optional): scan metadata. + - Aditional arguments defined by set_exec_pars. + + Returns: + ScanResult. + """ + latency_ms=int(latency*1000) + writables=to_list(string_to_obj(writables)) + readables=to_list(string_to_obj(readables)) + start=to_list(start) + end=to_list(end) + if type(steps) is float or is_list(steps): + steps = to_list(steps) + scan = LineScan.LineScanStepSize(writables,readables, start, end , steps, relative, latency_ms, int(passes), zigzag) + else: + scan = LineScan.LineScanNumSteps(writables,readables, start, end , steps, relative, latency_ms, int(passes), zigzag) + processScanPars(scan, pars) + scan.start() + return scan.getResult() + +def vscan(writables, readables, vector, line = False, latency=0.0, relative=False, passes=1, zigzag=False, **pars): + """Vector Scan: positioner values provided in a vector. 
+ + Args: + writables(list of Writable): Positioners set on each step. + readables(list of Readable): Sensors to be sampled on each step. + vector (generator (floats or lists of float) or list of list of float): positioner values. + line (bool, optional): if true, processs as line scan (1d) + relative (bool, optional): if true, start and end positions are relative to current. + latency(float, optional): settling time for each step before readout, defaults to 0.0. + passes(int, optional): number of passes (disregarded if vector is a generator). + zigzag(bool, optional): if true writables invert direction on each pass (disregarded if vector is a generator). + pars(keyworded variable length arguments, optional): scan optional named arguments: + - title(str, optional): plotting window name. + - before_read (function(positions, scan), optional): called on each step, before sampling. + - after_read (function(record, scan), optional): called on each step, after sampling. + - before_pass (function(pass_num, scan), optional): called before each pass. + - after_pass (function(pass_num, scan), optional): callback after each pass. + - settle_timeout(int, optional): timeout for each positioner get to position. Default (-1) waits forever. + - initial_move (bool, optional): if true (default) perform move to initial position prior to scan start. + - parallel_positioning (bool, optional): if true (default) all positioners are set in parallel. + - abort_on_error (bool, optional): if true then aborts scan in sensor failures. Default is false. + - restore_position (bool, optional): if true (default) then restore initial position after relative scans. + - check_positions (bool, optional): if true (default) verifies if in correct positions after move finishes. + - monitors (list of Device, optional): device values are saved on every change event during the scan. + - snaps (list of Readable, optional): snapshot device values are saved before the scan. 
+ - diags (list of Readable, optional): diagnostic device values are saved at each scan point. + - meta (dict, optional): scan metadata. + - Aditional arguments defined by set_exec_pars. + + Returns: + ScanResult. + """ + latency_ms=int(latency*1000) + writables=to_list(string_to_obj(writables)) + readables=to_list(string_to_obj(readables)) + if inspect.isgenerator(vector): + vector = GenIterator(vector).proxy + scan = VectorScan(writables,readables, vector, line, relative, latency_ms) + else: + if len(vector) == 0: + vector.append([]) + elif (not is_list(vector[0])) and (not is_array(vector[0])): + vector = [[x,] for x in vector] + vector = np_to_java(to_array(vector, 'd'),'d') + scan = VectorScan(writables,readables, vector, line, relative, latency_ms, int(passes), zigzag) + processScanPars(scan, pars) + scan.start() + return scan.getResult() + +def ascan(writables, readables, start, end, steps, latency=0.0, relative=False, passes=1, zigzag=False, **pars): + """Area Scan: multi-dimentional scan, each positioner is a dimention. + + Args: + writables(list of Writable): Positioners set on each step. + readables(list of Readable): Sensors to be sampled on each step. + start(list of float): start positions of writables. + end(list of float): final positions of writables. + steps(list of int or list of float): number of scan steps (int) or step size (float). + latency(float, optional): settling time for each step before readout, defaults to 0.0. + relative (bool, optional): if true, start and end positions are relative to current. + passes(int, optional): number of passes + zigzag (bool, optional): if true writables invert direction on each row. + pars(keyworded variable length arguments, optional): scan optional named arguments: + - title(str, optional): plotting window name. + - before_read (function(positions, scan), optional): called on each step, before sampling. + - after_read (function(record, scan), optional): called on each step, after sampling. 
+ - before_pass (function(pass_num, scan), optional): called before each pass. + - after_pass (function(pass_num, scan), optional): callback after each pass. + - settle_timeout(int, optional): timeout for each positioner get to position. Default (-1) waits forever. + - initial_move (bool, optional): if true (default) perform move to initial position prior to scan start. + - parallel_positioning (bool, optional): if true (default) all positioners are set in parallel. + - abort_on_error (bool, optional): if true then aborts scan in sensor failures. Default is false. + - restore_position (bool, optional): if true (default) then restore initial position after relative scans. + - check_positions (bool, optional): if true (default) verifies if in correct positions after move finishes. + - monitors (list of Device, optional): device values are saved on every change event during the scan. + - snaps (list of Readable, optional): snapshot device values are saved before the scan. + - diags (list of Readable, optional): diagnostic device values are saved at each scan point. + - meta (dict, optional): scan metadata. + - Aditional arguments defined by set_exec_pars. + + Returns: + ScanResult. + """ + latency_ms=int(latency*1000) + writables=to_list(string_to_obj(writables)) + readables=to_list(string_to_obj(readables)) + start=to_list(start) + end=to_list(end) + steps = to_list(steps) + if type(steps[0]) is int: + scan = AreaScan.AreaScanNumSteps(writables,readables, start, end , steps, relative, latency_ms, int(passes), zigzag) + else: + scan = AreaScan.AreaScanStepSize(writables,readables, start, end , steps, relative, latency_ms, int(passes), zigzag) + processScanPars(scan, pars) + scan.start() + return scan.getResult() + + +def rscan(writable, readables, regions, latency=0.0, relative=False, passes=1, zigzag=False, **pars): + """Region Scan: positioner scanned linearly, from start to end positions, in multiple regions. 
+ + Args: + writable(Writable): Positioner set on each step, for each region. + readables(list of Readable): Sensors to be sampled on each step. + regions (list of tuples (float,float, int) or (float,float, float)): each tuple define a scan region + (start, stop, steps) or (start, stop, step_size) + relative (bool, optional): if true, start and end positions are relative to current. + latency(float, optional): settling time for each step before readout, defaults to 0.0. + passes(int, optional): number of passes + zigzag(bool, optional): if true writable invert direction on each pass. + pars(keyworded variable length arguments, optional): scan optional named arguments: + - title(str, optional): plotting window name. + - before_read (function(positions, scan), optional): called on each step, before sampling. + - after_read (function(record, scan), optional): called on each step, after sampling. + - before_pass (function(pass_num, scan), optional): called before each pass. + - after_pass (function(pass_num, scan), optional): callback after each pass. + - before_region (function(region_num, scan), optional): callback before entering a region. + - settle_timeout(int, optional): timeout for each positioner get to position. Default (-1) waits forever. + - initial_move (bool, optional): if true (default) perform move to initial position prior to scan start. + - parallel_positioning (bool, optional): if true (default) all positioners are set in parallel. + - abort_on_error (bool, optional): if true then aborts scan in sensor failures. Default is false. + - restore_position (bool, optional): if true (default) then restore initial position after relative scans. + - check_positions (bool, optional): if true (default) verifies if in correct positions after move finishes. + - monitors (list of Device, optional): device values are saved on every change event during the scan. + - snaps (list of Readable, optional): snapshot device values are saved before the scan. 
+ - diags (list of Readable, optional): diagnostic device values are saved at each scan point. + - meta (dict, optional): scan metadata. + - Aditional arguments defined by set_exec_pars. + + Returns: + ScanResult. + """ + start=[] + end=[] + steps=[] + for region in regions: + start.append(region[0]) + end.append(region[1]) + steps.append(region[2]) + latency_ms=int(latency*1000) + writable=string_to_obj(writable) + readables=to_list(string_to_obj(readables)) + start=to_list(start) + end=to_list(end) + if type(steps[0]) is float: + scan = RegionScan.RegionScanStepSize(writable,readables, start, end , steps, relative, latency_ms, int(passes), zigzag) + else: + scan = RegionScan.RegionScanNumSteps(writable,readables, start, end , steps, relative, latency_ms, int(passes), zigzag) + + processScanPars(scan, pars) + scan.start() + return scan.getResult() + +def cscan(writables, readables, start, end, steps, latency=0.0, time=None, relative=False, passes=1, zigzag=False, **pars): + """Continuous Scan: positioner change continuously from start to end position and readables are sampled on the fly. + + Args: + writable(Speedable or list of Motor): A positioner with a getSpeed method or + a list of motors. + readables(list of Readable): Sensors to be sampled on each step. + start(float or list of float): start positions of writables. + end(float or list of float): final positions of writabless. + steps(int or float or list of float): number of scan steps (int) or step size (float). + latency(float, optional): sleep time in each step before readout, defaults to 0.0. + time (float, seconds): if not None then speeds are set according to time. + relative (bool, optional): if true, start and end positions are relative to current. + passes(int, optional): number of passes + pars(keyworded variable length arguments, optional): scan optional named arguments: + - title(str, optional): plotting window name. 
+ - before_read (function(positions, scan), optional): called on each step, before sampling. + - after_read (function(record, scan), optional): called on each step, after sampling. + - before_pass (function(pass_num, scan), optional): called before each pass. + - after_pass (function(pass_num, scan), optional): callback after each pass. + - abort_on_error (bool, optional): if true then aborts scan in sensor failures. Default is false. + - restore_position (bool, optional): if true (default) then restore initial position after relative scans. + - monitors (list of Device, optional): device values are saved on every change event during the scan. + - snaps (list of Readable, optional): snapshot device values are saved before the scan. + - diags (list of Readable, optional): diagnostic device values are saved at each scan point. + - meta (dict, optional): scan metadata. + - Aditional arguments defined by set_exec_pars. + + Returns: + ScanResult. + """ + latency_ms=int(latency*1000) + readables=to_list(string_to_obj(readables)) + writables=to_list(string_to_obj(writables)) + start=to_list(start) + end=to_list(end) + #A single Writable with fixed speed + if time is None: + if is_list(steps): steps=steps[0] + scan = ContinuousScan.ContinuousScanSingle(writables[0],readables, start[0], end[0] , int(steps), relative, latency_ms, int(passes), zigzag) + #A set of Writables with speed configurable + else: + if type(steps) is float or is_list(steps): + steps = to_list(steps) + if type(steps) is int: + scan = ContinuousScan.ContinuousScanNumSteps(writables,readables, start, end , steps, time, relative, latency_ms, int(passes), zigzag) + else: + steps = to_list(steps) + scan = ContinuousScan.ContinuousScanStepSize(writables,readables, start, end , steps, time, relative, latency_ms, int(passes), zigzag) + + processScanPars(scan, pars) + scan.start() + return scan.getResult() + + +def hscan(config, writable, readables, start, end, steps, passes=1, zigzag=False, **pars): + 
"""Hardware Scan: values sampled by external hardware and received asynchronously. + + Args: + config(dict): Configuration of the hardware scan. The "class" key provides the implementation class. + Other keys are implementation specific. + writable(Writable): A positioner appropriated to the hardware scan type. + readables(list of Readable): Sensors appropriated to the hardware scan type. + start(float): start positions of writable. + end(float): final positions of writables. + steps(int or float): number of scan steps (int) or step size (float). + passes(int, optional): number of passes + pars(keyworded variable length arguments, optional): scan optional named arguments: + - title(str, optional): plotting window name. + - after_read (function(record, scan), optional): callback on each step, after sampling. + - before_pass (function(pass_num, scan), optional): callback before each scan pass execution. + - after_pass (function(pass_num, scan), optional): callback after each scan pass execution. + - abort_on_error (bool, optional): if true then aborts scan in sensor failures. Default is false. + - monitors (list of Device, optional): device values are saved on every change event during the scan. + - snaps (list of Readable, optional): snapshot device values are saved before the scan. + - meta (dict, optional): scan metadata. + + Returns: + ScanResult. + """ + cls = Class.forName(config["class"]) + readables=to_list(string_to_obj(readables)) + scan = cls(config, writable,readables, start, end , steps, int(passes), zigzag) + processScanPars(scan, pars) + scan.start() + return scan.getResult() + +def bscan(stream, records, timeout = None, passes=1, **pars): + """BS Scan: records all values in a beam synchronous stream. + + Args: + stream(Stream): stream object or list of chanel names to build stream from + records(int): number of records to store + timeout(float, optional): maximum scan time in seconds. 
+ passes(int, optional): number of passes + pars(keyworded variable length arguments, optional): scan optional named arguments: + - title(str, optional): plotting window name. + - before_read (function(positions, scan), optional): called on each step, before sampling. + - after_read (function(record, scan), optional): called on each step, after sampling. + - before_pass (function(pass_num, scan), optional): called before each pass. + - after_pass (function(pass_num, scan), optional): callback after each pass. + - monitors (list of Device, optional): device values are saved on every change event during the scan. + - snaps (list of Readable, optional): snapshot device values are saved before the scan. + - diags (list of Readable, optional): diagnostic device values are saved at each scan point. + - meta (dict, optional): scan metadata. + - Aditional arguments defined by set_exec_pars. + + Returns: + ScanResult. + """ + timeout_ms=int(timeout*1000) if ((timeout is not None) and (timeout>=0)) else -1 + if not is_list(stream): + stream=string_to_obj(stream) + scan = BsScan(stream,int(records), timeout_ms, int(passes)) + processScanPars(scan, pars) + scan.start() + return scan.getResult() + +def tscan(readables, points, interval, passes=1, fixed_rate=True, **pars): + """Time Scan: sensors are sampled in fixed time intervals. + + Args: + readables(list of Readable): Sensors to be sampled on each step. + points(int): number of samples. + interval(float): time interval between readouts. Minimum temporization is 0.001s + passes(int, optional): number of passes + fixed_rate(bool, optional): in the case of delays in sampling: + If True tries to preserve to total scan time, accelerating following sampling. + If False preserves the interval between samples, increasing scan time. + pars(keyworded variable length arguments, optional): scan optional named arguments: + - title(str, optional): plotting window name. 
+ - before_read (function(positions, scan), optional): called on each step, before sampling. + - after_read (function(record, scan), optional): called on each step, after sampling. + - before_pass (function(pass_num, scan), optional): called before each pass. + - after_pass (function(pass_num, scan), optional): callback after each pass. + - abort_on_error (bool, optional): if true then aborts scan in sensor failures. Default is false. + - monitors (list of Device, optional): device values are saved on every change event during the scan. + - snaps (list of Readable, optional): snapshot device values are saved before the scan. + - diags (list of Readable, optional): diagnostic device values are saved at each scan point. + - meta (dict, optional): scan metadata. + - Aditional arguments defined by set_exec_pars. + + Returns: + ScanResult. + """ + interval= max(interval, 0.001) #Minimum temporization is 1ms + interval_ms=int(interval*1000) + readables=to_list(string_to_obj(readables)) + scan = TimeScan(readables, points, interval_ms, int(passes), bool(fixed_rate)) + processScanPars(scan, pars) + scan.start() + return scan.getResult() + +def mscan(trigger, readables, points=-1, timeout=None, asynchronous=True, take_initial=False, passes=1, **pars): + """Monitor Scan: sensors are sampled when received change event of the trigger device. + + Args: + trigger(Device or list of Device): Source of the sampling triggering. + readables(list of Readable): Sensors to be sampled on each step. + If trigger has cache and is included in readables, it is not read + for each step, but the change event value is used. + points(int, optional): number of samples (-1 for undefined). + timeout(float, optional): maximum scan time in seconds (None for no timeout). + asynchronous(bool, optional): if True then records are sampled and stored on event change callback. Enforce + reading only cached values of sensors. + If False, the scan execution loop waits for trigger cache update. 
Do not make + cache only access, but may loose change events. + take_initial(bool, optional): if True include current values as first record (before first trigger). + passes(int, optional): number of passes + pars(keyworded variable length arguments, optional): scan optional named arguments: + - title(str, optional): plotting window name. + - before_read (function(positions, scan), optional): called on each step, before sampling. + - after_read (function(record, scan), optional): called on each step, after sampling. + - before_pass (function(pass_num, scan), optional): called before each pass. + - after_pass (function(pass_num, scan), optional): callback after each pass. + - abort_on_error (bool, optional): if true then aborts scan in sensor failures. Default is false. + - monitors (list of Device, optional): device values are saved on every change event during the scan. + - snaps (list of Readable, optional): snapshot device values are saved before the scan. + - diags (list of Readable, optional): diagnostic device values are saved at each scan point. + - meta (dict, optional): scan metadata. + - Aditional arguments defined by set_exec_pars. + + Returns: + ScanResult. + """ + raise Exception("Not implemented") + timeout_ms=int(timeout*1000) if ((timeout is not None) and (timeout>=0)) else -1 + trigger = string_to_obj(trigger) + readables=to_list(string_to_obj(readables)) + scan = MonitorScan(trigger, readables, points, timeout_ms, asynchronous, take_initial, int(passes)) + processScanPars(scan, pars) + scan.start() + return scan.getResult() + +def escan(name, **pars): + """Epics Scan: execute an Epics Scan Record. + + Args: + name(str): Name of scan record. + title(str, optional): plotting window name. + pars(keyworded variable length arguments, optional): scan optional named arguments: + - title(str, optional): plotting window name. + - Aditional arguments defined by set_exec_pars. + + Returns: + ScanResult. 
+ """ + scan = EpicsScan(name) + processScanPars(scan, pars) + scan.start() + return scan.getResult() + + +def bsearch(writables, readable, start, end, steps, maximum = True, strategy = "Normal", latency=0.0, relative=False, **pars): + """Binary search: searches writables in a binary search fashion to find a local maximum for the readable. + + Args: + writables(list of Writable): Positioners set on each step. + readable(Readable): Sensor to be sampled. + start(list of float): start positions of writables. + end(list of float): final positions of writables. + steps(float or list of float): resolution of search for each writable. + maximum (bool , optional): if True (default) search maximum, otherwise minimum. + strategy (str , optional): "Normal": starts search midway to scan range and advance in the best direction. + Uses orthogonal neighborhood (4-neighborhood for 2d) + "Boundary": starts search on scan range. + "FullNeighborhood": Uses complete neighborhood (8-neighborhood for 2d) + + latency(float, optional): settling time for each step before readout, defaults to 0.0. + relative (bool, optional): if true, start and end positions are relative to current. + pars(keyworded variable length arguments, optional): scan optional named arguments: + - title(str, optional): plotting window name. + - before_read (function(positions, scan), optional): called on each step, before sampling. + - after_read (function(record, scan), optional): called on each step, after sampling. + - settle_timeout(int, optional): timeout for each positioner get to position. Default (-1) waits forever. + - parallel_positioning (bool, optional): if true (default) all positioners are set in parallel. + - abort_on_error (bool, optional): if true then aborts scan in sensor failures. Default is false. + - restore_position (bool, optional): if true (default) then restore initial position after relative scans. 
+ - check_positions (bool, optional): if true (default) verifies if in correct positions after move finishes. + - Aditional arguments defined by set_exec_pars. + + Returns: + SearchResult. + """ + latency_ms=int(latency*1000) + writables=to_list(string_to_obj(writables)) + readable=string_to_obj(readable) + start=to_list(start) + end=to_list(end) + steps = to_list(steps) + strategy = BinarySearch.Strategy.valueOf(strategy) + scan = BinarySearch(writables,readable, start, end , steps, maximum, strategy, relative, latency_ms) + processScanPars(scan, pars) + scan.start() + return scan.getResult() + +def hsearch(writables, readable, range_min, range_max, initial_step, resolution, filter=1, maximum=True, latency=0.0, relative=False, **pars): + """Hill Climbing search: searches writables in decreasing steps to find a local maximum for the readable. + Args: + writables(list of Writable): Positioners set on each step. + readable(Readable): Sensor to be sampled. + range_min(list of float): minimum positions of writables. + range_max(list of float): maximum positions of writables. + initial_step(float or list of float):initial step size for for each writable. + resolution(float or list of float): resolution of search for each writable (minimum step size). + filter(int): number of aditional steps to filter noise + maximum (bool , optional): if True (default) search maximum, otherwise minimum. + latency(float, optional): settling time for each step before readout, defaults to 0.0. + relative (bool, optional): if true, start and end positions are relative to current. + pars(keyworded variable length arguments, optional): scan optional named arguments: + - title(str, optional): plotting window name. + - before_read (function(positions, scan), optional): called on each step, before sampling. + - after_read (function(record, scan), optional): called on each step, after sampling. + - settle_timeout(int, optional): timeout for each positioner get to position. 
Default (-1) waits forever. + - parallel_positioning (bool, optional): if true (default) all positioners are set in parallel. + - abort_on_error (bool, optional): if true then aborts scan in sensor failures. Default is false. + - restore_position (bool, optional): if true (default) then restore initial position after relative scans. + - check_positions (bool, optional): if true (default) verifies if in correct positions after move finishes. + - Aditional arguments defined by set_exec_pars. + + Returns: + SearchResult. + """ + latency_ms=int(latency*1000) + writables=to_list(string_to_obj(writables)) + readable=string_to_obj(readable) + range_min=to_list(range_min) + range_max=to_list(range_max) + initial_step = to_list(initial_step) + resolution = to_list(resolution) + scan = HillClimbingSearch(writables,readable, range_min, range_max , initial_step, resolution, filter, maximum, relative, latency_ms) + processScanPars(scan, pars) + scan.start() + return scan.getResult() + + +################################################################################################### +#Data plotting +################################################################################################### + +def plot(data, name = None, xdata = None, ydata=None, title=None): + """Request one or multiple plots of user data (1d, 2d or 3d). + + Args: + data: array or list of values. For multiple plots, list of arrays. + name(str or list of str, optional): plot name. For multiple plots, list of names. + xdata: array or list of values. For multiple plots, list of arrays. + ydata: array or list of values. For multiple plots, list of arrays. + title(str, optional): plotting window name. + + Returns: + List of Plot. 
+ """ + data = json_to_obj(data) + xdata = json_to_obj(xdata) + ydata = json_to_obj(ydata) + if is_java_instance(data, Table): + if is_list(xdata): + xdata = np_to_java(to_array(xdata, 'd'), 'd') + return get_context().plot(data,xdata,name,title) + + if is_java_instance(data, ScanResult): + return get_context().plot(data,title) + + if (name is not None) and is_list(name): + if len(name)==0: + name=None; + else: + if (data==None): + data = [] + for n in name: + data.append([]) + plots = reflect.Array.newInstance(Class.forName("ch.psi.pshell.data.PlotDescriptor"), len(data)) + for i in range (len(data)): + plotName = None if (name is None) else name[i] + x = xdata + if is_list(x) and len(x)>0 and (is_list(x[i]) or is_java_instance(x[i] , List) or is_array(x[i])): + x = x[i] + y = ydata + if is_list(y) and len(y)>0 and (is_list(y[i]) or is_java_instance(y[i] , List) or is_array(y[i])): + y = y[i] + plots[i] = PlotDescriptor(plotName , np_to_java(to_array(data[i], 'd'), 'd'), np_to_java(to_array(x, 'd'), 'd'), np_to_java(to_array(y, 'd'), 'd')) + return get_context().plot(plots,title) + else: + plot = PlotDescriptor(name, np_to_java(to_array(data, 'd'), 'd'), np_to_java(to_array(xdata, 'd'), 'd'), np_to_java(to_array(ydata, 'd'), 'd')) + return get_context().plot(plot,title) + +def get_plots(title=None): + """Return all current plots in the plotting window given by 'title'. + + Args: + title(str, optional): plotting window name. + + Returns: + List of Plot. + """ + return get_context().getPlots(title) + +def get_plot_snapshots(title = None, file_type = "png", size = None, temp_path = get_context().getSetup().getContextPath()): + """Returns list with file names of plots snapshots from a plotting context. + + Args: + title(str, optional): plotting window name. + file_type(str, optional): "png", "jpg", "bmp" or "gif" + size(array, optional): [width, height] + temp_path(str, optional): path where the files will be generated. 
+ + Returns: + list of strings + """ + time.sleep(0.1) #Give some time to plot to be finished - it is not sync with acquisition + ret = [] + if size != None: + size = Dimension(size[0], size[1]) + plots = get_plots(title) + for i in range(len(plots)): + p = plots[i] + name = p.getTitle() + if name is None or name == "": + name = str(i) + file_name = os.path.abspath(temp_path + "/" + name + "." + file_type) + p.saveSnapshot(file_name , file_type, size) + ret.append(file_name) + return ret + + +################################################################################################### +#Data access +################################################################################################### + +def load_data(path, index=0, shape=None, root=None): + """Read data from the current persistence context or from data files. + + Args: + path(str): Path to group or dataset relative to the root. + If path is in the format 'root|path', or else if 'root' is defined, then + reads from data file given by root. Otherwise uses current data persistence file. + root(str, optional): data file. + index(int or list, optional): + if integer, data depth (used for 3D datasets returning a 2d matrix) + If a list, specifies the full coordinate for multidimensional datasets. + shape(list, optional): only valid if index is a list, provides the shape of the data array. + In this case return a flattened a one-dimensional array. + + Returns: + Data array + """ + dm=get_context().getDataManager() + if index is not None and is_list(index): + slice = dm.getData(path, index, shape) if (root==None) else dm.getData(root, path, index, shape) + else: + slice = dm.getData(path, index) if (root==None) else dm.getData(root, path, index) + return slice.sliceData + +def get_attributes(path, root=None): + """Get the attributes from group or dataset. + + Args: + path(str): Path to group or dataset relative to the root. 
+ If path is in the format 'root|path', or else if 'root' is defined, then + reads from data file given by root. Otherwise uses current data persistence file. + root(str, optional): data file. + Returns: + Dictionary + """ + if (root is None): + return get_context().getDataManager().getAttributes(path) + return get_context().getDataManager().getAttributes(root, path) + +def get_data_info(path, root=None): + """Get information about the group or dataset. + + Args: + path(str): Path to group or dataset relative to the current persistence context root. + If path is in the format 'root|path', or else if 'root' is defined, then + reads from data file given by root. Otherwise uses current data persistence file. + root(str, optional): data file. + Returns: + Dictionary + """ + if (root is None): + return get_context().getDataManager().getInfo(path) + return get_context().getDataManager().getInfo(root, path) + +def save_dataset(path, data, type='d', unsigned=False, features=None): + """Save data into a dataset within the current persistence context. + + Args: + path(str): Path to dataset relative to the current persistence context root. + type(str, optional): array type - 'd'=double (default), 'b'=byte, 'h'=short, 'i'=int, + 'l'=long, 'f'=float, 'c'=char, 's'=String, 'z'=bool, 'o'=Object + data (array or list): data to be saved + unsigned(boolean, optional): create a dataset of unsigned type. + features(dictionary, optional): See create_dataset. + + Returns: + Dictionary + """ + data = np_to_java(to_array(data, type), type) + get_context().getDataManager().setDataset(path, data, unsigned, features) + +def create_group(path): + """Create an empty dataset within the current persistence context. + + Args: + path(str): Path to group relative to the current persistence context root. 
+ Returns: + None + """ + get_context().getDataManager().createGroup(path) + +def create_dataset(path, type, unsigned=False, dimensions=None, features=None): + """Create an empty dataset within the current persistence context. + + Args: + path(str): Path to dataset relative to the current persistence context root. + type(str): array type 'b' = byte, 'h' = short, 'i' = int, 'l' = long, 'f' = float, + 'd' = double, 'c' = char, 's' = String, 'z'=bool, 'o' = Object + unsigned(boolean, optional) + dimensions(tuple of int, optional): a 0 value means variable length in that dimension. + features(dictionary, optional): storage features for the dataset, format specific. + Keys for HDF5: "layout": "compact", "contiguous" or "chunked" + "compression": True, "max" or deflation level from 1 to 9 + "shuffle": Byte shuffle before compressing. + "chunk": tuple, setting the chunk size + Default: No compression, contiguous for fixed size arrays, chunked for variable size, compact for scalars. + Returns: + None + """ + get_context().getDataManager().createDataset(path, ScriptUtils.getType(type), unsigned, dimensions, features) + +def create_table(path, names, types=None, lengths=None, features=None): + """Create an empty table (dataset of compound type) within the current persistence context. + + Args: + path(str): Path to dataset relative to the current persistence context root. + names(list of strings): name of each column + types(array of str): 'b' = byte, 'h' = short, 'i' = int, 'l' = long, 'f' = float, + 'd' = double, 'c' = char, 's' = String, 'o' = Object + Note:A '[' prefix on type name indicates an array type. + lengths(list of int): the array length for each columns(0 for scalar types). + features(dictionary, optional): See create_dataset. 
+ Returns: + None + """ + type_classes = [] + if (types is not None): + for i in range (len(types)): + type_classes.append(ScriptUtils.getType(types[i])) + get_context().getDataManager().createTable(path, names, type_classes, lengths, features) + +def append_dataset(path, data, index=None, type='d', shape=None): + """Append data to dataset. + + Args: + path(str): Path to dataset relative to the current persistence context root. + data(number or array or list): name of each column. + index(int or list, optional): if set then add the data in a specific position in the dataset. + If integer is the index in an array (data must be 1 order lower than dataset) + If a list, specifies the full coordinate for multidimensional datasets. + type(str, optional): array type 'b' = byte, 'h' = short, 'i' = int, 'l' = long, 'f' = float, + 'd' = double, 'c' = char, 's' = String, 'o' = Object + default: 'd' (convert data to array of doubles) + shape(list, optional): only valid if index is a list, provides the shape of the data array. + In this case data must be a flattened one-dimensional array. + Returns: + None + """ + data = np_to_java(to_array(data, type)) + if index is None: + get_context().getDataManager().appendItem(path, data) + else: + if is_list(index): + if shape is None: + shape = [len(index)] + get_context().getDataManager().setItem(path, data, index, shape) + else: + get_context().getDataManager().setItem(path, data, index) + +def append_table(path, data): + """Append data to a table (dataset of compound type) + + Args: + path(str): Path to dataset relative to the current persistence context root. + data(list): List of valus for each column of the table. 
+ Returns: + None + """ + if is_list(data): + arr = reflect.Array.newInstance(Class.forName("java.lang.Object"),len(data)) + for i in range (len(data)): + if is_list(data[i]): + arr[i] = to_array(data[i], 'd') + else: + arr[i] = np_to_java(data[i]) + data=arr + get_context().getDataManager().appendItem(path, data) + +def flush_data(): + """Flush all data files immediately. + + Args: + None + Returns: + None + """ + get_context().getDataManager().flush() + +def set_attribute(path, name, value, unsigned = False): + """Set an attribute to a group or dataset. + + Args: + path(str): Path to dataset relative to the current persistence context root. + name(str): name of the attribute + value(Object): the attribute value + unsigned(bool, optional): if applies, indicate if value is unsigned. + Returns: + None + """ + if is_list(value): + value = Convert.toStringArray(to_array(value)) + elif type(value) == numpy.ndarray: + value = np_to_java(value) + get_context().getDataManager().setAttribute(path, name, value, unsigned) + +def log(log, data_file=None): + """Writes a log to the system log and data context - if there is an ongoing scan or script execution. + + Args: + log(str): Log string. + data_file(bool, optional): if true logs to the data file, in addiction to the system logger. + If None(default) appends to data file only if it exists. + + Returns: + None + """ + get_context().scriptingLog(str(log)) + if data_file is None: + data_file = get_exec_pars().isOpen() + if data_file: + try: + get_context().getDataManager().appendLog(str(log)) + except: + #Do not generate exception if cannot write to data file + pass + +def set_exec_pars(**args): + """ Configures the script execution parameters, overriding the system configuration. + + Args: + args(optional arguments): + name(str): value of the {name} tag. Default is the running script name. + type(str): value of the {type} tag. Default is empty. + This field can be used to store data in sub-folders of standard location. 
+ path(str): If defined provides the full path name for data output root (overriding config)) + The tag {data} can be used to enter a path relative to the standard data folder. + layout(str): Change data layout. + format(str): Change data format. + split(scan or True): Split scan data to another table. If set to True in scan command then split every pass. + depth_dim(int): dimension of 2d-matrixes in 3d datasets. + save(bool): Change option to auto save scan data. + flush(bool): Change option to flush file on each record. + keep(bool): Change option keep scan records in memory. If false do not add records to scan result. + preserve(bool): Change option to preserve device types. If false all values are converted to double. + setpoints(bool): Save the positioner setpoints too. + verbose(bool): Enable options to save additional information (output, script). + compression(obj): True for enabling default compression, int for specifying deflation level. + Device or list of devices for specifying devices to be compressed. + shuffle(obj): True for enabling shuffling before compression. + Device or list of devices for specifying devices to be shuffled. + contiguous(obj): True for setting contiguous datasets for all devices. + Device or list of devices for specifying device datasets to be contiguous. + seq(int): Set next data file sequence number. + open(bool): If true create data output path immediately. If false closes output root, if open. + reset(bool): If true reset the scan counter - the {count} tag and set the timestamp to now. + group(str): Change layout group name for scans + tag(str): Change tag for scan names (affecting group or dataset name, according to layout) + then, then_success, then_exception(str): Sets statement to be executed on the completion of current. + defaults(bool): If true restore the original execution parameters. + + Graphical preferences: + line_plots(list): list of devices with enforced line plots. 
+ range(str or list): "none", "auto", [min_x, max_x] or [min_x, max_x, min_y, max_y] + display(bool): if false disables scan data plotting and printing. + print_scan(bool): Enable/disables scan data printing to console. + plot_disabled(bool): Enable/disable scan plot + plot_layout (str):"Horizontal", "Vertical" or "Grid" + table_disabled(bool): Enable/disable scan table + enabled_plots (list of str or Readable): list of devices (Readables) to be plotted + plot_types(dict): Dictionary - Plot name(Readable or String) : Plot type(String or int) + auto_range(bool): If true automatic range scan plots x-axis. + manual_range(tuple): : Set range (min_x, max_x) or (min_x, max_x, min_y, max_y). None sets fixed range. + manual_range_y(tuple): Set y range (min_y, max_y). None sets fixed range. + domain_axis(str): Set the domain axis source: "Time", "Index", or a readable name. Default: first positioner. + status(str): set application status + """ + get_context().setExecutionPars(args) + +def get_exec_pars(): + """ Returns script execution parameters. + + Returns: + ExecutionParameters object. Fields: + name (str): execution name - {name} tag. + type (str): execution type - {type} tag. + path (str): output data root. + seq(int): data file sequence number. + open (bool): true if the output data root has been opened. + layout (str): data output layout. If None then using the configuration. + save (bool): auto save scan data option. + flush (bool): flush file on each record. + index (int): current scan index. + group (str): data group currently used for scan data storage. + if no ongoing scan return "/" if within a script, or else None if a console command. + scanPath (str): dataset or group corresponding to current scan. + scan (Scan): reference to current scan, if any + source (CommandSource): return the source of the script or command. + background (bool): return False if executing in main interpreter thread . + debug (bool): True if executing from statements in editor. 
+ simulation (bool): global simulation flag. + aborted (bool): True if execution has been aborted + """ + return get_context().getExecutionPars() + + +################################################################################################### +#EPICS +################################################################################################### + +def _adjust_channel_value(value, var_type=None): + if (value is None): + return value + if (var_type is not None): + if is_list(value): + var_type = var_type.replace(',','').replace('[','') + ret = [] + for item in value: + ret.append(_adjust_channel_value(item), var_type) + value = ret + else: + var_type = var_type.lower() + if var_type=='b': + value = byte(value) + elif var_type=='i': + value = short(value) + elif var_type=='l': + value = int(value) + elif var_type=='f': + value = float(value) + elif var_type=='d': + value = float(value) + elif var_type=='s': + value = str(value) + + if isinstance(value,tuple): + value = list(value) + if isinstance(value,list): + list_type = type(value[0]) + array_types = { + int: "i", + long: "l", + float:"d", + str:Class.forName("java.lang.String"), + } + array_type = array_types.get(type(value[0]),'d') + array = PyArray(array_type) + array.fromlist(value) + value=array + return value + +def caget(name, type=None, size=None, meta = False ): + """Reads an Epics PV. + + Args: + name(str): PV name + type(str, optional): type of PV. By default gets the PV standard field type. + Scalar values: 'b', 'i', 'l', 'd', 's'. + Array values: '[b', '[i,', '[l', '[d', '[s'. + size (int, optional): for arrays, number of elements to be read. Default read all. + meta (bool, optional): if true gets channel value and metadata (timestamp, severity). 
+ + Returns: + PV value if meta is false, otherwise a dictionary containing PV value and metadata + """ + if meta: + return Epics.getMeta(name, Epics.getChannelType(type), size) + return Epics.get(name, Epics.getChannelType(type), size) + +def cawait(name, value, timeout=None, comparator=None, type=None, size=None): + """Wait for a PV to have a given value. + + Args: + name(str): PV name + value (obj): value to compare to + timeout(float, optional): time in seconds to wait. If None, waits forever. + comparator(java.util.Comparator or float, optional): if None waits for equality. + If a numeric value is provided, waits for channel to be in range. + type(str, optional): type of PV. By default gets the PV standard field type. + Scalar values: 'b', 'i', 'l', 'd', 's'. + Array values: '[b', '[i,', '[l', '[d', '[s'. + size (int, optional): for arrays, number of elements to be read. Default read all. + + Returns: + None + """ + if (timeout is not None): + timeout = int(timeout*1000) + value = _adjust_channel_value(value) + Epics.waitValue(name, value, comparator, timeout, Epics.getChannelType(type), size) + +def caput(name, value, timeout = None): + """Writes to an Epics PV. + + Args: + name(str): PV name + value(scalar, string or array): new PV value. + timeout(int, optional): timeout in seconds to the write. If None waits forever to completion. + + Returns: + None + """ + value=_adjust_channel_value(value) + if (timeout is not None): + timeout = int(timeout*1000) + return Epics.put(name, value, timeout) + +def caputq(name, value): + """Writes to an Epics PV and does not wait. + + Args: + name(str): PV name + value(scalar, string or array): new PV value. + + Returns: + None + """ + value=_adjust_channel_value(value) + return Epics.putq(name, value) + +def camon(name, type=None, size=None, wait = sys.maxsize): + """Install a monitor to an Epics PV and print value changes. + + Args: + name(str): PV name + type(str, optional): type of PV. 
By default gets the PV standard field type. + Scalar values: 'b', 'i', 'l', 'd', 's'. + Array values: '[b', '[i,', '[l', '[d', '[s'. + size (int, optional): for arrays, number of elements to be read. Default read all. + wait (int, optional): blocking time for this function. By default blocks forever. + Returns: + None + """ + start = time.time() + with create_channel_device(name, type=type, size=size, device_name=name, monitored=True) as dev: + print (dev.read()) + while(True): + if (wait != sys.maxsize) and ((time.time()-start)>wait): + break + if dev.waitCacheChange(100): + print (dev.take()) + +def create_channel_device(channel_name, type=None, size=None, device_name=None, monitored=False): + """Create a device from an EPICS PV. + + Args: + channel_name(str): PV name + type(str, optional): type of PV. By default gets the PV standard field type. + Scalar values: 'b', 'i', 'l', 'd', 's'. + Array values: '[b', '[i,', '[l', '[d', '[s'. + size (int, optional): for arrays, number of elements to be read. Default read all. + device_name (str, optional): device name (if different from hannel_name. + Returns: + None + """ + dev = Epics.newChannelDevice(channel_name if (device_name is None) else device_name , channel_name, Epics.getChannelType(type)) + if get_context().isSimulation(): + dev.setSimulated() + dev.initialize() + if (size is not None): + dev.setSize(size) + if (monitored): + dev.setMonitored(True) + return dev + + +################################################################################################### +#Java threading is not possible with as cannot extend Runnable and Callable +################################################################################################### + +def fork(*functions): + """Start execution of functions in parallel. + + Args: + *functions(function references) + + Returns: + List of callable + """ + raise Exception("Not implemented") + +def join(futures): + """Wait parallel execution of functions. 
+ + Args: + futures(Future or list of Future) : as returned from fork + + Returns: + None + """ + raise Exception("Not implemented") + +def parallelize(*functions): + """Equivalent to fork + join + + Args: + *functions(function references) + + Returns: + None + """ + raise Exception("Not implemented") + + + +################################################################################################### +#Background task control. +################################################################################################### + +def start_task(script, delay = 0.0, interval = -1): + """Start a background task + + Args: + script(str): Name of the script implementing the task + delay(float, optional): time in seconds for the first execution. + Default starts immediately. + interval(float, optional): time in seconds for between execution. + If negative (default), single-execution. + + Returns: + Task object. + """ + raise Exception("Not implemented") + +def stop_task(script, force = False): + """Stop a background task + + Args: + script(str): Name of the script implementing the task + force(boolean, optional): interrupt current execution, if running + + Returns: + None + """ + raise Exception("Not implemented") + + +################################################################################################### +#Versioning +################################################################################################### + +def commit(message, force = False): + """Commit the changes to the repository. + + Args: + message(str): commit message + force(bool, optional): if False, raises exception if no change detected in repo + + Returns: + None + """ + get_context().commit(message, force) + +def diff(): + """Return list of changes in the repository + + Args: + None + + Returns: + None + """ + return get_context().diff() + +def checkout_tag(tag): + """Checkout a tag name. + + Args: + tag(str): tag name. 
+ + Returns: + None + """ + get_context().checkoutTag(tag) + +def checkout_branch(tag): + """Checkout a local branch name. + + Args: + tag(str): branch name. + + Returns: + None + """ + get_context().checkoutLocalBranch(tag) + +def pull_repository(): + """Pull from remote repository. + + """ + get_context().pullFromUpstream() + +def push_repository(all_branches=True, force=False, push_tags=False): + """Push to remote repository. + + Args: + all_branches(boolean, optional): all branches or just current. + force(boolean, optional): force flag. + push_tags(boolean, optional): push tags. + + Returns: + None + """ + get_context().pushToUpstream(all_branches, force, push_tags) + +def cleanup_repository(): + """Performs a repository cleanup. + + Args: + None + + Returns: + None + """ + get_context().cleanupRepository() + +################################################################################################### +#Device Pool +################################################################################################### + +def get_device(device_name): + """Returns a configured device (or imaging source) by its name. + + Args: + device_name(str): name of the device. + + Returns: + device + """ + return get_context().getDevicePool().getByName(device_name) + +def add_device(device, force = False): + """Add a device (or imaging source) to the device pool. + + Args: + device(Device or Source) + force(boolean, optional): if true then dispose existing device with same name. + Otherwise will fail in case of name clash. + + Returns: + True if device was added, false if was already in the pool, or exception in case of name clash. + """ + proxy_method = getattr(device, "get_proxy", None) + if callable(proxy_method): + device=device.get_proxy() + return get_context().getDevicePool().addDevice(device, force, True) + +def remove_device(device): + """Remove a device (or imaging source) from the device pool. 
+ + Args: + device(Device or Source) + + Returns: + bool: true if device was removed. + """ + device=string_to_obj(device) + return get_context().getDevicePool().removeDevice(device) + +def set_device_alias(device, alias): + """Deprecated, use "dev.set_alias" instead. Set a device alias to be used in scans (datasets and plots). + + Args: + device(Device) + alias(str): replace device name in scans. + + Returns: + None + """ + device=string_to_obj(device) + device.setAlias(alias) + +def stop(): + """Stop all devices implementing the Stoppable interface. + + Args: + None + + Returns: + None + """ + get_context().stopAll() + +def update(): + """Update all devices. + + Args: + None + + Returns: + None + """ + get_context().updateAll() + +def reinit(dev = None): + """Re-initialize devices. + + Args: + dev(Device, optional): Device to be re-initialized (if None, all devices not yet initialized) + + Returns: + List with devices not initialized. + """ + if dev is not None: + dev=string_to_obj(dev) + return get_context().reinit(dev) + return to_list(get_context().reinit()) + +def create_device(url, parent=None): + """Create a device form a definition string(see InlineDevice) + + Args: + url(str or list of string): the device definition string (or list of strings) + parent(bool, optional): parent device + + Returns: + The created device (or list of devices) + """ + if parent is not None: + parent=string_to_obj(parent) + return InlineDevice.create(url, parent) + + +def create_averager(dev, count, interval=0.0, name = None, monitored = False): + """Creates and initializes and averager for dev. + + Args: + dev(Device): the source device + count(int): number of samples + interval(float, optional): sampling interval(s). If negative sampling is made on data change event. + name(str, optional): sets the name of the device (default is: averager) + monitored (bool, optional): if true then averager processes asynchronously. 
+ + Returns: + Averager device + """ + dev = string_to_obj(dev) + if is_java_instance(dev, ReadableArray): + av = ArrayAverager(dev, count, int(interval*1000)) if (name is None) else ArrayAverager(name, dev, count, int(interval*1000)) + else: + av = Averager(dev, count, int(interval*1000)) if (name is None) else Averager(name, dev, count, int(interval*1000)) + av.initialize() + if (monitored): + av.monitored = True + return av + +def tweak(dev, step, is2d=False): + """Move one or more positioners in steps using the arrow keys. + Steps are increased/decreased using the shift and control keys. + + Args: + dev(Positioner or List): the device or list of devices to move. + step(float or List): step size or list of step sizes + is2d(bool, optional): if true moves second motor with up/down arrows. + """ + if (get_exec_pars().isBackground()): return + dev,step = to_list(string_to_obj(dev)),to_list(step) + while (not (get_exec_pars().getAborted())): + key=get_context().waitKey(0) + for i in range(len(dev)): + if not is2d or i==0: + if key == 0x25: dev[i].moveRel(-step[i]) #Left + elif key == 0x27: dev[i].moveRel(step[i]) #Right + if key in (0x10, 0x11): + step[i] = step[i]*2 if key == 0x10 else step[i]/2 + print ("Tweak step for " + dev[i].getName() + " set to: "+str(step[i])) + if is2d and len(dev)>1: + if key == 0x26: dev[1].moveRel(step[1]) #Top + elif key == 0x28: dev[1].moveRel(-step[1]) #Bottom + + +################################################################################################### +#Maths +################################################################################################### + +def arrmul(a, b): + """Multiply 2 series of the same size. + + Args: + + a(subscriptable) + b(subscriptable) + + Returns: + List + """ + return map(mul, a, b) + +def arrdiv(a, b): + """Divide 2 series of the same size. 
+ + Args: + + a(subscriptable) + b(subscriptable) + + Returns: + List + """ + return map(truediv, a, b) + +def arradd(a, b): + """Add 2 series of the same size. + + Args: + + a(subscriptable) + b(subscriptable) + + Returns: + List + """ + return map(add, a, b) + +def arrsub(a, b): + """Subtract 2 series of the same size. + + Args: + + a(subscriptable) + b(subscriptable) + + Returns: + List + """ + return map(sub, a, b) + +def arrabs(a): + """Returns the absolute of all elements in series. + + Args: + + a(subscriptable) + + Returns: + List + """ + return map(abs, a) + +def arroff(a, value = "mean"): + """Subtract offset to all elemets in series. + + Args: + + a(subscriptable) + type(int or str, optional): value to subtract from the array, or "mean" or "min". + + Returns: + List + """ + if value=="mean": + value = mean(a) + elif value=="min": + value = min(a) + return [x-value for x in a] + +def mean(data): + """Calculate the mean of a sequence. + + Args: + data(subscriptable) + + Returns: + Mean of the elements in the object. + """ + return functools.reduce(lambda x, y: x + y, data) / len(data) + +def variance(data): + """Calculate the variance of a sequence. + + Args: + data(subscriptable) + + Returns: + Variance of the elements in the object. + """ + c = mean(data) + ss = sum((x-c)**2 for x in data) + return ss/len(data) + +def stdev(data): + """Calculate the standard deviation of a sequence. + + Args: + data(subscriptable) + + Returns: + Standard deviation of the elements in the object. + """ + return variance(data)**0.5 + + +def center_of_mass(data, x = None): + """Calculate the center of mass of a series, and its rms. 
+ + Args: + + data(subscriptable) + x(list, tuple, array ..., optional): x coordinates + + Returns: + Tuple (com, rms) + """ + if x is None: + x = Arr.indexesDouble(len(data)) + data_sum = sum(data) + if (data_sum==0): + return float('nan') + xmd = arrmul( x, data) + com = sum(xmd) / data_sum + xmd2 = arrmul( x, xmd) + com2 = sum(xmd2) / data_sum + rms = math.sqrt(abs(com2 - com * com)) + return (com, rms) + +def poly(val, coefs): + """Evaluates a polinomial: (coefs[0] + coefs[1]*val + coefs[2]*val^2... + + Args: + val(float): value + coefs (list of loats): polinomial coefficients + Returns: + Evaluated function for val + """ + r = 0 + p = 0 + for c in coefs: + r = r + c * math.pow(val, p) + p = p + 1 + return r + +def histogram(data, range_min = None, range_max = None, bin = 1.0): + """Creates histogram on data. + + Args: + data (tuple, array, List or Array): input data can be multi-dimensional or nested. + range_min (int, optional): minimum histogram value. Default is floor(min(data)) + range_max (int, optional): maximul histogram value. Default is ceil(max(data)) + bin(int or float, optional): if int means number of bins. If float means bin size. Default = 1.0. 
+ Returns: + tuple: (ydata, xdata) + """ + if range_min is None: range_min = math.floor(min(flatten(data))) + if range_max is None: range_max = math.ceil(max(flatten(data))) + if type(bin) is float: + bin_size = bin + n_bin = int(math.ceil(float(range_max - range_min)/bin_size)) + else: + n_bin = bin + bin_size = float(range_max - range_min)/bin + + result = [0] * n_bin + for d in flatten(data): + b = int( float(d - range_min) / bin_size) + if (b >=0) and (b < n_bin): + result[b] = result[b] + 1 + return (result, frange(range_min, range_max, bin_size)) + +def cmp(a, b): + return (a > b) - (a < b) + +def _turn(p, q, r): + return cmp((q[0] - p[0])*(r[1] - p[1]) - (r[0] - p[0])*(q[1] - p[1]), 0) + +def _keep(hull, r): + while len(hull) > 1 and _turn(hull[-2], hull[-1], r) != 1: + hull.pop() + return (not len(hull) or hull[-1] != r) and hull.append(r) or hull + +def convex_hull(point_list=None, x=None, y=None): + """Returns the convex hull from a list of points. Either point_list or x,y is provided. 
+ (Alhorithm taken from http://tomswitzer.net/2010/03/graham-scan/) + Args: + point_list (array of tuples, optional): arrays of the points + x (array of float, optional): array with x coords of points + y (array of float, optional): array with y coords of points + Returns: + Array of points or (x,y) + """ + is_point_list = point_list is not None + if not point_list: + point_list=[] + for i in range(len(x)): + if((x[i] is not None) and (y[i] is not None)): point_list.append((x[i], y[i])) + point_list.sort() + lh,uh = functools.reduce(_keep, point_list, []), functools.reduce(_keep, reversed(point_list), []) + ret = lh.extend(uh[i] for i in range(1, len(uh) - 1)) or lh + if not is_point_list: + x, y = [], [] + for i in range(len(ret)): + x.append(ret[i][0]) + y.append(ret[i][1]) + return (x,y) + return ret + +################################################################################################### +#Utilities +################################################################################################### + +def get_setting(name=None): + """Get a persisted script setting value. + + Args: + name (str): name of the setting. + Returns: + String with setting value or None if setting is undefined. + If name is None then returns map with all settings. + """ + return get_context().getSettings() if (name is None) else get_context().getSetting(name) + +def set_setting(name, value): + """Set a persisted script setting value. + + Args: + name (str): name of the setting. + value (obj): value for the setting, converted to string (if None then remove the setting). + Returns: + None. + """ + get_context().setSetting(name, value) + +def exec_cmd(cmd, stderr_raise_ex = True): + """Executes a shell command. If errors happens raises an exception. + + Args: + cmd (str or list of str): command process input and parameters. If stderr_raise_ex is set then raise exception if stderr is not null. + Returns: + Output of command process. 
+ """ + import subprocess + result = subprocess.run(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE if stderr_raise_ex else subprocess.STDOUT) + ret=result.stdout.decode('utf-8') + err=result.stderr.decode('utf-8') + if stderr_raise_ex and (err is not None) and err!="": + raise Exception(err) + return ret + +def bsget(channel, modulo=1, offset=0, timeout = 5.0): + """Reads an values a bsread stream, using the default provider. + + Args: + channel(str or list of str): channel name(s) + module(int, optional): stream modulo + offset(int, optional): stream offset + timeout(float, optional): stream timeout in secs + Returns: + BS value or list of values + """ + channels = to_list(channel) + ret = Stream.readChannels(channels, modulo, offset, int(timeout * 1000)) + if is_string(channel): + return ret[0] + return ret + +def flatten(data): + """Flattens multi-dimentional or nested data. + + Args: + data (tuple, array, List or Array): input data + Returns: + Iterator on the flattened data. + """ + if is_array(data): + if not data.typecode.startswith('['): + return data + + import itertools + return itertools.chain(*data) + +def frange_gen(start, finish, step): + while ((step >= 0.0) and (start <= finish)) or ((step < 0.0) and (start >= finish)): + yield start + start += step + +def frange(start, finish, step, enforce_finish = False, inclusive_finish = False): + """Create a list with a range of float values (a float equivalent to "range"). + + Args: + start(float): start of range. + finish(float): end of range. + step(float): step size. + enforce_finish(boolean, optional): adds the final element even if range was not exact. + inclusive_finish(boolean, optional): if false finish is exclusive (like in "range"). 
+ + Returns: + list + """ + step = float(step) + ret = list(frange_gen(start, finish, step)) + if len(ret) > 0: + if inclusive_finish == False: + if ret[-1]==finish: + del ret[-1] + if enforce_finish and ret[-1]!=finish: + ret.append(finish) + return ret + +def notify(subject, text, attachments = None, to=None): + """Send email message. + + Args: + subject(str): Message subject. + text(str): Message body. + attachments(list of str, optional): list of files to be attached (expansion tokens are allowed). + to (list ofd str, optional): recipients. If None uses the recipients defined in mail.properties. + Returns: + None + """ + get_context().notify(subject, text, to_list(attachments), to_list(to)) + +def expand_path(path, timestamp=-1): + """Expand path containing tokens. + + Args: + path(str): path name. + timestamp(int): If not defined(-1), uses now. + Returns: + Expanded path name. + """ + + return get_context().getSetup().expandPath(path, timestamp) + +################################################################################################### +#UI +################################################################################################### + +def set_status(status): + """Set the application status. + + Args: + status(str): new status. 
+ + Returns: + None + """ + set_preference(Preference.STATUS, status) + +def setup_plotting( enable_plots=None, enable_table=None,plot_list=None, line_plots=None, range=None, domain=None, defaults=None): + if defaults == True: set_preference(Preference.DEFAULTS, True) + if enable_plots is not None: set_preference(Preference.PLOT_DISABLED, not enable_plots) + if enable_table is not None: set_preference(Preference.TABLE_DISABLED, not enable_table) + if plot_list is not None: set_preference(Preference.ENABLED_PLOTS, None if plot_list == "all" else plot_list) + if line_plots is not None: + plots = None + if line_plots != "none": + plots = {} + for p in line_plots: plots[p]=1 + set_preference(Preference.PLOT_TYPES, plots) + if range is not None: + if range == "none": set_preference(Preference.AUTO_RANGE, None) + elif range == "auto": set_preference(Preference.AUTO_RANGE, True) + else: set_preference(Preference.MANUAL_RANGE, range) + if domain is not None: set_preference(Preference.DOMAIN_AXIS, domain) + +def set_preference(preference, value): + """Hints to graphical layer: + + Args: + preference(Preference): Enum of preference types: + PLOT_DISABLED: enable/disable scan plot (True/False) + PLOT_LAYOUT: "Horizontal", "Vertical" or "Grid" + TABLE_DISABLED: enable/disable scan table (True/False) + ENABLED_PLOTS: select Readables to be plotted (list of Readable or String (names)) + PLOT_TYPES: Dictionary - Plot name(Readable or String) : Plot type(String or int) + PRINT_SCAN: Print scan records to console + AUTO_RANGE: Automatic range scan plots x-axis + MANUAL_RANGE: Manually set scan plots x-axis + MANUAL_RANGE_Y: Manually set scan plots y-axis + DOMAIN_AXIS: Set the domain axis source: "Time", "Index", or a readable name. 
+ Default(None): first positioner + STATUS: set application status + value(object): preference value + + Returns: + None + """ + value = to_array(value, 'o') #If list then convert to Object array + get_context().setPreference(preference, value) + +def get_string(msg, default = None, alternatives = None, password = False): + """ + Reads a string from UI + Args: + msg(str): display message. + default(str, optional): value displayed when window is shown. + alternatives(list of str, optional): if provided presents a combo box instead of an editing field. + password(boolean, optional): if True hides entered characters. + + Returns: + String entered of null if canceled + """ + if password : + return get_context().getPassword(msg, None) + return get_context().getString(msg, str(default) if (default is not None) else None, alternatives) + +def get_option(msg, type = "YesNoCancel"): + """ + Gets an option from UI + Args: + msg(str): display message. + type(str, optional): 'YesNo','YesNoCancel' or 'OkCancel' + + Returns: + 'Yes', 'No', 'Cancel' + """ + return get_context().getOption(msg, type) + +def show_message(msg, title=None, blocking = True): + """ + Pops a blocking message to UI + + Args: + msg(str): display message. + title(str, optional): dialog title + """ + get_context().showMessage(msg, title, blocking) + +def show_panel(device, title=None): + """ + Show, if exists, the panel relative to this device. + + Args: + device(Device or str or BufferedImage): device + title only apply to BufferedImage objects. For devices the title is the device name. 
+ """ + if type(device) is BufferedImage: + device = DirectSource(title, device) + device.initialize() + if is_string(device): + device = get_device(device) + return get_context().showPanel(device) + + + +################################################################################################### +#Executed on startup +################################################################################################### + +def on_ctrl_cmd(cmd): + #print ("Control command: ", cmd) + pass + +if __name__ == "__main__": + #Handle control command server + if ("ctrl_cmd_socket" in globals()) and (ctrl_cmd_socket is not None): + if ("ctrl_cmd_task_thread" in globals()) and (ctrl_cmd_task_thread.is_alive()): + ctrl_cmd_socket.close() + ctrl_cmd_task_thread.join(5.0) + if ctrl_cmd_task_thread.is_alive(): + raise Exception("Cannot stop control command task thread") + + def ctlm_cmd_task(port,parent_thread, rc): + try: + global ctrl_cmd_socket + print ("Starting control command task") + quit=False + with socket.socket(family=socket.AF_INET, type=socket.SOCK_DGRAM) as ctrl_cmd_socket: + ctrl_cmd_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1) + ctrl_cmd_socket.bind(("127.0.0.1", port)) + ctrl_cmd_socket.settimeout(2.0) + while(quit==False) and (run_count==rc) and parent_thread.is_alive() and not ctrl_cmd_socket._closed: + try: + msg,add = ctrl_cmd_socket.recvfrom(100) + except socket.timeout: + continue + cmd =msg.decode('UTF-8') + on_ctrl_cmd(cmd) + if cmd=="exit": + quit=True + ctrl_cmd_socket.sendto("ack".encode('UTF-8'), add) + finally: + print("Quitting control command task") + + ctrl_cmd_task_thread = threading.Thread(target=functools.partial(ctlm_cmd_task, CTRL_CMD_PORT, threading.currentThread(), run_count)) + ctrl_cmd_task_thread.setDaemon(True) + ctrl_cmd_task_thread.start() diff --git a/script/Lib/statsutils.py b/script/Lib/statsutils.py new file mode 100644 index 0000000..1d84c4d --- /dev/null +++ b/script/Lib/statsutils.py @@ -0,0 +1,191 @@ 
+################################################################################################### +# Utilities for generating reports from command statistics files +################################################################################################### + +#CsvJdbc JAR file must be downloaded to extensions folder: +#http://central.maven.org/maven2/net/sourceforge/csvjdbc/csvjdbc/1.0.34/csvjdbc-1.0.34.jar + + +import java.sql.DriverManager as DriverManager +import java.sql.ResultSet as ResultSet +import java.util.Properties as Properties +import java.lang.Class as Class +import os +from startup import get_context, expand_path +import ch.psi.pshell.core.CommandManager.CommandStatisticsFileRange as CommandStatisticsFileRange + +stmt = None +STAT_COLUMN_NAMES = ["Command","Args","Source","Start","End","Background","Result","Return"] +def get_stats_connection(): + global stmt + Class.forName("org.relique.jdbc.csv.CsvDriver"); + db = os.path.abspath(expand_path("{home}/statistics")) + props = Properties() + props.put("fileExtension", ".csv") + props.put("separator", ";") + props.put("timestampFormat", "dd/MM/yy HH:mm:ss.SSS") + props.put("indexedFiles", "true"); + props.put("columnTypes", "String,String,String,Timestamp,Timestamp,Boolean,String,String"); + + fileRange = get_context().commandManager.commandStatisticsConfig.fileRange + if fileRange==CommandStatisticsFileRange.Daily: + props.put("fileTailPattern", "(\\d+)_(\\d+)_(\\d+)"); + props.put("fileTailParts", "Year,Month,Day"); + elif fileRange==CommandStatisticsFileRange.Monthly: + props.put("fileTailPattern", "(\\d+)_(\\d+)"); #props.put("fileTailPattern", "-(\\d+)_(\\d+)"); + props.put("fileTailParts", "Year,Month"); + elif fileRange==CommandStatisticsFileRange.Yearly: + props.put("fileTailPattern", "(\\d+)"); + props.put("fileTailParts", "Year"); + + conn = DriverManager.getConnection("jdbc:relique:csv:" + db, props); + stmt = 
conn.createStatement(ResultSet.TYPE_SCROLL_SENSITIVE,ResultSet.CONCUR_READ_ONLY); + return conn + +def _get_count(sql): + ret = 0 + results = stmt.executeQuery("SELECT COUNT(*) AS count FROM . WHERE " + sql) + if results.first(): + ret = results.getInt("count") + return ret + +def _add_sql_time(sql, start, end): + if start: + if len(start)==8: + start = start + " 00:00:00.000" + sql = sql + " AND Start>='" + start + "'" + if end: + if len(end)==8: + end = end + " 00:00:00.000" + sql = sql + " AND (\"End\"<'" + end + "')" + return sql + +def get_count(command= "%%", start = None, end = None, result= "%%"): + sql = "Command LIKE '"+ command +"' AND Result LIKE '"+ result +"'" + sql = _add_sql_time(sql, start, end) + return _get_count(sql) + +def get_return_count(command= "%%", start = None, end = None, ret= "%%"): + sql = "Command LIKE '"+ command +"' AND Return = '"+ ret +"'" + sql = _add_sql_time(sql, start, end) + return _get_count(sql) + +def get_cmd_stats(command = "%", start = None, end = None): + s = get_count(command, start, end, "success") + a = get_count(command, start, end, "abort") + e = get_count(command, start, end, "error") + return (s,a,e) + +def get_errors(command = "%", start = None, end = None): + sql = "SELECT Return, Count(Return) as count FROM . WHERE Command LIKE '"+ command +"' AND Result='error'" + sql = _add_sql_time(sql, start, end) + sql = sql + " GROUP BY Return ORDER BY count DESC" + results = stmt.executeQuery(sql) + ret = [] + while results.next(): + ret.append((results.getInt("count"), results.getString("Return"))) + return ret + + +def get_cmd_records(command = "%", start = None, end = None, result= "%%"): + sql = "SELECT * FROM . 
WHERE Command LIKE '"+ command +"' AND Result LIKE '"+ result +"'" + sql = _add_sql_time(sql, start, end) + results = stmt.executeQuery(sql) + ret = [] + while results.next(): + rec={} + for col in STAT_COLUMN_NAMES: + rec[col]= results.getString(col) + ret.append(rec) + return ret + +def get_commands(commands =None, start = None, end = None): + ret = [] + if (commands is None) or (len(commands)==0): + sql = "SELECT * FROM . WHERE Command != ''" + sql = _add_sql_time(sql, start, end) + sql = sql + " GROUP BY Command" + results = stmt.executeQuery(sql) + while results.next(): + cmd = results.getString("Command") + if cmd and not " " in cmd: + ret.append(cmd) + else: + for cmd in commands: + if get_count(cmd, start, end) >0 : + ret.append(cmd) + return ret + +def print_cmd_stats(command = "%", start = None, end = None): + print "-----------------------------------------------------------" + print "Statistics from ", start , " to ", end + (s,a,e) = get_cmd_stats(command, start, end) + t=s+a+e #get_count(command, start, end, "%") + print "Command: " , command , " Records: ", t + if t>0: + print "%-10s %7.2f%% - %d" % ("Success", (float(s)/t) * 100, s) + print "%-10s %7.2f%% - %d" % ("Abort", (float(a)/t) * 100, a) + print "%-10s %7.2f%% - %d" % ("Error", (float(e)/t) * 100, e) + + print "\nErrors:" + print "%5s %s" % ("Count", "Error") + errors = get_errors(command, start, end) + for error in errors: + print "%5d %s" % (error[0], error[1]) + print "-----------------------------------------------------------" + +def print_cmd_records(command = "%", start = None, end = None, result= "%%"): + print "-----------------------------------------------------------" + print "Records from ", start , " to ", end + info = get_cmd_records(command, start, end, result) + print "Command: " , command , " Result: ", result, " Records: ", len(info) + + for col in STAT_COLUMN_NAMES: + print col+ "; " , + print + + for cmd in info: + s = "" + for col in STAT_COLUMN_NAMES: + s = s + 
cmd[col]+ "; " + print s + print "-----------------------------------------------------------" + +def print_stats(commands = None, start = None, end = None): + print "-----------------------------------------------------------" + print "Statistics from ", start , " to ", end + print "%-20s %-5s %8s %8s %8s" % ("Command", "Total", "Success", "Abort", "Error") + cmds = get_commands(commands) + for cmd in cmds: + (s,a,e) = get_cmd_stats(cmd, start, end) + t=s+a+e + if t>0: + print "%-20s %-5d %7.2f%% %7.2f%% %7.2f%%" % (cmd, t, (float(s)/t) * 100, (float(a)/t) * 100, (float(e)/t) * 100) + else: + print "%-20s %-5d" % (cmd, t) + print "-----------------------------------------------------------" + + + + +if __name__=='__main__': + conn = get_stats_connection() + + #Print stats of all commands, with no time range + print_stats() + + cmds = ["%scan1%", "%scan2%"] + start= "01/03/19" + end= "01/04/19" + + #Print stats all commands containing 'scan1' and 'scan2' in the month 03.2019 + print_stats(cmds, start, end) + + #Print individual statistics, including error count, for commands containing 'scan1' and 'scan2' + for cmd in cmds: + print_cmd_stats (cmd, start, end) + + #Print all records for commands containing 'scan1' + print_cmd_records("%scan1%%", start, end, "error") + conn.close() + diff --git a/script/local.py b/script/local.py new file mode 100755 index 0000000..39306ed --- /dev/null +++ b/script/local.py @@ -0,0 +1,7 @@ +################################################################################################### +# Deployment specific global definitions - executed after startup.py +################################################################################################### + +run("psss/psss") + + diff --git a/script/psss/CameraScan.py b/script/psss/CameraScan.py new file mode 100755 index 0000000..3884f99 --- /dev/null +++ b/script/psss/CameraScan.py @@ -0,0 +1,73 @@ +#Scan the PSSS camera position +#Purpose: +#To set or confirm the camera is 
positioned with the measured spectrum in the centre of the spectral integration window + +#If running from editor +if get_exec_pars().source == CommandSource.ui: +#User inputs - define travel range of camera + RANGE_FROM = -17 + RANGE_TO = -11 + STEPS = 20 + NUM_SHOTS= 10 #100 + PLOT=None +p = plot(None, title="Data")[0] if (PLOT is None) else PLOT +p.clear() +p.removeMarker(None) +p.setLegendVisible(True) +p.addSeries(LinePlotSeries("PSSS Spectrum Average")) + + +run("cpython/wrapper") + +if not is_dry_run(): + cam_x=Channel("SARFE10-PSSS059:MOTOR_X5.VAL", name="cam_x") +else: + cam_x=DummyRegister("cam_x") + +av = create_averager(psss_spectrum_y, NUM_SHOTS, interval=-1, name="spectrum_average") +av_samples = av.samples +av_samples.alias = "spectrum_samples" + + +#Scan and take data +def after_read(record, scan): + p.getSeries(0).setData(psss_spectrum_x.take(), record[av]) + p.setTitle("Cam X = %1.3f" %(record[cam_x])) + +r = lscan(cam_x, (av, av_samples), RANGE_FROM, RANGE_TO, STEPS, latency=0.0, after_read = after_read, save=False) +average, samples, cam_range = r.getReadable(0), r.getReadable(1), r.getPositions(0) + +signal_centre, projection = get_signal_centre(samples, cam_range) + +#Set max position +cam_x.write(signal_centre) +cam_x.close() + + + +""" +plt.figure(figsize=[10,5]) +plt.subplot(121) +plt.title('PSSS scan of camera position') +plt.pcolormesh(np.arange(0,Scan_spec.shape[2]), Cam_range, Scan_spec.mean(axis=1),cmap='CMRmap') +plt.xlim([0,Scan_spec.shape[2]]) +plt.xlabel('Camera pixel dispersive direction') +plt.ylabel('Set PSSS cam_x _pos [mm] \n'+PSSS_cam_x_PV_name[0:-4]) +plt.subplot(122) +plt.plot(projection,Cam_range,linewidth = 2, color = 'orange',label ='projected signal') +plt.title('Spectrum centred at %.1f [mm] (from signal max) \n trace should have hard edges'%signal_centre) +plt.xticks([]) +plt.legend() +plt.grid(True) +""" +#PLOT.clear() +#plot_data(PLOT, projection, "Data", xdata=cam_range, show_points = True, color=Color.BLUE) 
+#p,pars = plot_gauss_fit(cam_range, projection, gauss_pars=None, p=PLOT, title = "Data") + +p.clear() +p.setTitle("") +plot_data(p, projection, "Projection", xdata=cam_range, show_points = True, color=Color.BLUE) +p.addMarker(signal_centre, None, "Signal Centre=" + str(round(signal_centre,2)), Color.LIGHT_GRAY) + + +set_return(signal_centre) \ No newline at end of file diff --git a/script/psss/CrystalHeightScan.py b/script/psss/CrystalHeightScan.py new file mode 100755 index 0000000..49adc69 --- /dev/null +++ b/script/psss/CrystalHeightScan.py @@ -0,0 +1,81 @@ +############################################################################### +#Scan the PSSS crystal height +#Purpose: +#The PSSS signal level is very sensitive to the crystal height. This script will scan the height and set the position to the maximum signal + +if get_exec_pars().source == CommandSource.ui: +#User inputs - define travel range of camera + RANGE_FROM = -0.8 + RANGE_TO = -1.7 + STEPS = 10 #20 + NUM_SHOTS= 10 # 100 + PLOT=None + +p = plot(None, title="Data")[0] if (PLOT is None) else PLOT +p.clear() +p.removeMarker(None) +p.setLegendVisible(True) +p.addSeries(LinePlotSeries("PSSS Spectrum Average")) + +run("cpython/wrapper") + + +#Setup and functions setup¶ +if not is_dry_run(): + xstal_height=Channel("SARFE10-PSSS059:MOTOR_Y3.VAL", name="xstal_height") +else: + xstal_height=DummyRegister("xstal_height") + +av = create_averager(psss_spectrum_y, NUM_SHOTS, interval=-1, name="spectrum_average") +av_samples = av.samples +av_samples.alias = "spectrum_samples" + +#Scan and take data +def after_read(record, scan): + p.getSeries(0).setData(psss_spectrum_x.take(), record[av]) + p.setTitle("Xtal Height = %1.3f" %(record[xstal_height])) + +r = lscan(xstal_height, (av, av_samples), RANGE_FROM, RANGE_TO, STEPS, latency=2.0, after_read = after_read, save=False) + +#User inputs - define travel range of crystal +#It is unlikely these values need to be changed +average, samples, xstal_range = 
r.getReadable(0), r.getReadable(1), r.getPositions(0) + +#return maxium position +[amp, mean_val, sigma, offset], projection = fit_crystal_height(RANGE_FROM, RANGE_TO, STEPS+1, samples) +print(mean_val) + +if not (RANGE_FROM < mean_val < RANGE_TO or RANGE_TO < mean_val < RANGE_FROM): + raise Exception ("Invalid fit mean: " + str(mean_val)) + + +#Set max position +#Cell below will push the maximum position to the xstal height +xstal_height.write(mean_val) +xstal_height.close() + +#Plots +""" +plt.figure(figsize=[10,5]) +plt.subplot(121) +plt.title('PSSS scan of crystal height') +plt.pcolormesh(energy_axis, xstal_range, Scan_spec.mean(axis=1),cmap='CMRmap') +plt.xlim([energy_axis[0],energy_axis[-1]]) +plt.ylim([xstal_range[0], xstal_range[-1]]) +plt.xlabel('PSSS energy axis') +plt.ylabel('Set crystal position [mm] \n'+PSSS_xstal_height_name[0:-4]) +plt.subplot(122) +plt.plot(projection,xstal_range,linewidth = 2, color = 'orange',label ='projected signal') +plt.plot(gaus(xstal_range_fit,*popt),xstal_range_fit,'r:',label='fit') +plt.ylim([xstal_range[0], xstal_range[-1]]) +plt.title('Signal max at %.3f [mm] (from fit)'%popt[1]) +plt.xticks([]) +plt.legend() +plt.grid(True) +""" +p.clear() +p.setTitle("") +plot_gauss_fit(xstal_range, projection, gauss_pars=(offset, amp, mean_val, sigma), p=p, title = "Data") + + +set_return(mean_val) diff --git a/script/psss/EnergyScan.py b/script/psss/EnergyScan.py new file mode 100755 index 0000000..2691046 --- /dev/null +++ b/script/psss/EnergyScan.py @@ -0,0 +1,101 @@ +############################################################################### +#Scan the PSSS photon energy +#Purpose: To find and centre the PSSS photon energy so the measured spectrum is centred on the camera chip + +#PARAMETERS +#User inputs - define energy range to scan below by running the appropiate cell +#Below is for a large scan range assuming offset from machine upto $\pm$ 300 eV + +#If running from editor +if get_exec_pars().source == CommandSource.ui: + 
RANGE_OFF = None + RANGE_FROM = 11100 + RANGE_TO = 11300 + STEPS = 5 #60 + NUM_SHOTS= 10 #100 + PLOT=None +p = plot(None, title="Data")[0] if (PLOT is None) else PLOT +p.clear() +p.removeMarker(None) +p.setLegendVisible(True) +p.addSeries(LinePlotSeries("PSSS Spectrum Average")) + +if RANGE_OFF is not None: + RANGE_FROM = energy_machine.read()-RANGE_OFF + RANGE_TO = energy_machine.read()+RANGE_OFF + +run("cpython/wrapper") + + + + +#Scan and take data +class PSSS_energy(Writable): + def write(self, value): + if not is_dry_run(): + psss_energy.write(value) + exec_cpython("/ioc/modules/qt/PSSS_motion.py", args = ["-m1", "SARFE10-PSSS059"]) + # python / ioc / modules / qt / PSSS_motion.py - m1 SARFE10 - PSSS059 + time.sleep(1) + print(value) + +en = PSSS_energy() +en.alias = "energy" + +av = create_averager(psss_spectrum_y, NUM_SHOTS, interval=-1, name="spectrum_average") +av_samples = av.samples +av_samples.alias = "spectrum_samples" + + +def after_read(record, scan): + p.getSeries(0).setData(psss_spectrum_x.take(), record[av]) + p.setTitle("Energy = %1.3f" %(record[en])) + +r = lscan(en, (av, av_samples), RANGE_FROM, RANGE_TO, STEPS, latency=0.0, after_read = after_read, save=False ) +average, samples, energy_range = r.getReadable(0), r.getReadable(1), r.getPositions(0) + +[amp, mean_val, sigma, offset],centre_line_out = fit_energy(RANGE_FROM, RANGE_TO, STEPS+1, NUM_SHOTS, samples) + +if not (RANGE_FROM < mean_val < RANGE_TO or RANGE_TO < mean_val < RANGE_FROM): + raise Exception ("Invalid fit mean: " + str(mean_val)) + + +measured_offset = energy_machine.read() - mean_val +#Set fitted energy +print "measured offset", measured_offset + +en.write(mean_val) + + +#Plot +""" +plt.figure(figsize=[10,5]) +plt.subplot(121) +plt.title('PSSS scan of set photon energy') +plt.pcolormesh(np.arange(0,Scan_spec.shape[2]), Energy_range, Scan_spec.mean(axis=1),cmap='CMRmap') +plt.vlines(int(Scan_spec.shape[2]/2), Energy_range[0], Energy_range[-1],linestyles='--', colors='orange') 
+plt.xlim([0,Scan_spec.shape[2]]) +plt.xlabel('Camera pixel') +plt.ylabel('Set PSSS energy [eV] \n SARFE10-PSSS059:ENERGY') + +plt.subplot(122) +plt.title('At camera centre pixel %1i \nCalibrated energy = %.1f [eV]\n Offset from machine = %.1f [eV]'%(int(Scan_spec.shape[2]/2),popt[1],measured_offset)) +plt.plot(centre_line_out,Energy_range,linewidth = 2, color = 'orange',label ='measured') +plt.plot(gaus(Energy_range_fit,*popt),Energy_range_fit,'r:',label='fit') +plt.xticks([]) +plt.legend() +plt.grid(True) +""" + +p.clear() +p.setTitle("") +plot_gauss_fit(energy_range, centre_line_out, gauss_pars=(offset, amp, mean_val, sigma), p=PLOT, title = "Data") + + +set_return(mean_val) + + + + + + diff --git a/script/psss/psss.py b/script/psss/psss.py new file mode 100755 index 0000000..c82f729 --- /dev/null +++ b/script/psss/psss.py @@ -0,0 +1,188 @@ +import org.jfree.ui.RectangleAnchor as RectangleAnchor +import org.jfree.ui.TextAnchor as TextAnchor +import ch.psi.pshell.imaging.Overlay as Overlay +import ch.psi.pshell.plot.RangeSelectionPlot as RangeSelectionPlot +from collections import deque + +PSSS_CAMERA_NAME = "SARFE10-PSSS059"; + +def integrate_arrays(arrays): + if arrays is None or (len(arrays)==0): + return None + ret = arrays[0] + for a in arrays[1:]: + ret=arradd(ret, a) + return ret + +def average_arrays(arrays): + ret = integrate_arrays(arrays) + if ret is not None: + s=len(arrays) + ret = [x/s for x in ret] + return ret + + +def get_psss_data(average=1): + ax,ay,ac,af=[],[],[],[] + x = psss_spectrum_x.take() + for i in range(average): + y = psss_spectrum_y.take() + center,fwhm = psss_center.take(), psss_fwhm.take() + if average==1: + return x,y,center,fwhm + #ax.append(x) + ay.append(y) + ac.append(center) + af.append(fwhm) + if i < (average-1): + psss_spectrum_y.waitCacheChange(2000) + #psss_center.waitCacheChange(1) + #psss_fwhm.waitCacheChange(1) +#x=average_arrays(ax) + y=average_arrays(ay) + center=mean(ac) + fwhm=mean(af) + return x,y,center,fwhm + 
+def plot_psss(p, h=None, average = None): + """ + if len(p.getMarkers())==0: + m1=p.addMarker(0,None,"",Color.WHITE) + m2=p.addMarker(0,None,"",Color.WHITE) + m2.setLabelAnchor(RectangleAnchor.TOP) + else: + m1,m2 = p.getMarkers() + """ + + #Manipulate axis (use PSSS_PLOT for the global object): + #p.getAxis(LinePlot.AxisId.X). + + # Setup queues + if p.getNumberOfSeries()==0: + center_queue = deque(maxlen=100) + fwhm_queue = deque(maxlen=100) + + # Setup figures + + if p.getNumberOfSeries()==0: + p.addSeries(LinePlotSeries("spectrum")) + p.addSeries(LinePlotSeries("average")) + p.setLegendVisible(True) + p.getAxis(LinePlot.AxisId.X) + p.getAxis(LinePlot.AxisId.X).setLabel("Energy [eV]") + p.getAxis(LinePlot.AxisId.Y).setLabel("Sum counts") + if len(p.getMarkers())==0: + paint = RangeSelectionPlot().getSelectionColor() #p.chart.getBackgroundPaint() + m=p.addIntervalMarker(0,0, None,"", paint) + m.setLabelAnchor(RectangleAnchor.BOTTOM) + m.alpha=0.2 + m.setLabelPaint(Color.WHITE) + else: + m = p.getMarkers()[0] + + + x,y, = psss_spectrum_x.take(), psss_spectrum_y.take() + # update spectral plot + if (x is None) or (y is None): + p.getSeries(0).clear() + else: + p.getSeries(0).setData(x,y) + if (x is None) or (y is None): + p.getSeries(0).clear() + else: + p.getSeries(0).setData(x,y) + + if average is not None: + print "Average: ", average + x,y, center,fwhm = get_psss_data(average) + else: + y = psss_spectrum_y_average.take() + center = psss_center_average.take() + fwhm = psss_fwhm_average.take() + + if (x is None) or (y is None): + p.getSeries(1).clear() + else: + p.getSeries(1).setData(x,y) + + + if (center!= None) and (fwhm!=None): + center=center.doubleValue() + fwhm=fwhm.doubleValue() + m.startValue, m.endValue = center - fwhm/2, center + fwhm/2 + m.label = str(center) + + if h: + if h.getNumberOfSeries()==0: + h.addSeries(TimePlotSeries("centre")) + h.addSeries(TimePlotSeries("Energy spread SS",2)) + h.addSeries(TimePlotSeries("Energy spread cum avg",2)) + 
h.setLegendVisible(True) + h.setTimeAxisLabel("") + h.getAxis(Timeplot.AxisId.Y1).setLabel("Central energy [eV]") + per_mil = (fwhm/center)*1e3 + per_mil_avg = psss_fwhm_avg.take() + h.getSeries(0).appendData(center) + h.getSeries(1).appendData(per_mil) + h.getSeries(2).appendData(per_mil_avg) + return center,fwhm + +ovmin, ovmax, ovavg = None, None, None + +def update_psss_image(renderer): + global ovmin, ovmax + #if ovmin: ovmin.update(Point(0,psss_roi_min.take())) + #if ovmax: ovmax.update(Point(0,psss_roi_max.take())) + + + width=psss_spectrum_x.size + if ovmin: ovmin.update(Point(0,psss_roi_min.take()), Point(width, psss_roi_min.take())) + if ovmax: ovmax.update(Point(0,psss_roi_max.take()), Point(width, psss_roi_max.take())) + try: + data = renderer.data + av = "%1.2f" %(data.integrate(False)/data.width/data.height) + except: + av = "" + if ovavg: ovavg.update(av) + +def enable_psss_image(enabled, renderer): + global ovmin, ovmax, ovavg + try: + if (enabled): + #Start or connect to ScreenPanel pipeline + renderer.setDevice(cam_server) + renderer.setProfile(renderer.Profile.Both) + renderer.setShowProfileLimits(False) + + #Changing colormap + #print Colormap.values() #Check values + cam_server.config.colormap=Colormap.Temperature + + + cam_server.start(PSSS_CAMERA_NAME + "_sp", PSSS_CAMERA_NAME + "_sp1") + #ovmin, ovmax= Overlays.Crosshairs(renderer.getPenMarker(), Dimension(-1,1)), \ + # Overlays.Crosshairs(renderer.getPenMarker(), Dimension(-1,1)) + ovmin, ovmax= Overlays.Line(renderer.getPenMarker()), Overlays.Line(renderer.getPenMarker()) + + ovavg = Overlays.Text(Pen(java.awt.Color.GREEN.darker()), "", \ + java.awt.Font("Verdana", java.awt.Font.PLAIN, 12), java.awt.Point(-50,20)) + ovavg.fixed=True + ovavg.anchor=Overlay.ANCHOR_IMAGE_TOP_RIGHT + renderer.addOverlays([ovmin, ovmax, ovavg]) + update_psss_image(renderer) + else: + ovmin, ovmax, ovavg = None, None, None + renderer.setDevice(None) + renderer.clearOverlays() + cam_server.stop() + except: + 
log(sys.exc_info()[1]) + + +def get_psss_averaging(): + return psss_spectrum_y_average.config.measures + +def set_psss_averaging(measures): + psss_spectrum_y_average.config.measures=measures + psss_center_average.config.measures=measures + psss_fwhm_average.config.measures=measures \ No newline at end of file