This commit is contained in:
BIN
config/.DS_Store
vendored
BIN
config/.DS_Store
vendored
Binary file not shown.
@@ -1,9 +1,9 @@
|
||||
#Wed Jan 24 12:02:38 CET 2018
|
||||
#Mon Mar 26 14:15:56 CEST 2018
|
||||
autoSaveScanData=true
|
||||
createSessionFiles=false
|
||||
dataLayout=table
|
||||
createSessionFiles=true
|
||||
dataLayout=default
|
||||
dataPath={data}/{year}_{month}/{date}/{date}_{time}_{name}
|
||||
dataProvider=txt
|
||||
dataProvider=h5
|
||||
dataScanFlushRecords=false
|
||||
dataScanPreserveTypes=false
|
||||
dataScanReleaseRecords=false
|
||||
@@ -11,7 +11,7 @@ dataServerPort=5573
|
||||
depthDimension=0
|
||||
hostName=
|
||||
instanceName=Dev
|
||||
logDaysToLive=8
|
||||
logDaysToLive=50
|
||||
logLevel=Fine
|
||||
logLevelConsole=Off
|
||||
logPath={logs}/{date}_{time}
|
||||
|
||||
@@ -1,24 +1,24 @@
|
||||
dispatcher=ch.psi.pshell.bs.Provider|tcp://localhost:9999|||
|
||||
stream=ch.psi.pshell.bs.Stream|<dispatcher> #false:boolean|||
|
||||
scalar=ch.psi.pshell.bs.Scalar|stream Int8Scalar|||
|
||||
cam=ch.psi.pshell.bs.StreamCamera|"tcp://gfa-lc6-64:10100" 1|||true
|
||||
cam_server=ch.psi.pshell.bs.PipelineServer|gfa-lc6-64:8889|||
|
||||
cam_server=ch.psi.pshell.bs.PipelineServer|localhost:8889|||true
|
||||
#stream=ch.psi.pshell.bs.Stream|<dispatcher> #false:boolean|||
|
||||
#scalar=ch.psi.pshell.bs.Scalar|stream Int8Scalar|||
|
||||
#cam=ch.psi.pshell.bs.StreamCamera|"tcp://gfa-lc6-64:10100" 1|||true
|
||||
#prosilica=ch.psi.pshell.prosilica.Prosilica|119734 "PacketSize=1504;ExposureValue=300000"|||true
|
||||
rr=RotationReadback|TESTIOC:TESTCALCOUT:Output|||true
|
||||
rp=ch.psi.pshell.epics.Positioner|TESTIOC:TESTCALCOUT:Input null rr|||
|
||||
#rr=RotationReadback|TESTIOC:TESTCALCOUT:Output|||true
|
||||
#rp=ch.psi.pshell.epics.Positioner|TESTIOC:TESTCALCOUT:Input null rr|||
|
||||
#webcam=ch.psi.pshell.webcam.Webcam|:2|||true
|
||||
det=ch.psi.pshell.epics.AreaDetector|13SIM1|||false
|
||||
PiezoRoll1=ch.psi.pshell.epics.ControlledVariable|"TESTIOC:TESTCALCOUT:Input" TESTIOC:TESTCALCOUT:Output|||true
|
||||
#det=ch.psi.pshell.epics.AreaDetector|13SIM1|||false
|
||||
PiezoRoll1=ch.psi.pshell.epics.ControlledVariable|"TESTIOC:TESTCALCOUT:Input" TESTIOC:TESTCALCOUT:Output false|||true
|
||||
$ser=ch.psi.pshell.serial.TcpDevice||||
|
||||
$beam_ok=ch.psi.pshell.epics.ChannelString|CHANNEL|||
|
||||
sc1=ch.psi.pshell.epics.Scaler|MTEST-HW3:JS|||true
|
||||
#sc1=ch.psi.pshell.epics.Scaler|MTEST-HW3:JS|||true
|
||||
#bragg=ch.psi.pshell.crlogic.CrlogicPositioner|MTEST-HW3:MOT1|||
|
||||
#wago=ch.psi.pshell.modbus.ModbusTCP|SF-TEST-WAGO1:502|||
|
||||
testd=ch.psi.pshell.device.DummyPositioner||||
|
||||
#ralays=ch.psi.pshell.modbus.DigitalOutputArray|wago 0 16||1000|
|
||||
#relay1=ch.psi.pshell.modbus.DigitalOutput|wago 0||1000|
|
||||
#relay2=ch.psi.pshell.modbus.DigitalOutput|wago 1||1000|
|
||||
streamcam=ch.psi.pshell.bs.StreamCamera|tcp://gfa-lc6-64:9999||-1000|
|
||||
#streamcam=ch.psi.pshell.bs.StreamCamera|tcp://gfa-lc6-64:9999||-1000|
|
||||
#camtool_provider=ch.psi.pshell.bs.Provider|tcp://gfa-lc6-64:9999|||
|
||||
#camtool_stream=ch.psi.pshell.bs.Stream|camtool_provider|||true
|
||||
#camtool_min=ch.psi.pshell.bs.Scalar|camtool_stream min_value|||
|
||||
@@ -53,7 +53,7 @@ sin=ch.psi.pshell.epics.ChannelDouble|TESTIOC:TESTSINUS:SinCalc 3 true|||true
|
||||
#sinx=ch.psi.pshell.epics.ProcessVariable|TESTIOC:TESTSINUS:SinCalc|||true
|
||||
#average=ch.psi.pshell.device.Averager|sin||2000|
|
||||
isin=ch.psi.pshell.epics.ChannelInteger|TESTIOC:TESTSINUS:SinCalc|||true
|
||||
arr=ch.psi.pshell.epics.ChannelDoubleArray|TESTIOC:TESTWF2:MyWF 6 -1 true|||true
|
||||
arr=ch.psi.pshell.epics.ChannelDoubleArray|TESTIOC:TESTWF2:MyWF 6 -1 false|||true
|
||||
pv=ch.psi.pshell.epics.ProcessVariable|TESTIOC:TESTCALCOUT:Input|||true
|
||||
shutter=ch.psi.pshell.epics.BinaryPositioner|TESTIOC:TESTBO:MyBO TESTIOC:TESTBO:MyBO|||true
|
||||
$motor=ch.psi.pshell.epics.Motor|MTEST-GOBBO:MOT1|||true
|
||||
@@ -69,30 +69,41 @@ slit=ch.psi.pshell.device.Slit|motor motor2|||
|
||||
#$ps=PowerSupply|T-MMDV5|||
|
||||
pe=ch.psi.pshell.epics.Positioner|"TESTIOC:TESTCALCOUT:Input" TESTIOC:TESTCALCOUT:Output|||true
|
||||
cv=ch.psi.pshell.epics.ControlledVariable|TESTIOC:TESTCALCOUT:Input TESTIOC:TESTCALCOUT:Output|||true
|
||||
detector=ch.psi.pshell.detector.DetectorBase|http://127.0.0.1:8090/|||
|
||||
#detector=ch.psi.pshell.detector.DetectorBase|http://127.0.0.1:8090/|||
|
||||
#serial=ch.psi.pshell.serial.SerialPortDevice||||
|
||||
tcp=ch.psi.pshell.serial.TcpDevice|127.0.0.1:5554|||
|
||||
beeper=Beeper||||
|
||||
img=ch.psi.pshell.imaging.CameraSource|det||-500|
|
||||
imgbis=ch.psi.pshell.imaging.CameraSource|det||-500|
|
||||
sc=ch.psi.pshell.imaging.CameraSource|scienta||-500|
|
||||
tst=ch.psi.pshell.imaging.FileSource|C:\\Users\\gobbo_a\\Pictures\\beam.jpg|||
|
||||
tst2=ch.psi.pshell.imaging.FileSource|C:\\Users\\gobbo_a\\Pictures\\tst.png||-500|
|
||||
tst3=ch.psi.pshell.imaging.FileSource|C:\\Users\\gobbo_a\\Pictures\\tst.png||-500|
|
||||
rec=ch.psi.pshell.detector.Receiver|tcp://127.0.0.1:5444|||true
|
||||
#tcp=ch.psi.pshell.serial.TcpDevice|127.0.0.1:5554|||
|
||||
#beeper=Beeper||||
|
||||
#img=ch.psi.pshell.imaging.CameraSource|det||-500|
|
||||
#imgbis=ch.psi.pshell.imaging.CameraSource|det||-500|
|
||||
#sc=ch.psi.pshell.imaging.CameraSource|scienta||-500|
|
||||
tst=ch.psi.pshell.imaging.FileSource|/Users/gobbo_a/dev/pshell/config/mxsc/images/snapshot.png|||
|
||||
#tst2=ch.psi.pshell.imaging.FileSource|C:\\Users\\gobbo_a\\Pictures\\tst.png||-500|
|
||||
#tst3=ch.psi.pshell.imaging.FileSource|C:\\Users\\gobbo_a\\Pictures\\tst.png||-500|
|
||||
#rec=ch.psi.pshell.detector.Receiver|tcp://127.0.0.1:5444|||true
|
||||
#rec2=ch.psi.pshell.detector.Receiver|tcp://127.0.0.1:5555||-500|false
|
||||
pvt=ch.psi.pshell.epics.ProcessVariable|TESTIOC:TESTCALCOUT:Input|Read||
|
||||
$master=ch.psi.pshell.modbus.ModbusTCP|127.0.0.1|||
|
||||
ai=ch.psi.pshell.modbus.AnalogOutput|master 0|||
|
||||
cache=ch.psi.pshell.device.RegisterCache|sin|||
|
||||
$scaler=ch.psi.pshell.epics.Scaler|SCALER|||
|
||||
testpos=ch.psi.pshell.device.DummyPositioner||||true
|
||||
$test=ch.psi.pshell.epics.Motor|X|||true
|
||||
arrsrc=ch.psi.pshell.epics.ArraySource|13SIM1:image1:ArrayData 125000||-500|true
|
||||
#pvt=ch.psi.pshell.epics.ProcessVariable|TESTIOC:TESTCALCOUT:Input|Read||
|
||||
#$master=ch.psi.pshell.modbus.ModbusTCP|127.0.0.1|||
|
||||
#ai=ch.psi.pshell.modbus.AnalogOutput|master 0|||
|
||||
#cache=ch.psi.pshell.device.RegisterCache|sin|||
|
||||
#$scaler=ch.psi.pshell.epics.Scaler|SCALER|||
|
||||
#testpos=ch.psi.pshell.device.DummyPositioner||||true
|
||||
#$test=ch.psi.pshell.epics.Motor|X|||true
|
||||
#arrsrc=ch.psi.pshell.epics.ArraySource|13SIM1:image1:ArrayData 125000||-500|true
|
||||
#barrsrc=ch.psi.pshell.epics.ByteArraySource|13SIM1:image1:ArrayData 125000||-500|
|
||||
#matrix=ch.psi.pshell.epics.GenericMatrix|13SIM1:image1:ArrayData 1024 640 Int8|||
|
||||
#matrix_src=ch.psi.pshell.imaging.RegisterMatrixSource|matrix||-200|
|
||||
ga=ch.psi.pshell.epics.GenericArray|13SIM1:image1:ArrayData 125000|||
|
||||
#ga=ch.psi.pshell.epics.GenericArray|13SIM1:image1:ArrayData 125000|||
|
||||
#img2=ch.psi.pshell.imaging.FileSource|C:\\Users\\gobbo_a\\Pictures\\Chrysanthemum.jpg|||
|
||||
#mxd=ch.psi.pshell.epics.AreaDetector|MX-SAMCAM|||
|
||||
#mx=ch.psi.pshell.imaging.CameraSource|mxd||-1000|
|
||||
$mymot=ch.psi.pshell.epics.Motor|TEST|||true
|
||||
$mypos=ch.psi.pshell.epics.Positioner|SETPOINT READBACK|||true
|
||||
$mycv=ch.psi.pshell.epics.ProcessVariable|SETPOINT|||true
|
||||
mu=ch.psi.pshell.device.DummyMotor||||true
|
||||
delta=ch.psi.pshell.device.DummyMotor||||true
|
||||
gam=ch.psi.pshell.device.DummyMotor||||true
|
||||
eta=ch.psi.pshell.device.DummyMotor||||true
|
||||
chi=ch.psi.pshell.device.DummyMotor||||true
|
||||
phi=ch.psi.pshell.device.DummyMotor||||true
|
||||
sixc=ch.psi.pshell.device.MotorGroupBase|mu delta gam eta chi phi|||
|
||||
en=ch.psi.pshell.device.DummyPositioner||||true
|
||||
|
||||
@@ -1,43 +1,48 @@
|
||||
Nilson.java=disabled
|
||||
CameraCalibrationDialog.java=enabled
|
||||
ScalarPanel.java=enabled
|
||||
CameraConfigDialog.java=disabled
|
||||
Inventory.java=disabled
|
||||
LayoutParallelScan.py=disabled
|
||||
PhaseScan.java=disabled
|
||||
PythonInteractive.java=disabled
|
||||
RotationReadback.java=enabled
|
||||
SfCamera.java=enabled
|
||||
ScreenPanel.java=disabled
|
||||
Didier.java=disabled
|
||||
PersonalizedTheme.java=disabled
|
||||
MXSC-1.5.0.jar=disabled
|
||||
TestLdap.java=disabled
|
||||
TestScan.java=disabled
|
||||
MXSC-1.4.0.jar=disabled
|
||||
TestPlot.java=disabled
|
||||
MatlabTokenMaker.java=enabled
|
||||
BandpassFilter.java=disabled
|
||||
DataFile.java=disabled
|
||||
Commands.java=disabled
|
||||
CustomStopAll.java=disabled
|
||||
importer.java=disabled
|
||||
Beeper.java=enabled
|
||||
DarkTheme.java=disabled
|
||||
DefaultPlugin.java=disabled
|
||||
EnergyScan.java=disabled
|
||||
GroovyPlugin.groovy=disabled
|
||||
HoloScan.java=disabled
|
||||
Image.java=disabled
|
||||
ManipulatorScan.java=disabled
|
||||
Microscope.java=disabled
|
||||
PanelPlugin.java=disabled
|
||||
NetbeansPlugin.java=disabled
|
||||
panel.py=disabled
|
||||
Pearl.java=disabled
|
||||
PhotonEnergy.java=disabled
|
||||
plugin-1.0-SNAPSHOT.jar=disabled
|
||||
PowerSupply.java=disabled
|
||||
sb1.groovy=disabled
|
||||
test.py=disabled
|
||||
XPSSpectrum.java=disabled
|
||||
ScreenPanel2.java=disabled
|
||||
Correlation2.java=disabled
|
||||
Correlation.java=disabled
|
||||
Sender.java=disabled
|
||||
MXSC-1.9.0.jar=disabled
|
||||
Nilson.java=disabled
|
||||
CameraCalibrationDialog.java=enabled
|
||||
ScalarPanel.java=disabled
|
||||
CameraConfigDialog.java=disabled
|
||||
Inventory.java=disabled
|
||||
LayoutParallelScan.py=disabled
|
||||
PhaseScan.java=disabled
|
||||
PythonInteractive.java=disabled
|
||||
RotationReadback.java=disabled
|
||||
SfCamera.java=enabled
|
||||
ScreenPanel.java=disabled
|
||||
Didier.java=disabled
|
||||
PersonalizedTheme.java=disabled
|
||||
MXSC-1.5.0.jar=disabled
|
||||
TestLdap.java=disabled
|
||||
TestScan.java=disabled
|
||||
MXSC-1.4.0.jar=disabled
|
||||
TestPlot.java=disabled
|
||||
MatlabTokenMaker.java=disabled
|
||||
BandpassFilter.java=disabled
|
||||
DataFile.java=disabled
|
||||
Commands.java=disabled
|
||||
CustomStopAll.java=disabled
|
||||
importer.java=disabled
|
||||
Beeper.java=disabled
|
||||
DarkTheme.java=disabled
|
||||
DefaultPlugin.java=disabled
|
||||
EnergyScan.java=disabled
|
||||
GroovyPlugin.groovy=disabled
|
||||
HoloScan.java=disabled
|
||||
Image.java=disabled
|
||||
ManipulatorScan.java=disabled
|
||||
Microscope.java=disabled
|
||||
PanelPlugin.java=disabled
|
||||
NetbeansPlugin.java=disabled
|
||||
panel.py=disabled
|
||||
Pearl.java=disabled
|
||||
PhotonEnergy.java=disabled
|
||||
plugin-1.0-SNAPSHOT.jar=disabled
|
||||
PowerSupply.java=disabled
|
||||
sb1.groovy=disabled
|
||||
test.py=disabled
|
||||
XPSSpectrum.java=disabled
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
#spectrum=1
|
||||
#back=1
|
||||
#task1=3
|
||||
#task2=-1
|
||||
#outupdate=0
|
||||
#spectrum=1
|
||||
#back=1
|
||||
#task1=3
|
||||
#task2=-1
|
||||
|
||||
20
devices/Add.png.properties
Normal file
20
devices/Add.png.properties
Normal file
@@ -0,0 +1,20 @@
|
||||
#Wed Feb 21 09:01:07 CET 2018
|
||||
flipHorizontally=false
|
||||
flipVertically=false
|
||||
grayscale=false
|
||||
invert=false
|
||||
rescaleFactor=1.0
|
||||
rescaleOffset=0.0
|
||||
roiHeight=-1
|
||||
roiWidth=-1
|
||||
roiX=0
|
||||
roiY=0
|
||||
rotation=0.0
|
||||
rotationCrop=false
|
||||
scale=1.0
|
||||
spatialCalOffsetX=NaN
|
||||
spatialCalOffsetY=NaN
|
||||
spatialCalScaleX=NaN
|
||||
spatialCalScaleY=NaN
|
||||
spatialCalUnits=mm
|
||||
transpose=false
|
||||
4
devices/CurrentCamera provider.properties
Normal file
4
devices/CurrentCamera provider.properties
Normal file
@@ -0,0 +1,4 @@
|
||||
#Thu Apr 05 14:57:15 CEST 2018
|
||||
keepListeningOnStop=false
|
||||
parallelHandlerProcessing=true
|
||||
socketType=SUB
|
||||
@@ -1,4 +1,4 @@
|
||||
#Thu Jan 18 08:50:50 CET 2018
|
||||
#Wed Mar 21 12:02:58 CET 2018
|
||||
colormap=Flame
|
||||
colormapAutomatic=true
|
||||
colormapMax=25300.0
|
||||
|
||||
20
devices/DSC_0530.JPG.properties
Normal file
20
devices/DSC_0530.JPG.properties
Normal file
@@ -0,0 +1,20 @@
|
||||
#Tue Feb 20 15:00:20 CET 2018
|
||||
flipHorizontally=false
|
||||
flipVertically=false
|
||||
grayscale=false
|
||||
invert=false
|
||||
rescaleFactor=1.0
|
||||
rescaleOffset=0.0
|
||||
roiHeight=-1
|
||||
roiWidth=-1
|
||||
roiX=0
|
||||
roiY=0
|
||||
rotation=0.0
|
||||
rotationCrop=false
|
||||
scale=1.0
|
||||
spatialCalOffsetX=NaN
|
||||
spatialCalOffsetY=NaN
|
||||
spatialCalScaleX=NaN
|
||||
spatialCalScaleY=NaN
|
||||
spatialCalUnits=mm
|
||||
transpose=false
|
||||
20
devices/IMG_0077.JPG.properties
Normal file
20
devices/IMG_0077.JPG.properties
Normal file
@@ -0,0 +1,20 @@
|
||||
#Tue Feb 20 13:25:23 CET 2018
|
||||
flipHorizontally=false
|
||||
flipVertically=false
|
||||
grayscale=false
|
||||
invert=false
|
||||
rescaleFactor=1.0
|
||||
rescaleOffset=0.0
|
||||
roiHeight=-1
|
||||
roiWidth=-1
|
||||
roiX=0
|
||||
roiY=0
|
||||
rotation=0.0
|
||||
rotationCrop=false
|
||||
scale=1.0
|
||||
spatialCalOffsetX=NaN
|
||||
spatialCalOffsetY=NaN
|
||||
spatialCalScaleX=NaN
|
||||
spatialCalScaleY=NaN
|
||||
spatialCalUnits=mm
|
||||
transpose=false
|
||||
20
devices/IMG_0105.JPG.properties
Normal file
20
devices/IMG_0105.JPG.properties
Normal file
@@ -0,0 +1,20 @@
|
||||
#Tue Feb 20 14:59:27 CET 2018
|
||||
flipHorizontally=false
|
||||
flipVertically=false
|
||||
grayscale=false
|
||||
invert=false
|
||||
rescaleFactor=1.0
|
||||
rescaleOffset=0.0
|
||||
roiHeight=-1
|
||||
roiWidth=-1
|
||||
roiX=0
|
||||
roiY=0
|
||||
rotation=0.0
|
||||
rotationCrop=false
|
||||
scale=1.0
|
||||
spatialCalOffsetX=NaN
|
||||
spatialCalOffsetY=NaN
|
||||
spatialCalScaleX=NaN
|
||||
spatialCalScaleY=NaN
|
||||
spatialCalUnits=mm
|
||||
transpose=false
|
||||
24
devices/Image Averager.properties
Normal file
24
devices/Image Averager.properties
Normal file
@@ -0,0 +1,24 @@
|
||||
#Tue Mar 06 11:10:32 CET 2018
|
||||
colormap=Flame
|
||||
colormapAutomatic=true
|
||||
colormapMax=50660.0
|
||||
colormapMin=0.0
|
||||
flipHorizontally=false
|
||||
flipVertically=false
|
||||
grayscale=false
|
||||
invert=false
|
||||
rescaleFactor=1.0
|
||||
rescaleOffset=0.0
|
||||
roiHeight=-1
|
||||
roiWidth=-1
|
||||
roiX=0
|
||||
roiY=0
|
||||
rotation=0.0
|
||||
rotationCrop=false
|
||||
scale=1.0
|
||||
spatialCalOffsetX=0.0
|
||||
spatialCalOffsetY=0.0
|
||||
spatialCalScaleX=1.0
|
||||
spatialCalScaleY=1.0
|
||||
spatialCalUnits=mm
|
||||
transpose=false
|
||||
20
devices/Open.png.properties
Normal file
20
devices/Open.png.properties
Normal file
@@ -0,0 +1,20 @@
|
||||
#Wed Feb 21 09:56:58 CET 2018
|
||||
flipHorizontally=false
|
||||
flipVertically=false
|
||||
grayscale=false
|
||||
invert=false
|
||||
rescaleFactor=1.0
|
||||
rescaleOffset=0.0
|
||||
roiHeight=-1
|
||||
roiWidth=-1
|
||||
roiX=0
|
||||
roiY=0
|
||||
rotation=0.0
|
||||
rotationCrop=false
|
||||
scale=1.0
|
||||
spatialCalOffsetX=NaN
|
||||
spatialCalOffsetY=NaN
|
||||
spatialCalScaleX=NaN
|
||||
spatialCalScaleY=NaN
|
||||
spatialCalUnits=mm
|
||||
transpose=false
|
||||
20
devices/StepInto.png.properties
Normal file
20
devices/StepInto.png.properties
Normal file
@@ -0,0 +1,20 @@
|
||||
#Wed Feb 21 09:58:16 CET 2018
|
||||
flipHorizontally=false
|
||||
flipVertically=false
|
||||
grayscale=false
|
||||
invert=false
|
||||
rescaleFactor=1.0
|
||||
rescaleOffset=0.0
|
||||
roiHeight=-1
|
||||
roiWidth=-1
|
||||
roiX=0
|
||||
roiY=0
|
||||
rotation=0.0
|
||||
rotationCrop=false
|
||||
scale=1.0
|
||||
spatialCalOffsetX=NaN
|
||||
spatialCalOffsetY=NaN
|
||||
spatialCalScaleX=NaN
|
||||
spatialCalScaleY=NaN
|
||||
spatialCalUnits=mm
|
||||
transpose=false
|
||||
20
devices/TUSH102f1.png.properties
Normal file
20
devices/TUSH102f1.png.properties
Normal file
@@ -0,0 +1,20 @@
|
||||
#Tue Feb 20 15:35:03 CET 2018
|
||||
flipHorizontally=false
|
||||
flipVertically=false
|
||||
grayscale=false
|
||||
invert=false
|
||||
rescaleFactor=1.0
|
||||
rescaleOffset=0.0
|
||||
roiHeight=-1
|
||||
roiWidth=-1
|
||||
roiX=0
|
||||
roiY=0
|
||||
rotation=0.0
|
||||
rotationCrop=false
|
||||
scale=1.0
|
||||
spatialCalOffsetX=NaN
|
||||
spatialCalOffsetY=NaN
|
||||
spatialCalScaleX=NaN
|
||||
spatialCalScaleY=NaN
|
||||
spatialCalUnits=mm
|
||||
transpose=false
|
||||
20
devices/TUSH102f2.png.properties
Normal file
20
devices/TUSH102f2.png.properties
Normal file
@@ -0,0 +1,20 @@
|
||||
#Tue Feb 20 15:08:30 CET 2018
|
||||
flipHorizontally=false
|
||||
flipVertically=false
|
||||
grayscale=false
|
||||
invert=false
|
||||
rescaleFactor=1.0
|
||||
rescaleOffset=0.0
|
||||
roiHeight=-1
|
||||
roiWidth=-1
|
||||
roiX=0
|
||||
roiY=0
|
||||
rotation=0.0
|
||||
rotationCrop=false
|
||||
scale=1.0
|
||||
spatialCalOffsetX=NaN
|
||||
spatialCalOffsetY=NaN
|
||||
spatialCalScaleX=NaN
|
||||
spatialCalScaleY=NaN
|
||||
spatialCalUnits=mm
|
||||
transpose=false
|
||||
20
devices/TUSH102f3.png.properties
Normal file
20
devices/TUSH102f3.png.properties
Normal file
@@ -0,0 +1,20 @@
|
||||
#Tue Feb 20 18:13:00 CET 2018
|
||||
flipHorizontally=false
|
||||
flipVertically=false
|
||||
grayscale=false
|
||||
invert=false
|
||||
rescaleFactor=1.0
|
||||
rescaleOffset=0.0
|
||||
roiHeight=-1
|
||||
roiWidth=-1
|
||||
roiX=0
|
||||
roiY=0
|
||||
rotation=0.0
|
||||
rotationCrop=false
|
||||
scale=1.0
|
||||
spatialCalOffsetX=NaN
|
||||
spatialCalOffsetY=NaN
|
||||
spatialCalScaleX=NaN
|
||||
spatialCalScaleY=NaN
|
||||
spatialCalUnits=mm
|
||||
transpose=false
|
||||
20
devices/TUSH102f4.png.properties
Normal file
20
devices/TUSH102f4.png.properties
Normal file
@@ -0,0 +1,20 @@
|
||||
#Tue Feb 20 15:01:57 CET 2018
|
||||
flipHorizontally=false
|
||||
flipVertically=false
|
||||
grayscale=false
|
||||
invert=false
|
||||
rescaleFactor=1.0
|
||||
rescaleOffset=0.0
|
||||
roiHeight=-1
|
||||
roiWidth=-1
|
||||
roiX=0
|
||||
roiY=0
|
||||
rotation=0.0
|
||||
rotationCrop=false
|
||||
scale=1.0
|
||||
spatialCalOffsetX=NaN
|
||||
spatialCalOffsetY=NaN
|
||||
spatialCalScaleX=NaN
|
||||
spatialCalScaleY=NaN
|
||||
spatialCalUnits=mm
|
||||
transpose=false
|
||||
20
devices/arr.png.properties
Normal file
20
devices/arr.png.properties
Normal file
@@ -0,0 +1,20 @@
|
||||
#Tue Feb 20 13:26:51 CET 2018
|
||||
flipHorizontally=false
|
||||
flipVertically=false
|
||||
grayscale=false
|
||||
invert=false
|
||||
rescaleFactor=1.0
|
||||
rescaleOffset=0.0
|
||||
roiHeight=-1
|
||||
roiWidth=-1
|
||||
roiX=0
|
||||
roiY=0
|
||||
rotation=0.0
|
||||
rotationCrop=false
|
||||
scale=1.0
|
||||
spatialCalOffsetX=NaN
|
||||
spatialCalOffsetY=NaN
|
||||
spatialCalScaleX=NaN
|
||||
spatialCalScaleY=NaN
|
||||
spatialCalUnits=mm
|
||||
transpose=false
|
||||
5
devices/cam_server provider.properties
Normal file
5
devices/cam_server provider.properties
Normal file
@@ -0,0 +1,5 @@
|
||||
#Wed Apr 04 16:34:27 CEST 2018
|
||||
disableCompression=false
|
||||
keepListeningOnStop=false
|
||||
parallelHandlerProcessing=true
|
||||
socketType=SUB
|
||||
@@ -1,24 +1,24 @@
|
||||
#Tue Nov 14 16:09:47 CET 2017
|
||||
colormap=Flame
|
||||
colormapAutomatic=true
|
||||
colormapMax=NaN
|
||||
colormapMin=NaN
|
||||
flipHorizontally=false
|
||||
flipVertically=false
|
||||
grayscale=false
|
||||
invert=false
|
||||
rescaleFactor=1.0
|
||||
rescaleOffset=0.0
|
||||
roiHeight=-1
|
||||
roiWidth=-1
|
||||
roiX=0
|
||||
roiY=0
|
||||
rotation=0.0
|
||||
rotationCrop=false
|
||||
scale=1.0
|
||||
spatialCalOffsetX=-661.516812111553
|
||||
spatialCalOffsetY=-490.5109552096732
|
||||
spatialCalScaleX=-25.06265652487295
|
||||
spatialCalScaleY=-32.822757470346716
|
||||
spatialCalUnits=mm
|
||||
transpose=false
|
||||
#Thu Mar 22 10:22:45 CET 2018
|
||||
colormap=Flame
|
||||
colormapAutomatic=true
|
||||
colormapMax=NaN
|
||||
colormapMin=NaN
|
||||
flipHorizontally=false
|
||||
flipVertically=false
|
||||
grayscale=false
|
||||
invert=false
|
||||
rescaleFactor=1.0
|
||||
rescaleOffset=0.0
|
||||
roiHeight=-1
|
||||
roiWidth=-1
|
||||
roiX=0
|
||||
roiY=0
|
||||
rotation=0.0
|
||||
rotationCrop=false
|
||||
scale=1.0
|
||||
spatialCalOffsetX=0.0
|
||||
spatialCalOffsetY=0.0
|
||||
spatialCalScaleX=1.0
|
||||
spatialCalScaleY=1.0
|
||||
spatialCalUnits=mm
|
||||
transpose=false
|
||||
|
||||
14
devices/chi.properties
Normal file
14
devices/chi.properties
Normal file
@@ -0,0 +1,14 @@
|
||||
#Thu Apr 12 11:03:01 CEST 2018
|
||||
defaultSpeed=10.0
|
||||
estbilizationDelay=0
|
||||
maxSpeed=10.0
|
||||
maxValue=360.0
|
||||
minSpeed=0.1
|
||||
minValue=-360.0
|
||||
offset=0.0
|
||||
precision=2
|
||||
resolution=NaN
|
||||
rotation=false
|
||||
scale=1.0
|
||||
startRetries=1
|
||||
unit=mm
|
||||
14
devices/delta.properties
Normal file
14
devices/delta.properties
Normal file
@@ -0,0 +1,14 @@
|
||||
#Mon Apr 16 17:23:26 CEST 2018
|
||||
defaultSpeed=50.0
|
||||
estbilizationDelay=0
|
||||
maxSpeed=50.0
|
||||
maxValue=360.0
|
||||
minSpeed=0.1
|
||||
minValue=-360.0
|
||||
offset=0.0
|
||||
precision=2
|
||||
resolution=NaN
|
||||
rotation=false
|
||||
scale=1.0
|
||||
startRetries=1
|
||||
unit=mm
|
||||
@@ -1,8 +1,14 @@
|
||||
#Tue Jul 04 16:16:39 CEST 2017
|
||||
alignmentRetries=20
|
||||
disableCompression=true
|
||||
dropIncomplete=true
|
||||
keepListeningOnStop=false
|
||||
parallelHandlerProcessing=true
|
||||
sendIncompleteMessages=true
|
||||
socketType=PULL
|
||||
#Wed Apr 04 15:57:04 CEST 2018
|
||||
alignmentRetries=20
|
||||
disableCompression=true
|
||||
dropIncomplete=true
|
||||
keepListeningOnStop=false
|
||||
mappingIncomplete=fill_null
|
||||
parallelHandlerProcessing=true
|
||||
sendAwaitFirstMessage=false
|
||||
sendBuildChannelConfig=at_startup
|
||||
sendIncompleteMessages=true
|
||||
sendStrategy=complete_all
|
||||
sendSyncTimeout=-1
|
||||
socketType=PULL
|
||||
validationInconsistency=keep_as_is
|
||||
|
||||
9
devices/en.properties
Normal file
9
devices/en.properties
Normal file
@@ -0,0 +1,9 @@
|
||||
#Mon Apr 16 16:57:32 CEST 2018
|
||||
maxValue=100.0
|
||||
minValue=0.0
|
||||
offset=0.0
|
||||
precision=4
|
||||
resolution=NaN
|
||||
rotation=false
|
||||
scale=1.0
|
||||
unit=kEv
|
||||
14
devices/eta.properties
Normal file
14
devices/eta.properties
Normal file
@@ -0,0 +1,14 @@
|
||||
#Thu Apr 12 11:02:49 CEST 2018
|
||||
defaultSpeed=10.0
|
||||
estbilizationDelay=0
|
||||
maxSpeed=10.0
|
||||
maxValue=360.0
|
||||
minSpeed=0.1
|
||||
minValue=-360.0
|
||||
offset=0.0
|
||||
precision=2
|
||||
resolution=NaN
|
||||
rotation=false
|
||||
scale=1.0
|
||||
startRetries=1
|
||||
unit=mm
|
||||
14
devices/gam.properties
Normal file
14
devices/gam.properties
Normal file
@@ -0,0 +1,14 @@
|
||||
#Thu Apr 12 11:02:41 CEST 2018
|
||||
defaultSpeed=10.0
|
||||
estbilizationDelay=0
|
||||
maxSpeed=10.0
|
||||
maxValue=360.0
|
||||
minSpeed=0.1
|
||||
minValue=-360.0
|
||||
offset=0.0
|
||||
precision=2
|
||||
resolution=NaN
|
||||
rotation=false
|
||||
scale=1.0
|
||||
startRetries=1
|
||||
unit=mm
|
||||
9
devices/h.properties
Normal file
9
devices/h.properties
Normal file
@@ -0,0 +1,9 @@
|
||||
#Thu Apr 12 11:38:15 CEST 2018
|
||||
maxValue=10.0
|
||||
minValue=-10.0
|
||||
offset=0.0
|
||||
precision=4
|
||||
resolution=0.01
|
||||
rotation=false
|
||||
scale=1.0
|
||||
unit=deg
|
||||
2
devices/hkl.properties
Normal file
2
devices/hkl.properties
Normal file
@@ -0,0 +1,2 @@
|
||||
#Wed Apr 11 11:43:36 CEST 2018
|
||||
precision=6
|
||||
2
devices/hkl_group.properties
Normal file
2
devices/hkl_group.properties
Normal file
@@ -0,0 +1,2 @@
|
||||
#Mon Apr 16 15:45:44 CEST 2018
|
||||
precision=-1
|
||||
2
devices/hkl_reg.properties
Normal file
2
devices/hkl_reg.properties
Normal file
@@ -0,0 +1,2 @@
|
||||
#Mon Apr 16 18:25:05 CEST 2018
|
||||
precision=-1
|
||||
9
devices/k.properties
Normal file
9
devices/k.properties
Normal file
@@ -0,0 +1,9 @@
|
||||
#Thu Apr 12 11:38:04 CEST 2018
|
||||
maxValue=10.0
|
||||
minValue=-10.0
|
||||
offset=0.0
|
||||
precision=4
|
||||
resolution=0.01
|
||||
rotation=false
|
||||
scale=1.0
|
||||
unit=deg
|
||||
9
devices/l.properties
Normal file
9
devices/l.properties
Normal file
@@ -0,0 +1,9 @@
|
||||
#Thu Apr 12 11:37:53 CEST 2018
|
||||
maxValue=10.0
|
||||
minValue=-10.0
|
||||
offset=0.0
|
||||
precision=4
|
||||
resolution=0.01
|
||||
rotation=false
|
||||
scale=1.0
|
||||
unit=deg
|
||||
@@ -1,20 +1,20 @@
|
||||
#Fri Sep 22 09:54:50 CEST 2017
|
||||
accessType=ReadWrite
|
||||
channel=MTEST-GOBBO\:MOT1
|
||||
defaultSpeed=1.0
|
||||
estbilizationDelay=0
|
||||
hasEnable=true
|
||||
homingDirection=Backward
|
||||
homingType=Backward
|
||||
maxSpeed=20.0
|
||||
maxValue=75.0
|
||||
minSpeed=0.1
|
||||
minValue=-5.0
|
||||
offset=0.0
|
||||
precision=4
|
||||
resolution=0.00125
|
||||
rotation=false
|
||||
scale=1.0
|
||||
simulation=false
|
||||
startRetries=1
|
||||
unit=mm
|
||||
#Wed Mar 28 11:52:13 CEST 2018
|
||||
accessType=ReadWrite
|
||||
channel=MTEST-GOBBO\:MOT1
|
||||
defaultSpeed=1.0
|
||||
estbilizationDelay=0
|
||||
hasEnable=true
|
||||
homingDirection=Backward
|
||||
homingType=Backward
|
||||
maxSpeed=20.0
|
||||
maxValue=75.0
|
||||
minSpeed=0.001
|
||||
minValue=-5.0
|
||||
offset=0.0
|
||||
precision=4
|
||||
resolution=0.00125
|
||||
rotation=false
|
||||
scale=1.0
|
||||
simulation=false
|
||||
startRetries=2
|
||||
unit=mm
|
||||
|
||||
14
devices/mu.properties
Normal file
14
devices/mu.properties
Normal file
@@ -0,0 +1,14 @@
|
||||
#Thu Apr 12 11:02:07 CEST 2018
|
||||
defaultSpeed=10.0
|
||||
estbilizationDelay=0
|
||||
maxSpeed=10.0
|
||||
maxValue=360.0
|
||||
minSpeed=0.1
|
||||
minValue=-360.0
|
||||
offset=0.0
|
||||
precision=2
|
||||
resolution=NaN
|
||||
rotation=false
|
||||
scale=1.0
|
||||
startRetries=1
|
||||
unit=mm
|
||||
8
devices/mycv.properties
Normal file
8
devices/mycv.properties
Normal file
@@ -0,0 +1,8 @@
|
||||
#Thu Feb 15 17:19:47 CET 2018
|
||||
maxValue=100.0
|
||||
minValue=-100.0
|
||||
offset=0.0
|
||||
precision=2
|
||||
resolution=0.1
|
||||
scale=1.0
|
||||
unit=mm
|
||||
16
devices/mymot.properties
Normal file
16
devices/mymot.properties
Normal file
@@ -0,0 +1,16 @@
|
||||
#Thu Feb 15 17:16:13 CET 2018
|
||||
defaultSpeed=1.0
|
||||
estbilizationDelay=0
|
||||
hasEnable=false
|
||||
homingType=None
|
||||
maxSpeed=10.0
|
||||
maxValue=100.0
|
||||
minSpeed=0.01
|
||||
minValue=-100.0
|
||||
offset=0.0
|
||||
precision=2
|
||||
resolution=0.1
|
||||
rotation=false
|
||||
scale=1.0
|
||||
startRetries=1
|
||||
unit=mm
|
||||
9
devices/mypos.properties
Normal file
9
devices/mypos.properties
Normal file
@@ -0,0 +1,9 @@
|
||||
#Thu Feb 15 17:17:18 CET 2018
|
||||
maxValue=100.0
|
||||
minValue=100.0
|
||||
offset=0.0
|
||||
precision=2
|
||||
resolution=0.1
|
||||
rotation=false
|
||||
scale=1.0
|
||||
unit=mm
|
||||
14
devices/phi.properties
Normal file
14
devices/phi.properties
Normal file
@@ -0,0 +1,14 @@
|
||||
#Thu Apr 12 11:03:09 CEST 2018
|
||||
defaultSpeed=10.0
|
||||
estbilizationDelay=0
|
||||
maxSpeed=10.0
|
||||
maxValue=360.0
|
||||
minSpeed=0.1
|
||||
minValue=-360.0
|
||||
offset=0.0
|
||||
precision=2
|
||||
resolution=NaN
|
||||
rotation=false
|
||||
scale=1.0
|
||||
startRetries=1
|
||||
unit=mm
|
||||
24
devices/pipeline_server.properties
Normal file
24
devices/pipeline_server.properties
Normal file
@@ -0,0 +1,24 @@
|
||||
#Thu Mar 29 13:44:16 CEST 2018
|
||||
colormap=Flame
|
||||
colormapAutomatic=true
|
||||
colormapMax=NaN
|
||||
colormapMin=NaN
|
||||
flipHorizontally=false
|
||||
flipVertically=false
|
||||
grayscale=false
|
||||
invert=false
|
||||
rescaleFactor=1.0
|
||||
rescaleOffset=0.0
|
||||
roiHeight=-1
|
||||
roiWidth=-1
|
||||
roiX=0
|
||||
roiY=0
|
||||
rotation=0.0
|
||||
rotationCrop=false
|
||||
scale=1.0
|
||||
spatialCalOffsetX=0.0
|
||||
spatialCalOffsetY=0.0
|
||||
spatialCalScaleX=1.0
|
||||
spatialCalScaleY=1.0
|
||||
spatialCalUnits=mm
|
||||
transpose=false
|
||||
9
devices/pos_chi.properties
Normal file
9
devices/pos_chi.properties
Normal file
@@ -0,0 +1,9 @@
|
||||
#Fri Apr 06 16:39:10 CEST 2018
|
||||
maxValue=NaN
|
||||
minValue=NaN
|
||||
offset=0.0
|
||||
precision=-1
|
||||
resolution=NaN
|
||||
rotation=false
|
||||
scale=1.0
|
||||
unit=null
|
||||
9
devices/pos_delta.properties
Normal file
9
devices/pos_delta.properties
Normal file
@@ -0,0 +1,9 @@
|
||||
#Fri Apr 06 16:39:10 CEST 2018
|
||||
maxValue=NaN
|
||||
minValue=NaN
|
||||
offset=0.0
|
||||
precision=-1
|
||||
resolution=NaN
|
||||
rotation=false
|
||||
scale=1.0
|
||||
unit=null
|
||||
9
devices/pos_en.properties
Normal file
9
devices/pos_en.properties
Normal file
@@ -0,0 +1,9 @@
|
||||
#Fri Apr 06 16:39:10 CEST 2018
|
||||
maxValue=NaN
|
||||
minValue=NaN
|
||||
offset=0.0
|
||||
precision=-1
|
||||
resolution=NaN
|
||||
rotation=false
|
||||
scale=1.0
|
||||
unit=null
|
||||
9
devices/pos_eta.properties
Normal file
9
devices/pos_eta.properties
Normal file
@@ -0,0 +1,9 @@
|
||||
#Fri Apr 06 16:39:10 CEST 2018
|
||||
maxValue=NaN
|
||||
minValue=NaN
|
||||
offset=0.0
|
||||
precision=-1
|
||||
resolution=NaN
|
||||
rotation=false
|
||||
scale=1.0
|
||||
unit=null
|
||||
9
devices/pos_gam.properties
Normal file
9
devices/pos_gam.properties
Normal file
@@ -0,0 +1,9 @@
|
||||
#Fri Apr 06 16:39:10 CEST 2018
|
||||
maxValue=NaN
|
||||
minValue=NaN
|
||||
offset=0.0
|
||||
precision=-1
|
||||
resolution=NaN
|
||||
rotation=false
|
||||
scale=1.0
|
||||
unit=null
|
||||
9
devices/pos_mu.properties
Normal file
9
devices/pos_mu.properties
Normal file
@@ -0,0 +1,9 @@
|
||||
#Fri Apr 06 16:39:10 CEST 2018
|
||||
maxValue=NaN
|
||||
minValue=NaN
|
||||
offset=0.0
|
||||
precision=-1
|
||||
resolution=NaN
|
||||
rotation=false
|
||||
scale=1.0
|
||||
unit=null
|
||||
9
devices/pos_phi.properties
Normal file
9
devices/pos_phi.properties
Normal file
@@ -0,0 +1,9 @@
|
||||
#Fri Apr 06 16:39:10 CEST 2018
|
||||
maxValue=NaN
|
||||
minValue=NaN
|
||||
offset=0.0
|
||||
precision=-1
|
||||
resolution=NaN
|
||||
rotation=false
|
||||
scale=1.0
|
||||
unit=null
|
||||
@@ -1,8 +1,9 @@
|
||||
#Wed Nov 08 12:33:55 CET 2017
|
||||
maxValue=1000.0
|
||||
minValue=-1000.0
|
||||
offset=0.0
|
||||
precision=5
|
||||
resolution=Infinity
|
||||
scale=1.0
|
||||
unit=mm
|
||||
#Tue Feb 06 09:47:44 CET 2018
|
||||
maxValue=1000.0
|
||||
minValue=-1000.0
|
||||
offset=0.0
|
||||
precision=5
|
||||
resolution=Infinity
|
||||
rotation=false
|
||||
scale=1.0
|
||||
unit=mm
|
||||
|
||||
9
devices/scanner.properties
Normal file
9
devices/scanner.properties
Normal file
@@ -0,0 +1,9 @@
|
||||
#Tue Feb 06 14:46:43 CET 2018
|
||||
maxValue=NaN
|
||||
minValue=NaN
|
||||
offset=0.0
|
||||
precision=-1
|
||||
resolution=NaN
|
||||
rotation=false
|
||||
scale=1.0
|
||||
unit=null
|
||||
@@ -1,24 +1,24 @@
|
||||
#Thu Sep 21 17:13:36 CEST 2017
|
||||
colormap=Flame
|
||||
colormapAutomatic=true
|
||||
colormapMax=NaN
|
||||
colormapMin=NaN
|
||||
flipHorizontally=false
|
||||
flipVertically=false
|
||||
grayscale=false
|
||||
invert=false
|
||||
rescaleFactor=1.0
|
||||
rescaleOffset=0.0
|
||||
roiHeight=-1
|
||||
roiWidth=-1
|
||||
roiX=0
|
||||
roiY=0
|
||||
rotation=0.0
|
||||
rotationCrop=false
|
||||
scale=1.0
|
||||
spatialCalOffsetX=NaN
|
||||
spatialCalOffsetY=NaN
|
||||
spatialCalScaleX=NaN
|
||||
spatialCalScaleY=NaN
|
||||
spatialCalUnits=mm
|
||||
transpose=false
|
||||
#Fri Mar 02 09:23:50 CET 2018
|
||||
colormap=Flame
|
||||
colormapAutomatic=true
|
||||
colormapMax=200.0
|
||||
colormapMin=2.0
|
||||
flipHorizontally=false
|
||||
flipVertically=false
|
||||
grayscale=false
|
||||
invert=false
|
||||
rescaleFactor=1.0
|
||||
rescaleOffset=0.0
|
||||
roiHeight=-1
|
||||
roiWidth=-1
|
||||
roiX=0
|
||||
roiY=0
|
||||
rotation=0.0
|
||||
rotationCrop=false
|
||||
scale=1.0
|
||||
spatialCalOffsetX=-0.0
|
||||
spatialCalOffsetY=0.0
|
||||
spatialCalScaleX=-1.0
|
||||
spatialCalScaleY=1.0
|
||||
spatialCalUnits=mm
|
||||
transpose=false
|
||||
|
||||
243
plugins/Correlation.form
Normal file
243
plugins/Correlation.form
Normal file
@@ -0,0 +1,243 @@
|
||||
<?xml version="1.0" encoding="UTF-8" ?>
|
||||
|
||||
<Form version="1.5" maxVersion="1.9" type="org.netbeans.modules.form.forminfo.JPanelFormInfo">
|
||||
<AuxValues>
|
||||
<AuxValue name="FormSettings_autoResourcing" type="java.lang.Integer" value="0"/>
|
||||
<AuxValue name="FormSettings_autoSetComponentName" type="java.lang.Boolean" value="false"/>
|
||||
<AuxValue name="FormSettings_generateFQN" type="java.lang.Boolean" value="true"/>
|
||||
<AuxValue name="FormSettings_generateMnemonicsCode" type="java.lang.Boolean" value="false"/>
|
||||
<AuxValue name="FormSettings_i18nAutoMode" type="java.lang.Boolean" value="false"/>
|
||||
<AuxValue name="FormSettings_layoutCodeTarget" type="java.lang.Integer" value="1"/>
|
||||
<AuxValue name="FormSettings_listenerGenerationStyle" type="java.lang.Integer" value="0"/>
|
||||
<AuxValue name="FormSettings_variablesLocal" type="java.lang.Boolean" value="false"/>
|
||||
<AuxValue name="FormSettings_variablesModifier" type="java.lang.Integer" value="2"/>
|
||||
</AuxValues>
|
||||
|
||||
<Layout>
|
||||
<DimensionLayout dim="0">
|
||||
<Group type="103" groupAlignment="0" attributes="0">
|
||||
<Group type="102" alignment="1" attributes="0">
|
||||
<EmptySpace max="-2" attributes="0"/>
|
||||
<Component id="jPanel1" min="-2" max="-2" attributes="0"/>
|
||||
<EmptySpace max="-2" attributes="0"/>
|
||||
<Component id="plot" pref="371" max="32767" attributes="0"/>
|
||||
</Group>
|
||||
</Group>
|
||||
</DimensionLayout>
|
||||
<DimensionLayout dim="1">
|
||||
<Group type="103" groupAlignment="0" attributes="0">
|
||||
<Group type="102" alignment="0" attributes="0">
|
||||
<EmptySpace max="-2" attributes="0"/>
|
||||
<Group type="103" groupAlignment="0" attributes="0">
|
||||
<Component id="jPanel1" max="32767" attributes="0"/>
|
||||
<Group type="102" attributes="0">
|
||||
<Component id="plot" max="32767" attributes="0"/>
|
||||
<EmptySpace max="-2" attributes="0"/>
|
||||
</Group>
|
||||
</Group>
|
||||
</Group>
|
||||
</Group>
|
||||
</DimensionLayout>
|
||||
</Layout>
|
||||
<SubComponents>
|
||||
<Component class="ch.psi.pshell.plot.LinePlotJFree" name="plot">
|
||||
<Properties>
|
||||
<Property name="title" type="java.lang.String" value=""/>
|
||||
</Properties>
|
||||
</Component>
|
||||
<Container class="javax.swing.JPanel" name="jPanel1">
|
||||
|
||||
<Layout>
|
||||
<DimensionLayout dim="0">
|
||||
<Group type="103" groupAlignment="0" attributes="0">
|
||||
<Group type="102" attributes="0">
|
||||
<EmptySpace max="-2" attributes="0"/>
|
||||
<Group type="103" groupAlignment="0" attributes="0">
|
||||
<Component id="jLabel7" linkSize="3" alignment="1" min="-2" max="-2" attributes="0"/>
|
||||
<Component id="jLabel6" linkSize="3" alignment="1" min="-2" max="-2" attributes="0"/>
|
||||
<Component id="jLabel5" linkSize="3" alignment="1" min="-2" max="-2" attributes="0"/>
|
||||
<Component id="jLabel3" linkSize="1" alignment="1" min="-2" max="-2" attributes="0"/>
|
||||
<Component id="jLabel2" linkSize="1" alignment="1" min="-2" max="-2" attributes="0"/>
|
||||
<Component id="jLabel1" linkSize="1" alignment="1" min="-2" max="-2" attributes="0"/>
|
||||
<Component id="jLabel4" linkSize="1" alignment="1" min="-2" max="-2" attributes="0"/>
|
||||
</Group>
|
||||
<EmptySpace max="-2" attributes="0"/>
|
||||
<Group type="103" groupAlignment="0" attributes="0">
|
||||
<Component id="textDevX" linkSize="4" alignment="0" pref="250" max="32767" attributes="0"/>
|
||||
<Component id="textDevY" linkSize="4" alignment="0" pref="250" max="32767" attributes="0"/>
|
||||
<Component id="textCorrelation" linkSize="4" alignment="0" pref="250" max="32767" attributes="0"/>
|
||||
<Component id="spinnerInterval" linkSize="2" alignment="0" min="-2" max="-2" attributes="0"/>
|
||||
<Component id="spinnerWindow" linkSize="2" alignment="0" min="-2" max="-2" attributes="0"/>
|
||||
<Component id="checkBS" alignment="0" min="-2" max="-2" attributes="0"/>
|
||||
<Component id="textLinear" linkSize="4" alignment="0" pref="250" max="32767" attributes="0"/>
|
||||
<Component id="textQuadratic" linkSize="4" alignment="0" pref="250" max="32767" attributes="0"/>
|
||||
<Component id="checkLinear" alignment="0" min="-2" max="-2" attributes="0"/>
|
||||
<Component id="checkQuadratic" alignment="0" min="-2" max="-2" attributes="0"/>
|
||||
</Group>
|
||||
<EmptySpace min="0" pref="0" max="32767" attributes="0"/>
|
||||
</Group>
|
||||
<Group type="102" alignment="0" attributes="0">
|
||||
<EmptySpace max="32767" attributes="0"/>
|
||||
<Component id="buttonStart" min="-2" pref="189" max="-2" attributes="0"/>
|
||||
<EmptySpace max="32767" attributes="0"/>
|
||||
</Group>
|
||||
</Group>
|
||||
</DimensionLayout>
|
||||
<DimensionLayout dim="1">
|
||||
<Group type="103" groupAlignment="0" attributes="0">
|
||||
<Group type="102" attributes="0">
|
||||
<EmptySpace max="-2" attributes="0"/>
|
||||
<Group type="103" groupAlignment="3" attributes="0">
|
||||
<Component id="jLabel1" alignment="3" min="-2" max="-2" attributes="0"/>
|
||||
<Component id="textDevX" alignment="3" min="-2" max="-2" attributes="0"/>
|
||||
</Group>
|
||||
<EmptySpace max="-2" attributes="0"/>
|
||||
<Group type="103" groupAlignment="3" attributes="0">
|
||||
<Component id="jLabel2" alignment="3" min="-2" max="-2" attributes="0"/>
|
||||
<Component id="textDevY" alignment="3" min="-2" max="-2" attributes="0"/>
|
||||
</Group>
|
||||
<EmptySpace max="-2" attributes="0"/>
|
||||
<Component id="checkBS" min="-2" max="-2" attributes="0"/>
|
||||
<EmptySpace type="separate" max="-2" attributes="0"/>
|
||||
<Group type="103" groupAlignment="3" attributes="0">
|
||||
<Component id="jLabel3" alignment="3" min="-2" max="-2" attributes="0"/>
|
||||
<Component id="spinnerInterval" alignment="3" min="-2" max="-2" attributes="0"/>
|
||||
</Group>
|
||||
<EmptySpace max="-2" attributes="0"/>
|
||||
<Group type="103" groupAlignment="3" attributes="0">
|
||||
<Component id="jLabel4" alignment="3" min="-2" max="-2" attributes="0"/>
|
||||
<Component id="spinnerWindow" alignment="3" min="-2" max="-2" attributes="0"/>
|
||||
</Group>
|
||||
<EmptySpace type="separate" max="-2" attributes="0"/>
|
||||
<Component id="checkLinear" min="-2" max="-2" attributes="0"/>
|
||||
<EmptySpace max="-2" attributes="0"/>
|
||||
<Component id="checkQuadratic" min="-2" max="-2" attributes="0"/>
|
||||
<EmptySpace pref="42" max="32767" attributes="0"/>
|
||||
<Group type="103" groupAlignment="2" attributes="0">
|
||||
<Component id="textCorrelation" alignment="2" min="-2" max="-2" attributes="0"/>
|
||||
<Component id="jLabel5" alignment="2" min="-2" max="-2" attributes="0"/>
|
||||
</Group>
|
||||
<EmptySpace type="unrelated" max="-2" attributes="0"/>
|
||||
<Group type="103" groupAlignment="2" attributes="0">
|
||||
<Component id="jLabel6" alignment="2" min="-2" max="-2" attributes="0"/>
|
||||
<Component id="textLinear" alignment="2" min="-2" max="-2" attributes="0"/>
|
||||
</Group>
|
||||
<EmptySpace type="unrelated" max="-2" attributes="0"/>
|
||||
<Group type="103" groupAlignment="2" attributes="0">
|
||||
<Component id="jLabel7" alignment="2" min="-2" max="-2" attributes="0"/>
|
||||
<Component id="textQuadratic" alignment="2" min="-2" max="-2" attributes="0"/>
|
||||
</Group>
|
||||
<EmptySpace pref="42" max="32767" attributes="0"/>
|
||||
<Component id="buttonStart" min="-2" max="-2" attributes="0"/>
|
||||
<EmptySpace min="-2" pref="45" max="-2" attributes="0"/>
|
||||
</Group>
|
||||
</Group>
|
||||
</DimensionLayout>
|
||||
</Layout>
|
||||
<SubComponents>
|
||||
<Component class="javax.swing.JLabel" name="jLabel1">
|
||||
<Properties>
|
||||
<Property name="horizontalAlignment" type="int" value="11"/>
|
||||
<Property name="text" type="java.lang.String" value="X device:"/>
|
||||
</Properties>
|
||||
</Component>
|
||||
<Component class="javax.swing.JTextField" name="textDevX">
|
||||
</Component>
|
||||
<Component class="javax.swing.JTextField" name="textDevY">
|
||||
</Component>
|
||||
<Component class="javax.swing.JLabel" name="jLabel2">
|
||||
<Properties>
|
||||
<Property name="horizontalAlignment" type="int" value="11"/>
|
||||
<Property name="text" type="java.lang.String" value="Y device:"/>
|
||||
</Properties>
|
||||
</Component>
|
||||
<Component class="javax.swing.JLabel" name="jLabel3">
|
||||
<Properties>
|
||||
<Property name="horizontalAlignment" type="int" value="11"/>
|
||||
<Property name="text" type="java.lang.String" value="Interval (s):"/>
|
||||
</Properties>
|
||||
</Component>
|
||||
<Component class="javax.swing.JSpinner" name="spinnerInterval">
|
||||
<Properties>
|
||||
<Property name="model" type="javax.swing.SpinnerModel" editor="org.netbeans.modules.form.editors2.SpinnerModelEditor">
|
||||
<SpinnerModel initial="0.1" minimum="0.001" numberType="java.lang.Double" stepSize="1.0" type="number"/>
|
||||
</Property>
|
||||
</Properties>
|
||||
</Component>
|
||||
<Component class="javax.swing.JLabel" name="jLabel4">
|
||||
<Properties>
|
||||
<Property name="horizontalAlignment" type="int" value="11"/>
|
||||
<Property name="text" type="java.lang.String" value="Window size:"/>
|
||||
</Properties>
|
||||
</Component>
|
||||
<Component class="javax.swing.JSpinner" name="spinnerWindow">
|
||||
<Properties>
|
||||
<Property name="model" type="javax.swing.SpinnerModel" editor="org.netbeans.modules.form.editors2.SpinnerModelEditor">
|
||||
<SpinnerModel initial="50" minimum="3" numberType="java.lang.Integer" stepSize="1" type="number"/>
|
||||
</Property>
|
||||
</Properties>
|
||||
</Component>
|
||||
<Component class="javax.swing.JButton" name="buttonStart">
|
||||
<Properties>
|
||||
<Property name="text" type="java.lang.String" value="Start"/>
|
||||
</Properties>
|
||||
<Events>
|
||||
<EventHandler event="actionPerformed" listener="java.awt.event.ActionListener" parameters="java.awt.event.ActionEvent" handler="buttonStartActionPerformed"/>
|
||||
</Events>
|
||||
</Component>
|
||||
<Component class="javax.swing.JLabel" name="jLabel5">
|
||||
<Properties>
|
||||
<Property name="horizontalAlignment" type="int" value="4"/>
|
||||
<Property name="text" type="java.lang.String" value="Correlation:"/>
|
||||
</Properties>
|
||||
</Component>
|
||||
<Component class="javax.swing.JTextField" name="textCorrelation">
|
||||
<Properties>
|
||||
<Property name="editable" type="boolean" value="false"/>
|
||||
<Property name="horizontalAlignment" type="int" value="0"/>
|
||||
</Properties>
|
||||
</Component>
|
||||
<Component class="javax.swing.JCheckBox" name="checkBS">
|
||||
<Properties>
|
||||
<Property name="text" type="java.lang.String" value="Beam synchronous"/>
|
||||
</Properties>
|
||||
</Component>
|
||||
<Component class="javax.swing.JLabel" name="jLabel6">
|
||||
<Properties>
|
||||
<Property name="horizontalAlignment" type="int" value="4"/>
|
||||
<Property name="text" type="java.lang.String" value="Liner fit:"/>
|
||||
</Properties>
|
||||
</Component>
|
||||
<Component class="javax.swing.JTextField" name="textLinear">
|
||||
<Properties>
|
||||
<Property name="editable" type="boolean" value="false"/>
|
||||
<Property name="horizontalAlignment" type="int" value="0"/>
|
||||
</Properties>
|
||||
</Component>
|
||||
<Component class="javax.swing.JLabel" name="jLabel7">
|
||||
<Properties>
|
||||
<Property name="horizontalAlignment" type="int" value="4"/>
|
||||
<Property name="text" type="java.lang.String" value="Quadratric fit:"/>
|
||||
</Properties>
|
||||
</Component>
|
||||
<Component class="javax.swing.JTextField" name="textQuadratic">
|
||||
<Properties>
|
||||
<Property name="editable" type="boolean" value="false"/>
|
||||
<Property name="horizontalAlignment" type="int" value="0"/>
|
||||
</Properties>
|
||||
</Component>
|
||||
<Component class="javax.swing.JCheckBox" name="checkLinear">
|
||||
<Properties>
|
||||
<Property name="selected" type="boolean" value="true"/>
|
||||
<Property name="text" type="java.lang.String" value="Linear fit"/>
|
||||
</Properties>
|
||||
</Component>
|
||||
<Component class="javax.swing.JCheckBox" name="checkQuadratic">
|
||||
<Properties>
|
||||
<Property name="text" type="java.lang.String" value="Quadratic fit"/>
|
||||
</Properties>
|
||||
</Component>
|
||||
</SubComponents>
|
||||
</Container>
|
||||
</SubComponents>
|
||||
</Form>
|
||||
375
plugins/Correlation.java
Normal file
375
plugins/Correlation.java
Normal file
@@ -0,0 +1,375 @@
|
||||
/*
|
||||
* Copyright (c) 2014 Paul Scherrer Institute. All rights reserved.
|
||||
*/
|
||||
|
||||
import ch.psi.pshell.plot.Plot;
|
||||
import ch.psi.pshell.ui.App;
|
||||
import ch.psi.pshell.ui.Panel;
|
||||
import ch.psi.utils.State;
|
||||
import java.awt.Component;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
|
||||
/**
|
||||
*
|
||||
*/
|
||||
public class Correlation extends Panel {
|
||||
|
||||
public Correlation() {
|
||||
initComponents();
|
||||
this.setPersistedComponents(new Component[]{textDevX, textDevY, spinnerInterval, spinnerWindow, checkBS});
|
||||
plot.getAxis(Plot.AxisId.X).setLabel(null);
|
||||
plot.getAxis(Plot.AxisId.Y).setLabel(null);
|
||||
}
|
||||
|
||||
//Overridable callbacks
|
||||
@Override
|
||||
public void onInitialize(int runCount) {
|
||||
super.onInitialize(runCount);
|
||||
this.startTimer(100, 10);
|
||||
if (App.hasArgument("dx")) {
|
||||
textDevX.setText(App.getArgumentValue("dx"));
|
||||
}
|
||||
if (App.hasArgument("dy")) {
|
||||
textDevY.setText(App.getArgumentValue("dy"));
|
||||
}
|
||||
try{
|
||||
if (App.hasArgument("bs")) {
|
||||
checkBS.setSelected(Boolean.valueOf(App.getArgumentValue("bs")));
|
||||
}
|
||||
} catch (Exception ex){
|
||||
System.err.println(ex.getMessage());
|
||||
}
|
||||
try{
|
||||
if (App.hasArgument("interval")) {
|
||||
spinnerInterval.setValue(Double.valueOf(App.getArgumentValue("interval")));
|
||||
}
|
||||
} catch (Exception ex){
|
||||
System.err.println(ex.getMessage());
|
||||
}
|
||||
try{
|
||||
if (App.hasArgument("window")) {
|
||||
spinnerWindow.setValue(Integer.valueOf(App.getArgumentValue("window")));
|
||||
}
|
||||
} catch (Exception ex){
|
||||
System.err.println(ex.getMessage());
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void onTimer(){
|
||||
if (isRunning()){
|
||||
updateResults();
|
||||
}
|
||||
}
|
||||
|
||||
//DecimalFormat formatter = new DecimalFormat("0.##E0");
|
||||
void updateResults(){
|
||||
try{
|
||||
textCorrelation.setText(String.format("%1.4f", Double.valueOf((Double)getContext().getInterpreterVariable("corr"))));
|
||||
} catch (Exception ex){
|
||||
textCorrelation.setText("");
|
||||
}
|
||||
|
||||
if (checkLinear.isSelected()){
|
||||
try{
|
||||
List pars = (List)getContext().getInterpreterVariable("pars_lin");
|
||||
//textLinear.setText(String.format("%1.3fx%+1.3f", (Double)(pars.get(1)), (Double)(pars.get(0))));
|
||||
textLinear.setText(String.format("%1.6gx%+1.6g",pars.get(1), pars.get(0)));
|
||||
} catch (Exception ex){
|
||||
textLinear.setText("");
|
||||
}
|
||||
}
|
||||
|
||||
if (checkQuadratic.isSelected()){
|
||||
try{
|
||||
List pars = (List)getContext().getInterpreterVariable("pars_quad");
|
||||
//textQuadratic.setText(String.format("%1.2fx\u00B2 %+1.2fx%+1.2f", (Double)(pars.get(0)), (Double)(pars.get(1)), (Double)(pars.get(0))));
|
||||
textQuadratic.setText(String.format("%1.3gx\u00B2%+1.3gx%+1.3g", pars.get(0), pars.get(1), pars.get(0)));
|
||||
//textQuadratic.setText(formatter.format(pars.get(2))+ formatter.format(pars.get(1)) + formatter.format(pars.get(0)));
|
||||
|
||||
} catch (Exception ex){
|
||||
textQuadratic.setText("");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onStateChange(State state, State former) {
|
||||
buttonStart.setEnabled((state==State.Ready) || (state==State.Busy));
|
||||
if (isRunning()){
|
||||
if (state==State.Ready){
|
||||
buttonStart.setText("Start");
|
||||
}
|
||||
} else {
|
||||
if (state==State.Busy){
|
||||
buttonStart.setText("Stop");
|
||||
}
|
||||
}
|
||||
textDevX.setEnabled(state==State.Ready);
|
||||
textDevY.setEnabled(state==State.Ready);
|
||||
spinnerInterval.setEnabled(state==State.Ready);
|
||||
spinnerWindow.setEnabled(state==State.Ready);
|
||||
checkBS.setEnabled(state==State.Ready);
|
||||
checkLinear.setEnabled(state==State.Ready);
|
||||
checkQuadratic.setEnabled(state==State.Ready);
|
||||
|
||||
if ((former==State.Initializing) && (state == State.Ready)){
|
||||
if (App.hasArgument("start")) {
|
||||
buttonStartActionPerformed(null);
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
boolean isRunning(){
|
||||
return buttonStart.getText().equals("Stop");
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onExecutedFile(String fileName, Object result) {
|
||||
}
|
||||
|
||||
|
||||
//Callback to perform update - in event thread
|
||||
@Override
|
||||
protected void doUpdate() {
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
// <editor-fold defaultstate="collapsed" desc="Generated Code">//GEN-BEGIN:initComponents
|
||||
private void initComponents() {
|
||||
|
||||
plot = new ch.psi.pshell.plot.LinePlotJFree();
|
||||
jPanel1 = new javax.swing.JPanel();
|
||||
jLabel1 = new javax.swing.JLabel();
|
||||
textDevX = new javax.swing.JTextField();
|
||||
textDevY = new javax.swing.JTextField();
|
||||
jLabel2 = new javax.swing.JLabel();
|
||||
jLabel3 = new javax.swing.JLabel();
|
||||
spinnerInterval = new javax.swing.JSpinner();
|
||||
jLabel4 = new javax.swing.JLabel();
|
||||
spinnerWindow = new javax.swing.JSpinner();
|
||||
buttonStart = new javax.swing.JButton();
|
||||
jLabel5 = new javax.swing.JLabel();
|
||||
textCorrelation = new javax.swing.JTextField();
|
||||
checkBS = new javax.swing.JCheckBox();
|
||||
jLabel6 = new javax.swing.JLabel();
|
||||
textLinear = new javax.swing.JTextField();
|
||||
jLabel7 = new javax.swing.JLabel();
|
||||
textQuadratic = new javax.swing.JTextField();
|
||||
checkLinear = new javax.swing.JCheckBox();
|
||||
checkQuadratic = new javax.swing.JCheckBox();
|
||||
|
||||
plot.setTitle("");
|
||||
|
||||
jLabel1.setHorizontalAlignment(javax.swing.SwingConstants.TRAILING);
|
||||
jLabel1.setText("X device:");
|
||||
|
||||
jLabel2.setHorizontalAlignment(javax.swing.SwingConstants.TRAILING);
|
||||
jLabel2.setText("Y device:");
|
||||
|
||||
jLabel3.setHorizontalAlignment(javax.swing.SwingConstants.TRAILING);
|
||||
jLabel3.setText("Interval (s):");
|
||||
|
||||
spinnerInterval.setModel(new javax.swing.SpinnerNumberModel(Double.valueOf(0.1d), Double.valueOf(0.001d), null, Double.valueOf(1.0d)));
|
||||
|
||||
jLabel4.setHorizontalAlignment(javax.swing.SwingConstants.TRAILING);
|
||||
jLabel4.setText("Window size:");
|
||||
|
||||
spinnerWindow.setModel(new javax.swing.SpinnerNumberModel(Integer.valueOf(50), Integer.valueOf(3), null, Integer.valueOf(1)));
|
||||
|
||||
buttonStart.setText("Start");
|
||||
buttonStart.addActionListener(new java.awt.event.ActionListener() {
|
||||
public void actionPerformed(java.awt.event.ActionEvent evt) {
|
||||
buttonStartActionPerformed(evt);
|
||||
}
|
||||
});
|
||||
|
||||
jLabel5.setHorizontalAlignment(javax.swing.SwingConstants.RIGHT);
|
||||
jLabel5.setText("Correlation:");
|
||||
|
||||
textCorrelation.setEditable(false);
|
||||
textCorrelation.setHorizontalAlignment(javax.swing.JTextField.CENTER);
|
||||
|
||||
checkBS.setText("Beam synchronous");
|
||||
|
||||
jLabel6.setHorizontalAlignment(javax.swing.SwingConstants.RIGHT);
|
||||
jLabel6.setText("Liner fit:");
|
||||
|
||||
textLinear.setEditable(false);
|
||||
textLinear.setHorizontalAlignment(javax.swing.JTextField.CENTER);
|
||||
|
||||
jLabel7.setHorizontalAlignment(javax.swing.SwingConstants.RIGHT);
|
||||
jLabel7.setText("Quadratric fit:");
|
||||
|
||||
textQuadratic.setEditable(false);
|
||||
textQuadratic.setHorizontalAlignment(javax.swing.JTextField.CENTER);
|
||||
|
||||
checkLinear.setSelected(true);
|
||||
checkLinear.setText("Linear fit");
|
||||
|
||||
checkQuadratic.setText("Quadratic fit");
|
||||
|
||||
javax.swing.GroupLayout jPanel1Layout = new javax.swing.GroupLayout(jPanel1);
|
||||
jPanel1.setLayout(jPanel1Layout);
|
||||
jPanel1Layout.setHorizontalGroup(
|
||||
jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
|
||||
.addGroup(jPanel1Layout.createSequentialGroup()
|
||||
.addContainerGap()
|
||||
.addGroup(jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
|
||||
.addComponent(jLabel7, javax.swing.GroupLayout.Alignment.TRAILING)
|
||||
.addComponent(jLabel6, javax.swing.GroupLayout.Alignment.TRAILING)
|
||||
.addComponent(jLabel5, javax.swing.GroupLayout.Alignment.TRAILING)
|
||||
.addComponent(jLabel3, javax.swing.GroupLayout.Alignment.TRAILING)
|
||||
.addComponent(jLabel2, javax.swing.GroupLayout.Alignment.TRAILING)
|
||||
.addComponent(jLabel1, javax.swing.GroupLayout.Alignment.TRAILING)
|
||||
.addComponent(jLabel4, javax.swing.GroupLayout.Alignment.TRAILING))
|
||||
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
|
||||
.addGroup(jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
|
||||
.addComponent(textDevX, javax.swing.GroupLayout.DEFAULT_SIZE, 250, Short.MAX_VALUE)
|
||||
.addComponent(textDevY, javax.swing.GroupLayout.DEFAULT_SIZE, 250, Short.MAX_VALUE)
|
||||
.addComponent(textCorrelation, javax.swing.GroupLayout.DEFAULT_SIZE, 250, Short.MAX_VALUE)
|
||||
.addComponent(spinnerInterval, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
|
||||
.addComponent(spinnerWindow, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
|
||||
.addComponent(checkBS)
|
||||
.addComponent(textLinear, javax.swing.GroupLayout.DEFAULT_SIZE, 250, Short.MAX_VALUE)
|
||||
.addComponent(textQuadratic, javax.swing.GroupLayout.DEFAULT_SIZE, 250, Short.MAX_VALUE)
|
||||
.addComponent(checkLinear)
|
||||
.addComponent(checkQuadratic))
|
||||
.addGap(0, 0, Short.MAX_VALUE))
|
||||
.addGroup(jPanel1Layout.createSequentialGroup()
|
||||
.addContainerGap(javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
|
||||
.addComponent(buttonStart, javax.swing.GroupLayout.PREFERRED_SIZE, 189, javax.swing.GroupLayout.PREFERRED_SIZE)
|
||||
.addContainerGap(javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE))
|
||||
);
|
||||
|
||||
jPanel1Layout.linkSize(javax.swing.SwingConstants.HORIZONTAL, new java.awt.Component[] {jLabel1, jLabel2, jLabel3, jLabel4});
|
||||
|
||||
jPanel1Layout.linkSize(javax.swing.SwingConstants.HORIZONTAL, new java.awt.Component[] {spinnerInterval, spinnerWindow});
|
||||
|
||||
jPanel1Layout.linkSize(javax.swing.SwingConstants.HORIZONTAL, new java.awt.Component[] {jLabel5, jLabel6, jLabel7});
|
||||
|
||||
jPanel1Layout.linkSize(javax.swing.SwingConstants.HORIZONTAL, new java.awt.Component[] {textCorrelation, textDevX, textDevY, textLinear, textQuadratic});
|
||||
|
||||
jPanel1Layout.setVerticalGroup(
|
||||
jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
|
||||
.addGroup(jPanel1Layout.createSequentialGroup()
|
||||
.addContainerGap()
|
||||
.addGroup(jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
|
||||
.addComponent(jLabel1)
|
||||
.addComponent(textDevX, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE))
|
||||
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
|
||||
.addGroup(jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
|
||||
.addComponent(jLabel2)
|
||||
.addComponent(textDevY, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE))
|
||||
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
|
||||
.addComponent(checkBS)
|
||||
.addGap(18, 18, 18)
|
||||
.addGroup(jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
|
||||
.addComponent(jLabel3)
|
||||
.addComponent(spinnerInterval, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE))
|
||||
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
|
||||
.addGroup(jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
|
||||
.addComponent(jLabel4)
|
||||
.addComponent(spinnerWindow, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE))
|
||||
.addGap(18, 18, 18)
|
||||
.addComponent(checkLinear)
|
||||
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
|
||||
.addComponent(checkQuadratic)
|
||||
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED, 42, Short.MAX_VALUE)
|
||||
.addGroup(jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.CENTER)
|
||||
.addComponent(textCorrelation, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
|
||||
.addComponent(jLabel5))
|
||||
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED)
|
||||
.addGroup(jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.CENTER)
|
||||
.addComponent(jLabel6)
|
||||
.addComponent(textLinear, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE))
|
||||
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED)
|
||||
.addGroup(jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.CENTER)
|
||||
.addComponent(jLabel7)
|
||||
.addComponent(textQuadratic, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE))
|
||||
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED, 42, Short.MAX_VALUE)
|
||||
.addComponent(buttonStart)
|
||||
.addGap(45, 45, 45))
|
||||
);
|
||||
|
||||
javax.swing.GroupLayout layout = new javax.swing.GroupLayout(this);
|
||||
this.setLayout(layout);
|
||||
layout.setHorizontalGroup(
|
||||
layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
|
||||
.addGroup(javax.swing.GroupLayout.Alignment.TRAILING, layout.createSequentialGroup()
|
||||
.addContainerGap()
|
||||
.addComponent(jPanel1, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
|
||||
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
|
||||
.addComponent(plot, javax.swing.GroupLayout.DEFAULT_SIZE, 371, Short.MAX_VALUE))
|
||||
);
|
||||
layout.setVerticalGroup(
|
||||
layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
|
||||
.addGroup(layout.createSequentialGroup()
|
||||
.addContainerGap()
|
||||
.addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
|
||||
.addComponent(jPanel1, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
|
||||
.addGroup(layout.createSequentialGroup()
|
||||
.addComponent(plot, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
|
||||
.addContainerGap())))
|
||||
);
|
||||
}// </editor-fold>//GEN-END:initComponents
|
||||
|
||||
private void buttonStartActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_buttonStartActionPerformed
|
||||
try {
|
||||
if (isRunning()){
|
||||
abort();
|
||||
updateResults();
|
||||
//buttonStart.setText("Start");
|
||||
} else {
|
||||
textCorrelation.setText("");
|
||||
textLinear.setText("");
|
||||
textQuadratic.setText("");
|
||||
HashMap args = new HashMap();
|
||||
args.put("dx", textDevX.getText());
|
||||
args.put("dy", textDevY.getText());
|
||||
args.put("interval", spinnerInterval.getValue());
|
||||
args.put("window", spinnerWindow.getValue());
|
||||
args.put("bs", checkBS.isSelected());
|
||||
args.put("linear_fit", checkLinear.isSelected());
|
||||
args.put("quadratic_fit", checkQuadratic.isSelected());
|
||||
|
||||
args.put("p", plot);
|
||||
runAsync("Correlation/Correlation", args).handle((ok, ex) -> {
|
||||
if (ex != null) {
|
||||
ex.printStackTrace();
|
||||
}
|
||||
return ok;
|
||||
});
|
||||
///buttonStart.setText("Stop");
|
||||
}
|
||||
|
||||
} catch (Exception ex) {
|
||||
showException(ex);
|
||||
}
|
||||
}//GEN-LAST:event_buttonStartActionPerformed
|
||||
|
||||
// Variables declaration - do not modify//GEN-BEGIN:variables
|
||||
private javax.swing.JButton buttonStart;
|
||||
private javax.swing.JCheckBox checkBS;
|
||||
private javax.swing.JCheckBox checkLinear;
|
||||
private javax.swing.JCheckBox checkQuadratic;
|
||||
private javax.swing.JLabel jLabel1;
|
||||
private javax.swing.JLabel jLabel2;
|
||||
private javax.swing.JLabel jLabel3;
|
||||
private javax.swing.JLabel jLabel4;
|
||||
private javax.swing.JLabel jLabel5;
|
||||
private javax.swing.JLabel jLabel6;
|
||||
private javax.swing.JLabel jLabel7;
|
||||
private javax.swing.JPanel jPanel1;
|
||||
private ch.psi.pshell.plot.LinePlotJFree plot;
|
||||
private javax.swing.JSpinner spinnerInterval;
|
||||
private javax.swing.JSpinner spinnerWindow;
|
||||
private javax.swing.JTextField textCorrelation;
|
||||
private javax.swing.JTextField textDevX;
|
||||
private javax.swing.JTextField textDevY;
|
||||
private javax.swing.JTextField textLinear;
|
||||
private javax.swing.JTextField textQuadratic;
|
||||
// End of variables declaration//GEN-END:variables
|
||||
}
|
||||
267
plugins/Correlation2.form
Normal file
267
plugins/Correlation2.form
Normal file
@@ -0,0 +1,267 @@
|
||||
<?xml version="1.0" encoding="UTF-8" ?>
|
||||
|
||||
<Form version="1.5" maxVersion="1.9" type="org.netbeans.modules.form.forminfo.JPanelFormInfo">
|
||||
<AuxValues>
|
||||
<AuxValue name="FormSettings_autoResourcing" type="java.lang.Integer" value="0"/>
|
||||
<AuxValue name="FormSettings_autoSetComponentName" type="java.lang.Boolean" value="false"/>
|
||||
<AuxValue name="FormSettings_generateFQN" type="java.lang.Boolean" value="true"/>
|
||||
<AuxValue name="FormSettings_generateMnemonicsCode" type="java.lang.Boolean" value="false"/>
|
||||
<AuxValue name="FormSettings_i18nAutoMode" type="java.lang.Boolean" value="false"/>
|
||||
<AuxValue name="FormSettings_layoutCodeTarget" type="java.lang.Integer" value="1"/>
|
||||
<AuxValue name="FormSettings_listenerGenerationStyle" type="java.lang.Integer" value="0"/>
|
||||
<AuxValue name="FormSettings_variablesLocal" type="java.lang.Boolean" value="false"/>
|
||||
<AuxValue name="FormSettings_variablesModifier" type="java.lang.Integer" value="2"/>
|
||||
</AuxValues>
|
||||
|
||||
<Layout>
|
||||
<DimensionLayout dim="0">
|
||||
<Group type="103" groupAlignment="0" attributes="0">
|
||||
<Group type="102" alignment="1" attributes="0">
|
||||
<EmptySpace max="-2" attributes="0"/>
|
||||
<Component id="jPanel1" min="-2" max="-2" attributes="0"/>
|
||||
<EmptySpace max="-2" attributes="0"/>
|
||||
<Component id="plot" pref="345" max="32767" attributes="0"/>
|
||||
</Group>
|
||||
</Group>
|
||||
</DimensionLayout>
|
||||
<DimensionLayout dim="1">
|
||||
<Group type="103" groupAlignment="0" attributes="0">
|
||||
<Group type="102" alignment="0" attributes="0">
|
||||
<EmptySpace max="-2" attributes="0"/>
|
||||
<Group type="103" groupAlignment="0" attributes="0">
|
||||
<Component id="jPanel1" max="32767" attributes="0"/>
|
||||
<Group type="102" attributes="0">
|
||||
<Component id="plot" max="32767" attributes="0"/>
|
||||
<EmptySpace max="-2" attributes="0"/>
|
||||
</Group>
|
||||
</Group>
|
||||
</Group>
|
||||
</Group>
|
||||
</DimensionLayout>
|
||||
</Layout>
|
||||
<SubComponents>
|
||||
<Component class="ch.psi.pshell.plot.LinePlotJFree" name="plot">
|
||||
<Properties>
|
||||
<Property name="title" type="java.lang.String" value=""/>
|
||||
</Properties>
|
||||
</Component>
|
||||
<Container class="javax.swing.JPanel" name="jPanel1">
|
||||
|
||||
<Layout>
|
||||
<DimensionLayout dim="0">
|
||||
<Group type="103" groupAlignment="0" attributes="0">
|
||||
<Group type="102" alignment="0" attributes="0">
|
||||
<EmptySpace max="32767" attributes="0"/>
|
||||
<Component id="buttonStart" min="-2" pref="189" max="-2" attributes="0"/>
|
||||
<EmptySpace max="32767" attributes="0"/>
|
||||
</Group>
|
||||
<Group type="102" alignment="0" attributes="0">
|
||||
<EmptySpace max="-2" attributes="0"/>
|
||||
<Group type="103" groupAlignment="0" attributes="0">
|
||||
<Component id="jLabel7" linkSize="3" alignment="1" min="-2" max="-2" attributes="0"/>
|
||||
<Component id="jLabel6" linkSize="3" alignment="1" min="-2" max="-2" attributes="0"/>
|
||||
<Component id="jLabel5" linkSize="3" alignment="1" min="-2" max="-2" attributes="0"/>
|
||||
<Component id="jLabel3" linkSize="1" alignment="1" min="-2" max="-2" attributes="0"/>
|
||||
<Component id="jLabel2" linkSize="1" alignment="1" min="-2" max="-2" attributes="0"/>
|
||||
<Component id="jLabel1" linkSize="1" alignment="1" min="-2" max="-2" attributes="0"/>
|
||||
<Component id="jLabel4" linkSize="1" alignment="1" min="-2" max="-2" attributes="0"/>
|
||||
</Group>
|
||||
<EmptySpace max="-2" attributes="0"/>
|
||||
<Group type="103" groupAlignment="0" attributes="0">
|
||||
<Group type="102" attributes="0">
|
||||
<Group type="103" groupAlignment="0" attributes="0">
|
||||
<Component id="comboTypeY" min="-2" max="-2" attributes="0"/>
|
||||
<Component id="comboTypeX" min="-2" max="-2" attributes="0"/>
|
||||
<Component id="textCorrelation" linkSize="4" alignment="0" pref="250" max="32767" attributes="0"/>
|
||||
<Component id="spinnerInterval" linkSize="2" alignment="0" min="-2" max="-2" attributes="0"/>
|
||||
<Component id="spinnerWindow" linkSize="2" alignment="0" min="-2" max="-2" attributes="0"/>
|
||||
<Component id="textLinear" linkSize="4" alignment="0" pref="250" max="32767" attributes="0"/>
|
||||
<Component id="textQuadratic" linkSize="4" alignment="0" pref="250" max="32767" attributes="0"/>
|
||||
<Component id="checkLinear" alignment="0" min="-2" max="-2" attributes="0"/>
|
||||
<Component id="checkQuadratic" alignment="0" min="-2" max="-2" attributes="0"/>
|
||||
</Group>
|
||||
<EmptySpace min="26" max="32767" attributes="0"/>
|
||||
</Group>
|
||||
<Component id="textDevX" max="32767" attributes="0"/>
|
||||
<Component id="textDevY" alignment="0" max="32767" attributes="0"/>
|
||||
</Group>
|
||||
</Group>
|
||||
</Group>
|
||||
</DimensionLayout>
|
||||
<DimensionLayout dim="1">
|
||||
<Group type="103" groupAlignment="0" attributes="0">
|
||||
<Group type="102" attributes="0">
|
||||
<EmptySpace max="-2" attributes="0"/>
|
||||
<Group type="103" groupAlignment="3" attributes="0">
|
||||
<Component id="jLabel1" alignment="3" min="-2" max="-2" attributes="0"/>
|
||||
<Component id="textDevX" alignment="3" min="-2" max="-2" attributes="0"/>
|
||||
</Group>
|
||||
<EmptySpace max="-2" attributes="0"/>
|
||||
<Component id="comboTypeX" min="-2" max="-2" attributes="0"/>
|
||||
<EmptySpace type="unrelated" max="-2" attributes="0"/>
|
||||
<Group type="103" groupAlignment="3" attributes="0">
|
||||
<Component id="jLabel2" alignment="3" min="-2" max="-2" attributes="0"/>
|
||||
<Component id="textDevY" alignment="3" min="-2" max="-2" attributes="0"/>
|
||||
</Group>
|
||||
<EmptySpace min="-2" pref="3" max="-2" attributes="0"/>
|
||||
<Component id="comboTypeY" min="-2" max="-2" attributes="0"/>
|
||||
<EmptySpace type="separate" max="-2" attributes="0"/>
|
||||
<Group type="103" groupAlignment="3" attributes="0">
|
||||
<Component id="jLabel3" alignment="3" min="-2" max="-2" attributes="0"/>
|
||||
<Component id="spinnerInterval" alignment="3" min="-2" max="-2" attributes="0"/>
|
||||
</Group>
|
||||
<EmptySpace max="-2" attributes="0"/>
|
||||
<Group type="103" groupAlignment="3" attributes="0">
|
||||
<Component id="jLabel4" alignment="3" min="-2" max="-2" attributes="0"/>
|
||||
<Component id="spinnerWindow" alignment="3" min="-2" max="-2" attributes="0"/>
|
||||
</Group>
|
||||
<EmptySpace type="separate" max="-2" attributes="0"/>
|
||||
<Component id="checkLinear" min="-2" max="-2" attributes="0"/>
|
||||
<EmptySpace max="-2" attributes="0"/>
|
||||
<Component id="checkQuadratic" min="-2" max="-2" attributes="0"/>
|
||||
<EmptySpace pref="22" max="32767" attributes="0"/>
|
||||
<Group type="103" groupAlignment="2" attributes="0">
|
||||
<Component id="textCorrelation" alignment="2" min="-2" max="-2" attributes="0"/>
|
||||
<Component id="jLabel5" alignment="2" min="-2" max="-2" attributes="0"/>
|
||||
</Group>
|
||||
<EmptySpace type="unrelated" max="-2" attributes="0"/>
|
||||
<Group type="103" groupAlignment="2" attributes="0">
|
||||
<Component id="jLabel6" alignment="2" min="-2" max="-2" attributes="0"/>
|
||||
<Component id="textLinear" alignment="2" min="-2" max="-2" attributes="0"/>
|
||||
</Group>
|
||||
<EmptySpace type="unrelated" max="-2" attributes="0"/>
|
||||
<Group type="103" groupAlignment="2" attributes="0">
|
||||
<Component id="jLabel7" alignment="2" min="-2" max="-2" attributes="0"/>
|
||||
<Component id="textQuadratic" alignment="2" min="-2" max="-2" attributes="0"/>
|
||||
</Group>
|
||||
<EmptySpace pref="24" max="32767" attributes="0"/>
|
||||
<Component id="buttonStart" min="-2" max="-2" attributes="0"/>
|
||||
<EmptySpace min="-2" pref="45" max="-2" attributes="0"/>
|
||||
</Group>
|
||||
</Group>
|
||||
</DimensionLayout>
|
||||
</Layout>
|
||||
<SubComponents>
|
||||
<Component class="javax.swing.JLabel" name="jLabel1">
|
||||
<Properties>
|
||||
<Property name="horizontalAlignment" type="int" value="11"/>
|
||||
<Property name="text" type="java.lang.String" value="X device:"/>
|
||||
</Properties>
|
||||
</Component>
|
||||
<Component class="javax.swing.JTextField" name="textDevX">
|
||||
</Component>
|
||||
<Component class="javax.swing.JTextField" name="textDevY">
|
||||
</Component>
|
||||
<Component class="javax.swing.JLabel" name="jLabel2">
|
||||
<Properties>
|
||||
<Property name="horizontalAlignment" type="int" value="11"/>
|
||||
<Property name="text" type="java.lang.String" value="Y device:"/>
|
||||
</Properties>
|
||||
</Component>
|
||||
<Component class="javax.swing.JLabel" name="jLabel3">
|
||||
<Properties>
|
||||
<Property name="horizontalAlignment" type="int" value="11"/>
|
||||
<Property name="text" type="java.lang.String" value="Interval (s):"/>
|
||||
</Properties>
|
||||
</Component>
|
||||
<Component class="javax.swing.JSpinner" name="spinnerInterval">
|
||||
<Properties>
|
||||
<Property name="model" type="javax.swing.SpinnerModel" editor="org.netbeans.modules.form.editors2.SpinnerModelEditor">
|
||||
<SpinnerModel initial="0.1" minimum="0.001" numberType="java.lang.Double" stepSize="1.0" type="number"/>
|
||||
</Property>
|
||||
</Properties>
|
||||
</Component>
|
||||
<Component class="javax.swing.JLabel" name="jLabel4">
|
||||
<Properties>
|
||||
<Property name="horizontalAlignment" type="int" value="11"/>
|
||||
<Property name="text" type="java.lang.String" value="Window size:"/>
|
||||
</Properties>
|
||||
</Component>
|
||||
<Component class="javax.swing.JSpinner" name="spinnerWindow">
|
||||
<Properties>
|
||||
<Property name="model" type="javax.swing.SpinnerModel" editor="org.netbeans.modules.form.editors2.SpinnerModelEditor">
|
||||
<SpinnerModel initial="50" minimum="3" numberType="java.lang.Integer" stepSize="1" type="number"/>
|
||||
</Property>
|
||||
</Properties>
|
||||
</Component>
|
||||
<Component class="javax.swing.JButton" name="buttonStart">
|
||||
<Properties>
|
||||
<Property name="text" type="java.lang.String" value="Start"/>
|
||||
</Properties>
|
||||
<Events>
|
||||
<EventHandler event="actionPerformed" listener="java.awt.event.ActionListener" parameters="java.awt.event.ActionEvent" handler="buttonStartActionPerformed"/>
|
||||
</Events>
|
||||
</Component>
|
||||
<Component class="javax.swing.JLabel" name="jLabel5">
|
||||
<Properties>
|
||||
<Property name="horizontalAlignment" type="int" value="4"/>
|
||||
<Property name="text" type="java.lang.String" value="Correlation:"/>
|
||||
</Properties>
|
||||
</Component>
|
||||
<Component class="javax.swing.JTextField" name="textCorrelation">
|
||||
<Properties>
|
||||
<Property name="editable" type="boolean" value="false"/>
|
||||
<Property name="horizontalAlignment" type="int" value="0"/>
|
||||
</Properties>
|
||||
</Component>
|
||||
<Component class="javax.swing.JLabel" name="jLabel6">
|
||||
<Properties>
|
||||
<Property name="horizontalAlignment" type="int" value="4"/>
|
||||
<Property name="text" type="java.lang.String" value="Liner fit:"/>
|
||||
</Properties>
|
||||
</Component>
|
||||
<Component class="javax.swing.JTextField" name="textLinear">
|
||||
<Properties>
|
||||
<Property name="editable" type="boolean" value="false"/>
|
||||
<Property name="horizontalAlignment" type="int" value="0"/>
|
||||
</Properties>
|
||||
</Component>
|
||||
<Component class="javax.swing.JLabel" name="jLabel7">
|
||||
<Properties>
|
||||
<Property name="horizontalAlignment" type="int" value="4"/>
|
||||
<Property name="text" type="java.lang.String" value="Quadratric fit:"/>
|
||||
</Properties>
|
||||
</Component>
|
||||
<Component class="javax.swing.JTextField" name="textQuadratic">
|
||||
<Properties>
|
||||
<Property name="editable" type="boolean" value="false"/>
|
||||
<Property name="horizontalAlignment" type="int" value="0"/>
|
||||
</Properties>
|
||||
</Component>
|
||||
<Component class="javax.swing.JCheckBox" name="checkLinear">
|
||||
<Properties>
|
||||
<Property name="selected" type="boolean" value="true"/>
|
||||
<Property name="text" type="java.lang.String" value="Linear fit"/>
|
||||
</Properties>
|
||||
</Component>
|
||||
<Component class="javax.swing.JCheckBox" name="checkQuadratic">
|
||||
<Properties>
|
||||
<Property name="text" type="java.lang.String" value="Quadratic fit"/>
|
||||
</Properties>
|
||||
</Component>
|
||||
<Component class="javax.swing.JComboBox" name="comboTypeX">
|
||||
<Properties>
|
||||
<Property name="model" type="javax.swing.ComboBoxModel" editor="org.netbeans.modules.form.editors2.ComboBoxModelEditor">
|
||||
<StringArray count="3">
|
||||
<StringItem index="0" value="Channel"/>
|
||||
<StringItem index="1" value="Stream"/>
|
||||
<StringItem index="2" value="Camera"/>
|
||||
</StringArray>
|
||||
</Property>
|
||||
</Properties>
|
||||
</Component>
|
||||
<Component class="javax.swing.JComboBox" name="comboTypeY">
|
||||
<Properties>
|
||||
<Property name="model" type="javax.swing.ComboBoxModel" editor="org.netbeans.modules.form.editors2.ComboBoxModelEditor">
|
||||
<StringArray count="3">
|
||||
<StringItem index="0" value="Channel"/>
|
||||
<StringItem index="1" value="Stream"/>
|
||||
<StringItem index="2" value="Camera"/>
|
||||
</StringArray>
|
||||
</Property>
|
||||
</Properties>
|
||||
</Component>
|
||||
</SubComponents>
|
||||
</Container>
|
||||
</SubComponents>
|
||||
</Form>
|
||||
393
plugins/Correlation2.java
Normal file
393
plugins/Correlation2.java
Normal file
@@ -0,0 +1,393 @@
|
||||
/*
|
||||
* Copyright (c) 2014 Paul Scherrer Institute. All rights reserved.
|
||||
*/
|
||||
|
||||
import ch.psi.pshell.plot.Plot;
|
||||
import ch.psi.pshell.ui.App;
|
||||
import ch.psi.pshell.ui.Panel;
|
||||
import ch.psi.utils.State;
|
||||
import java.awt.Component;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
|
||||
/**
|
||||
*
|
||||
*/
|
||||
public class Correlation2 extends Panel {
|
||||
|
||||
public Correlation2() {
|
||||
initComponents();
|
||||
this.setPersistedComponents(new Component[]{textDevX, textDevY, spinnerInterval, spinnerWindow, comboTypeX, comboTypeY});
|
||||
plot.getAxis(Plot.AxisId.X).setLabel(null);
|
||||
plot.getAxis(Plot.AxisId.Y).setLabel(null);
|
||||
}
|
||||
|
||||
//Overridable callbacks
|
||||
@Override
|
||||
public void onInitialize(int runCount) {
|
||||
super.onInitialize(runCount);
|
||||
this.startTimer(100, 10);
|
||||
if (App.hasArgument("dx")) {
|
||||
textDevX.setText(App.getArgumentValue("dx"));
|
||||
}
|
||||
if (App.hasArgument("dy")) {
|
||||
textDevY.setText(App.getArgumentValue("dy"));
|
||||
}
|
||||
try{
|
||||
if (App.hasArgument("dxtype")) {
|
||||
comboTypeX.setSelectedIndex(Integer.valueOf(App.getArgumentValue("dxtype")));
|
||||
}
|
||||
} catch (Exception ex){
|
||||
System.err.println(ex.getMessage());
|
||||
}
|
||||
try{
|
||||
if (App.hasArgument("dytype")) {
|
||||
comboTypeY.setSelectedIndex(Integer.valueOf(App.getArgumentValue("dytype")));
|
||||
}
|
||||
} catch (Exception ex){
|
||||
System.err.println(ex.getMessage());
|
||||
}
|
||||
try{
|
||||
if (App.hasArgument("interval")) {
|
||||
spinnerInterval.setValue(Double.valueOf(App.getArgumentValue("interval")));
|
||||
}
|
||||
} catch (Exception ex){
|
||||
System.err.println(ex.getMessage());
|
||||
}
|
||||
try{
|
||||
if (App.hasArgument("window")) {
|
||||
spinnerWindow.setValue(Integer.valueOf(App.getArgumentValue("window")));
|
||||
}
|
||||
} catch (Exception ex){
|
||||
System.err.println(ex.getMessage());
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void onTimer(){
|
||||
if (isRunning()){
|
||||
updateResults();
|
||||
}
|
||||
}
|
||||
|
||||
//DecimalFormat formatter = new DecimalFormat("0.##E0");
|
||||
void updateResults(){
|
||||
try{
|
||||
textCorrelation.setText(String.format("%1.4f", Double.valueOf((Double)getContext().getInterpreterVariable("corr"))));
|
||||
} catch (Exception ex){
|
||||
textCorrelation.setText("");
|
||||
}
|
||||
|
||||
if (checkLinear.isSelected()){
|
||||
try{
|
||||
List pars = (List)getContext().getInterpreterVariable("pars_lin");
|
||||
//textLinear.setText(String.format("%1.3fx%+1.3f", (Double)(pars.get(1)), (Double)(pars.get(0))));
|
||||
textLinear.setText(String.format("%1.6gx%+1.6g",pars.get(1), pars.get(0)));
|
||||
} catch (Exception ex){
|
||||
textLinear.setText("");
|
||||
}
|
||||
}
|
||||
|
||||
if (checkQuadratic.isSelected()){
|
||||
try{
|
||||
List pars = (List)getContext().getInterpreterVariable("pars_quad");
|
||||
//textQuadratic.setText(String.format("%1.2fx\u00B2 %+1.2fx%+1.2f", (Double)(pars.get(0)), (Double)(pars.get(1)), (Double)(pars.get(0))));
|
||||
textQuadratic.setText(String.format("%1.3gx\u00B2%+1.3gx%+1.3g", pars.get(0), pars.get(1), pars.get(0)));
|
||||
//textQuadratic.setText(formatter.format(pars.get(2))+ formatter.format(pars.get(1)) + formatter.format(pars.get(0)));
|
||||
|
||||
} catch (Exception ex){
|
||||
textQuadratic.setText("");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onStateChange(State state, State former) {
|
||||
buttonStart.setEnabled((state==State.Ready) || (state==State.Busy));
|
||||
if (isRunning()){
|
||||
if (state==State.Ready){
|
||||
buttonStart.setText("Start");
|
||||
}
|
||||
} else {
|
||||
if (state==State.Busy){
|
||||
buttonStart.setText("Stop");
|
||||
}
|
||||
}
|
||||
textDevX.setEnabled(state==State.Ready);
|
||||
textDevY.setEnabled(state==State.Ready);
|
||||
spinnerInterval.setEnabled(state==State.Ready);
|
||||
spinnerWindow.setEnabled(state==State.Ready);
|
||||
comboTypeX.setEnabled(state==State.Ready);
|
||||
comboTypeY.setEnabled(state==State.Ready);
|
||||
checkLinear.setEnabled(state==State.Ready);
|
||||
checkQuadratic.setEnabled(state==State.Ready);
|
||||
|
||||
if ((former==State.Initializing) && (state == State.Ready)){
|
||||
if (App.hasArgument("start")) {
|
||||
buttonStartActionPerformed(null);
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
boolean isRunning(){
|
||||
return buttonStart.getText().equals("Stop");
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onExecutedFile(String fileName, Object result) {
|
||||
}
|
||||
|
||||
|
||||
//Callback to perform update - in event thread
|
||||
@Override
|
||||
protected void doUpdate() {
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
// <editor-fold defaultstate="collapsed" desc="Generated Code">//GEN-BEGIN:initComponents
|
||||
private void initComponents() {
|
||||
|
||||
plot = new ch.psi.pshell.plot.LinePlotJFree();
|
||||
jPanel1 = new javax.swing.JPanel();
|
||||
jLabel1 = new javax.swing.JLabel();
|
||||
textDevX = new javax.swing.JTextField();
|
||||
textDevY = new javax.swing.JTextField();
|
||||
jLabel2 = new javax.swing.JLabel();
|
||||
jLabel3 = new javax.swing.JLabel();
|
||||
spinnerInterval = new javax.swing.JSpinner();
|
||||
jLabel4 = new javax.swing.JLabel();
|
||||
spinnerWindow = new javax.swing.JSpinner();
|
||||
buttonStart = new javax.swing.JButton();
|
||||
jLabel5 = new javax.swing.JLabel();
|
||||
textCorrelation = new javax.swing.JTextField();
|
||||
jLabel6 = new javax.swing.JLabel();
|
||||
textLinear = new javax.swing.JTextField();
|
||||
jLabel7 = new javax.swing.JLabel();
|
||||
textQuadratic = new javax.swing.JTextField();
|
||||
checkLinear = new javax.swing.JCheckBox();
|
||||
checkQuadratic = new javax.swing.JCheckBox();
|
||||
comboTypeX = new javax.swing.JComboBox();
|
||||
comboTypeY = new javax.swing.JComboBox();
|
||||
|
||||
plot.setTitle("");
|
||||
|
||||
jLabel1.setHorizontalAlignment(javax.swing.SwingConstants.TRAILING);
|
||||
jLabel1.setText("X device:");
|
||||
|
||||
jLabel2.setHorizontalAlignment(javax.swing.SwingConstants.TRAILING);
|
||||
jLabel2.setText("Y device:");
|
||||
|
||||
jLabel3.setHorizontalAlignment(javax.swing.SwingConstants.TRAILING);
|
||||
jLabel3.setText("Interval (s):");
|
||||
|
||||
spinnerInterval.setModel(new javax.swing.SpinnerNumberModel(Double.valueOf(0.1d), Double.valueOf(0.001d), null, Double.valueOf(1.0d)));
|
||||
|
||||
jLabel4.setHorizontalAlignment(javax.swing.SwingConstants.TRAILING);
|
||||
jLabel4.setText("Window size:");
|
||||
|
||||
spinnerWindow.setModel(new javax.swing.SpinnerNumberModel(Integer.valueOf(50), Integer.valueOf(3), null, Integer.valueOf(1)));
|
||||
|
||||
buttonStart.setText("Start");
|
||||
buttonStart.addActionListener(new java.awt.event.ActionListener() {
|
||||
public void actionPerformed(java.awt.event.ActionEvent evt) {
|
||||
buttonStartActionPerformed(evt);
|
||||
}
|
||||
});
|
||||
|
||||
jLabel5.setHorizontalAlignment(javax.swing.SwingConstants.RIGHT);
|
||||
jLabel5.setText("Correlation:");
|
||||
|
||||
textCorrelation.setEditable(false);
|
||||
textCorrelation.setHorizontalAlignment(javax.swing.JTextField.CENTER);
|
||||
|
||||
jLabel6.setHorizontalAlignment(javax.swing.SwingConstants.RIGHT);
|
||||
jLabel6.setText("Liner fit:");
|
||||
|
||||
textLinear.setEditable(false);
|
||||
textLinear.setHorizontalAlignment(javax.swing.JTextField.CENTER);
|
||||
|
||||
jLabel7.setHorizontalAlignment(javax.swing.SwingConstants.RIGHT);
|
||||
jLabel7.setText("Quadratric fit:");
|
||||
|
||||
textQuadratic.setEditable(false);
|
||||
textQuadratic.setHorizontalAlignment(javax.swing.JTextField.CENTER);
|
||||
|
||||
checkLinear.setSelected(true);
|
||||
checkLinear.setText("Linear fit");
|
||||
|
||||
checkQuadratic.setText("Quadratic fit");
|
||||
|
||||
comboTypeX.setModel(new javax.swing.DefaultComboBoxModel(new String[] { "Channel", "Stream", "Camera" }));
|
||||
|
||||
comboTypeY.setModel(new javax.swing.DefaultComboBoxModel(new String[] { "Channel", "Stream", "Camera" }));
|
||||
|
||||
javax.swing.GroupLayout jPanel1Layout = new javax.swing.GroupLayout(jPanel1);
|
||||
jPanel1.setLayout(jPanel1Layout);
|
||||
jPanel1Layout.setHorizontalGroup(
|
||||
jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
|
||||
.addGroup(jPanel1Layout.createSequentialGroup()
|
||||
.addContainerGap(javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
|
||||
.addComponent(buttonStart, javax.swing.GroupLayout.PREFERRED_SIZE, 189, javax.swing.GroupLayout.PREFERRED_SIZE)
|
||||
.addContainerGap(javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE))
|
||||
.addGroup(jPanel1Layout.createSequentialGroup()
|
||||
.addContainerGap()
|
||||
.addGroup(jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
|
||||
.addComponent(jLabel7, javax.swing.GroupLayout.Alignment.TRAILING)
|
||||
.addComponent(jLabel6, javax.swing.GroupLayout.Alignment.TRAILING)
|
||||
.addComponent(jLabel5, javax.swing.GroupLayout.Alignment.TRAILING)
|
||||
.addComponent(jLabel3, javax.swing.GroupLayout.Alignment.TRAILING)
|
||||
.addComponent(jLabel2, javax.swing.GroupLayout.Alignment.TRAILING)
|
||||
.addComponent(jLabel1, javax.swing.GroupLayout.Alignment.TRAILING)
|
||||
.addComponent(jLabel4, javax.swing.GroupLayout.Alignment.TRAILING))
|
||||
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
|
||||
.addGroup(jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
|
||||
.addGroup(jPanel1Layout.createSequentialGroup()
|
||||
.addGroup(jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
|
||||
.addComponent(comboTypeY, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
|
||||
.addComponent(comboTypeX, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
|
||||
.addComponent(textCorrelation, javax.swing.GroupLayout.DEFAULT_SIZE, 250, Short.MAX_VALUE)
|
||||
.addComponent(spinnerInterval, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
|
||||
.addComponent(spinnerWindow, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
|
||||
.addComponent(textLinear, javax.swing.GroupLayout.DEFAULT_SIZE, 250, Short.MAX_VALUE)
|
||||
.addComponent(textQuadratic, javax.swing.GroupLayout.DEFAULT_SIZE, 250, Short.MAX_VALUE)
|
||||
.addComponent(checkLinear)
|
||||
.addComponent(checkQuadratic))
|
||||
.addContainerGap())
|
||||
.addComponent(textDevX)
|
||||
.addComponent(textDevY)))
|
||||
);
|
||||
|
||||
jPanel1Layout.linkSize(javax.swing.SwingConstants.HORIZONTAL, new java.awt.Component[] {jLabel1, jLabel2, jLabel3, jLabel4});
|
||||
|
||||
jPanel1Layout.linkSize(javax.swing.SwingConstants.HORIZONTAL, new java.awt.Component[] {spinnerInterval, spinnerWindow});
|
||||
|
||||
jPanel1Layout.linkSize(javax.swing.SwingConstants.HORIZONTAL, new java.awt.Component[] {jLabel5, jLabel6, jLabel7});
|
||||
|
||||
jPanel1Layout.linkSize(javax.swing.SwingConstants.HORIZONTAL, new java.awt.Component[] {textCorrelation, textLinear, textQuadratic});
|
||||
|
||||
jPanel1Layout.setVerticalGroup(
|
||||
jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
|
||||
.addGroup(jPanel1Layout.createSequentialGroup()
|
||||
.addContainerGap()
|
||||
.addGroup(jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
|
||||
.addComponent(jLabel1)
|
||||
.addComponent(textDevX, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE))
|
||||
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
|
||||
.addComponent(comboTypeX, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
|
||||
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED)
|
||||
.addGroup(jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
|
||||
.addComponent(jLabel2)
|
||||
.addComponent(textDevY, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE))
|
||||
.addGap(3, 3, 3)
|
||||
.addComponent(comboTypeY, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
|
||||
.addGap(18, 18, 18)
|
||||
.addGroup(jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
|
||||
.addComponent(jLabel3)
|
||||
.addComponent(spinnerInterval, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE))
|
||||
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
|
||||
.addGroup(jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
|
||||
.addComponent(jLabel4)
|
||||
.addComponent(spinnerWindow, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE))
|
||||
.addGap(18, 18, 18)
|
||||
.addComponent(checkLinear)
|
||||
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
|
||||
.addComponent(checkQuadratic)
|
||||
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED, 22, Short.MAX_VALUE)
|
||||
.addGroup(jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.CENTER)
|
||||
.addComponent(textCorrelation, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
|
||||
.addComponent(jLabel5))
|
||||
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED)
|
||||
.addGroup(jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.CENTER)
|
||||
.addComponent(jLabel6)
|
||||
.addComponent(textLinear, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE))
|
||||
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED)
|
||||
.addGroup(jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.CENTER)
|
||||
.addComponent(jLabel7)
|
||||
.addComponent(textQuadratic, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE))
|
||||
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED, 24, Short.MAX_VALUE)
|
||||
.addComponent(buttonStart)
|
||||
.addGap(45, 45, 45))
|
||||
);
|
||||
|
||||
javax.swing.GroupLayout layout = new javax.swing.GroupLayout(this);
|
||||
this.setLayout(layout);
|
||||
layout.setHorizontalGroup(
|
||||
layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
|
||||
.addGroup(javax.swing.GroupLayout.Alignment.TRAILING, layout.createSequentialGroup()
|
||||
.addContainerGap()
|
||||
.addComponent(jPanel1, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
|
||||
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
|
||||
.addComponent(plot, javax.swing.GroupLayout.DEFAULT_SIZE, 345, Short.MAX_VALUE))
|
||||
);
|
||||
layout.setVerticalGroup(
|
||||
layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
|
||||
.addGroup(layout.createSequentialGroup()
|
||||
.addContainerGap()
|
||||
.addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
|
||||
.addComponent(jPanel1, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
|
||||
.addGroup(layout.createSequentialGroup()
|
||||
.addComponent(plot, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
|
||||
.addContainerGap())))
|
||||
);
|
||||
}// </editor-fold>//GEN-END:initComponents
|
||||
|
||||
private void buttonStartActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_buttonStartActionPerformed
|
||||
try {
|
||||
if (isRunning()){
|
||||
abort();
|
||||
updateResults();
|
||||
//buttonStart.setText("Start");
|
||||
} else {
|
||||
textCorrelation.setText("");
|
||||
textLinear.setText("");
|
||||
textQuadratic.setText("");
|
||||
HashMap args = new HashMap();
|
||||
args.put("dx", textDevX.getText());
|
||||
args.put("dy", textDevY.getText());
|
||||
args.put("interval", spinnerInterval.getValue());
|
||||
args.put("window", spinnerWindow.getValue());
|
||||
args.put("dxtype", comboTypeX.getSelectedIndex());
|
||||
args.put("dytype", comboTypeY.getSelectedIndex());
|
||||
args.put("linear_fit", checkLinear.isSelected());
|
||||
args.put("quadratic_fit", checkQuadratic.isSelected());
|
||||
|
||||
args.put("p", plot);
|
||||
runAsync("Correlation/Correlation2", args).handle((ok, ex) -> {
|
||||
if (ex != null) {
|
||||
ex.printStackTrace();
|
||||
}
|
||||
return ok;
|
||||
});
|
||||
///buttonStart.setText("Stop");
|
||||
}
|
||||
|
||||
} catch (Exception ex) {
|
||||
showException(ex);
|
||||
}
|
||||
}//GEN-LAST:event_buttonStartActionPerformed
|
||||
|
||||
// Variables declaration - do not modify//GEN-BEGIN:variables
|
||||
private javax.swing.JButton buttonStart;
|
||||
private javax.swing.JCheckBox checkLinear;
|
||||
private javax.swing.JCheckBox checkQuadratic;
|
||||
private javax.swing.JComboBox comboTypeX;
|
||||
private javax.swing.JComboBox comboTypeY;
|
||||
private javax.swing.JLabel jLabel1;
|
||||
private javax.swing.JLabel jLabel2;
|
||||
private javax.swing.JLabel jLabel3;
|
||||
private javax.swing.JLabel jLabel4;
|
||||
private javax.swing.JLabel jLabel5;
|
||||
private javax.swing.JLabel jLabel6;
|
||||
private javax.swing.JLabel jLabel7;
|
||||
private javax.swing.JPanel jPanel1;
|
||||
private ch.psi.pshell.plot.LinePlotJFree plot;
|
||||
private javax.swing.JSpinner spinnerInterval;
|
||||
private javax.swing.JSpinner spinnerWindow;
|
||||
private javax.swing.JTextField textCorrelation;
|
||||
private javax.swing.JTextField textDevX;
|
||||
private javax.swing.JTextField textDevY;
|
||||
private javax.swing.JTextField textLinear;
|
||||
private javax.swing.JTextField textQuadratic;
|
||||
// End of variables declaration//GEN-END:variables
|
||||
}
|
||||
Binary file not shown.
@@ -38,7 +38,7 @@
|
||||
<Group type="102" alignment="0" attributes="0">
|
||||
<Component id="sidePanel" min="-2" max="-2" attributes="0"/>
|
||||
<EmptySpace min="-2" max="-2" attributes="0"/>
|
||||
<Component id="renderer" pref="578" max="32767" attributes="0"/>
|
||||
<Component id="renderer" max="32767" attributes="0"/>
|
||||
</Group>
|
||||
<Group type="102" alignment="0" attributes="0">
|
||||
<EmptySpace min="-2" max="-2" attributes="0"/>
|
||||
@@ -68,7 +68,7 @@
|
||||
<Layout>
|
||||
<DimensionLayout dim="0">
|
||||
<Group type="103" groupAlignment="0" attributes="0">
|
||||
<Group type="102" alignment="0" attributes="0">
|
||||
<Group type="102" attributes="0">
|
||||
<EmptySpace max="-2" attributes="0"/>
|
||||
<Group type="103" groupAlignment="1" max="-2" attributes="0">
|
||||
<Component id="jPanel5" max="32767" attributes="0"/>
|
||||
@@ -78,7 +78,7 @@
|
||||
<Component id="panelScreen" max="32767" attributes="0"/>
|
||||
<Component id="panelFilter" max="32767" attributes="0"/>
|
||||
</Group>
|
||||
<EmptySpace max="-2" attributes="0"/>
|
||||
<EmptySpace max="32767" attributes="0"/>
|
||||
</Group>
|
||||
</Group>
|
||||
</DimensionLayout>
|
||||
@@ -97,6 +97,7 @@
|
||||
<Component id="panelScreen" min="-2" max="-2" attributes="0"/>
|
||||
<EmptySpace max="-2" attributes="0"/>
|
||||
<Component id="panelFilter" min="-2" max="-2" attributes="0"/>
|
||||
<EmptySpace max="32767" attributes="0"/>
|
||||
</Group>
|
||||
</Group>
|
||||
</DimensionLayout>
|
||||
@@ -433,16 +434,27 @@
|
||||
<Component id="textState" min="-2" pref="80" max="-2" attributes="0"/>
|
||||
<EmptySpace max="-2" attributes="0"/>
|
||||
</Group>
|
||||
<Group type="102" alignment="1" attributes="0">
|
||||
<EmptySpace max="32767" attributes="0"/>
|
||||
<Component id="filler1" min="-2" max="-2" attributes="0"/>
|
||||
<EmptySpace min="-2" pref="56" max="-2" attributes="0"/>
|
||||
</Group>
|
||||
</Group>
|
||||
</DimensionLayout>
|
||||
<DimensionLayout dim="1">
|
||||
<Group type="103" groupAlignment="0" attributes="0">
|
||||
<Group type="102" alignment="0" attributes="0">
|
||||
<EmptySpace min="-2" pref="4" max="-2" attributes="0"/>
|
||||
<Group type="103" groupAlignment="3" attributes="0">
|
||||
<Component id="buttonServer" alignment="3" min="-2" max="-2" attributes="0"/>
|
||||
<Component id="buttonDirect" alignment="3" min="-2" max="-2" attributes="0"/>
|
||||
<Component id="textState" linkSize="2" alignment="3" min="-2" max="-2" attributes="0"/>
|
||||
<Group type="103" groupAlignment="0" attributes="0">
|
||||
<Group type="102" alignment="0" attributes="0">
|
||||
<Component id="textState" linkSize="2" min="-2" max="-2" attributes="0"/>
|
||||
<EmptySpace max="32767" attributes="0"/>
|
||||
<Component id="filler1" min="-2" max="-2" attributes="0"/>
|
||||
</Group>
|
||||
<Group type="103" groupAlignment="3" attributes="0">
|
||||
<Component id="buttonServer" alignment="3" min="-2" max="-2" attributes="0"/>
|
||||
<Component id="buttonDirect" alignment="3" min="-2" max="-2" attributes="0"/>
|
||||
</Group>
|
||||
</Group>
|
||||
<EmptySpace max="-2" attributes="0"/>
|
||||
</Group>
|
||||
@@ -483,6 +495,16 @@
|
||||
<Property name="enabled" type="boolean" value="false"/>
|
||||
</Properties>
|
||||
</Component>
|
||||
<Component class="javax.swing.Box$Filler" name="filler1">
|
||||
<Properties>
|
||||
<Property name="maximumSize" type="java.awt.Dimension" editor="org.netbeans.beaninfo.editors.DimensionEditor">
|
||||
<Dimension value="[0, 32767]"/>
|
||||
</Property>
|
||||
</Properties>
|
||||
<AuxValues>
|
||||
<AuxValue name="classDetails" type="java.lang.String" value="Box.Filler.VerticalGlue"/>
|
||||
</AuxValues>
|
||||
</Component>
|
||||
</SubComponents>
|
||||
</Container>
|
||||
<Container class="javax.swing.JPanel" name="panelScreen">
|
||||
@@ -865,19 +887,13 @@
|
||||
<Layout>
|
||||
<DimensionLayout dim="0">
|
||||
<Group type="103" groupAlignment="0" attributes="0">
|
||||
<Group type="102" attributes="0">
|
||||
<Group type="102" alignment="1" attributes="0">
|
||||
<Component id="toolBar" min="-2" max="-2" attributes="0"/>
|
||||
<EmptySpace type="separate" max="-2" attributes="0"/>
|
||||
<Component id="jLabel1" min="-2" max="-2" attributes="0"/>
|
||||
<EmptySpace min="-2" max="-2" attributes="0"/>
|
||||
<Component id="comboCameras" max="32767" attributes="0"/>
|
||||
<EmptySpace min="-2" max="-2" attributes="0"/>
|
||||
<Component id="jLabel5" min="-2" max="-2" attributes="0"/>
|
||||
<EmptySpace min="-2" max="-2" attributes="0"/>
|
||||
<Component id="comboType" min="-2" max="-2" attributes="0"/>
|
||||
<EmptySpace type="separate" max="-2" attributes="0"/>
|
||||
<Component id="pauseSelection" min="-2" max="-2" attributes="0"/>
|
||||
<EmptySpace min="-2" pref="0" max="-2" attributes="0"/>
|
||||
<Component id="panelCameraSelection" max="32767" attributes="0"/>
|
||||
<EmptySpace min="-2" pref="18" max="-2" attributes="0"/>
|
||||
<Component id="pauseSelection" min="-2" pref="334" max="-2" attributes="0"/>
|
||||
<EmptySpace max="-2" attributes="0"/>
|
||||
</Group>
|
||||
</Group>
|
||||
</DimensionLayout>
|
||||
@@ -885,61 +901,16 @@
|
||||
<Group type="103" groupAlignment="1" attributes="0">
|
||||
<Group type="102" attributes="0">
|
||||
<EmptySpace min="-2" pref="1" max="-2" attributes="0"/>
|
||||
<Group type="103" groupAlignment="0" attributes="0">
|
||||
<Component id="toolBar" min="-2" pref="25" max="-2" attributes="0"/>
|
||||
<Group type="103" groupAlignment="3" attributes="0">
|
||||
<Component id="jLabel1" alignment="3" min="-2" max="-2" attributes="0"/>
|
||||
<Component id="comboCameras" alignment="3" min="-2" max="-2" attributes="0"/>
|
||||
<Component id="jLabel5" alignment="3" min="-2" max="-2" attributes="0"/>
|
||||
<Component id="comboType" alignment="3" min="-2" max="-2" attributes="0"/>
|
||||
</Group>
|
||||
<Group type="103" groupAlignment="2" attributes="0">
|
||||
<Component id="pauseSelection" alignment="2" min="-2" pref="29" max="-2" attributes="0"/>
|
||||
<Component id="toolBar" alignment="2" min="-2" max="-2" attributes="0"/>
|
||||
<Component id="panelCameraSelection" alignment="2" min="-2" max="-2" attributes="0"/>
|
||||
</Group>
|
||||
</Group>
|
||||
<Component id="pauseSelection" alignment="0" min="-2" pref="29" max="-2" attributes="0"/>
|
||||
</Group>
|
||||
</DimensionLayout>
|
||||
</Layout>
|
||||
<SubComponents>
|
||||
<Component class="javax.swing.JComboBox" name="comboCameras">
|
||||
<Properties>
|
||||
<Property name="font" type="java.awt.Font" editor="org.netbeans.beaninfo.editors.FontEditor">
|
||||
<Font name="Dialog" size="14" style="1"/>
|
||||
</Property>
|
||||
<Property name="maximumRowCount" type="int" value="30"/>
|
||||
</Properties>
|
||||
<Events>
|
||||
<EventHandler event="actionPerformed" listener="java.awt.event.ActionListener" parameters="java.awt.event.ActionEvent" handler="comboCamerasActionPerformed"/>
|
||||
</Events>
|
||||
</Component>
|
||||
<Component class="javax.swing.JLabel" name="jLabel1">
|
||||
<Properties>
|
||||
<Property name="text" type="java.lang.String" value="Camera:"/>
|
||||
</Properties>
|
||||
</Component>
|
||||
<Component class="javax.swing.JLabel" name="jLabel5">
|
||||
<Properties>
|
||||
<Property name="text" type="java.lang.String" value="Type:"/>
|
||||
</Properties>
|
||||
</Component>
|
||||
<Component class="javax.swing.JComboBox" name="comboType">
|
||||
<Properties>
|
||||
<Property name="font" type="java.awt.Font" editor="org.netbeans.beaninfo.editors.FontEditor">
|
||||
<Font name="Dialog" size="14" style="1"/>
|
||||
</Property>
|
||||
<Property name="maximumRowCount" type="int" value="30"/>
|
||||
<Property name="model" type="javax.swing.ComboBoxModel" editor="org.netbeans.modules.form.editors2.ComboBoxModelEditor">
|
||||
<StringArray count="4">
|
||||
<StringItem index="0" value="All"/>
|
||||
<StringItem index="1" value="Laser"/>
|
||||
<StringItem index="2" value="Electrons"/>
|
||||
<StringItem index="3" value="Photonics"/>
|
||||
</StringArray>
|
||||
</Property>
|
||||
</Properties>
|
||||
<Events>
|
||||
<EventHandler event="actionPerformed" listener="java.awt.event.ActionListener" parameters="java.awt.event.ActionEvent" handler="comboTypeActionPerformed"/>
|
||||
</Events>
|
||||
</Component>
|
||||
<Container class="javax.swing.JToolBar" name="toolBar">
|
||||
<Properties>
|
||||
<Property name="floatable" type="boolean" value="false"/>
|
||||
@@ -977,30 +948,6 @@
|
||||
<EventHandler event="actionPerformed" listener="java.awt.event.ActionListener" parameters="java.awt.event.ActionEvent" handler="buttonStreamDataActionPerformed"/>
|
||||
</Events>
|
||||
</Component>
|
||||
<Component class="javax.swing.JButton" name="buttonArgs">
|
||||
<Properties>
|
||||
<Property name="icon" type="javax.swing.Icon" editor="org.netbeans.modules.form.RADConnectionPropertyEditor">
|
||||
<Connection code="getIcon("Data")" type="code"/>
|
||||
</Property>
|
||||
<Property name="text" type="java.lang.String" value=" "/>
|
||||
<Property name="toolTipText" type="java.lang.String" value="Camera Setup"/>
|
||||
<Property name="horizontalTextPosition" type="int" value="0"/>
|
||||
</Properties>
|
||||
<Events>
|
||||
<EventHandler event="actionPerformed" listener="java.awt.event.ActionListener" parameters="java.awt.event.ActionEvent" handler="buttonArgsActionPerformed"/>
|
||||
</Events>
|
||||
</Component>
|
||||
<Component class="javax.swing.JToolBar$Separator" name="jSeparator5">
|
||||
<Properties>
|
||||
<Property name="maximumSize" type="java.awt.Dimension" editor="org.netbeans.beaninfo.editors.DimensionEditor">
|
||||
<Dimension value="[20, 32767]"/>
|
||||
</Property>
|
||||
<Property name="preferredSize" type="java.awt.Dimension" editor="org.netbeans.beaninfo.editors.DimensionEditor">
|
||||
<Dimension value="[20, 0]"/>
|
||||
</Property>
|
||||
<Property name="requestFocusEnabled" type="boolean" value="false"/>
|
||||
</Properties>
|
||||
</Component>
|
||||
<Component class="javax.swing.JToggleButton" name="buttonSave">
|
||||
<Properties>
|
||||
<Property name="icon" type="javax.swing.Icon" editor="org.netbeans.modules.form.RADConnectionPropertyEditor">
|
||||
@@ -1106,6 +1053,19 @@
|
||||
<EventHandler event="actionPerformed" listener="java.awt.event.ActionListener" parameters="java.awt.event.ActionEvent" handler="buttonReticleActionPerformed"/>
|
||||
</Events>
|
||||
</Component>
|
||||
<Component class="javax.swing.JToggleButton" name="buttonTitle">
|
||||
<Properties>
|
||||
<Property name="icon" type="javax.swing.Icon" editor="org.netbeans.modules.form.RADConnectionPropertyEditor">
|
||||
<Connection code="getIcon("Title")" type="code"/>
|
||||
</Property>
|
||||
<Property name="text" type="java.lang.String" value=" "/>
|
||||
<Property name="toolTipText" type="java.lang.String" value="Show Camera Name"/>
|
||||
<Property name="horizontalTextPosition" type="int" value="0"/>
|
||||
</Properties>
|
||||
<Events>
|
||||
<EventHandler event="actionPerformed" listener="java.awt.event.ActionListener" parameters="java.awt.event.ActionEvent" handler="buttonTitleActionPerformed"/>
|
||||
</Events>
|
||||
</Component>
|
||||
</SubComponents>
|
||||
</Container>
|
||||
<Component class="ch.psi.pshell.swing.ValueSelection" name="pauseSelection">
|
||||
@@ -1113,6 +1073,88 @@
|
||||
<Property name="decimals" type="int" value="0"/>
|
||||
</Properties>
|
||||
</Component>
|
||||
<Container class="javax.swing.JPanel" name="panelCameraSelection">
|
||||
|
||||
<Layout>
|
||||
<DimensionLayout dim="0">
|
||||
<Group type="103" groupAlignment="0" attributes="0">
|
||||
<Group type="102" attributes="0">
|
||||
<EmptySpace max="-2" attributes="0"/>
|
||||
<Component id="jLabel1" min="-2" max="-2" attributes="0"/>
|
||||
<EmptySpace min="0" pref="0" max="-2" attributes="0"/>
|
||||
<Component id="comboCameras" min="-2" pref="222" max="-2" attributes="0"/>
|
||||
<EmptySpace max="-2" attributes="0"/>
|
||||
<Component id="jLabel5" min="-2" max="-2" attributes="0"/>
|
||||
<EmptySpace max="-2" attributes="0"/>
|
||||
<Component id="comboType" min="-2" max="-2" attributes="0"/>
|
||||
<EmptySpace min="0" pref="0" max="-2" attributes="0"/>
|
||||
</Group>
|
||||
</Group>
|
||||
</DimensionLayout>
|
||||
<DimensionLayout dim="1">
|
||||
<Group type="103" groupAlignment="0" attributes="0">
|
||||
<Group type="102" attributes="0">
|
||||
<EmptySpace min="0" pref="0" max="-2" attributes="0"/>
|
||||
<Group type="103" groupAlignment="2" attributes="0">
|
||||
<Component id="comboType" alignment="2" min="-2" max="-2" attributes="0"/>
|
||||
<Component id="jLabel5" alignment="2" min="-2" max="-2" attributes="0"/>
|
||||
<Component id="jLabel1" alignment="2" min="-2" max="-2" attributes="0"/>
|
||||
<Component id="comboCameras" alignment="2" min="-2" max="-2" attributes="0"/>
|
||||
</Group>
|
||||
<EmptySpace min="0" pref="0" max="-2" attributes="0"/>
|
||||
</Group>
|
||||
</Group>
|
||||
</DimensionLayout>
|
||||
</Layout>
|
||||
<SubComponents>
|
||||
<Component class="javax.swing.JLabel" name="jLabel1">
|
||||
<Properties>
|
||||
<Property name="text" type="java.lang.String" value="Camera:"/>
|
||||
</Properties>
|
||||
</Component>
|
||||
<Component class="javax.swing.JComboBox" name="comboCameras">
|
||||
<Properties>
|
||||
<Property name="font" type="java.awt.Font" editor="org.netbeans.beaninfo.editors.FontEditor">
|
||||
<Font name="Dialog" size="14" style="1"/>
|
||||
</Property>
|
||||
<Property name="maximumRowCount" type="int" value="30"/>
|
||||
<Property name="model" type="javax.swing.ComboBoxModel" editor="org.netbeans.modules.form.editors2.ComboBoxModelEditor">
|
||||
<StringArray count="0"/>
|
||||
</Property>
|
||||
<Property name="minimumSize" type="java.awt.Dimension" editor="org.netbeans.beaninfo.editors.DimensionEditor">
|
||||
<Dimension value="[127, 27]"/>
|
||||
</Property>
|
||||
</Properties>
|
||||
<Events>
|
||||
<EventHandler event="actionPerformed" listener="java.awt.event.ActionListener" parameters="java.awt.event.ActionEvent" handler="comboCamerasActionPerformed"/>
|
||||
</Events>
|
||||
</Component>
|
||||
<Component class="javax.swing.JLabel" name="jLabel5">
|
||||
<Properties>
|
||||
<Property name="text" type="java.lang.String" value="Type:"/>
|
||||
</Properties>
|
||||
</Component>
|
||||
<Component class="javax.swing.JComboBox" name="comboType">
|
||||
<Properties>
|
||||
<Property name="font" type="java.awt.Font" editor="org.netbeans.beaninfo.editors.FontEditor">
|
||||
<Font name="Dialog" size="14" style="1"/>
|
||||
</Property>
|
||||
<Property name="maximumRowCount" type="int" value="30"/>
|
||||
<Property name="model" type="javax.swing.ComboBoxModel" editor="org.netbeans.modules.form.editors2.ComboBoxModelEditor">
|
||||
<StringArray count="4">
|
||||
<StringItem index="0" value="All"/>
|
||||
<StringItem index="1" value="Laser"/>
|
||||
<StringItem index="2" value="Electrons"/>
|
||||
<StringItem index="3" value="Photonics"/>
|
||||
</StringArray>
|
||||
</Property>
|
||||
</Properties>
|
||||
<Events>
|
||||
<EventHandler event="actionPerformed" listener="java.awt.event.ActionListener" parameters="java.awt.event.ActionEvent" handler="comboTypeActionPerformed"/>
|
||||
</Events>
|
||||
</Component>
|
||||
</SubComponents>
|
||||
</Container>
|
||||
</SubComponents>
|
||||
</Container>
|
||||
<Component class="ch.psi.pshell.imaging.Renderer" name="renderer">
|
||||
|
||||
@@ -183,13 +183,15 @@ public class ScreenPanel2 extends Panel {
|
||||
x_center_of_mass = getServerDouble("x_center_of_mass", cache);
|
||||
y_center_of_mass = getServerDouble("y_center_of_mass", cache);
|
||||
x_rms = getServerDouble("x_rms", cache);
|
||||
y_rms = getServerDouble("y_rms", cache);
|
||||
y_rms = getServerDouble("y_rms", cache);
|
||||
if (goodRegion) {
|
||||
double[] gX2 = new double[x_profile.length];
|
||||
Arrays.fill(gX2, Double.NaN);
|
||||
try {
|
||||
double x = getServerDoubleArray("gr_x_axis", cache)[0];
|
||||
System.arraycopy(x_fit_gauss_function, 0, gX2, (int) ((renderer.getCalibration() != null) ? renderer.getCalibration().convertToImageX(x) : x), x_fit_gauss_function.length);
|
||||
gr_size_x = x_fit_gauss_function.length;
|
||||
gr_pos_x = (int) ((renderer.getCalibration() != null) ? renderer.getCalibration().convertToImageX(x) : x);
|
||||
System.arraycopy(x_fit_gauss_function, 0, gX2, gr_pos_x , gr_size_x);
|
||||
} catch (Exception ex) {
|
||||
}
|
||||
x_fit_gauss_function = gX2;
|
||||
@@ -197,7 +199,9 @@ public class ScreenPanel2 extends Panel {
|
||||
Arrays.fill(gY2, Double.NaN);
|
||||
try {
|
||||
double y = getServerDoubleArray("gr_y_axis", cache)[0];
|
||||
System.arraycopy(y_fit_gauss_function, 0, gY2, (int) ((renderer.getCalibration() != null) ? renderer.getCalibration().convertToImageY(y) : y), y_fit_gauss_function.length);
|
||||
gr_size_y = y_fit_gauss_function.length;
|
||||
gr_pos_y = (int) ((renderer.getCalibration() != null) ? renderer.getCalibration().convertToImageY(y) : y);
|
||||
System.arraycopy(y_fit_gauss_function, 0, gY2, gr_pos_y, y_fit_gauss_function.length);
|
||||
} catch (Exception ex) {
|
||||
}
|
||||
y_fit_gauss_function = gY2;
|
||||
@@ -228,6 +232,10 @@ public class ScreenPanel2 extends Panel {
|
||||
public double[] x_fit_gauss_function;
|
||||
public double[] y_profile;
|
||||
public double[] y_fit_gauss_function;
|
||||
public int gr_size_x;
|
||||
public int gr_pos_x;
|
||||
public int gr_size_y;
|
||||
public int gr_pos_y;
|
||||
public PointDouble[] sliceCenters;
|
||||
public StreamValue cache;
|
||||
}
|
||||
@@ -664,7 +672,7 @@ public class ScreenPanel2 extends Panel {
|
||||
try {
|
||||
fo = getFitOverlays(data);
|
||||
} catch (Exception ex) {
|
||||
System.err.println(ex);
|
||||
ex.printStackTrace();
|
||||
}
|
||||
}
|
||||
synchronized (lockOverlays) {
|
||||
@@ -1300,60 +1308,75 @@ public class ScreenPanel2 extends Panel {
|
||||
|
||||
profileSize /= 4;
|
||||
if (pX != null) {
|
||||
int[] x = Arr.indexesInt(pX.length);
|
||||
int[] y = new int[pX.length];
|
||||
int[] p = new int[pX.length];
|
||||
int[] xp = Arr.indexesInt(pX.length);
|
||||
int[] xg = xp;
|
||||
int[] yp = new int[pX.length];
|
||||
int[] yg = new int[pX.length];
|
||||
|
||||
List<Double> l = Arrays.asList((Double[]) Convert.toWrapperArray(pX));
|
||||
Double min = Collections.min(l);
|
||||
Double max = Collections.max(l);
|
||||
double minPlot = min;
|
||||
double rangePlot = max - min;
|
||||
|
||||
for (int i = 0; i < x.length; i++) {
|
||||
double minProfile = Collections.min(l);
|
||||
double maxProfile = Collections.max(l);
|
||||
double rangeProfile = maxProfile - minProfile;
|
||||
double minGauss = minProfile;
|
||||
double rangeGauss = rangeProfile;
|
||||
//If not good region, range of profile and fit are similar so save this calcultion
|
||||
if (goodRegion && id.gr_size_x>0){
|
||||
l = Arrays.asList( (Double[]) Convert.toWrapperArray(Arrays.copyOfRange(gX, id.gr_pos_x, id.gr_pos_x + id.gr_size_x)));
|
||||
minGauss = Collections.min(l);
|
||||
rangeGauss = Collections.max(l) - minGauss;
|
||||
}
|
||||
|
||||
for (int i = 0; i < xp.length; i++) {
|
||||
if (gX != null) {
|
||||
y[i] = (int) (height - 1 - (((gX[i] - minPlot) / rangePlot) * profileSize));
|
||||
yg[i] = (int) (height - 1 - (((gX[i] - minGauss) / rangeGauss) * profileSize));
|
||||
}
|
||||
p[i] = (int) (height - 1 - (((pX[i] - minPlot) / rangePlot) * profileSize));
|
||||
yp[i] = (int) (height - 1 - (((pX[i] - minProfile) / rangeProfile) * profileSize));
|
||||
}
|
||||
|
||||
if (goodRegion) {
|
||||
for (int i = 0; i < x.length; i++) {
|
||||
y[i] = (Double.isNaN(gX[i])) ? 100000 : y[i];
|
||||
}
|
||||
|
||||
if (goodRegion && id.gr_size_x>0){
|
||||
xg = Arrays.copyOfRange(xg, id.gr_pos_x, id.gr_pos_x + id.gr_size_x);
|
||||
yg = Arrays.copyOfRange(yg, id.gr_pos_x, id.gr_pos_x + id.gr_size_x);
|
||||
}
|
||||
|
||||
vgaussian = new Overlays.Polyline(penFit, x, y);
|
||||
vprofile = new Overlays.Polyline(renderer.getPenProfile(), x, p);
|
||||
|
||||
vgaussian = new Overlays.Polyline(penFit, xg, yg);
|
||||
vprofile = new Overlays.Polyline(renderer.getPenProfile(), xp, yp);
|
||||
}
|
||||
|
||||
if (pY != null) {
|
||||
int[] y = Arr.indexesInt(pY.length);
|
||||
int[] x = new int[pY.length];
|
||||
int[] p = new int[pY.length];
|
||||
int[] xp = new int[pY.length];
|
||||
int[] xg = new int[pY.length];
|
||||
int[] yp = Arr.indexesInt(pY.length);
|
||||
int[] yg = yp;
|
||||
|
||||
List<Double> l = Arrays.asList((Double[]) Convert.toWrapperArray(pY));
|
||||
Double min = Collections.min(l);
|
||||
Double max = Collections.max(l);
|
||||
double minPlot = min;
|
||||
double rangePlot = max - min;
|
||||
double minProfile = Collections.min(l);
|
||||
double maxProfile = Collections.max(l);
|
||||
double rangeProfile = maxProfile - minProfile;
|
||||
double minGauss = minProfile;
|
||||
double rangeGauss = rangeProfile;
|
||||
//If not good region, range of profile and fit are similar so save this calcultion
|
||||
if (goodRegion && id.gr_size_y>0){
|
||||
l = Arrays.asList( (Double[]) Convert.toWrapperArray(Arrays.copyOfRange(gY, id.gr_pos_y, id.gr_pos_y + id.gr_size_y)));
|
||||
minGauss = Collections.min(l);
|
||||
rangeGauss = Collections.max(l) - minGauss;
|
||||
}
|
||||
|
||||
for (int i = 0; i < x.length; i++) {
|
||||
for (int i = 0; i < xp.length; i++) {
|
||||
if (gY != null) {
|
||||
x[i] = (int) (((gY[i] - minPlot) / rangePlot) * profileSize);
|
||||
xg[i] = (int) (((gY[i] - minGauss) / rangeGauss) * profileSize);
|
||||
}
|
||||
p[i] = (int) (((pY[i] - minPlot) / rangePlot) * profileSize);
|
||||
xp[i] = (int) (((pY[i] - minProfile) / rangeProfile) * profileSize);
|
||||
}
|
||||
|
||||
if (goodRegion) {
|
||||
for (int i = 0; i < x.length; i++) {
|
||||
x[i] = (Double.isNaN(gY[i])) ? -1 : x[i];
|
||||
}
|
||||
if (goodRegion && id.gr_size_x>0){
|
||||
xg = Arrays.copyOfRange(xg, id.gr_pos_y, id.gr_pos_y + id.gr_size_y);
|
||||
yg = Arrays.copyOfRange(yg, id.gr_pos_y, id.gr_pos_y + id.gr_size_y);
|
||||
}
|
||||
hgaussian = new Overlays.Polyline(penFit, x, y);
|
||||
hprofile = new Overlays.Polyline(renderer.getPenProfile(), p, y);
|
||||
hgaussian = new Overlays.Polyline(penFit, xg, yg);
|
||||
hprofile = new Overlays.Polyline(renderer.getPenProfile(), xp, yp);
|
||||
}
|
||||
} catch (Exception ex) {
|
||||
System.err.println(ex.getMessage());
|
||||
ex.printStackTrace();
|
||||
return null;
|
||||
}
|
||||
} else {
|
||||
@@ -2190,6 +2213,11 @@ public class ScreenPanel2 extends Panel {
|
||||
} catch (Exception ex) {
|
||||
dataTableModel.addRow(new Object[]{"Stream", ex.getMessage()});
|
||||
}
|
||||
try {
|
||||
dataTableModel.addRow(new Object[]{"PID", value.getPulseId()});
|
||||
} catch (Exception ex) {
|
||||
dataTableModel.addRow(new Object[]{"PID", ex.getMessage()});
|
||||
}
|
||||
Collections.sort(ids);
|
||||
for (String id : ids) {
|
||||
dataTableModel.addRow(new Object[]{id, ""});
|
||||
@@ -2197,7 +2225,7 @@ public class ScreenPanel2 extends Panel {
|
||||
}
|
||||
Frame frame = getCurrentFrame();
|
||||
if ((frame != null) && (frame.cache!=null)){
|
||||
for (int i = 2; i < dataTableModel.getRowCount(); i++) {
|
||||
for (int i = 3; i < dataTableModel.getRowCount(); i++) {
|
||||
String id = String.valueOf(dataTableModel.getValueAt(i, 0));
|
||||
//Object obj = server.getValue(id);
|
||||
Object obj = frame.cache.getValue(id);
|
||||
|
||||
1
script/*.scan
Normal file
1
script/*.scan
Normal file
@@ -0,0 +1 @@
|
||||
[ "Time series", [ ], [ [ "CamServer", "http://localhost:8889/simulation_sp1?channel=x_fit_mean", 3, 0.1, null ] ], false, [ ], "", 20, 0.0, 0.1, true, true, true, true, "", "", "Default", "h5", 0, null, null, "Positioner" ]
|
||||
21
script/20150418_1152_DyE_minus2.xml
Normal file
21
script/20150418_1152_DyE_minus2.xml
Normal file
@@ -0,0 +1,21 @@
|
||||
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
|
||||
<configuration xmlns="http://www.psi.ch/~ebner/models/scan/1.0" numberOfExecution="1" failOnSensorError="true">
|
||||
<data format="txt" fileName="DyE_minus"/>
|
||||
<scan id="">
|
||||
<preAction xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="ChannelAction" channel="ACOAU-ACCU:OP-MODE" value="Light Available" operation="wait"/>
|
||||
<preAction xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="ChannelAction" channel="X07MA-ID:MODE" value="CIRC -" delay="1.0"/>
|
||||
<preAction xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="ChannelAction" channel="X07MA-ID:DONE" value="DONE" operation="wait"/>
|
||||
<preAction xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="ChannelAction" channel="X07MA-ID:ENERGY-OFFS" value="-4.0"/>
|
||||
<preAction xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="ChannelAction" channel="X07MA-ID:DONE" value="DONE" operation="wait"/>
|
||||
<preAction xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="ChannelAction" channel="E1" value="1265"/>
|
||||
<preAction xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="ChannelAction" channel="E2" value="1355"/>
|
||||
<preAction xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="ChannelAction" channel="TIME" value="3"/>
|
||||
<preAction xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="ChannelAction" channel="FOLDER" value="2015_04/20150418"/>
|
||||
<preAction xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="ChannelAction" channel="X07MA-PHS-E:GO.A" value="1265" operation="putq"/>
|
||||
<preAction xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="ChannelAction" channel="X07MA-PHS:alldone" value="1" operation="wait" delay="0.5"/>
|
||||
<preAction xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="ChannelAction" channel="FILE" value="DyE_minus" delay="0.1"/>
|
||||
<preAction xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="ChannelAction" channel="X07MA-OP-VG13:WT_SET" value="Try open" delay="10.0"/>
|
||||
<preAction xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="ChannelAction" channel="START" value="1"/>
|
||||
<postAction xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="ChannelAction" channel="START" value="STOP" operation="wait" delay="2.0"/>
|
||||
</scan>
|
||||
</configuration>
|
||||
172
script/Correlation/Correlation.py
Normal file
172
script/Correlation/Correlation.py
Normal file
@@ -0,0 +1,172 @@
|
||||
import math
|
||||
import sys, traceback
|
||||
from mathutils import fit_polynomial, PolynomialFunction
|
||||
from plotutils import plot_line, plot_function
|
||||
from ch.psi.pshell.swing.Shell import STDOUT_COLOR
|
||||
import org.apache.commons.math3.stat.correlation.PearsonsCorrelation as PearsonsCorrelation
|
||||
|
||||
start_task("outupdate", 0.0, 0.0)
|
||||
|
||||
if get_exec_pars().source == CommandSource.ui:
|
||||
#dx = "SINEG01-RLLE-REF10:SIG-PHASE-AVG"
|
||||
#dy = "SINEG01-RLLE-REF20:SIG-PHASE-AVG"
|
||||
#dx = "SINEG01-RGUN-PUP10:SIG-AMPLT-AVG 4"
|
||||
#dy = "SINEG01-RGUN-PUP20:SIG-AMPLT-AVG 4"
|
||||
|
||||
#dx = "SINDI01-RKLY-DCP10:REF-AMPLT"
|
||||
#dy = "SINDI01-RKLY-DCP10:REF-PHASE"
|
||||
|
||||
#dx = "SINDI01-RLLE-REF10:SIG-PHASE-AVG"
|
||||
#dy = "SINDI01-RLLE-REF20:SIG-PHASE-AVG"
|
||||
|
||||
dx = "TESTIOC:TESTCALCOUT:Input"
|
||||
dx = "TESTIOC:TESTCALCOUT:Output"
|
||||
|
||||
dy = "TESTIOC:TESTSINUS:SinCalc"
|
||||
|
||||
#dx = "SINEG01-DICT215:B1_CHARGE"
|
||||
#dy = "SINEG01-DBPM314:Q1"
|
||||
|
||||
|
||||
#dx=gsx.getReadback()
|
||||
#dy=gsy.getReadback()
|
||||
interval = 0.1
|
||||
window = 40
|
||||
p = plot(None)[0]
|
||||
bs = False
|
||||
linear_fit = True
|
||||
quadratic_fit = True
|
||||
#print dx
|
||||
#print dy
|
||||
corr = None
|
||||
pars_lin = None
|
||||
pars_quad = None
|
||||
|
||||
|
||||
for s in p.getAllSeries():
|
||||
p.removeSeries(s)
|
||||
|
||||
_stream = None
|
||||
|
||||
instances = []
|
||||
|
||||
def _get_device(d):
|
||||
global _stream
|
||||
egu = None
|
||||
if isinstance(d, basestring):
|
||||
name = d.strip()
|
||||
d = None
|
||||
try:
|
||||
d = get_device(name)
|
||||
if d is None:
|
||||
d = eval(name)
|
||||
#print name
|
||||
if d is not None:
|
||||
if not isinstance(r, Device):
|
||||
d = None
|
||||
else:
|
||||
try:
|
||||
egu = d.unit
|
||||
except:
|
||||
pass
|
||||
except:
|
||||
pass
|
||||
if d is None:
|
||||
offset = 0
|
||||
if " " in name:
|
||||
tokens = name.split(" ")
|
||||
name = tokens[0]
|
||||
offset = int(tokens[1])
|
||||
if bs == True:
|
||||
if _stream == None:
|
||||
_stream = Stream("corr_stream", dispatcher)
|
||||
instances.append(_stream)
|
||||
d = _stream.addScalar(name, name, int(interval*100), offset)
|
||||
else:
|
||||
d = Channel(name)
|
||||
instances.append(d)
|
||||
try:
|
||||
egu = caget(name+".EGU",'s')
|
||||
except:
|
||||
pass
|
||||
else:
|
||||
try:
|
||||
egu = d.unit
|
||||
except:
|
||||
pass
|
||||
|
||||
return d, egu
|
||||
|
||||
dx, egux = _get_device(dx)
|
||||
dy, eguy = _get_device(dy)
|
||||
|
||||
p.getAxis(p.AxisId.X).setLabel(egux)
|
||||
p.getAxis(p.AxisId.Y).setLabel(eguy)
|
||||
|
||||
|
||||
try:
|
||||
if _stream != None:
|
||||
_stream.initialize()
|
||||
_stream.start(True)
|
||||
p.addSeries(LinePlotSeries("Data"))
|
||||
sd=p.getSeries(0)
|
||||
sd.setLinesVisible(False)
|
||||
|
||||
sd.setPointSize(4)
|
||||
|
||||
if get_exec_pars().source == CommandSource.ui:
|
||||
if globals().has_key("marker"):
|
||||
p.removeMarker(marker)
|
||||
marker=None
|
||||
|
||||
|
||||
while(True):
|
||||
#Sample and plot data
|
||||
if bs == True:
|
||||
_stream.waitValueNot(_stream.take(), 10000)
|
||||
(x,y) = _stream.take().values
|
||||
else:
|
||||
x=dx.read()
|
||||
y=dy.read()
|
||||
sd.appendData(x, y)
|
||||
if len(sd.x) > window:
|
||||
#Remove First Element
|
||||
sd.token.remove(0)
|
||||
ax = sd.x
|
||||
ay = sd.y
|
||||
if len(ax)>2:
|
||||
x1, x2 = min(ax), max(ax)
|
||||
res = (x2-x1)/100
|
||||
if x1!=x2:
|
||||
#Display correlation
|
||||
corr= PearsonsCorrelation().correlation(to_array(ax,'d'), to_array(ay,'d'))
|
||||
s = "Correlation=" + str(round(corr,4))
|
||||
#print s
|
||||
if get_exec_pars().source == CommandSource.ui:
|
||||
if marker is not None:
|
||||
p.removeMarker(marker)
|
||||
marker = p.addMarker(x2+res, p.AxisId.X, s, p.getBackground())
|
||||
marker.setLabelPaint(STDOUT_COLOR)
|
||||
if linear_fit:
|
||||
#Calculate, print and plot linear fit
|
||||
pars_lin = (a0,a1) = fit_polynomial(ay, ax, 1)
|
||||
#print "Fit lin a1:" , a1, " a0:",a0
|
||||
y1 = poly(x1, pars_lin)
|
||||
y2 = poly(x2, pars_lin)
|
||||
plot_line(p, x1, y1, x2, y2, width = 2, color = Color.BLUE, name = "Fit Linear")
|
||||
if quadratic_fit:
|
||||
#Calculate, print and plot quadratic fit
|
||||
pars_quad = (a0,a1,a2) = fit_polynomial(ay, ax, 2)
|
||||
#print "Fit quad a2:" , a2, "a1:" , a1, " a0:",a0
|
||||
fitted_quad_function = PolynomialFunction(pars_quad)
|
||||
ax = frange(x1, x2, res, True)
|
||||
plot_function(p, fitted_quad_function, "Fit Quadratic", ax, color=Color.GREEN)
|
||||
if bs != True:
|
||||
time.sleep(interval)
|
||||
finally:
|
||||
for dev in instances:
|
||||
dev.close()
|
||||
stop_task("outupdate")
|
||||
|
||||
|
||||
|
||||
197
script/Correlation/Correlation2.py
Normal file
197
script/Correlation/Correlation2.py
Normal file
@@ -0,0 +1,197 @@
|
||||
import math
|
||||
import sys, traceback
|
||||
from mathutils import fit_polynomial, PolynomialFunction
|
||||
from plotutils import plot_line, plot_function
|
||||
from ch.psi.pshell.swing.Shell import STDOUT_COLOR
|
||||
import org.apache.commons.math3.stat.correlation.PearsonsCorrelation as PearsonsCorrelation
|
||||
import ch.psi.pshell.bs.PipelineServer as PipelineServer
|
||||
|
||||
start_task("outupdate", 0.0, 0.0)
|
||||
TYPE_CHANNEL = 0
|
||||
TYPE_STREAM = 1
|
||||
TYPE_CAMERA= 2
|
||||
|
||||
if get_exec_pars().source == CommandSource.ui:
|
||||
#dx = "SINDI01-RLLE-STA:SLAVE1-CPUTIMER"
|
||||
dx = "Int16Scalar"
|
||||
##dx = "SLG-LCAM-C042 x_rms"
|
||||
dxtype = TYPE_STREAM
|
||||
#dxtype = TYPE_CHANNEL
|
||||
#dxtype = TYPE_CAMERA
|
||||
#dy = "SINDI01-RLLE-STA:SLAVE1-DLTIMER"
|
||||
#dy = "SLG-LCAM-C042 y_rms"
|
||||
dy = "Int8Scalar"
|
||||
dytype = TYPE_STREAM
|
||||
#dytype = TYPE_CHANNEL
|
||||
#dytype = TYPE_CAMERA
|
||||
interval = 0.10
|
||||
window = 40
|
||||
p = plot(None)[0]
|
||||
linear_fit = True
|
||||
quadratic_fit = True
|
||||
print dx, dxtype
|
||||
print dy, dytype
|
||||
|
||||
corr = None
|
||||
pars_lin = None
|
||||
pars_quad = None
|
||||
|
||||
|
||||
|
||||
bs = TYPE_STREAM in [dxtype, dytype]
|
||||
|
||||
|
||||
for s in p.getAllSeries():
|
||||
p.removeSeries(s)
|
||||
|
||||
_stream = None
|
||||
_camname = None
|
||||
|
||||
instances = []
|
||||
|
||||
def _get_device(d, type):
|
||||
global _stream, _camname
|
||||
egu = None
|
||||
if isinstance(d, basestring):
|
||||
name = d.strip()
|
||||
d = None
|
||||
try:
|
||||
d = get_device(name)
|
||||
if d is None:
|
||||
d = eval(name)
|
||||
#print name
|
||||
if d is not None:
|
||||
if not isinstance(r, Device):
|
||||
d = None
|
||||
else:
|
||||
try:
|
||||
egu = d.unit
|
||||
except:
|
||||
pass
|
||||
except:
|
||||
pass
|
||||
if d is None:
|
||||
offset = 0
|
||||
|
||||
if type==TYPE_STREAM:
|
||||
if " " in name:
|
||||
tokens = name.split(" ")
|
||||
name = tokens[0]
|
||||
offset = int(tokens[1])
|
||||
if _stream == None:
|
||||
_stream = Stream("corr_stream", dispatcher)
|
||||
instances.append(_stream)
|
||||
d = _stream.addScalar(name, name, int(interval*100), offset)
|
||||
elif type==TYPE_CHANNEL:
|
||||
d = Channel(name)
|
||||
d.set_monitored(True)
|
||||
elif type==TYPE_CAMERA:
|
||||
tokens = name.split(" ")
|
||||
_camname = tokens[0]
|
||||
field = tokens[1]
|
||||
return field, ""
|
||||
else:
|
||||
raise Exception("Invalid type: " + str(type))
|
||||
if not isinstance(d, basestring):
|
||||
instances.append(d)
|
||||
try:
|
||||
egu = caget(name+".EGU",'s')
|
||||
except:
|
||||
pass
|
||||
else:
|
||||
try:
|
||||
egu = d.unit
|
||||
except:
|
||||
pass
|
||||
return d, egu
|
||||
|
||||
dx, egux = _get_device(dx, dxtype)
|
||||
dy, eguy = _get_device(dy, dytype)
|
||||
|
||||
p.getAxis(p.AxisId.X).setLabel(egux)
|
||||
p.getAxis(p.AxisId.Y).setLabel(eguy)
|
||||
|
||||
|
||||
try:
|
||||
if _stream != None:
|
||||
_stream.initialize()
|
||||
_stream.start(True)
|
||||
if _camname != None:
|
||||
cam_server.start(_camname )
|
||||
cam_server.stream.waitCacheChange(10000);
|
||||
if dxtype==TYPE_CAMERA:
|
||||
dx=cam_server.stream.getChild(dx)
|
||||
if dytype==TYPE_CAMERA:
|
||||
dy=cam_server.stream.getChild(dy)
|
||||
|
||||
p.addSeries(LinePlotSeries("Data"))
|
||||
sd=p.getSeries(0)
|
||||
sd.setLinesVisible(False)
|
||||
|
||||
sd.setPointSize(4)
|
||||
|
||||
if get_exec_pars().source == CommandSource.ui:
|
||||
if globals().has_key("marker"):
|
||||
p.removeMarker(marker)
|
||||
marker=None
|
||||
|
||||
|
||||
while(True):
|
||||
#Sample and plot data
|
||||
if bs == True:
|
||||
_stream.waitValueNot(_stream.take(), 10000)
|
||||
bsdata = list(_stream.take().values)
|
||||
|
||||
if dxtype==TYPE_CHANNEL:
|
||||
x=dx.read()
|
||||
elif dxtype==TYPE_STREAM:
|
||||
x=bsdata.pop(0)
|
||||
elif dxtype==TYPE_CAMERA:
|
||||
x=dx.read()
|
||||
|
||||
if dytype==TYPE_CHANNEL:
|
||||
y=dy.read()
|
||||
elif dytype==TYPE_STREAM:
|
||||
y=bsdata.pop(0)
|
||||
elif dytype==TYPE_CAMERA:
|
||||
y=dy.read()
|
||||
|
||||
sd.appendData(x, y)
|
||||
if len(sd.x) > window:
|
||||
#Remove First Element
|
||||
sd.token.remove(0)
|
||||
ax = sd.x
|
||||
ay = sd.y
|
||||
if len(ax)>2:
|
||||
x1, x2 = min(ax), max(ax)
|
||||
res = (x2-x1)/100
|
||||
if x1!=x2:
|
||||
#Display correlation
|
||||
corr= PearsonsCorrelation().correlation(to_array(ax,'d'), to_array(ay,'d'))
|
||||
s = "Correlation=" + str(round(corr,4))
|
||||
#print s
|
||||
if get_exec_pars().source == CommandSource.ui:
|
||||
if marker is not None:
|
||||
p.removeMarker(marker)
|
||||
marker = p.addMarker(x2+res, p.AxisId.X, s, p.getBackground())
|
||||
marker.setLabelPaint(STDOUT_COLOR)
|
||||
if linear_fit:
|
||||
#Calculate, print and plot linear fit
|
||||
pars_lin = (a0,a1) = fit_polynomial(ay, ax, 1)
|
||||
#print "Fit lin a1:" , a1, " a0:",a0
|
||||
y1 = poly(x1, pars_lin)
|
||||
y2 = poly(x2, pars_lin)
|
||||
plot_line(p, x1, y1, x2, y2, width = 2, color = Color.BLUE, name = "Fit Linear")
|
||||
if quadratic_fit:
|
||||
#Calculate, print and plot quadratic fit
|
||||
pars_quad = (a0,a1,a2) = fit_polynomial(ay, ax, 2)
|
||||
#print "Fit quad a2:" , a2, "a1:" , a1, " a0:",a0
|
||||
fitted_quad_function = PolynomialFunction(pars_quad)
|
||||
ax = frange(x1, x2, res, True)
|
||||
plot_function(p, fitted_quad_function, "Fit Quadratic", ax, color=Color.GREEN)
|
||||
if bs != True:
|
||||
time.sleep(interval)
|
||||
finally:
|
||||
for dev in instances:
|
||||
dev.close()
|
||||
stop_task("outupdate")
|
||||
300
script/Devices/Elements.py
Normal file
300
script/Devices/Elements.py
Normal file
@@ -0,0 +1,300 @@
|
||||
DBPM = 1
|
||||
DWSC = 2
|
||||
DBLM = 3
|
||||
DLAC = 4
|
||||
|
||||
elements = (
|
||||
(DBPM, "SINEG01-DBPM340", 3.0149),
|
||||
(DBPM, "SINSB01-DBPM150", 7.9459),
|
||||
(DBPM, "SINSB02-DBPM150", 12.9489),
|
||||
(DBPM, "SINLH01-DBPM060", 14.5560),
|
||||
(DBPM, "SINLH02-DBPM210", 16.4806),
|
||||
(DBLM, "SINLH02-DBLM230", 16.8546),
|
||||
(DBPM, "SINLH02-DBPM240", 17.4826),
|
||||
(DBLM, "SINLH02-DBLM235", 17.4049),
|
||||
(DBPM, "SINLH03-DBPM010", 19.2172),
|
||||
(DBPM, "SINLH03-DBPM050", 20.4632),
|
||||
(DBPM, "SINLH03-DBPM090", 21.6002),
|
||||
(DBPM, "SINSB03-DBPM120", 26.9202),
|
||||
(DBPM, "SINSB03-DBPM220", 32.4202),
|
||||
(DBPM, "SINSB04-DBPM120", 37.9202),
|
||||
(DBPM, "SINSB04-DBPM220", 43.4202),
|
||||
(DBPM, "SINSB05-DBPM120", 48.9202),
|
||||
(DBPM, "SINSB05-DBPM220", 54.4202),
|
||||
(DBPM, "SINXB01-DBPM120", 56.2420),
|
||||
(DBPM, "SINBC01-DBPM010", 60.2912),
|
||||
(DBPM, "SINBC01-DBPM030", 61.0912),
|
||||
(DBPM, "SINBC01-DBPM080", 64.4172),
|
||||
(DBPM, "SINBC01-DBPM100", 65.9432),
|
||||
(DBPM, "SINBC02-DBPM140", 69.3739),
|
||||
(DBPM, "SINBC02-DBPM320", 77.5610),
|
||||
(DBPM, "SINDI01-DBPM010", 82.0967),
|
||||
(DBPM, "SINDI01-DBPM060", 83.5117),
|
||||
(DWSC, "SINDI01-DWSC090", 84.2537),
|
||||
(DBPM, "SINDI02-DBPM010", 86.0767),
|
||||
(DBLM, "SINDI02-DBLM025", 86.862),
|
||||
(DBPM, "SINDI02-DBPM040", 88.3857),
|
||||
(DLAC, "SINDI02-DLAC0550", 89.05),
|
||||
(DBPM, "SINDI02-DBPM080", 90.6297),
|
||||
(DBLM, "SINDI02-DBLM085", 90.7796),
|
||||
(DBPM, "S10CB01-DBPM220", 95.3947),
|
||||
(DBPM, "S10CB01-DBPM420", 100.2947),
|
||||
(DBPM, "S10CB02-DBPM220", 105.1947),
|
||||
(DBPM, "S10CB02-DBPM420", 110.0947),
|
||||
(DWSC, "S10DI01-DWSC010", 110.8237),
|
||||
(DBLM, "S10DI01-DBLM045", 111.67),
|
||||
(DBLM, "S10DI01-DBLM015", 113.632),
|
||||
(DBPM, "S10BD01-DBPM020", 114.6628),
|
||||
(DBPM, "S10DI01-DBPM110", 114.9947),
|
||||
(DBPM, "S10CB03-DBPM220", 119.8947),
|
||||
(DBPM, "S10CB03-DBPM420", 124.7947),
|
||||
(DWSC, "S10CB03-DWSC440", 125.2127),
|
||||
(DBPM, "S10CB04-DBPM220", 129.6947),
|
||||
(DBLM, "S10CB04-DBLM240", 130.1525),
|
||||
(DBPM, "S10CB04-DBPM420", 134.5947),
|
||||
(DBPM, "S10CB05-DBPM220", 139.4947),
|
||||
(DBPM, "S10CB05-DBPM420", 144.3947),
|
||||
(DWSC, "S10CB05-DWSC440", 144.8127),
|
||||
(DBPM, "S10CB06-DBPM220", 149.2947),
|
||||
(DBLM, "S10CB06-DBLM240", 149.7525),
|
||||
(DBPM, "S10CB06-DBPM420", 154.1947),
|
||||
(DBPM, "S10CB07-DBPM220", 159.0947),
|
||||
(DBPM, "S10CB07-DBPM420", 163.9947),
|
||||
(DWSC, "S10CB07-DWSC440", 164.4127),
|
||||
(DBPM, "S10CB08-DBPM220", 168.8947),
|
||||
(DBLM, "S10CB08-DBLM240", 169.3525),
|
||||
(DBPM, "S10CB08-DBPM420", 173.7947),
|
||||
(DBPM, "S10CB09-DBPM220", 178.6947),
|
||||
(DBPM, "S10BC01-DBPM010", 183.5947),
|
||||
(DWSC, "S10BC01-DWSC030", 184.0127),
|
||||
(DBPM, "S10BC01-DBPM050", 187.8307),
|
||||
(DBLM, "S10BC01-DBLM065", 188.684),
|
||||
(DBPM, "S10BC01-DBPM090", 192.2847),
|
||||
(DBPM, "S10BC02-DBPM140", 196.7798),
|
||||
(DBPM, "S10BC02-DBPM320", 206.5399),
|
||||
(DBPM, "S10MA01-DBPM010", 211.2940),
|
||||
(DBPM, "S10MA01-DBPM060", 215.6600),
|
||||
(DBPM, "S10MA01-DBPM120", 220.1200),
|
||||
(DBPM, "S20CB01-DBPM420", 229.0850),
|
||||
(DWSC, "S20CB01-DWSC440", 229.5030),
|
||||
(DBPM, "S20CB02-DBPM420", 238.1850),
|
||||
(DBLM, "S20CB02-DBLM435", 238.6325),
|
||||
(DBPM, "S20CB03-DBPM420", 247.2850),
|
||||
(DBPM, "S20SY01-DBPM010", 256.3850),
|
||||
(DBPM, "S20SY01-DBPM040", 262.5020),
|
||||
(DBPM, "S20SY01-DBPM060", 263.6280),
|
||||
(DWSC, "S20SY01-DWSC070", 263.8280),
|
||||
(DBLM, "S20SY02-DBLM075", 268.617),
|
||||
(DBPM, "S20SY02-DBPM080", 269.1300),
|
||||
(DBPM, "S20SY02-DBPM120", 271.0800),
|
||||
(DBPM, "S20SY02-DBPM150", 272.7600),
|
||||
(DWSC, "S20SY02-DWSC160", 273.1350),
|
||||
(DBPM, "SATSY01-DBPM010", 279.1202),
|
||||
(DBPM, "S20SY03-DBPM010", 280.6200),
|
||||
(DBLM, "S20SY03-DBLM025", 281.156),
|
||||
(DBPM, "SATSY01-DBPM060", 284.6202),
|
||||
(DBPM, "S20SY03-DBPM040", 286.9200),
|
||||
(DBPM, "SATSY01-DBPM100", 288.9352),
|
||||
(DBPM, "S20SY03-DBPM080", 294.3800),
|
||||
(DWSC, "S20SY03-DWSC090", 294.5800),
|
||||
(DBLM, "S20SY03-DBLM110", 295.258),
|
||||
(DBPM, "SATSY01-DBPM240", 296.1202),
|
||||
(DBPM, "SATSY01-DBPM290", 302.8202),
|
||||
(DBPM, "S30CB01-DBPM420", 303.8150),
|
||||
(DWSC, "S30CB01-DWSC440", 304.2330),
|
||||
(DBPM, "SATSY02-DBPM020", 306.8663),
|
||||
(DBPM, "SATSY02-DBPM210", 312.6833),
|
||||
(DBPM, "S30CB02-DBPM420", 312.9150),
|
||||
(DBLM, "S30CB02-DBLM445", 313.3625),
|
||||
(DBPM, "SATSY03-DBPM030", 317.2003),
|
||||
(DBPM, "SATSY03-DBPM060", 320.0003),
|
||||
(DBPM, "S30CB03-DBPM420", 322.0150),
|
||||
(DBPM, "SATSY03-DBPM090", 322.8003),
|
||||
(DWSC, "SATSY03-DWSC110", 325.1663),
|
||||
(DBPM, "SATSY03-DBPM120", 325.6003),
|
||||
(DBPM, "S30CB04-DBPM420", 331.1150),
|
||||
(DBLM, "SATCL01-DBLM135", 331.1179),
|
||||
(DBPM, "SATCL01-DBPM140", 331.9185),
|
||||
(DBPM, "S30CB05-DBPM420", 340.2150),
|
||||
(DWSC, "S30CB05-DWSC440", 340.6330),
|
||||
(DBPM, "SATDI01-DBPM030", 340.8637),
|
||||
(DBPM, "SATDI01-DBPM060", 342.7637),
|
||||
(DWSC, "SATDI01-DWSC065", 342.9837),
|
||||
(DBPM, "SATDI01-DBPM210", 349.2197),
|
||||
(DBPM, "S30CB06-DBPM420", 349.3150),
|
||||
(DBLM, "S30CB06-DBLM445", 349.7625),
|
||||
(DBLM, "SATDI01-DBLM225", 349.5613),
|
||||
(DBPM, "SATDI01-DBPM240", 352.1997),
|
||||
(DBPM, "SATDI01-DBPM270", 355.1797),
|
||||
(DWSC, "SATDI01-DWSC290", 356.4797),
|
||||
(DBPM, "SATDI01-DBPM320", 357.0907),
|
||||
(DBPM, "S30CB07-DBPM420", 358.4150),
|
||||
(DBPM, "SATCB01-DBPM220", 361.5357),
|
||||
(DBLM, "SATCB01-DBLM245", 361.8578),
|
||||
(DBPM, "SATCB01-DBPM420", 366.4357),
|
||||
(DBPM, "S30CB08-DBPM420", 367.5150),
|
||||
(DBPM, "S30CB09-DBPM420", 376.6150),
|
||||
(DWSC, "S30CB09-DWSC440", 377.0330),
|
||||
(DBPM, "SATMA01-DBPM010", 377.2657),
|
||||
(DBPM, "SATMA01-DBPM020", 379.3147),
|
||||
(DBPM, "SATMA01-DBPM040", 381.3637),
|
||||
(DBLM, "SATMA01-DBLM065", 381.6948),
|
||||
(DBPM, "S30CB10-DBPM420", 385.7150),
|
||||
(DBLM, "S30CB10-DBLM445", 386.271),
|
||||
(DBPM, "S30CB11-DBPM420", 394.8150),
|
||||
(DBPM, "S30CB12-DBPM420", 403.9150),
|
||||
(DBPM, "S30CB13-DBPM420", 413.0150),
|
||||
(DWSC, "S30CB13-DWSC440", 413.4330),
|
||||
(DBPM, "S30CB14-DBPM420", 422.1150),
|
||||
(DBLM, "S30CB14-DBLM445", 422.691),
|
||||
(DBPM, "S30CB15-DBPM420", 431.2150),
|
||||
(DBPM, "SARCL01-DBPM010", 440.3150),
|
||||
(DBPM, "SARCL01-DBPM060", 444.6750),
|
||||
(DBPM, "SARCL01-DBPM120", 450.7750),
|
||||
(DBPM, "SARCL01-DBPM150", 452.1510),
|
||||
(DWSC, "SARCL01-DWSC160", 452.3510),
|
||||
(DBPM, "SARCL02-DBPM110", 456.9100),
|
||||
(DBLM, "SARCL02-DBLM135", 457.9854),
|
||||
(DBPM, "SARCL02-DBPM220", 460.9609),
|
||||
(DBPM, "SARCL02-DBPM260", 462.7569),
|
||||
(DWSC, "SARCL02-DWSC270", 462.9769),
|
||||
(DBPM, "SARCL02-DBPM330", 466.6909),
|
||||
(DBLM, "SARCL02-DBLM355", 467.686),
|
||||
(DBPM, "SARCL02-DBPM470", 471.1067),
|
||||
(DBPM, "SARMA01-DBPM040", 476.4267),
|
||||
(DBPM, "SARMA01-DBPM100", 483.2767),
|
||||
(DBPM, "SARMA02-DBPM010", 487.9617),
|
||||
(DBPM, "SARMA02-DBPM020", 490.0107),
|
||||
(DBPM, "SARMA02-DBPM040", 492.0597),
|
||||
(DWSC, "SARMA02-DWSC060", 492.3767),
|
||||
(DBPM, "SARMA02-DBPM110", 496.8097),
|
||||
(DBLM, "SARUN01-DBLM065", 497.8392),
|
||||
(DBPM, "SARUN01-DBPM070", 501.5597),
|
||||
(DBPM, "SARUN02-DBPM070", 506.3097),
|
||||
(DBLM, "SARUN03-DBLM030", 506.671),
|
||||
(DBPM, "SARUN03-DBPM070", 511.0597),
|
||||
(DBLM, "SARUN04-DBLM030", 511.421),
|
||||
(DBPM, "SARUN04-DBPM070", 515.8097),
|
||||
(DBLM, "SARUN05-DBLM030", 516.171),
|
||||
(DBPM, "SARUN05-DBPM070", 520.5597),
|
||||
(DBLM, "SARUN06-DBLM030", 520.921),
|
||||
(DBPM, "SARUN06-DBPM070", 525.3097),
|
||||
(DBLM, "SARUN07-DBLM030", 525.671),
|
||||
(DBPM, "SARUN07-DBPM070", 530.0597),
|
||||
(DBLM, "SARUN08-DBLM030", 530.421),
|
||||
(DBPM, "SARUN08-DBPM070", 534.8097),
|
||||
(DBLM, "SARUN09-DBLM030", 535.171),
|
||||
(DBPM, "SARUN09-DBPM070", 539.5597),
|
||||
(DBLM, "SARUN10-DBLM030", 539.921),
|
||||
(DBPM, "SARUN10-DBPM070", 544.3097),
|
||||
(DBLM, "SARUN11-DBLM030", 544.671),
|
||||
(DBPM, "SARUN11-DBPM070", 549.0597),
|
||||
(DBLM, "SARUN12-DBLM030", 549.421),
|
||||
(DBPM, "SARUN12-DBPM070", 553.8097),
|
||||
(DBLM, "SARUN13-DBLM030", 554.171),
|
||||
(DBPM, "SARUN13-DBPM070", 558.5597),
|
||||
(DBLM, "SARUN14-DBLM030", 558.921),
|
||||
(DBPM, "SARUN14-DBPM070", 563.3097),
|
||||
(DBLM, "SARUN15-DBLM030", 563.671),
|
||||
(DBLM, "SARUN15-DBLM035", 567.970985),
|
||||
(DBPM, "SARUN15-DBPM070", 568.0597),
|
||||
(DBPM, "SARUN16-DBPM070", 572.8097),
|
||||
(DBPM, "SARUN17-DBPM070", 577.5597),
|
||||
(DBPM, "SARUN18-DBPM070", 582.3097),
|
||||
(DBPM, "SARUN19-DBPM070", 587.0597),
|
||||
(DWSC, "SARUN20-DWSC010", 587.3767),
|
||||
(DBLM, "SARUN20-DBLM035", 590.41),
|
||||
(DBPM, "SARUN20-DBPM070", 591.8097),
|
||||
(DBPM, "SARBD01-DBPM040", 593.9132),
|
||||
(DBPM, "SARBD02-DBPM010", 596.9584),
|
||||
(DBPM, "SARBD02-DBPM040", 598.3943),
|
||||
)
|
||||
|
||||
def get_section(element):
|
||||
return element[1:3]
|
||||
|
||||
def get_section_type(element):
|
||||
section = get_section(element)
|
||||
if section == "AR": return "ARAMIS"
|
||||
if section == "AT": return "ATH0S"
|
||||
if section == "IN": return "INJECTOR"
|
||||
if section in["10", "20", "30"] : return "LINAC"
|
||||
return None
|
||||
|
||||
def get_beamline(element):
|
||||
section = get_section(element)
|
||||
if section == "AR": return "ARAMIS"
|
||||
if section == "AT": return "ATH0S"
|
||||
return None
|
||||
|
||||
def get_blms():
|
||||
ret = []
|
||||
for element in elements:
|
||||
if element[0]==DBLM:
|
||||
ret.append(element[1])
|
||||
return ret
|
||||
|
||||
|
||||
def get_bpms():
|
||||
ret = []
|
||||
for element in elements:
|
||||
if element[0]==DBPM:
|
||||
ret.append(element[1])
|
||||
return ret
|
||||
|
||||
def get_wire_scanners():
|
||||
ret = []
|
||||
for element in elements:
|
||||
if element[0]==DWSC:
|
||||
ret.append(element[1])
|
||||
return ret
|
||||
|
||||
def get_dlacs():
|
||||
ret = []
|
||||
for element in elements:
|
||||
if element[0]==DLAC:
|
||||
ret.append(element[1])
|
||||
return ret
|
||||
|
||||
def get_wire_scanners_bpms(wire_scan):
|
||||
last = None
|
||||
ret = []
|
||||
for element in elements:
|
||||
if element[0]==DWSC and element[1] == wire_scan:
|
||||
ret = [last,]
|
||||
elif element[0]==DBPM:
|
||||
if get_beamline(element[1]) == get_beamline(wire_scan):
|
||||
if len(ret) > 0:
|
||||
return [ret[0],element[1]]
|
||||
last = element[1]
|
||||
return None
|
||||
|
||||
|
||||
def get_wire_scanners_blms(wire_scan):
|
||||
last = None
|
||||
ret = None
|
||||
for element in elements:
|
||||
if element[0]==DWSC and element[1] == wire_scan:
|
||||
ret = []
|
||||
elif element[0]==DBLM and (ret is not None):
|
||||
bl = get_beamline(element[1])
|
||||
if bl == get_beamline(wire_scan):
|
||||
ret.append(element[1])
|
||||
if len(ret) == 2:
|
||||
return ret
|
||||
if ret is not None and len(ret)==1:
|
||||
ret.append(None)
|
||||
return ret
|
||||
|
||||
def get_camera_type(camera_name):
|
||||
if "LCAM" in camera_name: return "LASER"
|
||||
if "DSCR" in camera_name or \
|
||||
"DSRM" in camera_name or \
|
||||
"DLAC" in camera_name: return "ELECTRONS"
|
||||
if "PROF" in camera_name or \
|
||||
"PPRM" in camera_name or \
|
||||
"PSSS" in camera_name or \
|
||||
"PSCR" in camera_name or \
|
||||
"PSRD" in camera_name: return "PHOTONICS"
|
||||
return "UNKNOWN"
|
||||
|
||||
29
script/Diagnostics/sig_process_wrapper.py
Normal file
29
script/Diagnostics/sig_process_wrapper.py
Normal file
@@ -0,0 +1,29 @@
|
||||
from jeputils import *
|
||||
|
||||
MODULE = "Diagnostics/sig_process"
|
||||
|
||||
def noise_evaluation(noise):
|
||||
return call_jep(MODULE, "noise_evaluation", [to_npa(noise),])
|
||||
|
||||
def blm_remove_spikes(x):
|
||||
ret = call_jep(MODULE, "blm_remove_spikes", [to_npa(x),])
|
||||
return ret if ret is None or is_list(ret) else ret.data
|
||||
|
||||
def blm_normalize(x, q):
|
||||
ret = call_jep(MODULE, "blm_normalize", [to_npa(x), q])
|
||||
return ret if ret is None or is_list(ret) else ret.data
|
||||
|
||||
def motor_to_wire_cs(pos, ctype = 'u', center_pos = 0.0):
|
||||
return call_jep(MODULE, "motor_to_wire_cs", [pos, ctype, center_pos ])
|
||||
|
||||
def remove_beam_jitter(pos, bpm1, bpm2, d_b1_w=1, d_w_b2=1):
|
||||
ret = call_jep(MODULE, "remove_beam_jitter", [to_npa(pos),to_npa(bpm1), to_npa(bpm2), d_b1_w, d_w_b2 ])
|
||||
return ret if ret is None or is_list(ret) else ret.data
|
||||
|
||||
def profile_gauss_stats(x, y, off=None, amp=None, com=None, sigma=None):
|
||||
ret = call_jep(MODULE, "profile_gauss_stats", [to_npa(x), to_npa(y), off, amp, com, sigma])
|
||||
return ret if ret is None or is_list(ret) else ret.data
|
||||
|
||||
def profile_rms_stats(x, y, noise_std=0, n_sigma=3.5):
|
||||
return call_jep(MODULE, "profile_rms_stats", [to_npa(x), to_npa(y), noise_std, n_sigma])
|
||||
|
||||
23
script/HklScan.py
Normal file
23
script/HklScan.py
Normal file
@@ -0,0 +1,23 @@
|
||||
|
||||
vector = [[1.0,1.0,1.0], [1.0,1.0,1.1], [1.0,1.0,1.2], [1.0,1.0,1.4]]
|
||||
|
||||
|
||||
|
||||
def hklscan(vector, readables,latency = 0.0, **pars):
|
||||
readables=to_list(string_to_obj(readables))
|
||||
#names = [readable.name for readable in readables]
|
||||
scan = ManualScan([h, k, l], readables ,vector[0], vector[-1], [len(vector)-1] * 3, dimensions = 1)
|
||||
if not "domain_axis" in pars.keys():
|
||||
pars["domain_axis"] = "Index"
|
||||
processScanPars(scan, pars)
|
||||
scan.start()
|
||||
try:
|
||||
for pos in vector:
|
||||
hkl.write(pos)
|
||||
time.sleep(1.0)
|
||||
scan.append ([h.take(), k.take(), l.take()], [h.getPosition(), k.getPosition(), l.getPosition()], [readable.read() for readable in readables ])
|
||||
finally:
|
||||
scan.end()
|
||||
|
||||
|
||||
hklscan(vector, [sin, arr], 0.9) #, = "Index" )#, line_plots = [sin])
|
||||
@@ -1,6 +0,0 @@
|
||||
start = 0
|
||||
end = 10.0
|
||||
steps = 10
|
||||
|
||||
|
||||
r = lscan (out, sin, start, end, steps, 0.2)
|
||||
@@ -1,39 +0,0 @@
|
||||
|
||||
prosilica.writeParameter("FrameStartTriggerMode","Freerun")
|
||||
prosilica.writeParameter("PixelFormat","Bayer8" )
|
||||
prosilica.writeParameter("GainValue",0)
|
||||
prosilica.writeParameter("ExposureValue",15000)
|
||||
prosilica.writeParameter("FrameRate",5.000)
|
||||
|
||||
|
||||
|
||||
|
||||
print prosilica.readParameter("AcquisitionMode")
|
||||
print prosilica.readParameter("AcquisitionFrameCount")
|
||||
print prosilica.readParameter("FrameRate")
|
||||
|
||||
|
||||
print prosilica.readParameterRange("AcquisitionMode")
|
||||
print prosilica.readParameterRange("AcquisitionFrameCount")
|
||||
print prosilica.readParameterRange("FrameRate")
|
||||
|
||||
|
||||
print prosilica.writeParameter("AcquisitionMode", "SingleFrame")
|
||||
print prosilica.writeParameter("AcquisitionFrameCount", 2)
|
||||
print prosilica.writeParameter("FrameRate", 6.0)
|
||||
|
||||
|
||||
print prosilica.readParameter("AcquisitionMode")
|
||||
print prosilica.readParameter("AcquisitionFrameCount")
|
||||
print prosilica.readParameter("FrameRate")
|
||||
|
||||
|
||||
|
||||
print prosilica.writeParameter("AcquisitionMode", "Continuous")
|
||||
print prosilica.writeParameter("AcquisitionFrameCount", 1)
|
||||
print prosilica.writeParameter("FrameRate", 5.0)
|
||||
|
||||
|
||||
print prosilica.readParameter("AcquisitionMode")
|
||||
print prosilica.readParameter("AcquisitionFrameCount")
|
||||
print prosilica.readParameter("FrameRate")
|
||||
@@ -1,16 +0,0 @@
|
||||
print args
|
||||
if get_exec_pars().source == CommandSource.ui:
|
||||
msg = None
|
||||
start = 0
|
||||
end = 10.0
|
||||
steps = 10
|
||||
else:
|
||||
msg = args[0]
|
||||
start = float(args[1])
|
||||
end = float(args[2])
|
||||
steps = int(args[3])
|
||||
|
||||
|
||||
print msg
|
||||
r = lscan (out, sin, start, end, steps, 0.2)
|
||||
|
||||
1
script/_Lib/diffcalc
Submodule
1
script/_Lib/diffcalc
Submodule
Submodule script/_Lib/diffcalc added at f152dc9fc5
988
script/_Lib/epics/CaChannel.py
Normal file
988
script/_Lib/epics/CaChannel.py
Normal file
@@ -0,0 +1,988 @@
|
||||
"""
|
||||
CaChannel class having identical API as of caPython/CaChannel class,
|
||||
based on PythonCA ( > 1.20.1beta2)
|
||||
|
||||
Author: Xiaoqiang Wang
|
||||
Created: Sep. 22, 2008
|
||||
Changes:
|
||||
"""
|
||||
# python 2 -> 3 compatible layer
|
||||
import sys
|
||||
if sys.hexversion >= 0x03000000:
|
||||
long = int
|
||||
|
||||
import ca
|
||||
|
||||
ca.cs_never_search = 4
|
||||
|
||||
# retrieve numeric waveforms as numpy arrays, default No
|
||||
USE_NUMPY = False
|
||||
|
||||
class CaChannelException(Exception):
|
||||
def __init__(self, status):
|
||||
self.status = str(status)
|
||||
def __str__(self):
|
||||
return self.status
|
||||
|
||||
class CaChannel:
|
||||
"""CaChannel: A Python class with identical API as of caPython/CaChannel.
|
||||
|
||||
This class implements the methods to operate on channel access so that you can find
|
||||
their C library counterparts ,
|
||||
http://www.aps.anl.gov/epics/base/R3-14/12-docs/CAref.html#Function.
|
||||
Therefore an understanding of C API helps much.
|
||||
|
||||
To get started easily, convenient methods are created for often used operations,
|
||||
|
||||
========== ======
|
||||
Operation Method
|
||||
========== ======
|
||||
connect :meth:`searchw`
|
||||
read :meth:`getw`
|
||||
write :meth:`putw`
|
||||
========== ======
|
||||
|
||||
They have shorter names and default arguments. It is recommended to start with these methods.
|
||||
Study the other C alike methods when necessary.
|
||||
|
||||
>>> import CaChannel
|
||||
>>> chan = CaChannel.CaChannel('catest')
|
||||
>>> chan.searchw()
|
||||
>>> chan.putw(12.3)
|
||||
>>> chan.getw()
|
||||
12.3
|
||||
"""
|
||||
|
||||
ca_timeout = 3.0
|
||||
|
||||
dbr_d = {}
|
||||
dbr_d[ca.DBR_SHORT] = int
|
||||
dbr_d[ca.DBR_INT] = int
|
||||
dbr_d[ca.DBR_LONG] = int
|
||||
dbr_d[ca.DBR_FLOAT] = float
|
||||
dbr_d[ca.DBR_DOUBLE]= float
|
||||
dbr_d[ca.DBR_CHAR] = int
|
||||
dbr_d[ca.DBR_STRING]= str
|
||||
dbr_d[ca.DBR_ENUM] = int
|
||||
|
||||
def __init__(self, pvName=None):
|
||||
self.pvname = pvName
|
||||
self.__chid = None
|
||||
self.__evid = None
|
||||
self.__timeout = None
|
||||
self._field_type = None
|
||||
self._element_count = None
|
||||
self._puser = None
|
||||
self._conn_state = None
|
||||
self._host_name = None
|
||||
self._raccess = None
|
||||
self._waccess = None
|
||||
|
||||
self._callbacks={}
|
||||
|
||||
def __del__(self):
|
||||
try:
|
||||
self.clear_event()
|
||||
self.clear_channel()
|
||||
self.flush_io()
|
||||
except:
|
||||
pass
|
||||
|
||||
def version(self):
|
||||
return "CaChannel, version v28-03-12"
|
||||
#
|
||||
# Class helper methods
|
||||
#
|
||||
def setTimeout(self, timeout):
|
||||
"""Set the timeout for this channel object. It overrides the class timeout.
|
||||
|
||||
:param float timeout: timeout in seconds
|
||||
|
||||
"""
|
||||
if (timeout>=0 or timeout is None):
|
||||
self.__timeout = timeout
|
||||
else:
|
||||
raise ValueError
|
||||
def getTimeout(self):
|
||||
"""Retrieve the timeout set for this channel object.
|
||||
|
||||
:return: timeout in seconds for this channel instance
|
||||
|
||||
"""
|
||||
if self.__timeout is None:
|
||||
timeout = CaChannel.ca_timeout
|
||||
else:
|
||||
timeout = self.__timeout
|
||||
|
||||
return timeout
|
||||
|
||||
|
||||
#
|
||||
# *************** Channel access medthod ***************
|
||||
#
|
||||
|
||||
#
|
||||
# Connection methods
|
||||
# search_and_connect
|
||||
# search
|
||||
# clear_channel
|
||||
|
||||
def search_and_connect(self, pvName, callback, *user_args):
|
||||
"""Attempt to establish a connection to a process variable.
|
||||
|
||||
:param str pvName: process variable name
|
||||
:param callable callback: function called when connection completes and connection status changes later on.
|
||||
:param user_args: user provided arguments that are passed to callback when it is invoked.
|
||||
:raises CaChannelException: if error happens
|
||||
|
||||
The user arguments are returned to the user in a tuple in the callback function.
|
||||
The order of the arguments is preserved.
|
||||
|
||||
Each Python callback function is required to have two arguments.
|
||||
The first argument is a tuple containing the results of the action.
|
||||
The second argument is a tuple containing any user arguments specified by ``user_args``.
|
||||
If no arguments were specified then the tuple is empty.
|
||||
|
||||
|
||||
.. note:: All remote operation requests such as the above are accumulated (buffered)
|
||||
and not forwarded to the IOC until one of execution methods (:meth:`pend_io`, :meth:`poll`, :meth:`pend_event`, :meth:`flush_io`)
|
||||
is called. This allows several requests to be efficiently sent over the network in one message.
|
||||
|
||||
>>> chan = CaChannel('catest')
|
||||
>>> def connCB(epicsArgs, userArgs):
|
||||
... chid = epicsArgs[0]
|
||||
... connection_state = epicsArgs[1]
|
||||
... if connection_state == ca.CA_OP_CONN_UP:
|
||||
... print('%s is connected' % ca.name(chid))
|
||||
>>> chan.search_and_connect(None, connCB, chan)
|
||||
>>> status = chan.pend_event(2)
|
||||
catest is connected
|
||||
"""
|
||||
if pvName is None:
|
||||
pvName = self.pvname
|
||||
else:
|
||||
self.pvname = pvName
|
||||
self._callbacks['connCB']=(callback, user_args)
|
||||
try:
|
||||
self.__chid = ca.search(pvName, self._conn_callback)
|
||||
except ca.error:
|
||||
msg = sys.exc_info()[1]
|
||||
raise CaChannelException(msg)
|
||||
|
||||
def search(self, pvName=None):
|
||||
"""Attempt to establish a connection to a process variable.
|
||||
|
||||
:param str pvName: process variable name
|
||||
:raises CaChannelException: if error happens
|
||||
|
||||
.. note:: All remote operation requests such as the above are accumulated (buffered)
|
||||
and not forwarded to the IOC until one of execution methods (:meth:`pend_io`, :meth:`poll`, :meth:`pend_event`, :meth:`flush_io`)
|
||||
is called. This allows several requests to be efficiently sent over the network in one message.
|
||||
|
||||
>>> chan = CaChannel()
|
||||
>>> chan.search('catest')
|
||||
>>> status = chan.pend_io(1)
|
||||
>>> chan.state()
|
||||
2
|
||||
"""
|
||||
if pvName is None:
|
||||
pvName = self.pvname
|
||||
else:
|
||||
self.pvname = pvName
|
||||
try:
|
||||
self.__chid = ca.search(pvName, None)
|
||||
except ca.error:
|
||||
msg = sys.exc_info()[1]
|
||||
raise CaChannelException(msg)
|
||||
|
||||
def clear_channel(self):
|
||||
"""Close a channel created by one of the search functions.
|
||||
|
||||
Clearing a channel does not cause its connection handler to be called.
|
||||
Clearing a channel does remove any monitors registered for that channel.
|
||||
If the channel is currently connected then resources are freed only some
|
||||
time after this request is flushed out to the server.
|
||||
|
||||
.. note:: All remote operation requests such as the above are accumulated (buffered)
|
||||
and not forwarded to the IOC until one of execution methods (:meth:`pend_io`, :meth:`poll`, :meth:`pend_event`, :meth:`flush_io`)
|
||||
is called. This allows several requests to be efficiently sent over the network in one message.
|
||||
|
||||
"""
|
||||
if(self.__chid is not None):
|
||||
try:
|
||||
ca.clear(self.__chid)
|
||||
except ca.error:
|
||||
msg = sys.exc_info()[1]
|
||||
raise CaChannelException(msg)
|
||||
|
||||
#
|
||||
# Write methods
|
||||
# array_put
|
||||
# array_put_callback
|
||||
#
|
||||
|
||||
def _setup_put(self,value, req_type, count = None):
|
||||
if count is None:
|
||||
count = self.element_count()
|
||||
else:
|
||||
count = max(1, min(self.element_count(), count) )
|
||||
|
||||
if req_type == -1:
|
||||
req_type = self.field_type()
|
||||
|
||||
# single numeric value
|
||||
if (isinstance(value, int) or
|
||||
isinstance(value, long) or
|
||||
isinstance(value, float) or
|
||||
isinstance(value, bool)):
|
||||
pval = (CaChannel.dbr_d[req_type](value),)
|
||||
# single string value
|
||||
# if DBR_CHAR, split into chars
|
||||
# otherwise convert to field type
|
||||
elif isinstance(value, str):
|
||||
if req_type == ca.DBR_CHAR:
|
||||
if len(value) < count:
|
||||
count = len(value)
|
||||
pval = [ord(x) for x in value[:count]]
|
||||
else:
|
||||
pval = (CaChannel.dbr_d[req_type](value),)
|
||||
# assumes other sequence type
|
||||
else:
|
||||
if len(value) < count:
|
||||
count = len(value)
|
||||
pval = [CaChannel.dbr_d[req_type](x) for x in value[:count]]
|
||||
|
||||
return pval
|
||||
|
||||
def array_put(self, value, req_type=None, count=None):
|
||||
"""Write a value or array of values to a channel
|
||||
|
||||
:param value: data to be written. For multiple values use a list or tuple
|
||||
:param req_type: database request type (``ca.DBR_XXXX``). Defaults to be the native data type.
|
||||
:param int count: number of data values to write. Defaults to be the native count.
|
||||
|
||||
>>> chan = CaChannel('catest')
|
||||
>>> chan.searchw()
|
||||
>>> chan.array_put(123)
|
||||
>>> chan.flush_io()
|
||||
>>> chan.getw()
|
||||
123.0
|
||||
>>> chan = CaChannel('cabo')
|
||||
>>> chan.searchw()
|
||||
>>> chan.array_put('Busy', ca.DBR_STRING)
|
||||
>>> chan.flush_io()
|
||||
>>> chan.getw()
|
||||
1
|
||||
>>> chan = CaChannel('cawave')
|
||||
>>> chan.searchw()
|
||||
>>> chan.array_put([1,2,3])
|
||||
>>> chan.flush_io()
|
||||
>>> chan.getw()
|
||||
[1.0, 2.0, 3.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]
|
||||
>>> chan.getw(count=3, use_numpy=True)
|
||||
array([ 1., 2., 3.])
|
||||
>>> chan = CaChannel('cawavec')
|
||||
>>> chan.searchw()
|
||||
>>> chan.array_put('1234',count=3)
|
||||
>>> chan.flush_io()
|
||||
>>> chan.getw(count=4)
|
||||
[49, 50, 51, 0]
|
||||
"""
|
||||
if req_type is None: req_type = -1
|
||||
val = self._setup_put(value, req_type, count)
|
||||
try:
|
||||
ca.put(self.__chid, val, None, None, req_type)
|
||||
except ca.error:
|
||||
msg = sys.exc_info()[1]
|
||||
raise CaChannelException(msg)
|
||||
|
||||
def array_put_callback(self, value, req_type, count, callback, *user_args):
    """Write a value or array of values to a channel and execute the user
    supplied callback after the put has completed.

    :param value: data to be written. For multiple values use a list or tuple.
    :param req_type: database request type (``ca.DBR_XXXX``). Defaults to be the native data type.
    :param int count: number of data values to write, Defaults to be the native count.
    :param callable callback: function called when the write is completed.
    :param user_args: user provided arguments that are passed to callback when it is invoked.
    :raises CaChannelException: if error happens

    Each Python callback function is required to have two arguments.
    The first argument is a dictionary containing the results of the action.

    ======= ===== =======
    field   type  comment
    ======= ===== =======
    chid    int   channels id structure
    type    int   database request type (ca.DBR_XXXX)
    count   int   number of values to transfered
    status  int   CA status return code (ca.ECA_XXXX)
    ======= ===== =======

    The second argument is a tuple containing any user arguments specified by ``user_args``.
    If no arguments were specified then the tuple is empty.


    >>> def putCB(epicsArgs, userArgs):
    ...     print('%s put completed' % ca.name(epicsArgs['chid']))
    >>> chan = CaChannel('catest')
    >>> chan.searchw()
    >>> chan.array_put_callback(145, None, None, putCB)
    >>> status = chan.pend_event(1)
    catest put completed
    >>> chan = CaChannel('cabo')
    >>> chan.searchw()
    >>> chan.array_put_callback('Busy', ca.DBR_STRING, None, putCB)
    >>> status = chan.pend_event(1)
    cabo put completed
    >>> chan = CaChannel('cawave')
    >>> chan.searchw()
    >>> chan.array_put_callback([1,2,3], None, None, putCB)
    >>> status = chan.pend_event(1)
    cawave put completed
    >>> chan = CaChannel('cawavec')
    >>> chan.searchw()
    >>> chan.array_put_callback('123', None, None, putCB)
    >>> status = chan.pend_event(1)
    cawavec put completed
    """
    # NOTE(review): array_put and putw default req_type to -1; here it
    # defaults to 0 -- confirm ca.put treats 0 as "native" on this path.
    if req_type is None: req_type = 0
    # Convert the Python value to the form ca.put expects for this type/count.
    val = self._setup_put(value, req_type, count)
    # Register the user hook; _put_callback looks it up on completion.
    self._callbacks['putCB']=(callback, user_args)
    try:
        ca.put(self.__chid, val, None, self._put_callback, req_type)
    except ca.error:
        msg = sys.exc_info()[1]
        raise CaChannelException(msg)
|
||||
#
|
||||
# Read methods
|
||||
# getValue
|
||||
# array_get
|
||||
# array_get_callback
|
||||
#
|
||||
|
||||
# Obtain read value after ECA_NORMAL is returned on an array_get().
|
||||
def getValue(self):
    """Fetch the data cached by the most recent :meth:`array_get` call."""
    cached = self.val
    return cached
|
||||
|
||||
# Simulate with a synchronous getw function call
|
||||
def array_get(self, req_type=None, count=None, **keywords):
    """Read a value or array of values from a channel. The new value is
    retrieved by a call to getValue method.

    :param req_type: database request type (``ca.DBR_XXXX``). Defaults to be the native data type.
    :param int count: number of data values to read, Defaults to be the native count.
    :param keywords: optional arguments assigned by keywords

        =========== =====
        keyword     value
        =========== =====
        use_numpy   True if waveform should be returned as numpy array. Default :data:`CaChannel.USE_NUMPY`.
        =========== =====

    :raises CaChannelException: if error happens

    .. note:: All remote operation requests such as the above are accumulated (buffered)
       and not forwarded to the IOC until one of execution methods (``pend_io``, ``poll``, ``pend_event``, ``flush_io``)
       is called. This allows several requests to be efficiently sent over the network in one message.


    >>> chan = CaChannel('catest')
    >>> chan.searchw()
    >>> chan.putw(123)
    >>> chan.array_get()
    >>> chan.getValue()
    123.0
    """
    # Implemented synchronously: getw blocks until the value arrives and
    # the result is cached on self.val for retrieval via getValue().
    self.val = self.getw(req_type, count, **keywords)
|
||||
|
||||
def array_get_callback(self, req_type, count, callback, *user_args, **keywords):
    """Read a value or array of values from a channel and execute the user
    supplied callback after the get has completed.

    :param req_type: database request type (``ca.DBR_XXXX``). Defaults to be the native data type.
    :param int count: number of data values to read, Defaults to be the native count.
    :param callable callback: function called when the get is completed.
    :param user_args: user provided arguments that are passed to callback when it is invoked.
    :param keywords: optional arguments assigned by keywords

        =========== =====
        keyword     value
        =========== =====
        use_numpy   True if waveform should be returned as numpy array. Default :data:`CaChannel.USE_NUMPY`.
        =========== =====

    :raises CaChannelException: if error happens

    Each Python callback function is required to have two arguments.
    The first argument is a dictionary containing the results of the action.
    Which keys are present depends on the request type:

    - always: ``chid``, ``type``, ``count``, ``status``, ``pv_value``
    - DBR_STS_XXXX and richer: also ``pv_status``, ``pv_severity``
    - DBR_TIME_XXXX: also ``pv_seconds`` (timestamp)
    - DBR_GR_XXXX / DBR_CTRL_XXXX of an ENUM: ``pv_nostrings`` (number of
      states) and ``pv_statestrings`` (state string list)
    - DBR_GR_XXXX / DBR_CTRL_XXXX otherwise: ``pv_units``, ``pv_precision``,
      ``pv_updislim``, ``pv_lodislim``, ``pv_upalarmlim``, ``pv_upwarnlim``,
      ``pv_loalarmlim``, ``pv_lowarnlim``
    - DBR_CTRL_XXXX only: ``pv_upctrllim``, ``pv_loctrllim``

    The second argument is a tuple containing any user arguments specified by ``user_args``.
    If no arguments were specified then the tuple is empty.

    .. note:: All remote operation requests such as the above are accumulated (buffered)
       and not forwarded to the IOC until one of execution methods (``pend_io``, ``poll``, ``pend_event``, ``flush_io``)
       is called. This allows several requests to be efficiently sent over the network in one message.

    >>> def getCB(epicsArgs, userArgs):
    ...     for item in sorted(epicsArgs.keys()):
    ...         if item.startswith('pv_'):
    ...             print('%s %s' % (item,epicsArgs[item]))
    >>> chan = CaChannel('catest')
    >>> chan.searchw()
    >>> chan.putw(145)
    >>> chan.array_get_callback(ca.DBR_CTRL_DOUBLE, 1, getCB)
    >>> status = chan.pend_event(1)
    pv_loalarmlim -20.0
    pv_loctrllim 0.0
    pv_lodislim 0.0
    pv_lowarnlim -10.0
    pv_precision 3
    pv_severity 2
    pv_status 3
    pv_units mm
    pv_upalarmlim 20.0
    pv_upctrllim 0.0
    pv_updislim 0.0
    pv_upwarnlim 10.0
    pv_value 145.0
    >>> chan = CaChannel('cabo')
    >>> chan.searchw()
    >>> chan.putw(0)
    >>> chan.array_get_callback(ca.DBR_CTRL_ENUM, 1, getCB)
    >>> status = chan.pend_event(1)
    pv_nostrings 2
    pv_severity 0
    pv_statestrings ('Done', 'Busy')
    pv_status 0
    pv_value 0
    """
    # None means "native": derive the DBR request type from the channel's
    # field type and use the server-side element count.
    if req_type is None: req_type = ca.dbf_type_to_DBR(self.field_type())
    if count is None: count = self.element_count()
    # Register the user hook; _get_callback retrieves it on completion.
    self._callbacks['getCB']=(callback, user_args)
    try:
        ca.get(self.__chid, self._get_callback, req_type, count, keywords.get('use_numpy', USE_NUMPY))
    except ca.error:
        msg = sys.exc_info()[1]
        raise CaChannelException(msg)
|
||||
|
||||
#
|
||||
# Monitor methods
|
||||
# add_masked_array_event
|
||||
# clear_event
|
||||
#
|
||||
|
||||
# Creates a new event id and stores it on self.__evid. Only one event registered
|
||||
# per CaChannel object. If an event is already registered the event is cleared
|
||||
# before registering a new event.
|
||||
def add_masked_array_event(self, req_type, count, mask, callback, *user_args, **keywords):
    """Specify a callback function to be executed whenever changes occur to a PV.

    :param req_type: database request type (``ca.DBR_XXXX``). Defaults to be the native data type.
    :param int count: number of data values to read, Defaults to be the native count.
    :param mask: logical or of ``ca.DBE_VALUE``, ``ca.DBE_LOG``, ``ca.DBE_ALARM``.
                 Defaults to be ``ca.DBE_VALUE|ca.DBE_ALARM``.
    :param callable callback: function called when the get is completed.
    :param user_args: user provided arguments that are passed to callback when
                      it is invoked.
    :param keywords: optional arguments assigned by keywords

        =========== =====
        keyword     value
        =========== =====
        use_numpy   True if waveform should be returned as numpy array. Default :data:`CaChannel.USE_NUMPY`.
        =========== =====

    :raises CaChannelException: if error happens

    .. note:: All remote operation requests such as the above are accumulated (buffered)
       and not forwarded to the IOC until one of execution methods (:meth:`pend_io`, :meth:`poll`, :meth:`pend_event`, :meth:`flush_io`)
       is called. This allows several requests to be efficiently sent over the network in one message.

    >>> def eventCB(epicsArgs, userArgs):
    ...     print('pv_value %s' % epicsArgs['pv_value'])
    ...     print('pv_status %d %s' % (epicsArgs['pv_status'], ca.alarmStatusString(epicsArgs['pv_status'])))
    ...     print('pv_severity %d %s' % (epicsArgs['pv_severity'], ca.alarmSeverityString(epicsArgs['pv_severity'])))
    >>> chan = CaChannel('cabo')
    >>> chan.searchw()
    >>> chan.putw(1)
    >>> chan.add_masked_array_event(ca.DBR_STS_ENUM, None, None, eventCB)
    >>> status = chan.pend_event(1)
    pv_value 1
    pv_status 7 STATE
    pv_severity 1 MINOR
    >>> chan.clear_event()
    >>> chan.add_masked_array_event(ca.DBR_STS_STRING, None, None, eventCB)
    >>> status = chan.pend_event(1)
    pv_value Busy
    pv_status 7 STATE
    pv_severity 1 MINOR
    >>> chan.clear_event()
    """
    # Fill in "native" defaults for request type, element count, and mask.
    if req_type is None: req_type = ca.dbf_type_to_DBR(self.field_type())
    if count is None: count = self.element_count()
    if mask is None: mask = ca.DBE_VALUE|ca.DBE_ALARM
    # Only one subscription per CaChannel object: drop any existing monitor
    # (flush so the server actually removes it) before installing a new one.
    if self.__evid is not None:
        self.clear_event()
        self.flush_io()
    self._callbacks['eventCB']=(callback, user_args)
    try:
        self.__evid = ca.monitor(self.__chid, self._event_callback, count, mask, req_type, keywords.get('use_numpy', USE_NUMPY))
    except ca.error:
        msg = sys.exc_info()[1]
        raise CaChannelException(msg)
|
||||
|
||||
def clear_event(self):
    """Remove a previously installed monitor callback.

    .. note:: All remote operation requests such as the above are accumulated (buffered)
       and not forwarded to the IOC until one of execution methods (:meth:`pend_io`, :meth:`poll`, :meth:`pend_event`, :meth:`flush_io`)
       is called. This allows several requests to be efficiently sent over the network in one message.
    """
    # Nothing to do when no monitor was ever installed.
    if self.__evid is None:
        return
    try:
        ca.clear_monitor(self.__evid)
        self.__evid = None
    except ca.error:
        raise CaChannelException(sys.exc_info()[1])
|
||||
|
||||
#
|
||||
# Execute methods
|
||||
# pend_io
|
||||
# pend_event
|
||||
# poll
|
||||
# flush_io
|
||||
#
|
||||
|
||||
def pend_io(self, timeout=None):
    """Flush the send buffer and wait until outstanding queries
    (``search``, ``array_get``) complete or the timeout expires.

    :param float timeout: seconds to wait; defaults to the channel timeout
    :raises CaChannelException: if timeout or other error happens
    """
    wait = self.getTimeout() if timeout is None else timeout
    rc = ca.pend_io(float(wait))
    if rc != 0:
        raise CaChannelException(ca.caError._caErrorMsg[rc])
|
||||
|
||||
def pend_event(self, timeout=None):
    """Flush the send buffer and process background activity
    (connect/get/put/monitor callbacks) for ``timeout`` seconds.

    It will not return before the specified timeout expires and all
    unfinished channel access labor has been processed.

    :param float timeout: seconds to wait (default 0.1)
    """
    wait = 0.1 if timeout is None else timeout
    # The return code is always ECA_TIMEOUT; pass it through unchanged.
    return ca.pend_event(wait)
|
||||
|
||||
def poll(self):
    """Flush the send buffer and execute any outstanding background activity.

    .. note:: It is an alias to ``pend_event(1e-12)``.
    """
    # The return code is always ECA_TIMEOUT; pass it through unchanged.
    return ca.poll()
|
||||
|
||||
def flush_io(self):
    """Flush the send buffer without executing outstanding background activity.

    :raises CaChannelException: if the flush fails
    """
    rc = ca.flush()
    if rc != 0:
        raise CaChannelException(ca.caError._caErrorMsg[rc])
|
||||
|
||||
#
|
||||
# Channel Access Macros
|
||||
# field_type
|
||||
# element_count
|
||||
# name
|
||||
# state
|
||||
# host_name
|
||||
# read_access
|
||||
# write_access
|
||||
#
|
||||
def get_info(self):
    """Refresh the cached channel info attributes from the ca layer.

    The chained tuple assignment both stores the individual fields
    (_field_type, _element_count, _puser, _conn_state, _host_name,
    _raccess, _waccess) and returns the whole tuple.

    :raises CaChannelException: if the underlying ca.ch_info call fails
    """
    try:
        info=(self._field_type, self._element_count, self._puser,
              self._conn_state, self._host_name, self._raccess,
              self._waccess) = ca.ch_info(self.__chid)
    except ca.error:
        msg = sys.exc_info()[1]
        raise CaChannelException(msg)
    return info
|
||||
|
||||
|
||||
def field_type(self):
    """Native type of the PV in the server (``ca.DBF_XXXX``).

    >>> chan = CaChannel('catest')
    >>> chan.searchw()
    >>> ftype = chan.field_type()
    >>> ftype
    6
    >>> ca.dbf_text(ftype)
    'DBF_DOUBLE'
    >>> ca.DBF_DOUBLE == ftype
    True
    """
    # Refresh the cached channel info before reporting.
    self.get_info()
    return self._field_type
|
||||
|
||||
def element_count(self):
    """Maximum array element count of the PV in the server.

    >>> chan = CaChannel('catest')
    >>> chan.searchw()
    >>> chan.element_count()
    1
    """
    # Refresh the cached channel info before reporting.
    self.get_info()
    return self._element_count
|
||||
|
||||
def name(self):
    """Channel name specified when the channel was created.

    >>> chan = CaChannel('catest')
    >>> chan.searchw()
    >>> chan.name()
    'catest'
    """
    # Resolved from the low-level channel id, not from self.pvname.
    return ca.name(self.__chid)
|
||||
|
||||
def state(self):
    """Current state of the CA connection.

    ================== =============
    States             Meaning
    ================== =============
    ca.cs_never_conn   PV not found
    ca.cs_prev_conn    PV was found but unavailable
    ca.cs_conn         PV was found and available
    ca.cs_closed       PV connection closed
    ca.cs_never_search PV not searched yet
    ================== =============

    >>> chan = CaChannel('catest')
    >>> chan.searchw()
    >>> chan.state()
    2
    """
    # No channel id yet means no search was ever issued for this object.
    if self.__chid is None:
        return ca.cs_never_search
    else:
        self.get_info()
        return self._conn_state
|
||||
|
||||
def host_name(self):
    """Host name that hosts the process variable."""
    # Queried fresh from the channel info on each call.
    self.get_info()
    return self._host_name
|
||||
|
||||
def read_access(self):
    """Access right to read the channel.

    :return: True if the channel can be read, False otherwise.
    """
    # Queried fresh from the channel info on each call.
    self.get_info()
    return self._raccess
|
||||
|
||||
def write_access(self):
    """Access right to write the channel.

    :return: True if the channel can be written, False otherwise.
    """
    # Queried fresh from the channel info on each call.
    self.get_info()
    return self._waccess
|
||||
#
|
||||
# Wait functions
|
||||
#
|
||||
# These functions wait for completion of the requested action.
|
||||
def searchw(self, pvName=None):
    """Attempt to establish a connection to a process variable.

    :param str pvName: process variable name; defaults to the name given
                       at construction time (and replaces it if supplied)
    :raises CaChannelException: if timeout or error happens

    .. note:: This method waits for connection to be established or fail with exception.

    >>> chan = CaChannel('non-exist-channel')
    >>> chan.searchw()
    Traceback (most recent call last):
    ...
    CaChannelException: User specified timeout on IO operation expired
    """
    if pvName is None:
        pvName = self.pvname
    else:
        # Remember the explicit name for later operations.
        self.pvname = pvName
    self.__chid = ca.search(pvName, None)
    # Block until the search resolves or the channel timeout expires.
    timeout = self.getTimeout()
    status = ca.pend_io(timeout)
    if status != 0:
        raise CaChannelException(ca.caError._caErrorMsg[status])
|
||||
|
||||
def putw(self, value, req_type=None):
    """Write a value or array of values to a channel

    If the request type is omitted the data is written as the Python type corresponding to the native format.
    Multi-element data is specified as a tuple or a list.
    Internally the sequence is converted to a list before inserting the values into a C array.
    Access using non-numerical types is restricted to the first element in the data field.
    Mixing character types with numerical types writes bogus results but is not prohibited at this time.
    DBF_ENUM fields can be written using DBR_ENUM and DBR_STRING types.
    DBR_STRING writes of a field of type DBF_ENUM must be accompanied by a valid string out of the possible enumerated values.

    :param value: data to be written. For multiple values use a list or tuple
    :param req_type: database request type (``ca.DBR_XXXX``). Defaults to be the native data type.
    :raises CaChannelException: if timeout or error happens

    .. note:: This method does flush the request to the channel access server.

    >>> chan = CaChannel('catest')
    >>> chan.searchw()
    >>> chan.putw(145)
    >>> chan.getw()
    145.0
    >>> chan = CaChannel('cabo')
    >>> chan.searchw()
    >>> chan.putw('Busy', ca.DBR_STRING)
    >>> chan.getw()
    1
    >>> chan.getw(ca.DBR_STRING)
    'Busy'
    >>> chan = CaChannel('cawave')
    >>> chan.searchw()
    >>> chan.putw([1,2,3])
    >>> chan.getw(req_type=ca.DBR_LONG,count=4)
    [1, 2, 3, 0]
    >>> chan = CaChannel('cawavec')
    >>> chan.searchw()
    >>> chan.putw('123')
    >>> chan.getw(count=4)
    [49, 50, 51, 0]
    >>> chan = CaChannel('cawaves')
    >>> chan.searchw()
    >>> chan.putw(['string 1','string 2'])
    >>> chan.getw()
    ['string 1', 'string 2', '']
    """
    # -1 asks the ca layer to use the channel's native request type.
    if req_type is None: req_type = -1
    val = self._setup_put(value, req_type)
    try:
        # Fire-and-forget write: no completion callback registered.
        ca.put(self.__chid, val, None, None, req_type)
    except ca.error:
        msg = sys.exc_info()[1]
        raise CaChannelException(msg)
    # Unlike array_put, putw flushes immediately so the write goes out now.
    self.flush_io()
|
||||
|
||||
def getw(self, req_type=None, count=None, **keywords):
    """Read the value from a channel.

    :param req_type: database request type. Defaults to be the native data type.
    :param int count: number of data values to read, Defaults to be the native count.
    :param keywords: optional arguments assigned by keywords

        =========== =====
        keyword     value
        =========== =====
        use_numpy   True if waveform should be returned as numpy array. Default :data:`CaChannel.USE_NUMPY`.
        =========== =====

    :return: If req_type is plain request type, only the value is returned. Otherwise a dict returns
             with information depending on the request type, same as the first argument passed to user's callback.
             See :meth:`array_get_callback`.

    :raises CaChannelException: if timeout error happens

    If the request type is omitted the data is returned to the user as the Python type corresponding to the native format.
    Multi-element data has all the elements returned as items in a list and must be accessed using a numerical type.
    Access using non-numerical types is restricted to the first element in the data field.
    DBF_ENUM fields can be read using DBR_ENUM and DBR_STRING types.
    DBR_STRING reads of a field of type DBF_ENUM returns the string corresponding to the current enumerated value.
    """
    # Completion flag and result slot shared with the closure below
    # (one-element lists so the nested function can rebind their contents).
    updated = [False]
    value = [0]
    def update_value(args):
        # Completion hook handed to ca.get.
        if args is None:
            return
        try:
            value[0] = self._format_cb_args(args)
        finally:
            # Mark done even if formatting failed, so the wait loop exits.
            updated[0] = True
    if req_type is None: req_type = ca.dbf_type_to_DBR(self.field_type())
    if count is None: count = self.element_count()
    try:
        ca.get(self.__chid, update_value, req_type, count, keywords.get('use_numpy', USE_NUMPY))
    except ca.error:
        msg = sys.exc_info()[1]
        raise CaChannelException(msg)
    timeout = self.getTimeout()
    self.flush_io()
    # Poll CA in 1 ms slices until the callback fires or the timeout elapses.
    n = timeout / 0.001
    while n > 0 and not updated[0]:
        ca.pend_event(0.001)
        n-=1
    if not updated[0]:
        raise CaChannelException(ca.caError._caErrorMsg[10]) # ECA_TIMEOUT
    # Plain DBR types yield just the value; structured types yield the full
    # info dict (same layout as array_get_callback's epicsArgs argument).
    if ca.dbr_type_is_plain(req_type):
        return value[0]['pv_value']
    else:
        return value[0]
|
||||
|
||||
#
|
||||
# Callback functions
|
||||
#
|
||||
# These functions hook user supplied callback functions to CA extension
|
||||
|
||||
def _conn_callback(self):
    # Invoked by the ca layer on connection state changes; forwards to the
    # user hook registered under 'connCB' (if any).
    callback = self._callbacks.get('connCB')
    if callback is None:
        return
    callbackFunc, userArgs = callback
    # Map connection state to an op code: 6/7 are presumably
    # CA_OP_CONN_UP / CA_OP_CONN_DOWN -- TODO confirm against the ca module.
    if self.state() == 2: OP = 6
    else: OP = 7
    epicsArgs = (self.__chid, OP)
    try:
        callbackFunc(epicsArgs, userArgs)
    except:
        # Deliberately swallow user-callback errors so they cannot
        # propagate into the CA event dispatch layer.
        pass
|
||||
|
||||
def _put_callback(self, args):
    # Completion hook passed to ca.put: build the epicsArgs dict and invoke
    # the user's 'putCB' callback registered by array_put_callback.
    callback = self._callbacks.get('putCB')
    if callback is None:
        return
    callbackFunc, userArgs = callback
    epicsArgs={}
    epicsArgs['chid']=self.__chid
    # The request's dbr_type is not available here; report the channel's
    # native field type and element count instead.
    epicsArgs['type']=self.field_type()
    epicsArgs['count']=self.element_count()
    epicsArgs['status']=args[1]  # CA status is the second positional element
    try:
        callbackFunc(epicsArgs, userArgs)
    except:
        # User exceptions must not propagate into the CA layer.
        pass
|
||||
|
||||
def _get_callback(self, args):
    """Internal dispatcher: relay a completed ca.get to the user hook
    registered under 'getCB'."""
    entry = self._callbacks.get('getCB')
    if entry is None:
        return
    hook, extra = entry
    packed = self._format_cb_args(args)
    try:
        hook(packed, extra)
    except:
        # User exceptions must not leak into the CA event layer.
        pass
|
||||
|
||||
def _event_callback(self, args):
    """Internal dispatcher: relay a CA monitor update to the user hook
    registered under 'eventCB'."""
    entry = self._callbacks.get('eventCB')
    if entry is None:
        return
    hook, extra = entry
    packed = self._format_cb_args(args)
    try:
        hook(packed, extra)
    except:
        # User exceptions must not leak into the CA event layer.
        pass
|
||||
|
||||
def _format_cb_args(self, args):
    # Translate the positional callback args produced by the ca layer into
    # the epicsArgs dict documented on array_get_callback.  The positional
    # protocol is: [value, severity, alarm_status] (+ [timestamp]
    # (+ [extra GR/CTRL field list])) -- or [None, status] on error.
    epicsArgs={}
    epicsArgs['chid'] = self.__chid
    # dbr_type is not returned
    # use dbf_type instead
    epicsArgs['type'] = self.field_type()
    epicsArgs['count'] = self.element_count()
    # status flag is not returned,
    # args[1] is alarm status
    # assume ECA_NORMAL
    epicsArgs['status'] = 1
    if len(args)==2: # Error
        epicsArgs['pv_value'] = args[0] # always None
        epicsArgs['status'] = args[1]
    if len(args)>=3: # DBR_Plain
        epicsArgs['pv_value'] = args[0]
        # NOTE(review): args[1]/args[2] mapped to severity/status here, while
        # the backend builds [value, severity, status] -- order looks swapped
        # relative to the key names; confirm against the ca backend.
        epicsArgs['pv_severity']= args[1]
        epicsArgs['pv_status']  = args[2]
    if len(args)==4: # DBR_TIME, 0.0 for others
        epicsArgs['pv_seconds'] = args[3]
    if len(args)==5:
        # args[4] holds the GR/CTRL extras; its length discriminates the
        # exact request flavor.
        if len(args[4])==2: # DBR_CTRL_ENUM
            epicsArgs['pv_nostrings']   = args[4][0]
            epicsArgs['pv_statestrings']= args[4][1]
        if len(args[4])>=7: # DBR_GR
            epicsArgs['pv_units']       = args[4][0]
            epicsArgs['pv_updislim']    = args[4][1]
            epicsArgs['pv_lodislim']    = args[4][2]
            epicsArgs['pv_upalarmlim']  = args[4][3]
            epicsArgs['pv_upwarnlim']   = args[4][4]
            epicsArgs['pv_loalarmlim']  = args[4][5]
            epicsArgs['pv_lowarnlim']   = args[4][6]
            if len(args[4])==8: # DBR_GR_FLOAT or DBR_GR_DOUBLE
                epicsArgs['pv_precision'] = args[4][7]
            if len(args[4])>=9: # DBR_CTRL
                epicsArgs['pv_upctrllim'] = args[4][7]
                epicsArgs['pv_loctrllim'] = args[4][8]
            if len(args[4])==10: # DBR_CTRL_FLOAT or DBR_CTRL_DOUBLE
                epicsArgs['pv_precision'] = args[4][9]
    return epicsArgs
|
||||
|
||||
if __name__ == "__main__":
    # Run the doctests embedded in the docstrings above; they require the
    # EPICS test records (catest, cabo, cawave, ...) to be reachable.
    import doctest
    doctest.testmod()
|
||||
0
script/back5.py → script/_Lib/epics/__init__.py
Executable file → Normal file
0
script/back5.py → script/_Lib/epics/__init__.py
Executable file → Normal file
212
script/_Lib/epics/_ca.py
Normal file
212
script/_Lib/epics/_ca.py
Normal file
@@ -0,0 +1,212 @@
|
||||
import sys
|
||||
import traceback
|
||||
import java.lang.System
|
||||
import java.lang.Thread
|
||||
import java.lang.InterruptedException
|
||||
import gov.aps.jca.CAStatus
|
||||
import gov.aps.jca.JCALibrary
|
||||
import gov.aps.jca.configuration.DefaultConfiguration
|
||||
import gov.aps.jca.dbr.DBRType
|
||||
import gov.aps.jca.dbr.Severity
|
||||
import gov.aps.jca.event.PutListener
|
||||
import gov.aps.jca.event.GetListener
|
||||
import gov.aps.jca.event.MonitorListener
|
||||
import ch.psi.jcae.impl.JcaeProperties
|
||||
|
||||
# Version information reported by this wrapper module.
version = "PShell wrapper"
release = "1.0.0"
revision = "1.0.0"
# CaChannel catches "ca.error"; aliasing it to the generic Exception makes
# every failure raised in this backend catchable by those handlers.
error = Exception
|
||||
|
||||
# Predicates mirroring the EPICS db_access.h type-classification macros.
# DBF codes run 0..LAST_TYPE and DBR request codes run 0..LAST_BUFFER_TYPE in
# repeating groups of (LAST_TYPE+1), so "type % (LAST_TYPE+1)" recovers the
# underlying primitive type of a request code.  The DBR_*/LAST_* constants
# are presumably defined elsewhere in this module -- not visible here.
def dbf_type_is_valid(type):
    return (type >= 0) and (type <= LAST_TYPE)
def dbr_type_is_valid(type):
    return (type >= 0) and (type <= LAST_BUFFER_TYPE)
def dbr_type_is_plain(type):
    return (type >= DBR_STRING) and (type <= DBR_DOUBLE)
def dbr_type_is_STS(type):
    return (type >= DBR_STS_STRING) and (type <= DBR_STS_DOUBLE)
def dbr_type_is_TIME(type):
    return (type >= DBR_TIME_STRING) and (type <= DBR_TIME_DOUBLE)
def dbr_type_is_GR(type):
    return (type >= DBR_GR_STRING) and (type <= DBR_GR_DOUBLE)
def dbr_type_is_CTRL(type):
    return (type >= DBR_CTRL_STRING) and (type <= DBR_CTRL_DOUBLE)
def dbr_type_is_STRING(type):
    return (type >= 0) and (type <= LAST_BUFFER_TYPE) and (type%(LAST_TYPE+1) == DBR_STRING)
def dbr_type_is_SHORT(type):
    return (type >= 0) and (type <= LAST_BUFFER_TYPE) and (type%(LAST_TYPE+1) == DBR_SHORT)
def dbr_type_is_FLOAT(type):
    return (type >= 0) and (type <= LAST_BUFFER_TYPE) and (type%(LAST_TYPE+1) == DBR_FLOAT)
def dbr_type_is_ENUM(type):
    return (type >= 0) and (type <= LAST_BUFFER_TYPE) and (type%(LAST_TYPE+1) == DBR_ENUM)
def dbr_type_is_CHAR(type):
    return (type >= 0) and (type <= LAST_BUFFER_TYPE) and (type%(LAST_TYPE+1) == DBR_CHAR)
def dbr_type_is_LONG(type):
    return (type >= 0) and (type <= LAST_BUFFER_TYPE) and (type%(LAST_TYPE+1) == DBR_LONG)
def dbr_type_is_DOUBLE(type):
    return (type >= 0) and (type <= LAST_BUFFER_TYPE) and (type%(LAST_TYPE+1) == DBR_DOUBLE)
|
||||
|
||||
# This wrapper assumes JCAE is available; it could be implemented over plain
# JCA only, provided the configuration below is supplied by other means.
properties = ch.psi.jcae.impl.JcaeProperties.getInstance()
jca= gov.aps.jca.JCALibrary.getInstance()
# Active JCA context; created by initialize() below.
context = None
# Build the context configuration from the JCAE properties
# (address list, array size limit, server port).
configuration = gov.aps.jca.configuration.DefaultConfiguration("jython ca context")
configuration.setAttribute("class", gov.aps.jca.JCALibrary.CHANNEL_ACCESS_JAVA)
configuration.setAttribute("addr_list", properties.getAddressList())
configuration.setAttribute("auto_addr_list", str(properties.isAutoAddressList()));
# Optional attributes: only set when the properties actually define them.
if properties.getMaxArrayBytes() is not None:
    configuration.setAttribute("max_array_bytes", properties.getMaxArrayBytes())
if properties.getServerPort() is not None:
    configuration.setAttribute("server_port", properties.getServerPort())
|
||||
|
||||
def initialize():
    """(Re)create the JCA context from the module-level configuration.

    Any previously created context is destroyed first, so the module can be
    re-initialized at runtime.
    """
    global context

    if context is not None:
        context.destroy()

    context = jca.createContext(configuration)


# Establish the context at import time.
initialize()
|
||||
|
||||
|
||||
class PutListener(gov.aps.jca.event.PutListener):
    """Adapts a JCA put-completion event to the single-argument Python
    callback convention used by this module (see GetListener below)."""
    def __init__(self, callback):
        # callback: callable taking one positional argument (the args list).
        self.callback = callback
    def putCompleted(self, put_ev):
        if put_ev is None:
            # Bug fix: the callback takes exactly one argument; the old code
            # invoked it with three (callback(None, None, None)), which always
            # raised TypeError.  NOTE(review): receivers must tolerate None.
            self.callback(None)
        else:
            count = put_ev.getCount()
            status = put_ev.getStatus()
            dbr_type = put_ev.getType()
            # status is deliberately the second element: the CaChannel side
            # reads the CA status from args[1].
            self.callback([count, status, dbr_type])
|
||||
|
||||
def formatCbArgs(status, dbr):
    """Build the argument list handed to get/monitor callbacks from a JCA DBR.

    Layout: [value, severity, status, timestamp, extras...]; on a non-NORMAL
    status the result is [value_or_None, status]; on any conversion error it
    is [None, None] (the traceback is printed to stderr).
    """
    dbrType = dbr.getType()
    if status <> gov.aps.jca.CAStatus.NORMAL:  # '<>' is Python-2/Jython syntax
        cb_args=[None, status.getValue()]
    else:
        try:
            val = dbr.getValue()
            if val is not None:
                val = val.tolist()
                # Unwrap single-element arrays to a scalar.
                if len(val) == 1:
                    val = val[0]
            if dbr.isSTS():
                cb_args = [val, dbr.getSeverity().getValue(), status.getValue()]
            else:
                # No status/severity info in this DBR: report NO_ALARM.
                cb_args = [val, gov.aps.jca.dbr.Severity.NO_ALARM, status.getValue()]
            if dbr.isTIME():
                timestamp = dbr.getTimeStamp()
                # NOTE(review): only the nanosecond component is forwarded, not
                # the full epoch timestamp -- confirm this is intended.
                cb_args.append( timestamp.nsec() if (timestamp is not None) else 0.0)
            else:
                cb_args.append(0.0)
            if dbr.isENUM():
                cb_args.append([None, None]) #TODO
            elif dbr.isGR():
                # Graphic metadata: units plus display/alarm/warning limits.
                gr=[dbr.getUnits(), dbr.getUpperDispLimit(), dbr.getLowerDispLimit(),
                    dbr.getUpperAlarmLimit(), dbr.getUpperWarningLimit(),
                    dbr.getLowerAlarmLimit(), dbr.getLowerWarningLimit()]
                if (dbr.isCTRL()):
                    gr.append(dbr.getUpperCtrlLimit())
                    gr.append(dbr.getLowerCtrlLimit())
                # isPRECSION is (sic) the JCA API method name.
                if (dbr.isPRECSION()):
                    gr.append(dbr.getPrecision())
                cb_args.append(gr)
        except:
            traceback.print_exc(file=sys.stderr)
            cb_args=[None, None]
    return cb_args
|
||||
|
||||
|
||||
class GetListener(gov.aps.jca.event.GetListener):
    """Adapts a Python callable to a JCA get-completion listener."""

    def __init__(self, callback):
        # callback: receives the list built by formatCbArgs().
        self.callback = callback

    def getCompleted(self, get_ev):
        self.callback(formatCbArgs(get_ev.getStatus(), get_ev.getDBR()))
|
||||
|
||||
class MonitorListener(gov.aps.jca.event.MonitorListener):
    """Adapts a Python callable to a JCA monitor-event listener."""

    def __init__(self, callback):
        # callback: receives the list built by formatCbArgs() on each event.
        self.callback = callback

    def monitorChanged(self, monitor_ev):
        self.callback(formatCbArgs(monitor_ev.getStatus(), monitor_ev.getDBR()))
|
||||
|
||||
def search(name, callback): #returns channel
    """Create a CA channel for *name* and wait up to 1s for pending IO.

    NOTE(review): *callback* is accepted but never used -- the connection is
    awaited synchronously via pendIO instead; confirm whether an async
    connection callback was intended.
    """
    ch= context.createChannel(name)
    context.pendIO(1.0)
    return ch
|
||||
|
||||
def name(channel):
    """Return the process-variable name of *channel*."""
    pv_name = channel.getName()
    return pv_name
|
||||
|
||||
def clear(channel):
    """Destroy *channel* and flush the request to the server."""
    channel.destroy()
    flush()
|
||||
|
||||
def put(channel, val, not_used, put_callback, req_type):
    """Write *val* to *channel*.

    With *put_callback* set, a PutListener delivers completion; otherwise a
    plain put is issued. *not_used* is ignored (kept for API compatibility).
    NOTE(review): the resolved DBR type below is never passed to channel.put();
    confirm whether typed puts were intended.
    """
    type = gov.aps.jca.dbr.DBRType.forValue(req_type)
    if put_callback is not None:
        listener = PutListener(put_callback)
        channel.put(val, listener)
    else:
        channel.put(val)
    flush()
|
||||
|
||||
def get(channel, get_callback, req_type, count, *args):
    """Issue an asynchronous get on *channel*.

    The result is delivered to *get_callback* via GetListener. Extra
    positional arguments are accepted for API compatibility and ignored.
    """
    dbr_type = gov.aps.jca.dbr.DBRType.forValue(req_type)
    channel.get(dbr_type, count, GetListener(get_callback))
    flush()
|
||||
|
||||
|
||||
def monitor(channel, event_callback, count, mask, req_type, *args):
    """Subscribe to channel events; returns the JCA monitor object.

    Extra positional arguments are accepted for API compatibility and ignored.
    """
    dbr_type = gov.aps.jca.dbr.DBRType.forValue(req_type)
    subscription = channel.addMonitor(dbr_type, count, mask,
                                      MonitorListener(event_callback))
    flush()
    return subscription
|
||||
|
||||
def clear_monitor(event_id):
    """Cancel a subscription previously returned by monitor()."""
    listener = event_id.getMonitorListener()
    event_id.removeMonitorListener(listener)
    flush()
|
||||
|
||||
def ch_info(channel):
    """Return channel status as a tuple.

    Layout: (field_type, element_count, None, connection_state, hostname,
    read_access, write_access) -- the third slot mirrors the C API's puser.
    """
    field_type = channel.getFieldType().getValue()
    conn_state = channel.getConnectionState().getValue()
    return (field_type, channel.getElementCount(), None, conn_state,
            channel.getHostName(), channel.getReadAccess(),
            channel.getWriteAccess())
|
||||
|
||||
|
||||
def pend_io(timeout):
    """Block until outstanding IO completes (at most *timeout* seconds)."""
    context.pendIO(timeout)
    _checkInterrupted()
    # 0 == ECA_NORMAL, for C-API compatibility.
    return 0
|
||||
|
||||
|
||||
def pend_event(timeout):
    """Process CA background activity for *timeout* seconds."""
    context.pendEvent(timeout)
    _checkInterrupted()
    # The C library always returns ECA_TIMEOUT (80) from ca_pend_event.
    return 80
|
||||
|
||||
def poll():
    """Non-blocking variant of pend_event (negligible timeout)."""
    return pend_event(1e-12)
|
||||
|
||||
def flush():
    """Flush buffered requests to the server; returns 0 (ECA_NORMAL)."""
    context.flushIO()
    _checkInterrupted()
    return 0
|
||||
|
||||
|
||||
def _checkInterrupted():
    """Propagate Java thread interruption as InterruptedException.

    sleep(0) yields and, per the Java Thread API, raises immediately when the
    interrupt flag with a pending interrupt is set; the explicit check below
    covers the flag being set without the raise.
    """
    java.lang.Thread.currentThread().sleep(0)
    if java.lang.Thread.currentThread().isInterrupted():
        raise java.lang.InterruptedException()
|
||||
|
||||
88
script/_Lib/epics/_ca_fnal.py
Normal file
88
script/_Lib/epics/_ca_fnal.py
Normal file
@@ -0,0 +1,88 @@
|
||||
|
||||
from _ca import *
|
||||
from cadefs import *
|
||||
import caError
|
||||
|
||||
def alarmSeverityString(sevr):
    """Return the human-readable name for alarm severity code *sevr*.

    Falls back to a generic string for out-of-range or unknown codes.
    (Fixes the misspelled fallback message "Unkown Severity".)
    """
    try:
        return AlarmSeverity.Strings[sevr]
    except:
        return "Unknown Severity"
|
||||
|
||||
def alarmStatusString(status):
    """Return the human-readable name for alarm status code *status*."""
    try:
        text = AlarmStatus.Strings[status]
    except:
        text = "Unknown Alarm"
    return text
|
||||
|
||||
def message(status):
    """Return the CA error message for *status*, or str(status) if unknown."""
    try:
        msg_no = caError.CA_EXTRACT_MSG_NO(status)
        return caError._caErrorMsg[msg_no]
    except:
        return str(status)
|
||||
|
||||
def dbf_type_is_valid(dbftype):
    """True if *dbftype* is a plain DBF field-type code."""
    return 0 <= dbftype <= LAST_TYPE

def dbr_type_is_valid(dbrtype):
    """True if *dbrtype* is any known DBR buffer-type code."""
    return 0 <= dbrtype <= LAST_BUFFER_TYPE
|
||||
|
||||
def dbr_type_is_plain(dbrtype):
    """True if *dbrtype* is a plain (value-only) DBR code."""
    return DBR_STRING <= dbrtype <= DBR_DOUBLE

def dbr_type_is_STS(dbrtype):
    """True if *dbrtype* carries status/severity (DBR_STS_*)."""
    return DBR_STS_STRING <= dbrtype <= DBR_STS_DOUBLE

def dbr_type_is_TIME(dbrtype):
    """True if *dbrtype* carries a timestamp (DBR_TIME_*)."""
    return DBR_TIME_STRING <= dbrtype <= DBR_TIME_DOUBLE

def dbr_type_is_GR(dbrtype):
    """True if *dbrtype* carries graphic limits (DBR_GR_*)."""
    return DBR_GR_STRING <= dbrtype <= DBR_GR_DOUBLE

def dbr_type_is_CTRL(dbrtype):
    """True if *dbrtype* carries control limits (DBR_CTRL_*)."""
    return DBR_CTRL_STRING <= dbrtype <= DBR_CTRL_DOUBLE
|
||||
|
||||
def _dbr_base_is(dbrtype, base):
    # Shared predicate: valid DBR code whose base (plain) type equals *base*.
    return 0 <= dbrtype <= LAST_BUFFER_TYPE and dbrtype % (LAST_TYPE + 1) == base

def dbr_type_is_STRING(dbrtype):
    """True if *dbrtype*'s base type is DBR_STRING."""
    return _dbr_base_is(dbrtype, DBR_STRING)

def dbr_type_is_SHORT(dbrtype):
    """True if *dbrtype*'s base type is DBR_SHORT."""
    return _dbr_base_is(dbrtype, DBR_SHORT)

def dbr_type_is_FLOAT(dbrtype):
    """True if *dbrtype*'s base type is DBR_FLOAT."""
    return _dbr_base_is(dbrtype, DBR_FLOAT)

def dbr_type_is_ENUM(dbrtype):
    """True if *dbrtype*'s base type is DBR_ENUM."""
    return _dbr_base_is(dbrtype, DBR_ENUM)

def dbr_type_is_CHAR(dbrtype):
    """True if *dbrtype*'s base type is DBR_CHAR."""
    return _dbr_base_is(dbrtype, DBR_CHAR)

def dbr_type_is_LONG(dbrtype):
    """True if *dbrtype*'s base type is DBR_LONG."""
    return _dbr_base_is(dbrtype, DBR_LONG)

def dbr_type_is_DOUBLE(dbrtype):
    """True if *dbrtype*'s base type is DBR_DOUBLE."""
    return _dbr_base_is(dbrtype, DBR_DOUBLE)
|
||||
|
||||
def _dbf_to_dbr(dbftype, category):
    # Map a DBF code into DBR category block *category* (0=plain, 1=STS,
    # 2=TIME, 3=GR, 4=CTRL); -1 when *dbftype* is out of range.
    if 0 <= dbftype <= LAST_TYPE:
        return dbftype + (LAST_TYPE + 1) * category
    return -1

def dbf_type_to_DBR(dbftype):
    """Plain DBR code for *dbftype*, or -1 if invalid."""
    return _dbf_to_dbr(dbftype, 0)

def dbf_type_to_DBR_STS(dbftype):
    """DBR_STS_* code for *dbftype*, or -1 if invalid."""
    return _dbf_to_dbr(dbftype, 1)

def dbf_type_to_DBR_TIME(dbftype):
    """DBR_TIME_* code for *dbftype*, or -1 if invalid."""
    return _dbf_to_dbr(dbftype, 2)

def dbf_type_to_DBR_GR(dbftype):
    """DBR_GR_* code for *dbftype*, or -1 if invalid."""
    return _dbf_to_dbr(dbftype, 3)

def dbf_type_to_DBR_CTRL(dbftype):
    """DBR_CTRL_* code for *dbftype*, or -1 if invalid."""
    return _dbf_to_dbr(dbftype, 4)
|
||||
545
script/_Lib/epics/_ca_kek.py
Normal file
545
script/_Lib/epics/_ca_kek.py
Normal file
@@ -0,0 +1,545 @@
|
||||
#!/usr/bin/env python
|
||||
## @package ca: EPICS-CA interface module for Python.
|
||||
"""CA module : EPICS-CA interface module for Python.
|
||||
This module provide a version of EPICS-CA and Python interface.
|
||||
It uses the C module _ca. The _ca module basically maps the C API of the EPICS ca library into Python. The interface between ca.py and the _ca module is subject to change; you should not depend on it. The API in ca.py will be preserved in future releases as much as possible.
|
||||
Author: Noboru Yamamoto, KEK, JAPAN. -2007.
|
||||
$Revision: 1.1 $
|
||||
"""
|
||||
from __future__ import print_function
|
||||
__version__ = "$Revision: 1.1 $"
|
||||
# $Source: /cvs/G/EPICS/extensions/src/PythonCA/src/_ca_kek.py,v $
|
||||
#
|
||||
try:
|
||||
import signal
|
||||
except:
|
||||
print("signal module is not avaialble")
|
||||
|
||||
import time,thread,gc,sys,atexit
|
||||
from exceptions import ValueError
|
||||
|
||||
# autGIL is not compatible with Tkinter and wx. So code was removed
|
||||
|
||||
# Force the thread module to call PyEval_InitThread before _ca is loaded,
# so the interpreter's threading machinery is fully initialized.
__foo_lock=thread.allocate_lock()
def __foo():
    """
    test function foo

    This function is used to ensure thread module is initialized before
    loading _ca module.
    """
    global __foo_lock
    __foo_lock.release()
    thread.exit_thread()
# See Python/Include/ceval.h
__foo_lock.acquire()
thread.start_new_thread(__foo,()) # __foo release lock
__foo_lock.acquire() # make sure threading is activated
|
||||
|
||||
import _ca
|
||||
# version from _ca314.cpp
|
||||
version=_ca.version
|
||||
revision=_ca.release
|
||||
|
||||
# some constants for EPICS channel Access library
|
||||
from cadefs import *
|
||||
from caError import *
|
||||
|
||||
#export pend_xxx routines for global operation
|
||||
pendio =_ca.pendio
|
||||
pend_io=_ca.pendio
|
||||
pend_event=_ca.pend_event
|
||||
poll=_ca.poll
|
||||
poll_event=_ca.poll
|
||||
flush_io=_ca.flush
|
||||
flush=_ca.flush
|
||||
test_io=_ca.test_io # test_io retunrs 42 for IODONE , 43 for IOINPROGRESS
|
||||
add_fd_registration=_ca.add_fd_registration
|
||||
|
||||
#Error Object
|
||||
error=_ca.error
|
||||
shutdown=_ca.__ca_task_exit
|
||||
|
||||
#private dictionary for Get/Put functions
|
||||
|
||||
__ca_dict={}
|
||||
__ca_dict_lock=thread.allocate_lock()
|
||||
_channel__debug=False
|
||||
|
||||
class channel:
    """
    a channel object for EPICS Channel Access.

    It does not have direct connection
    to channel object in C-library for EPICS Channel Access.
    for creation just supply channel name to connect
    """
    # DBR type codes accepted by get(); requests outside this set raise TypeError.
    dbr_types=(
        DBR_NATIVE, # default type
        DBR_STRING, DBR_CHAR, DBR_FLOAT,
        DBR_SHORT, #/* same as DBR_INT */
        DBR_ENUM, DBR_LONG, DBR_DOUBLE,
        DBR_TIME_STRING, DBR_TIME_CHAR, DBR_TIME_FLOAT,
        DBR_TIME_SHORT, #:/* same as DBR_TIME_INT */
        DBR_TIME_ENUM, DBR_TIME_LONG, DBR_TIME_DOUBLE,
        DBR_CTRL_CHAR, DBR_CTRL_LONG,
        DBR_CTRL_ENUM, DBR_CTRL_DOUBLE
        )
|
||||
|
||||
def __init__(self, name, cb=None,noflush=None):
    """Create and connect a channel to PV *name*.

    cb: connection callback (defaults to update_info); noflush: when truthy,
    skip the initial flush. Raises ValueError for an empty name.
    """
    if (not cb) : cb=self.update_info
    if name == "":
        raise ValueError(name)
    self.name=name
    # Channel-info fields, filled in by get_info()/update_info().
    self.field_type = None
    self.element_count = None
    self.puser = None
    self.conn_state = -1
    self.hostname = None
    self.raccess = None
    self.waccess = None
    # Last known value/alarm state, filled in by update_val().
    self.sevr=None
    self.ts=None
    self.status=None
    self.evid=[]            # active monitor event ids
    self.autoEvid=None      # event id of the autoUpdate() monitor, if any
    self.__callbacks={}     # evid -> callback
    self.cbstate=None
    self.updated=False
    self.val=None
    self.chid=_ca.search(name,cb)
    if not noflush:
        self.flush()
|
||||
|
||||
def clear(self):
    """Remove all monitors and release the underlying CA channel (idempotent)."""
    if self.chid:
        self.clear_monitor()
        self.flush()
        _ca.clear(self.chid)
        self.flush()
        self.chid=None
|
||||
|
||||
def __del__(self):
    # Ensure the CA channel is released when the object is garbage collected.
    self.clear()
|
||||
|
||||
def wait_conn(self, wait=20, dt=0.05):
    """Poll until the channel connects, pumping events every *dt* seconds.

    Raises ECA_BADCHID after *wait* iterations without a connection.
    NOTE(review): returns -1 on success -- callers appear to ignore the
    return value; confirm before relying on it.
    """
    n=0
    self.pend_event(dt)
    self.update_info()
    self.poll()
    while (not self.isConnected()):
        self.update_info()
        self.pend_event(dt)
        n=n+1
        if (n > wait ) :
            raise ECA_BADCHID("%s %d"%(self.name,n))
    return -1
|
||||
|
||||
def get(self,cb=None,Type=DBR_NATIVE, count=0, type=DBR_NATIVE, type_=DBR_NATIVE):
    """Issue an asynchronous get; *cb* (default update_val) receives the result.

    Type/type/type_ are aliases kept for backward compatibility; the first
    non-native one is used. Raises ECA_BADCHID when not connected and
    TypeError for a request type outside dbr_types.
    """
    try:
        if not self.isConnected():
            raise ECA_BADCHID(self.name)
    except:
        # Any failure probing the connection state is treated as a bad channel.
        raise ECA_BADCHID(self.name)
    if (Type == DBR_NATIVE):
        if not(type == DBR_NATIVE):
            Type=type
        elif not(type_ == DBR_NATIVE):
            Type=type_
    rType=max(Type,type,type_)
    if rType not in self.dbr_types:
        raise TypeError(rType)
    if not cb: cb=self.update_val

    # Reset completion flags; update_val() sets them when the reply arrives.
    self.cbstate=None
    self.updated=False
    try:
        _ca.get(self.chid, cb, Type, count)
    finally:
        pass
|
||||
|
||||
def put(self,*val,**kw):
    """
    channel.put(valu) will put scalar value to channel. You may need to call channel.flush()

    Keyword 'cb' supplies an optional completion callback.
    """
    if( val == ()):
        print("No value(s) to put")
    else:
        # dict.get() replaces has_key() (removed in Python 3); same behavior:
        # missing 'cb' yields None.
        cb = kw.get('cb')
        try:
            _ca.put(self.chid, val, self.val, cb, DBR_NATIVE)
        finally:
            pass
|
||||
|
||||
def put_and_notify(self,*val,**kw):
    """Put *val* to the channel with an optional completion callback (kw 'cb').

    With cb=None, behaves like a plain put; ca_put_array_callback does not
    return a value.
    """
    # dict.get() replaces has_key() (removed in Python 3); same behavior.
    cb = kw.get('cb')
    if( val == ()):
        print("No value(s) to put")
    else:
        try:
            _ca.put(self.chid,val,self.val,cb, DBR_NATIVE)
        finally:
            pass
|
||||
|
||||
def monitor(self,callback=None,count=0,evmask=(DBE_VALUE|DBE_ALARM)):
    """Subscribe to channel events; returns the new event id.

    Raises PyCa_NoCallback without a callback, ECA_BADCHID when not
    connected and ECA_BADTYPE when the field type is still unknown.
    """
    if(not callback):
        raise PyCa_NoCallback
    # 2 is presumably ch_state.cs_conn (connected) -- confirm against cadefs.
    if (self.conn_state != 2):
        #print self.name,self.get_info()
        raise ECA_BADCHID(self.name)

    self.update_info()
    if (self.field_type == DBR_NATIVE):
        #print self.name,self.get_info()
        raise ECA_BADTYPE(self.name)
    self.evid.append(_ca.monitor(self.chid,callback,count,evmask))
    self.__callbacks[self.evid[-1]]=callback
    return self.evid[-1]
|
||||
|
||||
def __clear_event(self,evid):
    """Cancel a single CA subscription and forget its callback."""
    if(_channel__debug): print("clearing evid:",evid)
    _ca.clear_monitor(evid)
    del self.__callbacks[evid]
|
||||
|
||||
def clear_monitor(self,evid=None):
    """Cancel one monitor (by *evid*) or, with no argument, all of them.

    Unknown event ids are ignored.
    """
    if(evid):
        if ( evid in self.evid):
            self.__clear_event(evid)
            # Bug fix: the original ran index()/del twice; the second
            # list.index(evid) raised ValueError right after a successful
            # clear (the id was already removed).
            self.evid.remove(evid)
    else:
        for evid in self.evid:
            self.__clear_event(evid)
        self.evid=[]
|
||||
|
||||
def autoUpdate(self):
    """Start a monitor that keeps self.val up to date (at most one)."""
    if self.autoEvid == None:
        self.monitor(self.update_val)
        # monitor() appended the new event id; remember it for clearAutoUpdate().
        self.autoEvid=self.evid[-1]
        self.flush()

def clearAutoUpdate(self):
    """Cancel the monitor started by autoUpdate(), if any."""
    if self.autoEvid is not None:
        self.clear_monitor(self.autoEvid)
        self.autoEvid=None
        self.flush()
|
||||
|
||||
def pendio(self,tmo=0.001):
    """Wait up to *tmo* seconds for outstanding IO (alias of pend_io)."""
    v=_ca.pendio(float(tmo))
    return v

def pend_io(self,tmo=0.001):
    """Wait up to *tmo* seconds for outstanding IO."""
    v=_ca.pendio(float(tmo))
    return v

def pend_event(self,tmo=0.001):
    """Process CA background activity for *tmo* seconds."""
    v=_ca.pend_event(float(tmo))
    return v

def poll(self):
    """Non-blocking processing of CA background activity."""
    _ca.poll()

def flush(self,wait=0.001):
    """Flush buffered requests to the server."""
    v=_ca.flush(wait)
    return v
|
||||
|
||||
def update_val(self,valstat=None):
    """Default get/monitor callback: cache value, severity and status.

    Optional elements 3 (timestamp) and 4 (control info) are stored when
    present. Sets self.updated even when unpacking fails partway.
    """
    if valstat ==None:
        raise caError("No value")
    #self.__lock.acquire()
    try:
        self.val=valstat[0]
        self.sevr=valstat[1]
        self.status=valstat[2]
        self.cbstate=1
        try:
            self.ts=valstat[3]
        except:
            # Timestamp not supplied for this DBR type.
            pass
        try:
            self.ctrl=valstat[4]
        except:
            # Control info not supplied for this DBR type.
            pass
    finally:
        #self.__lock.release()
        self.updated=True
        pass
|
||||
|
||||
def clear_cbstate(self):
    """Reset the callback-completed flag set by update_val()."""
    #self.__lock.acquire()
    self.cbstate=None
    #self.__lock.release()
|
||||
|
||||
def state(self):
    """Return 0 when connected, non-zero otherwise (offset from cs_conn)."""
    self.get_info()
    return (self.conn_state - ch_state.cs_conn)

def isNeverConnected(self):
    """True if the channel has never connected."""
    self.get_info()
    return (self.conn_state == ch_state.cs_never_conn)

def isConnected(self):
    """True if the channel is currently connected."""
    self.get_info()
    return (self.conn_state == ch_state.cs_conn)

def isPreviouslyConnected(self):
    """True if the channel was connected before but is not now."""
    self.get_info()
    return (self.conn_state == ch_state.cs_prev_conn)

def isDisonnected(self):
    # NOTE(review): identical to isPreviouslyConnected() -- a disconnected
    # channel is one that was previously connected; confirm this duplication
    # is intentional (method name also carries a typo, kept for API
    # compatibility).
    self.get_info()
    return (self.conn_state == ch_state.cs_prev_conn)

def isClosed(self):
    """True if the channel has been closed."""
    self.get_info()
    return (self.conn_state == ch_state.cs_closed)
|
||||
|
||||
def get_info(self):
    """
    update channel status information. return channel staus as a tuple.

    Tuple layout: (field_type, element_count, puser, conn_state, hostname,
    read_access, write_access); the same values are cached on self.
    """
    #self.__lock.acquire()
    try:
        info=(self.field_type, self.element_count, self.puser,
              self.conn_state, self.hostname, self.raccess,
              self.waccess) = _ca.ch_info(self.chid)
    finally:
        #self.__lock.release()
        pass
    return info
|
||||
|
||||
def update_info(self):
    """
    Just update channel status information. No return value.
    """
    self.get_info()
|
||||
|
||||
def fileno(self):
    """returns socket number used to connect.Scoket id is shared by
    channels which are connected to the same IOC.
    It became obsolete in EPICS 3.14 version of Python-CA.
    You need to use fd_register function. But you may not need it anyway in multi-thread environment.
    """
    return _ca.fileno(self.chid)
|
||||
|
||||
# convenient functions
|
||||
# you need to call Clear() function before stopping Python, otherwise it cause coredump. 2009/2/11 NY
|
||||
def __Ch(name,tmo=0.01):
    """Return the cached channel for *name*, creating/registering it on first use.

    Raises ECA_BADTYPE for non-string names and ECA_BADCHID when the new
    channel fails to connect.
    """
    if (type(name) == type("")):
        if (__ca_dict.has_key(name)):
            ch=__ca_dict[name]
        else:
            try:
                ch=channel(name)
                ch.wait_conn()
            except:
                raise ECA_BADCHID(name)
            # NOTE(review): computed but never used afterwards -- confirm intent.
            tmo=20*tmo
            __ca_dict_lock.acquire()
            try:
                __ca_dict[name]=ch
            finally:
                __ca_dict_lock.release()
        if( ch.state() != 0):
            ch.wait_conn(10)
        return ch
    else:
        raise ECA_BADTYPE(name)
|
||||
|
||||
def Info(name = "",tmo=0.01):
    """
    returns a tuple as channel information.
    tuple format=(field_type, element_count, puser argument,
                  connection_status, hostname:port,
                  read access mode, write access mode)
    """
    return __Ch(name,tmo=tmo).get_info()
|
||||
|
||||
def ClearAll():
    """Clear every channel cached by __Ch()."""
    for name in __ca_dict.keys():
        Clear(name)

# __ca_dict should be cleared before stopping Python, otherwise the
# interpreter can crash on exit (see module note above).
atexit.register(ClearAll)
|
||||
|
||||
def Clear(name= ""):
    """Destroy and forget the cached channel *name*.

    Raises ECA_BADTYPE when *name* is not a string or is not cached.
    """
    if (type(name) == type("")):
        __ca_dict_lock.acquire()
        try:
            if (name in __ca_dict):
                ch=__ca_dict[name]
                del __ca_dict[name]
                ch.clear()
                del ch
            else:
                # Bug fix: the original released the lock here and then
                # released it again in the finally clause -- the double
                # release raised on every not-found lookup.
                raise ECA_BADTYPE(name)
        finally:
            __ca_dict_lock.release()
    else:
        raise ECA_BADTYPE(name)
|
||||
|
||||
def Get(name="",count=0,Type=DBR_NATIVE,tmo=0.01,maxtmo=3):
    """
    Get value from a channel "name".

    Polls every *tmo* seconds until the get callback fires, up to *maxtmo*
    seconds in total; raises caError on timeout.
    """
    ch=__Ch(name,tmo)
    def CB(vals,ch=ch):  # bind ch as a default to avoid late-binding closure
        ch.update_val(vals)
    ch.get(cb=CB,Type=Type,count=count)
    ch.flush()
    while not ch.updated:
        time.sleep(tmo)
        maxtmo -=tmo
        if maxtmo <=0:
            raise caError("No get response")
    return ch.val
|
||||
|
||||
def Put_and_Notify(name,val=None,cb=None):
    """
    Convenient function:Put_and_Notify

    calls put_and_notify with callback.
    If callback is None, then just put data to a channel.
    """
    ch=__Ch(name,tmo=0.1)
    ch.put_and_notify(val,cb=cb)
    ch.flush()
    return ch.val

# define synonym
Put=Put_and_Notify
|
||||
|
||||
def Put_and_Notify_Array(name,val,cb=None):
    """
    put array test version : not tested with string arrays yet
    2007.8.30 T. Matsumoto
    """
    ch=__Ch(name,tmo=0.1)
    # apply() was removed in Python 3; argument unpacking is the exact
    # equivalent of apply(f, val, dict(cb=cb)).
    ch.put_and_notify(*val, cb=cb)
    ch.flush()
    return ch.val

# define synonym
Put_Array=Put_and_Notify_Array
|
||||
|
||||
def Monitor(name,cb=None,evmask=(DBE_VALUE|DBE_ALARM)):
    """Start (or restart) a monitor on channel *name*; returns the event id.

    Without *cb*, a default printer callback is installed. Any existing
    monitors on the cached channel are cleared first.
    """
    ch=__Ch(name,tmo=0.1)
    if not cb:
        def myCB(val,ch=ch):  # default: print name, value, sevr, status, time
            print(ch.name,":",val[0],val[1],val[2],TS2Ascii(val[3]))
    else:
        def myCB(val, ch=ch, cb=cb):  # wrap user callback to also pass the channel
            cb(ch,val)
    ch.clear_monitor()
    evid=ch.monitor(myCB,evmask=evmask)
    ch.flush()
    return evid
|
||||
|
||||
def ClearMonitor(name,evid=None):
    """Cancel monitor *evid* (or all monitors) on channel *name*.

    NOTE(review): the bare except converts every failure into ECA_BADCHID --
    confirm that masking other errors is intended.
    """
    ch=__Ch(name,tmo=0.1)
    try:
        ch.clear_monitor(evid)
        return
    except:
        raise ECA_BADCHID(name)
|
||||
#
|
||||
def isIODone():
    """Return 1 when all outstanding CA IO has completed, else 0."""
    # _ca.test_io(): 42 == IODONE, 43 == IOINPROGRESS.
    return 1 if _ca.test_io() == 42 else 0
|
||||
#
|
||||
# syncronus group class
|
||||
# Author: N.Yamamoto
|
||||
# Date: May 27.1999 (first version)
|
||||
#
|
||||
|
||||
class SyncGroup:
    """Synchronous group of channels for batched CA get/put operations.

    Wraps a CA synchronous-group id; self.chs maps each channel to its
    pending request handle (0 when idle). Uses ``in`` / dict.get in place of
    the Python-2-only has_key(), with identical behavior.
    """
    def __init__(self):
        self.gid=_ca.sg_create()  # CA synchronous-group id
        self.chs={}               # channel -> pending request handle

    def add(self, chs):
        """Add one channel or an iterable of channels to the group."""
        try:
            for ch in chs:
                if ch not in self.chs:
                    self.chs[ch]=0
        except:
            # *chs* was not iterable: treat it as a single channel.
            if chs not in self.chs:
                self.chs[chs]=0

    def test(self):
        """Test whether the group's outstanding IO has completed."""
        return _ca.sg_test(self.gid)

    def reset(self):
        """Reset the group, abandoning outstanding requests."""
        return _ca.sg_reset(self.gid)

    def wait(self,tmo=1.0):
        """Block until the group's IO completes or *tmo* seconds elapse."""
        return _ca.sg_block(self.gid,float(tmo))

    def put(self,ch,*value,**kw):
        """Queue a put of *value* to member channel *ch* (kw 'Type' optional)."""
        Type=kw.get("Type", DBR_NATIVE)
        if ch in self.chs:
            self.chs[ch]=_ca.sg_put(self.gid, ch.chid,
                                    self.chs[ch], value , Type)

    def get(self,ch):
        """Queue a get on member channel *ch*."""
        if ch in self.chs:
            self.chs[ch]=_ca.sg_get(self.gid,
                                    ch.chid, self.chs[ch])

    def convert(self,ch):
        """Convert *ch*'s completed request and update its cached value."""
        if ch in self.chs:
            val=_ca.ca_convert(ch.chid, self.chs[ch])
            ch.update_val(val[0])

    def GetAll(self,tmo=1.0):
        """Issue gets for every member, wait, and update all cached values.

        Raises on timeout.
        """
        for ch in self.chs.keys():
            self.chs[ch]=_ca.sg_get(self.gid,
                                    ch.chid, self.chs[ch])
        st=_ca.sg_block(self.gid,tmo)
        if st == 0:
            for ch in self.chs.keys():
                val=_ca.ca_convert(ch.chid,self.chs[ch])
                ch.update_val(val[0])
        else:
            raise Exception("CA_SG time out")
|
||||
|
||||
# TimeStamp utilities
# EPICS timestamps count seconds from 1990-01-01 UTC:
# time.gmtime(631152000.0) == (1990, 1, 1, 0, 0, 0, 0, 1, 0)
__EPICS_TS_EPOCH=631152000.0

def TS2Ascii(ts):
    """Render an EPICS timestamp as a ctime-style string with milliseconds."""
    import math
    unix_ts = ts + __EPICS_TS_EPOCH
    base = time.ctime(unix_ts)
    frac = ".%03d" % (math.modf(unix_ts)[0] * 1000)
    # Splice the fractional part in front of the trailing " YYYY".
    return base[:-5] + frac + base[-5:]

def TS2time(ts):
    """Convert an EPICS timestamp to a local struct_time."""
    return time.localtime(ts + __EPICS_TS_EPOCH)

def TS2UTC(ts):
    """Convert an EPICS timestamp to a Unix epoch timestamp."""
    return ts + __EPICS_TS_EPOCH
|
||||
|
||||
49
script/_Lib/epics/ca.py
Normal file
49
script/_Lib/epics/ca.py
Normal file
@@ -0,0 +1,49 @@
|
||||
#!/usr/bin/env python
|
||||
## @package ca: EPICS-CA interface module for Python.
|
||||
"""CA module : EPICS-CA interface module for Python.
|
||||
This module provide a version of EPICS-CA and Python interface.
|
||||
It uses the C module _ca. The _ca module basically maps the C API of the EPICS ca library into Python. The interface between ca.py and the _ca module is subject to change; you should not depend on it. The API in ca.py will be preserved in future releases as much as possible.
|
||||
Author: Noboru Yamamoto, KEK, JAPAN. -2007.
|
||||
$Revision: 1.4 $
|
||||
"""
|
||||
|
||||
__version__ = "$Revision: 1.4 $"
|
||||
# $Source: /cvs/G/EPICS/extensions/src/PythonCA/src/ca.py,v $
|
||||
|
||||
import time,gc,sys,atexit
|
||||
if sys.hexversion >= 0x03000000:
|
||||
import _thread as thread
|
||||
else:
|
||||
import thread
|
||||
|
||||
# autGIL is not compatible with Tkinter and wx. So code was removed
|
||||
|
||||
# Force the thread module to call PyEval_InitThread before _ca is loaded,
# so the interpreter's threading machinery is fully initialized.
__foo_lock=thread.allocate_lock()
def __foo():
    """
    test function foo

    This function is used to ensure thread module is initialized before
    loading _ca module.
    """
    global __foo_lock
    __foo_lock.release()
    thread.exit_thread()

# See Python/Include/ceval.h
__foo_lock.acquire()
thread.start_new_thread(__foo,()) # __foo release lock
__foo_lock.acquire() # make sure threading is activated
|
||||
|
||||
import _ca
|
||||
# version from _ca314.cpp
|
||||
version=_ca.version
|
||||
revision=_ca.release
|
||||
|
||||
# some constants for EPICS channel Access library
|
||||
from cadefs import *
|
||||
from caError import *
|
||||
|
||||
# for FNAL version you need to provide _ca_fnal.py and import every thin from them
|
||||
from _ca_fnal import *
|
||||
461
script/_Lib/epics/caError.py
Normal file
461
script/_Lib/epics/caError.py
Normal file
@@ -0,0 +1,461 @@
|
||||
import sys
|
||||
if sys.hexversion >= 0x03000000:
|
||||
intern = sys.intern
|
||||
|
||||
from ca import _ca
|
||||
class caError(_ca.error):
    """ EPICS ca.py Errors"""
    # Base class for all the ECA_* exception classes defined below.
    pass
|
||||
|
||||
__caErrorMsg=(
|
||||
"Normal successful completion",
|
||||
"Maximum simultaneous IOC connections exceeded",
|
||||
"Unknown internet host",
|
||||
"Unknown internet service",
|
||||
"Unable to allocate a new socket",
|
||||
"Unable to connect to internet host or service",
|
||||
"Unable to allocate additional dynamic memory",
|
||||
"Unknown IO channel",
|
||||
"Record field specified inappropriate for channel specified",
|
||||
"The requested data transfer is greater than available memory or EPICS_CA_MAX_ARRAY_BYTES",
|
||||
"User specified timeout on IO operation expired",
|
||||
"Sorry, that feature is planned but not supported at this time",
|
||||
"The supplied string is unusually large",
|
||||
"The request was ignored because the specified channel is disconnected",
|
||||
"The data type specifed is invalid",
|
||||
"Remote Channel not found",
|
||||
"Unable to locate all user specified channels",
|
||||
"Channel Access Internal Failure",
|
||||
"The requested local DB operation failed",
|
||||
"Channel read request failed",
|
||||
"Channel write request failed",
|
||||
"Channel subscription request failed",
|
||||
"Invalid element count requested",
|
||||
"Invalid string",
|
||||
"Virtual circuit disconnect",
|
||||
"Identical process variable names on multiple servers",
|
||||
"Request inappropriate within subscription (monitor) update callback",
|
||||
"Database value get for that channel failed during channel search",
|
||||
"Unable to initialize without the vxWorks VX_FP_TASK task option set",
|
||||
"Event queue overflow has prevented first pass event after event add",
|
||||
"Bad event subscription (monitor) identifier",
|
||||
"Remote channel has new network address",
|
||||
"New or resumed network connection",
|
||||
"Specified task isnt a member of a CA context",
|
||||
"Attempt to use defunct CA feature failed",
|
||||
"The supplied string is empty",
|
||||
"Unable to spawn the CA repeater thread- auto reconnect will fail",
|
||||
"No channel id match for search reply- search reply ignored",
|
||||
"Reseting dead connection- will try to reconnect",
|
||||
"Server (IOC) has fallen behind or is not responding- still waiting",
|
||||
"No internet interface with broadcast available",
|
||||
"Invalid event selection mask",
|
||||
"IO operations have completed",
|
||||
"IO operations are in progress",
|
||||
"Invalid synchronous group identifier",
|
||||
"Put callback timed out",
|
||||
"Read access denied",
|
||||
"Write access denied",
|
||||
"Requested feature is no longer supported",
|
||||
"Empty PV search address list",
|
||||
"No reasonable data conversion between client and server types",
|
||||
"Invalid channel identifier",
|
||||
"Invalid function pointer",
|
||||
"Thread is already attached to a client context",
|
||||
"Not supported by attached service",
|
||||
"User destroyed channel",
|
||||
"Invalid channel priority",
|
||||
"Preemptive callback not enabled - additional threads may not join context",
|
||||
"Client's protocol revision does not support transfers exceeding 16k bytes",
|
||||
"Virtual circuit connection sequence aborted",
|
||||
"Virtual circuit unresponsive",
|
||||
)
|
||||
# Intern the messages so repeated equality checks hit the identity fast path.
_caErrorMsg=map(intern,__caErrorMsg)
if sys.hexversion >= 0x03000000:
    # On Python 3, map() is lazy; materialize so indexing works.
    _caErrorMsg = list(_caErrorMsg)
|
||||
|
||||
# Maps numeric CA error codes to their exception classes (filled in below).
ErrCode2Class={}
class PyCa_NoCallback(caError):
    __doc__="Null callback routine"
|
||||
# Bit-field layout of a CA status code (mask, size and shift constants,
# mirroring caerr.h): message number, severity and success bits.
CA_M_MSG_NO = 0x0000FFF8
CA_M_SEVERITY = 0x00000007
CA_M_LEVEL = 0x00000003
CA_M_SUCCESS = 0x00000001
CA_M_ERROR = 0x00000002
CA_M_SEVERE = 0x00000004
CA_S_MSG_NO= 0x0D
CA_S_SEVERITY=0x03
CA_V_MSG_NO= 0x03
CA_V_SEVERITY= 0x00
CA_V_SUCCESS= 0x00

def CA_EXTRACT_MSG_NO(code):
    """Extract the message-number field from a CA status code."""
    return ( ( (code) & CA_M_MSG_NO ) >> CA_V_MSG_NO )

def CA_EXTRACT_SEVERITY(code):
    """Extract the severity field from a CA status code."""
    return ( ( (code) & CA_M_SEVERITY ) >> CA_V_SEVERITY)

def CA_EXTRACT_SUCCESS(code):
    """Extract the success bit from a CA status code.

    Bug fix: the original omitted 'return' and always yielded None.
    """
    return ( ( (code) & CA_M_SUCCESS ) >> CA_V_SUCCESS )
|
||||
|
||||
# Define one caError subclass per CA status and register it by error code.
# Table-driven equivalent of the original long run of
# 'class ECA_XXX(caError): __doc__=...; __errcode__=...' blocks; each entry is
# (class name, index into _caErrorMsg, numeric CA error code).
def _define_eca_errors():
    specs = (
        ("ECA_NORMAL", 0, 1), ("ECA_MAXIOC", 1, 10), ("ECA_UKNHOST", 2, 18),
        ("ECA_UKNSERV", 3, 26), ("ECA_SOCK", 4, 34), ("ECA_CONN", 5, 40),
        ("ECA_ALLOCMEM", 6, 48), ("ECA_UKNCHAN", 7, 56),
        ("ECA_UKNFIELD", 8, 64), ("ECA_TOLARGE", 9, 72),
        ("ECA_TIMEOUT", 10, 80), ("ECA_NOSUPPORT", 11, 88),
        ("ECA_STRTOBIG", 12, 96), ("ECA_DISCONNCHID", 13, 106),
        ("ECA_BADTYPE", 14, 114), ("ECA_CHIDNOTFND", 15, 123),
        ("ECA_CHIDRETRY", 16, 131), ("ECA_INTERNAL", 17, 142),
        ("ECA_DBLCLFAIL", 18, 144), ("ECA_GETFAIL", 19, 152),
        ("ECA_PUTFAIL", 20, 160), ("ECA_ADDFAIL", 21, 168),
        ("ECA_BADCOUNT", 22, 176), ("ECA_BADSTR", 23, 186),
        ("ECA_DISCONN", 24, 192), ("ECA_DBLCHNL", 25, 200),
        ("ECA_EVDISALLOW", 26, 210), ("ECA_BUILDGET", 27, 216),
        ("ECA_NEEDSFP", 28, 224), ("ECA_OVEVFAIL", 29, 232),
        ("ECA_BADMONID", 30, 242), ("ECA_NEWADDR", 31, 248),
        ("ECA_NEWCONN", 32, 259), ("ECA_NOCACTX", 33, 264),
    )
    for cls_name, msg_index, err_code in specs:
        cls = type(cls_name, (caError,),
                   {"__doc__": _caErrorMsg[msg_index], "__errcode__": err_code})
        globals()[cls_name] = cls
        ErrCode2Class[err_code] = cls

_define_eca_errors()
|
||||
|
||||
class ECA_DEFUNCT(caError):
|
||||
__doc__=_caErrorMsg[34]
|
||||
__errcode__=278
|
||||
|
||||
ErrCode2Class[278]=ECA_DEFUNCT
|
||||
|
||||
class ECA_EMPTYSTR(caError):
|
||||
__doc__=_caErrorMsg[35]
|
||||
__errcode__=280
|
||||
|
||||
ErrCode2Class[280]=ECA_EMPTYSTR
|
||||
|
||||
class ECA_NOREPEATER(caError):
|
||||
__doc__=_caErrorMsg[36]
|
||||
__errcode__=288
|
||||
|
||||
ErrCode2Class[288]=ECA_NOREPEATER
|
||||
|
||||
class ECA_NOCHANMSG(caError):
|
||||
__doc__=_caErrorMsg[37]
|
||||
__errcode__=296
|
||||
|
||||
ErrCode2Class[296]=ECA_NOCHANMSG
|
||||
|
||||
class ECA_DLCKREST(caError):
|
||||
__doc__=_caErrorMsg[38]
|
||||
__errcode__=304
|
||||
|
||||
ErrCode2Class[304]=ECA_DLCKREST
|
||||
|
||||
class ECA_SERVBEHIND(caError):
|
||||
__doc__=_caErrorMsg[39]
|
||||
__errcode__=312
|
||||
|
||||
ErrCode2Class[312]=ECA_SERVBEHIND
|
||||
|
||||
class ECA_NOCAST(caError):
|
||||
__doc__=_caErrorMsg[40]
|
||||
__errcode__=320
|
||||
|
||||
ErrCode2Class[320]=ECA_NOCAST
|
||||
|
||||
class ECA_BADMASK(caError):
|
||||
__doc__=_caErrorMsg[41]
|
||||
__errcode__=330
|
||||
|
||||
ErrCode2Class[330]=ECA_BADMASK
|
||||
|
||||
class ECA_IODONE(caError):
|
||||
__doc__=_caErrorMsg[42]
|
||||
__errcode__=339
|
||||
|
||||
ErrCode2Class[339]=ECA_IODONE
|
||||
|
||||
class ECA_IOINPROGRESS(caError):
|
||||
__doc__=_caErrorMsg[43]
|
||||
__errcode__=347
|
||||
|
||||
ErrCode2Class[347]=ECA_IOINPROGRESS
|
||||
|
||||
class ECA_BADSYNCGRP(caError):
|
||||
__doc__=_caErrorMsg[44]
|
||||
__errcode__=354
|
||||
|
||||
ErrCode2Class[354]=ECA_BADSYNCGRP
|
||||
|
||||
class ECA_PUTCBINPROG(caError):
|
||||
__doc__=_caErrorMsg[45]
|
||||
__errcode__=362
|
||||
|
||||
ErrCode2Class[362]=ECA_PUTCBINPROG
|
||||
|
||||
class ECA_NORDACCESS(caError):
|
||||
__doc__=_caErrorMsg[46]
|
||||
__errcode__=368
|
||||
|
||||
ErrCode2Class[368]=ECA_NORDACCESS
|
||||
|
||||
class ECA_NOWTACCESS(caError):
|
||||
__doc__=_caErrorMsg[47]
|
||||
__errcode__=376
|
||||
|
||||
ErrCode2Class[376]=ECA_NOWTACCESS
|
||||
|
||||
class ECA_ANACHRONISM(caError):
|
||||
__doc__=_caErrorMsg[48]
|
||||
__errcode__=386
|
||||
|
||||
ErrCode2Class[386]=ECA_ANACHRONISM
|
||||
|
||||
class ECA_NOSEARCHADDR(caError):
|
||||
__doc__=_caErrorMsg[49]
|
||||
__errcode__=392
|
||||
|
||||
ErrCode2Class[392]=ECA_NOSEARCHADDR
|
||||
|
||||
class ECA_NOCONVERT(caError):
|
||||
__doc__=_caErrorMsg[50]
|
||||
__errcode__=400
|
||||
|
||||
ErrCode2Class[400]=ECA_NOCONVERT
|
||||
|
||||
class ECA_BADCHID(caError):
|
||||
__doc__=_caErrorMsg[51]
|
||||
__errcode__=410
|
||||
|
||||
ErrCode2Class[410]=ECA_BADCHID
|
||||
|
||||
class ECA_BADFUNCPTR(caError):
|
||||
__doc__=_caErrorMsg[52]
|
||||
__errcode__=418
|
||||
|
||||
ErrCode2Class[418]=ECA_BADFUNCPTR
|
||||
|
||||
class ECA_ISATTACHED(caError):
|
||||
__doc__=_caErrorMsg[53]
|
||||
__errcode__=424
|
||||
|
||||
ErrCode2Class[424]=ECA_ISATTACHED
|
||||
|
||||
class ECA_UNAVAILINSERV(caError):
|
||||
__doc__=_caErrorMsg[54]
|
||||
__errcode__=432
|
||||
|
||||
ErrCode2Class[432]=ECA_UNAVAILINSERV
|
||||
|
||||
class ECA_CHANDESTROY(caError):
|
||||
__doc__=_caErrorMsg[55]
|
||||
__errcode__=440
|
||||
|
||||
ErrCode2Class[440]=ECA_CHANDESTROY
|
||||
|
||||
class ECA_BADPRIORITY(caError):
|
||||
__doc__=_caErrorMsg[56]
|
||||
__errcode__=450
|
||||
|
||||
ErrCode2Class[450]=ECA_BADPRIORITY
|
||||
|
||||
class ECA_NOTTHREADED(caError):
|
||||
__doc__=_caErrorMsg[57]
|
||||
__errcode__=458
|
||||
|
||||
ErrCode2Class[458]=ECA_NOTTHREADED
|
||||
|
||||
class ECA_16KARRAYCLIENT(caError):
|
||||
__doc__=_caErrorMsg[58]
|
||||
__errcode__=464
|
||||
|
||||
ErrCode2Class[464]=ECA_16KARRAYCLIENT
|
||||
|
||||
class ECA_CONNSEQTMO(caError):
|
||||
__doc__=_caErrorMsg[59]
|
||||
__errcode__=472
|
||||
|
||||
ErrCode2Class[472]=ECA_CONNSEQTMO
|
||||
|
||||
class ECA_UNRESPTMO(caError):
|
||||
__doc__=_caErrorMsg[60]
|
||||
__errcode__=480
|
||||
|
||||
ErrCode2Class[480]=ECA_UNRESPTMO
|
||||
|
||||
647
script/_Lib/epics/ca_util.py
Normal file
647
script/_Lib/epics/ca_util.py
Normal file
@@ -0,0 +1,647 @@
|
||||
# ca_util.py - a thin wrapper around CaChannel
|
||||
# Tim Mooney 12/05/2008
|
||||
#
|
||||
# Modified by Xiaoqiang Wang to be Python 3 compatible.
|
||||
|
||||
"""ca_util.py is a wrapper around CaChannel that allows the caller to write,
|
||||
e.g.,
|
||||
caget("xxx:m1")
|
||||
instead of having to write
|
||||
m1 = CaChannel()
|
||||
m1.searchw("xxx:m1")
|
||||
m1.getw()
|
||||
Also, ca_util defends against null PV names and some effects of short-term
|
||||
CA disconnections, and it can verify that caput*() operations succeeded.
|
||||
"""
|
||||
|
||||
version = "2.0"  # ca_util wrapper version string
|
||||
|
||||
import ca
|
||||
import CaChannel
|
||||
import time
|
||||
import sys
|
||||
|
||||
# DBR types
|
||||
# ca.DBR_STRING = 0
|
||||
# ca.DBR_SHORT = 1
|
||||
# ca.DBR_INT = 1
|
||||
# ca.DBR_FLOAT = 2
|
||||
# ca.DBR_ENUM = 3
|
||||
# ca.DBR_CHAR = 4
|
||||
# ca.DBR_LONG = 5
|
||||
# ca.DBR_DOUBLE = 6
|
||||
|
||||
# If caller imported CaChannel using "from CaChannel import *", then the
|
||||
# class CaChannel will have the same name as the module CaChannel, and
|
||||
# we won't be able to see the module attribute, 'CaChannel.__file__'.
|
||||
def getCaChannelFileName():
    """For internal ca_util use: return the filesystem path of the CaChannel
    module actually imported (useful when several installations exist)."""
    return CaChannel.__file__
|
||||
|
||||
#######################################################################
|
||||
# Human readable exception description
|
||||
# try:
|
||||
# x = x + 1
|
||||
# except:
|
||||
# print formatExceptionInfo()
|
||||
import sys
|
||||
import traceback
|
||||
def formatExceptionInfo(maxTBlevel=5):
    """Return (exception_name, exception_args, traceback_lines) describing the
    exception currently being handled.

    maxTBlevel limits how many traceback frames are formatted.
    Must be called from inside an ``except`` block.
    """
    cla, exc, trbk = sys.exc_info()
    excName = cla.__name__
    # BUG FIX: on Python 3, BaseException.args lives in a C-level slot, not in
    # exc.__dict__, so the old exc.__dict__["args"] lookup raised KeyError for
    # ordinary exceptions and always reported "<no args>".  getattr() reads
    # the real args; the "<no args>" fallback is preserved.
    excArgs = getattr(exc, "args", "<no args>")
    excTb = traceback.format_tb(trbk, maxTBlevel)
    return (excName, excArgs, excTb)
|
||||
|
||||
#######################################################################
|
||||
# channel-access connection states
|
||||
# Map the numeric channel-access connection states (values from cadef.h,
# re-exported by the ca module) to human-readable descriptions; used by
# castate() when reporting a PV's condition.
ca_states = {
    ca.cs_never_conn: "never connected",
    ca.cs_prev_conn: "previously connected",
    ca.cs_conn: "connected",
    ca.cs_closed: "closed",
    ca.cs_never_search: "never searched",
}
|
||||
|
||||
|
||||
|
||||
#######################################################################
|
||||
# default settings for ca_util
|
||||
defaultTimeout = None       # None means use CaChannel's own timeout
defaultRetries = 3          # extra attempts for connect/get/put operations
readCheckTolerance = None   # None means don't read back and compare after puts

def set_ca_util_defaults(timeout=None, retries=None, read_check_tolerance=None):
    """
    usage: old = set_ca_util_defaults(timeout=None, retries=None,
        read_check_tolerance=None)
    alternate: set_ca_util_defaults(defaultsList), where defaultsList is like
        the list returned by get_ca_util_defaults()
    Setting an argument to the string "NONE" disables it; leaving it None
    keeps the current default.
    Returns the list of previous default values:
        [defaultTimeout, defaultRetries, readCheckTolerance]
    """
    global defaultTimeout, defaultRetries, readCheckTolerance
    old = [defaultTimeout, defaultRetries, readCheckTolerance]
    # List form: a single list argument supplies all three values at once.
    if isinstance(timeout, list):
        timeout, retries, read_check_tolerance = timeout[0], timeout[1], timeout[2]
    # 'is not None' rather than '!= None' (PEP 8); None means "unchanged".
    if timeout is not None:
        defaultTimeout = timeout
    if retries is not None:
        defaultRetries = retries
    if read_check_tolerance is not None:
        readCheckTolerance = read_check_tolerance
    return old

def get_ca_util_defaults():
    """
    usage: myList = get_ca_util_defaults()
    myList is set to [defaultTimeout, defaultRetries, readCheckTolerance]
    """
    return [defaultTimeout, defaultRetries, readCheckTolerance]

def set_ca_util_default_timeout(timeout=None):
    """
    usage: old = set_ca_util_default_timeout(timeout=None)
    If timeout == "NONE", then ca_util doesn't specify any timeout in
    calls to underlying software.
    Returns previous default timeout.
    """
    global defaultTimeout
    old = defaultTimeout
    defaultTimeout = timeout
    return old

def get_ca_util_default_timeout():
    """Return the current default timeout."""
    return defaultTimeout

def set_ca_util_default_retries(retries=None):
    """
    usage: old = set_ca_util_default_retries(retries=None)
    If retries == "NONE", then ca_util doesn't do any retries.
    Returns previous default retries.
    """
    global defaultRetries
    old = defaultRetries
    defaultRetries = retries
    return old

def get_ca_util_default_retries():
    """Return the current default retry count."""
    return defaultRetries

def set_ca_util_default_read_check_tolerance(read_check_tolerance=None):
    """
    usage: old = set_ca_util_default_read_check_tolerance(read_check_tolerance=None)
    If read_check_tolerance == "NONE", then ca_util doesn't compare the value
    it reads to the value it wrote.
    Returns previous default tolerance.
    """
    global readCheckTolerance
    old = readCheckTolerance
    readCheckTolerance = read_check_tolerance
    return old

def get_ca_util_default_read_check_tolerance():
    """Return the current default read-check tolerance."""
    return readCheckTolerance
|
||||
|
||||
|
||||
#######################################################################
|
||||
# The dictionary, cadict, will be used to associate PV names with the
|
||||
# machinery required to talk to EPICS PV's. If no entry is found (the
|
||||
# name hasn't been used yet in a ca call), then we create a new instance
|
||||
# of CaChannel, connect it to the PV, and put it in the dictionary. We also
|
||||
# include a flag some of the ca_util routines can use to check if a callback
|
||||
# has occurred for this PV.
|
||||
|
||||
class cadictEntry:
    """One entry of the module-level 'cadict' PV cache: the CaChannel for a
    PV plus a few values captured at connect time so later calls can default
    their request parameters without re-querying the channel."""

    def __init__(self, channel):
        # The connected CaChannel instance for this PV.
        self.channel = channel
        self.callbackReceived = 0 # reserved for use by caputw()
        # Native field type and element count, cached at connect time.
        self.field_type = channel.field_type()
        self.element_count = channel.element_count()
        #self.host_name = channel.host_name()

# PV name -> cadictEntry; populated lazily by checkName().
cadict = {}
|
||||
|
||||
#######################################################################
|
||||
# Numeric error codes raised by ca_util, with matching human-readable text
# (ca_utilExceptionStrings is indexed by the EXCEPTION_* constants).
ca_utilExceptionStrings = ["No name was provided.", "Readback disagrees with put value.",
    "PV is not connected."]
EXCEPTION_NULL_NAME = 0
EXCEPTION_READBACK_DISAGREES = 1
EXCEPTION_NOT_CONNECTED = 2

class ca_utilException(Exception):
    """Exception raised by ca_util functions; carries one of the
    EXCEPTION_* error codes as its first argument."""

    def __init__(self, *args):
        super().__init__(*args)
        # First positional argument is the numeric error code.
        self.errorNumber = args[0]

    def __int__(self):
        # The numeric error code.
        return int(self.errorNumber)

    def __str__(self):
        # The human-readable message for the error code.
        return ca_utilExceptionStrings[self.errorNumber]
|
||||
|
||||
|
||||
#######################################################################
|
||||
def convertToType(type, value):
    """Coerce *value* to the Python type matching the DBR code *type*.

    Numeric conversion failures fall back to 0 / 0.0 rather than raising;
    ENUM and CHAR values are passed through unchanged.
    NOTE(review): the parameter named 'type' shadows the builtin; renaming it
    would change the keyword-argument interface, so it is left as-is.
    """
    if type == ca.DBR_STRING:
        return str(value)
    elif type == ca.DBR_SHORT or type == ca.DBR_INT or type == ca.DBR_LONG:
        try:
            n = int(value)
        except:
            n = 0
        return n
    elif type == ca.DBR_FLOAT or type == ca.DBR_DOUBLE:
        try:
            n = float(value)
        except:
            n = 0.0
        return n
    elif type == ca.DBR_ENUM:
        return value
    elif type == ca.DBR_CHAR:
        return value
    else:
        return value
|
||||
|
||||
#######################################################################
|
||||
def checkName(name, timeout=None, retries=None):
    """
    usage: checkName("xxx:m1.VAL", timeout=None, retries=None)
    Intended for internal use by ca_util functions: make sure *name* has a
    connected entry in cadict, connecting (with retries) if necessary.
    Raises ca_utilException for a null name, CaChannelException when the PV
    cannot be connected within the allowed retries.
    """

    global cadict, defaultTimeout, defaultRetries
    if not name:
        raise ca_utilException(EXCEPTION_NULL_NAME)

    # Fall back to module-wide defaults; "NONE" explicitly disables a setting.
    if timeout is None and defaultTimeout is not None:
        timeout = defaultTimeout
    if timeout == "NONE":
        timeout = None

    if retries is None and defaultRetries is not None:
        retries = defaultRetries
    if retries is None or retries == "NONE":
        retries = 0

    # BUG FIX: 'status' and 'channel' were referenced on paths where they may
    # never have been bound (e.g. the CaChannel() constructor itself raising),
    # producing UnboundLocalError instead of the intended CA error.  Both are
    # now initialized/guarded.
    status = None
    tries = 0
    while (name not in cadict) and (tries <= retries):
        # Make a new entry in the PV-name dictionary.
        channel = None
        try:
            channel = CaChannel.CaChannel()
            if timeout is not None:
                channel.setTimeout(timeout)
            channel.searchw(name)
            cadict[name] = cadictEntry(channel)
        except CaChannel.CaChannelException:
            status = sys.exc_info()[1]
            if channel is not None:
                del channel
        tries += 1

    if name not in cadict:
        print("ca_util.checkName: Can't connect to '%s'" % name)
        raise CaChannel.CaChannelException(status)
|
||||
|
||||
#######################################################################
|
||||
def castate(name=None, timeout=None, retries=None):
    """usage: val = castate("xxx:m1.VAL", timeout=None, retries=None)
    Try to read a PV, to find out whether it's really connected, and
    whether caller is permitted to read and write it, without allowing
    any exceptions to be thrown at the caller.
    Returns a descriptive string such as "connected", "connected, noread",
    "not connected", or "error".
    """

    global cadict, defaultTimeout, defaultRetries

    if not name: return "Null name has no state"

    # The only reliable way to check the *current* state of a PV is to attempt to use it.
    try:
        val = caget(name, timeout=timeout, retries=retries)
    except CaChannel.CaChannelException:
        # Ignored deliberately: the checkName()/state() calls below report it.
        pass

    # Make sure a cadict entry exists (reconnecting if caget tore it down).
    try:
        checkName(name, timeout=timeout)
    except CaChannel.CaChannelException:
        return "not connected"
    except:
        return "error"

    try:
        state = cadict[name].channel.state()
    except CaChannel.CaChannelException:
        return "not connected"
    except:
        return "error"
    else:
        try:
            read_access = cadict[name].channel.read_access()
            write_access = cadict[name].channel.write_access()
            # Map the numeric CA connection state to a readable description.
            if state in ca_states:
                s = ca_states[state]
            else:
                s = "unknown state"
            # Append access restrictions, if any.
            if not read_access: s += ", noread"
            if not write_access: s += ", nowrite"
            return s
        except:
            return "error"
|
||||
|
||||
#######################################################################
|
||||
def caget(name, timeout=None, retries=None, req_type=None, req_count=None):
    """usage: val = caget("xxx:m1.VAL", timeout=None, retries=None,
        req_type=None, req_count=None)
    Read a PV value, connecting on first use, with retries.  req_type and
    req_count default to the channel's native type and element count.
    """

    global cadict, defaultTimeout, defaultRetries

    if not name:
        print("caget: no PV name supplied")
        raise ca_utilException(EXCEPTION_NULL_NAME)
    # Fall back to module-wide defaults; "NONE" explicitly disables a setting.
    if ((timeout==None) and (defaultTimeout != None)): timeout = defaultTimeout
    if (timeout == "NONE"): timeout = None
    if ((retries==None) and (defaultRetries != None)): retries = defaultRetries
    if ((retries == None) or (retries == "NONE")): retries = 0
    retries = max(retries,0)
    retry = retries + 1
    success = 0

    # CaChannel sometimes chokes when it tries to process a channel that has been disconnected.
    # The simplest fix is to clear the channel and reconnect to the PV, which we can do cleanly
    # by deleting our dict entry for the channel, and calling checkName() to make a new entry.

    while ((not success) and (retry > 0)):
        # First ensure a live cadict entry, spending retries on connect failures.
        checked = 0
        while ((not checked) and (retry > 0)):
            retry -= 1
            try:
                checkName(name, timeout=timeout)
            except CaChannel.CaChannelException:
                if retry <= 0:
                    raise
            else:
                checked = 1

        entry = cadict[name]
        if (timeout != None): entry.channel.setTimeout(timeout)
        if req_type == None:
            req_type=entry.field_type
        # kludge for broken DBR_CHAR
        if req_type == ca.DBR_CHAR:
            req_type = ca.DBR_INT
        if req_count == None:
            req_count = entry.element_count
        # Clamp the request to what the channel actually has.
        req_count = max(0, min(req_count, entry.element_count))
        try:
            val = entry.channel.getw(req_type=req_type, count=req_count)
        except CaChannel.CaChannelException:
            status = sys.exc_info()[1]
            if ((int(status) == ca.ECA_BADTYPE) or (int(status) == ca.ECA_DISCONN)):
                # Delete dictionary entry. This clears the CA connection, so the
                # next pass of the loop reconnects from scratch.
                print("caget: Repairing CA connection to ", name)
                del cadict[name]
                retry += 1
            if retry <= 0:
                raise
        else:
            success = 1
    return val
|
||||
|
||||
def isNumber(s):
    """Return True if int(s) succeeds (i.e. *s* is an integer, a float, or an
    integer-formatted string).  Note the int-based test rejects strings like
    "1.5", preserving the original behavior.
    """
    try:
        int(s)
    # BUG FIX: the bare 'except:' also swallowed KeyboardInterrupt and
    # SystemExit; only conversion failures should report "not a number".
    except (TypeError, ValueError):
        return False
    return True
|
||||
|
||||
#######################################################################
|
||||
def same(value, readback, native_readback, field_type, read_check_tolerance):
    """For internal use by ca_util: report whether *readback* agrees with the
    *value* we wrote, compared according to the channel's native field type.
    NOTE(review): the float branch assumes read_check_tolerance is numeric --
    callers (_caput) guard against None/"NONE" before calling.
    """
    if field_type in [ca.DBR_FLOAT, ca.DBR_DOUBLE]:
        # Floats must agree to within the caller-supplied tolerance.
        return (abs(float(readback)-float(value)) < read_check_tolerance)
    elif field_type in [ca.DBR_INT, ca.DBR_SHORT, ca.DBR_LONG]:
        return (abs(int(readback)-int(value)) == 0)
    elif field_type == ca.DBR_ENUM:
        # An enum may have been written as the string label or the numeric
        # choice index; accept a match against either readback form.
        if str(value) == str(readback):
            return True
        if str(value) == str(native_readback):
            return True
        return False
    else:
        return (str(value) == str(readback))
|
||||
|
||||
#######################################################################
|
||||
def caput(name, value, timeout=None, req_type=None, retries=None, read_check_tolerance=None):
    """
    usage: caput("xxx:m1.VAL", new_value, timeout=None, req_type=None,
        retries=None, read_check_tolerance=None)
    Put a value, and optionally check that the value arrived safely.
    read_check_tolerance == None (or omitted) uses the module default;
    "NONE" disables the read check entirely.  When checking, floating point
    numbers must agree within the tolerance and other types must agree
    exactly.  Module-wide defaults for timeout, retries, and
    read_check_tolerance are set with set_ca_util_defaults().
    """
    # Delegate to the shared implementation; wait_timeout=0 means "no wait".
    _caput("caput", name, value, wait_timeout=0, timeout=timeout,
           req_type=req_type, retries=retries,
           read_check_tolerance=read_check_tolerance)
|
||||
|
||||
|
||||
#######################################################################
|
||||
def __ca_util_waitCB(epics_args, user_args):
    """Put-completion callback used internally by caputw(); user_args[0] is
    the PV name whose put just finished."""
    # Mark the pending put complete so _caput's caputw wait loop can exit.
    cadict[user_args[0]].callbackReceived = 1
|
||||
|
||||
#######################################################################
|
||||
def caputw(name, value, wait_timeout=None, timeout=None, req_type=None, retries=None,
    read_check_tolerance=None):
    """
    usage: caputw("xxx:m1.VAL", new_value, wait_timeout=None, timeout=None,
        req_type=None, retries=None, read_check_tolerance=None)
    Put a value, optionally verify that it arrived safely, and wait (no
    longer than wait_timeout seconds) for record processing to complete.
    read_check_tolerance == None (or omitted) uses the module default;
    "NONE" disables the read check.  When checking, floating point numbers
    must agree within the tolerance and other types must agree exactly.
    Module-wide defaults for timeout, retries, and read_check_tolerance are
    set with set_ca_util_defaults().
    """
    # Delegate to the shared implementation used by both caput() and caputw().
    _caput("caputw", name, value, wait_timeout=wait_timeout, timeout=timeout,
           req_type=req_type, retries=retries,
           read_check_tolerance=read_check_tolerance)
|
||||
|
||||
|
||||
#######################################################################
|
||||
def _caput(function, name, value, wait_timeout=None, timeout=None, req_type=None, retries=None, read_check_tolerance=None):
    """Shared implementation behind caput() and caputw().

    'function' is "caput" (plain put) or "caputw" (put with completion
    callback, then wait up to wait_timeout seconds for processing to finish).
    Optionally reads the value back and compares it with what was written,
    repairing the CA connection and retrying as needed.
    """

    global cadict, defaultTimeout, defaultRetries, readCheckTolerance

    if not name:
        print("%s: no PV name supplied" % function)
        raise ca_utilException(EXCEPTION_NULL_NAME)
    # Fall back to module-wide defaults where no explicit value was given.
    if ((timeout == None) and (defaultTimeout != None)): timeout = defaultTimeout
    if ((retries == None) and (defaultRetries != None)): retries = defaultRetries
    if ((retries == None) or (retries == "NONE")): retries = 0
    if ((read_check_tolerance == None) and (readCheckTolerance != None)):
        read_check_tolerance = readCheckTolerance

    retries = max(retries,0)
    retry = retries + 1
    success = 0

    # Ensure a cadict entry exists (connect now if this PV is new).
    checkName(name, timeout=timeout, retries=retries)

    while ((not success) and (retry > 0)):

        retry -= 1
        entry = cadict[name]

        # castate() actually exercises the channel, so it reflects the
        # *current* connection state, not a stale cached one.
        state = castate(name, timeout)
        if (state != 'connected'):
            # Drop the cached channel; checkName()/castate() will reconnect.
            print("%s: Repairing CA connection to '%s'" % (function, name))
            del cadict[name]
            retry += 1
        else:
            if req_type == None:
                req_type=entry.field_type
            if ((timeout != None) and (timeout != "NONE")): entry.channel.setTimeout(timeout)
            entry.callbackReceived = 0 # in case we're doing caputw()
            try:
                if function == "caput":
                    entry.channel.putw(value, req_type=req_type)
                else: #caputw
                    retval = entry.channel.array_put_callback(value,req_type,entry.element_count,__ca_util_waitCB,name)
            except CaChannel.CaChannelException:
                status = sys.exc_info()[1]
                print("put() threw an exception (%s)" % status)
                if ((int(status) == ca.ECA_BADTYPE) or (int(status) == ca.ECA_DISCONN)):
                    # Delete dictionary entry. This clears the CA connection.
                    print("%s: Repairing CA connection to '%s'" % (function, name))
                    del cadict[name]
                    retry += 1
                if retry <= 0:
                    raise
                # NOTE(review): this returns after the first put exception,
                # so the retry += 1 above never leads to a retried put --
                # confirm whether another loop pass was intended here.
                entry.callbackReceived = 1
                return
            else:
                if ((read_check_tolerance == None) or (read_check_tolerance == "NONE")):
                    # No read-back verification requested.
                    success = True
                else:
                    # Flush the put to the server before reading back.
                    if timeout:
                        ca.pend_io(timeout)
                    else:
                        ca.pend_io(1.0)
                    readback_success = False
                    count = 0
                    while ((not readback_success) and (count < retries+1)):
                        try:
                            # Read back in both the requested and the native
                            # type; same() accepts agreement with either
                            # (relevant for enums written as label or index).
                            readback = caget(name, req_type=req_type)
                            native_readback = caget(name)
                            readback_success = True
                            if same(value, readback, native_readback, entry.field_type, read_check_tolerance):
                                success = True
                            else:
                                print("%s: readback '%s' disagrees with the value '%s' we wrote." % (function, readback, value))
                                raise ca_utilException(EXCEPTION_READBACK_DISAGREES)
                            entry.callbackReceived = 1
                        except CaChannel.CaChannelException:
                            print("%s: exception during readback." % (function))
                            count += 1

    if success and (function == "caputw"):
        # Wait (polling CA) until the put-completion callback fires or
        # wait_timeout elapses; a false-y wait_timeout means wait forever.
        start_time = time.time()
        timed_out = 0
        while (not entry.callbackReceived) and (not timed_out):
            time.sleep(0.1)
            ca.poll()
            if (not wait_timeout):
                timed_out = 0
            else:
                timed_out = ((time.time()-start_time) > wait_timeout)

        if not entry.callbackReceived:
            print("Execution not completed by wait_timeout (%d seconds)" % wait_timeout)
|
||||
|
||||
#######################################################################
|
||||
def camonitor(name, function, user_args=None, timeout=None, retries=None):
    """
    usage: camonitor("xxx:m1.VAL", python_function, user_args, timeout=None,
        retries=None)
    Subscribe *function* to value changes of the PV.
    Don't forget to call ca.pend_event(<pend_time_in_seconds>) periodically.
    """

    global defaultTimeout, defaultRetries

    if not name:
        print("camonitor: no PV name supplied")
        raise ca_utilException(EXCEPTION_NULL_NAME)
    if not function:
        print("camonitor: no callback function supplied")
        raise ca_utilException(EXCEPTION_NULL_NAME)
    # By default the PV name itself is handed to the callback as user_args.
    if not user_args: user_args = name
    # Fall back to module-wide defaults; "NONE" explicitly disables a setting.
    if ((timeout==None) and (defaultTimeout != None)): timeout = defaultTimeout
    if ((retries==None) and (defaultRetries != None)): retries = defaultRetries
    if ((retries == None) or (retries == "NONE")): retries = 0

    retries = max(retries,0)
    retry = retries + 1
    success = 0

    while ((not success) and (retry > 0)):
        # First ensure a live cadict entry, spending retries on connect failures.
        checked = 0
        while ((not checked) and (retry > 0)):
            retry -= 1
            try:
                checkName(name, timeout=timeout)
            except CaChannel.CaChannelException:
                if retry <= 0:
                    raise
            else:
                checked = 1

        entry = cadict[name]
        if ((timeout != None) and (timeout != "NONE")): entry.channel.setTimeout(timeout)
        try:
            # Subscribe with the channel's native type/count, value events only.
            entry.channel.add_masked_array_event(entry.field_type,entry.element_count,ca.DBE_VALUE, function, user_args)
        except CaChannel.CaChannelException:
            status = sys.exc_info()[1]
            if ((int(status) == ca.ECA_BADTYPE) or (int(status) == ca.ECA_DISCONN)):
                # Delete dictionary entry. This clears the CA connection, so
                # the next loop pass reconnects from scratch.
                print("camonitor: Repairing CA connection to %s" % name)
                del cadict[name]
                retry += 1
            if retry <= 0:
                raise
        else:
            success = 1
|
||||
|
||||
#######################################################################
|
||||
def caunmonitor(name, timeout=None):
    """usage: caunmonitor("xxx:m1.VAL", timeout=None)
    Cancel the value-change subscription previously installed by camonitor().
    Raises ca_utilException for a null name or when ca_util holds no
    connection for the PV.
    """

    global defaultTimeout

    if not name:
        print("caunmonitor: no PV name supplied")
        raise ca_utilException(EXCEPTION_NULL_NAME)
    if timeout is None and defaultTimeout is not None:
        timeout = defaultTimeout

    if name not in cadict:
        print("ca_util has no connection to '%s'" % name)
        raise ca_utilException(EXCEPTION_NOT_CONNECTED)

    channel = cadict[name].channel
    if timeout is not None and timeout != "NONE":
        channel.setTimeout(timeout)
    try:
        channel.clear_event()
    except CaChannel.CaChannelException:
        status = sys.exc_info()[1]
        # BUG FIX: 'status' is the exception object, and "%d" requires an
        # integer, so the old print raised TypeError instead of reporting the
        # error.  Convert explicitly, matching int(status) usage elsewhere.
        code = int(status)
        print("caunmonitor: CaChannel exception, status=%d (%s)" % (code, ca.message(code)))
        return
|
||||
|
||||
#######################################################################
|
||||
def test_monitor_function(epics_args, user_args):
|
||||
"""Example callback routine for use with camonitor()."""
|
||||
print('test_monitor_function:')
|
||||
print("...epics_args: %s" % repr(epics_args))
|
||||
print("...user_args: %s" % repr(user_args))
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
#-------------------------------------------------------------------------------------------
|
||||
# miscellaneous functions that might be useful, but haven't been integrated into the package
|
||||
|
||||
#######################################################################
|
||||
def endianUs():
    """
    usage: endianUs()
    Returns one of "Little Endian", "Big Endian", "Unknown Endian".
    """
    from struct import pack
    # Compare the native byte order of a short against each explicit order.
    native = pack('=h', 1)
    if native == pack('<h', 1):
        return "Little Endian"
    if native == pack('>h', 1):
        return "Big Endian"
    return "Unknown Endian"
|
||||
|
||||
#######################################################################
|
||||
def printExceptionInfo(maxTBlevel=15):
    """Print the active exception's class name, args, and up to maxTBlevel
    traceback frames.  Intended for internal use by ca_util functions; must
    be called from inside an ``except`` block."""

    import sys, traceback
    cla, exc, trbk = sys.exc_info()
    excName = cla.__name__
    # BUG FIX: on Python 3, exception args are not stored in exc.__dict__
    # (they live in a C-level slot), so the old exc.__dict__["args"] lookup
    # always fell back to "<no args>".  getattr() reads the real args.
    excArgs = getattr(exc, "args", "<no args>")
    excTb = traceback.format_tb(trbk, maxTBlevel)
    print("Unanticipated exception: %s %s\n" % (excName, excArgs))
    if (len(excTb) > 0):
        print("Traceback:")
        for trace in excTb:
            print(trace)
    return
|
||||
189
script/_Lib/epics/cadefs.py
Normal file
189
script/_Lib/epics/cadefs.py
Normal file
@@ -0,0 +1,189 @@
|
||||
""" @package cadefs
constants and enumerated constants

This defines constants and classes useful to interpret codes returned from the CA library.
"""
|
||||
# Channel Access operation codes: the "op" value delivered to CA callbacks,
# identifying which kind of call triggered the callback.
CA_OP_GET = 0
CA_OP_PUT = 1
CA_OP_CREATE_CHANNEL = 2
CA_OP_ADD_EVENT = 3
CA_OP_CLEAR_EVENT = 4
CA_OP_OTHER = 5
# used with connection callbacks
CA_OP_CONN_UP = 6
CA_OP_CONN_DOWN = 7
# Event-selection mask bits, imported from caeventmask.h.
DBE_VALUE =(1<<0)
DBE_LOG =(1<<1)
DBE_ALARM =(1<<2)
DBE_PROPERTY=(1<<3)
# Also check ECA_IODONE/ECA_IOINPROGRESS in caError.py.
IODONE = 42
IOINPROGRESS = 43
|
||||
#
|
||||
# Database field (DBF) type codes.
DBF_NATIVE = -1
DBF_STRING = 0
DBF_INT = 1
DBF_SHORT = 1
DBF_FLOAT = 2
DBF_ENUM = 3
DBF_CHAR = 4
DBF_LONG = 5
DBF_DOUBLE = 6
DBF_NO_ACCES = 7
LAST_TYPE = DBF_DOUBLE

def VALID_DB_FIELD(x):
    """True if x is a valid DBF_* field-type code."""
    return 0 <= x <= LAST_TYPE

def INVALID_DB_FIELD(x):
    """True if x is not a valid DBF_* field-type code."""
    return not 0 <= x <= LAST_TYPE

# Data request buffer (DBR) type codes: the plain value requests plus their
# status (STS), timestamped (TIME), graphic (GR) and control (CTRL) variants.
DBR_NATIVE = DBF_NATIVE
DBR_STRING = DBF_STRING
DBR_INT = DBF_INT
DBR_SHORT = DBF_INT
DBR_FLOAT = DBF_FLOAT
DBR_ENUM = DBF_ENUM
DBR_CHAR = DBF_CHAR
DBR_LONG = DBF_LONG
DBR_DOUBLE = DBF_DOUBLE
DBR_STS_STRING = 7
DBR_STS_SHORT = 8
DBR_STS_INT = DBR_STS_SHORT
DBR_STS_FLOAT = 9
DBR_STS_ENUM = 10
DBR_STS_CHAR = 11
DBR_STS_LONG = 12
DBR_STS_DOUBLE = 13
DBR_TIME_STRING = 14
DBR_TIME_INT = 15
DBR_TIME_SHORT = 15
DBR_TIME_FLOAT = 16
DBR_TIME_ENUM = 17
DBR_TIME_CHAR = 18
DBR_TIME_LONG = 19
DBR_TIME_DOUBLE = 20
DBR_GR_STRING = 21
DBR_GR_SHORT = 22
DBR_GR_INT = DBR_GR_SHORT
DBR_GR_FLOAT = 23
DBR_GR_ENUM = 24
DBR_GR_CHAR = 25
DBR_GR_LONG = 26
DBR_GR_DOUBLE = 27
DBR_CTRL_STRING = 28
DBR_CTRL_SHORT = 29
DBR_CTRL_INT = DBR_CTRL_SHORT
DBR_CTRL_FLOAT = 30
DBR_CTRL_ENUM = 31
DBR_CTRL_CHAR = 32
DBR_CTRL_LONG = 33
DBR_CTRL_DOUBLE = 34
DBR_PUT_ACKT = DBR_CTRL_DOUBLE + 1
DBR_PUT_ACKS = DBR_PUT_ACKT + 1
DBR_STSACK_STRING = DBR_PUT_ACKS + 1
LAST_BUFFER_TYPE = DBR_STSACK_STRING

def VALID_DB_REQ(x):
    """True if x is a valid DBR_* request-buffer type code."""
    return 0 <= x <= LAST_BUFFER_TYPE

def INVALID_DB_REQ(x):
    """True if x is not a valid DBR_* request-buffer type code."""
    return not 0 <= x <= LAST_BUFFER_TYPE
|
||||
|
||||
class AlarmSeverity:
    """Constants for EPICS channel alarm severity.

    Index Strings or Colors by a severity code to get its display name
    or display color.
    """
    NO_ALARM, MINOR_ALARM, MAJOR_ALARM, INVALID_ALARM = range(4)
    ALARM_NSEV = INVALID_ALARM + 1    # number of severity levels
    Strings = ("NO_ALARM", "MINOR", "MAJOR", "INVALID")
    Colors = ("green", "yellow", "red", "grey")
|
||||
|
||||
class AlarmStatus:
    """Constants for the alarm-status codes returned by EPICS Channel Access.

    Index Strings by a status code to get its display name.
    """
    NO_ALARM = 0
    READ_ALARM = 1
    WRITE_ALARM = 2
    # analog alarms
    HIHI_ALARM = 3
    HIGH_ALARM = 4
    LOLO_ALARM = 5
    LOW_ALARM = 6
    # binary alarms
    STATE_ALARM = 7
    COS_ALARM = 8
    # other alarms
    COMM_ALARM = 9
    TIMEOUT_ALARM = 10
    HW_LIMIT_ALARM = 11
    CALC_ALARM = 12
    SCAN_ALARM = 13
    LINK_ALARM = 14
    SOFT_ALARM = 15
    BAD_SUB_ALARM = 16
    UDF_ALARM = 17
    DISABLE_ALARM = 18
    SIMM_ALARM = 19
    READ_ACCESS_ALARM = 20
    WRITE_ACCESS_ALARM = 21
    Strings = ("NO_ALARM", "READ", "WRITE", "HIHI", "HIGH", "LOLO", "LOW",
               "STATE", "COS", "COMM", "TIMEOUT", "HWLIMIT", "CALC", "SCAN",
               "LINK", "SOFT", "BAD_SUB", "UDF", "DISABLE", "SIMM",
               "READ_ACCESS", "WRITE_ACCESS")
|
||||
|
||||
|
||||
# Channel connection states, mirroring the channel_state enum:
# ch_state={cs_never_conn=0, cs_prev_conn, cs_conn, cs_closed}
cs_never_conn, cs_prev_conn, cs_conn, cs_closed = range(4)
|
||||
|
||||
class ch_state:
    """Constants for channel connection status.

    Index Strings by a state code to get a human-readable description.
    """
    cs_never_conn, cs_prev_conn, cs_conn, cs_closed = range(4)
    Strings = ("channel never connected",
               "channel previously connected",
               "channel connected",
               "channel already closed")
|
||||
357
script/_Lib/epics/epicsMotor.py
Normal file
357
script/_Lib/epics/epicsMotor.py
Normal file
@@ -0,0 +1,357 @@
|
||||
"""
|
||||
This module provides support for the EPICS motor record.
|
||||
|
||||
Author: Mark Rivers
|
||||
Created: Sept. 16, 2002
|
||||
Modifications:
|
||||
"""
|
||||
import time
|
||||
|
||||
import epicsPV
|
||||
|
||||
class epicsMotor:
    """
    Client-side wrapper for the EPICS motor record, built on epicsPV
    (a subclass of CaChannel).

    Virtual attributes (implemented via __getattr__/__setattr__; each maps
    to a motor-record field):

        slew_speed (.VELO), base_speed (.VBAS), acceleration (.ACCL),
        description (.DESC), resolution (.MRES), high_limit (.HLM),
        low_limit (.LLM), dial_high_limit (.DHLM), dial_low_limit (.DLLM),
        backlash (.BDST), offset (.OFF), done_moving (.DMOV, read-only).

    move() and wait() raise epicsMotorException on a soft or hard limit
    violation unless called with ignore_limits=1.

    Example:
        m = epicsMotor('13BMD:m38')
        m.move(10)                    # absolute move, user coordinates
        m.move(1, step=1, relative=1) # one step relative
        m.wait()                      # wait until motion stops
        m.slew_speed = 2.5            # write .VELO
    """

    # Record fields connected at construction; pvs dict keys are the
    # lower-case field names.
    _FIELDS = ('VAL', 'DVAL', 'RVAL', 'RLV', 'RBV', 'DRBV', 'RRBV', 'DMOV',
               'STOP', 'VELO', 'VBAS', 'ACCL', 'DESC', 'MRES', 'HLM', 'LLM',
               'DHLM', 'DLLM', 'BDST', 'SET', 'LVIO', 'LLS', 'HLS', 'OFF')

    # Virtual attribute name -> pvs key.  'done_moving' is read-only.
    _ATTR_MAP = {'slew_speed': 'velo', 'base_speed': 'vbas',
                 'acceleration': 'accl', 'description': 'desc',
                 'resolution': 'mres', 'high_limit': 'hlm',
                 'low_limit': 'llm', 'dial_high_limit': 'dhlm',
                 'dial_low_limit': 'dllm', 'backlash': 'bdst',
                 'offset': 'off', 'done_moving': 'dmov'}

    def __init__(self, name):
        """
        Connect to motor record *name* (record name without any trailing
        period or field).

        Example:
            m = epicsMotor('13BMD:m38')
        """
        self.pvs = dict((f.lower(), epicsPV.epicsPV(name + '.' + f, wait=0))
                        for f in self._FIELDS)
        # Searches were deferred (wait=0); one pend_io connects them all.
        self.pvs['val'].pend_io()

    def move(self, value, relative=0, dial=0, step=0, ignore_limits=0):
        """
        Move to an absolute position, or relative to the current position,
        in user (default), dial or step coordinates.

        Inputs:
            value: absolute target or relative amount.
        Keywords:
            relative: 1 for a relative move (default absolute).
            dial: 1 if *value* is in dial coordinates.
            step: 1 if *value* is in steps ('dial' and 'step' are mutually
                  exclusive).
            ignore_limits: 1 to suppress the limit check after the move.
        Raises:
            epicsMotorException on a limit violation (unless suppressed).
        """
        if dial != 0:
            target = value if relative == 0 else self.get_position(dial=1) + value
            self.pvs['dval'].putw(target)
        elif step != 0:
            target = value if relative == 0 else self.get_position(step=1) + value
            self.pvs['rval'].putw(target)
        elif relative != 0:
            # User-coordinate relative moves go through the .RLV field.
            self.pvs['rlv'].putw(value)
        else:
            self.pvs['val'].putw(value)
        if ignore_limits == 0:
            self.check_limits()

    def check_limits(self):
        """Raise epicsMotorException if a soft or hard limit is violated."""
        for key, message in (('lvio', 'Soft limit violation'),
                             ('lls', 'Low hard limit violation'),
                             ('hls', 'High hard limit violation')):
            if self.pvs[key].getw() != 0:
                raise epicsMotorException(message)

    def stop(self):
        """Immediately stop the motor by writing 1 to the .STOP field."""
        self.pvs['stop'].putw(1)

    def get_position(self, dial=0, readback=0, step=0):
        """
        Return the target (default) or readback position in user (default),
        dial or step coordinates.

        Keywords:
            readback: 1 for the actual (readback) position.
            dial: 1 for dial coordinates.
            step: 1 for steps ('dial' and 'step' are mutually exclusive).
        """
        if dial != 0:
            key = 'drbv' if readback != 0 else 'dval'
        elif step != 0:
            key = 'rrbv' if readback != 0 else 'rval'
        else:
            key = 'rbv' if readback != 0 else 'val'
        return self.pvs[key].getw()

    def set_position(self, position, dial=0, step=0):
        """
        Redefine the current position (no motion) in user (default), dial or
        step coordinates.  The record is briefly switched to "Set" mode via
        the .SET field, then back to "Use" mode.
        """
        self.pvs['set'].putw(1)
        if dial != 0:
            key = 'dval'
        elif step != 0:
            key = 'rval'
        else:
            key = 'val'
        self.pvs[key].putw(position)
        self.pvs['set'].putw(0)

    def wait(self, start=0, stop=0, poll=0.01, ignore_limits=0):
        """
        Wait for the motor to start and/or stop moving, polling .DMOV every
        *poll* seconds.  With neither keyword set, waits for the stop.
        With both set, waits for the start and then the stop.

        Keywords:
            start: 1 to wait for motion to begin.
            stop: 1 to wait for motion to end.
            poll: polling interval in seconds (default 0.01).
            ignore_limits: 1 to suppress the limit check afterwards.
        Raises:
            epicsMotorException on a limit violation (unless suppressed).
        """
        if start == 0 and stop == 0:
            stop = 1
        if start != 0:
            # .DMOV == 1 means idle; leave the loop once motion begins.
            while self.pvs['dmov'].getw() == 1:
                time.sleep(poll)
        if stop != 0:
            # .DMOV == 0 means moving; leave the loop once motion ends.
            while self.pvs['dmov'].getw() == 0:
                time.sleep(poll)
        if ignore_limits == 0:
            self.check_limits()

    def __getattr__(self, attrname):
        # Serve the virtual attributes by reading the mapped PV.
        try:
            key = self._ATTR_MAP[attrname]
        except KeyError:
            raise AttributeError(attrname)
        return self.pvs[key].getw()

    def __setattr__(self, attrname, value):
        # 'pvs' is the only real instance attribute; everything else is a
        # virtual attribute written to its mapped PV ('done_moving' is
        # read-only).
        if attrname == 'pvs':
            self.__dict__[attrname] = value
            return
        if attrname == 'done_moving' or attrname not in self._ATTR_MAP:
            raise AttributeError(attrname)
        self.pvs[self._ATTR_MAP[attrname]].putw(value)
|
||||
|
||||
class epicsMotorException(Exception):
    """Raised by epicsMotor when a soft- or hard-limit violation is detected.

    BUG FIX: the original assigned the raw message directly to ``self.args``;
    under Python 3 a string assigned to ``args`` is coerced to a tuple of
    single characters, which garbles ``str(exc)``.  Delegating to
    ``Exception.__init__`` keeps ``args`` a proper one-element tuple while
    preserving the ``args=None`` constructor signature for existing callers.
    """
    def __init__(self, args=None):
        Exception.__init__(self, args)
|
||||
273
script/_Lib/epics/epicsPV.py
Normal file
273
script/_Lib/epics/epicsPV.py
Normal file
@@ -0,0 +1,273 @@
|
||||
"""
|
||||
This module defines the epicsPV class, which adds additional features to
|
||||
Geoff Savage's CaChannel class.
|
||||
|
||||
Author: Mark Rivers
|
||||
Created: Sept. 16, 2002.
|
||||
Modifications:
|
||||
"""
|
||||
import CaChannel
|
||||
|
||||
class epicsPV(CaChannel.CaChannel):
    """
    CaChannel subclass with the following conveniences:

    - The constructor optionally does a searchw() (or a deferred search())
      when given a PV name.
    - setMonitor() installs a generic value-change callback; subsequent
      getw(), getValue() or array_get() calls return the value cached by the
      most recent callback, avoiding network latency.
    - checkMonitor() reports whether a new monitor callback arrived since
      the last read — useful for efficient polling.
    - getControl() fetches the DBR_CTRL information (graphic/control/alarm
      limits, precision, units, ...) without user-written callbacks; the
      results appear as attributes of self.callBack.
    - putWait() performs array_put_callback() synchronously, returning only
      when the completion callback has fired.
    """

    def __init__(self, pvName=None, wait=1):
        """
        Keywords:
            pvName: optional EPICS process-variable name.
            wait: with pvName given, 1 (default) does a blocking searchw();
                  0 does a deferred search(), and the caller must later
                  pend_io() on this or another channel.
        """
        # Callback state lives on a separate callBack object: keeping it off
        # the epicsPV instance avoids a circular reference that would keep
        # the CaChannel destructor from ever running.
        self.callBack = callBack()
        CaChannel.CaChannel.__init__(self)
        if pvName is not None:
            if wait:
                self.searchw(pvName)
            else:
                self.search(pvName)

    def setMonitor(self):
        """
        Install a generic value-change monitor.  Later getw(), getValue()
        and array_get() calls serve the cached monitor value instead of
        doing channel-access reads.
        """
        self.add_masked_array_event(None, None, CaChannel.ca.DBE_VALUE,
                                    getCallback, self.callBack)
        self.callBack.monitorState = 1

    def clearMonitor(self):
        """
        Cancel a previous setMonitor() (via CaChannel.clear_event());
        subsequent reads go back to real channel-access calls.
        """
        self.clear_event()
        self.callBack.monitorState = 0

    def checkMonitor(self):
        """
        Return 1 when a value callback arrived since the last call to
        checkMonitor(), getw(), getValue() or array_get(); otherwise 0.
        """
        # self.poll() would be the natural call, but it was generating
        # errors, hence the tiny pend_event timeout.
        self.pend_event(.0001)
        arrived = self.callBack.newMonitor
        self.callBack.newMonitor = 0
        return arrived

    def getControl(self, req_type=None, count=None, wait=1, poll=.01):
        """
        Read the "control" information of the PV without user callbacks, by
        issuing array_get_callback() with dbf_type_to_DBR_CTRL(req_type).
        Besides the value, this delivers the graphic/control/alarm limits,
        precision, units, etc., as attributes of self.callBack.

        Keywords:
            req_type: request type; defaults to the channel's field type.
            count: element count passed through to array_get_callback().
            wait: 1 (default) blocks until the callback fires; with 0 the
                  caller must poll via checkMonitor().
            poll: pend_event() timeout while waiting — shorter values lower
                  latency at the price of CPU cycles.
        """
        if req_type is None:
            req_type = self.field_type()
        if wait != 0:
            self.callBack.newMonitor = 0
        self.array_get_callback(CaChannel.ca.dbf_type_to_DBR_CTRL(req_type),
                                count, getCallback, self.callBack)
        if wait != 0:
            while self.callBack.newMonitor == 0:
                self.pend_event(poll)

    def array_get(self, req_type=None, count=None):
        """
        CaChannel.array_get(), except that with an active monitor whose
        callback has arrived the cached value is returned instead.
        """
        if self.callBack.monitorState != 0:
            # poll() misbehaved historically; a tiny pend_event instead.
            self.pend_event(.0001)
        if self.callBack.monitorState != 2:
            return CaChannel.CaChannel.array_get(self, req_type, count)
        self.callBack.newMonitor = 0
        return self.callBack.pv_value

    def getw(self, req_type=None, count=None):
        """
        CaChannel.getw(), except that with an active monitor whose callback
        has arrived the cached value (optionally truncated to *count*
        elements) is returned instead.
        """
        if self.callBack.monitorState != 0:
            self.pend_event(.0001)
        if self.callBack.monitorState != 2:
            return CaChannel.CaChannel.getw(self, req_type, count)
        self.callBack.newMonitor = 0
        value = self.callBack.pv_value
        return value if count is None else value[0:count]

    def getValue(self):
        """
        CaChannel.getValue(), except that with an active monitor whose
        callback has arrived the cached value is returned instead.
        """
        if self.callBack.monitorState != 0:
            self.pend_event(.0001)
        if self.callBack.monitorState != 2:
            return CaChannel.CaChannel.getValue(self)
        self.callBack.newMonitor = 0
        return self.callBack.pv_value

    def putWait(self, value, req_type=None, count=None, poll=.01):
        """
        Synchronous array_put_callback(): issue the put, then block, polling
        pend_event(poll), until the completion callback fires.

        Keywords:
            req_type, count: passed through to array_put_callback().
            poll: pend_event() timeout while waiting.
        """
        self.callBack.putComplete = 0
        self.array_put_callback(value, req_type, count, putCallBack,
                                self.callBack)
        while self.callBack.putComplete == 0:
            self.pend_event(poll)
|
||||
|
||||
class callBack:
    """
    Callback-state holder used by epicsPV.

    The state lives outside the epicsPV instance to avoid a circular
    reference that would prevent the CaChannel destructor from running.
    After epicsPV.getControl(), the fields copied from the CA callback
    (pv_value, limits, precision, units, ...) are available here as
    attributes.
    """

    def __init__(self):
        # newMonitor: set to 1 by getCallback when fresh data arrives.
        self.newMonitor = 0
        # putComplete: set to 1 by putCallBack when a put finishes.
        self.putComplete = 0
        # monitorState: 0 = not monitored; 1 = monitor requested, no
        # callback yet; 2 = monitor requested and a callback has arrived.
        self.monitorState = 0
|
||||
|
||||
|
||||
def putCallBack(epicsArgs, userArgs):
    """Generic completion callback used by epicsPV.putWait().

    Simply flags the associated callBack object's putComplete to 1 so the
    waiting putWait() loop can return.
    """
    userArgs[0].putComplete = 1
|
||||
|
||||
def getCallback(epicsArgs, userArgs):
    """Generic monitor callback enabled by epicsPV.setMonitor().

    Copies every entry of the epicsArgs dictionary onto the callBack object,
    advances monitorState from 1 to 2 on the first callback, and flags
    newMonitor so waiters/pollers see fresh data.
    """
    target = userArgs[0]
    for key, value in epicsArgs.items():
        setattr(target, key, value)
    if target.monitorState == 1:
        target.monitorState = 2
    target.newMonitor = 1
|
||||
|
||||
896
script/_Lib/ijutils.js
Normal file
896
script/_Lib/ijutils.js
Normal file
@@ -0,0 +1,896 @@
|
||||
///////////////////////////////////////////////////////////////////////////////////////////////////
|
||||
// Facade to ImageJ functionality
|
||||
///////////////////////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
//More information on:
|
||||
// Image: https://imagej.nih.gov/ij/docs/guide/146-28.htmltoc-Section-28
|
||||
// Process: https://imagej.nih.gov/ij/docs/guide/146-29.html#toc-Section-29
|
||||
// Analyze: https://imagej.nih.gov/ij/docs/guide/146-30.html#toc-Section-30
|
||||
|
||||
Utils = Java.type('ch.psi.pshell.imaging.Utils')
|
||||
Pair = Java.type('org.apache.commons.math3.util.Pair')
|
||||
|
||||
|
||||
IJ = Java.type('ij.IJ')
|
||||
ImageJ = Java.type('ij.ImageJ')
|
||||
WindowManager = Java.type('ij.WindowManager')
|
||||
ImagePlus = Java.type('ij.ImagePlus')
|
||||
Prefs = Java.type('ij.Prefs')
|
||||
FileSaver = Java.type('ij.io.FileSaver')
|
||||
|
||||
ImageProcessor = Java.type('ij.process.ImageProcessor')
|
||||
ByteProcessor = Java.type('ij.process.ByteProcessor')
|
||||
ShortProcessor = Java.type('ij.process.ShortProcessor')
|
||||
ColorProcessor = Java.type('ij.process.ColorProcessor')
|
||||
FloatProcessor = Java.type('ij.process.FloatProcessor')
|
||||
ImageConverter = Java.type('ij.process.ImageConverter')
|
||||
AutoThresholder = Java.type('ij.process.AutoThresholder')
|
||||
LUT = Java.type('ij.process.LUT')
|
||||
Measurements = Java.type('ij.measure.Measurements')
|
||||
|
||||
ResultsTable = Java.type('ij.measure.ResultsTable')
|
||||
Analyzer = Java.type('ij.plugin.filter.Analyzer')
|
||||
GaussianBlur = Java.type('ij.plugin.filter.GaussianBlur')
|
||||
Filters = Java.type('ij.plugin.filter.Filters')
|
||||
FFTFilter = Java.type('ij.plugin.filter.FFTFilter')
|
||||
BackgroundSubtracter = Java.type('ij.plugin.filter.BackgroundSubtracter')
|
||||
EDM = Java.type('ij.plugin.filter.EDM')
|
||||
Shadows = Java.type('ij.plugin.filter.Shadows')
|
||||
UnsharpMask = Java.type('ij.plugin.filter.UnsharpMask')
|
||||
MaximumFinder = Java.type('ij.plugin.filter.MaximumFinder')
|
||||
EDM = Java.type('ij.plugin.filter.EDM')
|
||||
Shadows = Java.type('ij.plugin.filter.Shadows')
|
||||
UnsharpMask = Java.type('ij.plugin.filter.UnsharpMask')
|
||||
RankFilters = Java.type('ij.plugin.filter.RankFilters')
|
||||
Convolver = Java.type('ij.plugin.filter.Convolver')
|
||||
ParticleAnalyzer = Java.type('ij.plugin.filter.ParticleAnalyzer')
|
||||
|
||||
ContrastEnhancer = Java.type('ij.plugin.ContrastEnhancer')
|
||||
Thresholder = Java.type('ij.plugin.Thresholder')
|
||||
ImageCalculator = Java.type('ij.plugin.ImageCalculator')
|
||||
FFT = Java.type('ij.plugin.FFT')
|
||||
Concatenator = Java.type('ij.plugin.Concatenator')
|
||||
|
||||
//ImageJ customizations
|
||||
FFTMath = Java.type('ch.psi.pshell.imaging.ij.FFTMath')
|
||||
FFTFilter = Java.type('ch.psi.pshell.imaging.ij.FFTFilter')
|
||||
Binary = Java.type('ch.psi.pshell.imaging.ij.Binary')
|
||||
Slicer = Java.type('ch.psi.pshell.imaging.ij.Slicer')
|
||||
|
||||
|
||||
//This eliminates the error messages due to the bug on ij.gui.ImageWindow row 555 (ij is null)
//Create the hidden ImageJ instance only once per engine session.
if (typeof _image_j === 'undefined') {
    _image_j = new ImageJ(null, ImageJ.NO_SHOW)
}
|
||||
|
||||
///////////////////////////////////////////////////////////////////////////////////////////////////
|
||||
//Image creation, copying & saving
|
||||
///////////////////////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
function load_image(image, title){
    /*
    Load an image into an ImagePlus.
        image: file name (string) or a java BufferedImage
        title: optional window title (default "img")
    Returns a new ImagePlus wrapping the image.
    */
    if (!is_defined(title)) title = "img"

    if (typeof image == 'string'){
        //BUG FIX: the original ran the file-loading code even when a
        //BufferedImage was passed, reusing a stale 'file' variable.
        //The whole resolve/load/fallback sequence only applies to names.
        var file = image
        try{
            file = get_context().setup.expandPath(image)
        } catch(ex) {
        }
        try{
            image = Utils.newImage(file)
        } catch(ex) {
            //try loading from assembly resources instead
            image = get_context().setup.getAssemblyImage(image)
        }
    }
    return new ImagePlus(title, image)
}
|
||||
|
||||
function load_array(array, width, height, title){
    /*
    Creates an ImagePlus from a numeric array.
    array: 1d array if width and height are defined, or else 2d array to be flattened.
    Note: int and double data are loaded into a FloatProcessor (ImageJ has no int/double processor).
    Fixes: "new yteProcessor" typo (ByteProcessor), and "array.slice[0, n]" which indexed
    the slice function instead of calling it.
    */
    if (!is_defined(width)) width = null
    if (!is_defined(height)) height = null
    if (!is_defined(title)) title = "img"
    var proc
    if ((width == null) && (height == null)){
        //2D input: dimensions are taken from the array itself
        var h = array.length
        var w = array[0].length
        if (array.typecode == '[B') proc = new ByteProcessor(w, h, Convert.flatten(array))
        else if (array.typecode == '[S') proc = new ShortProcessor(w, h, Convert.flatten(array), null)
        else if (array.typecode == '[I') proc = new FloatProcessor(w, h, Convert.flatten(array))
        else if (array.typecode == '[F') proc = new FloatProcessor(w, h, Convert.flatten(array))
        else if (array.typecode == '[D') proc = new FloatProcessor(w, h, Convert.flatten(array))
        else throw "Invalid array type"
    } else {
        //1D input: truncate to the requested size if the array is larger
        if (array.length > width*height)
            array = array.slice(0, width*height)
        if (array.typecode == 'b') proc = new ByteProcessor(width, height, array)
        else if (array.typecode == 'h') proc = new ShortProcessor(width, height, array, null)
        else if (array.typecode == 'i') proc = new FloatProcessor(width, height, array)
        else if (array.typecode == 'f') proc = new FloatProcessor(width, height, array)
        else if (array.typecode == 'd') proc = new FloatProcessor(width, height, array)
        else throw "Invalid array type"
    }
    return new ImagePlus(title, proc)
}
|
||||
|
||||
function save_image(ip, path, format){
    /*
    Saves an image or a stack.
    If path and format are omitted, saves the image again in the same location, with the
    same format. Unknown or missing formats are silently ignored when a path is given.
    */
    if (!is_defined(path)) path = null
    if (!is_defined(format)) format = null
    var saver = new FileSaver(ip)
    if (path == null){
        saver.save()
        return
    }
    try{
        //Resolve PShell path tokens when a context is available
        path = get_context().setup.expandPath(path)
    } catch(ex) {
    }
    var is_stack = ip.getImageStackSize() > 1
    switch(format){
        case "bmp" : saver.saveAsBmp(path);  break
        case "fits": saver.saveAsFits(path); break
        case "gif" : saver.saveAsGif(path);  break
        case "jpeg": saver.saveAsJpeg(path); break
        case "lut" : saver.saveAsLut(path);  break
        case "pgm" : saver.saveAsPgm(path);  break
        case "png" : saver.saveAsPng(path);  break
        case "raw" : is_stack ? saver.saveAsRawStack(path)  : saver.saveAsRaw(path);  break
        case "txt" : saver.saveAsText(path); break
        case "tiff": is_stack ? saver.saveAsTiffStack(path) : saver.saveAsTiff(path); break
        case "zip" : saver.saveAsZip(path);  break
    }
}
|
||||
|
||||
function new_image(width, height, image_type, title, fill_color){
    /*
    Creates a new ImagePlus of the given size.
    image_type: "byte", "short", "color" or "float" (default "byte")
    fill_color: optional color used to fill the whole image
    */
    if (!is_defined(image_type)) image_type = "byte"
    if (!is_defined(title)) title = "img"
    if (!is_defined(fill_color)) fill_color = null
    var proc
    switch(image_type){
        case "byte" : proc = new ByteProcessor(width, height);  break
        case "short": proc = new ShortProcessor(width, height); break
        case "color": proc = new ColorProcessor(width, height); break
        case "float": proc = new FloatProcessor(width, height); break
        default: throw ("Invalid image type " + str(image_type))
    }
    var ret = new ImagePlus(title, proc)
    if (fill_color != null){
        //Fill the full processor area (clear any ROI first)
        proc.setColor(fill_color)
        proc.resetRoi()
        proc.fill()
    }
    return ret
}
|
||||
|
||||
function sub_image(ip, x, y, width, height){
    /*
    Crops the rectangle (x, y, width, height) and returns it as a new ImagePlus.
    */
    ip.setRoi(x, y, width, height)
    var cropped = ip.getProcessor().crop()
    return new ImagePlus(ip.getTitle() + " subimage", cropped)
}
|
||||
|
||||
//Returns a deep copy (new ImagePlus) of the image
function copy_image(ip){
    return ip.duplicate()
}
|
||||
|
||||
function copy_image_to(ip_source, ip_dest, x, y){
    /*
    Pastes the whole source image into the destination at (x, y),
    through the ImageJ internal clipboard (copy/paste).
    */
    ip_source.deleteRoi()   //copy the whole image, not a leftover ROI
    ip_source.copy()
    ip_dest.setRoi(x, y, ip_source.getWidth(), ip_source.getHeight())
    ip_dest.paste()
    ip_dest.changes = false //don't flag the destination as modified
    ip_dest.deleteRoi()
}
|
||||
|
||||
function pad_image(ip, left, right, top, bottom, fill_color){
    /*
    Returns a new ImagePlus enlarged by the given margins (in pixels), with the
    original image pasted at (left, top).
    fill_color: optional background color of the padded border.
    */
    if (!is_defined(left)) left = 0
    if (!is_defined(right)) right = 0
    if (!is_defined(top)) top = 0
    if (!is_defined(bottom)) bottom = 0
    if (!is_defined(fill_color)) fill_color = null
    var proc = ip.getProcessor()
    var padded = new_image(proc.getWidth() + left + right,
                           proc.getHeight() + top + bottom,
                           get_image_type(ip),
                           ip.getTitle() + " padded",
                           fill_color)
    //Paste the original image through the ImageJ clipboard
    ip.deleteRoi()
    ip.copy()
    padded.setRoi(left, top, proc.getWidth(), proc.getHeight())
    padded.paste()
    padded.changes = false
    padded.deleteRoi()
    return padded
}
|
||||
|
||||
function get_image_type(ip){
    /*
    Returns the pixel type as a string: "byte", "short", "color" or "float"
    ("byte" is the fallback for any other processor type).
    */
    var proc = ip.getProcessor()
    if (proc instanceof ShortProcessor) return "short"
    if (proc instanceof ColorProcessor) return "color"
    if (proc instanceof FloatProcessor) return "float"
    return "byte"
}
|
||||
///////////////////////////////////////////////////////////////////////////////////////////////////
|
||||
//Image type conversion
|
||||
///////////////////////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
function grayscale(ip, in_place){
    /*
    Converts the image to 8-bit grayscale.
    in_place: if false, operates on (and returns) a copy. Default true.
    */
    if (!is_defined(in_place)) in_place = true
    ip = (in_place==true) ? ip : ip.duplicate()
    new ImageConverter(ip).convertToGray8()
    return ip
}
|
||||
|
||||
function get_channel(ip, channel){
    /*
    Extracts one channel of a color image as a new ImagePlus.
    channel: "red", "green", "blue", "alpha" or "brightness"
    */
    var proc = ip.getProcessor()
    var channel_indexes = {"red": 1, "green": 2, "blue": 3, "alpha": 4}
    var extracted
    if (channel == "brightness")
        extracted = proc.getBrightness()
    else if (channel in channel_indexes)
        extracted = proc.getChannel(channel_indexes[channel], null)
    else
        throw ("Invalid channel " + channel)
    return new ImagePlus(ip.getTitle() + " channel: " + channel, extracted)
}
|
||||
///////////////////////////////////////////////////////////////////////////////////////////////////
|
||||
//Thresholder
|
||||
///////////////////////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
function threshold(ip, min_threshold, max_threshold, in_place){
    /*
    Applies a fixed threshold range and converts the image to a binary mask.
    */
    if (!is_defined(in_place)) in_place = true
    if (!in_place) ip = ip.duplicate()
    ip.getProcessor().setThreshold(min_threshold, max_threshold, ImageProcessor.NO_LUT_UPDATE)
    //The Thresholder plugin operates on the "current" image
    WindowManager.setTempCurrentImage(ip)
    new Thresholder().run("mask")
    return ip
}
|
||||
|
||||
function auto_threshold(ip, dark_background, method, in_place){
    /*
    Applies an automatic threshold and converts the image to a binary mask.
    method: one of AutoThresholder.getMethods() (default: the first one)
    */
    if (!is_defined(dark_background)) dark_background = false
    if (!is_defined(method)) method = AutoThresholder.getMethods()[0]
    if (!is_defined(in_place)) in_place = true
    if (!in_place) ip = ip.duplicate()
    ip.getProcessor().setAutoThreshold(method, dark_background, ImageProcessor.NO_LUT_UPDATE)
    //The Thresholder plugin operates on the "current" image
    WindowManager.setTempCurrentImage(ip)
    new Thresholder().run("mask")
    return ip
}
|
||||
///////////////////////////////////////////////////////////////////////////////////////////////////
|
||||
//Binary functions
|
||||
///////////////////////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
function binary_op(ip, op, dark_background, iterations, count, in_place){
    /*
    Applies a binary morphology operation.
    op: "erode", "dilate", "open", "close", "outline", "fill holes" or "skeletonize"
    */
    if (!is_defined(dark_background)) dark_background = false
    if (!is_defined(iterations)) iterations = 1
    if (!is_defined(count)) count = 1
    if (!is_defined(in_place)) in_place = true
    if (!in_place) ip = ip.duplicate()
    //Binary exposes count/iterations as class-level settings
    Binary.count = count
    Binary.iterations = iterations
    Prefs.blackBackground = dark_background
    var morph = new Binary()
    morph.setup(op, ip)
    morph.run(ip.getProcessor())
    return ip
}
|
||||
|
||||
//Convenience wrappers around binary_op, one per morphology operation.

function binary_erode(ip, dark_background, iterations, count, in_place){
    return binary_op(ip, "erode", dark_background, iterations, count, in_place)
}

function binary_dilate(ip, dark_background, iterations, count, in_place){
    return binary_op(ip, "dilate", dark_background, iterations, count, in_place)
}

function binary_open(ip, dark_background, iterations, count, in_place){
    return binary_op(ip, "open", dark_background, iterations, count, in_place)
}

function binary_close(ip, dark_background, iterations, count, in_place){
    return binary_op(ip, "close", dark_background, iterations, count, in_place)
}

function binary_outline(ip, dark_background, in_place){
    return binary_op(ip, "outline", dark_background, 1, 1, in_place)
}

function binary_fill_holes(ip, dark_background, in_place){
    return binary_op(ip, "fill holes", dark_background, 1, 1, in_place)
}

function binary_skeletonize(ip, dark_background, in_place){
    return binary_op(ip, "skeletonize", dark_background, 1, 1, in_place)
}
|
||||
|
||||
function analyse_particles(ip, min_size, max_size, fill_holes, exclude_edges, extra_measurements,print_table, output_image, minCirc, maxCirc){
    /*
    Runs the ImageJ particle analyzer on a binary image.
    Returns: [ResultsTable results_table, ImagePlus output_image], or undefined if the analysis fails.
    output_image = "outlines", "overlay_outlines", "masks", "overlay_masks", "roi_masks" or null
    extra_measurements = mask with Measurements.CENTROID, PERIMETER, RECT, MIN_MAX, ELLIPSE, CIRCULARITY,
        AREA_FRACTION, INTEGRATED_DENSITY, INVERT_Y, FERET, KURTOSIS, MEDIAN, MODE, SKEWNESS, STD_DEV
    Measurements is a mask of flags: https://imagej.nih.gov/ij/developer/api/ij/measure/Measurements.html.
    Returned ResultsTable hold public fields: https://imagej.nih.gov/ij/developer/api/ij/measure/ResultsTable.html
    Fix: extra_measurements was accepted and documented but never used; it is now OR-ed
    into the measurement mask (default 0 keeps the previous behavior).
    */
    if (!is_defined(fill_holes)) fill_holes = true
    if (!is_defined(exclude_edges)) exclude_edges = true
    if (!is_defined(extra_measurements)) extra_measurements = 0
    if (!is_defined(print_table)) print_table = false
    if (!is_defined(output_image)) output_image = "outlines"
    if (!is_defined(minCirc)) minCirc = 0.0
    if (!is_defined(maxCirc)) maxCirc = 1.0
    var rt = new ResultsTable()
    var show_summary = false
    var options = ParticleAnalyzer.SHOW_RESULTS | ParticleAnalyzer.CLEAR_WORKSHEET
    if (show_summary) options |= ParticleAnalyzer.DISPLAY_SUMMARY
    if (output_image == "outlines") options |= ParticleAnalyzer.SHOW_OUTLINES
    else if (output_image == "overlay_outlines") options |= ParticleAnalyzer.SHOW_OVERLAY_OUTLINES
    else if (output_image == "masks") options |= ParticleAnalyzer.SHOW_MASKS
    else if (output_image == "overlay_masks") options |= ParticleAnalyzer.SHOW_OVERLAY_MASKS
    else if (output_image == "roi_masks") options |= ParticleAnalyzer.SHOW_ROI_MASKS
    if (exclude_edges) options |= ParticleAnalyzer.EXCLUDE_EDGE_PARTICLES
    if (fill_holes) options |= ParticleAnalyzer.INCLUDE_HOLES
    var measurements = Measurements.AREA | Measurements.MEAN | Measurements.CENTER_OF_MASS | Measurements.RECT | extra_measurements
    var pa = new ParticleAnalyzer(options, measurements, rt, min_size, max_size, minCirc, maxCirc)
    pa.setHideOutputImage(true)  //the output image is returned, not displayed
    ParticleAnalyzer.setResultsTable(rt)
    if (pa.analyze(ip)){
        if (print_table){
            print (rt.getColumnHeadings())
            for (var row = 0; row < rt.counter; row++)
                print (rt.getRowAsString(row))
        }
        return [rt, pa.getOutputImage()]
    }
}
|
||||
|
||||
///////////////////////////////////////////////////////////////////////////////////////////////////
|
||||
//Image operators
|
||||
///////////////////////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
function op_image(ip1, ip2, op, float_result, in_place){
    /*
    Applies a binary operation between two images; the result goes into ip1
    (or into a copy of it when in_place is false), which is returned.
    op = "add","subtract", "multiply","divide", "and", "or", "xor", "min", "max", "average", "difference" or "copy"
    float_result: if true, a float (32-bit) result is requested.
    Fixes: the original appended " float" to a variable that was never passed to the
    calculator (float_result had no effect), and the in_place handling duplicated an
    unrelated global "ip" instead of ip1.
    */
    if (!is_defined(float_result)) float_result = false
    if (!is_defined(in_place)) in_place = true
    ip1 = in_place ? ip1 : ip1.duplicate()
    //NOTE(review): confirm " float" is the option string ImageCalculator accepts for a 32-bit result
    var pars = float_result ? (op + " float") : op
    new ImageCalculator().run(pars, ip1, ip2)
    return ip1
}
|
||||
|
||||
function op_const(ip, op, val, in_place){
    /*
    Applies an operation between the image and a constant value.
    op = "add","subtract", "multiply","divide", "and", "or", "xor", "min", "max", "gamma", "set" or "log", "exp", "sqr", "sqrt","abs"
    Fix: the documentation said "subtract" but only 'sub' was accepted; both now work.
    Note: "divide" with val==0 and "gamma" outside ]0.05, 5.0[ fall through to the
    "Invalid operation" error (original behavior, kept for compatibility).
    */
    if (!is_defined(in_place)) in_place = true
    ip = in_place ? ip : ip.duplicate()
    var pr = ip.getProcessor()
    if (op == 'add') pr.add(val)
    else if ((op == 'sub') || (op == 'subtract')) pr.subtract(val)
    else if (op == 'multiply') pr.multiply(val)
    else if (op == 'divide' && val!=0) pr.multiply(1.0/val)
    else if (op == 'and') pr.and(val)
    else if (op == 'or') pr.or(val)
    else if (op == 'xor') pr.xor(val)
    else if (op == 'min') {pr.min(val);pr.resetMinAndMax()}
    else if (op == 'max') {pr.max(val);pr.resetMinAndMax()}
    else if (op == 'gamma' && (0.05 < val) && (val < 5.0)) pr.gamma(val)
    else if (op == 'set') pr.set(val)
    else if (op == 'log') pr.log()
    else if (op == 'exp') pr.exp()
    else if (op == 'sqr') pr.sqr()
    else if (op == 'sqrt') pr.sqrt()
    else if (op == 'abs') {pr.abs();pr.resetMinAndMax()}
    else throw "Invalid operation " + op
    return ip
}
|
||||
|
||||
function op_fft(ip1, ip2, op, do_inverse){
    /*
    Fourier-domain operation between two images.
    Images must have same sizes, and multiple of 2 height and width.
    op = "correlate" (complex conjugate multiply), "convolve" (Fourier domain multiply), "deconvolve" (Fourier domain divide)
    */
    if (!is_defined(do_inverse)) do_inverse = true
    var op_indexes = {"correlate": 0, "convolve": 1, "deconvolve": 2}
    if (!(op in op_indexes)) throw "Invalid operation " + op
    return new FFTMath().doMath(ip1, ip2, op_indexes[op], do_inverse)
}
|
||||
|
||||
function op_rank(ip, op, kernel_radius , dark_outliers ,threshold, in_place){
    /*
    Applies a rank (neighborhood) filter.
    op = "mean", "min", "max", "variance", "median", "close_maxima", "open_maxima",
         "remove_outliers", "remove_nan", "despeckle" (a median with radius 1)
    threshold/dark_outliers: used by "remove_outliers" only.
    */
    if (!is_defined(kernel_radius)) kernel_radius = 1
    if (!is_defined(dark_outliers)) dark_outliers = false
    if (!is_defined(threshold)) threshold = 50
    if (!is_defined(in_place)) in_place = true
    var filter_types = {
        "mean":            RankFilters.MEAN,
        "min":             RankFilters.MIN,
        "max":             RankFilters.MAX,
        "variance":        RankFilters.VARIANCE,
        "median":          RankFilters.MEDIAN,
        "close_maxima":    RankFilters.CLOSE,
        "open_maxima":     RankFilters.OPEN,
        "remove_outliers": RankFilters.OUTLIERS,
        "remove_nan":      RankFilters.REMOVE_NAN,
        "despeckle":       RankFilters.MEDIAN
    }
    if (!(op in filter_types)) throw "Invalid operation " + op
    if (op == "despeckle") kernel_radius = 1    //despeckle is defined as a 1-pixel median
    ip = in_place ? ip : ip.duplicate()
    var outlier_mode = dark_outliers ? RankFilters.DARK_OUTLIERS : RankFilters.BRIGHT_OUTLIERS
    new RankFilters().rank(ip.getProcessor(), kernel_radius, filter_types[op], outlier_mode, threshold)
    return ip
}
|
||||
|
||||
function op_edm(ip, op, dark_background, in_place){
    /*
    Euclidian distance map & derived operations.
    op = "edm", "watershed", "points" (ultimate points) or "voronoi"
    */
    if (!is_defined(op)) op = "edm"
    if (!is_defined(dark_background)) dark_background = false
    if (!is_defined(in_place)) in_place = true
    if (!in_place) ip = ip.duplicate()
    var proc = ip.getProcessor()
    var edm_filter = new EDM()
    Prefs.blackBackground = dark_background
    if (op == "edm"){
        //toEDM works on the inverted image when the background is dark
        if (dark_background) proc.invert()
        edm_filter.toEDM(proc)
    } else {
        edm_filter.setup(op, ip)
        edm_filter.run(proc)
    }
    return ip
}
|
||||
|
||||
//Convenience wrappers around op_edm (defaults are applied by op_edm itself).

function watershed(ip, dark_background, in_place){
    return op_edm(ip, "watershed", dark_background, in_place)
}

function ultimate_points(ip, dark_background, in_place){
    return op_edm(ip, "points", dark_background, in_place)
}

//NOTE: function name misspells "voronoi"; kept for backward compatibility
function veronoi(ip, dark_background, in_place){
    return op_edm(ip, "voronoi", dark_background, in_place)
}

function edm(ip, dark_background, in_place){
    return op_edm(ip, "edm", dark_background, in_place)
}
|
||||
|
||||
function op_filter(ip, op, in_place){
    /*
    Shortcut to the ij Filters plugin (redundant: it just calls processor methods).
    op = "invert", "smooth", "sharpen", "edge" or "add"
    */
    if (!is_defined(in_place)) in_place = true
    if (!in_place) ip = ip.duplicate()
    var filter = new Filters()
    filter.setup(op, ip)
    filter.run(ip.getProcessor())
    return ip
}
|
||||
///////////////////////////////////////////////////////////////////////////////////////////////////
|
||||
//Other operations
|
||||
///////////////////////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
function gaussian_blur(ip, sigma_x, sigma_y, accuracy, in_place){
    /*
    Gaussian blur with independent x/y sigmas (default 3.0) and the given accuracy (default 0.01).
    */
    if (!is_defined(sigma_x)) sigma_x = 3.0
    if (!is_defined(sigma_y)) sigma_y = 3.0
    if (!is_defined(accuracy)) accuracy = 0.01
    if (!is_defined(in_place)) in_place = true
    if (!in_place) ip = ip.duplicate()
    new GaussianBlur().blurGaussian(ip.getProcessor(), sigma_x, sigma_y, accuracy)
    return ip
}
|
||||
|
||||
function find_maxima(ip, tolerance, threshold, output_type, exclude_on_edges, is_edm ){
    /*
    Finds local maxima and returns them as a new ImagePlus.
    tolerance: maxima are accepted only if protruding more than this value from the ridge to a higher maximum
    threshold: minimum height of a maximum (uncalibrated)
    output_type = SINGLE_POINTS, IN_TOLERANCE or SEGMENTED. No output image is created for output types POINT_SELECTION, LIST and COUNT.
    */
    if (!is_defined(tolerance)) tolerance = 25
    if (!is_defined(threshold)) threshold = ImageProcessor.NO_THRESHOLD
    if (!is_defined(output_type)) output_type = MaximumFinder.IN_TOLERANCE
    if (!is_defined(exclude_on_edges)) exclude_on_edges = false
    if (!is_defined(is_edm)) is_edm = false
    var mask = new MaximumFinder().findMaxima(ip.getProcessor(), tolerance, threshold, output_type, exclude_on_edges, is_edm)
    return new ImagePlus(ip.getTitle() + " maxima", mask)
}
|
||||
|
||||
function get_maxima_points(ip, tolerance, exclude_on_edges){
    /*
    Returns the coordinates of the local maxima as [xpoints, ypoints].
    Fix: the original "return (x, y)" was a Python-tuple leftover - in JavaScript
    the comma operator made it return only ypoints.
    */
    if (!is_defined(tolerance)) tolerance = 25
    if (!is_defined(exclude_on_edges)) exclude_on_edges = false
    var polygon = new MaximumFinder().getMaxima(ip.getProcessor(), tolerance, exclude_on_edges)
    return [polygon.xpoints, polygon.ypoints]
}
|
||||
|
||||
function enhance_contrast(ip, equalize_histo, saturated_pixels, normalize, stack_histo, in_place){
    /*
    Histogram equalization (equalize_histo=true) or contrast stretching.
    saturated_pixels/normalize: used by the stretching branch only.
    NOTE(review): stack_histo is currently accepted but not used - confirm intended.
    */
    if (!is_defined(equalize_histo)) equalize_histo = true
    if (!is_defined(saturated_pixels)) saturated_pixels = 0.5
    if (!is_defined(normalize)) normalize = false
    if (!is_defined(stack_histo)) stack_histo = false
    if (!is_defined(in_place)) in_place = true
    ip = in_place ? ip : ip.duplicate()
    var enhancer = new ContrastEnhancer()
    var proc = ip.getProcessor()
    if (equalize_histo){
        enhancer.equalize(proc);
    } else {
        enhancer.stretchHistogram(proc, saturated_pixels)
        if (normalize)
            proc.setMinAndMax(0, (proc.getBitDepth()==32) ? 1.0 : proc.maxValue())
    }
    return ip
}
|
||||
|
||||
function shadows(ip, op, in_place){
    /*
    Applies a directional shadow effect.
    op = "north","northeast", "east", "southeast","south", "southwest", "west","northwest"
    */
    if (!is_defined(in_place)) in_place = true
    if (!in_place) ip = ip.duplicate()
    var effect = new Shadows()
    effect.setup(op, ip)
    effect.run(ip.getProcessor())
    return ip
}
|
||||
|
||||
function unsharp_mask(ip, sigma, weight, in_place){
    /*
    Unsharp-mask sharpening (the processor must be a FloatProcessor).
    sigma: gaussian blur radius (default 1.0); weight: mask weight (default 0.6).
    Defaults chosen to match the ImageJ UnsharpMask plugin defaults - the original
    produced undefined parameters when sigma/weight were omitted.
    Fix: the filter instance was assigned to "unsharp" but used through the
    undefined name "USmask", so the function always failed.
    */
    if (!is_defined(sigma)) sigma = 1.0
    if (!is_defined(weight)) weight = 0.6
    if (!is_defined(in_place)) in_place = true
    ip = in_place ? ip : ip.duplicate()
    ip.getProcessor().snapshot()    //sharpenFloat uses the snapshot as the unblurred source
    var unsharp = new UnsharpMask()
    unsharp.setup(" ", ip)
    unsharp.sharpenFloat(ip.getProcessor(), sigma, weight)
    return ip
}
|
||||
|
||||
function subtract_background(ip, radius, create_background, dark_background, use_paraboloid, do_presmooth, correctCorners, rgb_brightness, in_place){
    /*
    Rolling-ball background subtraction.
    create_background: if true the image is replaced by the computed background.
    rgb_brightness: if true operates on the brightness of an RGB image.
    */
    if (!is_defined(radius)) radius = 50
    if (!is_defined(create_background)) create_background = false
    if (!is_defined(dark_background)) dark_background = false
    if (!is_defined(use_paraboloid)) use_paraboloid = true
    if (!is_defined(do_presmooth)) do_presmooth = true
    if (!is_defined(correctCorners)) correctCorners = true
    if (!is_defined(rgb_brightness)) rgb_brightness = false
    if (!is_defined(in_place)) in_place = true
    ip = in_place ? ip : ip.duplicate()
    var subtracter = new BackgroundSubtracter()
    //note the negation: the ImageJ API takes the opposite (light background) flag
    if (rgb_brightness)
        subtracter.rollingBallBrightnessBackground(ip.getProcessor(), radius, create_background, !dark_background, use_paraboloid, do_presmooth, correctCorners)
    else
        subtracter.rollingBallBackground(ip.getProcessor(), radius, create_background, !dark_background, use_paraboloid, do_presmooth, correctCorners)
    return ip
}
|
||||
///////////////////////////////////////////////////////////////////////////////////////////////////
|
||||
//FFT
|
||||
///////////////////////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
function image_fft(ip, show){
    /*
    Computes the forward FFT of the image through the ij FFT plugin.
    Returns the ImagePlus the plugin creates (hidden when show is false).
    */
    if (!is_defined(show)) show = true
    WindowManager.setTempCurrentImage(ip)
    new FFT().run("fft")
    //TODO: how to avoid it to be created?
    //ret = ImagePlus("FHT of " + ip.getTitle(), WindowManager.getCurrentImage().getProcessor())
    var ret = WindowManager.getCurrentImage()
    if (show == false)
        ret.hide()
    return ret
}
|
||||
|
||||
function image_ffti(ip, show){
    /*
    Computes the inverse FFT of the image through the ij FFT plugin.
    Returns the ImagePlus the plugin creates (hidden when show is false).
    */
    if (!is_defined(show)) show = true
    WindowManager.setTempCurrentImage(ip)
    new FFT().run("inverse")
    //TODO: how to avoid it to be created?
    //ret = ImagePlus(ip.getTitle() + " ffti", WindowManager.getCurrentImage().getProcessor())
    var ret = WindowManager.getCurrentImage()
    if (show == false)
        ret.hide()
    return ret
}
|
||||
|
||||
function bandpass_filter(ip, small_dia_px, large_dia_px, suppress_stripes, stripes_tolerance_direction, autoscale_after_filtering, saturate_if_autoscale, display_filter, in_place){
    /*
    FFT bandpass filter.
    suppress_stripes = 0 for none, 1 for horizontal, 2 for vertical
    */
    if (!is_defined(suppress_stripes)) suppress_stripes = 0
    if (!is_defined(stripes_tolerance_direction)) stripes_tolerance_direction = 5.0
    if (!is_defined(autoscale_after_filtering)) autoscale_after_filtering = false
    if (!is_defined(saturate_if_autoscale)) saturate_if_autoscale = false
    if (!is_defined(display_filter)) display_filter = false
    if (!is_defined(in_place)) in_place = true
    ip = in_place ? ip : ip.duplicate()
    //The filter is configured through class-level settings
    FFTFilter.filterSmallDia = small_dia_px
    FFTFilter.filterLargeDia = large_dia_px
    FFTFilter.choiceIndex = suppress_stripes
    FFTFilter.toleranceDia = stripes_tolerance_direction
    FFTFilter.doScalingDia = autoscale_after_filtering
    FFTFilter.saturateDia = saturate_if_autoscale
    FFTFilter.displayFilter = display_filter
    var bandpass = new FFTFilter()
    bandpass.setup(null, ip)
    bandpass.run(ip.getProcessor())
    return ip
}
|
||||
///////////////////////////////////////////////////////////////////////////////////////////////////
|
||||
//Convolution
|
||||
///////////////////////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
//Predefined 3x3 convolution kernels for use with convolve()
KERNEL_BLUR = [[0.1111, 0.1111, 0.1111], [0.1111, 0.1111, 0.1111], [0.1111, 0.1111, 0.1111]]
KERNEL_SHARPEN = [[0.0, -0.75, 0.0], [-0.75, 4.0, -0.75], [0.0, -0.75, 0.0]]
KERNEL_SHARPEN_2 = [[-1.0, -1.0, -1.0], [-1.0, 9.0, -1.0], [-1.0, -1.0, -1.0]]
KERNEL_LIGHT = [[0.1, 0.1, 0.1], [0.1, 1.0, 0.1],[0.1, 0.1, 0.1]]
KERNEL_DARK = [[0.01, 0.01, 0.01],[0.01, 0.5, 0.01],[0.01, 0.01, 0.01]]
KERNEL_EDGE_DETECT = [[0.0, -0.75, 0.0], [-0.75, 3.0, -0.75], [0.0, -0.75, 0.0]]
KERNEL_EDGE_DETECT_2 = [[-0.5, -0.5, -0.5], [-0.5, 4.0, -0.5], [-0.5, -0.5, -0.5]]
KERNEL_DIFFERENTIAL_EDGE_DETECT = [[-1.0, 0.0, 1.0], [0.0, 0.0, 0.0], [1.0, 0.0, -1.0]]
KERNEL_PREWITT = [[-2.0, -1.0, 0.0], [-1.0, 0.0, 1.0 ], [0.0, 1.0, 2.0]]
KERNEL_SOBEL = [[2.0, 2.0, 0.0], [2.0, 0.0, -2.0 ], [0.0, -2.0, -2.0]]
|
||||
|
||||
function convolve(ip, kernel, in_place){
    /*
    Convolves the image with a kernel given as a list of lists.
    */
    if (!is_defined(in_place)) in_place = true
    ip = in_place ? ip : ip.duplicate()
    //NOTE(review): width is taken from the outer list and height from the inner one -
    //for non-square kernels confirm this matches the intended row/column order
    var kernel_width = kernel.length
    var kernel_height = kernel[0].length
    ip.getProcessor().convolve(flatten(kernel), kernel_width, kernel_height)
    return ip
}
|
||||
|
||||
///////////////////////////////////////////////////////////////////////////////////////////////////
|
||||
//Shortcut to ImageProcessor methods
|
||||
///////////////////////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
//Shortcuts to ImageProcessor methods; each returns the (possibly duplicated) image.

function invert(ip, in_place){
    if (!is_defined(in_place)) in_place = true
    if (!in_place) ip = ip.duplicate()
    ip.getProcessor().invert()
    return ip
}

function smooth(ip, in_place){
    if (!is_defined(in_place)) in_place = true
    if (!in_place) ip = ip.duplicate()
    ip.getProcessor().smooth()
    return ip
}

function sharpen(ip, in_place){
    if (!is_defined(in_place)) in_place = true
    if (!in_place) ip = ip.duplicate()
    ip.getProcessor().sharpen()
    return ip
}

//Sobel edge detection
function edges(ip, in_place){
    if (!is_defined(in_place)) in_place = true
    if (!in_place) ip = ip.duplicate()
    ip.getProcessor().findEdges()
    return ip
}

//Adds random noise of the given magnitude (see ImageProcessor.noise)
function noise(ip, sigma, in_place){
    if (!is_defined(sigma)) sigma = 25.0
    if (!is_defined(in_place)) in_place = true
    if (!in_place) ip = ip.duplicate()
    ip.getProcessor().noise(sigma)
    return ip
}
|
||||
|
||||
function remap(ip, min, max, in_place){
    /*
    Sets the display range; missing bounds are taken from the image statistics.
    */
    if (!is_defined(min)) min = null
    if (!is_defined(max)) max = null
    if (!is_defined(in_place)) in_place = true
    ip = in_place ? ip : ip.duplicate()
    if ((min == null) || (max == null)){
        var stats = get_statistics(ip, Measurements.MIN_MAX)
        if (min == null) min = stats.min
        if (max == null) max = stats.max
    }
    ip.getProcessor().setMinAndMax(min, max)
    return ip
}
|
||||
|
||||
function set_lut(ip, r, g, b){
    /*
    Sets the lookup table; r, g and b are lists of 256 integers.
    */
    //LUT takes signed bytes: map values >= 128 to their negative two's-complement
    var to_signed = function(x) {return (x<128) ? x : (x-256 )}
    ip.setLut(new LUT(to_array(r.map(to_signed), 'b'),
                      to_array(g.map(to_signed), 'b'),
                      to_array(b.map(to_signed), 'b')))
}
|
||||
|
||||
function resize(ip, width, height){
    /*
    Returns a new ImagePlus scaled to width x height.
    */
    var scaled = ip.getProcessor().resize(width, height)
    return new ImagePlus(ip.getTitle() + " resized", scaled)
}
|
||||
|
||||
//Returns a new ImagePlus reduced by pixel binning with the given factor
function binning(ip, factor){
    var binned = ip.getProcessor().bin(factor)
    return new ImagePlus(ip.getTitle() + " resized", binned)
}
|
||||
|
||||
function get_histogram(ip, hist_min, hist_max, hist_bins, roi){
    /*
    Returns the image histogram as an array.
    hist_min, hist_max, hist_bins: used only for float images (otherwise fixed to 0, 255, 256)
    roi: optional list [x, y, w, h]; when omitted the whole image is used
    */
    if (!is_defined(hist_min)) hist_min = 0
    if (!is_defined(hist_max)) hist_max = 0
    if (!is_defined(hist_bins)) hist_bins = 256
    if (!is_defined(roi)) roi = null
    if (roi == null)
        ip.deleteRoi()
    else
        ip.setRoi(roi[0],roi[1],roi[2],roi[3])
    var stats = ip.getStatistics(0, hist_bins, hist_min, hist_max)
    return to_array(stats.getHistogram())
}
|
||||
|
||||
//Returns the pixel data as a 2D int array
function get_array(ip){
    return ip.getProcessor().getIntArray()
}

//Returns the pixel values along the line (x1,y1)-(x2,y2)
function get_line(ip, x1, y1, x2, y2){
    return ip.getProcessor().getLine(x1, y1, x2, y2)
}
|
||||
|
||||
function get_pixel_range(ip){
    /*
    Returns [min, max] of the processor's current range.
    Fix: the original "return (min, max)" was a Python-tuple leftover - in JavaScript
    the comma operator made it return only the maximum.
    */
    return [ip.getProcessor().getMin(), ip.getProcessor().getMax()]
}
|
||||
|
||||
//Number of color channels of the processor
function get_num_channels(ip){
    return ip.getProcessor().getNChannels()
}

//True if the image is binary (per ImageProcessor.isBinary)
function is_binary(ip){
    return ip.getProcessor().isBinary()
}

//Pixel value at (x, y)
function get_pixel(ip, x, y){
    return ip.getProcessor().getPixel(x,y)
}
|
||||
|
||||
function get_pixel_array(ip, x, y){
    /*
    Returns the per-channel pixel values at (x, y) as a Java int[].
    Fix: "[0]*n" is Python list repetition - in JavaScript it evaluates to NaN;
    a real Java int[] of the right length is now allocated.
    */
    var IntArray = Java.type("int[]")
    var values = new IntArray(get_num_channels(ip))
    return ip.getProcessor().getPixel(x, y, values)
}
|
||||
|
||||
//Returns the processor's raw pixel array
function get_pixels(ip){
    return ip.getProcessor().getPixels()
}

//Image width in pixels
function get_width(ip){
    return ip.getProcessor().getWidth()
}

//Image height in pixels
function get_height(ip){
    return ip.getProcessor().getHeight()
}
|
||||
|
||||
function get_row(ip, y){
    /*
    Returns row y of the image as a Java int array.
    Bug fix: "[0]*width" is Python syntax; in JavaScript it evaluates to
    NaN, which makes jarray.array fail. Build the zero list with a loop.
    */
    var width = get_width(ip)
    var zeros = []
    for (var i = 0; i < width; i++){
        zeros.push(0)
    }
    var row = jarray.array(zeros, 'i')
    ip.getProcessor().getRow(0, y, row, width)
    return row
}
|
||||
|
||||
function get_col(ip, x){
    /*
    Returns column x of the image as a Java int array.
    Bug fix: "[0]*height" is Python syntax; in JavaScript it evaluates to
    NaN, which makes jarray.array fail. Build the zero list with a loop.
    */
    var height = get_height(ip)
    var zeros = []
    for (var i = 0; i < height; i++){
        zeros.push(0)
    }
    var col = jarray.array(zeros, 'i')
    ip.getProcessor().getColumn(x, 0, col, height)
    return col
}
|
||||
|
||||
function get_statistics(ip, measurements){
    /*
    Image statistics for the whole image.
    measurements: optional bit mask of ij.measure.Measurements flags
    (https://imagej.nih.gov/ij/developer/api/ij/measure/Measurements.html);
    when omitted, ImageJ's default measurement set is computed.
    The returned object exposes public fields, see
    https://imagej.nih.gov/ij/developer/api/ij/process/ImageStatistics.html
    */
    if (is_defined(measurements) && (measurements != null)){
        return ip.getStatistics(measurements)
    }
    return ip.getStatistics()
}
|
||||
///////////////////////////////////////////////////////////////////////////////////////////////////
|
||||
//Image stack functions
|
||||
///////////////////////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
function create_stack(ip_list, keep, title){
    /*
    Concatenate a list of images into a single stack.
    keep: keep (do not consume) the source images - defaults to true.
    title: optional title for the resulting stack.
    */
    keep = is_defined(keep) ? keep : true
    var stack = new Concatenator().concatenate(ip_list, keep)
    if (is_defined(title) && (title != null)){
        stack.setTitle(title)
    }
    return stack
}
|
||||
|
||||
function reslice(stack, start_at, vertically, flip, output_pixel_spacing, avoid_interpolation, title){
    /*
    Re-slice an image stack along another axis (ImageJ Slicer plugin).
    Defaults: start_at="Top", vertically=true, flip=true,
    output_pixel_spacing=1.0, avoid_interpolation=true, title unchanged.
    Returns the resliced stack.
    */
    var slicer = new Slicer()
    slicer.startAt = is_defined(start_at) ? start_at : "Top"
    slicer.rotate = is_defined(vertically) ? vertically : true
    slicer.flip = is_defined(flip) ? flip : true
    slicer.nointerpolate = is_defined(avoid_interpolation) ? avoid_interpolation : true
    slicer.outputZSpacing = is_defined(output_pixel_spacing) ? output_pixel_spacing : 1.0
    var result = slicer.reslice(stack)
    if (is_defined(title) && (title != null)){
        result.setTitle(title)
    }
    return result
}
|
||||
707
script/_Lib/ijutils.py
Normal file
707
script/_Lib/ijutils.py
Normal file
@@ -0,0 +1,707 @@
|
||||
####################################################################################################
|
||||
# Facade to ImageJ functionality
|
||||
####################################################################################################
|
||||
|
||||
#More information on:
|
||||
# Image: https://imagej.nih.gov/ij/docs/guide/146-28.html#toc-Section-28
|
||||
# Process: https://imagej.nih.gov/ij/docs/guide/146-29.html#toc-Section-29
|
||||
# Analyze: https://imagej.nih.gov/ij/docs/guide/146-30.html#toc-Section-30
|
||||
|
||||
import ch.psi.utils.Convert as Convert
|
||||
import ch.psi.pshell.imaging.Utils as Utils
|
||||
from startup import get_context
|
||||
import java.awt.image.BufferedImage as BufferedImage
|
||||
import jarray
|
||||
|
||||
import ij.IJ as IJ
|
||||
import ij.ImageJ as ImageJ
|
||||
import ij.WindowManager as WindowManager
|
||||
import ij.ImagePlus as ImagePlus
|
||||
import ij.Prefs as Prefs
|
||||
import ij.io.FileSaver as FileSaver
|
||||
|
||||
import ij.process.ImageProcessor as ImageProcessor
|
||||
import ij.process.ByteProcessor as ByteProcessor
|
||||
import ij.process.ShortProcessor as ShortProcessor
|
||||
import ij.process.ColorProcessor as ColorProcessor
|
||||
import ij.process.FloatProcessor as FloatProcessor
|
||||
import ij.process.ImageConverter as ImageConverter
|
||||
import ij.process.AutoThresholder as AutoThresholder
|
||||
import ij.process.LUT as LUT
|
||||
import ij.measure.Measurements as Measurements
|
||||
import ij.measure.ResultsTable as ResultsTable
|
||||
import ij.plugin.filter.Analyzer as Analyzer
|
||||
import ij.plugin.filter.GaussianBlur as GaussianBlur
|
||||
import ij.plugin.filter.Filters as Filters
|
||||
import ij.plugin.filter.FFTFilter as FFTFilter
|
||||
import ij.plugin.filter.BackgroundSubtracter as BackgroundSubtracter
|
||||
import ij.plugin.filter.EDM as EDM
|
||||
import ij.plugin.filter.Shadows as Shadows
|
||||
import ij.plugin.filter.UnsharpMask as UnsharpMask
|
||||
import ij.plugin.filter.MaximumFinder as MaximumFinder
|
||||
import ij.plugin.filter.EDM as EDM
|
||||
import ij.plugin.filter.Shadows as Shadows
|
||||
import ij.plugin.filter.UnsharpMask as UnsharpMask
|
||||
import ij.plugin.filter.RankFilters as RankFilters
|
||||
import ij.plugin.filter.Convolver as Convolver
|
||||
import ij.plugin.filter.ParticleAnalyzer as ParticleAnalyzer
|
||||
|
||||
import ij.plugin.ContrastEnhancer as ContrastEnhancer
|
||||
import ij.plugin.Thresholder as Thresholder
|
||||
import ij.plugin.ImageCalculator as ImageCalculator
|
||||
import ij.plugin.FFT as FFT
|
||||
import ij.plugin.Concatenator as Concatenator
|
||||
|
||||
#ImageJ customizations
|
||||
import ch.psi.pshell.imaging.ij.FFTMath as FFTMath
|
||||
import ch.psi.pshell.imaging.ij.FFTFilter as FFTFilter
|
||||
import ch.psi.pshell.imaging.ij.Binary as Binary
|
||||
import ch.psi.pshell.imaging.ij.Slicer as Slicer
|
||||
|
||||
|
||||
#This eliminates the error messages due to the bug on ij.gui.ImageWindow row 555 (ij is null)
|
||||
if not "_image_j" in globals().keys():
|
||||
_image_j = ImageJ(None, ImageJ.NO_SHOW)
|
||||
|
||||
###################################################################################################
|
||||
#Image creation, copying & saving
|
||||
###################################################################################################
|
||||
def load_image(image, title = "img"):
    """
    Load an image into an ij.ImagePlus.
    image: file name (expanded through the setup's path macros) or a BufferedImage.
    title: title for the returned ImagePlus.
    """
    if isinstance(image, str):
        # Bug fix: "file" was only assigned inside the try block, so if
        # expandPath raised, Utils.newImage(file) failed with a NameError
        # instead of falling back to the assembly lookup. Initialize the
        # name first (also avoids shadowing the builtin "file").
        file_name = image
        try:
            file_name = get_context().setup.expandPath(image)
        except:
            pass
        try:
            image = Utils.newImage(file_name)
        except:
            # Not found on disk: try loading from the application assembly.
            image = get_context().setup.getAssemblyImage(image)
    return ImagePlus(title, image)
|
||||
|
||||
|
||||
|
||||
def load_array(array, width=None, height=None, title = "img"):
    """
    Create an ImagePlus from a numeric array.
    array: a 1d array when both width and height are given, otherwise a 2d
           array (list of rows) that is flattened row-major.
    width, height: image dimensions for the 1d case; supply both or neither.
    title: title for the returned ImagePlus.
    Raises Exception for unsupported element types.
    """
    #2D
    if (width==None) and (height==None):
        # '[B', '[S', ... are Jython typecodes for Java arrays of arrays
        # (byte[][], short[][], ...); int/float/double all map to a FloatProcessor.
        if array.typecode == '[B': proc = ByteProcessor(len(array[0]), len(array), Convert.flatten(array))
        elif array.typecode == '[S': proc = ShortProcessor(len(array[0]), len(array), Convert.flatten(array), None)
        elif array.typecode in ['[I','[F', '[D']: proc = FloatProcessor(len(array[0]), len(array), Convert.flatten(array))
        else: raise Exception("Invalid array type")
    #1D
    else:
        # NOTE(review): if only one of width/height is given, width*height
        # below fails with a TypeError - callers must supply both. TODO confirm.
        if (len(array) > width*height):
            # Silently truncate extra trailing elements.
            array = array[:(width*height)]
        if array.typecode == 'b': proc = ByteProcessor(width, height, array)
        elif array.typecode == 'h': proc = ShortProcessor(width, height, array, None)
        elif array.typecode in ['i','f','d']: proc = FloatProcessor(width, height, array)
        else: raise Exception("Invalid array type")
    return ImagePlus(title, proc)
|
||||
|
||||
def save_image(ip, path=None, format = None):
    """
    Save an image or stack using ImageJ's FileSaver.
    path: target file; when None the image is re-saved to its original
          location in its original format.
    format: "bmp", "fits", "gif", "jpeg", "lut", "pgm", "png", "raw",
            "txt", "tiff" or "zip". When None it is inferred from the
            path extension ("jpg" and "tif" are accepted as aliases).
    Raises Exception for an unsupported format (previously an unknown or
    omitted format silently saved nothing).
    """
    fs = FileSaver(ip)
    if path == None:
        fs.save()
        return
    try:
        path = get_context().setup.expandPath(path)
    except:
        pass
    if format is None:
        # Infer the format from the file extension, e.g. "img.png" -> "png".
        name = str(path)
        ext = name.rsplit('.', 1)[-1].lower() if '.' in name else ''
        format = {'jpg': 'jpeg', 'tif': 'tiff'}.get(ext, ext)
    is_stack = ip.getImageStackSize() > 1
    if format == "bmp": fs.saveAsBmp(path)
    elif format == "fits": fs.saveAsFits(path)
    elif format == "gif": fs.saveAsGif(path)
    elif format == "jpeg": fs.saveAsJpeg(path)
    elif format == "lut": fs.saveAsLut(path)
    elif format == "pgm": fs.saveAsPgm(path)
    elif format == "png": fs.saveAsPng(path)
    elif format == "raw" and is_stack: fs.saveAsRawStack(path)
    elif format == "raw": fs.saveAsRaw(path)
    elif format == "txt": fs.saveAsText(path)
    elif format == "tiff" and is_stack: fs.saveAsTiffStack(path)
    elif format == "tiff": fs.saveAsTiff(path)
    elif format == "zip": fs.saveAsZip(path)
    else: raise Exception("Invalid image format " + str(format))
|
||||
|
||||
def new_image(width, height, image_type="byte", title = "img", fill_color = None):
    """
    Create a blank ImagePlus of the given size.
    image_type: "byte", "short", "color" or "float".
    fill_color: optional color the new image is flood-filled with.
    Raises Exception for an unknown image_type.
    """
    processors = {
        "byte": ByteProcessor,
        "short": ShortProcessor,
        "color": ColorProcessor,
        "float": FloatProcessor,
    }
    if image_type not in processors:
        raise Exception("Invalid image type " + str(image_type))
    proc = processors[image_type](width, height)
    img = ImagePlus(title, proc)
    if fill_color is not None:
        proc.setColor(fill_color)
        proc.resetRoi()
        proc.fill()
    return img
|
||||
|
||||
def sub_image(ip, x, y, width, height):
|
||||
"""
|
||||
Returns new ImagePlus
|
||||
"""
|
||||
ip.setRoi(x, y, width, height)
|
||||
p=ip.getProcessor().crop()
|
||||
return ImagePlus(ip.getTitle() + " subimage", p)
|
||||
|
||||
def copy_image(ip):
|
||||
return ip.duplicate()
|
||||
|
||||
def copy_image_to(ip_source, ip_dest, x, y):
|
||||
ip_source.deleteRoi()
|
||||
ip_source.copy()
|
||||
ip_dest.setRoi(x, y, ip_source.getWidth(), ip_source.getHeight())
|
||||
ip_dest.paste()
|
||||
ip_dest.changes = False
|
||||
ip_dest.deleteRoi()
|
||||
|
||||
def pad_image(ip, left=0, right=0, top=0, bottom=0, fill_color = None):
|
||||
p=ip.getProcessor()
|
||||
width = p.getWidth() + left + right
|
||||
height = p.getHeight() + top + bottom
|
||||
image_type = get_image_type(ip)
|
||||
ret = new_image(width, height, image_type, ip.getTitle() + " padded", fill_color)
|
||||
ip.deleteRoi()
|
||||
ip.copy()
|
||||
ret.setRoi(left, top, p.getWidth(), p.getHeight())
|
||||
ret.paste()
|
||||
ret.changes = False
|
||||
ret.deleteRoi()
|
||||
return ret
|
||||
|
||||
def get_image_type(ip):
    """
    Returns the processor type of the image: "byte", "short", "color" or "float".
    Anything that is not a Short/Color/FloatProcessor reports as "byte".
    """
    proc = ip.getProcessor()
    # Exact type comparison (not isinstance), matching new_image's mapping.
    for processor_class, name in ((ShortProcessor, "short"),
                                  (ColorProcessor, "color"),
                                  (FloatProcessor, "float")):
        if type(proc) == processor_class:
            return name
    return "byte"
|
||||
|
||||
###################################################################################################
|
||||
#Image type conversion
|
||||
###################################################################################################
|
||||
def grayscale(ip, do_scaling=None, in_place=True):
    """
    Convert the image to 8-bit grayscale.
    do_scaling: when not None, passed to ImageConverter.setDoScaling
                (scale pixel values to the 0-255 range during conversion).
    in_place: when False, convert and return a duplicate instead.
    Returns the converted ImagePlus.
    """
    target = ip if in_place else ip.duplicate()
    converter = ImageConverter(target)
    if do_scaling is not None:
        converter.setDoScaling(do_scaling)
    converter.convertToGray8()
    return target
|
||||
|
||||
def get_channel(ip, channel):
|
||||
"""
|
||||
Return a channel from a color image as a new ImagePlus.
|
||||
channel: "red", "green","blue", "alpha", "brightness",
|
||||
"""
|
||||
proc = ip.getProcessor()
|
||||
if channel == "red": ret = proc.getChannel(1, None)
|
||||
elif channel == "green": ret = proc.getChannel(2, None)
|
||||
elif channel == "blue": ret = proc.getChannel(3, None)
|
||||
elif channel == "alpha": ret = proc.getChannel(4, None)
|
||||
elif channel == "brightness": ret = proc.getBrightness()
|
||||
else: raise Exception("Invalid channel " + str(channel))
|
||||
return ImagePlus(ip.getTitle() + " channel: " + channel, ret)
|
||||
|
||||
###################################################################################################
|
||||
#Thresholder
|
||||
###################################################################################################
|
||||
def threshold(ip, min_threshold, max_threshold, in_place=True):
|
||||
ip = ip if in_place else ip.duplicate()
|
||||
ip.getProcessor().setThreshold(min_threshold, max_threshold, ImageProcessor.NO_LUT_UPDATE)
|
||||
WindowManager.setTempCurrentImage(ip)
|
||||
Thresholder().run("mask")
|
||||
return ip
|
||||
|
||||
def auto_threshold(ip, dark_background = False, method = AutoThresholder.getMethods()[0], in_place=True):
|
||||
ip = ip if in_place else ip.duplicate()
|
||||
ip.getProcessor().setAutoThreshold(method, dark_background , ImageProcessor.NO_LUT_UPDATE)
|
||||
WindowManager.setTempCurrentImage(ip)
|
||||
thresholder=Thresholder().run("mask")
|
||||
return ip
|
||||
|
||||
###################################################################################################
|
||||
#Binary functions
|
||||
###################################################################################################
|
||||
def binary_op(ip, op, dark_background=False, iterations=1, count=1, in_place=True):
|
||||
"""
|
||||
op = "erode","dilate", "open","close", "outline", "fill holes", "skeletonize"
|
||||
"""
|
||||
ip = ip if in_place else ip.duplicate()
|
||||
binary = Binary()
|
||||
Binary.count = count
|
||||
Binary.iterations = iterations
|
||||
Prefs.blackBackground=dark_background
|
||||
binary.setup(op, ip)
|
||||
binary.run(ip.getProcessor())
|
||||
return ip
|
||||
|
||||
def binary_erode(ip, dark_background=False, iterations=1, count=1, in_place=True):
|
||||
return binary_op(ip, "erode", dark_background, iterations, count, in_place)
|
||||
|
||||
def binary_dilate(ip, dark_background=False, iterations=1, count=1, in_place=True):
|
||||
return binary_op(ip, "dilate", dark_background, iterations, count, in_place)
|
||||
|
||||
def binary_open(ip, dark_background=False, iterations=1, count=1, in_place=True):
|
||||
return binary_op(ip, "open", dark_background, iterations, count, in_place)
|
||||
|
||||
def binary_close(ip, dark_background=False, iterations=1, count=1, in_place=True):
    """
    Morphological close (dilate then erode) on a binary image.
    Bug fix: in_place was accepted but not forwarded to binary_op, so
    close always modified the input image even when in_place=False.
    """
    return binary_op(ip, "close", dark_background, iterations, count, in_place)
|
||||
|
||||
def binary_outline(ip, dark_background=False, in_place=True):
|
||||
return binary_op(ip, "outline", dark_background, in_place=in_place)
|
||||
|
||||
def binary_fill_holes(ip, dark_background=False, in_place=True):
|
||||
return binary_op(ip, "fill holes", dark_background, in_place=in_place)
|
||||
|
||||
def binary_skeletonize(ip, dark_background=False, in_place=True):
|
||||
return binary_op(ip, "skeletonize", dark_background, in_place=in_place)
|
||||
|
||||
def analyse_particles(ip, min_size, max_size, fill_holes = True, exclude_edges = True, extra_measurements = 0, \
|
||||
print_table = False, output_image = "outlines", minCirc = 0.0, maxCirc = 1.0):
|
||||
"""
|
||||
Returns: tuple (ResultsTable results_table, ImagePlus output_image)
|
||||
output_image = "outlines", "overlay_outlines", "masks", "overlay_masks", "roi_masks" or None
|
||||
extra_measurements = mask with Measurements.CENTROID, PERIMETER, RECT, MIN_MAX, ELLIPSE, CIRCULARITY, AREA_FRACTION, INTEGRATED_DENSITY, INVERT_Y, FERET, KURTOSIS, MEDIAN, MODE, SKEWNESS, STD_DEV
|
||||
Measurements is a mask of flags: https://imagej.nih.gov/ij/developer/api/ij/measure/Measurements.html.
|
||||
Returned ResultsTable hold public fields: https://imagej.nih.gov/ij/developer/api/ij/measure/ResultsTable.html
|
||||
|
||||
"""
|
||||
rt = ResultsTable()
|
||||
show_summary = False
|
||||
options = ParticleAnalyzer.SHOW_RESULTS | ParticleAnalyzer.CLEAR_WORKSHEET
|
||||
"""
|
||||
ParticleAnalyzer.SHOW_ROI_MASKS | \
|
||||
#ParticleAnalyzer.RECORD_STARTS | \
|
||||
#ParticleAnalyzer.ADD_TO_MANAGER | \
|
||||
#ParticleAnalyzer.FOUR_CONNECTED | \
|
||||
#ParticleAnalyzer.IN_SITU_SHOW | \
|
||||
#ParticleAnalyzer.SHOW_NONE | \
|
||||
"""
|
||||
if show_summary: options = options | ParticleAnalyzer.DISPLAY_SUMMARY
|
||||
if output_image == "outlines": options = options | ParticleAnalyzer.SHOW_OUTLINES
|
||||
elif output_image == "overlay_outlines": options = options | ParticleAnalyzer.SHOW_OVERLAY_OUTLINES
|
||||
elif output_image == "masks": options = options | ParticleAnalyzer.SHOW_MASKS
|
||||
elif output_image == "overlay_masks": options = options | ParticleAnalyzer.SHOW_OVERLAY_MASKS
|
||||
elif output_image == "roi_masks": options = options | ParticleAnalyzer.SHOW_ROI_MASKS
|
||||
#ParticleAnalyzer.SHOW_ROI_MASKS
|
||||
if exclude_edges: options = options | ParticleAnalyzer.EXCLUDE_EDGE_PARTICLES
|
||||
if fill_holes: options = options | ParticleAnalyzer.INCLUDE_HOLES
|
||||
measurements = Measurements.AREA | Measurements.MEAN | Measurements.CENTER_OF_MASS | Measurements.RECT
|
||||
pa = ParticleAnalyzer(options, measurements, rt, min_size, max_size, minCirc, maxCirc)
|
||||
pa.setHideOutputImage(True)
|
||||
pa.setResultsTable(rt)
|
||||
if pa.analyze(ip):
|
||||
if print_table:
|
||||
print rt.getColumnHeadings()
|
||||
for row in range (rt.counter):
|
||||
print rt.getRowAsString(row)
|
||||
return (rt, pa.getOutputImage())
|
||||
|
||||
###################################################################################################
|
||||
#Image operators
|
||||
###################################################################################################
|
||||
def op_image(ip1, ip2, op, float_result=False, in_place=True):
    """
    Binary image operation: ip1 = ip1 <op> ip2.
    op = "add","subtract", "multiply","divide", "and", "or", "xor", "min", "max", "average", "difference" or "copy"
    float_result: when True, request a 32-bit float result from ImageCalculator.
    in_place: when False, operate on (and return) a duplicate of ip1.
    Returns the modified ip1 (or its duplicate).
    """
    ip1 = ip1 if in_place else ip1.duplicate()
    # Bug fix: the " float" suffix used to be appended to a dead copy of the
    # op string, so float_result was silently ignored.
    if float_result:
        op = op + " float"
    ImageCalculator().run(op, ip1, ip2)
    return ip1
|
||||
|
||||
def op_const(ip, op, val, in_place=True):
    """
    Apply a constant operation (or a unary function) to the image processor.
    op = "add", "sub"/"subtract", "multiply", "divide", "and", "or", "xor",
         "min", "max", "gamma", "set", "log", "exp", "sqr", "sqrt", "abs"
    val: the constant operand (ignored by log/exp/sqr/sqrt/abs).
    in_place: when False, operate on (and return) a duplicate.
    Raises Exception for an unknown op; as before, divide by 0 and gamma
    outside (0.05, 5.0) also fall through to the error branch.
    """
    ip = ip if in_place else ip.duplicate()
    pr = ip.getProcessor()
    if op == 'add': pr.add(val)
    # Bug fix: the docstring advertised "subtract" but only 'sub' was accepted.
    elif op in ('sub', 'subtract'): pr.subtract(val)
    elif op == 'multiply': pr.multiply(val)
    elif op == 'divide' and val != 0: pr.multiply(1.0 / val)
    # Bug fix: 'and'/'or' are Python keywords, so "pr.and(val)" is a syntax
    # error; the Java methods must be reached through getattr.
    elif op == 'and': getattr(pr, 'and')(val)
    elif op == 'or': getattr(pr, 'or')(val)
    elif op == 'xor': pr.xor(val)
    elif op == 'min':
        pr.min(val)
        pr.resetMinAndMax()
    elif op == 'max':
        pr.max(val)
        pr.resetMinAndMax()
    elif op == 'gamma' and 0.05 < val < 5.0: pr.gamma(val)
    elif op == 'set': pr.set(val)
    elif op == 'log': pr.log()
    elif op == 'exp': pr.exp()
    elif op == 'sqr': pr.sqr()
    elif op == 'sqrt': pr.sqrt()
    elif op == 'abs':
        pr.abs()
        pr.resetMinAndMax()
    else: raise Exception("Invalid operation " + str(op))
    return ip
|
||||
|
||||
def op_fft(ip1, ip2, op, do_inverse = True) :
|
||||
"""
|
||||
Images must have same sizes, and multiple of 2 height and width.
|
||||
op = "correlate" (complex conjugate multiply), "convolve" (Fourier domain multiply), "deconvolve" (Fourier domain divide)
|
||||
"""
|
||||
if op == "correlate": op_index = 0
|
||||
elif op == "convolve": op_index = 1
|
||||
elif op == "deconvolve": op_index = 2
|
||||
else: raise Exception("Invalid operation " + str(op))
|
||||
return FFTMath().doMath(ip1, ip2, op_index, do_inverse)
|
||||
|
||||
def op_rank(ip, op, kernel_radius=1, dark_outliers=False, threshold=50, in_place=True):
    """
    Apply one of ImageJ's rank (neighborhood) filters.
    op = "mean", "min", "max", "variance", "median", "close_maxima",
         "open_maxima", "remove_outliers", "remove_nan", "despeckle"
    kernel_radius: neighborhood radius (forced to 1 for "despeckle").
    dark_outliers / threshold: only meaningful for "remove_outliers".
    in_place: when False, filter (and return) a duplicate.
    Raises Exception for an unknown op.
    """
    filter_types = {
        "mean": RankFilters.MEAN,
        "min": RankFilters.MIN,
        "max": RankFilters.MAX,
        "variance": RankFilters.VARIANCE,
        "median": RankFilters.MEDIAN,
        "close_maxima": RankFilters.CLOSE,
        "open_maxima": RankFilters.OPEN,
        "remove_outliers": RankFilters.OUTLIERS,
        "remove_nan": RankFilters.REMOVE_NAN,
    }
    if op == "despeckle":
        # Despeckle is defined as a radius-1 median filter.
        filter_type, kernel_radius = RankFilters.MEDIAN, 1
    elif op in filter_types:
        filter_type = filter_types[op]
    else:
        raise Exception("Invalid operation " + str(op))
    target = ip if in_place else ip.duplicate()
    outlier_mode = RankFilters.DARK_OUTLIERS if dark_outliers else RankFilters.BRIGHT_OUTLIERS
    RankFilters().rank(target.getProcessor(), kernel_radius, filter_type, outlier_mode, threshold)
    return target
|
||||
|
||||
def op_edm(ip, op="edm", dark_background=False, in_place=True):
|
||||
"""
|
||||
Euclidian distance map & derived operations
|
||||
op ="edm", "watershed","points", "voronoi"
|
||||
"""
|
||||
ip = ip if in_place else ip.duplicate()
|
||||
pr = ip.getProcessor()
|
||||
edm=EDM()
|
||||
Prefs.blackBackground=dark_background
|
||||
if op=="edm":
|
||||
#pr.setPixels(0, edm.makeFloatEDM(pr, 0, False));
|
||||
#pr.resetMinAndMax();
|
||||
if dark_background:
|
||||
pr.invert()
|
||||
edm.toEDM(pr)
|
||||
else:
|
||||
edm.setup(op, ip)
|
||||
edm.run(pr)
|
||||
return ip
|
||||
|
||||
def watershed(ip, dark_background=False, in_place=True):
|
||||
return op_edm(ip, "watershed", dark_background, in_place)
|
||||
|
||||
def ultimate_points(ip, dark_background=False, in_place=True):
|
||||
return op_edm(ip, "points", dark_background, in_place)
|
||||
|
||||
def veronoi(ip, dark_background=False, in_place=True):
    """
    Voronoi partition of a binary image (via the EDM plugin).
    NOTE: the name is a historical misspelling, kept for backward
    compatibility; prefer the correctly spelled alias below in new code.
    """
    return op_edm(ip, "voronoi", dark_background, in_place)

# Correctly spelled alias (same function object).
voronoi = veronoi
|
||||
|
||||
def edm(ip, dark_background=False, in_place=True):
|
||||
return op_edm(ip, "edm", dark_background, in_place)
|
||||
|
||||
def op_filter(ip, op, in_place=True):
|
||||
"""
|
||||
This is redundant as just calls processor methods.
|
||||
op ="invert", "smooth", "sharpen", "edge", "add"
|
||||
"""
|
||||
ip = ip if in_place else ip.duplicate()
|
||||
f = Filters()
|
||||
f.setup(op, ip )
|
||||
f.run(ip.getProcessor())
|
||||
return ip
|
||||
|
||||
###################################################################################################
|
||||
#Other operations
|
||||
###################################################################################################
|
||||
def gaussian_blur(ip, sigma_x=3.0, sigma_y=3.0, accuracy = 0.01, in_place=True):
|
||||
ip = ip if in_place else ip.duplicate()
|
||||
GaussianBlur().blurGaussian(ip.getProcessor(), sigma_x, sigma_y, accuracy)
|
||||
return ip
|
||||
|
||||
def find_maxima(ip, tolerance=25, threshold = ImageProcessor.NO_THRESHOLD, output_type=MaximumFinder.IN_TOLERANCE, exclude_on_edges = False, is_edm = False):
|
||||
"""
|
||||
Returns new ImagePlus
|
||||
tolerance: maxima are accepted only if protruding more than this value from the ridge to a higher maximum
|
||||
threshhold: minimum height of a maximum (uncalibrated);
|
||||
output_type = SINGLE_POINTS, IN_TOLERANCE or SEGMENTED. No output image is created for output types POINT_SELECTION, LIST and COUNT.
|
||||
"""
|
||||
byte_processor = MaximumFinder().findMaxima(ip.getProcessor(), tolerance, threshold, output_type, exclude_on_edges, is_edm)
|
||||
return ImagePlus(ip.getTitle() + " maxima", byte_processor)
|
||||
|
||||
|
||||
def get_maxima_points(ip, tolerance=25, exclude_on_edges = False):
|
||||
polygon = MaximumFinder().getMaxima(ip.getProcessor(), tolerance, exclude_on_edges)
|
||||
return (polygon.xpoints, polygon.ypoints)
|
||||
|
||||
def enhance_contrast(ip, equalize_histo = True, saturated_pixels = 0.5, normalize = False, stack_histo = False, in_place=True):
|
||||
ip = ip if in_place else ip.duplicate()
|
||||
ce = ContrastEnhancer()
|
||||
if equalize_histo:
|
||||
ce.equalize(ip.getProcessor());
|
||||
else:
|
||||
ce.stretchHistogram(ip.getProcessor(), saturated_pixels)
|
||||
if normalize:
|
||||
ip.getProcessor().setMinAndMax(0,1.0 if (ip.getProcessor().getBitDepth()==32) else ip.getProcessor().maxValue())
|
||||
return ip
|
||||
|
||||
def shadows(ip, op, in_place=True):
|
||||
"""
|
||||
op ="north","northeast", "east", "southeast","south", "southwest", "west","northwest"
|
||||
"""
|
||||
ip = ip if in_place else ip.duplicate()
|
||||
shadows= Shadows()
|
||||
shadows.setup(op, ip)
|
||||
shadows.run(ip.getProcessor())
|
||||
return ip
|
||||
|
||||
def unsharp_mask(ip, sigma, weight, in_place=True):
    """
    Unsharp masking (sharpening) of the image.
    sigma: standard deviation of the gaussian blur used for the mask.
    weight: mask weight (ImageJ uses 0.1-0.9 typically).
    in_place: when False, sharpen (and return) a duplicate.
    NOTE(review): UnsharpMask.sharpenFloat operates on float data - the
    processor is presumably expected to be a FloatProcessor; confirm with callers.
    """
    ip = ip if in_place else ip.duplicate()
    ip.getProcessor().snapshot()
    unsharp = UnsharpMask()
    # Bug fix: the code previously called the undefined name "USmask",
    # raising a NameError at runtime.
    unsharp.setup(" ", ip)
    unsharp.sharpenFloat(ip.getProcessor(), sigma, weight)
    return ip
|
||||
|
||||
def subtract_background(ip, radius = 50, create_background=False, dark_background=False, use_paraboloid =True, do_presmooth = True, correctCorners = True, rgb_brightness=False, in_place=True):
|
||||
ip = ip if in_place else ip.duplicate()
|
||||
if rgb_brightness:
|
||||
BackgroundSubtracter().rollingBallBrightnessBackground(ip.getProcessor(), radius, create_background,not dark_background, use_paraboloid, do_presmooth, correctCorners)
|
||||
else:
|
||||
BackgroundSubtracter().rollingBallBackground(ip.getProcessor(), radius, create_background, not dark_background, use_paraboloid, do_presmooth, correctCorners)
|
||||
return ip
|
||||
|
||||
###################################################################################################
|
||||
#FFT
|
||||
###################################################################################################
|
||||
def image_fft(ip, show = True):
|
||||
WindowManager.setTempCurrentImage(ip)
|
||||
fft = FFT()
|
||||
fft.run("fft")
|
||||
#TODO: how to avoid it to be created?
|
||||
#ret = ImagePlus("FHT of " + ip.getTitle(), WindowManager.getCurrentImage().getProcessor())
|
||||
ret = WindowManager.getCurrentImage()
|
||||
if not show:
|
||||
WindowManager.getCurrentImage().hide()
|
||||
return ret
|
||||
|
||||
|
||||
def image_ffti(ip, show = True):
|
||||
WindowManager.setTempCurrentImage(ip)
|
||||
fft = FFT()
|
||||
fft.run("inverse")
|
||||
#WindowManager.getCurrentImage().hide()
|
||||
#TODO: how to avoid it to be created?
|
||||
#ret = WindowManager.getCurrentImage()
|
||||
#WindowManager.getCurrentImage().hide()
|
||||
#ret = ImagePlus(ip.getTitle() + " ffti", WindowManager.getCurrentImage().getProcessor())
|
||||
ret = WindowManager.getCurrentImage()
|
||||
if not show:
|
||||
WindowManager.getCurrentImage().hide()
|
||||
|
||||
return ret
|
||||
|
||||
def bandpass_filter(ip, small_dia_px, large_dia_px, suppress_stripes = 0, stripes_tolerance_direction = 5.0, autoscale_after_filtering = False, saturate_if_autoscale = False, display_filter = False, in_place=True):
|
||||
"""
|
||||
suppress_stripes = 0 for none, 1 for horizontal, 2 for vertical
|
||||
"""
|
||||
ip = ip if in_place else ip.duplicate()
|
||||
filter= FFTFilter();
|
||||
FFTFilter.filterLargeDia = large_dia_px
|
||||
FFTFilter.filterSmallDia = small_dia_px
|
||||
FFTFilter.choiceIndex = suppress_stripes
|
||||
FFTFilter.toleranceDia = stripes_tolerance_direction
|
||||
FFTFilter.doScalingDia = autoscale_after_filtering
|
||||
FFTFilter.saturateDia = saturate_if_autoscale
|
||||
FFTFilter.displayFilter =display_filter
|
||||
filter.setup(None, ip);
|
||||
filter.run(ip.getProcessor())
|
||||
return ip
|
||||
|
||||
###################################################################################################
|
||||
#Convolution
|
||||
###################################################################################################
|
||||
|
||||
KERNEL_BLUR = [[0.1111, 0.1111, 0.1111], [0.1111, 0.1111, 0.1111], [0.1111, 0.1111, 0.1111]]
|
||||
KERNEL_SHARPEN = [[0.0, -0.75, 0.0], [-0.75, 4.0, -0.75], [0.0, -0.75, 0.0]]
|
||||
KERNEL_SHARPEN_2 = [[-1.0, -1.0, -1.0], [-1.0, 9.0, -1.0], [-1.0, -1.0, -1.0]]
|
||||
KERNEL_LIGHT = [[0.1, 0.1, 0.1], [0.1, 1.0, 0.1],[0.1, 0.1, 0.1]]
|
||||
KERNEL_DARK = [[0.01, 0.01, 0.01],[0.01, 0.5, 0.01],[0.01, 0.01, 0.01]]
|
||||
KERNEL_EDGE_DETECT = [[0.0, -0.75, 0.0], [-0.75, 3.0, -0.75], [0.0, -0.75, 0.0]]
|
||||
KERNEL_EDGE_DETECT_2 = [[-0.5, -0.5, -0.5], [-0.5, 4.0, -0.5], [-0.5, -0.5, -0.5]]
|
||||
KERNEL_DIFFERENTIAL_EDGE_DETECT = [[-1.0, 0.0, 1.0], [0.0, 0.0, 0.0], [1.0, 0.0, -1.0]]
|
||||
KERNEL_PREWITT = [[-2.0, -1.0, 0.0], [-1.0, 0.0, 1.0 ], [0.0, 1.0, 2.0]]
|
||||
KERNEL_SOBEL = [[2.0, 2.0, 0.0], [2.0, 0.0, -2.0 ], [0.0, -2.0, -2.0]]
|
||||
|
||||
|
||||
def convolve(ip, kernel, in_place=True):
    """
    Convolve the image with a 2D kernel.
    kernel: list of rows (list of lists of floats), e.g. KERNEL_SHARPEN.
    in_place: when False, convolve (and return) a duplicate.
    Returns the convolved ImagePlus.
    """
    ip = ip if in_place else ip.duplicate()
    # Bug fix: width/height were swapped - len(kernel) is the ROW count.
    # ImageProcessor.convolve expects the row-major flattened kernel with
    # width = number of columns and height = number of rows, so non-square
    # kernels were applied transposed (or rejected).
    kernel_height = len(kernel)
    kernel_width = len(kernel[0])
    flat_kernel = [item for row in kernel for item in row]
    ip.getProcessor().convolve(flat_kernel, kernel_width, kernel_height)
    return ip
|
||||
|
||||
|
||||
###################################################################################################
|
||||
#Shortcut to ImageProcessor methods
|
||||
###################################################################################################
|
||||
def invert(ip, in_place=True):
|
||||
ip = ip if in_place else ip.duplicate()
|
||||
ip.getProcessor().invert()
|
||||
return ip
|
||||
|
||||
def smooth(ip, in_place=True):
|
||||
ip = ip if in_place else ip.duplicate()
|
||||
ip.getProcessor().smooth()
|
||||
return ip
|
||||
|
||||
def sharpen(ip, in_place=True):
|
||||
ip = ip if in_place else ip.duplicate()
|
||||
ip.getProcessor().sharpen()
|
||||
return ip
|
||||
|
||||
def edges(ip, in_place=True): #Sobel
|
||||
ip = ip if in_place else ip.duplicate()
|
||||
ip.getProcessor().findEdges()
|
||||
return ip
|
||||
|
||||
def noise(ip, sigma = 25.0, in_place=True):
|
||||
ip = ip if in_place else ip.duplicate()
|
||||
ip.getProcessor().noise(sigma)
|
||||
return ip
|
||||
|
||||
def remap(ip, min=None, max=None, in_place=True):
    """
    Set the display range of the image.
    min, max: display range bounds; when omitted, the actual pixel minimum /
              maximum are measured and used. (The parameter names shadow the
              builtins but are kept - they are part of the keyword interface.)
    in_place: when False, adjust (and return) a duplicate.
    """
    target = ip if in_place else ip.duplicate()
    if (min is None) or (max is None):
        stats = get_statistics(target, Measurements.MIN_MAX)
        min = stats.min if min is None else min
        max = stats.max if max is None else max
    target.getProcessor().setMinAndMax(min, max)
    return target
|
||||
|
||||
def set_lut(ip, r, g, b):
    """
    Apply a lookup table to the image.
    r, g, b: lists of 256 integers (0..255), one per channel; values are
             converted to Java's signed byte range before building the LUT.
    """
    def _to_signed_bytes(channel):
        # Map 0..255 to Java's signed byte range -128..127.
        return jarray.array([c - 256 if c >= 128 else c for c in channel], 'b')
    ip.setLut(LUT(_to_signed_bytes(r), _to_signed_bytes(g), _to_signed_bytes(b)))
|
||||
|
||||
def resize(ip, width, height):
|
||||
"""
|
||||
Returns new ImagePlus
|
||||
"""
|
||||
p = ip.getProcessor().resize(width, height)
|
||||
return ImagePlus(ip.getTitle() + " resized", p)
|
||||
|
||||
def binning(ip, factor):
|
||||
p=ip.getProcessor().bin(factor)
|
||||
return ImagePlus(ip.getTitle() + " resized", p)
|
||||
|
||||
def get_histogram(ip, hist_min = 0, hist_max = 0, hist_bins = 256, roi=None):
|
||||
"""
|
||||
hist_min, hist_max, hist_bins used only for float images (otherwise fixed to 0,255,256)
|
||||
roi is list [x,y,w,h]
|
||||
"""
|
||||
if roi == None: ip.deleteRoi()
|
||||
else: ip.setRoi(roi[0],roi[1],roi[2],roi[3])
|
||||
image_statistics = ip.getStatistics(0, hist_bins, hist_min, hist_max)
|
||||
return image_statistics.getHistogram()
|
||||
|
||||
|
||||
def get_array(ip):
|
||||
return ip.getProcessor().getIntArray()
|
||||
|
||||
def get_line(ip, x1, y1, x2, y2):
|
||||
return ip.getProcessor().getLine(x1, y1, x2, y2)
|
||||
|
||||
def get_pixel_range(ip):
|
||||
return (ip.getProcessor().getMin(), ip.getProcessor().getMax())
|
||||
|
||||
def get_num_channels(ip):
|
||||
return ip.getProcessor().getNChannels()
|
||||
|
||||
def is_binary(ip):
|
||||
return ip.getProcessor().isBinary()
|
||||
|
||||
def get_pixel(ip, x, y):
|
||||
return ip.getProcessor().getPixel(x,y)
|
||||
|
||||
def get_pixel_array(ip, x, y):
|
||||
a = [0]*get_num_channels(ip)
|
||||
return ip.getProcessor().getPixel(x,y,a)
|
||||
|
||||
def get_pixels(ip):
|
||||
return ip.getProcessor().getPixels()
|
||||
|
||||
def get_width(ip):
|
||||
return ip.getProcessor().getWidth()
|
||||
|
||||
def get_height(ip):
    """Return the image height in pixels."""
    processor = ip.getProcessor()
    return processor.getHeight()
def get_row(ip, y):
    """Return row y of the image as a Java int array."""
    # Hoist get_width: the original called it twice per invocation.
    width = get_width(ip)
    row = jarray.array([0] * width, 'i')
    ip.getProcessor().getRow(0, y, row, width)
    return row
def get_col(ip, x):
    """Return column x of the image as a Java int array."""
    # Hoist get_height: the original called it twice per invocation.
    height = get_height(ip)
    col = jarray.array([0] * height, 'i')
    ip.getProcessor().getColumn(x, 0, col, height)
    return col
def get_statistics(ip, measurements = None):
    """
    Return an image statistics object.
    Measurements is a mask of flags: https://imagej.nih.gov/ij/developer/api/ij/measure/Measurements.html.
    Statistics object hold public fields: https://imagej.nih.gov/ij/developer/api/ij/process/ImageStatistics.html
    """
    if measurements is None:
        return ip.getStatistics()
    return ip.getStatistics(measurements)
###################################################################################################
|
||||
#Image stack functions
|
||||
###################################################################################################
|
||||
def create_stack(ip_list, keep=True, title = None):
    """
    Concatenate a list of images into a stack.
    keep: if True the source images are preserved.
    title: optional title for the resulting stack.
    """
    result = Concatenator().concatenate(ip_list, keep)
    if title is not None:
        result.setTitle(title)
    return result
def reslice(stack, start_at = "Top", vertically = True, flip = True, output_pixel_spacing=1.0, avoid_interpolation = True, title = None):
    """
    Reslice a stack with ij.plugin.Slicer, returning the resliced stack.
    start_at / vertically / flip / avoid_interpolation / output_pixel_spacing
    map onto the corresponding Slicer fields.
    title: optional title for the result.
    """
    slicer = Slicer()
    slicer.rotate = vertically
    slicer.startAt = start_at
    slicer.flip = flip
    slicer.nointerpolate = avoid_interpolation
    slicer.outputZSpacing = output_pixel_spacing
    result = slicer.reslice(stack)
    if title is not None:
        result.setTitle(title)
    return result
148
script/_Lib/jeputils.js
Normal file
148
script/_Lib/jeputils.js
Normal file
@@ -0,0 +1,148 @@
|
||||
///////////////////////////////////////////////////////////////////////////////////////////////////
|
||||
// Facade to JEP: Embedded Python
|
||||
///////////////////////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
//Matplotlib won't work out of the box because it's default backend (Qt) uses signals, which only works in
|
||||
//the main thread. Ideally should find a fix, in order to mark the running thread as the main.
|
||||
//As a workaround, one can use the Tk backend:
|
||||
//
|
||||
//import matplotlib
|
||||
//matplotlib.use('TkAgg')
|
||||
|
||||
|
||||
importClass(java.io.File)
|
||||
importClass(java.lang.Thread)
|
||||
|
||||
Jep = Java.type('jep.Jep')
|
||||
NDArray = Java.type('jep.NDArray')
|
||||
|
||||
|
||||
__jep = {}
|
||||
|
||||
function __get_jep(){
|
||||
t = java.lang.Thread.currentThread()
|
||||
if (!(t in __jep)){
|
||||
init_jep()
|
||||
}
|
||||
return __jep[t]
|
||||
}
|
||||
|
||||
function __close_jep(){
|
||||
t = java.lang.Thread.currentThread()
|
||||
if (t in __jep){
|
||||
__jep[t].close()
|
||||
}
|
||||
}
|
||||
|
||||
function init_jep(){
|
||||
//TODO: Should do it but generates errors
|
||||
//__close_jep()
|
||||
j = new Jep(false)
|
||||
//Faster, but statements must be complete
|
||||
j.setInteractive(false)
|
||||
__jep[java.lang.Thread.currentThread()] = j
|
||||
j.eval("import sys")
|
||||
//sys.argv is not present in JEP and may be needed for certain modules (as Tkinter)
|
||||
j.eval("sys.argv = ['PShell']");
|
||||
//Add standard script path to python path
|
||||
j.eval("sys.path.append('" + get_context().setup.getScriptPath() + "')")
|
||||
|
||||
//Redirect stdout
|
||||
j.eval("class JepStdout:\n" +
|
||||
" def write(self, str):\n" +
|
||||
" self.str += str\n" +
|
||||
" def clear(self):\n" +
|
||||
" self.str = ''\n" +
|
||||
" def flush(self):\n" +
|
||||
" pass\n")
|
||||
j.eval("sys.stdout=JepStdout()")
|
||||
j.eval("sys.stderr=JepStdout()")
|
||||
j.eval("sys.stdout.clear()")
|
||||
j.eval("sys.stderr.clear()")
|
||||
}
|
||||
|
||||
function __print_stdout(){
|
||||
j=__get_jep()
|
||||
output = j.getValue("sys.stdout.str")
|
||||
err = j.getValue("sys.stderr.str")
|
||||
j.eval("sys.stdout.clear()")
|
||||
j.eval("sys.stderr.clear()")
|
||||
if ((output != null) && (output.length>0)){
|
||||
print (output)
|
||||
}
|
||||
if ((err != null) && (err.length>0)){
|
||||
java.lang.System.err.println(err)
|
||||
}
|
||||
}
|
||||
|
||||
function run_jep(script_name, vars){
|
||||
if (!script_name.toLowerCase().endsWith(".py")){
|
||||
script_name += ".py"
|
||||
}
|
||||
if (!is_defined(vars)) {
|
||||
vars = {};
|
||||
}
|
||||
script = get_context().scriptManager.library.resolveFile(script_name)
|
||||
if (script == null){
|
||||
script= new File(script_name).getAbsolutePath()
|
||||
}
|
||||
j=__get_jep()
|
||||
|
||||
for (var v in vars){
|
||||
j.set(v, vars[v])
|
||||
}
|
||||
try{
|
||||
j.runScript(script)
|
||||
} finally {
|
||||
__print_stdout()
|
||||
}
|
||||
}
|
||||
|
||||
function eval_jep(line){
|
||||
j=__get_jep()
|
||||
try{
|
||||
j.eval(line)
|
||||
} finally {
|
||||
__print_stdout()
|
||||
}
|
||||
}
|
||||
|
||||
function set_jep(v, value){
|
||||
j=__get_jep()
|
||||
j.set(v, value)
|
||||
}
|
||||
|
||||
function get_jep(v){
|
||||
j=__get_jep()
|
||||
return j.getValue(v)
|
||||
}
|
||||
|
||||
function call_jep(module, func, args){
|
||||
if (!is_defined(args)) {
|
||||
args = [];
|
||||
}
|
||||
|
||||
j=__get_jep()
|
||||
f = module+"_" + func +"_"+ j.hashCode()
|
||||
try{
|
||||
eval_jep("from " + module + " import " + func + " as " + f)
|
||||
ret = j.invoke(f, to_array(args,'o'))
|
||||
} finally {
|
||||
__print_stdout()
|
||||
}
|
||||
return ret
|
||||
}
|
||||
|
||||
function to_npa(data, dimensions, type){
|
||||
if (!is_defined(dimensions)) {
|
||||
dimensions = null;
|
||||
}
|
||||
if (!is_defined(type)) {
|
||||
type='d'
|
||||
}
|
||||
data = to_array(data, type)
|
||||
return new NDArray(data, dimensions)
|
||||
}
|
||||
|
||||
|
||||
|
||||
128
script/_Lib/jeputils.py
Normal file
128
script/_Lib/jeputils.py
Normal file
@@ -0,0 +1,128 @@
|
||||
###################################################################################################
|
||||
# Facade to JEP: Embedded Python
|
||||
###################################################################################################
|
||||
|
||||
#Matplotlib won't work out of the box because it's default backend (Qt) uses signals, which only works in
|
||||
#the main thread. Ideally should find a fix, in order to mark the running thread as the main.
|
||||
#As a workaround, one can use the Tk backend:
|
||||
#
|
||||
#import matplotlib
|
||||
#matplotlib.use('TkAgg')
|
||||
|
||||
|
||||
import sys
|
||||
import os
|
||||
import jep.Jep
|
||||
import jep.NDArray
|
||||
import java.lang.Thread
|
||||
from startup import to_array, get_context
|
||||
|
||||
__jep = {}
|
||||
|
||||
def __get_jep():
    """Return the Jep interpreter bound to the current thread, creating it on demand."""
    current = java.lang.Thread.currentThread()
    if current not in __jep:
        init_jep()
    return __jep[current]
def __close_jep():
    """
    Close the Jep interpreter bound to the current thread, if any.
    The entry is also removed from the registry, so a later __get_jep()
    creates a fresh interpreter instead of returning a closed one
    (the original left the closed interpreter registered).
    """
    current = java.lang.Thread.currentThread()
    interpreter = __jep.pop(current, None)
    if interpreter is not None:
        interpreter.close()
def init_jep():
|
||||
#TODO: Should do it but generates errors
|
||||
#__close_jep()
|
||||
j = jep.Jep(False)
|
||||
#Faster, but statements must be complete
|
||||
j.setInteractive(False)
|
||||
__jep[java.lang.Thread.currentThread()] = j
|
||||
j.eval("import sys")
|
||||
#sys.argv is not present in JEP and may be needed for certain modules (as Tkinter)
|
||||
j.eval("sys.argv = ['PShell']");
|
||||
#Add standard script path to python path
|
||||
j.eval("sys.path.append('" + get_context().setup.getScriptPath() + "')")
|
||||
|
||||
#Redirect stdout
|
||||
j.eval("class JepStdout:\n" +
|
||||
" def write(self, str):\n" +
|
||||
" self.str += str\n" +
|
||||
" def clear(self):\n" +
|
||||
" self.str = ''\n" +
|
||||
" def flush(self):\n" +
|
||||
" pass\n")
|
||||
j.eval("sys.stdout=JepStdout()");
|
||||
j.eval("sys.stderr=JepStdout()");
|
||||
j.eval("sys.stdout.clear()")
|
||||
j.eval("sys.stderr.clear()")
|
||||
|
||||
def __print_stdout():
|
||||
j=__get_jep()
|
||||
output = j.getValue("sys.stdout.str")
|
||||
err = j.getValue("sys.stderr.str")
|
||||
j.eval("sys.stdout.clear()")
|
||||
j.eval("sys.stderr.clear()")
|
||||
if (output is not None) and len(output)>0:
|
||||
print output
|
||||
if (err is not None) and len(err)>0:
|
||||
print >> sys.stderr, err
|
||||
|
||||
def run_jep(script_name, vars=None):
    """
    Run a Python script in the embedded (JEP) interpreter.

    script_name: resolved against the script library first, then taken as a
                 plain filesystem path.
    vars: optional dict of variables to set in the interpreter before running.
    """
    if vars is None:
        # Was a mutable default argument (vars = {}); use the None sentinel.
        vars = {}
    script = get_context().scriptManager.library.resolveFile(script_name)
    if script is None:
        script = os.path.abspath(script_name)
    interpreter = __get_jep()
    for name in vars:
        interpreter.set(name, vars[name])
    try:
        interpreter.runScript(script)
    finally:
        # Always flush the captured output, even when the script raised.
        __print_stdout()
def eval_jep(line):
    """Evaluate one statement in the embedded interpreter, flushing its output."""
    interpreter = __get_jep()
    try:
        interpreter.eval(line)
    finally:
        __print_stdout()
def set_jep(var, value):
    """Set variable 'var' to 'value' in the embedded interpreter."""
    __get_jep().set(var, value)
def get_jep(var):
    """Return the value of variable 'var' from the embedded interpreter."""
    return __get_jep().getValue(var)
def call_jep(module, function, args=None):
    """
    Call a function from a Python module in the embedded interpreter.

    module: a module name, or a path (containing '/') resolved against the
            script library; in the latter case its directory is appended to
            the embedded sys.path.
    function: name of the function inside the module.
    args: optional list of positional arguments.
    Returns the function's return value.
    """
    if args is None:
        # Was a mutable default argument (args = []); use the None sentinel.
        args = []
    j = __get_jep()
    if "/" in module:
        script = get_context().scriptManager.library.resolveFile(module)
        if script is None:
            # The original crashed here ('in None'); fall back to the
            # caller-supplied path when the library cannot resolve it.
            script = module
        if "\\" in script:
            #Windows paths
            module_path = script[0:script.rfind("\\")]
            module = script[script.rfind("\\")+1:]
        else:
            #Linux paths
            module_path = script[0:script.rfind("/")]
            module = script[script.rfind("/")+1:]
        eval_jep("import sys")
        eval_jep("sys.path.append('" + module_path + "')")
        if module.endswith(".py"):
            module = module[0:-3]

    # Alias the imported function with a per-interpreter unique name to
    # avoid clashes between concurrent interpreters.
    f = module + "_" + function + "_" + str(j.hashCode())
    try:
        eval_jep("from " + module + " import " + function + " as " + f)
        ret = j.invoke(f, args)
    finally:
        __print_stdout()
    return ret
||||
#Converts python list or Java array to numpy array
def to_npa(data, dimensions = None, type = None):
    """Convert a Python list or Java array into a jep.NDArray ('d' elements by default)."""
    element_type = 'd' if type is None else type
    return jep.NDArray(to_array(data, element_type), dimensions)
234
script/_Lib/jsonrpclib/SimpleJSONRPCServer.py
Normal file
234
script/_Lib/jsonrpclib/SimpleJSONRPCServer.py
Normal file
@@ -0,0 +1,234 @@
|
||||
import jsonrpclib
|
||||
from jsonrpclib import Fault
|
||||
from jsonrpclib.jsonrpc import USE_UNIX_SOCKETS
|
||||
import SimpleXMLRPCServer
|
||||
import SocketServer
|
||||
import socket
|
||||
import logging
|
||||
import os
|
||||
import types
|
||||
import traceback
|
||||
import sys
|
||||
try:
|
||||
import fcntl
|
||||
except ImportError:
|
||||
# For Windows
|
||||
fcntl = None
|
||||
|
||||
|
||||
def get_version(request):
    """
    Return the JSON-RPC version of a request dict:
    2.0 if it has a 'jsonrpc' member, 1.0 if it only has 'id', else None.
    """
    # Membership test directly on the dict (no request.keys() list build).
    if 'jsonrpc' in request:
        return 2.0
    if 'id' in request:
        return 1.0
    return None
def validate_request(request):
|
||||
if not isinstance(request, dict):
|
||||
fault = Fault(
|
||||
-32600, 'Request must be {}, not %s.' % type(request)
|
||||
)
|
||||
return fault
|
||||
rpcid = request.get('id', None)
|
||||
version = get_version(request)
|
||||
if not version:
|
||||
fault = Fault(-32600, 'Request %s invalid.' % request, rpcid=rpcid)
|
||||
return fault
|
||||
request.setdefault('params', [])
|
||||
method = request.get('method', None)
|
||||
params = request.get('params')
|
||||
param_types = (types.ListType, types.DictType, types.TupleType)
|
||||
if not method or type(method) not in types.StringTypes or \
|
||||
type(params) not in param_types:
|
||||
fault = Fault(
|
||||
-32600, 'Invalid request parameters or method.', rpcid=rpcid
|
||||
)
|
||||
return fault
|
||||
return True
|
||||
|
||||
|
||||
class SimpleJSONRPCDispatcher(SimpleXMLRPCServer.SimpleXMLRPCDispatcher):
|
||||
|
||||
def __init__(self, encoding=None):
|
||||
SimpleXMLRPCServer.SimpleXMLRPCDispatcher.__init__(
|
||||
self, allow_none=True, encoding=encoding)
|
||||
|
||||
def _marshaled_dispatch(self, data, dispatch_method=None):
|
||||
response = None
|
||||
try:
|
||||
request = jsonrpclib.loads(data)
|
||||
except Exception, e:
|
||||
fault = Fault(-32700, 'Request %s invalid. (%s)' % (data, e))
|
||||
response = fault.response()
|
||||
return response
|
||||
if not request:
|
||||
fault = Fault(-32600, 'Request invalid -- no request data.')
|
||||
return fault.response()
|
||||
if isinstance(request, list):
|
||||
# This SHOULD be a batch, by spec
|
||||
responses = []
|
||||
for req_entry in request:
|
||||
result = validate_request(req_entry)
|
||||
if type(result) is Fault:
|
||||
responses.append(result.response())
|
||||
continue
|
||||
resp_entry = self._marshaled_single_dispatch(req_entry)
|
||||
if resp_entry is not None:
|
||||
responses.append(resp_entry)
|
||||
if len(responses) > 0:
|
||||
response = '[%s]' % ','.join(responses)
|
||||
else:
|
||||
response = ''
|
||||
else:
|
||||
result = validate_request(request)
|
||||
if type(result) is Fault:
|
||||
return result.response()
|
||||
response = self._marshaled_single_dispatch(request)
|
||||
return response
|
||||
|
||||
def _marshaled_single_dispatch(self, request):
|
||||
# TODO - Use the multiprocessing and skip the response if
|
||||
# it is a notification
|
||||
# Put in support for custom dispatcher here
|
||||
# (See SimpleXMLRPCServer._marshaled_dispatch)
|
||||
method = request.get('method')
|
||||
params = request.get('params')
|
||||
try:
|
||||
response = self._dispatch(method, params)
|
||||
except:
|
||||
exc_type, exc_value, exc_tb = sys.exc_info()
|
||||
fault = Fault(-32603, '%s:%s' % (exc_type, exc_value))
|
||||
return fault.response()
|
||||
if 'id' not in request.keys() or request['id'] is None:
|
||||
# It's a notification
|
||||
return None
|
||||
try:
|
||||
response = jsonrpclib.dumps(response,
|
||||
methodresponse=True,
|
||||
rpcid=request['id']
|
||||
)
|
||||
return response
|
||||
except:
|
||||
exc_type, exc_value, exc_tb = sys.exc_info()
|
||||
fault = Fault(-32603, '%s:%s' % (exc_type, exc_value))
|
||||
return fault.response()
|
||||
|
||||
def _dispatch(self, method, params):
|
||||
func = None
|
||||
try:
|
||||
func = self.funcs[method]
|
||||
except KeyError:
|
||||
if self.instance is not None:
|
||||
if hasattr(self.instance, '_dispatch'):
|
||||
return self.instance._dispatch(method, params)
|
||||
else:
|
||||
try:
|
||||
func = SimpleXMLRPCServer.resolve_dotted_attribute(
|
||||
self.instance,
|
||||
method,
|
||||
True
|
||||
)
|
||||
except AttributeError:
|
||||
pass
|
||||
if func is not None:
|
||||
try:
|
||||
if isinstance(params, types.ListType):
|
||||
response = func(*params)
|
||||
else:
|
||||
response = func(**params)
|
||||
return response
|
||||
# except TypeError:
|
||||
# return Fault(-32602, 'Invalid parameters.')
|
||||
except:
|
||||
err_lines = traceback.format_exc().splitlines()
|
||||
trace_string = '%s | %s' % (err_lines[-3], err_lines[-1])
|
||||
fault = jsonrpclib.Fault(-32603, 'Server error: %s' %
|
||||
trace_string)
|
||||
return fault
|
||||
else:
|
||||
return Fault(-32601, 'Method %s not supported.' % method)
|
||||
|
||||
|
||||
class SimpleJSONRPCRequestHandler(
|
||||
SimpleXMLRPCServer.SimpleXMLRPCRequestHandler):
|
||||
|
||||
def do_POST(self):
|
||||
if not self.is_rpc_path_valid():
|
||||
self.report_404()
|
||||
return
|
||||
try:
|
||||
max_chunk_size = 10*1024*1024
|
||||
size_remaining = int(self.headers["content-length"])
|
||||
L = []
|
||||
while size_remaining:
|
||||
chunk_size = min(size_remaining, max_chunk_size)
|
||||
L.append(self.rfile.read(chunk_size))
|
||||
size_remaining -= len(L[-1])
|
||||
data = ''.join(L)
|
||||
response = self.server._marshaled_dispatch(data)
|
||||
self.send_response(200)
|
||||
except Exception:
|
||||
self.send_response(500)
|
||||
err_lines = traceback.format_exc().splitlines()
|
||||
trace_string = '%s | %s' % (err_lines[-3], err_lines[-1])
|
||||
fault = jsonrpclib.Fault(-32603, 'Server error: %s' % trace_string)
|
||||
response = fault.response()
|
||||
if response is None:
|
||||
response = ''
|
||||
self.send_header("Content-type", "application/json-rpc")
|
||||
self.send_header("Content-length", str(len(response)))
|
||||
self.end_headers()
|
||||
self.wfile.write(response)
|
||||
self.wfile.flush()
|
||||
self.connection.shutdown(1)
|
||||
|
||||
|
||||
class SimpleJSONRPCServer(SocketServer.TCPServer, SimpleJSONRPCDispatcher):
|
||||
|
||||
allow_reuse_address = True
|
||||
|
||||
def __init__(self, addr, requestHandler=SimpleJSONRPCRequestHandler,
|
||||
logRequests=True, encoding=None, bind_and_activate=True,
|
||||
address_family=socket.AF_INET):
|
||||
self.logRequests = logRequests
|
||||
SimpleJSONRPCDispatcher.__init__(self, encoding)
|
||||
# TCPServer.__init__ has an extra parameter on 2.6+, so
|
||||
# check Python version and decide on how to call it
|
||||
vi = sys.version_info
|
||||
self.address_family = address_family
|
||||
if USE_UNIX_SOCKETS and address_family == socket.AF_UNIX:
|
||||
# Unix sockets can't be bound if they already exist in the
|
||||
# filesystem. The convention of e.g. X11 is to unlink
|
||||
# before binding again.
|
||||
if os.path.exists(addr):
|
||||
try:
|
||||
os.unlink(addr)
|
||||
except OSError:
|
||||
logging.warning("Could not unlink socket %s", addr)
|
||||
# if python 2.5 and lower
|
||||
if vi[0] < 3 and vi[1] < 6:
|
||||
SocketServer.TCPServer.__init__(self, addr, requestHandler)
|
||||
else:
|
||||
SocketServer.TCPServer.__init__(
|
||||
self, addr, requestHandler, bind_and_activate)
|
||||
if fcntl is not None and hasattr(fcntl, 'FD_CLOEXEC'):
|
||||
flags = fcntl.fcntl(self.fileno(), fcntl.F_GETFD)
|
||||
flags |= fcntl.FD_CLOEXEC
|
||||
fcntl.fcntl(self.fileno(), fcntl.F_SETFD, flags)
|
||||
|
||||
|
||||
class CGIJSONRPCRequestHandler(SimpleJSONRPCDispatcher):
|
||||
|
||||
def __init__(self, encoding=None):
|
||||
SimpleJSONRPCDispatcher.__init__(self, encoding)
|
||||
|
||||
def handle_jsonrpc(self, request_text):
|
||||
response = self._marshaled_dispatch(request_text)
|
||||
print 'Content-Type: application/json-rpc'
|
||||
print 'Content-Length: %d' % len(response)
|
||||
print
|
||||
sys.stdout.write(response)
|
||||
|
||||
handle_xmlrpc = handle_jsonrpc
|
||||
6
script/_Lib/jsonrpclib/__init__.py
Normal file
6
script/_Lib/jsonrpclib/__init__.py
Normal file
@@ -0,0 +1,6 @@
|
||||
from jsonrpclib.config import Config
|
||||
config = Config.instance()
|
||||
from jsonrpclib.history import History
|
||||
history = History.instance()
|
||||
from jsonrpclib.jsonrpc import Server, MultiCall, Fault
|
||||
from jsonrpclib.jsonrpc import ProtocolError, loads, dumps
|
||||
40
script/_Lib/jsonrpclib/config.py
Normal file
40
script/_Lib/jsonrpclib/config.py
Normal file
@@ -0,0 +1,40 @@
|
||||
import sys
|
||||
|
||||
|
||||
class LocalClasses(dict):
    """A dict of classes keyed by class name, for jsonclass translation."""

    def add(self, cls):
        """Register cls under its own __name__."""
        self[cls.__name__] = cls
class Config(object):
    """
    This is pretty much used exclusively for the 'jsonclass'
    functionality... set use_jsonclass to False to turn it off.
    You can change serialize_method and ignore_attribute, or use
    the local_classes.add(class) to include "local" classes.
    """
    # Change to False to keep __jsonclass__ entries raw.
    use_jsonclass = True
    # Name of the method on custom classes responsible for returning a
    # tuple of the constructor arguments and a dict of attributes.
    serialize_method = '_serialize'
    # Name of the attribute on custom classes holding strings and / or
    # references of the attributes the class translator should ignore.
    ignore_attribute = '_ignore'
    # The list of classes to use for jsonclass translation.
    classes = LocalClasses()
    # Version of the JSON-RPC spec to support.
    version = 2.0
    # User agent to use for calls.
    user_agent = 'jsonrpclib/0.1 (Python %s)' % \
        '.'.join([str(ver) for ver in sys.version_info[0:3]])
    _instance = None

    @classmethod
    def instance(cls):
        """Return the shared singleton, creating it on first use."""
        if not cls._instance:
            cls._instance = cls()
        return cls._instance
40
script/_Lib/jsonrpclib/history.py
Normal file
40
script/_Lib/jsonrpclib/history.py
Normal file
@@ -0,0 +1,40 @@
|
||||
class History(object):
    """
    This holds all the response and request objects for a
    session. A server using this should call "clear" after
    each request cycle in order to keep it from clogging
    memory.
    """
    # NOTE: class-level lists, shared by all instances; the class is used
    # as a singleton via instance().
    requests = []
    responses = []
    _instance = None

    @classmethod
    def instance(cls):
        """Return the shared singleton, creating it on first use."""
        if not cls._instance:
            cls._instance = cls()
        return cls._instance

    def add_response(self, response_obj):
        """Record a response object."""
        self.responses.append(response_obj)

    def add_request(self, request_obj):
        """Record a request object."""
        self.requests.append(request_obj)

    @property
    def request(self):
        """The most recently recorded request, or None."""
        return self.requests[-1] if self.requests else None

    @property
    def response(self):
        """The most recently recorded response, or None."""
        return self.responses[-1] if self.responses else None

    def clear(self):
        """Drop all recorded requests and responses in place."""
        del self.requests[:]
        del self.responses[:]
155
script/_Lib/jsonrpclib/jsonclass.py
Normal file
155
script/_Lib/jsonrpclib/jsonclass.py
Normal file
@@ -0,0 +1,155 @@
|
||||
import types
|
||||
import inspect
|
||||
import re
|
||||
|
||||
from jsonrpclib import config
|
||||
|
||||
iter_types = [
|
||||
types.DictType,
|
||||
types.ListType,
|
||||
types.TupleType
|
||||
]
|
||||
|
||||
string_types = [
|
||||
types.StringType,
|
||||
types.UnicodeType
|
||||
]
|
||||
|
||||
numeric_types = [
|
||||
types.IntType,
|
||||
types.LongType,
|
||||
types.FloatType
|
||||
]
|
||||
|
||||
value_types = [
|
||||
types.BooleanType,
|
||||
types.NoneType
|
||||
]
|
||||
|
||||
supported_types = iter_types+string_types+numeric_types+value_types
|
||||
invalid_module_chars = r'[^a-zA-Z0-9\_\.]'
|
||||
|
||||
|
||||
class TranslationError(Exception):
|
||||
pass
|
||||
|
||||
|
||||
def dump(obj, serialize_method=None, ignore_attribute=None, ignore=[]):
    """
    Recursively convert obj into JSON-serializable structures.

    Standard types pass through (containers recursively); any other object
    is encoded as a dict carrying a '__jsonclass__' entry with the class
    name and constructor parameters.

    serialize_method: name of an optional method on custom classes that
        returns (constructor_params, attribute_dict); defaults to
        config.serialize_method.
    ignore_attribute: name of an optional attribute on custom classes
        listing attributes/values to skip; defaults to config.ignore_attribute.
    ignore: extra attributes/values to skip (read-only; the mutable default
        is safe because it is never modified).
    """
    if not serialize_method:
        serialize_method = config.serialize_method
    if not ignore_attribute:
        ignore_attribute = config.ignore_attribute
    obj_type = type(obj)
    # Parse / return default "types"...
    if obj_type in numeric_types+string_types+value_types:
        return obj
    if obj_type in iter_types:
        if obj_type in (types.ListType, types.TupleType):
            new_obj = []
            for item in obj:
                new_obj.append(
                    dump(item, serialize_method, ignore_attribute, ignore))
            # BUG FIX: the original tested isinstance(obj_type, TupleType),
            # which is always False because obj_type is a type object, so
            # tuples silently degraded to lists. Compare the type itself.
            if obj_type is types.TupleType:
                new_obj = tuple(new_obj)
            return new_obj
        # It's a dict...
        else:
            new_obj = {}
            for key, value in obj.iteritems():
                new_obj[key] = dump(
                    value, serialize_method, ignore_attribute, ignore)
            return new_obj
    # It's not a standard type, so it needs __jsonclass__
    module_name = inspect.getmodule(obj).__name__
    class_name = obj.__class__.__name__
    json_class = class_name
    if module_name not in ['', '__main__']:
        json_class = '%s.%s' % (module_name, json_class)
    return_obj = {"__jsonclass__": [json_class]}
    # If a serialization method is defined..
    if serialize_method in dir(obj):
        # Params can be a dict (keyword) or list (positional)
        # Attrs MUST be a dict.
        serialize = getattr(obj, serialize_method)
        params, attrs = serialize()
        return_obj['__jsonclass__'].append(params)
        return_obj.update(attrs)
        return return_obj
    # Otherwise, try to figure it out. We can't assume to know anything
    # about the parameters passed to __init__, so pass none and restore
    # whichever serializable attributes the instance carries.
    return_obj['__jsonclass__'].append([])
    attrs = {}
    ignore_list = getattr(obj, ignore_attribute, [])+ignore
    for attr_name, attr_value in obj.__dict__.iteritems():
        if type(attr_value) in supported_types and \
                attr_name not in ignore_list and \
                attr_value not in ignore_list:
            attrs[attr_name] = dump(
                attr_value, serialize_method, ignore_attribute, ignore)
    return_obj.update(attrs)
    return return_obj
def load(obj):
|
||||
if type(obj) in string_types + numeric_types + value_types:
|
||||
return obj
|
||||
|
||||
if isinstance(obj, list):
|
||||
return_list = []
|
||||
for entry in obj:
|
||||
return_list.append(load(entry))
|
||||
return return_list
|
||||
# Othewise, it's a dict type
|
||||
if '__jsonclass__' not in obj:
|
||||
return_dict = {}
|
||||
for key, value in obj.iteritems():
|
||||
new_value = load(value)
|
||||
return_dict[key] = new_value
|
||||
return return_dict
|
||||
# It's a dict, and it's a __jsonclass__
|
||||
orig_module_name = obj['__jsonclass__'][0]
|
||||
params = obj['__jsonclass__'][1]
|
||||
if orig_module_name == '':
|
||||
raise TranslationError('Module name empty.')
|
||||
json_module_clean = re.sub(invalid_module_chars, '', orig_module_name)
|
||||
if json_module_clean != orig_module_name:
|
||||
raise TranslationError('Module name %s has invalid characters.' %
|
||||
orig_module_name)
|
||||
json_module_parts = json_module_clean.split('.')
|
||||
json_class = None
|
||||
if len(json_module_parts) == 1:
|
||||
# Local class name -- probably means it won't work
|
||||
if json_module_parts[0] not in config.classes.keys():
|
||||
raise TranslationError('Unknown class or module %s.' %
|
||||
json_module_parts[0])
|
||||
json_class = config.classes[json_module_parts[0]]
|
||||
else:
|
||||
json_class_name = json_module_parts.pop()
|
||||
json_module_tree = '.'.join(json_module_parts)
|
||||
try:
|
||||
temp_module = __import__(json_module_tree)
|
||||
except ImportError:
|
||||
raise TranslationError('Could not import %s from module %s.' %
|
||||
(json_class_name, json_module_tree))
|
||||
|
||||
# The returned class is the top-level module, not the one we really
|
||||
# want. (E.g., if we import a.b.c, we now have a.) Walk through other
|
||||
# path components to get to b and c.
|
||||
for i in json_module_parts[1:]:
|
||||
temp_module = getattr(temp_module, i)
|
||||
|
||||
json_class = getattr(temp_module, json_class_name)
|
||||
# Creating the object...
|
||||
new_obj = None
|
||||
if isinstance(params, list):
|
||||
new_obj = json_class(*params)
|
||||
elif isinstance(params, dict):
|
||||
new_obj = json_class(**params)
|
||||
else:
|
||||
raise TranslationError('Constructor args must be a dict or list.')
|
||||
for key, value in obj.iteritems():
|
||||
if key == '__jsonclass__':
|
||||
continue
|
||||
setattr(new_obj, key, value)
|
||||
return new_obj
|
||||
596
script/_Lib/jsonrpclib/jsonrpc.py
Normal file
596
script/_Lib/jsonrpclib/jsonrpc.py
Normal file
@@ -0,0 +1,596 @@
|
||||
"""
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
|
||||
============================
|
||||
JSONRPC Library (jsonrpclib)
|
||||
============================
|
||||
|
||||
This library is a JSON-RPC v.2 (proposed) implementation which
|
||||
follows the xmlrpclib API for portability between clients. It
|
||||
uses the same Server / ServerProxy, loads, dumps, etc. syntax,
|
||||
while providing features not present in XML-RPC like:
|
||||
|
||||
* Keyword arguments
|
||||
* Notifications
|
||||
* Versioning
|
||||
* Batches and batch notifications
|
||||
|
||||
Eventually, I'll add a SimpleXMLRPCServer compatible library,
|
||||
and other things to tie the thing off nicely. :)
|
||||
|
||||
For a quick-start, just open a console and type the following,
|
||||
replacing the server address, method, and parameters
|
||||
appropriately.
|
||||
>>> import jsonrpclib
|
||||
>>> server = jsonrpclib.Server('http://localhost:8181')
|
||||
>>> server.add(5, 6)
|
||||
11
|
||||
>>> server._notify.add(5, 6)
|
||||
>>> batch = jsonrpclib.MultiCall(server)
|
||||
>>> batch.add(3, 50)
|
||||
>>> batch.add(2, 3)
|
||||
>>> batch._notify.add(3, 5)
|
||||
>>> batch()
|
||||
[53, 5]
|
||||
|
||||
See http://code.google.com/p/jsonrpclib/ for more info.
|
||||
"""
|
||||
|
||||
import types
|
||||
from xmlrpclib import Transport as XMLTransport
|
||||
from xmlrpclib import SafeTransport as XMLSafeTransport
|
||||
from xmlrpclib import ServerProxy as XMLServerProxy
|
||||
from xmlrpclib import _Method as XML_Method
|
||||
import string
|
||||
import random
|
||||
|
||||
# Library includes
|
||||
from jsonrpclib import config
|
||||
from jsonrpclib import history
|
||||
|
||||
# JSON library importing
|
||||
cjson = None
|
||||
json = None
|
||||
try:
|
||||
import cjson
|
||||
except ImportError:
|
||||
try:
|
||||
import json
|
||||
except ImportError:
|
||||
try:
|
||||
import simplejson as json
|
||||
except ImportError:
|
||||
raise ImportError(
|
||||
'You must have the cjson, json, or simplejson ' +
|
||||
'module(s) available.'
|
||||
)
|
||||
|
||||
IDCHARS = string.ascii_lowercase+string.digits
|
||||
|
||||
|
||||
class UnixSocketMissing(Exception):
|
||||
"""
|
||||
Just a properly named Exception if Unix Sockets usage is
|
||||
attempted on a platform that doesn't support them (Windows)
|
||||
"""
|
||||
pass
|
||||
|
||||
|
||||
# JSON Abstractions
|
||||
|
||||
|
||||
def jdumps(obj, encoding='utf-8'):
    """Serialize obj to a JSON string using cjson when available, else json."""
    # Do 'serialize' test at some point for other classes
    global cjson
    if cjson:
        return cjson.encode(obj)
    return json.dumps(obj, encoding=encoding)
def jloads(json_string):
    """Parse a JSON string using cjson when available, else json."""
    global cjson
    if cjson:
        return cjson.decode(json_string)
    return json.loads(json_string)
||||
|
||||
# XMLRPClib re-implementations
|
||||
|
||||
|
||||
class ProtocolError(Exception):
|
||||
pass
|
||||
|
||||
|
||||
class TransportMixIn(object):
|
||||
""" Just extends the XMLRPC transport where necessary. """
|
||||
user_agent = config.user_agent
|
||||
# for Python 2.7 support
|
||||
_connection = (None, None)
|
||||
_extra_headers = []
|
||||
|
||||
def send_content(self, connection, request_body):
|
||||
connection.putheader("Content-Type", "application/json-rpc")
|
||||
connection.putheader("Content-Length", str(len(request_body)))
|
||||
connection.endheaders()
|
||||
if request_body:
|
||||
connection.send(request_body)
|
||||
|
||||
def getparser(self):
|
||||
target = JSONTarget()
|
||||
return JSONParser(target), target
|
||||
|
||||
|
||||
class JSONParser(object):
|
||||
def __init__(self, target):
|
||||
self.target = target
|
||||
|
||||
def feed(self, data):
|
||||
self.target.feed(data)
|
||||
|
||||
def close(self):
|
||||
pass
|
||||
|
||||
|
||||
class JSONTarget(object):
|
||||
def __init__(self):
|
||||
self.data = []
|
||||
|
||||
def feed(self, data):
|
||||
self.data.append(data)
|
||||
|
||||
def close(self):
|
||||
return ''.join(self.data)
|
||||
|
||||
|
||||
class Transport(TransportMixIn, XMLTransport):
|
||||
def __init__(self):
|
||||
TransportMixIn.__init__(self)
|
||||
XMLTransport.__init__(self)
|
||||
|
||||
|
||||
class SafeTransport(TransportMixIn, XMLSafeTransport):
    # HTTPS transport: same mixin behavior over xmlrpclib's SafeTransport.
    def __init__(self):
        TransportMixIn.__init__(self)
        XMLSafeTransport.__init__(self)
|
||||
|
||||
from httplib import HTTP, HTTPConnection
|
||||
from socket import socket
|
||||
|
||||
# Unix domain socket support is optional: platforms without AF_UNIX
# (e.g. Windows) fall back to TCP-only transports.
USE_UNIX_SOCKETS = False

try:
    from socket import AF_UNIX, SOCK_STREAM
    USE_UNIX_SOCKETS = True
except ImportError:
    # AF_UNIX not available on this platform.
    pass
|
||||
|
||||
if (USE_UNIX_SOCKETS):

    class UnixHTTPConnection(HTTPConnection):
        # HTTPConnection variant that dials a Unix domain socket; the
        # "host" value is used as the socket's filesystem path.
        def connect(self):
            self.sock = socket(AF_UNIX, SOCK_STREAM)
            self.sock.connect(self.host)

    class UnixHTTP(HTTP):
        # Py2 HTTP facade bound to the Unix-socket connection class.
        _connection_class = UnixHTTPConnection

    class UnixTransport(TransportMixIn, XMLTransport):
        # Transport that routes JSON-RPC traffic over a Unix domain socket.

        def make_connection(self, host):
            host, extra_headers, x509 = self.get_host_info(host)
            return UnixHTTP(host)
|
||||
|
||||
|
||||
class ServerProxy(XMLServerProxy):
    """
    Unfortunately, much more of this class has to be copied since
    so much of it does the serialization.

    Speaks JSON-RPC over http://, https:// or unix:// URIs; attribute
    access yields callable proxy methods, and the _notify property
    yields fire-and-forget notification senders.
    """

    def __init__(self, uri, transport=None, encoding=None,
                 verbose=0, version=None):
        import urllib
        if not version:
            version = config.version
        self.__version = version
        schema, uri = urllib.splittype(uri)
        if schema not in ('http', 'https', 'unix'):
            raise IOError('Unsupported JSON-RPC protocol.')
        if schema == 'unix':
            if not USE_UNIX_SOCKETS:
                # Don't like the "generic" Exception...
                raise UnixSocketMissing("Unix sockets not available.")
            self.__host = uri
            self.__handler = '/'
        else:
            self.__host, self.__handler = urllib.splithost(uri)
            if not self.__handler:
                # BUG FIX: this used to read "self.__handler == '/'" --
                # a no-op comparison -- leaving the handler empty for
                # path-less URIs. Assign the root handler instead.
                self.__handler = '/'
        if transport is None:
            if schema == 'unix':
                transport = UnixTransport()
            elif schema == 'https':
                transport = SafeTransport()
            else:
                transport = Transport()
        self.__transport = transport
        self.__encoding = encoding
        self.__verbose = verbose

    def _request(self, methodname, params, rpcid=None):
        """Send a regular call and return the response's 'result' member."""
        request = dumps(params, methodname, encoding=self.__encoding,
                        rpcid=rpcid, version=self.__version)
        response = self._run_request(request)
        check_for_errors(response)
        return response['result']

    def _request_notify(self, methodname, params, rpcid=None):
        """Send a notification; no result is returned."""
        request = dumps(params, methodname, encoding=self.__encoding,
                        rpcid=rpcid, version=self.__version, notify=True)
        response = self._run_request(request, notify=True)
        check_for_errors(response)
        return

    def _run_request(self, request, notify=None):
        """Ship a serialized request over the transport; return the
        decoded response dict (or None for an empty body)."""
        history.add_request(request)

        response = self.__transport.request(
            self.__host,
            self.__handler,
            request,
            verbose=self.__verbose
        )

        # Here, the XMLRPC library translates a single list
        # response to the single value -- should we do the
        # same, and require a tuple / list to be passed to
        # the response object, or expect the Server to be
        # outputting the response appropriately?

        history.add_response(response)
        if not response:
            return None
        return_obj = loads(response)
        return return_obj

    def __getattr__(self, name):
        # Same as original, just with new _Method reference
        return _Method(self._request, name)

    @property
    def _notify(self):
        # Just like __getattr__, but with notify namespace.
        return _Notify(self._request_notify)
|
||||
|
||||
|
||||
class _Method(XML_Method):
    """xmlrpclib _Method twin enforcing the JSON-RPC argument rules."""

    def __call__(self, *args, **kwargs):
        # The spec allows params as a list (positional) OR an object
        # (keyword), never both at once.
        if len(args) > 0 and len(kwargs) > 0:
            raise ProtocolError(
                'Cannot use both positional and keyword arguments '
                '(according to JSON-RPC spec.)')
        params = args if len(args) > 0 else kwargs
        return self.__send(self.__name, params)

    def __getattr__(self, name):
        # Dotted access builds namespaced method names ("a.b.c").
        return _Method(self.__send, "%s.%s" % (self.__name, name))

    def __repr__(self):
        return '<{} "{}">'.format(self.__class__.__name__, self.__name)

    def __str__(self):
        return repr(self)

    def __dir__(self):
        return self.__dict__.keys()
|
||||
|
||||
|
||||
class _Notify(object):
    # Attribute namespace whose lookups resolve to notification senders
    # (wraps ServerProxy._request_notify).

    def __init__(self, request):
        self._request = request

    def __getattr__(self, name):
        return _Method(self._request, name)
|
||||
|
||||
|
||||
# Batch implementation
|
||||
|
||||
|
||||
class MultiCallMethod(object):
    """One queued call (or notification) inside a batch request."""

    def __init__(self, method, notify=False):
        self.method = method
        self.params = []
        self.notify = notify

    def __call__(self, *args, **kwargs):
        # Positional and keyword params are mutually exclusive in JSON-RPC.
        if len(kwargs) > 0 and len(args) > 0:
            raise ProtocolError('JSON-RPC does not support both ' +
                                'positional and keyword arguments.')
        self.params = kwargs if len(kwargs) > 0 else args

    def request(self, encoding=None, rpcid=None):
        # Batched entries are always serialized as 2.0 requests.
        return dumps(self.params, self.method, version=2.0,
                     encoding=encoding, rpcid=rpcid, notify=self.notify)

    def __repr__(self):
        return '%s' % self.request()

    def __getattr__(self, method):
        # Dotted access extends the namespaced method name in place.
        self.method = '%s.%s' % (self.method, method)
        return self
|
||||
|
||||
|
||||
class MultiCallNotify(object):
    # Attribute namespace that queues notify-flavored jobs on a MultiCall.

    def __init__(self, multicall):
        self.multicall = multicall

    def __getattr__(self, name):
        # Each attribute access creates and queues a notification job.
        new_job = MultiCallMethod(name, notify=True)
        self.multicall._job_list.append(new_job)
        return new_job
|
||||
|
||||
|
||||
class MultiCallIterator(object):
    """Iterates over batch results; every accessed entry is validated by
    check_for_errors() via __getitem__, so faults surface on access."""

    def __init__(self, results):
        self.results = results

    def __iter__(self):
        # BUG FIX: the original ended with "raise StopIteration", which
        # under PEP 479 (Python 3.7+) turns into a RuntimeError inside a
        # generator; simply returning ends the iteration.
        for i in range(0, len(self.results)):
            yield self[i]

    def __getitem__(self, i):
        item = self.results[i]
        check_for_errors(item)
        return item['result']

    def __len__(self):
        return len(self.results)
|
||||
|
||||
|
||||
class MultiCall(object):
    """Collects attribute calls into a job list and sends them as one batch."""

    def __init__(self, server):
        self._server = server
        self._job_list = []

    def _request(self):
        if len(self._job_list) < 1:
            # Should we alert? This /is/ pretty obvious.
            return
        # One JSON array holding every queued request body.
        request_body = '[ {0} ]'.format(
            ','.join(job.request() for job in self._job_list))
        responses = self._server._run_request(request_body)
        del self._job_list[:]
        return MultiCallIterator(responses or [])

    @property
    def _notify(self):
        # Notification namespace: queued jobs carry notify=True.
        return MultiCallNotify(self)

    def __getattr__(self, name):
        job = MultiCallMethod(name)
        self._job_list.append(job)
        return job

    __call__ = _request
|
||||
|
||||
# These lines conform to xmlrpclib's "compatibility" line.
# Not really sure if we should include these, but oh well.
# (Alias so "jsonrpclib.Server" reads like "xmlrpclib.Server".)
Server = ServerProxy
|
||||
|
||||
|
||||
class Fault(object):
    """JSON-RPC error object (fills the role of xmlrpclib.Fault)."""

    def __init__(self, code=-32000, message='Server error', rpcid=None):
        self.faultCode = code
        self.faultString = message
        self.rpcid = rpcid

    def error(self):
        """Return the 'error' member dict for an error response."""
        return {'code': self.faultCode, 'message': self.faultString}

    def response(self, rpcid=None, version=None):
        """Serialize this fault as a complete JSON-RPC error response."""
        version = version or config.version
        if rpcid:
            self.rpcid = rpcid
        return dumps(
            self, methodresponse=True, rpcid=self.rpcid, version=version
        )

    def __repr__(self):
        return '<Fault %s: %s>' % (self.faultCode, self.faultString)
|
||||
|
||||
|
||||
def random_id(length=8):
    """Build a random request id of *length* characters drawn from IDCHARS."""
    return ''.join(random.choice(IDCHARS) for _ in range(length))
|
||||
|
||||
|
||||
class Payload(dict):
    """Factory for JSON-RPC message dicts: requests, notifications,
    success responses and error responses, in either 1.0 or 2.0 shape."""

    def __init__(self, rpcid=None, version=None):
        if not version:
            version = config.version
        # Stored on the instance (not in the dict payload itself).
        self.id = rpcid
        self.version = float(version)

    def request(self, method, params=[]):
        """Build a request dict; generates a random id when none was set."""
        if type(method) not in types.StringTypes:
            raise ValueError('Method name must be a string.')
        if not self.id:
            self.id = random_id()
        request = {'id': self.id, 'method': method}
        if params:
            request['params'] = params
        if self.version >= 2:
            # 2.0 messages must carry the version marker.
            request['jsonrpc'] = str(self.version)
        return request

    def notify(self, method, params=[]):
        """Build a notification: a request with the id removed (2.0)
        or nulled (1.0)."""
        request = self.request(method, params)
        if self.version >= 2:
            del request['id']
        else:
            request['id'] = None
        return request

    def response(self, result=None):
        """Build a success response envelope for *result*."""
        response = {'result': result, 'id': self.id}
        if self.version >= 2:
            response['jsonrpc'] = str(self.version)
        else:
            # 1.0 success responses carry an explicit null error member.
            response['error'] = None
        return response

    def error(self, code=-32000, message='Server error.'):
        """Build an error response envelope with the given code/message."""
        error = self.response()
        if self.version >= 2:
            del error['result']
        else:
            error['result'] = None
        error['error'] = {'code': code, 'message': message}
        return error
|
||||
|
||||
|
||||
def dumps(
        params=[], methodname=None, methodresponse=None,
        encoding=None, rpcid=None, version=None, notify=None):
    """
    This differs from the Python implementation in that it implements
    the rpcid argument since the 2.0 spec requires it for responses.

    Serializes a request (or notification, when notify=True), a method
    response (methodresponse=True, rpcid required) or a Fault.
    """
    if not version:
        version = config.version
    valid_params = (types.TupleType, types.ListType, types.DictType)
    # BUG FIX: the original tested "methodname in types.StringTypes",
    # i.e. membership of the name in the (str, unicode) *tuple of types*,
    # which is never true -- so the params type check below was
    # unreachable. Compare the type of methodname instead.
    if type(methodname) in types.StringTypes and \
            type(params) not in valid_params and \
            not isinstance(params, Fault):
        # If a method, and params are not in a listish or a Fault,
        # error out.
        raise TypeError('Params must be a dict, list, tuple or Fault ' +
                        'instance.')
    # Begin parsing object
    payload = Payload(rpcid=rpcid, version=version)
    if not encoding:
        encoding = 'utf-8'
    if type(params) is Fault:
        response = payload.error(params.faultCode, params.faultString)
        return jdumps(response, encoding=encoding)

    if type(methodname) not in types.StringTypes and \
            methodresponse is not True:
        raise ValueError(
            'Method name must be a string, or methodresponse must '
            'be set to True.')

    if config.use_jsonclass is True:
        # Expand custom class instances into serializable structures.
        from jsonrpclib import jsonclass
        params = jsonclass.dump(params)
    if methodresponse is True:
        if rpcid is None:
            raise ValueError('A method response must have an rpcid.')
        response = payload.response(params)
        return jdumps(response, encoding=encoding)
    request = None
    if notify is True:
        request = payload.notify(methodname, params)
    else:
        request = payload.request(methodname, params)
    return jdumps(request, encoding=encoding)
|
||||
|
||||
|
||||
def loads(data):
    """
    This differs from the Python implementation, in that it returns
    the request structure in Dict format instead of the method, params.
    It will return a list in the case of a batch request / response.
    """
    if data == '':
        # notification: an empty body means there is nothing to decode
        return None
    result = jloads(data)
    # if the above raises an error, the implementing server code
    # should return something like the following:
    # { 'jsonrpc':'2.0', 'error': fault.error(), id: None }
    if config.use_jsonclass is True:
        # Rehydrate any serialized class instances back into objects.
        from jsonrpclib import jsonclass
        result = jsonclass.load(result)
    return result
|
||||
|
||||
|
||||
def check_for_errors(result):
    """Validate a decoded response and raise on protocol/server errors.

    Returns *result* unchanged when it is a notification (falsy) or a
    well-formed response whose error member is absent/null.
    """
    if not result:
        # Notification
        return result

    if not isinstance(result, dict):
        raise TypeError('Response is not a dict.')
    if 'jsonrpc' in result and float(result['jsonrpc']) > 2.0:
        raise NotImplementedError('JSON-RPC version not yet supported.')
    if 'result' not in result and 'error' not in result:
        raise ValueError('Response does not have a result or error key.')
    if result.get('error') is not None:
        err = result['error']
        raise ProtocolError((err['code'], err['message']))
    return result
|
||||
|
||||
|
||||
def isbatch(result):
    """Return True when *result* looks like a JSON-RPC 2.0 batch message."""
    if type(result) not in (types.ListType, types.TupleType):
        return False
    if len(result) < 1:
        return False
    if not isinstance(result[0], dict):
        return False
    if 'jsonrpc' not in result[0].keys():
        # Batches only exist from 2.0 on, so the marker must be present.
        return False
    try:
        version = float(result[0]['jsonrpc'])
    except ValueError:
        raise ProtocolError('"jsonrpc" key must be a float(able) value.')
    if version < 2:
        return False
    return True
|
||||
|
||||
|
||||
def isnotification(request):
    """Return True when *request* is a notification.

    2.0 notifications omit the id entirely; 1.0 notifications set it
    to null.
    """
    return 'id' not in request or request['id'] is None
|
||||
599
script/_Lib/mathutils.js
Normal file
599
script/_Lib/mathutils.js
Normal file
@@ -0,0 +1,599 @@
|
||||
///////////////////////////////////////////////////////////////////////////////////////////////////
|
||||
// Facade to Apache Commons Math
|
||||
///////////////////////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
importClass(java.util.List)
|
||||
importClass(java.lang.Class)
|
||||
|
||||
FastMath = Java.type('org.apache.commons.math3.util.FastMath')
|
||||
Pair = Java.type('org.apache.commons.math3.util.Pair')
|
||||
Complex = Java.type('org.apache.commons.math3.complex.Complex')
|
||||
|
||||
DifferentiableUnivariateFunction = Java.type('org.apache.commons.math3.analysis.DifferentiableUnivariateFunction')
|
||||
Gaussian = Java.type('org.apache.commons.math3.analysis.function.Gaussian')
|
||||
HarmonicOscillator = Java.type('org.apache.commons.math3.analysis.function.HarmonicOscillator')
|
||||
DerivativeStructure = Java.type('org.apache.commons.math3.analysis.differentiation.DerivativeStructure')
|
||||
FiniteDifferencesDifferentiator = Java.type('org.apache.commons.math3.analysis.differentiation.FiniteDifferencesDifferentiator')
|
||||
SimpsonIntegrator = Java.type('org.apache.commons.math3.analysis.integration.SimpsonIntegrator')
|
||||
TrapezoidIntegrator = Java.type('org.apache.commons.math3.analysis.integration.TrapezoidIntegrator')
|
||||
RombergIntegrator = Java.type('org.apache.commons.math3.analysis.integration.RombergIntegrator')
|
||||
MidPointIntegrator = Java.type('org.apache.commons.math3.analysis.integration.MidPointIntegrator')
|
||||
PolynomialFunction = Java.type('org.apache.commons.math3.analysis.polynomials.PolynomialFunction')
|
||||
PolynomialFunctionLagrangeForm = Java.type('org.apache.commons.math3.analysis.polynomials.PolynomialFunctionLagrangeForm')
|
||||
LaguerreSolver = Java.type('org.apache.commons.math3.analysis.solvers.LaguerreSolver')
|
||||
UnivariateFunction = Java.type('org.apache.commons.math3.analysis.UnivariateFunction')
|
||||
SplineInterpolator = Java.type('org.apache.commons.math3.analysis.interpolation.SplineInterpolator')
|
||||
LinearInterpolator = Java.type('org.apache.commons.math3.analysis.interpolation.LinearInterpolator')
|
||||
NevilleInterpolator = Java.type('org.apache.commons.math3.analysis.interpolation.NevilleInterpolator')
|
||||
LoessInterpolator = Java.type('org.apache.commons.math3.analysis.interpolation.LoessInterpolator')
|
||||
DividedDifferenceInterpolator = Java.type('org.apache.commons.math3.analysis.interpolation.DividedDifferenceInterpolator')
|
||||
AkimaSplineInterpolator = Java.type('org.apache.commons.math3.analysis.interpolation.AkimaSplineInterpolator')
|
||||
|
||||
GaussianCurveFitter = Java.type('org.apache.commons.math3.fitting.GaussianCurveFitter')
|
||||
PolynomialCurveFitter = Java.type('org.apache.commons.math3.fitting.PolynomialCurveFitter')
|
||||
HarmonicCurveFitter = Java.type('org.apache.commons.math3.fitting.HarmonicCurveFitter')
|
||||
WeightedObservedPoint = Java.type('org.apache.commons.math3.fitting.WeightedObservedPoint')
|
||||
MultivariateJacobianFunction = Java.type('org.apache.commons.math3.fitting.leastsquares.MultivariateJacobianFunction')
|
||||
LeastSquaresBuilder = Java.type('org.apache.commons.math3.fitting.leastsquares.LeastSquaresBuilder')
|
||||
LevenbergMarquardtOptimizer = Java.type('org.apache.commons.math3.fitting.leastsquares.LevenbergMarquardtOptimizer')
|
||||
GaussNewtonOptimizer = Java.type('org.apache.commons.math3.fitting.leastsquares.GaussNewtonOptimizer')
|
||||
|
||||
SimpleRegression = Java.type('org.apache.commons.math3.stat.regression.SimpleRegression')
|
||||
|
||||
FastFourierTransformer = Java.type('org.apache.commons.math3.transform.FastFourierTransformer')
|
||||
DftNormalization = Java.type('org.apache.commons.math3.transform.DftNormalization')
|
||||
TransformType = Java.type('org.apache.commons.math3.transform.TransformType')
|
||||
|
||||
ArrayRealVector = Java.type('org.apache.commons.math3.linear.ArrayRealVector')
|
||||
Array2DRowRealMatrix = Java.type('org.apache.commons.math3.linear.Array2DRowRealMatrix')
|
||||
MatrixUtils = Java.type('org.apache.commons.math3.linear.MatrixUtils')
|
||||
|
||||
///////////////////////////////////////////////////////////////////////////////////////////////////
|
||||
//Derivative and interpolation
|
||||
///////////////////////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
function get_values(f, xdata){
|
||||
/*
|
||||
Return list of values of a function
|
||||
|
||||
Args:
|
||||
f(UnivariateFunction): function
|
||||
xdata(float array or list): Domain values
|
||||
Returns:
|
||||
List of doubles
|
||||
|
||||
*/
|
||||
v = []
|
||||
for (var x in xdata){
|
||||
v.push(f.value(xdata[x]))
|
||||
}
|
||||
return v
|
||||
}
|
||||
|
||||
function interpolate(data, xdata, interpolation_type){
    /*
    Interpolate data array or list to a UnivariateFunction

    Args:
        data(float array or list): The values to interpolate
        xdata(float array or list, optional): Domain values
        interpolation_type(str , optional): "linear", "cubic", "akima", "neville", "loess", "newton"
    Returns:
        UnivariateDifferentiableFunction object

    */
    if (!is_defined(xdata)) xdata =null
    if (!is_defined(interpolation_type)) interpolation_type ="linear"
    // Default domain: 0, 1, 2, ... matching the data indexes.
    if (xdata == null){
        xdata = range(0, data.length, 1.0)
    }
    if ((data.length != xdata.length) || (data.length<2)){
        throw "Dimension mismatch"
    }
    // Pick the commons-math interpolator matching the requested type.
    if (interpolation_type == "cubic"){
        i = new SplineInterpolator()
    } else if (interpolation_type == "linear"){
        i = new LinearInterpolator()
    } else if (interpolation_type == "akima"){
        i = new AkimaSplineInterpolator()
    } else if (interpolation_type == "neville"){
        i = new NevilleInterpolator()
    } else if (interpolation_type == "loess"){
        i = new LoessInterpolator()
    } else if (interpolation_type == "newton"){
        i = new DividedDifferenceInterpolator()
    }else{
        throw "Invalid interpolation type"
    }
    return i.interpolate(to_array(xdata,'d'), to_array(data,'d'))
}
|
||||
|
||||
function deriv(f, xdata, interpolation_type){
    /*
    Calculate derivative of UnivariateFunction, array or list.

    Args:
        f(UnivariateFunction or array): The function object. If array it is interpolated.
        xdata(float array or list, optional): Domain values to process.
        interpolation_type(str , optional): "linear", "cubic", "akima", "neville", "loess", "newton"
    Returns:
        List with the derivative values for xdata
    */
    if (!is_defined(xdata)) xdata =null
    if (!is_defined(interpolation_type)) interpolation_type ="linear"

    if (! (f instanceof UnivariateFunction)){
        if (xdata == null){
            xdata = range(0, f.length, 1.0)
        }
        f = interpolate(f, xdata, interpolation_type)
    }
    if (xdata == null){
        if (f instanceof DifferentiableUnivariateFunction){
            return f.derivative()
        }
        throw "Domain range not defined"
    }
    d = []
    for (var i in xdata){
        // BUG FIX: evaluate at the domain value xdata[i]. The original
        // passed the for-in loop variable (the element *index*) into
        // DerivativeStructure, so the derivative was taken at 0,1,2,...
        // rather than at the xdata points (compare get_values()).
        var xds = new DerivativeStructure(1, 2, 0, xdata[i])
        var yds = f.value(xds)
        d.push( yds.getPartialDerivative(1))
    }
    return d
}
|
||||
|
||||
function integrate(f, range, xdata, interpolation_type, integrator_type){
    /*
    Integrate UnivariateFunction, array or list in an interval.

    Args:
        f(UnivariateFunction or array): The function object. If array it is interpolated.
        range(list, optional): integration range ([min, max]).
        xdata(float array or list, optional): disregarded if f is UnivariateFunction.
        interpolation_type(str , optional): "linear", "cubic", "akima", "neville", "loess", "newton"
        integrator_type(str , optional): "simpson", "trapezoid", "romberg" or "midpoint"
    Returns:
        Integrated value (Float)
    */
    if (!is_defined(range)) range =null
    if (!is_defined(xdata)) xdata =null
    if (!is_defined(interpolation_type)) interpolation_type ="linear"
    if (!is_defined(integrator_type)) integrator_type ="simpson"

    if (! (f instanceof UnivariateFunction)){
        if (xdata == null){
            // BUG FIX: the "range" parameter shadows the global range()
            // helper, so the original call range(0, f.length, 1.0) invoked
            // the (null) argument and crashed. Build the default index
            // domain inline instead.
            xdata = []
            for (var i = 0; i < f.length; i++){
                xdata.push(i * 1.0)
            }
        }
        if (range == null){
            range = xdata
        }
        f = interpolate(f, xdata, interpolation_type)
    }
    if (range == null){
        throw "Domain range not defined"
    }
    if (integrator_type == "simpson"){
        integrator = new SimpsonIntegrator()
    } else if (integrator_type == "trapezoid"){
        integrator = new TrapezoidIntegrator()
    } else if (integrator_type == "romberg"){
        integrator = new RombergIntegrator()
    } else if (integrator_type == "midpoint"){
        integrator = new MidPointIntegrator()
    } else {
        // BUG FIX: this throw used to sit inside the "midpoint" branch
        // (missing else), so "midpoint" always failed and unknown types
        // fell through with an undefined integrator.
        throw "Invalid integrator type"
    }
    max_eval = 1000000
    lower = Math.min.apply(null, range)
    upper = Math.max.apply(null, range)
    return integrator.integrate(max_eval, f, lower, upper)
}
|
||||
|
||||
///////////////////////////////////////////////////////////////////////////////////////////////////
|
||||
//Fitting and peak search
|
||||
///////////////////////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
MAX_FLOAT = 1.7976931348623157e+308
|
||||
|
||||
MAX_ITERATIONS = 1000
|
||||
MAX_EVALUATIONS = 1000
|
||||
|
||||
function calculate_peaks(func, start_value, end_value, positive){
    /*
    Calculate peaks of a DifferentiableUnivariateFunction in a given range by finding the roots of the derivative

    Args:
        function(DifferentiableUnivariateFunction): The function object.
        start_value(float): start of range
        end_value(float, optional): end of range
        positive (boolean, optional): True for searching positive peaks, False for negative.
    Returns:
        List of peaks in the interval
    */
    if (!is_defined(end_value)) end_value =MAX_FLOAT
    if (!is_defined(positive)) positive =true
    derivative = func.derivative()
    derivative2 = derivative.derivative()
    var peaks = []
    // NOTE(review): LaguerreSolver solves polynomial roots from
    // derivative.coefficients, so this presumably only works when func is
    // a polynomial -- confirm for other function types.
    solver = new LaguerreSolver()
    var ret = solver.solveAllComplex(derivative.coefficients, start_value)
    for (var complex in ret){
        var r = ret[complex].getReal()
        if ((start_value < r) && (r < end_value)){
            // Second-derivative sign separates maxima from minima.
            if ((positive && (derivative2.value(r) < 0)) || ( (!positive) && (derivative2.value(r) > 0)))
                peaks.push(r)
        }
    }
    return peaks
}
|
||||
|
||||
function estimate_peak_indexes(data, xdata, threshold, min_peak_distance, positive){
    /*
    Estimation of peaks in an array by ordering local maxima according to given criteria.

    Args:
        data(float array or list)
        xdata(float array or list, optional): if not null must have the same length as data.
        threshold(float, optional): if specified filter peaks below this value
        min_peak_distance(float, optional): if specified defines minimum distance between two peaks.
            if xdata == null, it represents index counts, otherwise in xdata units.
        positive (boolean, optional): True for searching positive peaks, False for negative.
    Returns:
        List of peaks indexes.
    */
    if (!is_defined(xdata)) xdata =null
    if (!is_defined(threshold)) threshold =null
    if (!is_defined(min_peak_distance)) min_peak_distance =null
    if (!is_defined(positive)) positive =true
    peaks = []
    // Candidate indexes from sort_indexes(); the threshold "break" below
    // presumably relies on them being ordered best-first -- confirm.
    indexes = sort_indexes(data, positive)
    for (var index in indexes){
        first = (indexes[index] == 0)
        last = (indexes[index] == (data.length-1))
        val=data[indexes[index]]
        // NaN sentinels make the boundary comparisons below fail safely.
        prev = first ? Number.NaN : data[indexes[index]-1]
        next = last ? Number.NaN : data[indexes[index]+1]

        if (threshold != null){
            // First candidate beyond the threshold stops the search.
            if ((positive && (val<threshold)) || ((!positive) && (val>threshold)))
                break
        }
        // Keep only local extrema (boundaries count as one-sided extrema).
        if ( ( (positive) && (first || val>prev ) && (last || val>=next ) ) || (
            (!positive) && (first || val<prev ) && (last || val<=next ) ) ) {
            var append = true
            if (min_peak_distance != null){
                // Drop candidates too close to an already accepted peak
                // (index units without xdata, xdata units otherwise).
                for (var peak in peaks){
                    if ( ((xdata == null) && (Math.abs(peaks[peak]-indexes[index]) < min_peak_distance)) ||
                        ((xdata != null) && (Math.abs(xdata[peaks[peak]]-xdata[indexes[index]]) < min_peak_distance)) ){
                        append = false
                        break
                    }
                }
            }
            if (append) peaks.push(indexes[index])
        }
    }
    return peaks
}
|
||||
|
||||
function _assert_valid_for_fit(fy,fx){
|
||||
if ((fy.length<2) || ((fx != null) && (fx.length>fy.length)))
|
||||
throw "Invalid data for fit"
|
||||
}
|
||||
|
||||
function fit_gaussians(fy, fx, peak_indexes){
    /*
    Fits data on multiple gaussians on the given peak indexes.

    Args:
        x(float array or list)
        y(float array or list)
        peak_indexes(list of int)
    Returns:
        List of tuples of gaussian parameters: (normalization, mean, sigma)
    */
    fx = to_array(fx)
    fy = to_array(fy)
    _assert_valid_for_fit(fy,fx)
    ret = []

    minimum = Math.min.apply(null, fy)
    for (var peak in peak_indexes){
        //Copy data
        data = fy.slice(0)
        //Remove data from other peaks, flattening it to the data minimum
        for (var p in peak_indexes){
            // Split point halfway between this peak and the other one.
            limit = Math.floor(Math.round((peak_indexes[p]+peak_indexes[peak])/2))
            if (peak_indexes[p] > peak_indexes[peak]){
                for (var x = limit; x< fy.length; x++){
                    data[x] = minimum
                }
            } else if (peak_indexes[p] < peak_indexes[peak]){
                for (var x = 0; x< limit; x++){
                    data[x] = minimum
                }
            }
        }
        //Build fit point list
        values = create_fit_point_list(data, fx)
        maximum = Math.max.apply(null, data)
        // Seed the fitter at half the amplitude, centered on this peak.
        gaussian_fitter = GaussianCurveFitter.create().withStartPoint([(maximum-minimum)/2,fx[peak_indexes[peak]],1.0]).withMaxIterations(MAX_ITERATIONS)
        //Fit return parameters: (normalization, mean, sigma)
        try{
            ret.push(to_array(gaussian_fitter.fit(values)))
        } catch(ex) {
            ret.push(null) //Fitting error
        }
    }
    return ret
}
|
||||
|
||||
function create_fit_point_list(fy, fx, weights){
    // Build the WeightedObservedPoint list consumed by the commons-math
    // curve fitters; missing weights default every point to weight 1.0.
    if (!is_defined(weights)) weights = null
    values = []
    for (var i = 0; i < fx.length; i++){
        var w = (weights == null) ? 1.0 : weights[i]
        values.push(new WeightedObservedPoint(w, fx[i], fy[i]))
    }
    return values
}
|
||||
|
||||
function fit_polynomial(fy, fx, order, start_point, weights){
    /*
    Fits data into a polynomial.

    Args:
        x(float array or list): observed points x
        y(float array or list): observed points y
        order(int): if start_point is provided order parameter is disregarded - set to len(start_point)-1.
        start_point(optional tuple of float): initial parameters (a0, a1, a2, ...)
        weights(optional float array or list): weight for each observed point
    Returns:
        Tuples of polynomial parameters: (a0, a1, a2, ...)
    */
    if (!is_defined(start_point)) start_point =null
    if (!is_defined(weights)) weights =null
    _assert_valid_for_fit(fy,fx)
    fit_point_list = create_fit_point_list(fy, fx, weights)
    if (start_point == null){
        polynomial_fitter = PolynomialCurveFitter.create(order).withMaxIterations(MAX_ITERATIONS)
    } else {
        // Per the docstring, the degree comes from the start point here,
        // so create()'s argument is irrelevant.
        polynomial_fitter = PolynomialCurveFitter.create(0).withStartPoint(start_point).withMaxIterations(MAX_ITERATIONS)
    }
    try{
        return to_array(polynomial_fitter.fit(fit_point_list))
    } catch(ex) {
        throw "Fitting failure"
    }
}
|
||||
|
||||
function fit_gaussian(fy, fx, start_point, weights){
    /*
    Fits data into a gaussian.

    Args:
        x(float array or list): observed points x
        y(float array or list): observed points y
        start_point(optional tuple of float): initial parameters (normalization, mean, sigma)
        weights(optional float array or list): weight for each observed point
    Returns:
        Tuples of gaussian parameters: (normalization, mean, sigma)
    */
    if (!is_defined(start_point)) start_point =null
    if (!is_defined(weights)) weights =null
    _assert_valid_for_fit(fy,fx)
    fit_point_list = create_fit_point_list(fy, fx, weights)

    //If start point not provided, start on peak
    if (start_point == null){
        peaks = estimate_peak_indexes(fy, fx)
        minimum = Math.min.apply(null, fy)
        maximum = Math.max.apply(null, fy)
        // Seed: half the amplitude, centered on the first estimated peak,
        // sigma 1.0.
        start_point = [(maximum-minimum)/2,fx[peaks[0]],1.0]
    }
    gaussian_fitter = GaussianCurveFitter.create().withStartPoint(start_point).withMaxIterations(MAX_ITERATIONS)
    try{
        return to_array(gaussian_fitter.fit(fit_point_list)) // (normalization, mean, sigma)
    } catch(ex) {
        throw "Fitting failure"
    }
}
|
||||
|
||||
function fit_harmonic(fy, fx, start_point, weights){
    /*
    Fits data into an harmonic.

    Args:
        x(float array or list): observed points x
        y(float array or list): observed points y
        start_point(optional tuple of float): initial parameters (amplitude, angular_frequency, phase)
        weights(optional float array or list): weight for each observed point
    Returns:
        Tuples of harmonic parameters: (amplitude, angular_frequency, phase)
    */
    if (!is_defined(start_point)) start_point =null
    if (!is_defined(weights)) weights =null
    _assert_valid_for_fit(fy,fx)
    fit_point_list = create_fit_point_list(fy, fx, weights)
    if (start_point == null){
        // Without a seed the fitter derives its own start parameters.
        harmonic_fitter = HarmonicCurveFitter.create().withMaxIterations(MAX_ITERATIONS)
    } else {
        harmonic_fitter = HarmonicCurveFitter.create().withStartPoint(start_point).withMaxIterations(MAX_ITERATIONS)
    }
    try{
        return to_array(harmonic_fitter.fit(fit_point_list)) // (amplitude, angular_frequency, phase)
    } catch(ex) {
        throw "Fitting failure"
    }
}
|
||||
///////////////////////////////////////////////////////////////////////////////////////////////////
|
||||
//Least squares problem
|
||||
///////////////////////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
function optimize_least_squares(model, target, initial, weights){
    /*
    Fits a parametric model to observed values with the Levenberg-Marquardt optimizer.

    Args:
        model(MultivariateJacobianFunction): returns (value vector, Jacobian matrix)
        target(float array or list): observed data
        initial(tuple of float): initial parameter guess
        weights(float array, matrix or null): weight for each observed point
    Returns:
        [parameters, residuals, rms, evaluations, iterations]
    */
    //A plain array of weights is converted to the diagonal weight matrix the builder expects
    if (is_array(weights)){
        weights = MatrixUtils.createRealDiagonalMatrix(weights)
    }
    problem = new LeastSquaresBuilder().start(initial).model(model).target(target).lazyEvaluation(false).maxEvaluations(MAX_EVALUATIONS).maxIterations(MAX_ITERATIONS).weight(weights).build()
    optimizer = new LevenbergMarquardtOptimizer()
    optimum = optimizer.optimize(problem)

    parameters=to_array(optimum.getPoint().toArray())
    residuals = to_array(optimum.getResiduals().toArray())
    rms = optimum.getRMS()
    evals = optimum.getEvaluations()
    iters = optimum.getIterations()
    return [parameters, residuals, rms, evals, iters]
}
|
||||
|
||||
///////////////////////////////////////////////////////////////////////////////////////////////////
|
||||
//FFT
|
||||
///////////////////////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
function is_power_of_2(n){
    //Bit trick: a power of two has exactly one bit set, so n & (n-1) clears it to 0
    if (!n) return n
    return (n & (n - 1)) === 0
}
|
||||
|
||||
function bit_length(num) {
    //Number of binary digits of num (length of its base-2 string representation)
    var binary = num.toString(2)
    return binary.length
}
|
||||
|
||||
|
||||
function is_complex(v) {
    //True when v is an Apache Commons Math Complex instance
    return (v instanceof Complex)
}
|
||||
|
||||
|
||||
function pad_to_power_of_two(data){
    /*
    Pads data in place to the next power-of-two length with zero elements.

    Args:
        data(array of float or Complex)
    Returns:
        The same array, possibly extended with zeros.
    */
    if (is_power_of_2(data.length)){
        return data
    }
    pad =(1 << bit_length(data.length)) - data.length
    //Bug fix: numeric data was padded with the one-element array [0.0,] (a Python
    //literal mistakenly kept in JS); the scalar 0.0 is required so the later
    //to_array(data,'d') conversion to a double array succeeds.
    elem = is_complex(data[0]) ? new Complex(0,0) : 0.0
    for (var i=0; i<pad; i++){
        data.push(elem)
    }
    return data
}
|
||||
|
||||
|
||||
function get_real(values){
    /*
    Returns real part of a complex numbers vector.
    Args:
        values: List of complex.
    Returns:
        List of float
    */
    var ret = []
    for (var i = 0; i < values.length; i++){
        ret.push(values[i].getReal())
    }
    return ret
}
|
||||
|
||||
function get_imag(values){
    /*
    Returns imaginary part of a complex numbers vector.
    Args:
        values: List of complex.
    Returns:
        List of float
    */
    var ret = []
    for (var i = 0; i < values.length; i++){
        ret.push(values[i].getImaginary())
    }
    return ret
}
|
||||
|
||||
function get_modulus(values){
    /*
    Returns the modulus of a complex numbers vector.
    Args:
        values: List of complex.
    Returns:
        List of float
    */
    var ret = []
    for (var i = 0; i < values.length; i++){
        var v = values[i]
        ret.push(hypot(v.getImaginary(), v.getReal()))
    }
    return ret
}
|
||||
|
||||
function get_phase(values){
    /*
    Returns the phase (argument) of a complex numbers vector.
    Args:
        values: List of complex.
    Returns:
        List of float, in the range (-pi, pi]
    */
    var ret = []
    for (var c in values){
        //Bug fix: Math.atan(imag/real) divided by zero for purely imaginary values
        //and lost the quadrant for negative real parts; atan2 handles both.
        ret.push(Math.atan2(values[c].getImaginary(), values[c].getReal()))
    }
    return ret
}
|
||||
|
||||
function fft(f){
    /*
    Calculates the Fast Fourier Transform of a vector, padding to the next power of 2 elements.
    Args:
        f: List of float or complex
    Returns:
        List of complex
    */
    f = pad_to_power_of_two(f)
    if (is_complex(f[0])){
        aux = []
        for (var c in f){
            //Bug fix: JS arrays have no 'append' (that is Python) - use push;
            //and Complex must be constructed with 'new'.
            aux.push(new Complex(f[c].getReal(), f[c].getImaginary()))
        }
        f = aux
    } else {
        f = to_array(f,'d')
    }
    fftt = new FastFourierTransformer(DftNormalization.STANDARD)
    var ret = []
    transform = fftt.transform(f,TransformType.FORWARD)
    for (var c in transform){
        ret.push(new Complex(transform[c].getReal(),transform[c].getImaginary()))
    }
    return ret
}
|
||||
|
||||
|
||||
function ffti(f){
    /*
    Calculates the Inverse Fast Fourier Transform of a vector, padding to the next power of 2 elements.
    Args:
        f: List of float or complex
    Returns:
        List of complex
    */
    f = pad_to_power_of_two(f)
    if (is_complex(f[0])){
        aux = []
        for (var c in f){
            //Bug fix: JS arrays have no 'append' (that is Python) - use push;
            //and Complex must be constructed with 'new'.
            aux.push(new Complex(f[c].getReal(), f[c].getImaginary()))
        }
        f = aux
    } else {
        f = to_array(f,'d')
    }
    fftt = new FastFourierTransformer(DftNormalization.STANDARD)
    var ret = []
    transform = fftt.transform(f,TransformType.INVERSE)
    for (var c in transform){
        ret.push(new Complex(transform[c].getReal(),transform[c].getImaginary()))
    }
    return ret
}
|
||||
609
script/_Lib/mathutils.py
Normal file
609
script/_Lib/mathutils.py
Normal file
@@ -0,0 +1,609 @@
|
||||
###################################################################################################
|
||||
# Facade to Apache Commons Math
|
||||
###################################################################################################
|
||||
|
||||
import sys
|
||||
import math
|
||||
import operator
|
||||
|
||||
import java.util.List
|
||||
import java.lang.reflect.Array
|
||||
import java.lang.Class as Class
|
||||
import jarray
|
||||
import org.python.core.PyArray as PyArray
|
||||
import ch.psi.utils.Convert as Convert
|
||||
|
||||
import org.apache.commons.math3.util.FastMath as FastMath
|
||||
import org.apache.commons.math3.util.Pair as Pair
|
||||
import org.apache.commons.math3.complex.Complex as Complex
|
||||
|
||||
import org.apache.commons.math3.analysis.DifferentiableUnivariateFunction as DifferentiableUnivariateFunction
|
||||
import org.apache.commons.math3.analysis.function.Gaussian as Gaussian
|
||||
import org.apache.commons.math3.analysis.function.HarmonicOscillator as HarmonicOscillator
|
||||
import org.apache.commons.math3.analysis.differentiation.DerivativeStructure as DerivativeStructure
|
||||
import org.apache.commons.math3.analysis.differentiation.FiniteDifferencesDifferentiator as FiniteDifferencesDifferentiator
|
||||
import org.apache.commons.math3.analysis.integration.SimpsonIntegrator as SimpsonIntegrator
|
||||
import org.apache.commons.math3.analysis.integration.TrapezoidIntegrator as TrapezoidIntegrator
|
||||
import org.apache.commons.math3.analysis.integration.RombergIntegrator as RombergIntegrator
|
||||
import org.apache.commons.math3.analysis.integration.MidPointIntegrator as MidPointIntegrator
|
||||
import org.apache.commons.math3.analysis.polynomials.PolynomialFunction as PolynomialFunction
|
||||
import org.apache.commons.math3.analysis.polynomials.PolynomialFunctionLagrangeForm as PolynomialFunctionLagrangeForm
|
||||
import org.apache.commons.math3.analysis.solvers.LaguerreSolver as LaguerreSolver
|
||||
import org.apache.commons.math3.analysis.UnivariateFunction as UnivariateFunction
|
||||
import org.apache.commons.math3.analysis.interpolation.SplineInterpolator as SplineInterpolator
|
||||
import org.apache.commons.math3.analysis.interpolation.LinearInterpolator as LinearInterpolator
|
||||
import org.apache.commons.math3.analysis.interpolation.NevilleInterpolator as NevilleInterpolator
|
||||
import org.apache.commons.math3.analysis.interpolation.LoessInterpolator as LoessInterpolator
|
||||
import org.apache.commons.math3.analysis.interpolation.DividedDifferenceInterpolator as DividedDifferenceInterpolator
|
||||
import org.apache.commons.math3.analysis.interpolation.AkimaSplineInterpolator as AkimaSplineInterpolator
|
||||
|
||||
import org.apache.commons.math3.fitting.GaussianCurveFitter as GaussianCurveFitter
|
||||
import org.apache.commons.math3.fitting.PolynomialCurveFitter as PolynomialCurveFitter
|
||||
import org.apache.commons.math3.fitting.HarmonicCurveFitter as HarmonicCurveFitter
|
||||
import org.apache.commons.math3.fitting.WeightedObservedPoint as WeightedObservedPoint
|
||||
import org.apache.commons.math3.fitting.leastsquares.MultivariateJacobianFunction as MultivariateJacobianFunction
|
||||
import org.apache.commons.math3.fitting.leastsquares.LeastSquaresBuilder as LeastSquaresBuilder
|
||||
import org.apache.commons.math3.fitting.leastsquares.LevenbergMarquardtOptimizer as LevenbergMarquardtOptimizer
|
||||
import org.apache.commons.math3.fitting.leastsquares.GaussNewtonOptimizer as GaussNewtonOptimizer
|
||||
|
||||
import org.apache.commons.math3.stat.regression.SimpleRegression as SimpleRegression
|
||||
|
||||
import org.apache.commons.math3.transform.FastFourierTransformer as FastFourierTransformer
|
||||
import org.apache.commons.math3.transform.DftNormalization as DftNormalization
|
||||
import org.apache.commons.math3.transform.TransformType as TransformType
|
||||
|
||||
import org.apache.commons.math3.linear.ArrayRealVector as ArrayRealVector
|
||||
import org.apache.commons.math3.linear.Array2DRowRealMatrix as Array2DRowRealMatrix
|
||||
import org.apache.commons.math3.linear.MatrixUtils as MatrixUtils
|
||||
|
||||
|
||||
|
||||
###################################################################################################
|
||||
#Derivative and interpolation
|
||||
###################################################################################################
|
||||
|
||||
def get_values(f, xdata):
    """Return list of values of a function

    Args:
        f(UnivariateFunction): function
        xdata(float array or list): Domain values
    Returns:
        List of doubles

    """
    return [f.value(x) for x in xdata]
|
||||
|
||||
def interpolate(data, xdata = None, interpolation_type = "linear"):
    """Interpolate data array or list to a UnivariateFunction

    Args:
        data(float array or list): The values to interpolate
        xdata(float array or list, optional): Domain values
        interpolation_type(str , optional): "linear", "cubic", "akima", "neville", "loess", "newton"
    Returns:
        UnivariateDifferentiableFunction object

    """
    if xdata is None:
        from startup import frange
        xdata = frange(0, len(data), 1.0)
    else:
        #Interpolators require x in ascending order
        pairs = sorted(zip(xdata, data), key=operator.itemgetter(0))
        xdata, data = zip(*pairs)
    if len(data) != len(xdata) or len(data) < 2:
        raise Exception("Dimension mismatch")

    #Dispatch table replaces the if/elif chain
    factories = {
        "cubic":   SplineInterpolator,
        "linear":  LinearInterpolator,
        "akima":   AkimaSplineInterpolator,
        "neville": NevilleInterpolator,
        "loess":   LoessInterpolator,
        "newton":  DividedDifferenceInterpolator,
    }
    if interpolation_type not in factories:
        raise Exception("Invalid interpolation type")
    i = factories[interpolation_type]()
    from startup import to_array
    return i.interpolate(to_array(xdata,'d'), to_array(data,'d'))
|
||||
|
||||
def deriv(f, xdata = None, interpolation_type = "linear"):
    """Calculate derivative of UnivariateFunction, array or list.

    Args:
        f(UnivariateFunction or array): The function object. If array it is interpolated.
        xdata(float array or list, optional): Domain values to process.
        interpolation_type(str , optional): "linear", "cubic", "akima", "neville", "loess", "newton"
    Returns:
        List with the derivative values for xdata, or a derivative function
        when f is differentiable and xdata is None.
    Raises:
        Exception: if no domain range can be determined.
    """
    #Raw data must first be interpolated into a function before differentiating
    if not isinstance(f,UnivariateFunction):
        if xdata is None:
            from startup import frange
            xdata = frange(0, len(f), 1.0)
        f = interpolate(f, xdata, interpolation_type)
    if xdata is None:
        #Without a domain we can only hand back the symbolic derivative function
        if isinstance(f,DifferentiableUnivariateFunction):
            return f.derivative()
        raise Exception("Domain range not defined")
    d = []
    for x in xdata:
        #DerivativeStructure(free parameters, order, parameter index, value)
        xds = DerivativeStructure(1, 2, 0, x)
        yds = f.value(xds)
        #First-order partial derivative at x
        d.append( yds.getPartialDerivative(1))
    return d
|
||||
|
||||
def integrate(f, range = None, xdata = None, interpolation_type = "linear", integrator_type = "simpson"):
    """Integrate UnivariateFunction, array or list in an interval.

    Args:
        f(UnivariateFunction or array): The function object. If array it is interpolated.
        range(list, optional): integration range ([min, max]).
        xdata(float array or list, optional): disregarded if f is UnivariateFunction.
        interpolation_type(str , optional): "linear", "cubic", "akima", "neville", "loess", "newton"
        integrator_type(str , optional): "simpson", "trapezoid", "romberg" or "midpoint"
    Returns:
        Integrated value (Float)
    Raises:
        Exception: if the domain range is undefined or integrator_type is unknown.
    """
    if not isinstance(f, UnivariateFunction):
        from startup import frange
        if xdata is None:
            xdata = frange(0, len(f), 1.0)
        if range is None:
            range = xdata
        f = interpolate(f, xdata, interpolation_type)
    if range is None:
        raise Exception("Domain range not defined")
    if integrator_type == "simpson":
        integrator = SimpsonIntegrator()
    elif integrator_type == "trapezoid":
        integrator = TrapezoidIntegrator()
    elif integrator_type == "romberg":
        integrator = RombergIntegrator()
    elif integrator_type == "midpoint":
        integrator = MidPointIntegrator()
    else:
        #Bug fix: this raise was unconditional (no 'else'), so every call failed
        #even after a valid integrator had been selected. Mirrors the
        #'Invalid interpolation type' handling in interpolate().
        raise Exception("Invalid integrator type")
    lower = min(range)
    upper = max(range)
    return integrator.integrate(MAX_EVALUATIONS, f, lower, upper)
|
||||
|
||||
def trapz(y, xdata=None):
    """Integrate an array or list using the composite trapezoidal rule.

    Args:
        y(array or list): values to integrate
        xdata(float array or list, optional): domain values; index units when omitted
    Returns:
        Integrated value (Float)
    """
    #Thin wrapper: linear interpolation + trapezoid integrator over the full domain
    return integrate(y, range = None, xdata = xdata, interpolation_type = "linear", integrator_type = "trapezoid")
|
||||
|
||||
###################################################################################################
|
||||
#Fitting and peak search
|
||||
###################################################################################################
|
||||
|
||||
#Numeric limits and fitting iteration caps used throughout this module.
try:
    MAX_FLOAT = sys.float_info.max
except AttributeError:  # Python < 2.6 (e.g. Jython 2.5) has no sys.float_info
    #Narrowed from a bare 'except:' - only the missing attribute should be tolerated
    MAX_FLOAT = 1.7976931348623157e+308

MAX_ITERATIONS = 1000
MAX_EVALUATIONS = 1000000
|
||||
|
||||
def calculate_peaks(function, start_value, end_value = MAX_FLOAT, positive=True):
    """Calculate peaks of a DifferentiableUnivariateFunction in a given range by finding the roots of the derivative

    Args:
        function(DifferentiableUnivariateFunction): The function object.
        start_value(float): start of range
        end_value(float, optional): end of range
        positive (boolean, optional): True for searching positive peaks, False for negative.
    Returns:
        List of peaks in the interval

    """
    derivative = function.derivative()
    #Second derivative sign distinguishes maxima (negative) from minima (positive)
    derivative2 = derivative.derivative()
    ret = []
    solver = LaguerreSolver()
    #Loop variable renamed from 'complex': it shadowed the builtin complex type
    for root in solver.solveAllComplex(derivative.coefficients, start_value):
        r = root.real
        if start_value < r < end_value:
            if (positive and (derivative2.value(r) < 0)) or ( (not positive) and (derivative2.value(r) > 0)):
                ret.append(r)
    return ret
|
||||
|
||||
|
||||
def estimate_peak_indexes(data, xdata = None, threshold = None, min_peak_distance = None, positive = True):
    """Estimation of peaks in an array by ordering local maxima according to given criteria.

    Args:
        data(float array or list)
        xdata(float array or list, optional): if not None must have the same length as data.
        threshold(float, optional): if specified filter peaks below this value
        min_peak_distance(float, optional): if specified defines minimum distance between two peaks.
            if xdata == None, it represents index counts, otherwise in xdata units.
        positive (boolean, optional): True for searching positive peaks, False for negative.
    Returns:
        List of peaks indexes, strongest first.
    """
    peaks = []
    #Visit indexes in order of magnitude: ascending, reversed for positive peaks
    indexes = sorted(range(len(data)),key=lambda x:data[x])
    if positive:
        indexes = reversed(indexes)
    for index in indexes:
        first = (index == 0)
        last = (index == (len(data)-1))
        val=data[index]
        #NaN neighbours at the borders make every comparison below False there
        prev = float('NaN') if first else data[index-1]
        next = float('NaN') if last else data[index+1]

        if threshold is not None:
            #Values come in magnitude order, so the first one past the threshold ends the search
            if (positive and (val<threshold)) or ((not positive) and (val>threshold)):
                break
        #Local extremum test; '>=' on one side breaks ties on flat tops
        if ( positive and (first or val>prev ) and (last or val>=next ) ) or (
            (not positive) and (first or val<prev ) and (last or val<=next ) ):
            append = True
            if min_peak_distance is not None:
                #Reject candidates too close to an already accepted (stronger) peak
                for peak in peaks:
                    if ((xdata is None) and (abs(peak-index) < min_peak_distance)) or (
                        (xdata is not None) and (abs(xdata[peak]-xdata[index]) < min_peak_distance)):
                        append = False
                        break
            if append:
                peaks.append(index)
    return peaks
|
||||
|
||||
def _assert_valid_for_fit(y,x):
|
||||
if len(y)<2 or ((x is not None) and (len(x)>len(y))):
|
||||
raise Exception("Invalid data for fit")
|
||||
|
||||
def fit_gaussians(y, x, peak_indexes):
    """Fits data on multiple gaussians on the given peak indexes.

    Args:
        x(float array or list)
        y(float array or list)
        peak_indexes(list of int)
    Returns:
        List of tuples of gaussian parameters: (normalization, mean, sigma);
        None replaces the entry of any peak whose fit failed.
    """
    _assert_valid_for_fit(y,x)
    ret = []

    minimum = min(y)
    for peak in peak_indexes:
        #Copy data
        data = y[:]
        #Remover data from other peaks: flatten everything beyond the midpoint
        #between this peak and each neighbouring peak down to the data minimum
        for p in peak_indexes:
            limit = int(round((p+peak)/2))
            if (p > peak):
                data[limit : len(y)] =[minimum] * (len(y)-limit)
            elif (p < peak):
                data[0:limit] = [minimum] *limit
        #Build fit point list
        values = create_fit_point_list(data, x)
        maximum = max(data)
        #Start point: half peak height, centered on the peak, unit sigma
        gaussian_fitter = GaussianCurveFitter.create().withStartPoint([(maximum-minimum)/2,x[peak],1.0]).withMaxIterations(MAX_ITERATIONS)
        #Fit return parameters: (normalization, mean, sigma)
        try:
            ret.append(gaussian_fitter.fit(values).tolist())
        except:
            ret.append(None) #Fitting error
    return ret
|
||||
|
||||
|
||||
def create_fit_point_list(y, x, weights = None):
    """Build the WeightedObservedPoint list consumed by the Commons Math fitters.

    Args:
        y(float array or list): observed values
        x(float array or list): domain values
        weights(float array or list, optional): weight per point; 1.0 when omitted
    Returns:
        List of WeightedObservedPoint, ordered by ascending x
    """
    values = []
    for i in sorted(range(len(x)),key=lambda v:x[v]): #Creating list ordered by x, needed for gauss fit
        if weights is None:
            values.append(WeightedObservedPoint(1.0, x[i], y[i]))
        else:
            values.append(WeightedObservedPoint(weights[i], x[i], y[i]))
    return values
|
||||
|
||||
def fit_polynomial(y, x, order, start_point = None, weights = None):
    """Fits data into a polynomial.

    Args:
        x(float array or list): observed points x
        y(float array or list): observed points y
        order(int): if start_point is provided order parameter is disregarded - set to len(start_point)-1.
        start_point(optional tuple of float): initial parameters (a0, a1, a2, ...)
        weights(optional float array or list): weight for each observed point
    Returns:
        Tuples of polynomial parameters: (a0, a1, a2, ...)
    Raises:
        Exception: if the data is invalid for fitting or the fit fails.
    """
    _assert_valid_for_fit(y,x)
    fit_point_list = create_fit_point_list(y, x, weights)
    if start_point is None:
        polynomial_fitter = PolynomialCurveFitter.create(order).withMaxIterations(MAX_ITERATIONS)
    else:
        #Degree argument (0) is irrelevant here: the start point fixes the parameter count
        polynomial_fitter = PolynomialCurveFitter.create(0).withStartPoint(start_point).withMaxIterations(MAX_ITERATIONS)
    try:
        return polynomial_fitter.fit(fit_point_list).tolist()
    except:
        raise Exception("Fitting failure")
|
||||
|
||||
def fit_gaussian(y, x, start_point = None, weights = None):
    """Fits data into a gaussian.

    Args:
        x(float array or list): observed points x
        y(float array or list): observed points y
        start_point(optional tuple of float): initial parameters (normalization, mean, sigma)
            If None, use a custom initial estimation.
            Set to "simple" for a half-height / unit-sigma guess.
            Set to "default" to force Commons.Math the default (GaussianCurveFitter.ParameterGuesser).
        weights(optional float array or list): weight for each observed point
    Returns:
        Tuples of gaussian parameters: (normalization, mean, sigma)
    Raises:
        Exception: if the data is invalid for fitting or the fit fails.
    """
    _assert_valid_for_fit(y,x)
    fit_point_list = create_fit_point_list(y, x, weights)

    #If start point not provided, start on peak
    if start_point is None:
        maximum, minimum = max(y), min(y)
        norm = maximum - minimum
        mean = x[y.index(maximum)]
        #Gaussian area is norm*sigma*sqrt(2*pi): estimate sigma from the baseline-subtracted integral
        sigma = trapz([v-minimum for v in y], x) / (norm*math.sqrt(2*math.pi))
        start_point = (norm, mean, sigma)
    elif start_point == "simple":
        start_point = [(max(y)-min(y))/2, x[y.index(max(y))], 1.0]
    elif start_point == "default":
        start_point = GaussianCurveFitter.ParameterGuesser(fit_point_list).guess().tolist()
    gaussian_fitter = GaussianCurveFitter.create().withStartPoint(start_point).withMaxIterations(MAX_ITERATIONS)
    try:
        return gaussian_fitter.fit(fit_point_list).tolist() # (normalization, mean, sigma)
    except:
        raise Exception("Fitting failure")
|
||||
|
||||
def fit_harmonic(y, x, start_point = None, weights = None):
    """Fits data into an harmonic.

    Args:
        x(float array or list): observed points x
        y(float array or list): observed points y
        start_point(optional tuple of float): initial parameters (amplitude, angular_frequency, phase)
        weights(optional float array or list): weight for each observed point
    Returns:
        Tuples of harmonic parameters: (amplitude, angular_frequency, phase)
    Raises:
        Exception: if the data is invalid for fitting or the fit fails.
    """
    _assert_valid_for_fit(y,x)
    fit_point_list = create_fit_point_list(y, x, weights)
    if start_point is None:
        #Without a start point the fitter estimates its own initial guess
        harmonic_fitter = HarmonicCurveFitter.create().withMaxIterations(MAX_ITERATIONS)
    else:
        harmonic_fitter = HarmonicCurveFitter.create().withStartPoint(start_point).withMaxIterations(MAX_ITERATIONS)
    try:
        return harmonic_fitter.fit(fit_point_list).tolist() # (amplitude, angular_frequency, phase)
    except Exception:
        #Narrowed from a bare 'except:' so KeyboardInterrupt/SystemExit still propagate
        raise Exception("Fitting failure")
|
||||
|
||||
|
||||
def fit_gaussian_offset(y, x, start_point = None, weights = None):
    """Fits data into a gaussian with offset (constant background).
    f(x) = a + b * exp(-(pow((x - c), 2) / (2 * pow(d, 2))))

    Args:
        x(float array or list): observed points x
        y(float array or list): observed points y
        start_point(optional tuple of float): initial parameters (offset, normalization, mean, sigma)
        weights(optional float array or list): weight for each observed point
    Returns:
        Tuples of gaussian parameters: (offset, normalization, mean, sigma)
    """

    # For normalised gauss curve sigma=1/(amp*sqrt(2*pi))
    if start_point is None:
        off = min(y) # good enough starting point for offset
        #Center-of-mass guess: x position of the maximum
        com = x[y.index(max(y))]
        amp = max(y) - off
        #Sigma estimated from the baseline-subtracted integral
        sigma = trapz([v-off for v in y], x) / (amp*math.sqrt(2*math.pi))
        start_point = [off, amp, com , sigma]

    #Model providing both the value vector and the analytic Jacobian for the optimizer
    class Model(MultivariateJacobianFunction):
        def value(self, variables):
            value = ArrayRealVector(len(x))
            jacobian = Array2DRowRealMatrix(len(x), 4)
            for i in range(len(x)):
                (a,b,c,d) = (variables.getEntry(0), variables.getEntry(1), variables.getEntry(2), variables.getEntry(3))
                v = math.exp(-(math.pow((x[i] - c), 2) / (2 * math.pow(d, 2))))
                model = a + b * v
                value.setEntry(i, model)
                jacobian.setEntry(i, 0, 1) # derivative with respect to p0 = a
                jacobian.setEntry(i, 1, v) # derivative with respect to p1 = b
                v2 = b*v*((x[i] - c)/math.pow(d, 2))
                jacobian.setEntry(i, 2, v2) # derivative with respect to p2 = c
                jacobian.setEntry(i, 3, v2*(x[i] - c)/d ) # derivative with respect to p3 = d
            return Pair(value, jacobian)

    model = Model()
    target = [v for v in y] #the target is to have all points at the positions
    (parameters, residuals, rms, evals, iters) = optimize_least_squares(model, target, start_point, weights)
    return parameters
|
||||
|
||||
|
||||
def fit_gaussian_linear(y, x, start_point = None, weights = None):
    """Fits data into a gaussian with linear background.
    f(x) = a * x + b + c * exp(-(pow((x - d), 2) / (2 * pow(e, 2))))

    Args:
        x(float array or list): observed points x
        y(float array or list): observed points y
        start_point(optional tuple of float): initial parameters (slope, offset, normalization, mean, sigma)
        weights(optional float array or list): weight for each observed point
    Returns:
        Tuples of gaussian parameters: (slope, offset, normalization, mean, sigma)
    """

    # For normalised gauss curve sigma=1/(amp*sqrt(2*pi))
    if start_point is None:
        off = min(y) # good enough starting point for offset
        #Center-of-mass guess: x position of the maximum
        com = x[y.index(max(y))]
        amp = max(y) - off
        #Sigma estimated from the baseline-subtracted integral; initial slope 0
        sigma = trapz([v-off for v in y], x) / (amp*math.sqrt(2*math.pi))
        start_point = [0, off, amp, com, sigma]

    #Model providing both the value vector and the analytic Jacobian for the optimizer
    class Model(MultivariateJacobianFunction):
        def value(self, variables):
            value = ArrayRealVector(len(x))
            jacobian = Array2DRowRealMatrix(len(x), 5)
            for i in range(len(x)):
                (a,b,c,d,e) = (variables.getEntry(0), variables.getEntry(1), variables.getEntry(2), variables.getEntry(3), variables.getEntry(4))
                v = math.exp(-(math.pow((x[i] - d), 2) / (2 * math.pow(e, 2))))
                model = a*x[i] + b + c * v
                value.setEntry(i, model)
                jacobian.setEntry(i, 0, x[i]) # derivative with respect to p0 = a
                jacobian.setEntry(i, 1, 1) # derivative with respect to p1 = b
                jacobian.setEntry(i, 2, v) # derivative with respect to p2 = c
                v2 = c*v*((x[i] - d)/math.pow(e, 2))
                jacobian.setEntry(i, 3, v2) # derivative with respect to p3 = d
                jacobian.setEntry(i, 4, v2*(x[i] - d)/e ) # derivative with respect to p4 = e
            return Pair(value, jacobian)

    model = Model()
    target = [v for v in y] #the target is to have all points at the positions
    (parameters, residuals, rms, evals, iters) = optimize_least_squares(model, target, start_point, weights)
    return parameters
|
||||
|
||||
###################################################################################################
|
||||
#Least squares problem
|
||||
###################################################################################################
|
||||
|
||||
def optimize_least_squares(model, target, initial, weights):
    """Fits a parametric model to a set of observed values by minimizing a cost function.

    Args:
        model(MultivariateJacobianFunction): model returning (value vector, Jacobian matrix)
        target(float array or list): observed data
        initial(optional tuple of float): initial guess
        weights(optional float array, list or matrix): weight for each observed point
    Returns:
        Tuple: (parameters, residuals, rms, evaluations, iterations)
    """
    #A plain sequence of weights is converted into the diagonal weight matrix the builder expects
    if isinstance(weights,tuple) or isinstance(weights,list):
        weights = MatrixUtils.createRealDiagonalMatrix(weights)
    problem = LeastSquaresBuilder().start(initial).model(model).target(target).lazyEvaluation(False).maxEvaluations(MAX_EVALUATIONS).maxIterations(MAX_ITERATIONS).weight(weights).build()
    optimizer = LevenbergMarquardtOptimizer()
    optimum = optimizer.optimize(problem)

    parameters=optimum.getPoint().toArray().tolist()
    residuals = optimum.getResiduals().toArray().tolist()
    rms = optimum.getRMS()
    evals = optimum.getEvaluations()
    iters = optimum.getIterations()
    return (parameters, residuals, rms, evals, iters)
|
||||
|
||||
|
||||
###################################################################################################
|
||||
#FFT
|
||||
###################################################################################################
|
||||
|
||||
def is_power(num, base):
    """Return True if num is an integer power of base.

    Args:
        num(int): candidate value
        base(int): base to test against
    Returns:
        Boolean
    """
    #Bug fix: math.log is undefined for num <= 0 (is_power(0, 2) raised ValueError);
    #non-positive numbers can never be a power of a positive base.
    if num <= 0:
        return False
    if base<=1: return num == 1
    #Round the log to the nearest integer, then verify exactly (log alone is inexact)
    power = int (math.log (num, base) + 0.5)
    return base ** power == num
|
||||
|
||||
def pad_to_power_of_two(data):
    """Pad a list with zeros up to the next power-of-two length.

    Args:
        data(list of float or complex)
    Returns:
        A list whose length is a power of two (data itself if already so, or empty).
    """
    n = len(data)
    if n == 0:
        #Guard: empty input previously crashed inside the power-of-two check
        return data
    if n & (n - 1) == 0:  #bit trick: power of two has a single set bit
        return data
    pad = (1 << n.bit_length()) - n
    #Bug fix: the complex branch built 'complex(0,0) * pad' (complex multiplication,
    #then 'list + complex' TypeError). The padding ELEMENT must be replicated instead.
    elem = complex(0, 0) if type(data[0]) is complex else 0.0
    return data + [elem] * pad
|
||||
|
||||
def get_real(values):
    """Returns real part of a complex numbers vector.
    Args:
        values: List of complex.
    Returns:
        List of float
    """
    return [c.real for c in values]
|
||||
|
||||
def get_imag(values):
    """Returns imaginary part of a complex numbers vector.
    Args:
        values: List of complex.
    Returns:
        List of float
    """
    return [c.imag for c in values]
|
||||
|
||||
def get_modulus(values):
    """Returns the modulus of a complex numbers vector.
    Args:
        values: List of complex.
    Returns:
        List of float
    """
    return [math.hypot(c.imag, c.real) for c in values]
|
||||
|
||||
def get_phase(values):
    """Returns the phase (argument) of a complex numbers vector.
    Args:
        values: List of complex.
    Returns:
        List of float, in the range (-pi, pi]
    """
    #Bug fix: math.atan(c.imag/c.real) raised ZeroDivisionError for purely imaginary
    #values and lost the quadrant for negative real parts; atan2 handles both.
    return [math.atan2(c.imag, c.real) for c in values]
|
||||
|
||||
def fft(f):
    """Calculates the Fast Fourier Transform of a vector, padding to the next power of 2 elements.
    Args:
        f: List of float or complex
    Returns:
        List of (Python) complex
    """
    f = pad_to_power_of_two(f)
    #Python complex values must be converted to Commons Math Complex objects
    if type(f[0]) is complex:
        aux = []
        for c in f:
            aux.append(Complex(c.real, c.imag))
        f = aux
    fftt = FastFourierTransformer(DftNormalization.STANDARD)
    ret = []
    for c in fftt.transform(f,TransformType.FORWARD ):
        #Convert back from Commons Math Complex to Python complex
        ret.append(complex(c.getReal(),c.getImaginary()))
    return ret
|
||||
|
||||
def ffti(f):
    """Calculates the Inverse Fast Fourier Transform of a vector, padding to the next power of 2 elements.
    Args:
        f: List of float or complex
    Returns:
        List of (Python) complex
    """
    f = pad_to_power_of_two(f)
    #Python complex values must be converted to Commons Math Complex objects
    if type(f[0]) is complex:
        aux = []
        for c in f:
            aux.append(Complex(c.real, c.imag))
        f = aux
    fftt = FastFourierTransformer(DftNormalization.STANDARD)
    ret = []
    for c in fftt.transform(f,TransformType.INVERSE ):
        #Convert back from Commons Math Complex to Python complex
        ret.append(complex(c.getReal(),c.getImaginary()))
    return ret
|
||||
116
script/_Lib/plotutils.js
Normal file
116
script/_Lib/plotutils.js
Normal file
@@ -0,0 +1,116 @@
|
||||
///////////////////////////////////////////////////////////////////////////////////////////////////
|
||||
// Plot utilities
|
||||
///////////////////////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
|
||||
function plot_function(plot, func, name, range, show_points, show_lines, color){
    /*
    Plots a function to a plot by evaluating it at each x value in 'range'.

    Args:
        plot(LinePlot)
        func(UnivariateFunction): Gaussian, PolynomialFunction, HarmonicOscillator...
        name(str): name of the series
        range(list or array of floats): x values to plot
        show_points(boolean, optional): defaults to true
        show_lines(boolean, optional): defaults to true
        color(optional): series color; defaults to null (automatic)
    Returns:
        The created series (LinePlotErrorSeries for error-style plots, else LinePlotSeries)
    */
    if (!is_defined(show_points)) show_points =true
    if (!is_defined(show_lines)) show_lines = true
    if (!is_defined(color)) color = null

    // Error-style plots need an error series so error bars can be attached.
    if (plot.getStyle().isError()){
        s = new LinePlotErrorSeries(name, color)
    }
    else{
        s = new LinePlotSeries(name, color)
    }
    plot.addSeries(s)
    s.setPointsVisible(show_points)
    s.setLinesVisible(show_lines)
    // for-in yields the indices of 'range', hence the range[x] lookups.
    for (var x in range){
        s.appendData(range[x], func.value(range[x]))
    }
    return s
}
|
||||
|
||||
function plot_point(plot, x, y, size, color, name){
    // Marks a single (x, y) position on the plot with a point marker;
    // returns the created series.
    size = is_defined(size) ? size : 3
    name = is_defined(name) ? name : "Point"
    color = is_defined(color) ? color : null
    s = new LinePlotSeries(name, color)
    plot.addSeries(s)
    s.setPointSize(size)
    s.appendData(x, y)
    return s
}
|
||||
|
||||
function plot_line(plot, x1, y1, x2, y2, width, color, name){
    // Draws a straight segment from (x1, y1) to (x2, y2); returns the series.
    width = is_defined(width) ? width : 1
    name = is_defined(name) ? name : "Line"
    color = is_defined(color) ? color : null
    s = new LinePlotSeries(name, color)
    plot.addSeries(s)
    s.setLineWidth(width)
    s.setPointsVisible(false)
    s.appendData(x1, y1)
    s.appendData(x2, y2)
    return s
}
|
||||
|
||||
function plot_cross(plot, x, y, size, width, color, name){
    // Draws a "+" marker centered at (x, y); the NaN points break the
    // horizontal and vertical strokes apart so they render as two segments.
    size = is_defined(size) ? size : 1
    width = is_defined(width) ? width : 1
    color = is_defined(color) ? color : null
    name = is_defined(name) ? name : "Cross"
    var half = size / 2
    s = new LinePlotSeries(name, color)
    plot.addSeries(s)
    s.setLineWidth(width)
    s.setPointsVisible(false)
    s.appendData(NaN, NaN)
    s.appendData(x - half, y)
    s.appendData(x + half, y)
    s.appendData(NaN, NaN)
    s.appendData(x, y - half)
    s.appendData(x, y + half)
    return s
}
|
||||
|
||||
function plot_rectangle(plot, x1, y1, x2, y2, width, color, name){
    // Traces the axis-aligned rectangle with opposite corners (x1, y1) and
    // (x2, y2), closing the outline by returning to the starting corner.
    width = is_defined(width) ? width : 1
    name = is_defined(name) ? name : "Rectangle"
    color = is_defined(color) ? color : null
    s = new LinePlotSeries(name, color)
    plot.addSeries(s)
    s.setLineWidth(width)
    s.setPointsVisible(false)
    var xs = [x1, x1, x2, x2, x1]
    var ys = [y1, y2, y2, y1, y1]
    for (var i = 0; i < xs.length; i++){
        s.appendData(xs[i], ys[i])
    }
    return s
}
|
||||
|
||||
function plot_circle(plot, cx, cy, radius, width, color, name){
    // Approximates a circle of the given radius centered at (cx, cy) by
    // sampling y = cy +/- sqrt(r^2 - (x - cx)^2) over both half circles.
    if (!is_defined(width)) width = 1
    if (!is_defined(name)) name = "Circle"
    if (!is_defined(color)) color = null
    s = new LinePlotSeries(name, color)
    plot.addSeries(s)
    s.setLineWidth(width)
    s.setPointsVisible(false)
    // Step size: ~100 samples per half circle.
    res=radius / 100.0
    // "epson" (epsilon): keeps x strictly inside [cx-r, cx+r] so sqrt() never
    // receives a negative argument from rounding at the endpoints.
    epson = 1e-12
    // Upper half: sweep x right-to-left.
    for (var xp = cx+radius-epson ; xp >= ( cx-radius+epson) ; xp-=res){
        yp = Math.sqrt(Math.pow(radius, 2) - Math.pow(xp - cx, 2)) + cy
        s.appendData(xp, yp)
    }
    // Lower half: sweep x left-to-right.
    for (var xp = cx-radius+epson ; xp <= ( cx+radius-epson) ; xp+=res){
        yp = -Math.sqrt(Math.pow(radius, 2) - Math.pow(xp - cx, 2)) + cy
        s.appendData(xp, yp)
    }
    // Close the outline by repeating the first sampled point.
    if (s.getCount()>0)
        s.appendData(s.getX()[0], s.getY()[0])
    return s
}
|
||||
119
script/_Lib/plotutils.py
Normal file
119
script/_Lib/plotutils.py
Normal file
@@ -0,0 +1,119 @@
|
||||
###################################################################################################
|
||||
# Plot utilities
|
||||
###################################################################################################
|
||||
|
||||
import ch.psi.pshell.plot.LinePlotSeries as LinePlotSeries
|
||||
import ch.psi.pshell.plot.LinePlotErrorSeries as LinePlotErrorSeries
|
||||
import math
|
||||
from startup import frange, to_array
|
||||
|
||||
def plot_function(plot, function, name, range, show_points = True, show_lines = True, color = None):
    """Plots a function to a plot by evaluating it at each x value in `range`.

    Args:
        plot(LinePlot)
        function(UnivariateFunction): Gaussian, PolynomialFunction, HarmonicOscillator...
        name(str): name of the series
        range(list or array of floats): x values to plot
        show_points(bool, optional): whether point markers are drawn
        show_lines(bool, optional): whether connecting lines are drawn
        color(optional): series color, or None for automatic

    Returns:
        The created series (LinePlotErrorSeries for error-style plots, else LinePlotSeries)
    """
    # Error-style plots need an error series so error bars can be attached.
    if plot.style.isError():
        s = LinePlotErrorSeries(name, color)
    else:
        s = LinePlotSeries(name, color)
    plot.addSeries(s)
    s.setPointsVisible(show_points)
    s.setLinesVisible(show_lines)
    # NOTE(review): parameter `range` shadows the builtin; kept for interface compatibility.
    for x in range:
        s.appendData(x, function.value(x))
    return s
|
||||
|
||||
def plot_data(plot, data, name, xdata = None, error = None, show_points = True, show_lines = True, color = None):
    """Plots a subscriptable object to a plot.

    Args:
        plot(LinePlot)
        data(subscriptable): Y data
        name(str): name of the series
        xdata(subscriptable): X data; defaults to the indices 0..len(data)-1
        error(subscriptable): Error data (only for error plots)
        show_points(bool, optional): whether point markers are drawn
        show_lines(bool, optional): whether connecting lines are drawn
        color(optional): series color, or None for automatic

    Returns:
        The created series (LinePlotErrorSeries for error-style plots, else LinePlotSeries)
    """
    # Error-style plots need an error series so error bars can be attached.
    if plot.style.isError():
        s = LinePlotErrorSeries(name, color)
    else:
        s = LinePlotSeries(name, color)
    plot.addSeries(s)
    s.setPointsVisible(show_points)
    s.setLinesVisible(show_lines)
    if xdata is None:
        xdata = range(len(data))
    # Convert to double arrays for the plotting API.
    xdata = to_array(xdata, 'd')
    data = to_array(data, 'd')
    if plot.style.isError():
        # NOTE(review): assumes `error` is not None for error-style plots — TODO confirm callers.
        error = to_array(error, 'd')
        s.setData(xdata, data, error)
    else:
        s.setData(xdata, data)
    return s
|
||||
|
||||
def plot_point(plot, x, y, size = 3, color = None, name = "Point"):
    """Adds a single point marker at (x, y) and returns the created series."""
    series = LinePlotSeries(name, color)
    plot.addSeries(series)
    series.setPointSize(size)
    series.appendData(x, y)
    return series
|
||||
|
||||
def plot_line(plot, x1, y1, x2, y2, width = 1, color = None, name = "Line"):
    """Draws a straight segment from (x1, y1) to (x2, y2) and returns the series."""
    series = LinePlotSeries(name, color)
    plot.addSeries(series)
    series.setLineWidth(width)
    series.setPointsVisible(False)
    for px, py in ((x1, y1), (x2, y2)):
        series.appendData(px, py)
    return series
|
||||
|
||||
def plot_cross(plot, x, y, size = 1.0, width = 1, color = None, name = "Cross"):
    """Draws a '+' marker centered at (x, y).

    NaN entries separate the horizontal and vertical strokes so they render
    as two independent segments. Returns the created series.
    """
    half = float(size) / 2
    nan = float('nan')
    series = LinePlotSeries(name, color)
    plot.addSeries(series)
    series.setLineWidth(width)
    series.setPointsVisible(False)
    for px, py in ((nan, nan), (x - half, y), (x + half, y),
                   (nan, nan), (x, y - half), (x, y + half)):
        series.appendData(px, py)
    return series
|
||||
|
||||
def plot_rectangle(plot, x1, y1, x2, y2, width = 1, color = None, name = "Rectangle"):
    """Traces the closed outline of the axis-aligned rectangle with opposite
    corners (x1, y1) and (x2, y2); returns the created series.
    """
    series = LinePlotSeries(name, color)
    plot.addSeries(series)
    series.setLineWidth(width)
    series.setPointsVisible(False)
    for px, py in ((x1, y1), (x1, y2), (x2, y2), (x2, y1), (x1, y1)):
        series.appendData(px, py)
    return series
|
||||
|
||||
def plot_circle(plot, cx, cy, radius, width = 1, color = None, name = "Circle"):
    """Approximates a circle of the given radius centered at (cx, cy).

    Samples y = cy +/- sqrt(r^2 - (x - cx)^2) over both half circles and
    closes the outline by repeating the first point. Returns the series.
    """
    s = LinePlotSeries(name, color)
    plot.addSeries(s)
    s.setLineWidth(width)
    s.setPointsVisible(False)
    # Step size: ~100 samples per half circle.
    res=float(radius) / 100.0
    # "epson" (epsilon): keeps x strictly inside [cx-r, cx+r] so sqrt never
    # receives a negative argument from rounding at the endpoints.
    epson = 1e-12
    # Upper half: sweep x right-to-left.
    for xp in frange (cx+radius-epson , cx-radius+epson , -res):
        yp = math.sqrt(math.pow(radius, 2) - math.pow(xp - cx, 2)) + cy
        s.appendData(xp, yp)
    # Lower half: sweep x left-to-right.
    for xp in frange (cx-radius+epson , cx+radius-epson, res):
        yp = -math.sqrt(math.pow(radius, 2) - math.pow(xp - cx, 2)) + cy
        s.appendData(xp, yp)
    # Close the outline by repeating the first sampled point.
    if s.getCount()>0:
        s.appendData(s.getX()[0], s.getY()[0])
    return s
|
||||
963
script/_Lib/requests-2.4.3-py2.7.egg-info
Normal file
963
script/_Lib/requests-2.4.3-py2.7.egg-info
Normal file
@@ -0,0 +1,963 @@
|
||||
Metadata-Version: 1.1
|
||||
Name: requests
|
||||
Version: 2.4.3
|
||||
Summary: Python HTTP for Humans.
|
||||
Home-page: http://python-requests.org
|
||||
Author: Kenneth Reitz
|
||||
Author-email: me@kennethreitz.com
|
||||
License: Apache 2.0
|
||||
Description: Requests: HTTP for Humans
|
||||
=========================
|
||||
|
||||
.. image:: https://badge.fury.io/py/requests.png
|
||||
:target: http://badge.fury.io/py/requests
|
||||
|
||||
.. image:: https://pypip.in/d/requests/badge.png
|
||||
:target: https://crate.io/packages/requests/
|
||||
|
||||
|
||||
Requests is an Apache2 Licensed HTTP library, written in Python, for human
|
||||
beings.
|
||||
|
||||
Most existing Python modules for sending HTTP requests are extremely
|
||||
verbose and cumbersome. Python's builtin urllib2 module provides most of
|
||||
the HTTP capabilities you should need, but the api is thoroughly broken.
|
||||
It requires an enormous amount of work (even method overrides) to
|
||||
perform the simplest of tasks.
|
||||
|
||||
Things shouldn't be this way. Not in Python.
|
||||
|
||||
.. code-block:: pycon
|
||||
|
||||
>>> r = requests.get('https://api.github.com', auth=('user', 'pass'))
|
||||
>>> r.status_code
|
||||
204
|
||||
>>> r.headers['content-type']
|
||||
'application/json'
|
||||
>>> r.text
|
||||
...
|
||||
|
||||
See `the same code, without Requests <https://gist.github.com/973705>`_.
|
||||
|
||||
Requests allow you to send HTTP/1.1 requests. You can add headers, form data,
|
||||
multipart files, and parameters with simple Python dictionaries, and access the
|
||||
response data in the same way. It's powered by httplib and `urllib3
|
||||
<https://github.com/shazow/urllib3>`_, but it does all the hard work and crazy
|
||||
hacks for you.
|
||||
|
||||
|
||||
Features
|
||||
--------
|
||||
|
||||
- International Domains and URLs
|
||||
- Keep-Alive & Connection Pooling
|
||||
- Sessions with Cookie Persistence
|
||||
- Browser-style SSL Verification
|
||||
- Basic/Digest Authentication
|
||||
- Elegant Key/Value Cookies
|
||||
- Automatic Decompression
|
||||
- Unicode Response Bodies
|
||||
- Multipart File Uploads
|
||||
- Connection Timeouts
|
||||
- Thread-safety
|
||||
- HTTP(S) proxy support
|
||||
|
||||
|
||||
Installation
|
||||
------------
|
||||
|
||||
To install Requests, simply:
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
$ pip install requests
|
||||
|
||||
|
||||
Documentation
|
||||
-------------
|
||||
|
||||
Documentation is available at http://docs.python-requests.org/.
|
||||
|
||||
|
||||
Contribute
|
||||
----------
|
||||
|
||||
#. Check for open issues or open a fresh issue to start a discussion around a feature idea or a bug. There is a `Contributor Friendly`_ tag for issues that should be ideal for people who are not very familiar with the codebase yet.
|
||||
#. If you feel uncomfortable or uncertain about an issue or your changes, feel free to email @sigmavirus24 and he will happily help you via email, Skype, remote pairing or whatever you are comfortable with.
|
||||
#. Fork `the repository`_ on GitHub to start making your changes to the **master** branch (or branch off of it).
|
||||
#. Write a test which shows that the bug was fixed or that the feature works as expected.
|
||||
#. Send a pull request and bug the maintainer until it gets merged and published. :) Make sure to add yourself to AUTHORS_.
|
||||
|
||||
.. _`the repository`: http://github.com/kennethreitz/requests
|
||||
.. _AUTHORS: https://github.com/kennethreitz/requests/blob/master/AUTHORS.rst
|
||||
.. _Contributor Friendly: https://github.com/kennethreitz/requests/issues?direction=desc&labels=Contributor+Friendly&page=1&sort=updated&state=open
|
||||
|
||||
|
||||
.. :changelog:
|
||||
|
||||
Release History
|
||||
---------------
|
||||
|
||||
2.4.3 (2014-10-06)
|
||||
++++++++++++++++++
|
||||
|
||||
**Bugfixes**
|
||||
|
||||
- Unicode URL improvements for Python 2.
|
||||
- Re-order JSON param for backwards compat.
|
||||
- Automatically defrag authentication schemes from host/pass URIs. (`#2249 <https://github.com/kennethreitz/requests/issues/2249>`_)
|
||||
|
||||
|
||||
2.4.2 (2014-10-05)
|
||||
++++++++++++++++++
|
||||
|
||||
**Improvements**
|
||||
|
||||
- FINALLY! Add json parameter for uploads! (`#2258 <https://github.com/kennethreitz/requests/pull/2258>`_)
|
||||
- Support for bytestring URLs on Python 3.x (`#2238 <https://github.com/kennethreitz/requests/pull/2238>`_)
|
||||
|
||||
**Bugfixes**
|
||||
|
||||
- Avoid getting stuck in a loop (`#2244 <https://github.com/kennethreitz/requests/pull/2244>`_)
|
||||
- Multiple calls to iter* fail with unhelpful error. (`#2240 <https://github.com/kennethreitz/requests/issues/2240>`_, `#2241 <https://github.com/kennethreitz/requests/issues/2241>`_)
|
||||
|
||||
**Documentation**
|
||||
|
||||
- Correct redirection introduction (`#2245 <https://github.com/kennethreitz/requests/pull/2245/>`_)
|
||||
- Added example of how to send multiple files in one request. (`#2227 <https://github.com/kennethreitz/requests/pull/2227/>`_)
|
||||
- Clarify how to pass a custom set of CAs (`#2248 <https://github.com/kennethreitz/requests/pull/2248/>`_)
|
||||
|
||||
|
||||
|
||||
2.4.1 (2014-09-09)
|
||||
++++++++++++++++++
|
||||
|
||||
- Now has a "security" package extras set, ``$ pip install requests[security]``
|
||||
- Requests will now use Certifi if it is available.
|
||||
- Capture and re-raise urllib3 ProtocolError
|
||||
- Bugfix for responses that attempt to redirect to themselves forever (wtf?).
|
||||
|
||||
|
||||
2.4.0 (2014-08-29)
|
||||
++++++++++++++++++
|
||||
|
||||
**Behavioral Changes**
|
||||
|
||||
- ``Connection: keep-alive`` header is now sent automatically.
|
||||
|
||||
**Improvements**
|
||||
|
||||
- Support for connect timeouts! Timeout now accepts a tuple (connect, read) which is used to set individual connect and read timeouts.
|
||||
- Allow copying of PreparedRequests without headers/cookies.
|
||||
- Updated bundled urllib3 version.
|
||||
- Refactored settings loading from environment — new `Session.merge_environment_settings`.
|
||||
- Handle socket errors in iter_content.
|
||||
|
||||
|
||||
2.3.0 (2014-05-16)
|
||||
++++++++++++++++++
|
||||
|
||||
**API Changes**
|
||||
|
||||
- New ``Response`` property ``is_redirect``, which is true when the
|
||||
library could have processed this response as a redirection (whether
|
||||
or not it actually did).
|
||||
- The ``timeout`` parameter now affects requests with both ``stream=True`` and
|
||||
``stream=False`` equally.
|
||||
- The change in v2.0.0 to mandate explicit proxy schemes has been reverted.
|
||||
Proxy schemes now default to ``http://``.
|
||||
- The ``CaseInsensitiveDict`` used for HTTP headers now behaves like a normal
|
||||
dictionary when references as string or viewed in the interpreter.
|
||||
|
||||
**Bugfixes**
|
||||
|
||||
- No longer expose Authorization or Proxy-Authorization headers on redirect.
|
||||
Fix CVE-2014-1829 and CVE-2014-1830 respectively.
|
||||
- Authorization is re-evaluated each redirect.
|
||||
- On redirect, pass url as native strings.
|
||||
- Fall-back to autodetected encoding for JSON when Unicode detection fails.
|
||||
- Headers set to ``None`` on the ``Session`` are now correctly not sent.
|
||||
- Correctly honor ``decode_unicode`` even if it wasn't used earlier in the same
|
||||
response.
|
||||
- Stop advertising ``compress`` as a supported Content-Encoding.
|
||||
- The ``Response.history`` parameter is now always a list.
|
||||
- Many, many ``urllib3`` bugfixes.
|
||||
|
||||
2.2.1 (2014-01-23)
|
||||
++++++++++++++++++
|
||||
|
||||
**Bugfixes**
|
||||
|
||||
- Fixes incorrect parsing of proxy credentials that contain a literal or encoded '#' character.
|
||||
- Assorted urllib3 fixes.
|
||||
|
||||
2.2.0 (2014-01-09)
|
||||
++++++++++++++++++
|
||||
|
||||
**API Changes**
|
||||
|
||||
- New exception: ``ContentDecodingError``. Raised instead of ``urllib3``
|
||||
``DecodeError`` exceptions.
|
||||
|
||||
**Bugfixes**
|
||||
|
||||
- Avoid many many exceptions from the buggy implementation of ``proxy_bypass`` on OS X in Python 2.6.
|
||||
- Avoid crashing when attempting to get authentication credentials from ~/.netrc when running as a user without a home directory.
|
||||
- Use the correct pool size for pools of connections to proxies.
|
||||
- Fix iteration of ``CookieJar`` objects.
|
||||
- Ensure that cookies are persisted over redirect.
|
||||
- Switch back to using chardet, since it has merged with charade.
|
||||
|
||||
2.1.0 (2013-12-05)
|
||||
++++++++++++++++++
|
||||
|
||||
- Updated CA Bundle, of course.
|
||||
- Cookies set on individual Requests through a ``Session`` (e.g. via ``Session.get()``) are no longer persisted to the ``Session``.
|
||||
- Clean up connections when we hit problems during chunked upload, rather than leaking them.
|
||||
- Return connections to the pool when a chunked upload is successful, rather than leaking it.
|
||||
- Match the HTTPbis recommendation for HTTP 301 redirects.
|
||||
- Prevent hanging when using streaming uploads and Digest Auth when a 401 is received.
|
||||
- Values of headers set by Requests are now always the native string type.
|
||||
- Fix previously broken SNI support.
|
||||
- Fix accessing HTTP proxies using proxy authentication.
|
||||
- Unencode HTTP Basic usernames and passwords extracted from URLs.
|
||||
- Support for IP address ranges for no_proxy environment variable
|
||||
- Parse headers correctly when users override the default ``Host:`` header.
|
||||
- Avoid munging the URL in case of case-sensitive servers.
|
||||
- Looser URL handling for non-HTTP/HTTPS urls.
|
||||
- Accept unicode methods in Python 2.6 and 2.7.
|
||||
- More resilient cookie handling.
|
||||
- Make ``Response`` objects pickleable.
|
||||
- Actually added MD5-sess to Digest Auth instead of pretending to like last time.
|
||||
- Updated internal urllib3.
|
||||
- Fixed @Lukasa's lack of taste.
|
||||
|
||||
2.0.1 (2013-10-24)
|
||||
++++++++++++++++++
|
||||
|
||||
- Updated included CA Bundle with new mistrusts and automated process for the future
|
||||
- Added MD5-sess to Digest Auth
|
||||
- Accept per-file headers in multipart file POST messages.
|
||||
- Fixed: Don't send the full URL on CONNECT messages.
|
||||
- Fixed: Correctly lowercase a redirect scheme.
|
||||
- Fixed: Cookies not persisted when set via functional API.
|
||||
- Fixed: Translate urllib3 ProxyError into a requests ProxyError derived from ConnectionError.
|
||||
- Updated internal urllib3 and chardet.
|
||||
|
||||
2.0.0 (2013-09-24)
|
||||
++++++++++++++++++
|
||||
|
||||
**API Changes:**
|
||||
|
||||
- Keys in the Headers dictionary are now native strings on all Python versions,
|
||||
i.e. bytestrings on Python 2, unicode on Python 3.
|
||||
- Proxy URLs now *must* have an explicit scheme. A ``MissingSchema`` exception
|
||||
will be raised if they don't.
|
||||
- Timeouts now apply to read time if ``Stream=False``.
|
||||
- ``RequestException`` is now a subclass of ``IOError``, not ``RuntimeError``.
|
||||
- Added new method to ``PreparedRequest`` objects: ``PreparedRequest.copy()``.
|
||||
- Added new method to ``Session`` objects: ``Session.update_request()``. This
|
||||
method updates a ``Request`` object with the data (e.g. cookies) stored on
|
||||
the ``Session``.
|
||||
- Added new method to ``Session`` objects: ``Session.prepare_request()``. This
|
||||
method updates and prepares a ``Request`` object, and returns the
|
||||
corresponding ``PreparedRequest`` object.
|
||||
- Added new method to ``HTTPAdapter`` objects: ``HTTPAdapter.proxy_headers()``.
|
||||
This should not be called directly, but improves the subclass interface.
|
||||
- ``httplib.IncompleteRead`` exceptions caused by incorrect chunked encoding
|
||||
will now raise a Requests ``ChunkedEncodingError`` instead.
|
||||
- Invalid percent-escape sequences now cause a Requests ``InvalidURL``
|
||||
exception to be raised.
|
||||
- HTTP 208 no longer uses reason phrase ``"im_used"``. Correctly uses
|
||||
``"already_reported"``.
|
||||
- HTTP 226 reason added (``"im_used"``).
|
||||
|
||||
**Bugfixes:**
|
||||
|
||||
- Vastly improved proxy support, including the CONNECT verb. Special thanks to
|
||||
the many contributors who worked towards this improvement.
|
||||
- Cookies are now properly managed when 401 authentication responses are
|
||||
received.
|
||||
- Chunked encoding fixes.
|
||||
- Support for mixed case schemes.
|
||||
- Better handling of streaming downloads.
|
||||
- Retrieve environment proxies from more locations.
|
||||
- Minor cookies fixes.
|
||||
- Improved redirect behaviour.
|
||||
- Improved streaming behaviour, particularly for compressed data.
|
||||
- Miscellaneous small Python 3 text encoding bugs.
|
||||
- ``.netrc`` no longer overrides explicit auth.
|
||||
- Cookies set by hooks are now correctly persisted on Sessions.
|
||||
- Fix problem with cookies that specify port numbers in their host field.
|
||||
- ``BytesIO`` can be used to perform streaming uploads.
|
||||
- More generous parsing of the ``no_proxy`` environment variable.
|
||||
- Non-string objects can be passed in data values alongside files.
|
||||
|
||||
1.2.3 (2013-05-25)
|
||||
++++++++++++++++++
|
||||
|
||||
- Simple packaging fix
|
||||
|
||||
|
||||
1.2.2 (2013-05-23)
|
||||
++++++++++++++++++
|
||||
|
||||
- Simple packaging fix
|
||||
|
||||
|
||||
1.2.1 (2013-05-20)
|
||||
++++++++++++++++++
|
||||
|
||||
- 301 and 302 redirects now change the verb to GET for all verbs, not just
|
||||
POST, improving browser compatibility.
|
||||
- Python 3.3.2 compatibility
|
||||
- Always percent-encode location headers
|
||||
- Fix connection adapter matching to be most-specific first
|
||||
- new argument to the default connection adapter for passing a block argument
|
||||
- prevent a KeyError when there's no link headers
|
||||
|
||||
1.2.0 (2013-03-31)
|
||||
++++++++++++++++++
|
||||
|
||||
- Fixed cookies on sessions and on requests
|
||||
- Significantly change how hooks are dispatched - hooks now receive all the
|
||||
arguments specified by the user when making a request so hooks can make a
|
||||
secondary request with the same parameters. This is especially necessary for
|
||||
authentication handler authors
|
||||
- certifi support was removed
|
||||
- Fixed bug where using OAuth 1 with body ``signature_type`` sent no data
|
||||
- Major proxy work thanks to @Lukasa including parsing of proxy authentication
|
||||
from the proxy url
|
||||
- Fix DigestAuth handling too many 401s
|
||||
- Update vendored urllib3 to include SSL bug fixes
|
||||
- Allow keyword arguments to be passed to ``json.loads()`` via the
|
||||
``Response.json()`` method
|
||||
- Don't send ``Content-Length`` header by default on ``GET`` or ``HEAD``
|
||||
requests
|
||||
- Add ``elapsed`` attribute to ``Response`` objects to time how long a request
|
||||
took.
|
||||
- Fix ``RequestsCookieJar``
|
||||
- Sessions and Adapters are now picklable, i.e., can be used with the
|
||||
multiprocessing library
|
||||
- Update charade to version 1.0.3
|
||||
|
||||
The change in how hooks are dispatched will likely cause a great deal of
|
||||
issues.
|
||||
|
||||
1.1.0 (2013-01-10)
|
||||
++++++++++++++++++
|
||||
|
||||
- CHUNKED REQUESTS
|
||||
- Support for iterable response bodies
|
||||
- Assume servers persist redirect params
|
||||
- Allow explicit content types to be specified for file data
|
||||
- Make merge_kwargs case-insensitive when looking up keys
|
||||
|
||||
1.0.3 (2012-12-18)
|
||||
++++++++++++++++++
|
||||
|
||||
- Fix file upload encoding bug
|
||||
- Fix cookie behavior
|
||||
|
||||
1.0.2 (2012-12-17)
|
||||
++++++++++++++++++
|
||||
|
||||
- Proxy fix for HTTPAdapter.
|
||||
|
||||
1.0.1 (2012-12-17)
|
||||
++++++++++++++++++
|
||||
|
||||
- Cert verification exception bug.
|
||||
- Proxy fix for HTTPAdapter.
|
||||
|
||||
1.0.0 (2012-12-17)
|
||||
++++++++++++++++++
|
||||
|
||||
- Massive Refactor and Simplification
|
||||
- Switch to Apache 2.0 license
|
||||
- Swappable Connection Adapters
|
||||
- Mountable Connection Adapters
|
||||
- Mutable ProcessedRequest chain
|
||||
- /s/prefetch/stream
|
||||
- Removal of all configuration
|
||||
- Standard library logging
|
||||
- Make Response.json() callable, not property.
|
||||
- Usage of new charade project, which provides python 2 and 3 simultaneous chardet.
|
||||
- Removal of all hooks except 'response'
|
||||
- Removal of all authentication helpers (OAuth, Kerberos)
|
||||
|
||||
This is not a backwards compatible change.
|
||||
|
||||
0.14.2 (2012-10-27)
|
||||
+++++++++++++++++++
|
||||
|
||||
- Improved mime-compatible JSON handling
|
||||
- Proxy fixes
|
||||
- Path hack fixes
|
||||
- Case-Insensistive Content-Encoding headers
|
||||
- Support for CJK parameters in form posts
|
||||
|
||||
|
||||
0.14.1 (2012-10-01)
|
||||
+++++++++++++++++++
|
||||
|
||||
- Python 3.3 Compatibility
|
||||
- Simply default accept-encoding
|
||||
- Bugfixes
|
||||
|
||||
|
||||
0.14.0 (2012-09-02)
|
||||
++++++++++++++++++++
|
||||
|
||||
- No more iter_content errors if already downloaded.
|
||||
|
||||
0.13.9 (2012-08-25)
|
||||
+++++++++++++++++++
|
||||
|
||||
- Fix for OAuth + POSTs
|
||||
- Remove exception eating from dispatch_hook
|
||||
- General bugfixes
|
||||
|
||||
0.13.8 (2012-08-21)
|
||||
+++++++++++++++++++
|
||||
|
||||
- Incredible Link header support :)
|
||||
|
||||
0.13.7 (2012-08-19)
|
||||
+++++++++++++++++++
|
||||
|
||||
- Support for (key, value) lists everywhere.
|
||||
- Digest Authentication improvements.
|
||||
- Ensure proxy exclusions work properly.
|
||||
- Clearer UnicodeError exceptions.
|
||||
- Automatic casting of URLs to strings (fURL and such)
|
||||
- Bugfixes.
|
||||
|
||||
0.13.6 (2012-08-06)
|
||||
+++++++++++++++++++
|
||||
|
||||
- Long awaited fix for hanging connections!
|
||||
|
||||
0.13.5 (2012-07-27)
|
||||
+++++++++++++++++++
|
||||
|
||||
- Packaging fix
|
||||
|
||||
0.13.4 (2012-07-27)
|
||||
+++++++++++++++++++
|
||||
|
||||
- GSSAPI/Kerberos authentication!
|
||||
- App Engine 2.7 Fixes!
|
||||
- Fix leaking connections (from urllib3 update)
|
||||
- OAuthlib path hack fix
|
||||
- OAuthlib URL parameters fix.
|
||||
|
||||
0.13.3 (2012-07-12)
|
||||
+++++++++++++++++++
|
||||
|
||||
- Use simplejson if available.
|
||||
- Do not hide SSLErrors behind Timeouts.
|
||||
- Fixed param handling with urls containing fragments.
|
||||
- Significantly improved information in User Agent.
|
||||
- client certificates are ignored when verify=False
|
||||
|
||||
0.13.2 (2012-06-28)
|
||||
+++++++++++++++++++
|
||||
|
||||
- Zero dependencies (once again)!
|
||||
- New: Response.reason
|
||||
- Sign querystring parameters in OAuth 1.0
|
||||
- Client certificates no longer ignored when verify=False
|
||||
- Add openSUSE certificate support
|
||||
|
||||
0.13.1 (2012-06-07)
|
||||
+++++++++++++++++++
|
||||
|
||||
- Allow passing a file or file-like object as data.
|
||||
- Allow hooks to return responses that indicate errors.
|
||||
- Fix Response.text and Response.json for body-less responses.
|
||||
|
||||
0.13.0 (2012-05-29)
|
||||
+++++++++++++++++++
|
||||
|
||||
- Removal of Requests.async in favor of `grequests <https://github.com/kennethreitz/grequests>`_
|
||||
- Allow disabling of cookie persistence.
|
||||
- New implementation of safe_mode
|
||||
- cookies.get now supports default argument
|
||||
- Session cookies not saved when Session.request is called with return_response=False
|
||||
- Env: no_proxy support.
|
||||
- RequestsCookieJar improvements.
|
||||
- Various bug fixes.
|
||||
|
||||
0.12.1 (2012-05-08)
|
||||
+++++++++++++++++++
|
||||
|
||||
- New ``Response.json`` property.
|
||||
- Ability to add string file uploads.
|
||||
- Fix out-of-range issue with iter_lines.
|
||||
- Fix iter_content default size.
|
||||
- Fix POST redirects containing files.
|
||||
|
||||
0.12.0 (2012-05-02)
|
||||
+++++++++++++++++++
|
||||
|
||||
- EXPERIMENTAL OAUTH SUPPORT!
|
||||
- Proper CookieJar-backed cookies interface with awesome dict-like interface.
|
||||
- Speed fix for non-iterated content chunks.
|
||||
- Move ``pre_request`` to a more usable place.
|
||||
- New ``pre_send`` hook.
|
||||
- Lazily encode data, params, files.
|
||||
- Load system Certificate Bundle if ``certify`` isn't available.
|
||||
- Cleanups, fixes.
|
||||
|
||||
0.11.2 (2012-04-22)
|
||||
+++++++++++++++++++
|
||||
|
||||
- Attempt to use the OS's certificate bundle if ``certifi`` isn't available.
|
||||
- Infinite digest auth redirect fix.
|
||||
- Multi-part file upload improvements.
|
||||
- Fix decoding of invalid %encodings in URLs.
|
||||
- If there is no content in a response don't throw an error the second time that content is attempted to be read.
|
||||
- Upload data on redirects.
|
||||
|
||||
0.11.1 (2012-03-30)
|
||||
+++++++++++++++++++
|
||||
|
||||
* POST redirects now break RFC to do what browsers do: Follow up with a GET.
|
||||
* New ``strict_mode`` configuration to disable new redirect behavior.
|
||||
|
||||
|
||||
0.11.0 (2012-03-14)
|
||||
+++++++++++++++++++
|
||||
|
||||
* Private SSL Certificate support
|
||||
* Remove select.poll from Gevent monkeypatching
|
||||
* Remove redundant generator for chunked transfer encoding
|
||||
* Fix: Response.ok raises Timeout Exception in safe_mode
|
||||
|
||||
0.10.8 (2012-03-09)
|
||||
+++++++++++++++++++
|
||||
|
||||
* Generate chunked ValueError fix
|
||||
* Proxy configuration by environment variables
|
||||
* Simplification of iter_lines.
|
||||
* New `trust_env` configuration for disabling system/environment hints.
|
||||
* Suppress cookie errors.
|
||||
|
||||
0.10.7 (2012-03-07)
|
||||
+++++++++++++++++++
|
||||
|
||||
* `encode_uri` = False
|
||||
|
||||
0.10.6 (2012-02-25)
|
||||
+++++++++++++++++++
|
||||
|
||||
* Allow '=' in cookies.
|
||||
|
||||
0.10.5 (2012-02-25)
|
||||
+++++++++++++++++++
|
||||
|
||||
* Response body with 0 content-length fix.
|
||||
* New async.imap.
|
||||
* Don't fail on netrc.
|
||||
|
||||
|
||||
0.10.4 (2012-02-20)
|
||||
+++++++++++++++++++
|
||||
|
||||
* Honor netrc.
|
||||
|
||||
0.10.3 (2012-02-20)
|
||||
+++++++++++++++++++
|
||||
|
||||
* HEAD requests don't follow redirects anymore.
|
||||
* raise_for_status() doesn't raise for 3xx anymore.
|
||||
* Make Session objects picklable.
|
||||
* ValueError for invalid schema URLs.
|
||||
|
||||
0.10.2 (2012-01-15)
|
||||
+++++++++++++++++++
|
||||
|
||||
* Vastly improved URL quoting.
|
||||
* Additional allowed cookie key values.
|
||||
* Attempted fix for "Too many open files" Error
|
||||
* Replace unicode errors on first pass, no need for second pass.
|
||||
* Append '/' to bare-domain urls before query insertion.
|
||||
* Exceptions now inherit from RuntimeError.
|
||||
* Binary uploads + auth fix.
|
||||
* Bugfixes.
|
||||
|
||||
|
||||
0.10.1 (2012-01-23)
|
||||
+++++++++++++++++++
|
||||
|
||||
* PYTHON 3 SUPPORT!
|
||||
* Dropped 2.5 Support. (*Backwards Incompatible*)
|
||||
|
||||
0.10.0 (2012-01-21)
|
||||
+++++++++++++++++++
|
||||
|
||||
* ``Response.content`` is now bytes-only. (*Backwards Incompatible*)
|
||||
* New ``Response.text`` is unicode-only.
|
||||
* If no ``Response.encoding`` is specified and ``chardet`` is available, ``Response.text`` will guess an encoding.
|
||||
* Default to ISO-8859-1 (Western) encoding for "text" subtypes.
|
||||
* Removal of `decode_unicode`. (*Backwards Incompatible*)
|
||||
* New multiple-hooks system.
|
||||
* New ``Response.register_hook`` for registering hooks within the pipeline.
|
||||
* ``Response.url`` is now Unicode.
|
||||
|
||||
0.9.3 (2012-01-18)
|
||||
++++++++++++++++++
|
||||
|
||||
* SSL verify=False bugfix (apparent on windows machines).
|
||||
|
||||
0.9.2 (2012-01-18)
|
||||
++++++++++++++++++
|
||||
|
||||
* Asynchronous async.send method.
|
||||
* Support for proper chunk streams with boundaries.
|
||||
* session argument for Session classes.
|
||||
* Print entire hook tracebacks, not just exception instance.
|
||||
* Fix response.iter_lines from pending next line.
|
||||
* Fix but in HTTP-digest auth w/ URI having query strings.
|
||||
* Fix in Event Hooks section.
|
||||
* Urllib3 update.
|
||||
|
||||
|
||||
0.9.1 (2012-01-06)
|
||||
++++++++++++++++++
|
||||
|
||||
* danger_mode for automatic Response.raise_for_status()
|
||||
* Response.iter_lines refactor
|
||||
|
||||
0.9.0 (2011-12-28)
|
||||
++++++++++++++++++
|
||||
|
||||
* verify ssl is default.
|
||||
|
||||
|
||||
0.8.9 (2011-12-28)
|
||||
++++++++++++++++++
|
||||
|
||||
* Packaging fix.
|
||||
|
||||
|
||||
0.8.8 (2011-12-28)
|
||||
++++++++++++++++++
|
||||
|
||||
* SSL CERT VERIFICATION!
|
||||
* Release of Cerifi: Mozilla's cert list.
|
||||
* New 'verify' argument for SSL requests.
|
||||
* Urllib3 update.
|
||||
|
||||
0.8.7 (2011-12-24)
|
||||
++++++++++++++++++
|
||||
|
||||
* iter_lines last-line truncation fix
|
||||
* Force safe_mode for async requests
|
||||
* Handle safe_mode exceptions more consistently
|
||||
* Fix iteration on null responses in safe_mode
|
||||
|
||||
0.8.6 (2011-12-18)
|
||||
++++++++++++++++++
|
||||
|
||||
* Socket timeout fixes.
|
||||
* Proxy Authorization support.
|
||||
|
||||
0.8.5 (2011-12-14)
|
||||
++++++++++++++++++
|
||||
|
||||
* Response.iter_lines!
|
||||
|
||||
0.8.4 (2011-12-11)
|
||||
++++++++++++++++++
|
||||
|
||||
* Prefetch bugfix.
|
||||
* Added license to installed version.
|
||||
|
||||
0.8.3 (2011-11-27)
|
||||
++++++++++++++++++
|
||||
|
||||
* Converted auth system to use simpler callable objects.
|
||||
* New session parameter to API methods.
|
||||
* Display full URL while logging.
|
||||
|
||||
0.8.2 (2011-11-19)
|
||||
++++++++++++++++++
|
||||
|
||||
* New Unicode decoding system, based on over-ridable `Response.encoding`.
|
||||
* Proper URL slash-quote handling.
|
||||
* Cookies with ``[``, ``]``, and ``_`` allowed.
|
||||
|
||||
0.8.1 (2011-11-15)
|
||||
++++++++++++++++++
|
||||
|
||||
* URL Request path fix
|
||||
* Proxy fix.
|
||||
* Timeouts fix.
|
||||
|
||||
0.8.0 (2011-11-13)
|
||||
++++++++++++++++++
|
||||
|
||||
* Keep-alive support!
|
||||
* Complete removal of Urllib2
|
||||
* Complete removal of Poster
|
||||
* Complete removal of CookieJars
|
||||
* New ConnectionError raising
|
||||
* Safe_mode for error catching
|
||||
* prefetch parameter for request methods
|
||||
* OPTION method
|
||||
* Async pool size throttling
|
||||
* File uploads send real names
|
||||
* Vendored in urllib3
|
||||
|
||||
0.7.6 (2011-11-07)
|
||||
++++++++++++++++++
|
||||
|
||||
* Digest authentication bugfix (attach query data to path)
|
||||
|
||||
0.7.5 (2011-11-04)
|
||||
++++++++++++++++++
|
||||
|
||||
* Response.content = None if there was an invalid repsonse.
|
||||
* Redirection auth handling.
|
||||
|
||||
0.7.4 (2011-10-26)
|
||||
++++++++++++++++++
|
||||
|
||||
* Session Hooks fix.
|
||||
|
||||
0.7.3 (2011-10-23)
|
||||
++++++++++++++++++
|
||||
|
||||
* Digest Auth fix.
|
||||
|
||||
|
||||
0.7.2 (2011-10-23)
|
||||
++++++++++++++++++
|
||||
|
||||
* PATCH Fix.
|
||||
|
||||
|
||||
0.7.1 (2011-10-23)
|
||||
++++++++++++++++++
|
||||
|
||||
* Move away from urllib2 authentication handling.
|
||||
* Fully Remove AuthManager, AuthObject, &c.
|
||||
* New tuple-based auth system with handler callbacks.
|
||||
|
||||
|
||||
0.7.0 (2011-10-22)
|
||||
++++++++++++++++++
|
||||
|
||||
* Sessions are now the primary interface.
|
||||
* Deprecated InvalidMethodException.
|
||||
* PATCH fix.
|
||||
* New config system (no more global settings).
|
||||
|
||||
|
||||
0.6.6 (2011-10-19)
|
||||
++++++++++++++++++
|
||||
|
||||
* Session parameter bugfix (params merging).
|
||||
|
||||
|
||||
0.6.5 (2011-10-18)
|
||||
++++++++++++++++++
|
||||
|
||||
* Offline (fast) test suite.
|
||||
* Session dictionary argument merging.
|
||||
|
||||
|
||||
0.6.4 (2011-10-13)
|
||||
++++++++++++++++++
|
||||
|
||||
* Automatic decoding of unicode, based on HTTP Headers.
|
||||
* New ``decode_unicode`` setting.
|
||||
* Removal of ``r.read/close`` methods.
|
||||
* New ``r.faw`` interface for advanced response usage.*
|
||||
* Automatic expansion of parameterized headers.
|
||||
|
||||
|
||||
0.6.3 (2011-10-13)
|
||||
++++++++++++++++++
|
||||
|
||||
* Beautiful ``requests.async`` module, for making async requests w/ gevent.
|
||||
|
||||
|
||||
0.6.2 (2011-10-09)
|
||||
++++++++++++++++++
|
||||
|
||||
* GET/HEAD obeys allow_redirects=False.
|
||||
|
||||
|
||||
0.6.1 (2011-08-20)
|
||||
++++++++++++++++++
|
||||
|
||||
* Enhanced status codes experience ``\o/``
|
||||
* Set a maximum number of redirects (``settings.max_redirects``)
|
||||
* Full Unicode URL support
|
||||
* Support for protocol-less redirects.
|
||||
* Allow for arbitrary request types.
|
||||
* Bugfixes
|
||||
|
||||
|
||||
0.6.0 (2011-08-17)
|
||||
++++++++++++++++++
|
||||
|
||||
* New callback hook system
|
||||
* New persistient sessions object and context manager
|
||||
* Transparent Dict-cookie handling
|
||||
* Status code reference object
|
||||
* Removed Response.cached
|
||||
* Added Response.request
|
||||
* All args are kwargs
|
||||
* Relative redirect support
|
||||
* HTTPError handling improvements
|
||||
* Improved https testing
|
||||
* Bugfixes
|
||||
|
||||
|
||||
0.5.1 (2011-07-23)
|
||||
++++++++++++++++++
|
||||
|
||||
* International Domain Name Support!
|
||||
* Access headers without fetching entire body (``read()``)
|
||||
* Use lists as dicts for parameters
|
||||
* Add Forced Basic Authentication
|
||||
* Forced Basic is default authentication type
|
||||
* ``python-requests.org`` default User-Agent header
|
||||
* CaseInsensitiveDict lower-case caching
|
||||
* Response.history bugfix
|
||||
|
||||
|
||||
0.5.0 (2011-06-21)
|
||||
++++++++++++++++++
|
||||
|
||||
* PATCH Support
|
||||
* Support for Proxies
|
||||
* HTTPBin Test Suite
|
||||
* Redirect Fixes
|
||||
* settings.verbose stream writing
|
||||
* Querystrings for all methods
|
||||
* URLErrors (Connection Refused, Timeout, Invalid URLs) are treated as explicity raised
|
||||
``r.requests.get('hwe://blah'); r.raise_for_status()``
|
||||
|
||||
|
||||
0.4.1 (2011-05-22)
|
||||
++++++++++++++++++
|
||||
|
||||
* Improved Redirection Handling
|
||||
* New 'allow_redirects' param for following non-GET/HEAD Redirects
|
||||
* Settings module refactoring
|
||||
|
||||
|
||||
0.4.0 (2011-05-15)
|
||||
++++++++++++++++++
|
||||
|
||||
* Response.history: list of redirected responses
|
||||
* Case-Insensitive Header Dictionaries!
|
||||
* Unicode URLs
|
||||
|
||||
|
||||
0.3.4 (2011-05-14)
|
||||
++++++++++++++++++
|
||||
|
||||
* Urllib2 HTTPAuthentication Recursion fix (Basic/Digest)
|
||||
* Internal Refactor
|
||||
* Bytes data upload Bugfix
|
||||
|
||||
|
||||
|
||||
0.3.3 (2011-05-12)
|
||||
++++++++++++++++++
|
||||
|
||||
* Request timeouts
|
||||
* Unicode url-encoded data
|
||||
* Settings context manager and module
|
||||
|
||||
|
||||
0.3.2 (2011-04-15)
|
||||
++++++++++++++++++
|
||||
|
||||
* Automatic Decompression of GZip Encoded Content
|
||||
* AutoAuth Support for Tupled HTTP Auth
|
||||
|
||||
|
||||
0.3.1 (2011-04-01)
|
||||
++++++++++++++++++
|
||||
|
||||
* Cookie Changes
|
||||
* Response.read()
|
||||
* Poster fix
|
||||
|
||||
|
||||
0.3.0 (2011-02-25)
|
||||
++++++++++++++++++
|
||||
|
||||
* Automatic Authentication API Change
|
||||
* Smarter Query URL Parameterization
|
||||
* Allow file uploads and POST data together
|
||||
* New Authentication Manager System
|
||||
- Simpler Basic HTTP System
|
||||
- Supports all build-in urllib2 Auths
|
||||
- Allows for custom Auth Handlers
|
||||
|
||||
|
||||
0.2.4 (2011-02-19)
|
||||
++++++++++++++++++
|
||||
|
||||
* Python 2.5 Support
|
||||
* PyPy-c v1.4 Support
|
||||
* Auto-Authentication tests
|
||||
* Improved Request object constructor
|
||||
|
||||
0.2.3 (2011-02-15)
|
||||
++++++++++++++++++
|
||||
|
||||
* New HTTPHandling Methods
|
||||
- Response.__nonzero__ (false if bad HTTP Status)
|
||||
- Response.ok (True if expected HTTP Status)
|
||||
- Response.error (Logged HTTPError if bad HTTP Status)
|
||||
- Response.raise_for_status() (Raises stored HTTPError)
|
||||
|
||||
|
||||
0.2.2 (2011-02-14)
|
||||
++++++++++++++++++
|
||||
|
||||
* Still handles request in the event of an HTTPError. (Issue #2)
|
||||
* Eventlet and Gevent Monkeypatch support.
|
||||
* Cookie Support (Issue #1)
|
||||
|
||||
|
||||
0.2.1 (2011-02-14)
|
||||
++++++++++++++++++
|
||||
|
||||
* Added file attribute to POST and PUT requests for multipart-encode file uploads.
|
||||
* Added Request.url attribute for context and redirects
|
||||
|
||||
|
||||
0.2.0 (2011-02-14)
|
||||
++++++++++++++++++
|
||||
|
||||
* Birth!
|
||||
|
||||
|
||||
0.0.1 (2011-02-13)
|
||||
++++++++++++++++++
|
||||
|
||||
* Frustration
|
||||
* Conception
|
||||
|
||||
|
||||
Platform: UNKNOWN
|
||||
Classifier: Development Status :: 5 - Production/Stable
|
||||
Classifier: Intended Audience :: Developers
|
||||
Classifier: Natural Language :: English
|
||||
Classifier: License :: OSI Approved :: Apache Software License
|
||||
Classifier: Programming Language :: Python
|
||||
Classifier: Programming Language :: Python :: 2.6
|
||||
Classifier: Programming Language :: Python :: 2.7
|
||||
Classifier: Programming Language :: Python :: 3
|
||||
Classifier: Programming Language :: Python :: 3.3
|
||||
Classifier: Programming Language :: Python :: 3.4
|
||||
77
script/_Lib/requests/__init__.py
Normal file
77
script/_Lib/requests/__init__.py
Normal file
@@ -0,0 +1,77 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# __
|
||||
# /__) _ _ _ _ _/ _
|
||||
# / ( (- (/ (/ (- _) / _)
|
||||
# /
|
||||
|
||||
"""
|
||||
requests HTTP library
|
||||
~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
Requests is an HTTP library, written in Python, for human beings. Basic GET
|
||||
usage:
|
||||
|
||||
>>> import requests
|
||||
>>> r = requests.get('https://www.python.org')
|
||||
>>> r.status_code
|
||||
200
|
||||
>>> 'Python is a programming language' in r.content
|
||||
True
|
||||
|
||||
... or POST:
|
||||
|
||||
>>> payload = dict(key1='value1', key2='value2')
|
||||
>>> r = requests.post('http://httpbin.org/post', data=payload)
|
||||
>>> print(r.text)
|
||||
{
|
||||
...
|
||||
"form": {
|
||||
"key2": "value2",
|
||||
"key1": "value1"
|
||||
},
|
||||
...
|
||||
}
|
||||
|
||||
The other HTTP methods are supported - see `requests.api`. Full documentation
|
||||
is at <http://python-requests.org>.
|
||||
|
||||
:copyright: (c) 2014 by Kenneth Reitz.
|
||||
:license: Apache 2.0, see LICENSE for more details.
|
||||
|
||||
"""
|
||||
|
||||
__title__ = 'requests'
|
||||
__version__ = '2.4.3'
|
||||
__build__ = 0x020403
|
||||
__author__ = 'Kenneth Reitz'
|
||||
__license__ = 'Apache 2.0'
|
||||
__copyright__ = 'Copyright 2014 Kenneth Reitz'
|
||||
|
||||
# Attempt to enable urllib3's SNI support, if possible
|
||||
try:
|
||||
from .packages.urllib3.contrib import pyopenssl
|
||||
pyopenssl.inject_into_urllib3()
|
||||
except ImportError:
|
||||
pass
|
||||
|
||||
from . import utils
|
||||
from .models import Request, Response, PreparedRequest
|
||||
from .api import request, get, head, post, patch, put, delete, options
|
||||
from .sessions import session, Session
|
||||
from .status_codes import codes
|
||||
from .exceptions import (
|
||||
RequestException, Timeout, URLRequired,
|
||||
TooManyRedirects, HTTPError, ConnectionError
|
||||
)
|
||||
|
||||
# Set default logging handler to avoid "No handler found" warnings.
|
||||
import logging
|
||||
try: # Python 2.7+
|
||||
from logging import NullHandler
|
||||
except ImportError:
|
||||
class NullHandler(logging.Handler):
|
||||
def emit(self, record):
|
||||
pass
|
||||
|
||||
logging.getLogger(__name__).addHandler(NullHandler())
|
||||
426
script/_Lib/requests/adapters.py
Normal file
426
script/_Lib/requests/adapters.py
Normal file
@@ -0,0 +1,426 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
"""
|
||||
requests.adapters
|
||||
~~~~~~~~~~~~~~~~~
|
||||
|
||||
This module contains the transport adapters that Requests uses to define
|
||||
and maintain connections.
|
||||
"""
|
||||
|
||||
import socket
|
||||
|
||||
from .models import Response
|
||||
from .packages.urllib3 import Retry
|
||||
from .packages.urllib3.poolmanager import PoolManager, proxy_from_url
|
||||
from .packages.urllib3.response import HTTPResponse
|
||||
from .packages.urllib3.util import Timeout as TimeoutSauce
|
||||
from .compat import urlparse, basestring
|
||||
from .utils import (DEFAULT_CA_BUNDLE_PATH, get_encoding_from_headers,
|
||||
prepend_scheme_if_needed, get_auth_from_url, urldefragauth)
|
||||
from .structures import CaseInsensitiveDict
|
||||
from .packages.urllib3.exceptions import ConnectTimeoutError
|
||||
from .packages.urllib3.exceptions import HTTPError as _HTTPError
|
||||
from .packages.urllib3.exceptions import MaxRetryError
|
||||
from .packages.urllib3.exceptions import ProxyError as _ProxyError
|
||||
from .packages.urllib3.exceptions import ProtocolError
|
||||
from .packages.urllib3.exceptions import ReadTimeoutError
|
||||
from .packages.urllib3.exceptions import SSLError as _SSLError
|
||||
from .cookies import extract_cookies_to_jar
|
||||
from .exceptions import (ConnectionError, ConnectTimeout, ReadTimeout, SSLError,
|
||||
ProxyError)
|
||||
from .auth import _basic_auth_str
|
||||
|
||||
DEFAULT_POOLBLOCK = False
|
||||
DEFAULT_POOLSIZE = 10
|
||||
DEFAULT_RETRIES = 0
|
||||
|
||||
|
||||
class BaseAdapter(object):
|
||||
"""The Base Transport Adapter"""
|
||||
|
||||
def __init__(self):
|
||||
super(BaseAdapter, self).__init__()
|
||||
|
||||
def send(self):
|
||||
raise NotImplementedError
|
||||
|
||||
def close(self):
|
||||
raise NotImplementedError
|
||||
|
||||
|
||||
class HTTPAdapter(BaseAdapter):
|
||||
"""The built-in HTTP Adapter for urllib3.
|
||||
|
||||
Provides a general-case interface for Requests sessions to contact HTTP and
|
||||
HTTPS urls by implementing the Transport Adapter interface. This class will
|
||||
usually be created by the :class:`Session <Session>` class under the
|
||||
covers.
|
||||
|
||||
:param pool_connections: The number of urllib3 connection pools to cache.
|
||||
:param pool_maxsize: The maximum number of connections to save in the pool.
|
||||
:param int max_retries: The maximum number of retries each connection
|
||||
should attempt. Note, this applies only to failed connections and
|
||||
timeouts, never to requests where the server returns a response.
|
||||
:param pool_block: Whether the connection pool should block for connections.
|
||||
|
||||
Usage::
|
||||
|
||||
>>> import requests
|
||||
>>> s = requests.Session()
|
||||
>>> a = requests.adapters.HTTPAdapter(max_retries=3)
|
||||
>>> s.mount('http://', a)
|
||||
"""
|
||||
__attrs__ = ['max_retries', 'config', '_pool_connections', '_pool_maxsize',
|
||||
'_pool_block']
|
||||
|
||||
def __init__(self, pool_connections=DEFAULT_POOLSIZE,
|
||||
pool_maxsize=DEFAULT_POOLSIZE, max_retries=DEFAULT_RETRIES,
|
||||
pool_block=DEFAULT_POOLBLOCK):
|
||||
self.max_retries = max_retries
|
||||
self.config = {}
|
||||
self.proxy_manager = {}
|
||||
|
||||
super(HTTPAdapter, self).__init__()
|
||||
|
||||
self._pool_connections = pool_connections
|
||||
self._pool_maxsize = pool_maxsize
|
||||
self._pool_block = pool_block
|
||||
|
||||
self.init_poolmanager(pool_connections, pool_maxsize, block=pool_block)
|
||||
|
||||
def __getstate__(self):
|
||||
return dict((attr, getattr(self, attr, None)) for attr in
|
||||
self.__attrs__)
|
||||
|
||||
def __setstate__(self, state):
|
||||
# Can't handle by adding 'proxy_manager' to self.__attrs__ because
|
||||
# because self.poolmanager uses a lambda function, which isn't pickleable.
|
||||
self.proxy_manager = {}
|
||||
self.config = {}
|
||||
|
||||
for attr, value in state.items():
|
||||
setattr(self, attr, value)
|
||||
|
||||
self.init_poolmanager(self._pool_connections, self._pool_maxsize,
|
||||
block=self._pool_block)
|
||||
|
||||
def init_poolmanager(self, connections, maxsize, block=DEFAULT_POOLBLOCK, **pool_kwargs):
|
||||
"""Initializes a urllib3 PoolManager.
|
||||
|
||||
This method should not be called from user code, and is only
|
||||
exposed for use when subclassing the
|
||||
:class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
|
||||
|
||||
:param connections: The number of urllib3 connection pools to cache.
|
||||
:param maxsize: The maximum number of connections to save in the pool.
|
||||
:param block: Block when no free connections are available.
|
||||
:param pool_kwargs: Extra keyword arguments used to initialize the Pool Manager.
|
||||
"""
|
||||
# save these values for pickling
|
||||
self._pool_connections = connections
|
||||
self._pool_maxsize = maxsize
|
||||
self._pool_block = block
|
||||
|
||||
self.poolmanager = PoolManager(num_pools=connections, maxsize=maxsize,
|
||||
block=block, **pool_kwargs)
|
||||
|
||||
def proxy_manager_for(self, proxy, **proxy_kwargs):
|
||||
"""Return urllib3 ProxyManager for the given proxy.
|
||||
|
||||
This method should not be called from user code, and is only
|
||||
exposed for use when subclassing the
|
||||
:class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
|
||||
|
||||
:param proxy: The proxy to return a urllib3 ProxyManager for.
|
||||
:param proxy_kwargs: Extra keyword arguments used to configure the Proxy Manager.
|
||||
:returns: ProxyManager
|
||||
"""
|
||||
if not proxy in self.proxy_manager:
|
||||
proxy_headers = self.proxy_headers(proxy)
|
||||
self.proxy_manager[proxy] = proxy_from_url(
|
||||
proxy,
|
||||
proxy_headers=proxy_headers,
|
||||
num_pools=self._pool_connections,
|
||||
maxsize=self._pool_maxsize,
|
||||
block=self._pool_block,
|
||||
**proxy_kwargs)
|
||||
|
||||
return self.proxy_manager[proxy]
|
||||
|
||||
def cert_verify(self, conn, url, verify, cert):
|
||||
"""Verify a SSL certificate. This method should not be called from user
|
||||
code, and is only exposed for use when subclassing the
|
||||
:class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
|
||||
|
||||
:param conn: The urllib3 connection object associated with the cert.
|
||||
:param url: The requested URL.
|
||||
:param verify: Whether we should actually verify the certificate.
|
||||
:param cert: The SSL certificate to verify.
|
||||
"""
|
||||
if url.lower().startswith('https') and verify:
|
||||
|
||||
cert_loc = None
|
||||
|
||||
# Allow self-specified cert location.
|
||||
if verify is not True:
|
||||
cert_loc = verify
|
||||
|
||||
if not cert_loc:
|
||||
cert_loc = DEFAULT_CA_BUNDLE_PATH
|
||||
|
||||
if not cert_loc:
|
||||
raise Exception("Could not find a suitable SSL CA certificate bundle.")
|
||||
|
||||
conn.cert_reqs = 'CERT_REQUIRED'
|
||||
conn.ca_certs = cert_loc
|
||||
else:
|
||||
conn.cert_reqs = 'CERT_NONE'
|
||||
conn.ca_certs = None
|
||||
|
||||
if cert:
|
||||
if not isinstance(cert, basestring):
|
||||
conn.cert_file = cert[0]
|
||||
conn.key_file = cert[1]
|
||||
else:
|
||||
conn.cert_file = cert
|
||||
|
||||
def build_response(self, req, resp):
|
||||
"""Builds a :class:`Response <requests.Response>` object from a urllib3
|
||||
response. This should not be called from user code, and is only exposed
|
||||
for use when subclassing the
|
||||
:class:`HTTPAdapter <requests.adapters.HTTPAdapter>`
|
||||
|
||||
:param req: The :class:`PreparedRequest <PreparedRequest>` used to generate the response.
|
||||
:param resp: The urllib3 response object.
|
||||
"""
|
||||
response = Response()
|
||||
|
||||
# Fallback to None if there's no status_code, for whatever reason.
|
||||
response.status_code = getattr(resp, 'status', None)
|
||||
|
||||
# Make headers case-insensitive.
|
||||
response.headers = CaseInsensitiveDict(getattr(resp, 'headers', {}))
|
||||
|
||||
# Set encoding.
|
||||
response.encoding = get_encoding_from_headers(response.headers)
|
||||
response.raw = resp
|
||||
response.reason = response.raw.reason
|
||||
|
||||
if isinstance(req.url, bytes):
|
||||
response.url = req.url.decode('utf-8')
|
||||
else:
|
||||
response.url = req.url
|
||||
|
||||
# Add new cookies from the server.
|
||||
extract_cookies_to_jar(response.cookies, req, resp)
|
||||
|
||||
# Give the Response some context.
|
||||
response.request = req
|
||||
response.connection = self
|
||||
|
||||
return response
|
||||
|
||||
def get_connection(self, url, proxies=None):
|
||||
"""Returns a urllib3 connection for the given URL. This should not be
|
||||
called from user code, and is only exposed for use when subclassing the
|
||||
:class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
|
||||
|
||||
:param url: The URL to connect to.
|
||||
:param proxies: (optional) A Requests-style dictionary of proxies used on this request.
|
||||
"""
|
||||
proxies = proxies or {}
|
||||
proxy = proxies.get(urlparse(url.lower()).scheme)
|
||||
|
||||
if proxy:
|
||||
proxy = prepend_scheme_if_needed(proxy, 'http')
|
||||
proxy_manager = self.proxy_manager_for(proxy)
|
||||
conn = proxy_manager.connection_from_url(url)
|
||||
else:
|
||||
# Only scheme should be lower case
|
||||
parsed = urlparse(url)
|
||||
url = parsed.geturl()
|
||||
conn = self.poolmanager.connection_from_url(url)
|
||||
|
||||
return conn
|
||||
|
||||
def close(self):
|
||||
"""Disposes of any internal state.
|
||||
|
||||
Currently, this just closes the PoolManager, which closes pooled
|
||||
connections.
|
||||
"""
|
||||
self.poolmanager.clear()
|
||||
|
||||
def request_url(self, request, proxies):
|
||||
"""Obtain the url to use when making the final request.
|
||||
|
||||
If the message is being sent through a HTTP proxy, the full URL has to
|
||||
be used. Otherwise, we should only use the path portion of the URL.
|
||||
|
||||
This should not be called from user code, and is only exposed for use
|
||||
when subclassing the
|
||||
:class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
|
||||
|
||||
:param request: The :class:`PreparedRequest <PreparedRequest>` being sent.
|
||||
:param proxies: A dictionary of schemes to proxy URLs.
|
||||
"""
|
||||
proxies = proxies or {}
|
||||
scheme = urlparse(request.url).scheme
|
||||
proxy = proxies.get(scheme)
|
||||
|
||||
if proxy and scheme != 'https':
|
||||
url = urldefragauth(request.url)
|
||||
else:
|
||||
url = request.path_url
|
||||
|
||||
return url
|
||||
|
||||
def add_headers(self, request, **kwargs):
|
||||
"""Add any headers needed by the connection. As of v2.0 this does
|
||||
nothing by default, but is left for overriding by users that subclass
|
||||
the :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
|
||||
|
||||
This should not be called from user code, and is only exposed for use
|
||||
when subclassing the
|
||||
:class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
|
||||
|
||||
:param request: The :class:`PreparedRequest <PreparedRequest>` to add headers to.
|
||||
:param kwargs: The keyword arguments from the call to send().
|
||||
"""
|
||||
pass
|
||||
|
||||
def proxy_headers(self, proxy):
|
||||
"""Returns a dictionary of the headers to add to any request sent
|
||||
through a proxy. This works with urllib3 magic to ensure that they are
|
||||
correctly sent to the proxy, rather than in a tunnelled request if
|
||||
CONNECT is being used.
|
||||
|
||||
This should not be called from user code, and is only exposed for use
|
||||
when subclassing the
|
||||
:class:`HTTPAdapter <requests.adapters.HTTPAdapter>`.
|
||||
|
||||
:param proxies: The url of the proxy being used for this request.
|
||||
:param kwargs: Optional additional keyword arguments.
|
||||
"""
|
||||
headers = {}
|
||||
username, password = get_auth_from_url(proxy)
|
||||
|
||||
if username and password:
|
||||
headers['Proxy-Authorization'] = _basic_auth_str(username,
|
||||
password)
|
||||
|
||||
return headers
|
||||
|
||||
def send(self, request, stream=False, timeout=None, verify=True, cert=None, proxies=None):
|
||||
"""Sends PreparedRequest object. Returns Response object.
|
||||
|
||||
:param request: The :class:`PreparedRequest <PreparedRequest>` being sent.
|
||||
:param stream: (optional) Whether to stream the request content.
|
||||
:param timeout: (optional) How long to wait for the server to send
|
||||
data before giving up, as a float, or a (`connect timeout, read
|
||||
timeout <user/advanced.html#timeouts>`_) tuple.
|
||||
:type timeout: float or tuple
|
||||
:param verify: (optional) Whether to verify SSL certificates.
|
||||
:param cert: (optional) Any user-provided SSL certificate to be trusted.
|
||||
:param proxies: (optional) The proxies dictionary to apply to the request.
|
||||
"""
|
||||
|
||||
conn = self.get_connection(request.url, proxies)
|
||||
|
||||
self.cert_verify(conn, request.url, verify, cert)
|
||||
url = self.request_url(request, proxies)
|
||||
self.add_headers(request)
|
||||
|
||||
chunked = not (request.body is None or 'Content-Length' in request.headers)
|
||||
|
||||
if isinstance(timeout, tuple):
|
||||
try:
|
||||
connect, read = timeout
|
||||
timeout = TimeoutSauce(connect=connect, read=read)
|
||||
except ValueError as e:
|
||||
# this may raise a string formatting error.
|
||||
err = ("Invalid timeout {0}. Pass a (connect, read) "
|
||||
"timeout tuple, or a single float to set "
|
||||
"both timeouts to the same value".format(timeout))
|
||||
raise ValueError(err)
|
||||
else:
|
||||
timeout = TimeoutSauce(connect=timeout, read=timeout)
|
||||
|
||||
try:
|
||||
if not chunked:
|
||||
resp = conn.urlopen(
|
||||
method=request.method,
|
||||
url=url,
|
||||
body=request.body,
|
||||
headers=request.headers,
|
||||
redirect=False,
|
||||
assert_same_host=False,
|
||||
preload_content=False,
|
||||
decode_content=False,
|
||||
retries=Retry(self.max_retries, read=False),
|
||||
timeout=timeout
|
||||
)
|
||||
|
||||
# Send the request.
|
||||
else:
|
||||
if hasattr(conn, 'proxy_pool'):
|
||||
conn = conn.proxy_pool
|
||||
|
||||
low_conn = conn._get_conn(timeout=timeout)
|
||||
|
||||
try:
|
||||
low_conn.putrequest(request.method,
|
||||
url,
|
||||
skip_accept_encoding=True)
|
||||
|
||||
for header, value in request.headers.items():
|
||||
low_conn.putheader(header, value)
|
||||
|
||||
low_conn.endheaders()
|
||||
|
||||
for i in request.body:
|
||||
low_conn.send(hex(len(i))[2:].encode('utf-8'))
|
||||
low_conn.send(b'\r\n')
|
||||
low_conn.send(i)
|
||||
low_conn.send(b'\r\n')
|
||||
low_conn.send(b'0\r\n\r\n')
|
||||
|
||||
r = low_conn.getresponse()
|
||||
resp = HTTPResponse.from_httplib(
|
||||
r,
|
||||
pool=conn,
|
||||
connection=low_conn,
|
||||
preload_content=False,
|
||||
decode_content=False
|
||||
)
|
||||
except:
|
||||
# If we hit any problems here, clean up the connection.
|
||||
# Then, reraise so that we can handle the actual exception.
|
||||
low_conn.close()
|
||||
raise
|
||||
else:
|
||||
# All is well, return the connection to the pool.
|
||||
conn._put_conn(low_conn)
|
||||
|
||||
except (ProtocolError, socket.error) as err:
|
||||
raise ConnectionError(err, request=request)
|
||||
|
||||
except MaxRetryError as e:
|
||||
if isinstance(e.reason, ConnectTimeoutError):
|
||||
raise ConnectTimeout(e, request=request)
|
||||
|
||||
raise ConnectionError(e, request=request)
|
||||
|
||||
except _ProxyError as e:
|
||||
raise ProxyError(e)
|
||||
|
||||
except (_SSLError, _HTTPError) as e:
|
||||
if isinstance(e, _SSLError):
|
||||
raise SSLError(e, request=request)
|
||||
elif isinstance(e, ReadTimeoutError):
|
||||
raise ReadTimeout(e, request=request)
|
||||
else:
|
||||
raise
|
||||
|
||||
return self.build_response(request, resp)
|
||||
126
script/_Lib/requests/api.py
Normal file
126
script/_Lib/requests/api.py
Normal file
@@ -0,0 +1,126 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
"""
|
||||
requests.api
|
||||
~~~~~~~~~~~~
|
||||
|
||||
This module implements the Requests API.
|
||||
|
||||
:copyright: (c) 2012 by Kenneth Reitz.
|
||||
:license: Apache2, see LICENSE for more details.
|
||||
|
||||
"""
|
||||
|
||||
from . import sessions
|
||||
|
||||
|
||||
def request(method, url, **kwargs):
|
||||
"""Constructs and sends a :class:`Request <Request>`.
|
||||
Returns :class:`Response <Response>` object.
|
||||
|
||||
:param method: method for the new :class:`Request` object.
|
||||
:param url: URL for the new :class:`Request` object.
|
||||
:param params: (optional) Dictionary or bytes to be sent in the query string for the :class:`Request`.
|
||||
:param data: (optional) Dictionary, bytes, or file-like object to send in the body of the :class:`Request`.
|
||||
:param json: (optional) json data to send in the body of the :class:`Request`.
|
||||
:param headers: (optional) Dictionary of HTTP Headers to send with the :class:`Request`.
|
||||
:param cookies: (optional) Dict or CookieJar object to send with the :class:`Request`.
|
||||
:param files: (optional) Dictionary of ``'name': file-like-objects`` (or ``{'name': ('filename', fileobj)}``) for multipart encoding upload.
|
||||
:param auth: (optional) Auth tuple to enable Basic/Digest/Custom HTTP Auth.
|
||||
:param timeout: (optional) How long to wait for the server to send data
|
||||
before giving up, as a float, or a (`connect timeout, read timeout
|
||||
<user/advanced.html#timeouts>`_) tuple.
|
||||
:type timeout: float or tuple
|
||||
:param allow_redirects: (optional) Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
|
||||
:type allow_redirects: bool
|
||||
:param proxies: (optional) Dictionary mapping protocol to the URL of the proxy.
|
||||
:param verify: (optional) if ``True``, the SSL cert will be verified. A CA_BUNDLE path can also be provided.
|
||||
:param stream: (optional) if ``False``, the response content will be immediately downloaded.
|
||||
:param cert: (optional) if String, path to ssl client cert file (.pem). If Tuple, ('cert', 'key') pair.
|
||||
|
||||
Usage::
|
||||
|
||||
>>> import requests
|
||||
>>> req = requests.request('GET', 'http://httpbin.org/get')
|
||||
<Response [200]>
|
||||
"""
|
||||
|
||||
session = sessions.Session()
|
||||
return session.request(method=method, url=url, **kwargs)
|
||||
|
||||
|
||||
def get(url, **kwargs):
|
||||
"""Sends a GET request. Returns :class:`Response` object.
|
||||
|
||||
:param url: URL for the new :class:`Request` object.
|
||||
:param \*\*kwargs: Optional arguments that ``request`` takes.
|
||||
"""
|
||||
|
||||
kwargs.setdefault('allow_redirects', True)
|
||||
return request('get', url, **kwargs)
|
||||
|
||||
|
||||
def options(url, **kwargs):
|
||||
"""Sends a OPTIONS request. Returns :class:`Response` object.
|
||||
|
||||
:param url: URL for the new :class:`Request` object.
|
||||
:param \*\*kwargs: Optional arguments that ``request`` takes.
|
||||
"""
|
||||
|
||||
kwargs.setdefault('allow_redirects', True)
|
||||
return request('options', url, **kwargs)
|
||||
|
||||
|
||||
def head(url, **kwargs):
|
||||
"""Sends a HEAD request. Returns :class:`Response` object.
|
||||
|
||||
:param url: URL for the new :class:`Request` object.
|
||||
:param \*\*kwargs: Optional arguments that ``request`` takes.
|
||||
"""
|
||||
|
||||
kwargs.setdefault('allow_redirects', False)
|
||||
return request('head', url, **kwargs)
|
||||
|
||||
|
||||
def post(url, data=None, json=None, **kwargs):
    """Sends a POST request. Returns :class:`Response` object.

    :param url: URL for the new :class:`Request` object.
    :param data: (optional) Dictionary, bytes, or file-like object to send
        in the body of the :class:`Request`.
    :param json: (optional) json data to send in the body of the
        :class:`Request`.
    :param kwargs: Optional arguments that ``request`` takes.
    """
    # Delegate entirely to ``request``; the body payload is forwarded as-is.
    return request('post', url, data=data, json=json, **kwargs)
|
||||
|
||||
|
||||
def put(url, data=None, **kwargs):
    """Sends a PUT request. Returns :class:`Response` object.

    :param url: URL for the new :class:`Request` object.
    :param data: (optional) Dictionary, bytes, or file-like object to send
        in the body of the :class:`Request`.
    :param kwargs: Optional arguments that ``request`` takes.
    """
    # Delegate entirely to ``request``; the body payload is forwarded as-is.
    return request('put', url, data=data, **kwargs)
|
||||
|
||||
|
||||
def patch(url, data=None, **kwargs):
    """Sends a PATCH request. Returns :class:`Response` object.

    :param url: URL for the new :class:`Request` object.
    :param data: (optional) Dictionary, bytes, or file-like object to send
        in the body of the :class:`Request`.
    :param kwargs: Optional arguments that ``request`` takes.
    """
    # Delegate entirely to ``request``; the body payload is forwarded as-is.
    return request('patch', url, data=data, **kwargs)
|
||||
|
||||
|
||||
def delete(url, **kwargs):
    """Sends a DELETE request. Returns :class:`Response` object.

    :param url: URL for the new :class:`Request` object.
    :param kwargs: Optional arguments that ``request`` takes.
    """
    # DELETE carries no body parameter here; everything else passes through.
    return request('delete', url, **kwargs)
|
||||
197
script/_Lib/requests/auth.py
Normal file
197
script/_Lib/requests/auth.py
Normal file
@@ -0,0 +1,197 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
"""
|
||||
requests.auth
|
||||
~~~~~~~~~~~~~
|
||||
|
||||
This module contains the authentication handlers for Requests.
|
||||
"""
|
||||
|
||||
import os
|
||||
import re
|
||||
import time
|
||||
import hashlib
|
||||
|
||||
from base64 import b64encode
|
||||
|
||||
from .compat import urlparse, str
|
||||
from .cookies import extract_cookies_to_jar
|
||||
from .utils import parse_dict_header, to_native_string
|
||||
|
||||
CONTENT_TYPE_FORM_URLENCODED = 'application/x-www-form-urlencoded'
|
||||
CONTENT_TYPE_MULTI_PART = 'multipart/form-data'
|
||||
|
||||
|
||||
def _basic_auth_str(username, password):
    """Return the value for a Basic ``Authorization`` header.

    The ``user:pass`` pair is latin-1 encoded and base64'd per RFC 7617.
    """
    credentials = ('%s:%s' % (username, password)).encode('latin1')
    token = b64encode(credentials).strip()
    return 'Basic ' + to_native_string(token)
|
||||
|
||||
|
||||
class AuthBase(object):
    """Base class that all auth implementations derive from."""

    def __call__(self, r):
        # Subclasses must attach credentials to the prepared request ``r``
        # and return it; the base class is deliberately unusable.
        raise NotImplementedError('Auth hooks must be callable.')
|
||||
|
||||
|
||||
class HTTPBasicAuth(AuthBase):
    """Attaches HTTP Basic Authentication to the given Request object."""

    def __init__(self, username, password):
        self.username = username
        self.password = password

    def __call__(self, r):
        # Compute the header fresh on every call from the stored credentials.
        auth_header = _basic_auth_str(self.username, self.password)
        r.headers['Authorization'] = auth_header
        return r
|
||||
|
||||
|
||||
class HTTPProxyAuth(HTTPBasicAuth):
    """Attaches HTTP Proxy Authentication to a given Request object."""

    def __call__(self, r):
        # Same credential encoding as Basic auth, but targeted at the proxy.
        auth_header = _basic_auth_str(self.username, self.password)
        r.headers['Proxy-Authorization'] = auth_header
        return r
|
||||
|
||||
|
||||
class HTTPDigestAuth(AuthBase):
    """Attaches HTTP Digest Authentication to the given Request object.

    Implements the challenge/response flow of RFC 2617/7616: the first
    request triggers a 401, whose ``WWW-Authenticate`` challenge is parsed
    and answered by re-sending the request with a ``Digest`` header.
    """

    def __init__(self, username, password):
        self.username = username
        self.password = password
        # State carried across the 401 round trip / subsequent requests.
        self.last_nonce = ''
        self.nonce_count = 0
        self.chal = {}       # parsed challenge from the server
        self.pos = None      # saved body position, to rewind on retry

    def build_digest_header(self, method, url):
        """Build the ``Authorization: Digest ...`` header value.

        Uses the challenge stored in ``self.chal``. Returns ``None`` when
        the challenge requests an unsupported algorithm or qop.
        """
        realm = self.chal['realm']
        nonce = self.chal['nonce']
        qop = self.chal.get('qop')
        algorithm = self.chal.get('algorithm')
        opaque = self.chal.get('opaque')

        if algorithm is None:
            _algorithm = 'MD5'
        else:
            _algorithm = algorithm.upper()

        # FIX: initialize so an unsupported algorithm falls through to the
        # ``return None`` guard below instead of raising NameError.
        hash_utf8 = None

        # lambdas assume digest modules are imported at the top level
        if _algorithm == 'MD5' or _algorithm == 'MD5-SESS':
            def md5_utf8(x):
                if isinstance(x, str):
                    x = x.encode('utf-8')
                return hashlib.md5(x).hexdigest()
            hash_utf8 = md5_utf8
        elif _algorithm == 'SHA':
            def sha_utf8(x):
                if isinstance(x, str):
                    x = x.encode('utf-8')
                return hashlib.sha1(x).hexdigest()
            hash_utf8 = sha_utf8

        KD = lambda s, d: hash_utf8("%s:%s" % (s, d))

        if hash_utf8 is None:
            return None

        # XXX not implemented yet
        entdig = None
        p_parsed = urlparse(url)
        path = p_parsed.path
        if p_parsed.query:
            path += '?' + p_parsed.query

        # A1/A2 per RFC 2617 section 3.2.2.2.
        A1 = '%s:%s:%s' % (self.username, realm, self.password)
        A2 = '%s:%s' % (method, path)

        HA1 = hash_utf8(A1)
        HA2 = hash_utf8(A2)

        # nc (nonce count) restarts whenever the server issues a new nonce.
        if nonce == self.last_nonce:
            self.nonce_count += 1
        else:
            self.nonce_count = 1
        ncvalue = '%08x' % self.nonce_count

        # Client nonce: hash of count, server nonce, time and random bytes.
        s = str(self.nonce_count).encode('utf-8')
        s += nonce.encode('utf-8')
        s += time.ctime().encode('utf-8')
        s += os.urandom(8)
        cnonce = (hashlib.sha1(s).hexdigest()[:16])

        noncebit = "%s:%s:%s:%s:%s" % (nonce, ncvalue, cnonce, qop, HA2)
        if _algorithm == 'MD5-SESS':
            HA1 = hash_utf8('%s:%s:%s' % (HA1, nonce, cnonce))

        if qop is None:
            respdig = KD(HA1, "%s:%s" % (nonce, HA2))
        elif qop == 'auth' or 'auth' in qop.split(','):
            respdig = KD(HA1, noncebit)
        else:
            # XXX handle auth-int.
            return None

        self.last_nonce = nonce

        # XXX should the partial digests be encoded too?
        base = 'username="%s", realm="%s", nonce="%s", uri="%s", ' \
               'response="%s"' % (self.username, realm, nonce, path, respdig)
        if opaque:
            base += ', opaque="%s"' % opaque
        if algorithm:
            base += ', algorithm="%s"' % algorithm
        if entdig:
            base += ', digest="%s"' % entdig
        if qop:
            base += ', qop="auth", nc=%s, cnonce="%s"' % (ncvalue, cnonce)

        return 'Digest %s' % (base)

    def handle_401(self, r, **kwargs):
        """Takes the given response and tries digest-auth, if needed."""

        if self.pos is not None:
            # Rewind the file position indicator of the body to where
            # it was to resend the request.
            r.request.body.seek(self.pos)
        # Guard against looping forever on repeated 401s.
        num_401_calls = getattr(self, 'num_401_calls', 1)
        s_auth = r.headers.get('www-authenticate', '')

        if 'digest' in s_auth.lower() and num_401_calls < 2:

            setattr(self, 'num_401_calls', num_401_calls + 1)
            # Strip the leading "Digest " scheme token, keep the k=v pairs.
            pat = re.compile(r'digest ', flags=re.IGNORECASE)
            self.chal = parse_dict_header(pat.sub('', s_auth, count=1))

            # Consume content and release the original connection
            # to allow our new request to reuse the same one.
            r.content
            r.raw.release_conn()
            prep = r.request.copy()
            extract_cookies_to_jar(prep._cookies, r.request, r.raw)
            prep.prepare_cookies(prep._cookies)

            prep.headers['Authorization'] = self.build_digest_header(
                prep.method, prep.url)
            _r = r.connection.send(prep, **kwargs)
            _r.history.append(r)
            _r.request = prep

            return _r

        setattr(self, 'num_401_calls', 1)
        return r

    def __call__(self, r):
        # If we have a saved nonce, skip the 401
        if self.last_nonce:
            r.headers['Authorization'] = self.build_digest_header(r.method, r.url)
        try:
            # Remember where the body starts so a retry can rewind it.
            self.pos = r.body.tell()
        except AttributeError:
            # Body is not seekable (or absent) — nothing to remember.
            pass
        r.register_hook('response', self.handle_401)
        return r
|
||||
5026
script/_Lib/requests/cacert.pem
Normal file
5026
script/_Lib/requests/cacert.pem
Normal file
File diff suppressed because it is too large
Load Diff
25
script/_Lib/requests/certs.py
Normal file
25
script/_Lib/requests/certs.py
Normal file
@@ -0,0 +1,25 @@
|
||||
#!/usr/bin/env python
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
"""
|
||||
certs.py
|
||||
~~~~~~~~
|
||||
|
||||
This module returns the preferred default CA certificate bundle.
|
||||
|
||||
If you are packaging Requests, e.g., for a Linux distribution or a managed
|
||||
environment, you can change the definition of where() to return a separately
|
||||
packaged CA bundle.
|
||||
"""
|
||||
import os.path

try:
    # Prefer the certifi package's bundle when it is installed.
    from certifi import where
except ImportError:
    def where():
        """Return the preferred certificate bundle."""
        # Fall back to the CA bundle vendored alongside this module.
        bundle_dir = os.path.dirname(__file__)
        return os.path.join(bundle_dir, 'cacert.pem')


if __name__ == '__main__':
    print(where())
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user