Removed musr2dat from this tree.

salman 2013-06-06 09:00:47 +00:00
parent 0465f3bc61
commit bd5e55bf94


@@ -1,134 +0,0 @@
#!/usr/bin/perl
# This script converts the parameter list in a musrfit input file
# into a table format.
# The strategy is to first read the theory block to know how many
# shared parameters to expect, then look for map lines to identify
# the non-shared parameters for each run, then go through the
# parameter list and finally spit everything out.
#
# Copyright (c) 2009 Zaher Salman (zaher.salman@psi.ch).
# TODO: Alpha, Background and N0 lines are still not parsed.
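#
# The sketch below is only an illustration of the msr-file blocks this
# script relies on; the parameter names, numbers and run name are made up:
#
#   FITPARAMETER
#       1 Asy   0.26  0.01
#       2 Rate  0.75  0.05
#       3 Freq  1.20  0.02
#   THEORY
#       asymmetry   1
#       simplExpo   2
#   RUN data/lem09_his_2809 ...
#       map   3 0 0 0 0 0 0 0 0 0
#
# The numbers quoted in the THEORY block (1 and 2) are treated as shared
# parameters; the non-zero entries of each map line (3) are the
# run-specific ones.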
$NFile=1;
foreach $Arg (@ARGV) {
open(MSRF,"<$Arg") or die "Cannot open $Arg: $!\n";
@file=<MSRF>;
close(MSRF);
# Reset some variables and arrays
@Shared=();
%RUN=();
%MAP=();
@RUNParams=();
$newfile = "";
$titles = "";
$flag = 0;
$NFITPARAMETERS=0;
$NTHEORY=0;
$NRUN=0;
# Remove comment lines
@file = grep {!/^\#/} @file;
# Remove empty lines
@file = grep {/\S/} @file;
# Identify different blocks
$i=0;
foreach $line (@file)
{
if ($line =~ /FITPARAMETER/) {$NFITPARAMETERS=$i;}
if ($line =~ /THEORY/) {$NTHEORY=$i;}
if ($line =~ /RUN/ && $NRUN==0) {$NRUN=$i;}
$i++;
}
@FPBlock=@file[$NFITPARAMETERS+1..$NTHEORY-1];
@TBlock=@file[$NTHEORY+1..$NRUN-1];
# Get shared parameters
foreach $TLine (@TBlock) {
# Split each theory line to collect the numbers of the shared parameters
@tmp=split(/\s+/,$TLine);
foreach (@tmp) {
if ($_ eq $_+0 ) {
# This is a number, keep it in the Shared array
@Shared=(@Shared,$_);
}
}
}
# Trick: use a hash to keep only unique RUN lines.
# Caveat: this fails if the same run is fitted twice (different ranges etc.)
@MAPS = grep {/map /} @file;
@RUNS = grep {/RUN/} @file;
$counter=0;
# for ($i=0;$i<=$#RUNS;$i++) {
# $key=$RUNS[$i].$MAPS[$i];
# $key =~ s/\s+//g;
foreach $key (@RUNS){
# This gets rid of duplicates
$RUN{$key}=$counter;
$MAP{$key}=$MAP{$key}.$MAPS[$counter];
$counter++;
}
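# For illustration (run names made up): two different RUN lines, e.g.
#   RUN data/lem09_his_2809 ...   followed by   map 3 0 0 ...
#   RUN data/lem09_his_2810 ...   followed by   map 4 0 0 ...
# give two keys in %RUN with one map string each in %MAP, whereas the same
# RUN line repeated (e.g. refitted over a different range) collapses into a
# single key whose %MAP entry is the concatenation of both map lines.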
# Number of runs (or independent sets of parameters) in this file
$NRuns=1;
foreach (sort { $RUN{$a} <=> $RUN{$b}} keys %RUN ) {
@RunParams=();
$NP=0;
# print $_."=".$MAP{$_}."\n";
@tmp=split(/\s+/,$MAP{$_});
# Remove first element (map)
shift(@tmp);
foreach (@tmp) {
if ($_ ne "" && $_>0 ) {
@RunParams=(@RunParams,$_);
$NP++;
}
}
if ($NP>0) {
$orders=join(",",@RunParams);
$RUNParams[$NRuns]=$orders;
$NRuns++;
}
}
# Split each parameter line to extract its value and error
foreach $line (@FPBlock) {
@Param=split(/\s+/,$line);
# Create a hash with the parameter order as a key
# and the value and error as value
$P{$Param[1]}=$Param[3].",".$Param[4];
$PName{$Param[1]}=$Param[2];
}
# Now we have everything, start assembling the output.
# The first line contains the parameter names
# and is written only for the first input file.
if ($NFile==1) {
@Pnum=split(/,/,$RUNParams[1]);
foreach (@Pnum,@Shared) {
$DatFile=join("\t",$DatFile,$PName{$_},"d".$PName{$_});
}
$DatFile=$DatFile."\n";
}
# Then the parameter values from all the files.
# Note: there is no check that all files use the same theory function.
for ($i=1;$i<=$NRuns-1;$i++) {
# print "RUN: ".$i." Params:".$RUNParams[$i]."\n";
@Pnum=split(/,/,$RUNParams[$i]);
# First go for the shared parameters
foreach (@Pnum,@Shared) {
($value,$err)=split(/,/,$P{$_});
$DatFile=join("\t",$DatFile,$value,$err);
}
$DatFile=$DatFile."\n";
}
# Go to the next file
$NFile=$NFile+1;
}
print $DatFile;
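# Example invocation (the file names below are placeholders):
#   perl musr2dat fit_2809.msr fit_2810.msr > params.dat
# This prints one header line of parameter names (taken from the first file)
# followed by one tab-separated row of value/error pairs per run.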