(1) Changed the normalization of the maximum likelihood. (2) ASCII export from the canvas now adds an initial space for empty columns, for easier parsing. (3) Fixed some typos.
commit 392cd0b4b3
parent 258a0ce336
@@ -492,6 +492,7 @@ Double_t PFunction::EvalNode(PFuncTreeNode &node)
     if (denominator == 0.0) {
       cerr << endl << "**PANIC ERROR**: PFunction::EvalNode: division by 0.0";
       cerr << endl << "**PANIC ERROR**: PFunction::EvalNode: requested operation: " << EvalNode(node.children[0]) << "/" << EvalNode(node.children[1]);
+      cerr << endl << ">> " << fFuncString.Data() << endl;
       cerr << endl;
       assert(0);
     }
@@ -5328,9 +5328,9 @@ void PMusrCanvas::SaveDataAscii()
         fout << dumpVector[j].dataErr[i] << ", ";
       } else {
         if (dumpVector[j].dataErr.size() > 0)
-          fout << ", , , ";
+          fout << " , , , ";
         else
-          fout << ", , ";
+          fout << " , , ";
       }
     }
     // write theory
@@ -5339,7 +5339,7 @@ void PMusrCanvas::SaveDataAscii()
         fout << dumpVector[j].theoryX[i] << ", ";
         fout << dumpVector[j].theory[i] << ", ";
       } else {
-        fout << ", , ";
+        fout << " , , ";
       }
     }
     // write last theory entry
@@ -5347,7 +5347,7 @@ void PMusrCanvas::SaveDataAscii()
       fout << dumpVector[dumpVector.size()-1].theoryX[i] << ", ";
       fout << dumpVector[dumpVector.size()-1].theory[i];
     } else {
-      fout << ", ";
+      fout << " , ";
     }
     fout << endl;
   }
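The three SaveDataAscii() hunks above change the placeholder for empty columns from ", , " to " , , ", so that every column of the exported ASCII file contains at least a space character. As a minimal illustration of why this can make parsing easier, the sketch below splits one exported row on commas; the row content, the helper name SplitCsvLine and the main() driver are assumptions made for the example, not part of musrfit. With the leading space, an empty column comes back as the token " " rather than an empty string, which simple readers that drop empty tokens would otherwise lose.

// Illustration only (not part of the commit): splitting one line of the
// exported ASCII file into comma-separated fields.
#include <iostream>
#include <sstream>
#include <string>
#include <vector>

std::vector<std::string> SplitCsvLine(const std::string& line)
{
  std::vector<std::string> fields;
  std::istringstream iss(line);
  std::string token;
  while (std::getline(iss, token, ','))
    fields.push_back(token);
  return fields;
}

int main()
{
  // hypothetical data row where the first block has no error column (cf. " , , , " in the diff)
  const std::string row = " , , , , 0.123, 1.45, 0.02";
  for (const auto& f : SplitCsvLine(row))
    std::cout << '[' << f << "] ";
  std::cout << std::endl;
  return 0;
}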
@@ -211,7 +211,7 @@ Double_t PRunMuMinus::CalcChiSquareExpected(const std::vector<Double_t>& par)
 // CalcMaxLikelihood
 //--------------------------------------------------------------------------
 /**
- * <p>Calculate log max-likelihood.
+ * <p>Calculate log max-likelihood. See http://pdg.lbl.gov/index.html
  *
  * <b>return:</b>
  * - log max-likelihood value
@@ -258,13 +258,14 @@ Double_t PRunMuMinus::CalcMaxLikelihood(const std::vector<Double_t>& par)
     time = fData.GetDataTimeStart() + (Double_t)i*fData.GetDataTimeStep();
     // calculate theory for the given parameter set
     theo = fTheory->Func(time, par, fFuncValues);
-    // check if data value is not too small
-    if (fData.GetValue()->at(i) > 1.0e-9)
-      data = fData.GetValue()->at(i);
-    else
-      data = 1.0e-9;
-    // add maximum log likelihood contribution of bin i
-    mllh -= data*TMath::Log(theo) - theo - TMath::LnGamma(data+1);
+
+    data = fData.GetValue()->at(i);
+
+    if (data > 1.0e-9) {
+      mllh += (theo-data) + data*log(data/theo);
+    } else {
+      mllh += (theo-data);
+    }
   }

   return mllh;
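The replaced body of PRunMuMinus::CalcMaxLikelihood drops the Poisson log-likelihood term built from TMath::LnGamma in favour of the likelihood-ratio form (theo-data) + data*ln(data/theo) per bin (cf. the PDG statistics review linked in the doxygen comment); for (nearly) empty bins only the (theo-data) term survives. The standalone sketch below shows the new per-bin sum in isolation; the function name, the plain std::vector interface and the assumption that the conventional factor of 2 is applied elsewhere (e.g. via the fitter's error definition) are mine, not part of the commit.

// Illustrative sketch only (not part of the commit): per-bin contribution of the
// new maximum-likelihood normalization, mirroring the structure of CalcMaxLikelihood.
#include <cmath>
#include <cstddef>
#include <vector>

double CalcMaxLikelihoodSketch(const std::vector<double>& data,  // measured counts per bin
                               const std::vector<double>& theo)  // theory prediction per bin
{
  double mllh = 0.0; // log max-likelihood (up to a factor of 2, assumed to be applied elsewhere)

  for (std::size_t i = 0; i < data.size(); ++i) {
    if (data[i] > 1.0e-9) {
      // likelihood-ratio form: (nu - n) + n*ln(n/nu)
      mllh += (theo[i] - data[i]) + data[i] * std::log(data[i] / theo[i]);
    } else {
      // (nearly) empty bins: the logarithmic term vanishes, only (nu - n) remains
      mllh += (theo[i] - data[i]);
    }
  }

  return mllh;
}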
@@ -144,7 +144,7 @@ Double_t PRunNonMusr::CalcChiSquareExpected(const std::vector<Double_t>& par)
  */
 Double_t PRunNonMusr::CalcMaxLikelihood(const std::vector<Double_t>& par)
 {
-  cout << endl << "PRunSingleHisto::CalcMaxLikelihood(): not implemented yet ..." << endl;
+  cout << endl << "PRunNonMusr::CalcMaxLikelihood(): not implemented yet ..." << endl;

   return 1.0;
 }
@@ -296,7 +296,7 @@ Double_t PRunSingleHisto::CalcChiSquareExpected(const std::vector<Double_t>& par
 // CalcMaxLikelihood (public)
 //--------------------------------------------------------------------------
 /**
- * <p>Calculate log maximum-likelihood.
+ * <p>Calculate log maximum-likelihood. See http://pdg.lbl.gov/index.html
  *
  * <b>return:</b>
  * - log maximum-likelihood value
@@ -381,13 +381,14 @@ Double_t PRunSingleHisto::CalcMaxLikelihood(const std::vector<Double_t>& par)
     // calculate theory for the given parameter set
     theo = N0*TMath::Exp(-time/tau)*(1+fTheory->Func(time, par, fFuncValues))+bkg;
     theo *= normalizer;
-    // check if data value is not too small
-    if (fData.GetValue()->at(i) > 1.0e-9)
-      data = normalizer*fData.GetValue()->at(i);
-    else
-      data = 1.0e-9;
-    // add maximum log likelihood contribution of bin i
-    mllh -= data*TMath::Log(theo) - theo - TMath::LnGamma(data+1);
+
+    data = normalizer*fData.GetValue()->at(i);
+
+    if (data > 1.0e-9) {
+      mllh += (theo-data) + data*log(data/theo);
+    } else {
+      mllh += (theo-data);
+    }
   }

   return mllh;
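In PRunSingleHisto::CalcMaxLikelihood both data and theory enter the new per-bin term already multiplied by the same normalizer. One property of the likelihood-ratio form, easy to verify numerically, is that such a common scale factor c scales each contribution linearly: (c*theo - c*data) + c*data*ln(c*data/(c*theo)) = c*[(theo-data) + data*ln(data/theo)]. The small self-contained check below illustrates this; the numbers and function names are invented for the example and are not part of the commit.

// Numerical check (illustration only): with the new per-bin form,
// contribution(c*n, c*nu) == c * contribution(n, nu).
#include <cassert>
#include <cmath>
#include <iostream>

double BinContribution(double data, double theo)
{
  if (data > 1.0e-9)
    return (theo - data) + data * std::log(data / theo);
  return theo - data;
}

int main()
{
  const double n = 42.0, nu = 40.0, c = 0.125; // counts, theory, hypothetical normalizer
  const double scaled = BinContribution(c * n, c * nu);
  const double direct = c * BinContribution(n, nu);
  std::cout << scaled << " vs " << direct << std::endl; // agree up to rounding
  assert(std::fabs(scaled - direct) < 1.0e-12);
  return 0;
}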