Page MenuHomeHEPForge

No OneTemporary

diff --git a/analyses/pluginALICE/ALICE_2010_S8624100.cc b/analyses/pluginALICE/ALICE_2010_S8624100.cc
--- a/analyses/pluginALICE/ALICE_2010_S8624100.cc
+++ b/analyses/pluginALICE/ALICE_2010_S8624100.cc
@@ -1,94 +1,92 @@
// -*- C++ -*-
#include "Rivet/Analysis.hh"
#include "Rivet/Projections/ChargedFinalState.hh"
namespace Rivet {
class ALICE_2010_S8624100 : public Analysis {
public:
/// @name Constructors etc.
//@{
/// Constructor
ALICE_2010_S8624100()
: Analysis("ALICE_2010_S8624100")
{ }
//@}
public:
/// @name Analysis methods
//@{
/// Book histograms and initialise projections before the run
void init() {
ChargedFinalState cfs05(-0.5, 0.5);
ChargedFinalState cfs10(-1.0, 1.0);
ChargedFinalState cfs13(-1.3, 1.3);
declare(cfs05, "CFS05");
declare(cfs10, "CFS10");
declare(cfs13, "CFS13");
if (fuzzyEquals(sqrtS()/GeV, 900, 1E-3)) {
book(_h_dN_dNch_05 ,11, 1, 1);
book(_h_dN_dNch_10 ,12, 1, 1);
book(_h_dN_dNch_13 ,13, 1, 1);
} else if (fuzzyEquals(sqrtS()/GeV, 2360, 1E-3)) {
book(_h_dN_dNch_05 ,17, 1, 1);
book(_h_dN_dNch_10 ,18, 1, 1);
book(_h_dN_dNch_13 ,19, 1, 1);
}
}
/// Perform the per-event analysis
void analyze(const Event& event) {
- const double weight = 1.0;
-
- const ChargedFinalState& charged_05 = apply<ChargedFinalState>(event, "CFS05");
+ const ChargedFinalState& charged_05 = apply<ChargedFinalState>(event, "CFS05");
const ChargedFinalState& charged_10 = apply<ChargedFinalState>(event, "CFS10");
const ChargedFinalState& charged_13 = apply<ChargedFinalState>(event, "CFS13");
- _h_dN_dNch_05->fill(charged_05.size(), weight);
- _h_dN_dNch_10->fill(charged_10.size(), weight);
- _h_dN_dNch_13->fill(charged_13.size(), weight);
+ _h_dN_dNch_05->fill(charged_05.size());
+ _h_dN_dNch_10->fill(charged_10.size());
+ _h_dN_dNch_13->fill(charged_13.size());
}
/// Normalise histograms etc., after the run
void finalize() {
normalize(_h_dN_dNch_05);
normalize(_h_dN_dNch_10);
normalize(_h_dN_dNch_13);
}
//@}
private:
/// @name Histograms
//@{
Histo1DPtr _h_dN_dNch_05;
Histo1DPtr _h_dN_dNch_10;
Histo1DPtr _h_dN_dNch_13;
//@}
};
// The hook for the plugin system
DECLARE_RIVET_PLUGIN(ALICE_2010_S8624100);
}
diff --git a/analyses/pluginALICE/ALICE_2010_S8625980.cc b/analyses/pluginALICE/ALICE_2010_S8625980.cc
--- a/analyses/pluginALICE/ALICE_2010_S8625980.cc
+++ b/analyses/pluginALICE/ALICE_2010_S8625980.cc
@@ -1,99 +1,97 @@
// -*- C++ -*-
#include "Rivet/Analysis.hh"
#include "Rivet/Projections/ChargedFinalState.hh"
namespace Rivet {
class ALICE_2010_S8625980 : public Analysis {
public:
/// @name Constructors etc.
//@{
/// Constructor
ALICE_2010_S8625980()
- : Analysis("ALICE_2010_S8625980"),
- _Nevt_after_cuts(0.0)
+ : Analysis("ALICE_2010_S8625980")
{ }
//@}
public:
/// @name Analysis methods
//@{
/// Book histograms and initialise projections before the run
void init() {
ChargedFinalState cfs(-1.0, 1.0);
declare(cfs, "CFS");
if (fuzzyEquals(sqrtS()/GeV, 900, 1E-3)) {
book(_h_dN_deta ,4, 1, 1);
} else if (fuzzyEquals(sqrtS()/GeV, 2360, 1E-3)) {
book(_h_dN_deta ,5, 1, 1);
} else if (fuzzyEquals(sqrtS()/GeV, 7000, 1E-3)) {
book(_h_dN_deta ,6, 1, 1);
book(_h_dN_dNch ,3, 1, 1);
}
+ book(_Nevt_after_cuts, "Nevt_after_cuts");
}
/// Perform the per-event analysis
void analyze(const Event& event) {
- const double weight = 1.0;
-
const ChargedFinalState& charged = apply<ChargedFinalState>(event, "CFS");
if (charged.size() < 1) {
vetoEvent;
}
- _Nevt_after_cuts += weight;
+ _Nevt_after_cuts->fill();
foreach (const Particle& p, charged.particles()) {
const double eta = p.eta();
- _h_dN_deta->fill(eta, weight);
+ _h_dN_deta->fill(eta);
}
if (fuzzyEquals(sqrtS()/GeV, 7000, 1E-3)) {
- _h_dN_dNch->fill(charged.size(), weight);
+ _h_dN_dNch->fill(charged.size());
}
}
/// Normalise histograms etc., after the run
void finalize() {
if (fuzzyEquals(sqrtS()/GeV, 7000, 1E-3)) {
normalize(_h_dN_dNch);
}
scale(_h_dN_deta, 1.0/_Nevt_after_cuts);
}
//@}
private:
/// @name Histograms
//@{
Histo1DPtr _h_dN_deta;
Histo1DPtr _h_dN_dNch;
- double _Nevt_after_cuts;
+ CounterPtr _Nevt_after_cuts;
//@}
};
// The hook for the plugin system
DECLARE_RIVET_PLUGIN(ALICE_2010_S8625980);
}
diff --git a/analyses/pluginALICE/ALICE_2010_S8706239.cc b/analyses/pluginALICE/ALICE_2010_S8706239.cc
--- a/analyses/pluginALICE/ALICE_2010_S8706239.cc
+++ b/analyses/pluginALICE/ALICE_2010_S8706239.cc
@@ -1,101 +1,100 @@
// -*- C++ -*-
#include "Rivet/Analysis.hh"
#include "Rivet/Projections/ChargedFinalState.hh"
namespace Rivet {
class ALICE_2010_S8706239 : public Analysis {
public:
/// @name Constructors etc.
//@{
/// Constructor
ALICE_2010_S8706239()
- : Analysis("ALICE_2010_S8706239"),
- _Nevt_after_cuts(0.0)
+ : Analysis("ALICE_2010_S8706239")
{ }
//@}
public:
/// @name Analysis methods
//@{
/// Book histograms and initialise projections before the run
void init() {
ChargedFinalState cfs(-0.8, 0.8, 0.15);
declare(cfs, "CFS");
book(_h_pT ,4, 1, 1);
book(_h_pT_Nch_015 ,11, 1, 1);
book(_h_pT_Nch_05 ,12, 1, 1);
+ book(_Nevt_after_cuts,"Nevt_after_cuts");
+
}
/// Perform the per-event analysis
void analyze(const Event& event) {
- const double weight = 1.0;
-
const ChargedFinalState& charged = apply<ChargedFinalState>(event, "CFS");
- _Nevt_after_cuts += weight;
+ _Nevt_after_cuts->fill();
// Get number of particles that fulfill certain pT requirements
int Nch_015 = 0;
int Nch_05 = 0;
foreach (const Particle& p, charged.particles()) {
double pT = p.pT()/GeV;
if (pT < 4.0) Nch_015++;
if (pT > 0.5 && pT < 4.0) Nch_05++;
}
// Now we can fill histograms
foreach (const Particle& p, charged.particles()) {
double pT = p.pT()/GeV;
- if (pT < 4.0) _h_pT_Nch_015 ->fill(Nch_015, pT, weight);
- if (pT > 0.5 && pT < 4.0) _h_pT_Nch_05 ->fill(Nch_05, pT, weight);
+ if (pT < 4.0) _h_pT_Nch_015 ->fill(Nch_015, pT);
+ if (pT > 0.5 && pT < 4.0) _h_pT_Nch_05 ->fill(Nch_05, pT);
// To get the Yield, fill appropriate weight 1/(2PI * pT * d eta)
- _h_pT->fill(pT, weight /(TWOPI*pT*1.6) );
+ _h_pT->fill(pT, 1.0 /(TWOPI*pT*1.6) );
}
}
/// Normalise histograms etc., after the run
void finalize() {
scale(_h_pT, 1.0/_Nevt_after_cuts);
}
//@}
private:
/// @name Histograms
//@{
Histo1DPtr _h_pT;
Profile1DPtr _h_pT_Nch_015 ;
Profile1DPtr _h_pT_Nch_05 ;
- double _Nevt_after_cuts;
+ CounterPtr _Nevt_after_cuts;
//@}
};
// The hook for the plugin system
DECLARE_RIVET_PLUGIN(ALICE_2010_S8706239);
}
diff --git a/analyses/pluginALICE/ALICE_2011_S8909580.cc b/analyses/pluginALICE/ALICE_2011_S8909580.cc
--- a/analyses/pluginALICE/ALICE_2011_S8909580.cc
+++ b/analyses/pluginALICE/ALICE_2011_S8909580.cc
@@ -1,103 +1,102 @@
#include "Rivet/Analysis.hh"
#include "Rivet/Projections/FinalState.hh"
#include "Rivet/Projections/UnstableFinalState.hh"
namespace Rivet {
class ALICE_2011_S8909580 : public Analysis {
public:
ALICE_2011_S8909580()
: Analysis("ALICE_2011_S8909580")
{}
public:
void init() {
const UnstableFinalState ufs(Cuts::abseta < 15);
declare(ufs, "UFS");
book(_histPtK0s ,1, 1, 1);
book(_histPtLambda ,2, 1, 1);
book(_histPtAntiLambda ,3, 1, 1);
book(_histPtXi ,4, 1, 1);
book(_histPtPhi ,5, 1, 1);
book(_temp_h_Lambdas ,"TMP/h_Lambdas", refData(6, 1, 1));
book(_temp_h_Kzeros ,"TMP/h_Kzeros", refData(6, 1, 1));
book(_h_LamKzero ,6, 1, 1);
}
void analyze(const Event& event) {
- const double weight = 1.0;
const UnstableFinalState& ufs = apply<UnstableFinalState>(event, "UFS");
foreach (const Particle& p, ufs.particles()) {
const double absrap = p.absrap();
const double pT = p.pT()/GeV;
if (absrap < 0.8) {
switch(p.pid()) {
case 3312:
case -3312:
if ( !( p.hasAncestor(3334) || p.hasAncestor(-3334) ) ) {
- _histPtXi->fill(pT, weight);
+ _histPtXi->fill(pT);
}
break;
if (absrap < 0.75) {
case 310:
- _histPtK0s->fill(pT, weight);
- _temp_h_Kzeros->fill(pT, 2*weight);
+ _histPtK0s->fill(pT);
+ _temp_h_Kzeros->fill(pT, 2);
break;
case 3122:
if ( !( p.hasAncestor(3322) || p.hasAncestor(-3322) ||
p.hasAncestor(3312) || p.hasAncestor(-3312) ||
p.hasAncestor(3334) || p.hasAncestor(-3334) ) ) {
- _histPtLambda->fill(pT, weight);
- _temp_h_Lambdas->fill(pT, weight);
+ _histPtLambda->fill(pT);
+ _temp_h_Lambdas->fill(pT);
}
break;
case -3122:
if ( !( p.hasAncestor(3322) || p.hasAncestor(-3322) ||
p.hasAncestor(3312) || p.hasAncestor(-3312) ||
p.hasAncestor(3334) || p.hasAncestor(-3334) ) ) {
- _histPtAntiLambda->fill(pT, weight);
- _temp_h_Lambdas->fill(pT, weight);
+ _histPtAntiLambda->fill(pT);
+ _temp_h_Lambdas->fill(pT);
}
break;
}
if (absrap<0.6) {
case 333:
- _histPtPhi->fill(pT, weight);
+ _histPtPhi->fill(pT);
break;
}
}
}
}
}
void finalize() {
scale(_histPtK0s, 1./(1.5*sumOfWeights()));
scale(_histPtLambda, 1./(1.5*sumOfWeights()));
scale(_histPtAntiLambda, 1./(1.5*sumOfWeights()));
scale(_histPtXi, 1./(1.6*sumOfWeights()));
scale(_histPtPhi, 1./(1.2*sumOfWeights()));
divide(_temp_h_Lambdas, _temp_h_Kzeros, _h_LamKzero);
}
private:
Histo1DPtr _histPtK0s, _histPtLambda, _histPtAntiLambda, _histPtXi, _histPtPhi;
Histo1DPtr _temp_h_Lambdas, _temp_h_Kzeros;
Scatter2DPtr _h_LamKzero;
};
// The hook for the plugin system
DECLARE_RIVET_PLUGIN(ALICE_2011_S8909580);
}
diff --git a/analyses/pluginALICE/ALICE_2011_S8945144.cc b/analyses/pluginALICE/ALICE_2011_S8945144.cc
--- a/analyses/pluginALICE/ALICE_2011_S8945144.cc
+++ b/analyses/pluginALICE/ALICE_2011_S8945144.cc
@@ -1,105 +1,104 @@
// -*- C++ -*-
#include "Rivet/Analysis.hh"
#include "Rivet/Projections/ChargedFinalState.hh"
namespace Rivet {
class ALICE_2011_S8945144 : public Analysis {
public:
ALICE_2011_S8945144()
: Analysis("ALICE_2011_S8945144")
{}
public:
void init() {
const ChargedFinalState cfs(-15, 15);
declare(cfs, "CFS");
book(_histPtPions ,"d01-x01-y01");
book(_histPtAntiPions ,"d01-x01-y02");
book(_histPtKaons ,"d02-x01-y01");
book(_histPtAntiKaons ,"d02-x01-y02");
book(_histPtProtons ,"d03-x01-y01");
book(_histPtAntiProtons ,"d03-x01-y02");
book(_histAveragePt ,"d04-x01-y01");
}
void analyze(const Event& event) {
- const double weight = 1.0;
const ChargedFinalState& cfs = apply<ChargedFinalState>(event, "CFS");
foreach (const Particle& p, cfs.particles()) {
if(p.absrap()<0.5) {
switch (p.pid()) {
case 211:
- _histPtPions->fill(p.pT()/GeV, weight);
- _histAveragePt->fill(p.mass()/GeV, p.pT()/GeV, weight);
+ _histPtPions->fill(p.pT()/GeV);
+ _histAveragePt->fill(p.mass()/GeV, p.pT()/GeV);
break;
case -211:
- _histPtAntiPions->fill(p.pT()/GeV, weight);
- _histAveragePt->fill(p.mass()/GeV, p.pT()/GeV, weight);
+ _histPtAntiPions->fill(p.pT()/GeV);
+ _histAveragePt->fill(p.mass()/GeV, p.pT()/GeV);
break;
case 2212:
if ( !(p.hasAncestor(3322) || // Xi0
p.hasAncestor(3122) || p.hasAncestor(-3122) || // Lambda
p.hasAncestor(3222) || p.hasAncestor(-3222) || // Sigma+/-
p.hasAncestor(3312) || p.hasAncestor(-3312) ) ) { // Xi-/+
- _histPtProtons->fill(p.pT()/GeV, weight);
- _histAveragePt->fill(p.mass()/GeV, p.pT()/GeV, weight);
+ _histPtProtons->fill(p.pT()/GeV);
+ _histAveragePt->fill(p.mass()/GeV, p.pT()/GeV);
}
break;
case -2212:
if ( !(p.hasAncestor(3322) || // Xi0
p.hasAncestor(3122) || p.hasAncestor(-3122) || // Lambda
p.hasAncestor(3222) || p.hasAncestor(-3222) || // Sigma+/-
p.hasAncestor(3312) || p.hasAncestor(-3312) ) ) { // Xi-/+
- _histPtAntiProtons->fill(p.pT()/GeV, weight);
- _histAveragePt->fill(p.mass()/GeV, p.pT()/GeV, weight);
+ _histPtAntiProtons->fill(p.pT()/GeV);
+ _histAveragePt->fill(p.mass()/GeV, p.pT()/GeV);
}
break;
case 321:
- _histPtKaons->fill(p.pT()/GeV, weight);
- _histAveragePt->fill(p.mass()/GeV, p.pT()/GeV, weight);
+ _histPtKaons->fill(p.pT()/GeV);
+ _histAveragePt->fill(p.mass()/GeV, p.pT()/GeV);
break;
case -321:
- _histPtAntiKaons->fill(p.pT()/GeV, weight);
- _histAveragePt->fill(p.mass()/GeV, p.pT()/GeV, weight);
+ _histPtAntiKaons->fill(p.pT()/GeV);
+ _histAveragePt->fill(p.mass()/GeV, p.pT()/GeV);
break;
}
}
}
}
void finalize() {
scale(_histPtPions, 1./sumOfWeights());
scale(_histPtProtons, 1./sumOfWeights());
scale(_histPtKaons, 1./sumOfWeights());
scale(_histPtAntiPions, 1./sumOfWeights());
scale(_histPtAntiProtons, 1./sumOfWeights());
scale(_histPtAntiKaons, 1./sumOfWeights());
}
private:
Histo1DPtr _histPtPions;
Histo1DPtr _histPtProtons;
Histo1DPtr _histPtKaons;
Histo1DPtr _histPtAntiPions;
Histo1DPtr _histPtAntiProtons;
Histo1DPtr _histPtAntiKaons;
Profile1DPtr _histAveragePt;
};
// The hook for the plugin system
DECLARE_RIVET_PLUGIN(ALICE_2011_S8945144);
}
diff --git a/analyses/pluginALICE/ALICE_2012_I1181770.cc b/analyses/pluginALICE/ALICE_2012_I1181770.cc
--- a/analyses/pluginALICE/ALICE_2012_I1181770.cc
+++ b/analyses/pluginALICE/ALICE_2012_I1181770.cc
@@ -1,116 +1,114 @@
// -*- C++ -*-
#include "Rivet/Analysis.hh"
#include "Rivet/Projections/ChargedFinalState.hh"
namespace Rivet {
class ALICE_2012_I1181770 : public Analysis {
public:
ALICE_2012_I1181770()
: Analysis("ALICE_2012_I1181770")
{ }
void init() {
// Projection setup
declare(ChargedFinalState(), "CFS");
// Book (energy-specific) histograms
int isqrts = -1;
if (fuzzyEquals(sqrtS()/GeV, 900, 1E-3)) isqrts = 1;
else if (fuzzyEquals(sqrtS()/GeV, 2760, 1E-3)) isqrts = 2;
else if (fuzzyEquals(sqrtS()/GeV, 7000, 1E-3)) isqrts = 3;
assert(isqrts > 0);
book(_h_frac_sd_inel, 1, 1, isqrts);
book(_h_frac_dd_inel, 2, 1, isqrts);
book(_h_xsec_sd , 3, 1, isqrts);
book(_h_xsec_dd , 4, 1, isqrts);
book(_h_xsec_inel , 5, 1, isqrts);
}
void analyze(const Event& event) {
const ChargedFinalState& cfs = apply<ChargedFinalState>(event, "CFS");
if (cfs.size() < 2) vetoEvent; // need at least two particles to calculate gaps
- const double weight = 1.0;
-
// Fill INEL plots for each event
- _h_xsec_inel->fill(sqrtS()/GeV, weight);
+ _h_xsec_inel->fill(sqrtS()/GeV);
// Identify particles with most positive/most negative rapidities
const Particles particlesByRap = cfs.particles(cmpMomByRap);
const Particle pslowest = particlesByRap.front();
const Particle pfastest = particlesByRap.back();
// Find gap sizes
const Particles particlesByEta = cfs.particles(cmpMomByEta); // sorted from minus to plus
const size_t num_particles = particlesByEta.size();
vector<double> gaps;
for (size_t ip = 1; ip < num_particles; ++ip) {
const Particle& p1 = particlesByEta[ip-1];
const Particle& p2 = particlesByEta[ip];
const double gap = p2.eta() - p1.eta();
assert(gap >= 0);
gaps.push_back(gap);
}
// First, last, and largest gaps
const double gapmax = *max_element(gaps.begin(), gaps.end());
const double gapbwd = gaps.front();
const double gapfwd = gaps.back();
// Mx calculation
FourMomentum p4lead;
if (pslowest.pid() == PID::PROTON && pfastest.pid() == PID::PROTON) {
p4lead = (fabs(pslowest.rapidity()) > fabs(pfastest.rapidity())) ? pslowest.momentum() : pfastest.momentum();
} else if (pslowest.pid() == PID::PROTON) {
p4lead = pslowest.momentum();
} else if (pfastest.pid() == PID::PROTON) {
p4lead = pfastest.momentum();
}
const double Mx = sqrt( (sqrtS()-p4lead.E()-p4lead.p3().mod()) * (sqrtS()-p4lead.E()+p4lead.p3().mod()) );
// Fill SD (and escape) if Mx is sufficiently low
if (Mx < 200*GeV) {
- _h_xsec_sd->fill(sqrtS()/GeV, weight);
+ _h_xsec_sd->fill(sqrtS()/GeV);
return;
}
// Also remove SD-like events in NSD events
if (fuzzyEquals(gapbwd, gapmax) || fuzzyEquals(gapfwd, gapmax)) vetoEvent;
// Fill DD plots
- if (gapmax > 3) _h_xsec_dd->fill(sqrtS()/GeV, weight);
+ if (gapmax > 3) _h_xsec_dd->fill(sqrtS()/GeV);
}
void finalize() {
// get the ratio plots: SD/inel, DD/inel
divide(_h_xsec_sd , _h_xsec_inel, _h_frac_sd_inel);
divide(_h_xsec_dd , _h_xsec_inel, _h_frac_dd_inel);
const double scaling = crossSection()/millibarn/sumOfWeights();
scale(_h_xsec_sd, scaling);
scale(_h_xsec_dd, scaling);
scale(_h_xsec_inel, scaling);
}
private:
Scatter2DPtr _h_frac_sd_inel;
Scatter2DPtr _h_frac_dd_inel;
Histo1DPtr _h_xsec_sd;
Histo1DPtr _h_xsec_dd;
Histo1DPtr _h_xsec_inel;
};
// Hook for the plugin system
DECLARE_RIVET_PLUGIN(ALICE_2012_I1181770);
}
diff --git a/analyses/pluginALICE/ALICE_2014_I1300380.cc b/analyses/pluginALICE/ALICE_2014_I1300380.cc
--- a/analyses/pluginALICE/ALICE_2014_I1300380.cc
+++ b/analyses/pluginALICE/ALICE_2014_I1300380.cc
@@ -1,120 +1,119 @@
// -*- C++ -*-
#include "Rivet/Analysis.hh"
#include "Rivet/Projections/UnstableFinalState.hh"
namespace Rivet {
class ALICE_2014_I1300380 : public Analysis {
public:
ALICE_2014_I1300380()
: Analysis("ALICE_2014_I1300380")
{}
public:
void init() {
const UnstableFinalState cfs(Cuts::absrap<0.5);
declare(cfs, "CFS");
// Plots from the paper
book(_histPtSigmaStarPlus ,"d01-x01-y01"); // Sigma*+
book(_histPtSigmaStarMinus ,"d01-x01-y02"); // Sigma*-
book(_histPtSigmaStarPlusAnti ,"d01-x01-y03"); // anti Sigma*-
book(_histPtSigmaStarMinusAnti ,"d01-x01-y04"); // anti Sigma*+
book(_histPtXiStar ,"d02-x01-y01"); // 0.5 * (xi star + anti xi star)
book(_histAveragePt ,"d03-x01-y01"); // <pT> profile
}
void analyze(const Event& event) {
- const double weight = 1.0;
const UnstableFinalState& cfs = apply<UnstableFinalState>(event, "CFS");
foreach (const Particle& p, cfs.particles()) {
// protections against mc generators decaying long-lived particles
if ( !(p.hasAncestor(310) || p.hasAncestor(-310) || // K0s
p.hasAncestor(130) || p.hasAncestor(-130) || // K0l
p.hasAncestor(3322) || p.hasAncestor(-3322) || // Xi0
p.hasAncestor(3122) || p.hasAncestor(-3122) || // Lambda
p.hasAncestor(3222) || p.hasAncestor(-3222) || // Sigma+/-
p.hasAncestor(3312) || p.hasAncestor(-3312) || // Xi-/+
p.hasAncestor(3334) || p.hasAncestor(-3334) )) // Omega-/+
{
int aid = abs(p.pdgId());
if (aid == 211 || // pi+
aid == 321 || // K+
aid == 313 || // K*(892)0
aid == 2212 || // proton
aid == 333 ) { // phi(1020)
- _histAveragePt->fill(p.mass()/GeV, p.pT()/GeV, weight);
+ _histAveragePt->fill(p.mass()/GeV, p.pT()/GeV);
}
} // end if "rejection of long-lived particles"
switch (p.pdgId()) {
case 3224:
- _histPtSigmaStarPlus->fill(p.pT()/GeV, weight);
- _histAveragePt->fill(p.mass()/GeV, p.pT()/GeV, weight);
+ _histPtSigmaStarPlus->fill(p.pT()/GeV);
+ _histAveragePt->fill(p.mass()/GeV, p.pT()/GeV);
break;
case -3224:
- _histPtSigmaStarPlusAnti->fill(p.pT()/GeV, weight);
- _histAveragePt->fill(p.mass()/GeV, p.pT()/GeV, weight);
+ _histPtSigmaStarPlusAnti->fill(p.pT()/GeV);
+ _histAveragePt->fill(p.mass()/GeV, p.pT()/GeV);
break;
case 3114:
- _histPtSigmaStarMinus->fill(p.pT()/GeV, weight);
- _histAveragePt->fill(p.mass()/GeV, p.pT()/GeV, weight);
+ _histPtSigmaStarMinus->fill(p.pT()/GeV);
+ _histAveragePt->fill(p.mass()/GeV, p.pT()/GeV);
break;
case -3114:
- _histPtSigmaStarMinusAnti->fill(p.pT()/GeV, weight);
- _histAveragePt->fill(p.mass()/GeV, p.pT()/GeV, weight);
+ _histPtSigmaStarMinusAnti->fill(p.pT()/GeV);
+ _histAveragePt->fill(p.mass()/GeV, p.pT()/GeV);
break;
case 3324:
- _histPtXiStar->fill(p.pT()/GeV, weight);
- _histAveragePt->fill(p.mass()/GeV, p.pT()/GeV, weight);
+ _histPtXiStar->fill(p.pT()/GeV);
+ _histAveragePt->fill(p.mass()/GeV, p.pT()/GeV);
break;
case -3324:
- _histPtXiStar->fill(p.pT()/GeV, weight);
- _histAveragePt->fill(p.mass()/GeV, p.pT()/GeV, weight);
+ _histPtXiStar->fill(p.pT()/GeV);
+ _histAveragePt->fill(p.mass()/GeV, p.pT()/GeV);
break;
case 3312:
- _histAveragePt->fill(p.mass()/GeV, p.pT()/GeV, weight);
+ _histAveragePt->fill(p.mass()/GeV, p.pT()/GeV);
break;
case -3312:
- _histAveragePt->fill(p.mass()/GeV, p.pT()/GeV, weight);
+ _histAveragePt->fill(p.mass()/GeV, p.pT()/GeV);
break;
case 3334:
- _histAveragePt->fill(p.mass()/GeV, p.pT()/GeV, weight);
+ _histAveragePt->fill(p.mass()/GeV, p.pT()/GeV);
break;
case -3334:
- _histAveragePt->fill(p.mass()/GeV, p.pT()/GeV, weight);
+ _histAveragePt->fill(p.mass()/GeV, p.pT()/GeV);
break;
}
}
}
void finalize() {
scale(_histPtSigmaStarPlus, 1./sumOfWeights());
scale(_histPtSigmaStarPlusAnti, 1./sumOfWeights());
scale(_histPtSigmaStarMinus, 1./sumOfWeights());
scale(_histPtSigmaStarMinusAnti, 1./sumOfWeights());
scale(_histPtXiStar, 1./sumOfWeights()/ 2.);
}
private:
// plots from the paper
Histo1DPtr _histPtSigmaStarPlus;
Histo1DPtr _histPtSigmaStarPlusAnti;
Histo1DPtr _histPtSigmaStarMinus;
Histo1DPtr _histPtSigmaStarMinusAnti;
Histo1DPtr _histPtXiStar;
Profile1DPtr _histAveragePt;
};
// The hook for the plugin system
DECLARE_RIVET_PLUGIN(ALICE_2014_I1300380);
}
diff --git a/analyses/pluginALICE/ALICE_2015_I1357424.cc b/analyses/pluginALICE/ALICE_2015_I1357424.cc
--- a/analyses/pluginALICE/ALICE_2015_I1357424.cc
+++ b/analyses/pluginALICE/ALICE_2015_I1357424.cc
@@ -1,99 +1,98 @@
// -*- C++ -*-
#include "Rivet/Analysis.hh"
#include "Rivet/Projections/ChargedFinalState.hh"
namespace Rivet {
class ALICE_2015_I1357424 : public Analysis {
public:
ALICE_2015_I1357424()
: Analysis("ALICE_2015_I1357424")
{}
public:
void init() {
const ChargedFinalState cfs(Cuts::absrap<0.5);
declare(cfs, "CFS");
//
// plots from the paper
book(_histPtPions ,"d01-x01-y01"); // pions
book(_histPtKaons ,"d01-x01-y02"); // kaons
book(_histPtProtons ,"d01-x01-y03"); // protons
book(_histPtKtoPi ,"d02-x01-y01"); // K to pi ratio
book(_histPtPtoPi ,"d03-x01-y01"); // p to pi ratio
//
// temp histos for ratios
book(_histPtPionsR1 ,"TMP/pT_pi1", refData(2, 1, 1)); // pi histo compatible with more restricted kaon binning
book(_histPtPionsR2 ,"TMP/pT_pi2", refData(3, 1, 1)); // pi histo compatible with more restricted proton binning
book(_histPtKaonsR ,"TMP/pT_K", refData(2, 1, 1)); // K histo with more restricted binning
book(_histPtProtonsR ,"TMP/pT_p", refData(3, 1, 1)); // p histo with more restricted binning
}
void analyze(const Event& event) {
- const double weight = 1.0;
const ChargedFinalState& cfs = apply<ChargedFinalState>(event, "CFS");
foreach (const Particle& p, cfs.particles()) {
// protections against mc generators decaying long-lived particles
if ( !(p.hasAncestor(310) || p.hasAncestor(-310) || // K0s
p.hasAncestor(130) || p.hasAncestor(-130) || // K0l
p.hasAncestor(3322) || p.hasAncestor(-3322) || // Xi0
p.hasAncestor(3122) || p.hasAncestor(-3122) || // Lambda
p.hasAncestor(3222) || p.hasAncestor(-3222) || // Sigma+/-
p.hasAncestor(3312) || p.hasAncestor(-3312) || // Xi-/+
p.hasAncestor(3334) || p.hasAncestor(-3334) )) // Omega-/+
{
switch (abs(p.pid())) {
case 211: // pi+
- _histPtPions->fill(p.pT()/GeV, weight);
- _histPtPionsR1->fill(p.pT()/GeV, weight);
- _histPtPionsR2->fill(p.pT()/GeV, weight);
+ _histPtPions->fill(p.pT()/GeV);
+ _histPtPionsR1->fill(p.pT()/GeV);
+ _histPtPionsR2->fill(p.pT()/GeV);
break;
case 2212: // proton
- _histPtProtons->fill(p.pT()/GeV, weight);
- _histPtProtonsR->fill(p.pT()/GeV, weight);
+ _histPtProtons->fill(p.pT()/GeV);
+ _histPtProtonsR->fill(p.pT()/GeV);
break;
case 321: // K+
- _histPtKaons->fill(p.pT()/GeV, weight);
- _histPtKaonsR->fill(p.pT()/GeV, weight);
+ _histPtKaons->fill(p.pT()/GeV);
+ _histPtKaonsR->fill(p.pT()/GeV);
break;
} // particle switch
} // primary pi, K, p only
} // particle loop
}
void finalize() {
divide(_histPtKaonsR, _histPtPionsR1, _histPtKtoPi);
divide(_histPtProtonsR, _histPtPionsR2, _histPtPtoPi);
scale(_histPtPions, 1./sumOfWeights());
scale(_histPtProtons, 1./sumOfWeights());
scale(_histPtKaons, 1./sumOfWeights());
}
private:
Histo1DPtr _histPtPions;
Histo1DPtr _histPtProtons;
Histo1DPtr _histPtKaons;
Histo1DPtr _histPtPionsR1;
Histo1DPtr _histPtPionsR2;
Histo1DPtr _histPtProtonsR;
Histo1DPtr _histPtKaonsR;
Scatter2DPtr _histPtKtoPi;
Scatter2DPtr _histPtPtoPi;
};
// The hook for the plugin system
DECLARE_RIVET_PLUGIN(ALICE_2015_I1357424);
}
diff --git a/analyses/pluginATLAS/ATLAS_2012_I1124167.cc b/analyses/pluginATLAS/ATLAS_2012_I1124167..cc.needstransversesphericity
rename from analyses/pluginATLAS/ATLAS_2012_I1124167.cc
rename to analyses/pluginATLAS/ATLAS_2012_I1124167..cc.needstransversesphericity
diff --git a/analyses/pluginLHCb/LHCB_2011_I917009.cc b/analyses/pluginLHCb/LHCB_2011_I917009.cc
--- a/analyses/pluginLHCb/LHCB_2011_I917009.cc
+++ b/analyses/pluginLHCb/LHCB_2011_I917009.cc
@@ -1,323 +1,334 @@
// -*- C++ -*-
#include "Rivet/Analysis.hh"
#include "Rivet/Projections/UnstableFinalState.hh"
namespace Rivet {
class LHCB_2011_I917009 : public Analysis {
public:
/// @name Constructors etc.
//@{
/// Constructor
LHCB_2011_I917009()
: Analysis("LHCB_2011_I917009"),
rap_beam(0.0), pt_min(0.0),
pt1_edge(0.65), pt2_edge(1.0),
pt3_edge(2.5), rap_min(2.),
rap_max(0.0), dsShift(0)
{ }
//@}
public:
/// @name Analysis methods
//@{
/// Book histograms and initialise projections before the run
void init() {
int y_nbins = 4;
fillMap(partLftMap);
if (fuzzyEquals(sqrtS(), 0.9*TeV)) {
rap_beam = 6.87;
rap_max = 4.;
pt_min = 0.25;
} else if (fuzzyEquals(sqrtS(), 7*TeV)) {
rap_beam = 8.92;
rap_max = 4.5;
pt_min = 0.15;
y_nbins = 5;
dsShift = 8;
} else {
MSG_ERROR("Incompatible beam energy!");
}
// Create the sets of temporary histograms that will be used to make the ratios in the finalize()
- for (size_t i = 0; i < 12; ++i) _tmphistos[i] = YODA::Histo1D(y_nbins, rap_min, rap_max);
- for (size_t i = 12; i < 15; ++i) _tmphistos[i] = YODA::Histo1D(refData(dsShift+5, 1, 1));
- for (size_t i = 15; i < 18; ++i) _tmphistos[i] = YODA::Histo1D(y_nbins, rap_beam - rap_max, rap_beam - rap_min);
+ for (size_t i = 0; i < 12; ++i) book(_tmphistos[i], "TMP/"+to_str(i), y_nbins, rap_min, rap_max);
+ for (size_t i = 12; i < 15; ++i) book(_tmphistos[i], "TMP/"+to_str(i), refData(dsShift+5, 1, 1));
+ for (size_t i = 15; i < 18; ++i) book(_tmphistos[i], "TMP/"+to_str(i), y_nbins, rap_beam - rap_max, rap_beam - rap_min);
+
+ int dsId = dsShift + 1;
+ for (size_t j = 0; j < 3; ++j) {
+ book(s1, dsId, 1, j+1);
+ book(s2, dsId+1, 1, j+1);
+ }
+ dsId += 2;
+ for (size_t j = 3; j < 6; ++j) {
+ book(s3, dsId, 1, 1);
+ dsId += 1;
+ book(s4, dsId, 1, 1);
+ dsId += 1;
+ }
declare(UnstableFinalState(), "UFS");
}
/// Perform the per-event analysis
void analyze(const Event& event) {
const double weight = 1.0;
const UnstableFinalState& ufs = apply<UnstableFinalState>(event, "UFS");
double ancestor_lftsum = 0.0;
double y, pT;
int id;
int partIdx = -1;
foreach (const Particle& p, ufs.particles()) {
id = p.pid();
// continue if particle not a K0s nor (anti-)Lambda
if ( (id == 310) || (id == -310) ) {
partIdx = 2;
} else if ( id == 3122 ) {
partIdx = 1;
} else if ( id == -3122 ) {
partIdx = 0;
} else {
continue;
}
ancestor_lftsum = getMotherLifeTimeSum(p);
// Lifetime cut: ctau sum of all particle ancestors < 10^-9 m according to the paper (see eq. 5)
const double MAX_CTAU = 1.0E-9; // [m]
if ( (ancestor_lftsum < 0.0) || (ancestor_lftsum > MAX_CTAU) ) continue;
const FourMomentum& qmom = p.momentum();
y = log((qmom.E() + qmom.pz())/(qmom.E() - qmom.pz()))/2.;
// skip this particle if it has too high or too low rapidity (extremely rare cases when E = +- pz)
if ( std::isnan(y) || std::isinf(y) ) continue;
y = fabs(y);
if (!inRange(y, rap_min, rap_max)) continue;
pT = sqrt((qmom.px() * qmom.px()) + (qmom.py() * qmom.py()));
if (!inRange(pT, pt_min, pt3_edge)) continue;
// Filling corresponding temporary histograms for pT intervals
- if (inRange(pT, pt_min, pt1_edge)) _tmphistos[partIdx*3].fill(y, weight);
- if (inRange(pT, pt1_edge, pt2_edge)) _tmphistos[partIdx*3+1].fill(y, weight);
- if (inRange(pT, pt2_edge, pt3_edge)) _tmphistos[partIdx*3+2].fill(y, weight);
+ if (inRange(pT, pt_min, pt1_edge)) _tmphistos[partIdx*3]->fill(y, weight);
+ if (inRange(pT, pt1_edge, pt2_edge)) _tmphistos[partIdx*3+1]->fill(y, weight);
+ if (inRange(pT, pt2_edge, pt3_edge)) _tmphistos[partIdx*3+2]->fill(y, weight);
// Fill histo in rapidity for whole pT interval
- _tmphistos[partIdx+9].fill(y, weight);
+ _tmphistos[partIdx+9]->fill(y, weight);
// Fill histo in pT for whole rapidity interval
- _tmphistos[partIdx+12].fill(pT, weight);
+ _tmphistos[partIdx+12]->fill(pT, weight);
// Fill histo in rapidity loss for whole pT interval
- _tmphistos[partIdx+15].fill(rap_beam - y, weight);
+ _tmphistos[partIdx+15]->fill(rap_beam - y, weight);
}
}
// Generate the ratio histograms
void finalize() {
int dsId = dsShift + 1;
for (size_t j = 0; j < 3; ++j) {
/// @todo Compactify to two one-liners
- Scatter2DPtr s1; book(s1, dsId, 1, j+1);
divide(_tmphistos[j], _tmphistos[3+j], s1);
- Scatter2DPtr s2; book(s2, dsId+1, 1, j+1);
divide(_tmphistos[j], _tmphistos[6+j], s2);
}
dsId += 2;
for (size_t j = 3; j < 6; ++j) {
/// @todo Compactify to two one-liners
- Scatter2DPtr s1; book(s1, dsId, 1, 1);
- divide(_tmphistos[3*j], _tmphistos[3*j+1], s1);
+ divide(_tmphistos[3*j], _tmphistos[3*j+1], s3);
dsId += 1;
- Scatter2DPtr s2; book(s2, dsId, 1, 1);
- divide(_tmphistos[3*j], _tmphistos[3*j+2], s2);
+ divide(_tmphistos[3*j], _tmphistos[3*j+2], s4);
dsId += 1;
}
}
//@}
private:
// Get particle lifetime from hardcoded data
double getLifeTime(int pid) {
double lft = -1.0;
if (pid < 0) pid = - pid;
// Correct Pythia6 PIDs for f0(980), f0(1370) mesons
if (pid == 10331) pid = 30221;
if (pid == 10221) pid = 9010221;
map<int, double>::iterator pPartLft = partLftMap.find(pid);
// search stable particle list
if (pPartLft == partLftMap.end()) {
if (pid <= 100) return 0.0;
for (size_t i=0; i < sizeof(stablePDGIds)/sizeof(unsigned int); i++) {
if (pid == stablePDGIds[i]) { lft = 0.0; break; }
}
} else {
lft = (*pPartLft).second;
}
if (lft < 0.0 && PID::isHadron(pid)) {
MSG_ERROR("Could not determine lifetime for particle with PID " << pid
<< "... This V^0 will be considered unprompt!");
}
return lft;
}
// Data members like post-cuts event weight counters go here
const double getMotherLifeTimeSum(const Particle& p) {
if (p.genParticle() == NULL) return -1.;
double lftSum = 0.;
double plft = 0.;
const GenParticle* part = p.genParticle();
const GenVertex* ivtx = part->production_vertex();
while (ivtx) {
if (ivtx->particles_in_size() < 1) { lftSum = -1.; break; };
const GenVertex::particles_in_const_iterator iPart_invtx = ivtx->particles_in_const_begin();
part = (*iPart_invtx);
if ( !(part) ) { lftSum = -1.; break; };
ivtx = part->production_vertex();
if ( (part->pdg_id() == 2212) || !(ivtx) ) break; //reached beam
plft = getLifeTime(part->pdg_id());
if (plft < 0.) { lftSum = -1.; break; };
lftSum += plft;
};
return (lftSum * c_light);
}
/// @name Private variables
//@{
// The rapidity of the beam according to the selected beam energy
double rap_beam;
// The edges of the intervals of transverse momentum
double pt_min, pt1_edge, pt2_edge, pt3_edge;
// The limits of the rapidity window
double rap_min;
double rap_max;
// Indicates which set of histograms will be output to yoda file (according to beam energy)
int dsShift;
// Map between PDG id and particle lifetimes in seconds
std::map<int, double> partLftMap;
// Set of PDG Ids for stable particles (PDG Id <= 100 are considered stable)
static const int stablePDGIds[205];
//@}
/// @name Helper histograms
//@{
/// Histograms are defined in the following order: anti-Lambda, Lambda and K0s.
/// First 3 suites of 3 histograms correspond to each particle in bins of y for the 3 pT intervals. (9 histos)
/// Next 3 histograms contain the particles in y bins for the whole pT interval (3 histos)
/// Next 3 histograms contain the particles in y_loss bins for the whole pT interval (3 histos)
/// Last 3 histograms contain the particles in pT bins for the whole rapidity (y) interval (3 histos)
- YODA::Histo1D _tmphistos[18];
+ Histo1DPtr _tmphistos[18];
+ Scatter2DPtr s1,s2,s3,s4;
//@}
// Fill the PDG Id to Lifetime[seconds] map
// Data was extracted from LHCb Particle Table through LHCb::ParticlePropertySvc
// Populate m with PDG id -> mean lifetime [seconds].
// Pure data table (extracted from the LHCb Particle Table, per the note above);
// always returns true so it can be used as a one-shot static initializer.
bool fillMap(map<int, double>& m) {
// Fundamental particles and gauge/Higgs bosons (stable ones use 1.E+16 s).
m[6] = 4.707703E-25; m[11] = 1.E+16; m[12] = 1.E+16;
m[13] = 2.197019E-06; m[14] = 1.E+16; m[15] = 2.906E-13; m[16] = 1.E+16; m[22] = 1.E+16;
m[23] = 2.637914E-25; m[24] = 3.075758E-25; m[25] = 9.4E-26; m[35] = 9.4E-26;
m[36] = 9.4E-26; m[37] = 9.4E-26; m[84] = 3.335641E-13; m[85] = 1.290893E-12;
// Light unflavoured and strange mesons.
m[111] = 8.4E-17; m[113] = 4.405704E-24; m[115] = 6.151516E-24; m[117] = 4.088275E-24;
m[119] = 2.102914E-24; m[130] = 5.116E-08; m[150] = 1.525E-12; m[211] = 2.6033E-08;
m[213] = 4.405704E-24; m[215] = 6.151516E-24; m[217] = 4.088275E-24; m[219] = 2.102914E-24;
m[221] = 5.063171E-19; m[223] = 7.752794E-23; m[225] = 3.555982E-24; m[227] = 3.91793E-24;
m[229] = 2.777267E-24; m[310] = 8.953E-11; m[313] = 1.308573E-23; m[315] = 6.038644E-24;
m[317] = 4.139699E-24; m[319] = 3.324304E-24; m[321] = 1.238E-08; m[323] = 1.295693E-23;
m[325] = 6.682357E-24; m[327] = 4.139699E-24; m[329] = 3.324304E-24; m[331] = 3.210791E-21;
m[333] = 1.545099E-22; m[335] = 9.016605E-24; m[337] = 7.565657E-24; m[350] = 1.407125E-12;
// Charm and bottom mesons, quarkonia.
m[411] = 1.04E-12; m[413] = 6.856377E-21; m[415] = 1.778952E-23; m[421] = 4.101E-13;
m[423] = 1.000003E-19; m[425] = 1.530726E-23; m[431] = 5.E-13; m[433] = 1.000003E-19;
m[435] = 3.291061E-23; m[441] = 2.465214E-23; m[443] = 7.062363E-21; m[445] = 3.242425E-22;
m[510] = 1.525E-12; m[511] = 1.525E-12; m[513] = 1.000019E-19; m[515] = 1.31E-23;
m[521] = 1.638E-12; m[523] = 1.000019E-19; m[525] = 1.31E-23; m[530] = 1.536875E-12;
m[531] = 1.472E-12; m[533] = 1.E-19; m[535] = 1.31E-23; m[541] = 4.5E-13;
// Baryons (light, strange, charm, bottom).
m[553] = 1.218911E-20; m[1112] = 4.539394E-24; m[1114] = 5.578069E-24; m[1116] = 1.994582E-24;
m[1118] = 2.269697E-24; m[1212] = 4.539394E-24; m[1214] = 5.723584E-24; m[1216] = 1.994582E-24;
m[1218] = 1.316424E-24; m[2112] = 8.857E+02; m[2114] = 5.578069E-24; m[2116] = 4.388081E-24;
m[2118] = 2.269697E-24; m[2122] = 4.539394E-24; m[2124] = 5.723584E-24; m[2126] = 1.994582E-24;
m[2128] = 1.316424E-24; m[2212] = 1.E+16; m[2214] = 5.578069E-24; m[2216] = 4.388081E-24;
m[2218] = 2.269697E-24; m[2222] = 4.539394E-24; m[2224] = 5.578069E-24; m[2226] = 1.994582E-24;
m[2228] = 2.269697E-24; m[3112] = 1.479E-10; m[3114] = 1.670589E-23; m[3116] = 5.485102E-24;
m[3118] = 3.656734E-24; m[3122] = 2.631E-10; m[3124] = 4.219309E-23; m[3126] = 8.227653E-24;
m[3128] = 3.291061E-24; m[3212] = 7.4E-20; m[3214] = 1.828367E-23; m[3216] = 5.485102E-24;
m[3218] = 3.656734E-24; m[3222] = 8.018E-11; m[3224] = 1.838582E-23; m[3226] = 5.485102E-24;
m[3228] = 3.656734E-24; m[3312] = 1.639E-10; m[3314] = 6.648608E-23; m[3322] = 2.9E-10;
m[3324] = 7.233101E-23; m[3334] = 8.21E-11; m[4112] = 2.991874E-22; m[4114] = 4.088274E-23;
m[4122] = 2.E-13; m[4132] = 1.12E-13; m[4212] = 3.999999E-22; m[4214] = 3.291061E-22;
m[4222] = 2.951624E-22; m[4224] = 4.417531E-23; m[4232] = 4.42E-13; m[4332] = 6.9E-14;
m[4412] = 3.335641E-13; m[4422] = 3.335641E-13; m[4432] = 3.335641E-13; m[5112] = 1.E-19;
m[5122] = 1.38E-12; m[5132] = 1.42E-12; m[5142] = 1.290893E-12; m[5212] = 1.E-19;
m[5222] = 1.E-19; m[5232] = 1.42E-12; m[5242] = 1.290893E-12; m[5312] = 1.E-19;
m[5322] = 1.E-19; m[5332] = 1.55E-12; m[5342] = 1.290893E-12; m[5442] = 1.290893E-12;
m[5512] = 1.290893E-12; m[5522] = 1.290893E-12; m[5532] = 1.290893E-12; m[5542] = 1.290893E-12;
// Excited states and exotic/technical PDG codes (1xxxx, 2xxxx, 9xxxxxx, ...).
m[10111] = 2.48382E-24; m[10113] = 4.635297E-24; m[10115] = 2.54136E-24; m[10211] = 2.48382E-24;
m[10213] = 4.635297E-24; m[10215] = 2.54136E-24; m[10223] = 1.828367E-24; m[10225] = 3.636531E-24;
m[10311] = 2.437823E-24; m[10313] = 7.313469E-24; m[10315] = 3.538775E-24;
m[10321] = 2.437823E-24; m[10323] = 7.313469E-24; m[10325] = 3.538775E-24;
m[10331] = 4.804469E-24; m[10411] = 4.38E-24; m[10413] = 3.29E-23; m[10421] = 4.38E-24;
m[10423] = 3.22653E-23; m[10431] = 6.5821E-22; m[10433] = 6.5821E-22; m[10441] = 6.453061E-23;
m[10511] = 4.39E-24; m[10513] = 1.65E-23; m[10521] = 4.39E-24; m[10523] = 1.65E-23;
m[10531] = 4.39E-24; m[10533] = 1.65E-23; m[11114] = 2.194041E-24; m[11116] = 1.828367E-24;
m[11212] = 1.880606E-24; m[11216] = 1.828367E-24; m[12112] = 2.194041E-24;
m[12114] = 2.194041E-24; m[12116] = 5.063171E-24; m[12126] = 1.828367E-24;
m[12212] = 2.194041E-24; m[12214] = 2.194041E-24; m[12216] = 5.063171E-24;
m[12224] = 2.194041E-24; m[12226] = 1.828367E-24; m[13112] = 6.582122E-24; m[13114] = 1.09702E-23;
m[13116] = 5.485102E-24; m[13122] = 1.316424E-23; m[13124] = 1.09702E-23; m[13126] = 6.928549E-24;
m[13212] = 6.582122E-24; m[13214] = 1.09702E-23; m[13216] = 5.485102E-24; m[13222] = 6.582122E-24;
m[13224] = 1.09702E-23; m[13226] = 5.485102E-24; m[13314] = 2.742551E-23;
m[13324] = 2.742551E-23; m[14122] = 1.828367E-22; m[20022] = 1.E+16; m[20113] = 1.567172E-24;
m[20213] = 1.567172E-24; m[20223] = 2.708692E-23; m[20313] = 3.782829E-24;
m[20315] = 2.384827E-24; m[20323] = 3.782829E-24; m[20325] = 2.384827E-24;
m[20333] = 1.198929E-23; m[20413] = 2.63E-24; m[20423] = 2.63E-24; m[20433] = 6.5821E-22;
m[20443] = 7.395643E-22; m[20513] = 2.63E-24; m[20523] = 2.63E-24; m[20533] = 2.63E-24;
m[21112] = 2.632849E-24; m[21114] = 3.291061E-24; m[21212] = 2.632849E-24;
m[21214] = 6.582122E-24; m[22112] = 4.388081E-24; m[22114] = 3.291061E-24;
m[22122] = 2.632849E-24; m[22124] = 6.582122E-24; m[22212] = 4.388081E-24;
m[22214] = 3.291061E-24; m[22222] = 2.632849E-24; m[22224] = 3.291061E-24;
m[23112] = 7.313469E-24; m[23114] = 2.991874E-24; m[23122] = 4.388081E-24;
m[23124] = 6.582122E-24; m[23126] = 3.291061E-24; m[23212] = 7.313469E-24;
m[23214] = 2.991874E-24; m[23222] = 7.313469E-24; m[23224] = 2.991874E-24;
m[30113] = 2.632849E-24; m[30213] = 2.632849E-24; m[30221] = 1.880606E-24;
m[30223] = 2.089563E-24; m[30313] = 2.056913E-24; m[30323] = 2.056913E-24;
m[30443] = 2.419898E-23; m[31114] = 1.880606E-24; m[31214] = 3.291061E-24;
m[32112] = 3.989164E-24; m[32114] = 1.880606E-24; m[32124] = 3.291061E-24;
m[32212] = 3.989164E-24; m[32214] = 1.880606E-24; m[32224] = 1.880606E-24;
m[33122] = 1.880606E-23; m[42112] = 6.582122E-24; m[42212] = 6.582122E-24;
m[43122] = 2.194041E-24; m[53122] = 4.388081E-24; m[100111] = 1.645531E-24;
m[100113] = 1.64553E-24; m[100211] = 1.645531E-24; m[100213] = 1.64553E-24;
m[100221] = 1.196749E-23; m[100223] = 3.061452E-24; m[100313] = 2.837122E-24;
m[100323] = 2.837122E-24; m[100331] = 4.459432E-25; m[100333] = 4.388081E-24;
m[100441] = 4.701516E-23; m[100443] = 2.076379E-21; m[100553] = 2.056913E-20;
m[200553] = 3.242425E-20; m[300553] = 3.210791E-23; m[9000111] = 8.776163E-24;
m[9000211] = 8.776163E-24; m[9000443] = 8.227652E-24; m[9000553] = 5.983747E-24;
m[9010111] = 3.164482E-24; m[9010211] = 3.164482E-24; m[9010221] = 9.403031E-24;
m[9010443] = 8.438618E-24; m[9010553] = 8.3318E-24; m[9020443] = 1.061633E-23;
m[9030221] = 6.038644E-24; m[9042413] = 2.07634E-21; m[9050225] = 1.394517E-24;
m[9060225] = 3.291061E-24; m[9080225] = 4.388081E-24; m[9090225] = 2.056913E-24;
m[9910445] = 2.07634E-21; m[9920443] = 2.07634E-21;
return true;
}
};
// Out-of-class definition of the static table of PDG ids treated as stable
// (declared in the class as stablePDGIds[205]); sorted in ascending order.
const int LHCB_2011_I917009::stablePDGIds[205] = {
311, 543, 545, 551, 555, 557, 1103, 2101, 2103, 2203, 3101, 3103, 3201, 3203, 3303,
4101, 4103, 4124, 4201, 4203, 4301, 4303, 4312, 4314, 4322, 4324, 4334, 4403, 4414,
4424, 4434, 4444, 5101, 5103, 5114, 5201, 5203, 5214, 5224, 5301, 5303, 5314, 5324,
5334, 5401, 5403, 5412, 5414, 5422, 5424, 5432, 5434, 5444, 5503, 5514, 5524, 5534,
5544, 5554, 10022, 10333, 10335, 10443, 10541, 10543, 10551, 10553, 10555, 11112,
12118, 12122, 12218, 12222, 13316, 13326, 20543, 20553, 20555, 23314, 23324, 30343,
30353, 30363, 30553, 33314, 33324, 41214, 42124, 52114, 52214, 100311, 100315, 100321,
100325, 100411, 100413, 100421, 100423, 100551, 100555, 100557, 110551, 110553, 110555,
120553, 120555, 130553, 200551, 200555, 210551, 210553, 220553, 1000001, 1000002,
1000003, 1000004, 1000005, 1000006, 1000011, 1000012, 1000013, 1000014, 1000015,
1000016, 1000021, 1000022, 1000023, 1000024, 1000025, 1000035, 1000037, 1000039,
2000001, 2000002, 2000003, 2000004, 2000005, 2000006, 2000011, 2000012, 2000013,
2000014, 2000015, 2000016, 3000111, 3000113, 3000211, 3000213, 3000221, 3000223,
3000331, 3100021, 3100111, 3100113, 3200111, 3200113, 3300113, 3400113, 4000001,
4000002, 4000011, 4000012, 5000039, 9000221, 9900012, 9900014, 9900016, 9900023,
9900024, 9900041, 9900042 };
// Hook for the plugin system
DECLARE_RIVET_PLUGIN(LHCB_2011_I917009);
}
+
diff --git a/src/Core/Analysis.cc b/src/Core/Analysis.cc
--- a/src/Core/Analysis.cc
+++ b/src/Core/Analysis.cc
@@ -1,862 +1,862 @@
// -*- C++ -*-
#include "Rivet/Config/RivetCommon.hh"
#include "Rivet/Analysis.hh"
#include "Rivet/AnalysisHandler.hh"
#include "Rivet/AnalysisInfo.hh"
#include "Rivet/Tools/BeamConstraint.hh"
-#include "DummyConfig.hh"
-#ifdef HAVE_EXECINFO_H
-#include <execinfo.h>
-#endif
+// #include "DummyConfig.hh"
+// #ifdef HAVE_EXECINFO_H
+// #include <execinfo.h>
+// #endif
namespace Rivet {
// Construct an analysis by name: cross-section starts unset (-1, flag false),
// no handler attached yet, and metadata is loaded from the AnalysisInfo store.
Analysis::Analysis(const string& name)
: _crossSection(-1.0),
_gotCrossSection(false),
_analysishandler(NULL)
{
// Projections may not be registered until init() runs.
ProjectionApplier::_allowProjReg = false;
_defaultname = name;
// Load the .info metadata for this analysis; it must exist.
unique_ptr<AnalysisInfo> ai = AnalysisInfo::make(name);
assert(ai);
_info = move(ai);
assert(_info);
}
// Centre-of-mass energy: delegated to the AnalysisHandler.
double Analysis::sqrtS() const {
return handler().sqrtS();
}
// Beam particles of the current run: delegated to the AnalysisHandler.
const ParticlePair& Analysis::beams() const {
return handler().beams();
}
// PDG ids of the two beams: delegated to the AnalysisHandler.
const PdgIdPair Analysis::beamIds() const {
return handler().beamIds();
}
// Output directory for this analysis' histograms: "/<analysisName>", prefixed
// with "/<runName>" when the handler has a run name. Double slashes are
// collapsed. NB. recomputed on every call.
/// @todo Cache in a member variable
const string Analysis::histoDir() const {
  // The original declared a local named _histoDir and guarded on
  // _histoDir.empty() — always true for a fresh local, so the check was dead
  // code (and the member-style name was misleading). Build the path directly.
  string dir = "/" + name();
  if (handler().runName().length() > 0) {
    dir = "/" + handler().runName() + dir;
  }
  replace_all(dir, "//", "/"); //< iterates until none
  return dir;
}
// Full output path of a named histogram: "<histoDir>/<hname>".
const string Analysis::histoPath(const string& hname) const {
const string path = histoDir() + "/" + hname;
return path;
}
// Full output path of a histogram identified by HepData-style dNN-xNN-yNN ids.
const string Analysis::histoPath(unsigned int datasetId, unsigned int xAxisId, unsigned int yAxisId) const {
return histoDir() + "/" + makeAxisCode(datasetId, xAxisId, yAxisId);
}
// Build the HepData-style axis code "dNN-xNN-yNN" from the three ids, with
// each id zero-padded to at least two digits (ids >= 100 keep all digits).
const string Analysis::makeAxisCode(unsigned int datasetId, unsigned int xAxisId, unsigned int yAxisId) const {
  // Zero-pad a single id: 7 -> "07", 12 -> "12", 123 -> "123".
  auto pad2 = [](unsigned int id) {
    string s = std::to_string(id);
    return (id < 10) ? "0" + s : s;
  };
  return "d" + pad2(datasetId) + "-x" + pad2(xAxisId) + "-y" + pad2(yAxisId);
}
// Logger scoped to this analysis: "Rivet.Analysis.<name>".
Log& Analysis::getLog() const {
string logname = "Rivet.Analysis." + name();
return Log::getLog(logname);
}
// Number of events seen so far: delegated to the AnalysisHandler.
size_t Analysis::numEvents() const {
return handler().numEvents();
}
// Sum of event weights seen so far: delegated to the AnalysisHandler.
double Analysis::sumOfWeights() const {
return handler().sumOfWeights();
}
///////////////////////////////////////////
// Convenience overload: unpack a ParticlePair into ids + energies.
bool Analysis::isCompatible(const ParticlePair& beams) const {
return isCompatible(beams.first.pid(), beams.second.pid(),
beams.first.energy(), beams.second.energy());
}
// Convenience overload: pack raw ids/energies into pairs and forward.
bool Analysis::isCompatible(PdgId beam1, PdgId beam2, double e1, double e2) const {
PdgIdPair beams(beam1, beam2);
pair<double,double> energies(e1, e2);
return isCompatible(beams, energies);
}
bool Analysis::isCompatible(const PdgIdPair& beams, const pair<double,double>& energies) const {
// First check the beam IDs
bool beamIdsOk = false;
foreach (const PdgIdPair& bp, requiredBeams()) {
if (compatible(beams, bp)) {
beamIdsOk = true;
break;
}
}
if (!beamIdsOk) return false;
// Next check that the energies are compatible (within 1% or 1 GeV, whichever is larger, for a bit of UI forgiveness)
/// @todo Use some sort of standard ordering to improve comparisons, esp. when the two beams are different particles
bool beamEnergiesOk = requiredEnergies().size() > 0 ? false : true;
typedef pair<double,double> DoublePair;
foreach (const DoublePair& ep, requiredEnergies()) {
if ((fuzzyEquals(ep.first, energies.first, 0.01) && fuzzyEquals(ep.second, energies.second, 0.01)) ||
(fuzzyEquals(ep.first, energies.second, 0.01) && fuzzyEquals(ep.second, energies.first, 0.01)) ||
(abs(ep.first - energies.first) < 1*GeV && abs(ep.second - energies.second) < 1*GeV) ||
(abs(ep.first - energies.second) < 1*GeV && abs(ep.second - energies.first) < 1*GeV)) {
beamEnergiesOk = true;
break;
}
}
return beamEnergiesOk;
/// @todo Need to also check internal consistency of the analysis'
/// beam requirements with those of the projections it uses.
}
///////////////////////////////////////////
// Record the generator cross-section and mark it as set; returns *this for chaining.
Analysis& Analysis::setCrossSection(double xs) {
_crossSection = xs;
_gotCrossSection = true;
return *this;
}
// Retrieve the cross-section; throws Error if it was never set or is NaN.
double Analysis::crossSection() const {
if (!_gotCrossSection || std::isnan(_crossSection)) {
string errMsg = "You did not set the cross section for the analysis " + name();
throw Error(errMsg);
}
return _crossSection;
}
// Cross-section divided by the accumulated sum of event weights.
// NOTE(review): no guard against sumOfWeights() == 0 — confirm callers only
// use this after at least one weighted event.
double Analysis::crossSectionPerEvent() const {
return _crossSection/sumOfWeights();
}
////////////////////////////////////////////////////////////
// Histogramming
// Lazily populate the reference-data cache for this analysis (no-op once filled).
void Analysis::_cacheRefData() const {
if (_refdata.empty()) {
MSG_TRACE("Getting refdata cache for paper " << name());
_refdata = getRefData(name());
}
}
// Book a named Counter: create one per weight stream, register it with the
// analysis, and return the (re-seated) caller-supplied reference.
CounterPtr & Analysis::book(CounterPtr & ctr,
const string& cname,
const string& title) {
const string path = histoPath(cname);
ctr = CounterPtr(handler().numWeights(), Counter(path, title));
addAnalysisObject(ctr);
MSG_TRACE("Made counter " << cname << " for " << name());
return ctr;
}
// Overload: derive the counter name from HepData-style dNN-xNN-yNN ids.
CounterPtr & Analysis::book(CounterPtr & ctr, unsigned int datasetId, unsigned int xAxisId, unsigned int yAxisId,
const string& title) {
// const string& xtitle,
// const string& ytitle) {
const string axisCode = makeAxisCode(datasetId, xAxisId, yAxisId);
return book(ctr, axisCode, title);
}
// Book a uniformly-binned 1D histogram under this analysis' output path,
// wrap it in a per-weight-stream pointer, and register it.
Histo1DPtr & Analysis::book(Histo1DPtr & histo, const string& hname,
                            size_t nbins, double lower, double upper,
                            const string& title,
                            const string& xtitle,
                            const string& ytitle) {
  Histo1D h(nbins, lower, upper, histoPath(hname), title);
  h.setAnnotation("XLabel", xtitle);
  h.setAnnotation("YLabel", ytitle);
  // One copy per weight stream; registration makes it visible for output.
  histo = Histo1DPtr(handler().numWeights(), h);
  addAnalysisObject(histo);
  MSG_TRACE("Made histogram " << hname << " for " << name());
  return histo;
}
// Overload: accept an initializer_list of bin edges and forward to the
// vector<double> overload.
Histo1DPtr & Analysis::book(Histo1DPtr & histo, const string& hname,
const initializer_list<double>& binedges,
const string& title,
const string& xtitle,
const string& ytitle) {
return book(histo, hname, vector<double>{binedges}, title, xtitle, ytitle);
}
// Book a variable-width-binned 1D histogram from explicit bin edges,
// wrap it per weight stream, and register it.
Histo1DPtr & Analysis::book(Histo1DPtr & histo, const string& hname,
                            const vector<double>& binedges,
                            const string& title,
                            const string& xtitle,
                            const string& ytitle) {
  Histo1D h(binedges, histoPath(hname), title);
  h.setAnnotation("XLabel", xtitle);
  h.setAnnotation("YLabel", ytitle);
  histo = Histo1DPtr(handler().numWeights(), h);
  addAnalysisObject(histo);
  MSG_TRACE("Made histogram " << hname << " for " << name());
  return histo;
}
// Book a 1D histogram whose binning is cloned from a reference Scatter2D,
// then retitle/annotate it and register it per weight stream.
Histo1DPtr & Analysis::book(Histo1DPtr & histo, const string& hname,
                            const Scatter2D& refscatter,
                            const string& title,
                            const string& xtitle,
                            const string& ytitle) {
  Histo1D h(refscatter, histoPath(hname));
  h.setTitle(title);
  h.setAnnotation("XLabel", xtitle);
  h.setAnnotation("YLabel", ytitle);
  histo = Histo1DPtr(handler().numWeights(), h);
  addAnalysisObject(histo);
  MSG_TRACE("Made histogram " << hname << " for " << name());
  return histo;
}
// Overload: look up the reference data for hname and book with its binning.
Histo1DPtr & Analysis::book(Histo1DPtr & histo, const string& hname,
const string& title,
const string& xtitle,
const string& ytitle) {
const Scatter2D& refdata = refData(hname);
return book(histo, hname, refdata, title, xtitle, ytitle);
}
// Overload: derive the histogram name from HepData-style dNN-xNN-yNN ids.
Histo1DPtr & Analysis::book(Histo1DPtr & histo, unsigned int datasetId, unsigned int xAxisId, unsigned int yAxisId,
const string& title,
const string& xtitle,
const string& ytitle) {
const string axisCode = makeAxisCode(datasetId, xAxisId, yAxisId);
return book(histo, axisCode, title, xtitle, ytitle);
}
/// @todo Add booking methods which take a path, titles and *a reference Scatter from which to book*
/////////////////
// Book a uniformly-binned 2D histogram (nx x ny bins), annotate the three
// axis labels, wrap per weight stream, and register it.
Histo2DPtr & Analysis::book(Histo2DPtr & h2d,const string& hname,
size_t nxbins, double xlower, double xupper,
size_t nybins, double ylower, double yupper,
const string& title,
const string& xtitle,
const string& ytitle,
const string& ztitle)
{
const string path = histoPath(hname);
Histo2D hist(nxbins, xlower, xupper, nybins, ylower, yupper, path, title);
hist.setAnnotation("XLabel", xtitle);
hist.setAnnotation("YLabel", ytitle);
hist.setAnnotation("ZLabel", ztitle);
h2d = Histo2DPtr(handler().numWeights(), hist);
addAnalysisObject(h2d);
MSG_TRACE("Made 2D histogram " << hname << " for " << name());
return h2d;
}
// Overload: initializer_list edges forwarded to the vector<double> overload.
Histo2DPtr & Analysis::book(Histo2DPtr & h2d,const string& hname,
const initializer_list<double>& xbinedges,
const initializer_list<double>& ybinedges,
const string& title,
const string& xtitle,
const string& ytitle,
const string& ztitle)
{
return book(h2d, hname, vector<double>{xbinedges}, vector<double>{ybinedges}, title, xtitle, ytitle, ztitle);
}
// Book a variable-width-binned 2D histogram from explicit x/y edges.
Histo2DPtr & Analysis::book(Histo2DPtr & h2d,const string& hname,
const vector<double>& xbinedges,
const vector<double>& ybinedges,
const string& title,
const string& xtitle,
const string& ytitle,
const string& ztitle)
{
const string path = histoPath(hname);
Histo2D hist(xbinedges, ybinedges, path, title);
hist.setAnnotation("XLabel", xtitle);
hist.setAnnotation("YLabel", ytitle);
hist.setAnnotation("ZLabel", ztitle);
h2d = Histo2DPtr(handler().numWeights(), hist);
addAnalysisObject(h2d);
MSG_TRACE("Made 2D histogram " << hname << " for " << name());
return h2d;
}
// Book a uniformly-binned 1D profile, annotate, wrap per weight stream, register.
Profile1DPtr & Analysis::book(Profile1DPtr & p1d,const string& hname,
size_t nbins, double lower, double upper,
const string& title,
const string& xtitle,
const string& ytitle) {
const string path = histoPath(hname);
Profile1D prof(nbins, lower, upper, path, title);
prof.setAnnotation("XLabel", xtitle);
prof.setAnnotation("YLabel", ytitle);
p1d = Profile1DPtr(handler().numWeights(), prof);
addAnalysisObject(p1d);
MSG_TRACE("Made profile histogram " << hname << " for " << name());
return p1d;
}
// Overload: initializer_list edges forwarded to the vector<double> overload.
Profile1DPtr & Analysis::book(Profile1DPtr & p1d,const string& hname,
const initializer_list<double>& binedges,
const string& title,
const string& xtitle,
const string& ytitle) {
return book(p1d, hname, vector<double>{binedges}, title, xtitle, ytitle);
}
// Book a variable-width-binned 1D profile from explicit bin edges.
Profile1DPtr & Analysis::book(Profile1DPtr & p1d,const string& hname,
const vector<double>& binedges,
const string& title,
const string& xtitle,
const string& ytitle) {
const string path = histoPath(hname);
Profile1D prof(binedges, path, title);
prof.setAnnotation("XLabel", xtitle);
prof.setAnnotation("YLabel", ytitle);
p1d = Profile1DPtr(handler().numWeights(), prof);
addAnalysisObject(p1d);
MSG_TRACE("Made profile histogram " << hname << " for " << name());
return p1d;
}
// Book a 1D profile whose binning is cloned from a reference Scatter2D.
Profile1DPtr & Analysis::book(Profile1DPtr & p1d,const string& hname,
const Scatter2D& refscatter,
const string& title,
const string& xtitle,
const string& ytitle) {
const string path = histoPath(hname);
Profile1D prof(refscatter, path);
prof.setTitle(title);
prof.setAnnotation("XLabel", xtitle);
prof.setAnnotation("YLabel", ytitle);
p1d = Profile1DPtr(handler().numWeights(), prof);
addAnalysisObject(p1d);
MSG_TRACE("Made profile histogram " << hname << " for " << name());
return p1d;
// if (prof.hasAnnotation("IsRef")) prof.rmAnnotation("IsRef");
}
// Overload: look up the reference data for hname and book with its binning.
Profile1DPtr & Analysis::book(Profile1DPtr & p1d,const string& hname,
const string& title,
const string& xtitle,
const string& ytitle) {
const Scatter2D& refdata = refData(hname);
book(p1d, hname, refdata, title, xtitle, ytitle);
return p1d;
}
// Overload: derive the profile name from HepData-style dNN-xNN-yNN ids.
Profile1DPtr & Analysis::book(Profile1DPtr & p1d,unsigned int datasetId, unsigned int xAxisId, unsigned int yAxisId,
const string& title,
const string& xtitle,
const string& ytitle) {
const string axisCode = makeAxisCode(datasetId, xAxisId, yAxisId);
return book(p1d, axisCode, title, xtitle, ytitle);
}
// Book a uniformly-binned 2D profile (nx x ny bins), annotate all three axes,
// wrap per weight stream, and register.
Profile2DPtr & Analysis::book(Profile2DPtr & p2d, const string& hname,
size_t nxbins, double xlower, double xupper,
size_t nybins, double ylower, double yupper,
const string& title,
const string& xtitle,
const string& ytitle,
const string& ztitle)
{
const string path = histoPath(hname);
Profile2D prof(nxbins, xlower, xupper, nybins, ylower, yupper, path, title);
prof.setAnnotation("XLabel", xtitle);
prof.setAnnotation("YLabel", ytitle);
prof.setAnnotation("ZLabel", ztitle);
p2d = Profile2DPtr(handler().numWeights(), prof);
addAnalysisObject(p2d);
MSG_TRACE("Made 2D profile histogram " << hname << " for " << name());
return p2d;
}
// Overload: initializer_list edges forwarded to the vector<double> overload.
Profile2DPtr & Analysis::book(Profile2DPtr & p2d, const string& hname,
const initializer_list<double>& xbinedges,
const initializer_list<double>& ybinedges,
const string& title,
const string& xtitle,
const string& ytitle,
const string& ztitle)
{
return book(p2d, hname, vector<double>{xbinedges}, vector<double>{ybinedges}, title, xtitle, ytitle, ztitle);
}
// Book a variable-width-binned 2D profile from explicit x/y edges.
Profile2DPtr & Analysis::book(Profile2DPtr & p2d, const string& hname,
const vector<double>& xbinedges,
const vector<double>& ybinedges,
const string& title,
const string& xtitle,
const string& ytitle,
const string& ztitle)
{
const string path = histoPath(hname);
Profile2D prof(xbinedges, ybinedges, path, title);
prof.setAnnotation("XLabel", xtitle);
prof.setAnnotation("YLabel", ytitle);
prof.setAnnotation("ZLabel", ztitle);
p2d = Profile2DPtr(handler().numWeights(), prof);
addAnalysisObject(p2d);
MSG_TRACE("Made 2D profile histogram " << hname << " for " << name());
return p2d;
}
// Overload: derive the scatter name from HepData-style dNN-xNN-yNN ids.
Scatter2DPtr & Analysis::book(Scatter2DPtr & s2d, unsigned int datasetId, unsigned int xAxisId, unsigned int yAxisId,
bool copy_pts,
const string& title,
const string& xtitle,
const string& ytitle) {
const string axisCode = makeAxisCode(datasetId, xAxisId, yAxisId);
return book(s2d, axisCode, copy_pts, title, xtitle, ytitle);
}
// Book a named scatter. With copy_pts the reference data's points are copied
// and their y values/errors zeroed; otherwise an empty scatter is made.
Scatter2DPtr & Analysis::book(Scatter2DPtr & s2d, const string& hname,
bool copy_pts,
const string& title,
const string& xtitle,
const string& ytitle) {
Scatter2D scat;
const string path = histoPath(hname);
if (copy_pts) {
const Scatter2D& refdata = refData(hname);
scat = Scatter2D(refdata, path);
// Keep the reference x binning but reset y to zero for filling.
for (Point2D& p : scat.points()) p.setY(0, 0);
} else {
scat = Scatter2D(path);
}
scat.setTitle(title);
scat.setAnnotation("XLabel", xtitle);
scat.setAnnotation("YLabel", ytitle);
s2d = Scatter2DPtr(handler().numWeights(), scat);
addAnalysisObject(s2d);
MSG_TRACE("Made scatter " << hname << " for " << name());
// if (scat.hasAnnotation("IsRef")) scat.rmAnnotation("IsRef");
return s2d;
}
// Book a scatter with npts equally-spaced points spanning [lower, upper]:
// each point sits at its bin centre with x-error = half the bin width and
// y = 0, ready for later filling.
Scatter2DPtr & Analysis::book(Scatter2DPtr & s2d, const string& hname,
                              size_t npts, double lower, double upper,
                              const string& title,
                              const string& xtitle,
                              const string& ytitle) {
  const string path = histoPath(hname);
  // BUGFIX: the original computed `path` but never attached it to the scatter,
  // so the booked object had no output path (the copy_pts overload does set it).
  Scatter2D scat(path);
  const double binwidth = (upper-lower)/npts;
  for (size_t pt = 0; pt < npts; ++pt) {
    const double bincentre = lower + (pt + 0.5) * binwidth;
    scat.addPoint(bincentre, 0, binwidth/2.0, 0);
  }
  scat.setTitle(title);
  scat.setAnnotation("XLabel", xtitle);
  scat.setAnnotation("YLabel", ytitle);
  s2d = Scatter2DPtr(handler().numWeights(), scat);
  addAnalysisObject(s2d);
  MSG_TRACE("Made scatter " << hname << " for " << name());
  return s2d;
}
// Book a scatter whose points sit at the centres of the given bin edges,
// each with x-error = half the bin width and y = 0.
Scatter2DPtr & Analysis::book(Scatter2DPtr & s2d, const string& hname,
                              const vector<double>& binedges,
                              const string& title,
                              const string& xtitle,
                              const string& ytitle) {
  const string path = histoPath(hname);
  // BUGFIX: the original computed `path` but never attached it to the scatter.
  Scatter2D scat(path);
  // BUGFIX: guard with pt+1 < size() — the original `pt < binedges.size()-1`
  // wraps around (size_t underflow) for an empty edge vector and reads out
  // of bounds.
  for (size_t pt = 0; pt + 1 < binedges.size(); ++pt) {
    const double bincentre = (binedges[pt] + binedges[pt+1]) / 2.0;
    const double binwidth = binedges[pt+1] - binedges[pt];
    scat.addPoint(bincentre, 0, binwidth/2.0, 0);
  }
  scat.setTitle(title);
  scat.setAnnotation("XLabel", xtitle);
  scat.setAnnotation("YLabel", ytitle);
  s2d = Scatter2DPtr(handler().numWeights(), scat);
  addAnalysisObject(s2d);
  MSG_TRACE("Made scatter " << hname << " for " << name());
  return s2d;
}
// All divide() overloads: compute the ratio of two objects into the supplied
// scatter while preserving the scatter's pre-existing output path (YODA's
// operator/ would otherwise overwrite it).
void Analysis::divide(CounterPtr c1, CounterPtr c2, Scatter1DPtr s) const {
const string path = s->path();
*s = *c1 / *c2;
s->setPath(path);
}
void Analysis::divide(const Counter& c1, const Counter& c2, Scatter1DPtr s) const {
const string path = s->path();
*s = c1 / c2;
s->setPath(path);
}
void Analysis::divide(Histo1DPtr h1, Histo1DPtr h2, Scatter2DPtr s) const {
const string path = s->path();
*s = *h1 / *h2;
s->setPath(path);
}
void Analysis::divide(const Histo1D& h1, const Histo1D& h2, Scatter2DPtr s) const {
const string path = s->path();
*s = h1 / h2;
s->setPath(path);
}
void Analysis::divide(Profile1DPtr p1, Profile1DPtr p2, Scatter2DPtr s) const {
const string path = s->path();
*s = *p1 / *p2;
s->setPath(path);
}
void Analysis::divide(const Profile1D& p1, const Profile1D& p2, Scatter2DPtr s) const {
const string path = s->path();
*s = p1 / p2;
s->setPath(path);
}
void Analysis::divide(Histo2DPtr h1, Histo2DPtr h2, Scatter3DPtr s) const {
const string path = s->path();
*s = *h1 / *h2;
s->setPath(path);
}
void Analysis::divide(const Histo2D& h1, const Histo2D& h2, Scatter3DPtr s) const {
const string path = s->path();
*s = h1 / h2;
s->setPath(path);
}
void Analysis::divide(Profile2DPtr p1, Profile2DPtr p2, Scatter3DPtr s) const {
const string path = s->path();
*s = *p1 / *p2;
s->setPath(path);
}
void Analysis::divide(const Profile2D& p1, const Profile2D& p2, Scatter3DPtr s) const {
const string path = s->path();
*s = p1 / p2;
s->setPath(path);
}
/// @todo Counter and Histo2D efficiencies and asymms
// Efficiency and asymmetry helpers: delegate to YODA, preserving the output
// scatter's pre-existing path across the assignment.
void Analysis::efficiency(Histo1DPtr h1, Histo1DPtr h2, Scatter2DPtr s) const {
const string path = s->path();
*s = YODA::efficiency(*h1, *h2);
s->setPath(path);
}
void Analysis::efficiency(const Histo1D& h1, const Histo1D& h2, Scatter2DPtr s) const {
const string path = s->path();
*s = YODA::efficiency(h1, h2);
s->setPath(path);
}
void Analysis::asymm(Histo1DPtr h1, Histo1DPtr h2, Scatter2DPtr s) const {
const string path = s->path();
*s = YODA::asymm(*h1, *h2);
s->setPath(path);
}
void Analysis::asymm(const Histo1D& h1, const Histo1D& h2, Scatter2DPtr s) const {
const string path = s->path();
*s = YODA::asymm(h1, h2);
s->setPath(path);
}
// Scale a counter's weights by factor. Null pointer: warn and return.
// NaN/inf factor: warn and scale by 0 instead. YODA errors are downgraded
// to warnings so one bad object does not abort finalize().
void Analysis::scale(CounterPtr cnt, double factor) {
if (!cnt) {
MSG_WARNING("Failed to scale counter=NULL in analysis " << name() << " (scale=" << factor << ")");
return;
}
if (std::isnan(factor) || std::isinf(factor)) {
MSG_WARNING("Failed to scale counter=" << cnt->path() << " in analysis: " << name() << " (invalid scale factor = " << factor << ")");
factor = 0;
}
MSG_TRACE("Scaling counter " << cnt->path() << " by factor " << factor);
try {
cnt->scaleW(factor);
} catch (YODA::Exception& we) {
MSG_WARNING("Could not scale counter " << cnt->path());
return;
}
}
// Normalize a 1D histogram's integral to norm (optionally including overflows).
void Analysis::normalize(Histo1DPtr histo, double norm, bool includeoverflows) {
if (!histo) {
MSG_WARNING("Failed to normalize histo=NULL in analysis " << name() << " (norm=" << norm << ")");
return;
}
MSG_TRACE("Normalizing histo " << histo->path() << " to " << norm);
try {
histo->normalize(norm, includeoverflows);
} catch (YODA::Exception& we) {
MSG_WARNING("Could not normalize histo " << histo->path());
return;
}
}
// Scale a 1D histogram's weights by factor (same guards as the counter version).
void Analysis::scale(Histo1DPtr histo, double factor) {
if (!histo) {
MSG_WARNING("Failed to scale histo=NULL in analysis " << name() << " (scale=" << factor << ")");
return;
}
if (std::isnan(factor) || std::isinf(factor)) {
MSG_WARNING("Failed to scale histo=" << histo->path() << " in analysis: " << name() << " (invalid scale factor = " << factor << ")");
factor = 0;
}
MSG_TRACE("Scaling histo " << histo->path() << " by factor " << factor);
try {
histo->scaleW(factor);
} catch (YODA::Exception& we) {
MSG_WARNING("Could not scale histo " << histo->path());
return;
}
}
// 2D variants below.
// NOTE(review): the 2D overloads report null/invalid input via MSG_ERROR
// where the 1D ones use MSG_WARNING — confirm the asymmetry is intended.
void Analysis::normalize(Histo2DPtr histo, double norm, bool includeoverflows) {
if (!histo) {
MSG_ERROR("Failed to normalize histo=NULL in analysis " << name() << " (norm=" << norm << ")");
return;
}
MSG_TRACE("Normalizing histo " << histo->path() << " to " << norm);
try {
histo->normalize(norm, includeoverflows);
} catch (YODA::Exception& we) {
MSG_WARNING("Could not normalize histo " << histo->path());
return;
}
}
void Analysis::scale(Histo2DPtr histo, double factor) {
if (!histo) {
MSG_ERROR("Failed to scale histo=NULL in analysis " << name() << " (scale=" << factor << ")");
return;
}
if (std::isnan(factor) || std::isinf(factor)) {
MSG_ERROR("Failed to scale histo=" << histo->path() << " in analysis: " << name() << " (invalid scale factor = " << factor << ")");
factor = 0;
}
MSG_TRACE("Scaling histo " << histo->path() << " by factor " << factor);
try {
histo->scaleW(factor);
} catch (YODA::Exception& we) {
MSG_WARNING("Could not scale histo " << histo->path());
return;
}
}
// Fill s with the cumulative-integral form of h, preserving s's path.
void Analysis::integrate(Histo1DPtr h, Scatter2DPtr s) const {
// preserve the path info
const string path = s->path();
*s = toIntegralHisto(*h);
s->setPath(path);
}
// Reference overload of the above.
void Analysis::integrate(const Histo1D& h, Scatter2DPtr s) const {
// preserve the path info
const string path = s->path();
*s = toIntegralHisto(h);
s->setPath(path);
}
}
/// @todo 2D versions of integrate... defined how, exactly?!?
//////////////////////////////////
namespace {
void errormsg(std::string name) {
-#ifdef HAVE_BACKTRACE
- void * buffer[4];
- backtrace(buffer, 4);
- backtrace_symbols_fd(buffer, 4 , 1);
-#endif
+// #ifdef HAVE_BACKTRACE
+// void * buffer[4];
+// backtrace(buffer, 4);
+// backtrace_symbols_fd(buffer, 4 , 1);
+// #endif
std::cerr << name << ": Can't book objects outside of init().\n";
assert(false);
}
}
namespace Rivet {
// Register a multiweight analysis object. Only legal during the INIT stage;
// any later booking routes to errormsg(), which prints and asserts.
void Analysis::addAnalysisObject(const MultiweightAOPtr & ao) {
if (handler().stage() == AnalysisHandler::Stage::INIT) {
_analysisobjects.push_back(ao);
// Keep the underlying object alive for the framework's lifetime management.
ao.get()->blockDestructor(true);
}
else {
errormsg(name());
}
}
// Remove the first registered object whose path matches (no-op if absent).
void Analysis::removeAnalysisObject(const string& path) {
for (auto it = _analysisobjects.begin();
it != _analysisobjects.end(); ++it) {
if ((*it)->path() == path) {
_analysisobjects.erase(it);
break;
}
}
}
// Remove the first registered object equal to ao (no-op if absent).
void Analysis::removeAnalysisObject(const MultiweightAOPtr & ao) {
for (auto it = _analysisobjects.begin(); it != _analysisobjects.end(); ++it) {
if ((*it) == ao) {
_analysisobjects.erase(it);
break;
}
}
}
}

File Metadata

Mime Type
text/x-diff
Expires
Tue, Nov 19, 7:43 PM (1 d, 9 h)
Storage Engine
blob
Storage Format
Raw Data
Storage Handle
3805897
Default Alt Text
(76 KB)

Event Timeline