
diff --git a/src/Analyses/ALEPH_1991_S2435284.cc b/src/Analyses/ALEPH_1991_S2435284.cc
--- a/src/Analyses/ALEPH_1991_S2435284.cc
+++ b/src/Analyses/ALEPH_1991_S2435284.cc
@@ -1,41 +1,41 @@
// -*- C++ -*-
#include "Rivet/Tools/Logging.hh"
#include "Rivet/RivetAIDA.hh"
#include "Rivet/Analyses/ALEPH_1991_S2435284.hh"
#include "Rivet/Projections/ChargedFinalState.hh"
#include "Rivet/Projections/Multiplicity.hh"
namespace Rivet {
ALEPH_1991_S2435284::ALEPH_1991_S2435284() {
setBeams(ELECTRON, POSITRON);
const ChargedFinalState cfs;
addProjection(cfs, "FS");
addProjection(Multiplicity(cfs), "Mult");
}
void ALEPH_1991_S2435284::init() {
// Book histogram
_histChTot = bookHistogram1D(1, 1, 1, "Total charged multiplicity", "$n_\\text{ch}$",
- "$2/\\sigma \\, \\d{\\sigma}/\\d{n_\\text{ch}}$");
+ "$2/\\sigma \\, \\mathrm{d}{\\sigma}/\\mathrm{d}{n_\\text{ch}}$");
}
// Do the analysis
void ALEPH_1991_S2435284::analyze(const Event& event) {
const Multiplicity& m = applyProjection<Multiplicity>(event, "Mult");
getLog() << Log::DEBUG << "Total charged multiplicity = " << m.totalMultiplicity() << endl;
_histChTot->fill(m.totalMultiplicity(), event.weight());
}
// Finalize
void ALEPH_1991_S2435284::finalize() {
// Normalize the histogram
scale(_histChTot, 2.0/sumOfWeights()); // same as in ALEPH 1996
}
}
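The scale(_histChTot, 2.0/sumOfWeights()) call above realises the axis label directly: with weighted bin content \(N_i\), bin width \(\Delta n\), and total event weight \(W = \sum_j w_j\),

\[ \frac{2}{\sigma}\,\frac{\mathrm{d}\sigma}{\mathrm{d}n_\text{ch}}\bigg|_i \approx \frac{2\,N_i}{W\,\Delta n}, \]

the per-event cross-section factor cancelling between numerator and denominator; the factor 2 is the one appearing in the \(2/\sigma\) label.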
diff --git a/src/Analyses/CDF_1990_S2089246.cc b/src/Analyses/CDF_1990_S2089246.cc
--- a/src/Analyses/CDF_1990_S2089246.cc
+++ b/src/Analyses/CDF_1990_S2089246.cc
@@ -1,68 +1,68 @@
// -*- C++ -*-
#include "Rivet/Tools/Logging.hh"
#include "Rivet/RivetAIDA.hh"
#include "Rivet/Analyses/CDF_1990_S2089246.hh"
#include "Rivet/Projections/ChargedFinalState.hh"
#include "Rivet/Projections/Beam.hh"
#include "Rivet/Projections/FastJets.hh"
#include "Rivet/Projections/PVertex.hh"
#include "Rivet/Projections/TotalVisibleMomentum.hh"
namespace Rivet {
CDF_1990_S2089246::CDF_1990_S2089246()
{
setBeams(PROTON, ANTIPROTON);
addProjection(ChargedFinalState(-3.5, 3.5), "FS");
addProjection(Beam(), "Beam");
}
void CDF_1990_S2089246::init() {
- const string dNdEtaTeX = "$\\d{N_\\text{ch}}/\\d{\\eta}$";
+ const string dNdEtaTeX = "$\\mathrm{d}{N_\\text{ch}}/\\mathrm{d}{\\eta}$";
const string etaTeX = "$\\eta$";
/// @todo Get 630 GeV data in HepData
// _hist_eta630 = bookHistogram1D(3, 1, 0,
// "Pseudorapidity distribution at $\\sqrt{s} = \\unit{630}{\\GeV}$", dNdEtaTeX, etaTeX);
_hist_eta630 =
bookHistogram1D("d03-x01-y00",
"Pseudorapidity distribution at $\\sqrt{s} = \\unit{630}{\\GeV}$",
dNdEtaTeX, etaTeX, 10, 0, 3.5);
_hist_eta1800 =
bookHistogram1D(3, 1, 1,
"Pseudorapidity distribution at $\\sqrt{s} = \\unit{1800}{\\GeV}$",
dNdEtaTeX, etaTeX);
}
// Do the analysis
void CDF_1990_S2089246::analyze(const Event& event) {
const double sqrtS = applyProjection<Beam>(event, "Beam").sqrtS();
const double weight = event.weight();
const FinalState& fs = applyProjection<FinalState>(event, "FS");
foreach (const Particle& p, fs.particles()) {
const double eta = p.momentum().pseudorapidity();
if (fuzzyEquals(sqrtS/GeV, 630)) {
_hist_eta630->fill(eta, weight);
} else if (fuzzyEquals(sqrtS/GeV, 1800)) {
_hist_eta1800->fill(eta, weight);
}
}
}
// Finalize
void CDF_1990_S2089246::finalize() {
// Divide through by num events to get d<N>/d(eta) in bins
scale(_hist_eta630, 1/sumOfWeights());
scale(_hist_eta1800, 1/sumOfWeights());
}
}
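The pseudorapidity filled above is a pure function of the track direction; a self-contained sketch of the quantity (the helper below is illustrative, not Rivet API — the analysis gets it via momentum().pseudorapidity()):

#include <cmath>

// eta = -ln(tan(theta/2)); the closed form below avoids computing theta explicitly.
double pseudorapidityOf(double px, double py, double pz) {
  const double p = std::sqrt(px*px + py*py + pz*pz);
  return 0.5 * std::log((p + pz) / (p - pz));
}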
diff --git a/src/Analyses/CDF_2000_S4155203.cc b/src/Analyses/CDF_2000_S4155203.cc
--- a/src/Analyses/CDF_2000_S4155203.cc
+++ b/src/Analyses/CDF_2000_S4155203.cc
@@ -1,51 +1,51 @@
// -*- C++ -*-
// CDF Z pT analysis
#include "Rivet/Rivet.hh"
#include "Rivet/Tools/Logging.hh"
#include "Rivet/Analyses/CDF_2000_S4155203.hh"
#include "Rivet/Projections/ZFinder.hh"
#include "Rivet/RivetAIDA.hh"
namespace Rivet {
CDF_2000_S4155203::CDF_2000_S4155203() {
setBeams(PROTON, ANTIPROTON);
ZFinder zfinder(FinalState(), ELECTRON, 66.0*GeV, 116.0*GeV, 0.2);
addProjection(zfinder, "ZFinder");
}
// Book histograms
void CDF_2000_S4155203::init() {
/// @todo Cross-section units in label
_hist_zpt =
bookHistogram1D(1, 1, 1, "$p_\\perp$ of Z boson in $\\Pelectron \\Ppositron$ decays",
- "$p_\\perp(\\PZ)$ / GeV", "$\\d{\\sigma}/\\d{p_\\perp(\\PZ)}$");
+ "$p_\\perp(\\PZ)$ / GeV", "$\\mathrm{d}{\\sigma}/\\mathrm{d}{p_\\perp(\\PZ)}$");
}
// Do the analysis
void CDF_2000_S4155203::analyze(const Event& e) {
const ZFinder& zfinder = applyProjection<ZFinder>(e, "ZFinder");
if (zfinder.particles().size() != 1) {
getLog() << Log::DEBUG << "No unique e+e- pair found" << endl;
vetoEvent(e);
}
FourMomentum pZ = zfinder.particles()[0].momentum();
getLog() << Log::DEBUG << "Dilepton mass = " << pZ.mass()/GeV << " GeV" << endl;
getLog() << Log::DEBUG << "Dilepton pT = " << pZ.pT()/GeV << " GeV" << endl;
_hist_zpt->fill(pZ.pT()/GeV, e.weight());
}
void CDF_2000_S4155203::finalize() {
// Normalize to the experimental cross-section
/// @todo Get norm from generator cross-section
normalize(_hist_zpt, 247.4);
}
}
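A minimal standalone sketch of the mass window that the ZFinder arguments (66.0*GeV, 116.0*GeV) encode; the helper name is ours, and Rivet's FourMomentum arithmetic is assumed:

// Accept an e+e- pair as a Z candidate if its invariant mass lies
// inside the window passed to the ZFinder constructor above.
bool inZMassWindow(const FourMomentum& ep, const FourMomentum& em) {
  const double m = (ep + em).mass() / GeV;
  return m > 66.0 && m < 116.0;
}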
diff --git a/src/Analyses/CDF_2001_S4751469.cc b/src/Analyses/CDF_2001_S4751469.cc
--- a/src/Analyses/CDF_2001_S4751469.cc
+++ b/src/Analyses/CDF_2001_S4751469.cc
@@ -1,189 +1,189 @@
// -*- C++ -*-
// Field & Stuart underlying event analysis at CDF.
// Phys.Rev.D65:092002,2002 - no arXiv code.
// FNAL-PUB 01/211-E
#include "Rivet/Rivet.hh"
#include "Rivet/RivetAIDA.hh"
#include "Rivet/Tools/Logging.hh"
#include "Rivet/Analyses/CDF_2001_S4751469.hh"
#include "Rivet/Projections/ChargedFinalState.hh"
#include "Rivet/Projections/LossyFinalState.hh"
#include "Rivet/Projections/FastJets.hh"
namespace Rivet {
// Constructor
CDF_2001_S4751469::CDF_2001_S4751469()
: _totalNumTrans2(0), _totalNumTrans5(0), _totalNumTrans30(0),
_sumWeightsPtLead2(0), _sumWeightsPtLead5(0), _sumWeightsPtLead30(0)
{
setBeams(PROTON, ANTIPROTON);
const ChargedFinalState cfs(-1.0, 1.0, 0.5*GeV);
const LossyFinalState lfs(cfs, 0.08);
addProjection(lfs, "FS");
addProjection(FastJets(lfs, FastJets::TRACKJET, 0.7), "TrackJet");
}
// Book histograms
void CDF_2001_S4751469::init() {
const string pT1 = "$p_\\perp^\\text{lead}$";
const string Nch = "$N_\\text{ch}$";
string xlabel = pT1 + " / GeV";
string ylabel = Nch;
_numTowardMB = bookProfile1D(3, 1, 1, Nch + " (toward) for min-bias", xlabel, ylabel);
_numTransMB = bookProfile1D(3, 1, 2, Nch + " (transverse) for min-bias", xlabel, ylabel);
_numAwayMB = bookProfile1D(3, 1, 3, Nch + " (away) for min-bias", xlabel, ylabel);
_numTowardJ20 = bookProfile1D(4, 1, 1, Nch + " (toward) for JET20", xlabel, ylabel);
_numTransJ20 = bookProfile1D(4, 1, 2, Nch + " (transverse) for JET20", xlabel, ylabel);
_numAwayJ20 = bookProfile1D(4, 1, 3, Nch + " (away) for JET20", xlabel, ylabel);
const string pTsum = "$p_\\perp^\\text{sum}$";
ylabel = pTsum + " / GeV";
_ptsumTowardMB = bookProfile1D(5, 1, 1, pTsum + " (toward) for min-bias", xlabel, ylabel);
_ptsumTransMB = bookProfile1D(5, 1, 2, pTsum + " (transverse) for min-bias", xlabel, ylabel);
_ptsumAwayMB = bookProfile1D(5, 1, 3, pTsum + " (away) for min-bias", xlabel, ylabel);
_ptsumTowardJ20 = bookProfile1D(6, 1, 1, pTsum + " (toward) for JET20", xlabel, ylabel);
_ptsumTransJ20 = bookProfile1D(6, 1, 2, pTsum + " (transverse) for JET20", xlabel, ylabel);
_ptsumAwayJ20 = bookProfile1D(6, 1, 3, pTsum + " (away) for JET20", xlabel, ylabel);
const string pT = "$p_\\perp$";
xlabel = pT + " / GeV";
- ylabel = "$1/\\sigma \\, \\d{\\sigma}/\\d{p_\\perp}$";
+ ylabel = "$1/\\sigma \\, \\mathrm{d}{\\sigma}/\\mathrm{d}{p_\\perp}$";
_ptTrans2 = bookHistogram1D(7, 1, 1, "$p_\\perp$ distribution "
"(transverse, $p_\\perp^\\text{lead} > 2\\text{ GeV}$)",
xlabel, ylabel);
_ptTrans5 = bookHistogram1D(7, 1, 2, "$p_\\perp$ distribution "
"(transverse, $p_\\perp^\\text{lead} > 5\\text{ GeV}$)",
xlabel, ylabel);
_ptTrans30 = bookHistogram1D(7, 1, 3, "$p_\\perp$ distribution "
"(transverse, $p_\\perp^\\text{lead} > 30 \\text{GeV}$)",
xlabel, ylabel);
}
// Do the analysis
void CDF_2001_S4751469::analyze(const Event& event) {
// Analyse, with pT > 0.5 GeV AND |eta| < 1
const JetAlg& tj = applyProjection<JetAlg>(event, "TrackJet");
// Get jets, sorted by pT
const Jets jets = tj.jetsByPt();
if (jets.empty()) {
vetoEvent(event);
}
Jet leadingJet = jets.front();
const double phiLead = leadingJet.ptWeightedPhi();
const double ptLead = leadingJet.ptSum();
// Cut on highest pT jet: combined 0.5 GeV < pT(lead) < 50 GeV
if (ptLead/GeV < 0.5) vetoEvent(event);
if (ptLead/GeV > 50.0) vetoEvent(event);
// Get the event weight
const double weight = event.weight();
// Count sum of all event weights in three pTlead regions
if (ptLead/GeV > 2.0) {
_sumWeightsPtLead2 += weight;
}
if (ptLead/GeV > 5.0) {
_sumWeightsPtLead5 += weight;
}
if (ptLead/GeV > 30.0) {
_sumWeightsPtLead30 += weight;
}
// Run over tracks
double ptSumToward(0.0), ptSumAway(0.0), ptSumTrans(0.0);
size_t numToward(0), numTrans(0), numAway(0);
foreach (const Jet& j, jets) {
foreach (const FourMomentum& p, j) {
// Calculate Delta(phi) from leading jet
const double deltaPhi = delta_phi(p.azimuthalAngle(), phiLead);
// Get pT sum and multiplicity values for each region
// (each is 1 number for each region per event)
/// @todo Include event weight factor?
if (deltaPhi < PI/3.0) {
ptSumToward += p.pT();
++numToward;
} else if (deltaPhi < 2*PI/3.0) {
ptSumTrans += p.pT();
++numTrans;
// Fill transverse pT distributions
if (ptLead/GeV > 2.0) {
_ptTrans2->fill(p.pT()/GeV, weight);
_totalNumTrans2 += weight;
}
if (ptLead/GeV > 5.0) {
_ptTrans5->fill(p.pT()/GeV, weight);
_totalNumTrans5 += weight;
}
if (ptLead/GeV > 30.0) {
_ptTrans30->fill(p.pT()/GeV, weight);
_totalNumTrans30 += weight;
}
} else {
ptSumAway += p.pT();
++numAway;
}
}
}
// Log some event details
getLog() << Log::DEBUG
<< "pT [lead; twd, away, trans] = ["
<< ptLead << "; "
<< ptSumToward << ", "
<< ptSumAway << ", "
<< ptSumTrans << "]"
<< endl;
// Update the pT profile histograms
_ptsumTowardMB->fill(ptLead/GeV, ptSumToward/GeV, weight);
_ptsumTowardJ20->fill(ptLead/GeV, ptSumToward/GeV, weight);
_ptsumTransMB->fill(ptLead/GeV, ptSumTrans/GeV, weight);
_ptsumTransJ20->fill(ptLead/GeV, ptSumTrans/GeV, weight);
_ptsumAwayMB->fill(ptLead/GeV, ptSumAway/GeV, weight);
_ptsumAwayJ20->fill(ptLead/GeV, ptSumAway/GeV, weight);
// Log some event details
getLog() << Log::DEBUG
<< "N [twd, away, trans] = ["
<< numToward << ", "
<< numTrans << ", "
<< numAway << "]"
<< endl;
// Update the N_jet profile histograms
_numTowardMB->fill(ptLead/GeV, numToward, weight);
_numTowardJ20->fill(ptLead/GeV, numToward, weight);
_numTransMB->fill(ptLead/GeV, numTrans, weight);
_numTransJ20->fill(ptLead/GeV, numTrans, weight);
_numAwayMB->fill(ptLead/GeV, numAway, weight);
_numAwayJ20->fill(ptLead/GeV, numAway, weight);
}
void CDF_2001_S4751469::finalize() {
normalize(_ptTrans2, _totalNumTrans2 / _sumWeightsPtLead2);
normalize(_ptTrans5, _totalNumTrans5 / _sumWeightsPtLead5);
normalize(_ptTrans30, _totalNumTrans30 / _sumWeightsPtLead30);
}
}
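The toward/transverse/away split in analyze() depends only on the folded azimuthal separation from the leading track-jet; a standalone sketch (the enum and helper names are ours):

#include <cmath>

enum Region { TOWARD, TRANSVERSE, AWAY };

// |dphi| < pi/3: toward; pi/3 .. 2pi/3: transverse; otherwise: away.
// Assumes dphi is already folded into [0, pi], as the comparisons above require.
Region classifyRegion(double dphi) {
  if (dphi < M_PI/3.0) return TOWARD;
  if (dphi < 2.0*M_PI/3.0) return TRANSVERSE;
  return AWAY;
}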
diff --git a/src/Analyses/CDF_2002_S4796047.cc b/src/Analyses/CDF_2002_S4796047.cc
--- a/src/Analyses/CDF_2002_S4796047.cc
+++ b/src/Analyses/CDF_2002_S4796047.cc
@@ -1,64 +1,64 @@
// -*- C++ -*-
#include "Rivet/Rivet.hh"
#include "Rivet/RivetAIDA.hh"
#include "Rivet/Tools/Logging.hh"
#include "Rivet/Analyses/CDF_2002_S4796047.hh"
#include "Rivet/Projections/Beam.hh"
#include "Rivet/Projections/ChargedFinalState.hh"
namespace Rivet {
CDF_2002_S4796047::CDF_2002_S4796047()
{
setBeams(PROTON, ANTIPROTON);
addProjection(Beam(), "Beams");
const ChargedFinalState cfs(-1.0, 1.0, 0.4*GeV);
addProjection(cfs, "FS");
}
// Book histograms
void CDF_2002_S4796047::init() {
/// @todo Cross-section units
_hist_multiplicity_630 = bookHistogram1D(1, 1, 1,
"Charged multiplicity at $\\sqrt{s} = 630~\\text{GeV}$, $|\\eta| < 1$, $p_T > 0.4~\\text{GeV}$",
- "$n_\\text{ch}$", "\\d{\\sigma}/\\d{n_\\text{ch}}$");
+ "$n_\\text{ch}$", "\\mathrm{d}{\\sigma}/\\mathrm{d}{n_\\text{ch}}$");
/// @todo Cross-section units
_hist_multiplicity_1800 = bookHistogram1D(2, 1, 1,
"Charged multiplicity at $\\sqrt{s} = 1800~\\text{GeV}$, $|\\eta| < 1$, $p_T > 0.4~\\text{GeV}$",
- "$n_\\text{ch}$", "\\d{\\sigma}/\\d{n_\\text{ch}}$");
+ "$n_\\text{ch}$", "\\mathrm{d}{\\sigma}/\\mathrm{d}{n_\\text{ch}}$");
}
// Do the analysis
void CDF_2002_S4796047::analyze(const Event& e) {
Log& log = getLog();
const ChargedFinalState& fs = applyProjection<ChargedFinalState>(e, "FS");
const size_t numParticles = fs.particles().size();
// Get the event weight
const double weight = e.weight();
// Get beams and average beam momentum
const ParticlePair& beams = applyProjection<Beam>(e, "Beams").beams();
const double sumBeamMom = ( beams.first.momentum().vector3().mod() +
beams.second.momentum().vector3().mod() );
if (inRange(sumBeamMom/GeV, 625, 635)) {
_hist_multiplicity_630->fill(numParticles, weight);
} else if (inRange(sumBeamMom/GeV, 1795, 1805)) {
_hist_multiplicity_1800->fill(numParticles, weight);
}
}
void CDF_2002_S4796047::finalize() {
/// @todo Get cross-section from the generator
normalize(_hist_multiplicity_630, 3.21167);
normalize(_hist_multiplicity_1800, 4.19121);
}
}
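The 630/1800 GeV discrimination above uses the scalar sum of the beam momenta as a proxy for \(\sqrt{s}\), valid for head-on beams of negligible mass:

\[ \sqrt{s} = \sqrt{(p_1 + p_2)^2} \;\approx\; E_1 + E_2 \;\approx\; |\vec{p}_1| + |\vec{p}_2| \qquad (m_{1,2} \ll E). \]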
diff --git a/src/Analyses/CDF_2004_S5839831.cc b/src/Analyses/CDF_2004_S5839831.cc
--- a/src/Analyses/CDF_2004_S5839831.cc
+++ b/src/Analyses/CDF_2004_S5839831.cc
@@ -1,419 +1,419 @@
// -*- C++ -*-
// "Acosta" underlying event analysis at CDF, inc. "Swiss Cheese"
#include "Rivet/Rivet.hh"
#include "Rivet/RivetAIDA.hh"
#include "Rivet/Tools/Logging.hh"
#include "Rivet/Analyses/CDF_2004_S5839831.hh"
#include "Rivet/Projections/Beam.hh"
#include "Rivet/Projections/ChargedFinalState.hh"
#include "Rivet/Projections/FastJets.hh"
namespace Rivet {
CDF_2004_S5839831::CDF_2004_S5839831() {
setBeams(PROTON, ANTIPROTON);
addProjection(Beam(), "Beam");
// NB. Charged track reconstruction efficiency has already been corrected in the data.
const ChargedFinalState fs(-1.2, 1.2, 0.4*GeV);
addProjection(fs, "FS");
addProjection(FastJets(fs, FastJets::CDFJETCLU, 0.7), "Jets");
// Restrict tracks to |eta| < 0.7 for the min bias part.
const ChargedFinalState mbfs(-0.7, 0.7, 0.4*GeV);
addProjection(mbfs, "MBFS");
// Restrict tracks to |eta| < 1 for the Swiss-Cheese part.
const ChargedFinalState cheesefs(-1.0, 1.0, 0.4*GeV);
addProjection(cheesefs, "CheeseFS");
addProjection(FastJets(cheesefs, FastJets::CDFJETCLU, 0.7), "CheeseJets");
}
// Book histograms
void CDF_2004_S5839831::init() {
getLog() << Log::WARN << "\n"
<< "***************************************************\n"
<< "This analysis is not considered reliable enough for\n"
<< "inclusion in MC tuning studies: be careful! Expert\n"
<< "help with ensuring that this analysis matches the\n"
<< "experiment's implementation would be very welcome!\n"
<< "***************************************************"
<< endl;
const string ptmax = "$p_\\perp^\\text{max} \\rangle$";
const string ptmin = "$p_\\perp^\\text{min}$";
const string ptdiff = "$p_\\perp^\\text{diff}$";
const string ptsum = "$p_\\perp^\\text{sum}$";
const string ptmaxmean = "$\\langle p_\\perp^\\text{max} \\rangle$";
const string ptminmean = "$\\langle p_\\perp^\\text{min} \\rangle$";
const string et1 = "$E_\\perp^\\text{lead}$";
string xlabel = et1 + " / GeV";
string ylabel = "";
_pt90MaxAvg1800 =
bookProfile1D(1, 1, 1,
ptmaxmean + " vs. " + et1 + " at $\\sqrt{s}$ = 1800 GeV",
xlabel, ptmaxmean + " / GeV");
_pt90MinAvg1800 =
bookProfile1D(1, 1, 2,
ptminmean + " vs. " + et1 + " at $\\sqrt{s}$ = 1800 GeV",
xlabel, ptminmean + " / GeV");
_pt90Max1800 =
bookProfile1D(2, 1, 1,
ptmax + " vs. " + et1 + " at $\\sqrt{s}$ = 1800 GeV",
xlabel, ptmax + " / GeV");
_pt90Min1800 =
bookProfile1D(2, 1, 2,
ptmin + " vs. " + et1 + " at $\\sqrt{s}$ = 1800 GeV",
xlabel, ptmin + " / GeV");
_pt90Diff1800 =
bookProfile1D(2, 1, 3,
ptdiff + " vs. " + et1 + " at $\\sqrt{s}$ = 1800 GeV",
xlabel, ptdiff + " / GeV");
_num90Max1800 =
bookProfile1D(4, 1, 1,
"$N_\\text{max}$ vs. " + et1 + " at $\\sqrt{s}$ = 1800 GeV",
xlabel, "$N_\\text{max}$");
_num90Min1800 =
bookProfile1D(4, 1, 2,
"$N_\\text{min}$ vs. " + et1 + " at $\\sqrt{s}$ = 1800 GeV",
xlabel, "$N_\\text{min}$");
_pTSum1800_2Jet =
bookProfile1D(7, 1, 1,
"Swiss Cheese " + ptsum + " vs. " + et1 +
" (for removal of 2 jets) at $\\sqrt{s}$ = 1800 GeV",
xlabel, ptsum + " / GeV (2 jets removed)");
_pTSum1800_3Jet =
bookProfile1D(7, 1, 2,
"Swiss Cheese " + ptsum + " vs. " + et1 +
" (for removal of 3 jets) at $\\sqrt{s}$ = 1800 GeV",
xlabel, ptsum + " / GeV (3 jets removed)");
_pt90Max630 =
bookProfile1D(8, 1, 1,
ptmax + " vs. " + et1 + " at $\\sqrt{s}$ = 630 GeV",
xlabel, ptmax + " / GeV");
_pt90Min630 =
bookProfile1D(8, 1, 2,
ptmin + " vs. " + et1 + " at $\\sqrt{s}$ = 630 GeV",
xlabel, ptmin + " / GeV");
_pt90Diff630 =
bookProfile1D(8, 1, 3,
ptdiff + " vs. " + et1 + " at $\\sqrt{s}$ = 630 GeV",
xlabel, ptdiff + " / GeV");
_pTSum630_2Jet =
bookProfile1D(9, 1, 1,
"Swiss Cheese " + ptsum + " vs. " + et1 +
" (for removal of 2 jets) at $\\sqrt{s}$ = 630 GeV",
xlabel, ptsum + " / GeV (2 jets removed)");
_pTSum630_3Jet =
bookProfile1D(9, 1, 2,
"Swiss Cheese " + ptsum + " vs. " + et1 +
" (for removal of 3 jets) at $\\sqrt{s}$ = 630 GeV",
xlabel, ptsum + " / GeV (3 jets removed)");
string basetitle = "$p_\\perp$ distribution in MAX+MIN transverse cones for ";
xlabel = "$p_\\perp / GeV";
- ylabel = "$\\d{\\sigma}/\\d{p_\\perp}$";
+ ylabel = "$\\mathrm{d}{\\sigma}/\\mathrm{d}{p_\\perp}$";
/// @todo Check this normalisation defn (num-tracks x xsec?)
_pt90Dbn1800Et40 =
bookHistogram1D(3, 1, 1,
basetitle + "$40 < E_\\perp^\\text{lead} < 80$ GeV at $\\sqrt{s}$ = 1800 GeV",
xlabel, ylabel);
/// @todo Check this normalisation defn (num-tracks x xsec?)
_pt90Dbn1800Et80 =
bookHistogram1D(3, 1, 2,
basetitle + "$80 < E_\\perp^\\text{lead} < 120$ GeV at $\\sqrt{s}$ = 1800 GeV",
xlabel, ylabel);
/// @todo Check this normalisation defn (num-tracks x xsec?)
_pt90Dbn1800Et120 =
bookHistogram1D(3, 1, 3,
basetitle + "$120 < E_\\perp^\\text{lead} < 160$ GeV at $\\sqrt{s}$ = 1800 GeV",
xlabel, ylabel);
/// @todo Check this normalisation defn (num-tracks x xsec?)
_pt90Dbn1800Et160 =
bookHistogram1D(3, 1, 4,
basetitle + "$160 < E_\\perp^\\text{lead} < 200$ GeV at $\\sqrt{s}$ = 1800 GeV",
xlabel, ylabel);
/// @todo Check this normalisation defn (num-tracks x xsec?)
_pt90Dbn1800Et200 =
bookHistogram1D(3, 1, 5,
basetitle + "$200 < E_\\perp^\\text{lead} < 270$ GeV at $\\sqrt{s}$ = 1800 GeV",
xlabel, ylabel);
/// @todo Check this normalisation defn (num-tracks x xsec?)
_ptDbn1800MB =
bookHistogram1D(6, 1, 1,
"Min bias $p_\\perp$ distribution at $\\sqrt{s}$ = 1800 GeV",
xlabel, ylabel);
xlabel = "$N_\\text{ch}$";
- ylabel = "$\\d{\\sigma}/\\d{N_\\text{ch}}$";
+ ylabel = "$\\mathrm{d}{\\sigma}/\\mathrm{d}{N_\\text{ch}}$";
/// @todo Check this normalisation defn.
_numTracksDbn1800MB =
bookHistogram1D(5, 1, 1,
"Min bias track multiplicity distribution at $\\sqrt{s}$ = 1800 GeV",
xlabel, ylabel);
/// @todo Check this normalisation defn.
_numTracksDbn630MB =
bookHistogram1D(10, 1, 1,
"Min bias track multiplicity distribution at $\\sqrt{s}$ = 630 GeV",
xlabel, ylabel);
/// @todo Check this normalisation defn.
_ptDbn630MB =
bookHistogram1D(11, 1, 1,
"Min bias $p_\\perp$ distribution at $\\sqrt{s}$ = 630 GeV",
xlabel, ylabel);
}
const CDF_2004_S5839831::ConesInfo
CDF_2004_S5839831::calcTransCones(const double etaLead, const double phiLead,
const ParticleVector& tracks) {
const double phiTransPlus = mapAngle0To2Pi(phiLead + PI/2.0);
const double phiTransMinus = mapAngle0To2Pi(phiLead - PI/2.0);
getLog() << Log::DEBUG << "phi_lead = " << phiLead
<< " -> trans = (" << phiTransPlus
<< ", " << phiTransMinus << ")" << endl;
unsigned int numPlus(0), numMinus(0);
double ptPlus(0), ptMinus(0);
// Run over all charged tracks
foreach (const Particle& t, tracks) {
FourMomentum trackMom = t.momentum();
const double pt = trackMom.pT();
// Find if track mom is in either transverse cone
if (deltaR(trackMom, etaLead, phiTransPlus) < 0.7) {
ptPlus += pt;
numPlus += 1;
} else if (deltaR(trackMom, etaLead, phiTransMinus) < 0.7) {
ptMinus += pt;
numMinus += 1;
}
}
ConesInfo rtn;
// Assign N_{min,max} from N_{plus,minus}
rtn.numMax = (ptPlus >= ptMinus) ? numPlus : numMinus;
rtn.numMin = (ptPlus >= ptMinus) ? numMinus : numPlus;
// Assign pT_{min,max} from pT_{plus,minus}
rtn.ptMax = (ptPlus >= ptMinus) ? ptPlus : ptMinus;
rtn.ptMin = (ptPlus >= ptMinus) ? ptMinus : ptPlus;
rtn.ptDiff = fabs(rtn.ptMax - rtn.ptMin);
getLog() << Log::DEBUG << "Min cone has " << rtn.numMin << " tracks -> "
<< "pT_min = " << rtn.ptMin/GeV << " GeV" << endl;
getLog() << Log::DEBUG << "Max cone has " << rtn.numMax << " tracks -> "
<< "pT_max = " << rtn.ptMax/GeV << " GeV" << endl;
return rtn;
}
const CDF_2004_S5839831::ConesInfo
CDF_2004_S5839831::calcTransCones(const FourMomentum& leadvec,
const ParticleVector& tracks) {
const double etaLead = leadvec.pseudorapidity();
const double phiLead = leadvec.azimuthalAngle();
return calcTransCones(etaLead, phiLead, tracks);
}
vector<Jet> CDF_2004_S5839831::sortjets(vector<Jet>& jets) {
sort(jets.begin(), jets.end(), cmpJetsByE); // was ...ByEt
if ( getLog().isActive(Log::DEBUG) ) {
getLog() << Log::DEBUG << "Jet Et sums = [" << endl;
foreach (Jet& j, jets) {
getLog() << Log::DEBUG << " " << j.EtSum() << endl;
}
getLog() << Log::DEBUG << "]" << endl;
}
return jets;
}
// Do the analysis
void CDF_2004_S5839831::analyze(const Event& event) {
const double sqrtS = applyProjection<Beam>(event, "Beam").sqrtS();
// Get the event weight
const double weight = event.weight();
{
const ParticleVector tracks = applyProjection<FinalState>(event, "FS").particles();
vector<Jet> jets = applyProjection<JetAlg>(event, "Jets").jets();
if (!jets.empty()) {
// Leading jet must be in central |eta| < 0.5 region
sortjets(jets);
const Jet leadingjet = jets.front();
const double etaLead = leadingjet.momentum().pseudorapidity();
if (fabs(etaLead) > 0.5) {
getLog() << Log::DEBUG << "Leading jet eta = " << etaLead << " not in |eta| < 0.5" << endl;
} else {
// Get Et of the leading jet: used to bin histograms
const double ETlead = leadingjet.EtSum();
getLog() << Log::DEBUG << "Leading Et = " << ETlead/GeV << " GeV" << endl;
// Multiplicity & pT distributions for sqrt(s) = 630 GeV, 1800 GeV
const ConesInfo cones = calcTransCones(leadingjet.momentum(), tracks);
if (fuzzyEquals(sqrtS/GeV, 630)) {
_pt90Max630->fill(ETlead/GeV, cones.ptMax/GeV, weight);
_pt90Min630->fill(ETlead/GeV, cones.ptMin/GeV, weight);
_pt90Diff630->fill(ETlead/GeV, cones.ptDiff/GeV, weight);
} else if (fuzzyEquals(sqrtS/GeV, 1800)) {
_num90Max1800->fill(ETlead/GeV, cones.numMax, weight);
_num90Min1800->fill(ETlead/GeV, cones.numMin, weight);
_pt90Max1800->fill(ETlead/GeV, cones.ptMax/GeV, weight);
_pt90Min1800->fill(ETlead/GeV, cones.ptMin/GeV, weight);
_pt90Diff1800->fill(ETlead/GeV, cones.ptDiff/GeV, weight);
_pt90MaxAvg1800->fill(ETlead/GeV, cones.ptMax/GeV, weight); // /numMax
_pt90MinAvg1800->fill(ETlead/GeV, cones.ptMin/GeV, weight); // /numMin
//
const double ptTransTotal = cones.ptMax + cones.ptMin;
if (inRange(ETlead/GeV, 40, 80)) {
_pt90Dbn1800Et40->fill(ptTransTotal/GeV, weight);
} else if (inRange(ETlead/GeV, 80, 120)) {
_pt90Dbn1800Et80->fill(ptTransTotal/GeV, weight);
} else if (inRange(ETlead/GeV, 120, 160)) {
_pt90Dbn1800Et120->fill(ptTransTotal/GeV, weight);
} else if (inRange(ETlead/GeV, 160, 200)) {
_pt90Dbn1800Et160->fill(ptTransTotal/GeV, weight);
} else if (inRange(ETlead/GeV, 200, 270)) {
_pt90Dbn1800Et200->fill(ptTransTotal/GeV, weight);
}
}
}
}
}
// Fill min bias total track multiplicity histos
{
const ParticleVector mbtracks = applyProjection<FinalState>(event, "MBFS").particles();
if (fuzzyEquals(sqrtS/GeV, 1800)) {
_numTracksDbn1800MB->fill(mbtracks.size(), weight);
} else if (fuzzyEquals(sqrtS/GeV, 630)) {
_numTracksDbn630MB->fill(mbtracks.size(), weight);
}
// Run over all charged tracks
foreach (const Particle& t, mbtracks) {
FourMomentum trackMom = t.momentum();
const double pt = trackMom.pT();
// Plot total pT distribution for min bias
if (fuzzyEquals(sqrtS/GeV, 1800)) {
_ptDbn1800MB->fill(pt/GeV, weight);
} else if (fuzzyEquals(sqrtS/GeV, 630)) {
_ptDbn630MB->fill(pt/GeV, weight);
}
}
}
// Construct "Swiss Cheese" pT distributions, with pT contributions from
// tracks within R = 0.7 of the 1st, 2nd (and 3rd) jets being ignored. A
// different set of charged tracks, with |eta| < 1.0, is used here, and all
// the removed jets must have Et > 5 GeV.
{
const ParticleVector cheesetracks = applyProjection<FinalState>(event, "CheeseFS").particles();
vector<Jet> cheesejets = applyProjection<JetAlg>(event, "Jets").jets();
if (cheesejets.empty()) {
getLog() << Log::DEBUG << "No 'cheese' jets found in event" << endl;
return;
}
sortjets(cheesejets);
if (cheesejets.size() > 1 &&
fabs(cheesejets[0].momentum().pseudorapidity()) <= 0.5 &&
cheesejets[0].momentum().Et()/GeV > 5.0 &&
cheesejets[1].momentum().Et()/GeV > 5.0) {
const double cheeseETlead = cheesejets[0].momentum().Et();
const double eta1 = cheesejets[0].momentum().pseudorapidity();
const double phi1 = cheesejets[0].momentum().azimuthalAngle();
const double eta2 = cheesejets[1].momentum().pseudorapidity();
const double phi2 = cheesejets[1].momentum().azimuthalAngle();
double ptSumSub2(0), ptSumSub3(0);
foreach (const Particle& t, cheesetracks) {
FourMomentum trackMom = t.momentum();
const double pt = trackMom.pT();
// Subtracting 2 leading jets
const double deltaR1 = deltaR(trackMom, eta1, phi1);
const double deltaR2 = deltaR(trackMom, eta2, phi2);
getLog() << Log::TRACE << "Track vs jet(1): "
<< "|(" << trackMom.pseudorapidity() << ", " << trackMom.azimuthalAngle() << ") - "
<< "|(" << eta1 << ", " << phi1 << ")| = " << deltaR1 << endl;
getLog() << Log::TRACE << "Track vs jet(2): "
<< "|(" << trackMom.pseudorapidity() << ", " << trackMom.azimuthalAngle() << ") - "
<< "|(" << eta2 << ", " << phi2 << ")| = " << deltaR2 << endl;
if (deltaR1 > 0.7 && deltaR2 > 0.7) {
ptSumSub2 += pt;
// Subtracting 3rd leading jet
if (cheesejets.size() > 2 &&
cheesejets[2].momentum().Et()/GeV > 5.0) {
const double eta3 = cheesejets[2].momentum().pseudorapidity();
const double phi3 = cheesejets[2].momentum().azimuthalAngle();
const double deltaR3 = deltaR(trackMom, eta3, phi3);
getLog() << Log::TRACE << "Track vs jet(3): "
<< "|(" << trackMom.pseudorapidity() << ", " << trackMom.azimuthalAngle() << ") - "
<< "|(" << eta3 << ", " << phi3 << ")| = " << deltaR3 << endl;
if (deltaR3 > 0.7) {
ptSumSub3 += pt;
}
}
}
}
// Swiss Cheese sub 2,3 jets distributions for sqrt(s) = 630 GeV, 1800 GeV
if (fuzzyEquals(sqrtS/GeV, 630)) {
_pTSum630_2Jet->fill(cheeseETlead/GeV, ptSumSub2/GeV, weight);
_pTSum630_3Jet->fill(cheeseETlead/GeV, ptSumSub3/GeV, weight);
} else if (fuzzyEquals(sqrtS/GeV, 1800)) {
_pTSum1800_2Jet->fill(cheeseETlead/GeV, ptSumSub2/GeV, weight);
_pTSum1800_3Jet->fill(cheeseETlead/GeV, ptSumSub3/GeV, weight);
}
}
}
}
void CDF_2004_S5839831::finalize() {
// Normalize to actual number of entries in pT dbn histos
/// @todo Check this normalisation defn.
normalize(_pt90Dbn1800Et40, 1656.75);
/// @todo Check this normalisation defn.
normalize(_pt90Dbn1800Et80, 4657.5);
/// @todo Check this normalisation defn.
normalize(_pt90Dbn1800Et120, 5395.5);
/// @todo Check this normalisation defn.
normalize(_pt90Dbn1800Et160, 7248.75);
/// @todo Check this normalisation defn.
normalize(_pt90Dbn1800Et200, 2442.0);
// and for min bias distributions:
/// @todo Check this normalisation defn.
normalize(_numTracksDbn1800MB, 309718.25);
/// @todo Check this normalisation defn.
normalize(_numTracksDbn630MB, 1101024.0);
/// @todo Check this normalisation defn.
normalize(_ptDbn1800MB, 33600.0);
/// @todo Check this normalisation defn.
normalize(_ptDbn630MB, 105088.0);
}
}
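The two R = 0.7 cones built in calcTransCones() sit at the leading-jet pseudorapidity and at \(\phi_\text{lead} \pm 90^\circ\); a minimal sketch of the axis construction (helper names are ours, mirroring mapAngle0To2Pi):

#include <cmath>
#include <utility>

// Fold an angle into [0, 2*pi) -- the role mapAngle0To2Pi plays above.
double map02pi(double phi) {
  const double twopi = 2.0 * M_PI;
  phi = std::fmod(phi, twopi);
  return (phi < 0) ? phi + twopi : phi;
}

// Azimuthal axes of the two transverse ("90 degree") cones.
std::pair<double, double> transConeAxes(double phiLead) {
  return std::make_pair(map02pi(phiLead + M_PI/2), map02pi(phiLead - M_PI/2));
}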
diff --git a/src/Analyses/D0_2001_S4674421.cc b/src/Analyses/D0_2001_S4674421.cc
--- a/src/Analyses/D0_2001_S4674421.cc
+++ b/src/Analyses/D0_2001_S4674421.cc
@@ -1,156 +1,156 @@
// -*- C++ -*-
#include "Rivet/Tools/Logging.hh"
#include "Rivet/RivetAIDA.hh"
#include "Rivet/Tools/ParticleIDMethods.hh"
#include "Rivet/Analyses/D0_2001_S4674421.hh"
#include "Rivet/Projections/PVertex.hh"
#include "Rivet/Projections/LeadingParticlesFinalState.hh"
#include "Rivet/Projections/VetoedFinalState.hh"
namespace Rivet {
// - @c _mwmz = mass ratio \f$ m_W/m_Z \f$ used in the publication analysis
// - @c _brwenu = branching ratio \f$ BR(W \to e \nu) \f$ used in the publication analysis
// - @c _brzee = branching ratio \f$ BR(Z \to e e) \f$ used in the publication analysis
// - @c _mZmin = lower Z mass cut used in the publication analysis
// - @c _mZmax = upper Z mass cut used in the publication analysis
D0_2001_S4674421::D0_2001_S4674421()
: _mwmz(0.8820), _brwenu(0.1073), _brzee(0.033632), _mZmin(75.*GeV), _mZmax(105.*GeV)
{
setBeams(PROTON, ANTIPROTON);
setNeedsCrossSection(true);
//const FinalState fs(-3.0, 3.0);
FinalState fs(-5.0, 5.0); //corrected for detector acceptance
addProjection(fs, "FS");
// Z -> e- e+
LeadingParticlesFinalState eeFS(fs, -2.5, 2.5, 0.); //20.);
eeFS.addParticleIdPair(ELECTRON);
addProjection(eeFS, "eeFS");
// W- -> e- nu_e~
LeadingParticlesFinalState enuFS(fs, -2.5, 2.5, 0.); //25.);
enuFS.addParticleId(ELECTRON).addParticleId(NU_EBAR);
addProjection(enuFS, "enuFS");
// W+ -> e+ nu_e
LeadingParticlesFinalState enubFS(fs, -2.5, 2.5, 0.); //25.);
enubFS.addParticleId(POSITRON).addParticleId(NU_E);
addProjection(enubFS, "enubFS");
// Remove neutrinos for isolation of final state particles
VetoedFinalState vfs(fs);
vfs.vetoNeutrinos();
addProjection(vfs, "VFS");
}
void D0_2001_S4674421::init() {
_eventsFilledW = 0.0;
_eventsFilledZ = 0.0;
_h_dsigdpt_w =
- bookHistogram1D(1, 1, 1, "$\\d{\\sigma} / \\d{p_\\perp(W)}$",
- "$p_\\perp$ / GeV/$c$", "$\\d{\\sigma}/\\d{p_\\perp(W)}$");
+ bookHistogram1D(1, 1, 1, "$\\mathrm{d}{\\sigma} / \\mathrm{d}{p_\\perp(W)}$",
+ "$p_\\perp$ / GeV/$c$", "$\\mathrm{d}{\\sigma}/\\mathrm{d}{p_\\perp(W)}$");
_h_dsigdpt_z =
- bookHistogram1D(1, 1, 2, "$\\d{\\sigma} / \\d{p_\\perp(Z)}$",
- "$p_\\perp$ / GeV/$c$", "$\\d{\\sigma}/\\d{p_\\perp(Z)}$");
+ bookHistogram1D(1, 1, 2, "$\\mathrm{d}{\\sigma} / \\mathrm{d}{p_\\perp(Z)}$",
+ "$p_\\perp$ / GeV/$c$", "$\\mathrm{d}{\\sigma}/\\mathrm{d}{p_\\perp(Z)}$");
vector<double> bins(23);
bins += 0, 2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 25, 30, 35, 40, 50, 60, 70, 80, 100, 120, 160, 200;
_h_dsigdpt_scaled_z =
- bookHistogram1D("d01-x01-y03", "$\\d{\\sigma} / \\d{(p_\\perp(Z) \\cdot M_W/M_Z)}$",
+ bookHistogram1D("d01-x01-y03", "$\\mathrm{d}{\\sigma} / \\mathrm{d}{(p_\\perp(Z) \\cdot M_W/M_Z)}$",
"$p_\\perp(W)/M_W$", "$R_p_\\perp$", bins);
}
void D0_2001_S4674421::analyze(const Event& event) {
const double weight = event.weight();
const LeadingParticlesFinalState& eeFS = applyProjection<LeadingParticlesFinalState>(event, "eeFS");
if (eeFS.particles().size() == 2) {
// If there is a Z candidate:
static size_t Zcount = 0;
// Fill Z pT distributions
const ParticleVector& Zdaughters = eeFS.particles();
const FourMomentum pmom = Zdaughters[0].momentum() + Zdaughters[1].momentum();
double mass = sqrt(pmom.invariant());
if (mass/GeV > _mZmin && mass/GeV < _mZmax) {
++Zcount;
_eventsFilledZ += weight;
getLog() << Log::DEBUG << "Z #" << Zcount << " pmom.pT() = " << pmom.pT()/GeV << " GeV" << endl;
_h_dsigdpt_z->fill(pmom.pT()/GeV, weight);
_h_dsigdpt_scaled_z->fill(pmom.pT()/GeV * _mwmz, weight);
}
} else {
// There is no Z -> ee candidate... so this must be a W event, right?
const LeadingParticlesFinalState& enuFS = applyProjection<LeadingParticlesFinalState>(event, "enuFS");
const LeadingParticlesFinalState& enubFS = applyProjection<LeadingParticlesFinalState>(event, "enubFS");
static size_t Wcount = 0;
// Fill W pT distributions
ParticleVector Wdaughters;
if (enuFS.particles().size() == 2 && enubFS.isEmpty()) {
Wdaughters = enuFS.particles();
} else if (enuFS.isEmpty() && enubFS.particles().size() == 2) {
Wdaughters = enubFS.particles();
}
if (! Wdaughters.empty()) {
assert(Wdaughters.size() == 2);
const FourMomentum pmom = Wdaughters[0].momentum() + Wdaughters[1].momentum();
++Wcount;
_eventsFilledW += weight;
_h_dsigdpt_w->fill(pmom.pT()/GeV, weight);
}
}
}
void D0_2001_S4674421::finalize() {
// Get cross-section per event (i.e. per unit weight) from generator
const double xSecPerEvent = crossSection()/picobarn / sumOfWeights();
// Correct W pT distribution to W cross-section
const double xSecW = xSecPerEvent * _eventsFilledW;
// Correct Z pT distribution to Z cross-section
const double xSecZ = xSecPerEvent * _eventsFilledZ;
// Get W and Z pT integrals
const double wpt_integral = integral(_h_dsigdpt_w);
const double zpt_scaled_integral = integral(_h_dsigdpt_scaled_z);
// Divide and scale ratio histos
AIDA::IDataPointSet* div = histogramFactory().divide(histoDir() + "/d02-x01-y01", *_h_dsigdpt_w, *_h_dsigdpt_scaled_z);
div->setTitle("$[\\mathrm{d}\\sigma/\\mathrm{d}p_\\perp(W)] / [\\mathrm{d}\\sigma/\\mathrm{d}(p_\\perp(Z) \\cdot M_W/M_Z)]$");
if (xSecW == 0 || wpt_integral == 0 || xSecZ == 0 || zpt_scaled_integral == 0) {
getLog() << Log::WARN << "Not filling ratio plot because input histos are empty" << endl;
} else {
// Scale factor converts event counts to cross-sections, and inverts the
// branching ratios since only one decay channel has been analysed for each boson.
const double scalefactor = (xSecW / wpt_integral) / (xSecZ / zpt_scaled_integral) * (_brzee / _brwenu);
for (int pt = 0; pt < div->size(); ++pt) {
assert(div->point(pt)->dimension() == 2);
AIDA::IMeasurement* m = div->point(pt)->coordinate(1);
m->setValue(m->value() * scalefactor);
m->setErrorPlus(m->errorPlus() * scalefactor);
m->setErrorMinus(m->errorPlus() * scalefactor);
}
}
// Normalize non-ratio histos
normalize(_h_dsigdpt_w, xSecW);
normalize(_h_dsigdpt_z, xSecZ);
normalize(_h_dsigdpt_scaled_z, xSecZ);
}
}
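Schematically, the scale factor applied to each divided point in finalize() builds the measured ratio

\[ R_{p_\perp} = \frac{\mathrm{d}\sigma(W)/\mathrm{d}p_\perp}{\mathrm{d}\sigma(Z)/\mathrm{d}(p_\perp \cdot M_W/M_Z)} \times \frac{BR(Z \to ee)}{BR(W \to e\nu)}, \]

with the branching-ratio factor undoing the single-decay-channel selection noted in the code comment.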
diff --git a/src/Analyses/D0_2006_S6438750.cc b/src/Analyses/D0_2006_S6438750.cc
--- a/src/Analyses/D0_2006_S6438750.cc
+++ b/src/Analyses/D0_2006_S6438750.cc
@@ -1,109 +1,109 @@
// -*- C++ -*-
#include "Rivet/Analyses/D0_2006_S6438750.hh"
#include "Rivet/Tools/Logging.hh"
#include "Rivet/Projections/FinalState.hh"
#include "Rivet/Projections/LeadingParticlesFinalState.hh"
#include "Rivet/Projections/VetoedFinalState.hh"
#include "Rivet/RivetAIDA.hh"
namespace Rivet {
D0_2006_S6438750::D0_2006_S6438750()
{
setBeams(PROTON, ANTIPROTON);
/// @todo Use cross-section from generator
setNeedsCrossSection(true);
// General FS for photon isolation
FinalState fs(-1.5, 1.5);
addProjection(fs, "AllFS");
// Get leading photon
LeadingParticlesFinalState photonfs(fs, -1.0, 1.0);
photonfs.addParticleId(PHOTON);
addProjection(photonfs, "LeadingPhoton");
}
// Book histograms
void D0_2006_S6438750::init() {
_h_pTgamma =
bookHistogram1D(1, 1, 1, "$p_\\perp$ spectrum for leading photon",
"$p_\\perp(\\gamma_\\text{lead})$ / GeV",
- "$\\d{\\sigma}/\\d{p_\\perp(\\gamma_\\text{lead})}$");
+ "$\\mathrm{d}{\\sigma}/\\mathrm{d}{p_\\perp(\\gamma_\\text{lead})}$");
}
// Do the analysis
void D0_2006_S6438750::analyze(const Event& event) {
// Get the photon
const FinalState& photonfs = applyProjection<FinalState>(event, "LeadingPhoton");
if (photonfs.particles().size() != 1) {
getLog() << Log::DEBUG << "No photon found" << endl;
vetoEvent(event);
}
const FourMomentum photon = photonfs.particles().front().momentum();
if (photon.pT()/GeV < 23) {
getLog() << Log::DEBUG << "Leading photon has pT < 23 GeV: " << photon.pT()/GeV << endl;
vetoEvent(event);
}
// Get all other particles
const FinalState& fs = applyProjection<FinalState>(event, "AllFS");
if (fs.isEmpty()) {
vetoEvent(event);
}
// Isolate the photon using energy flow in an inner (R < 0.2) and a hollow outer (0.2 < R < 0.4) cone
const double egamma = photon.E();
// Energy inside R = 0.2
double econe_02 = 0.0;
// Energy between R = [0.2, 0.4]
double econe_02_04 = 0.0;
foreach (const Particle& p, fs.particles()) {
const double dr = deltaR(photon.pseudorapidity(), photon.azimuthalAngle(),
p.momentum().pseudorapidity(), p.momentum().azimuthalAngle());
if (dr < 0.2) {
econe_02 += p.momentum().E();
} else if (dr < 0.4) {
econe_02_04 += p.momentum().E();
}
}
// Veto if outer hollow cone contains more than 10% of the energy of the inner cone
// or if the non-photon energy in the inner cone exceeds 5% of the photon energy.
if (econe_02_04/econe_02 > 0.1 || (econe_02-egamma)/egamma > 0.05) {
getLog() << Log::DEBUG << "Vetoing event because photon is insufficiently isolated" << endl;
vetoEvent(event);
}
// Veto if leading jet is outside plotted rapidity regions
const double eta_gamma = fabs(photon.pseudorapidity());
if (eta_gamma > 0.9) {
getLog() << Log::DEBUG << "Leading photon falls outside acceptance range; "
<< "|eta_gamma| = " << eta_gamma << endl;
vetoEvent(event);
}
// Fill histo
const double weight = event.weight();
_h_pTgamma->fill(photon.pT(), weight);
}
// Finalize
void D0_2006_S6438750::finalize() {
/// @todo Generator cross-section from Pythia gives ~7500, vs. expected 2988!
//normalize(_h_pTgamma, 2988.4869);
const double lumi_gen = sumOfWeights()/crossSection();
// Divide by effective lumi, plus rapidity bin width of 1.8
scale(_h_pTgamma, 1/lumi_gen * 1/1.8);
}
}
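Written out, the two-cone isolation in analyze() keeps the photon only if

\[ \frac{E(0.2 < \Delta R < 0.4)}{E(\Delta R < 0.2)} \le 0.1 \qquad\text{and}\qquad \frac{E(\Delta R < 0.2) - E_\gamma}{E_\gamma} \le 0.05, \]

i.e. the hollow outer cone carries at most 10% of the inner-cone energy, and the non-photon energy inside the inner cone is at most 5% of the photon energy.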
diff --git a/src/Analyses/D0_2008_S7662670.cc b/src/Analyses/D0_2008_S7662670.cc
--- a/src/Analyses/D0_2008_S7662670.cc
+++ b/src/Analyses/D0_2008_S7662670.cc
@@ -1,109 +1,109 @@
// -*- C++ -*-
#include "Rivet/Analyses/D0_2008_S7662670.hh"
#include "Rivet/Tools/Logging.hh"
#include "Rivet/Projections/FinalState.hh"
#include "Rivet/Projections/LeadingParticlesFinalState.hh"
#include "Rivet/Projections/VetoedFinalState.hh"
#include "Rivet/Projections/FastJets.hh"
#include "Rivet/RivetAIDA.hh"
namespace Rivet {
D0_2008_S7662670::D0_2008_S7662670()
{
setBeams(PROTON, ANTIPROTON);
setNeedsCrossSection(true);
//full final state
FinalState fs(-5.0, 5.0);
addProjection(fs, "FS");
FastJets jetpro(fs, FastJets::D0ILCONE, 0.7);
addProjection(jetpro, "Jets");
}
// Book histograms
void D0_2008_S7662670::init()
{
const string basetitle = "Inclusive jet $p_\\perp$, ";
const string xlabel = "Jet $p_\\perp$ / GeV";
- const string ylabel = "$1/\\sigma \\, \\d{\\sigma}/\\d{p_\\perp}$";
+ const string ylabel = "$1/\\sigma \\, \\mathrm{d}{\\sigma}/\\mathrm{d}{p_\\perp}$";
_h_dsigdptdy_y00_04 =
bookHistogram1D(1, 1, 1, basetitle + "$0.0 < |y| < 0.4$", xlabel, ylabel);
_h_dsigdptdy_y04_08 =
bookHistogram1D(2, 1, 1, basetitle + "$0.4 < |y| < 0.8$", xlabel, ylabel);
_h_dsigdptdy_y08_12 =
bookHistogram1D(3, 1, 1, basetitle + "$0.8 < |y| < 1.2$", xlabel, ylabel);
_h_dsigdptdy_y12_16 =
bookHistogram1D(4, 1, 1, basetitle + "$1.2 < |y| < 1.6$", xlabel, ylabel);
_h_dsigdptdy_y16_20 =
bookHistogram1D(5, 1, 1, basetitle + "$1.6 < |y| < 2.0$", xlabel, ylabel);
_h_dsigdptdy_y20_24 =
bookHistogram1D(6, 1, 1, basetitle + "$2.0 < |y| < 2.4$", xlabel, ylabel);
}
// Do the analysis
void D0_2008_S7662670::analyze(const Event & event) {
const double weight = event.weight();
// Skip if the event is empty
const FinalState& fs = applyProjection<FinalState>(event, "FS");
if (fs.isEmpty()) {
getLog() << Log::DEBUG << "Empty event!" << endl;
vetoEvent(event);
}
// Find the jets
const JetAlg& jetpro = applyProjection<JetAlg>(event, "Jets");
// If there are no jets, skip the event
if (jetpro.jets().size() == 0) {
getLog() << Log::DEBUG << "No jets found" << endl;
vetoEvent(event);
}
// Fill histo for each jet
foreach (const Jet& j, jetpro.jets()) {
const double pt = j.momentum().pT();
const double y = fabs(j.momentum().rapidity());
if (pt/GeV > 50) {
getLog() << Log::TRACE << "Filling histos: pT = " << pt/GeV
<< ", |y| = " << y << endl;
if (y < 0.4) {
_h_dsigdptdy_y00_04->fill(pt/GeV, weight);
} else if (y < 0.8) {
_h_dsigdptdy_y04_08->fill(pt/GeV, weight);
} else if (y < 1.2) {
_h_dsigdptdy_y08_12->fill(pt/GeV, weight);
} else if (y < 1.6) {
_h_dsigdptdy_y12_16->fill(pt/GeV, weight);
} else if (y < 2.0) {
_h_dsigdptdy_y16_20->fill(pt/GeV, weight);
} else if (y < 2.4) {
_h_dsigdptdy_y20_24->fill(pt/GeV, weight);
}
}
}
}
// Finalize
void D0_2008_S7662670::finalize() {
/// Scale by 1/L_eff, where L_eff = sum(weights) / sigma_MC
const double lumi_mc = sumOfWeights() / crossSection();
const double scalefactor = 1 / lumi_mc;
scale(_h_dsigdptdy_y00_04, scalefactor);
scale(_h_dsigdptdy_y04_08, scalefactor);
scale(_h_dsigdptdy_y08_12, scalefactor);
scale(_h_dsigdptdy_y12_16, scalefactor);
scale(_h_dsigdptdy_y16_20, scalefactor);
scale(_h_dsigdptdy_y20_24, scalefactor);
}
}
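The finalize() scaling converts weighted counts into a differential cross-section via the effective generated luminosity:

\[ L_\text{eff} = \frac{\sum_j w_j}{\sigma_\text{MC}}, \qquad h_i \;\to\; \frac{h_i}{L_\text{eff}} = h_i \cdot \frac{\sigma_\text{MC}}{\sum_j w_j}. \]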
diff --git a/src/Analyses/D0_2008_S7719523.cc b/src/Analyses/D0_2008_S7719523.cc
--- a/src/Analyses/D0_2008_S7719523.cc
+++ b/src/Analyses/D0_2008_S7719523.cc
@@ -1,173 +1,173 @@
// -*- C++ -*-
#include "Rivet/Analyses/D0_2008_S7719523.hh"
#include "Rivet/Tools/Logging.hh"
#include "Rivet/Projections/FinalState.hh"
#include "Rivet/Projections/LeadingParticlesFinalState.hh"
#include "Rivet/Projections/VetoedFinalState.hh"
#include "Rivet/Projections/FastJets.hh"
#include "Rivet/RivetAIDA.hh"
namespace Rivet {
D0_2008_S7719523::D0_2008_S7719523()
{
setBeams(PROTON, ANTIPROTON);
/// @todo Use cross-section from generator
//setNeedsCrossSection(true);
// General FS
FinalState fs(-5.0, 5.0);
addProjection(fs, "FS");
// Get leading photon
LeadingParticlesFinalState photonfs(fs, -1.0, 1.0);
photonfs.addParticleId(PHOTON);
addProjection(photonfs, "LeadingPhoton");
// FS for jets excludes the leading photon
VetoedFinalState vfs(fs);
vfs.addVetoOnThisFinalState(photonfs);
addProjection(vfs, "JetFS");
}
// Book histograms
void D0_2008_S7719523::init() {
const string xlabel = "$p_\\perp(\\gamma_\\text{lead})$ / GeV";
/// @todo Cross-section units in label
- const string ylabel = "$\\d{\\sigma}/\\d{p_\\perp(\\gamma_\\text{lead})}$";
+ const string ylabel = "$\\mathrm{d}{\\sigma}/\\mathrm{d}{p_\\perp(\\gamma_\\text{lead})}$";
const string basetitle = "Leading photon $p_\\perp$ ";
_h_central_same_cross_section =
bookHistogram1D("d01-x01-y01", basetitle + "(central jets, same-sign rapidity)", xlabel, ylabel);
_h_central_opp_cross_section =
bookHistogram1D("d02-x01-y01", basetitle + "(central jets, opp-sign rapidity)", xlabel, ylabel);
_h_forward_same_cross_section =
bookHistogram1D("d03-x01-y01", basetitle + "(forward jets, same-sign rapidity)", xlabel, ylabel);
_h_forward_opp_cross_section =
bookHistogram1D("d04-x01-y01", basetitle + "(forward jets, opp-sign rapidity)", xlabel, ylabel);
}
// Do the analysis
void D0_2008_S7719523::analyze(const Event& event) {
const double weight = event.weight();
// Get the photon
const FinalState& photonfs = applyProjection<FinalState>(event, "LeadingPhoton");
if (photonfs.particles().size() != 1) {
getLog() << Log::DEBUG << "No photon found" << endl;
vetoEvent(event);
}
const FourMomentum photon = photonfs.particles().front().momentum();
if (photon.pT()/GeV < 30) {
getLog() << Log::DEBUG << "Leading photon has pT < 30 GeV: " << photon.pT()/GeV << endl;
vetoEvent(event);
}
// Get all particles other than the leading photon
const FinalState& fs = applyProjection<FinalState>(event, "JetFS");
if (fs.isEmpty()) {
vetoEvent(event);
}
// Isolate photon by ensuring that the extra energy in a 0.2 cone around it is less than 7% of the photon's energy
const double egamma = photon.E();
double econe = 0.0;
foreach (const Particle& p, fs.particles()) {
const double dr = deltaR(photon.pseudorapidity(), photon.azimuthalAngle(),
p.momentum().pseudorapidity(), p.momentum().azimuthalAngle());
if (dr < 0.2) {
econe += p.momentum().E();
// Veto as soon as E_cone exceeds the 7% isolation threshold
if (econe/egamma > 0.07) {
getLog() << Log::DEBUG << "Vetoing event because photon is insufficiently isolated" << endl;
vetoEvent(event);
}
}
}
/// @todo Allow proj creation w/o FS as ctor arg, so that calc can be used more easily.
FastJets jetpro(fs, FastJets::D0ILCONE, 0.7); //< @todo This fs arg makes no sense!
jetpro.calc(fs.particles());
Jets isolated_jets;
foreach (const Jet& j, jetpro.jets()) {
const FourMomentum pjet = j.momentum();
const double dr = deltaR(photon.pseudorapidity(), photon.azimuthalAngle(),
pjet.pseudorapidity(), pjet.azimuthalAngle());
if (dr > 0.7 && pjet.pT()/GeV > 15) {
isolated_jets.push_back(j);
}
}
getLog() << Log::DEBUG << "Num jets after isolation and pT cuts = "
<< isolated_jets.size() << endl;
if (isolated_jets.empty()) {
getLog() << Log::DEBUG << "No jets pass cuts" << endl;
vetoEvent(event);
}
// Sort by pT and get leading jet
sort(isolated_jets.begin(), isolated_jets.end(), cmpJetsByPt);
const FourMomentum leadingJet = isolated_jets.front().momentum();
int photon_jet_sign = sign( leadingJet.rapidity() * photon.rapidity() );
// Veto if leading jet is outside plotted rapidity regions
const double abs_y1 = fabs(leadingJet.rapidity());
if (inRange(abs_y1, 0.8, 1.5) || abs_y1 > 2.5) {
getLog() << Log::DEBUG << "Leading jet falls outside acceptance range; |y1| = "
<< abs_y1 << endl;
vetoEvent(event);
}
// Fill histos
if (fabs(leadingJet.rapidity()) < 0.8) {
if (photon_jet_sign >= 1) {
_h_central_same_cross_section->fill(photon.pT(), weight);
} else {
_h_central_opp_cross_section->fill(photon.pT(), weight);
}
} else if (inRange( fabs(leadingJet.rapidity()), 1.5, 2.5)) {
if (photon_jet_sign >= 1) {
_h_forward_same_cross_section->fill(photon.pT(), weight);
} else {
_h_forward_opp_cross_section->fill(photon.pT(), weight);
}
}
}
// Finalize
void D0_2008_S7719523::finalize() {
// Cross-section ratios (6 plots)
// Central/central and forward/forward ratios
AIDA::IHistogramFactory& hf = histogramFactory();
const string dir = histoDir();
hf.divide(dir + "/d05-x01-y01", *_h_central_opp_cross_section, *_h_central_same_cross_section);
hf.divide(dir + "/d08-x01-y01", *_h_forward_opp_cross_section, *_h_forward_same_cross_section);
// Central/forward ratio combinations
/// @todo Bins don't match
// hf.divide(dir + "/d06-x01-y01", *_h_central_same_cross_section, *_h_forward_same_cross_section);
// hf.divide(dir + "/d07-x01-y01", *_h_central_opp_cross_section, *_h_forward_same_cross_section);
// hf.divide(dir + "/d09-x01-y01", *_h_central_same_cross_section, *_h_forward_opp_cross_section);
// hf.divide(dir + "/d10-x01-y01", *_h_central_opp_cross_section, *_h_forward_opp_cross_section);
/// @todo Use the generator cross-section
// Must happen *after* the divs, since otherwise the pointers are null!
//_h_total_cross_section->fill(crossSection());
normalize(_h_central_same_cross_section, 347.4);
normalize(_h_central_opp_cross_section, 281.8);
normalize(_h_forward_same_cross_section, 164.8);
normalize(_h_forward_opp_cross_section, 81.5);
}
}
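The choice among the four histograms hinges on whether the leading jet and the photon lie in the same rapidity hemisphere; a standalone sketch of that test (the helper name is ours):

// "Same-sign" when the product of rapidities is positive, matching
// the sign(y_jet * y_gamma) >= 1 branch used in analyze() above.
bool sameSignRapidity(double yJet, double yGamma) {
  return yJet * yGamma > 0;
}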
diff --git a/src/Analyses/D0_2008_S7837160.cc b/src/Analyses/D0_2008_S7837160.cc
--- a/src/Analyses/D0_2008_S7837160.cc
+++ b/src/Analyses/D0_2008_S7837160.cc
@@ -1,172 +1,172 @@
// -*- C++ -*-
#include "Rivet/Analyses/D0_2008_S7837160.hh"
#include "Rivet/Tools/Logging.hh"
#include "Rivet/Tools/ParticleIDMethods.hh"
#include "Rivet/Projections/FinalState.hh"
#include "Rivet/Projections/LeadingParticlesFinalState.hh"
#include "Rivet/Projections/VetoedFinalState.hh"
#include "Rivet/RivetAIDA.hh"
namespace Rivet {
D0_2008_S7837160::D0_2008_S7837160()
{
// Run II W charge asymmetry
setBeams(PROTON, ANTIPROTON);
// Leading electrons
FinalState fs(-5.0, 5.0);
LeadingParticlesFinalState efs(fs);
efs.addParticleId(ELECTRON).addParticleId(POSITRON);
addProjection(efs, "WDecayE");
LeadingParticlesFinalState nufs(fs);
nufs.addParticleId(NU_E).addParticleId(NU_EBAR);
addProjection(nufs, "WDecayNu");
// Final state w/o electron
VetoedFinalState vfs(fs);
/// @todo A better way would be to have an "only photons" FS. Add this projection.
/// @todo Make it easier to exclude all neutrinos
vfs.addVetoOnThisFinalState(efs);
vfs.addVetoPairId(NU_E).addVetoPairId(NU_MU).addVetoPairId(NU_TAU);
addProjection(vfs, "NoElectronFS");
}
// Book histograms
void D0_2008_S7837160::init() {
_h_dsigplus_deta_25_35 = bookHistogram1D("dsigplus_deta_25_35", "Temp1", 10, 0.0, 3.2);
_h_dsigminus_deta_25_35 = bookHistogram1D("dsigminus_deta_25_35", "Temp2", 10, 0.0, 3.2);
_h_dsigplus_deta_35 = bookHistogram1D("dsigplus_deta_35", "Temp3", 10, 0.0, 3.2);
_h_dsigminus_deta_35 = bookHistogram1D("dsigminus_deta_35", "Temp4", 10, 0.0, 3.2);
_h_dsigplus_deta_25 = bookHistogram1D("dsigplus_deta_25", "Temp5", 10, 0.0, 3.2);
_h_dsigminus_deta_25 = bookHistogram1D("dsigminus_deta_25", "Temp6", 10, 0.0, 3.2);
}
// Do the analysis
void D0_2008_S7837160::analyze(const Event & event) {
const double weight = event.weight();
// Find the W decay products
const FinalState& efs = applyProjection<FinalState>(event, "WDecayE");
const FinalState& nufs = applyProjection<FinalState>(event, "WDecayNu");
// If there is no e/nu_e pair in the FinalState, skip the event
if (efs.particles().size() < 1 || nufs.particles().size() < 1) {
getLog() << Log::DEBUG << "No e/nu_e pair found " << endl;
vetoEvent(event);
}
// Identify leading nu and electron
ParticleVector es = efs.particles();
sort(es.begin(), es.end(), cmpParticleByEt);
Particle leading_e = es[0];
//
ParticleVector nus = nufs.particles();
sort(nus.begin(), nus.end(), cmpParticleByEt);
Particle leading_nu = nus[0];
// Require that the neutrino has Et > 25 GeV
const FourMomentum nu = leading_nu.momentum();
if (nu.Et()/GeV < 25) {
getLog() << Log::DEBUG << "Neutrino fails Et cut" << endl;
vetoEvent(event);
}
// Get "raw" electron 4-momentum and add back in photons that could have radiated from the electron
FourMomentum e = leading_e.momentum();
const ParticleVector allparts = applyProjection<FinalState>(event, "NoElectronFS").particles();
const double HALO_RADIUS = 0.2;
foreach (const Particle& p, allparts) {
if (p.pdgId() == PHOTON) {
const double pho_eta = p.momentum().pseudorapidity();
const double pho_phi = p.momentum().azimuthalAngle();
if (deltaR(e.pseudorapidity(), e.azimuthalAngle(), pho_eta, pho_phi) < HALO_RADIUS) {
e += p.momentum();
}
}
}
// Require that the electron has Et > 25 GeV
if (e.Et()/GeV < 25) {
getLog() << Log::DEBUG << "Electron fails Et cut" << endl;
vetoEvent(event);
}
const double eta_e = fabs(e.pseudorapidity());
const double et_e = e.Et();
const int chg_e = PID::threeCharge(leading_e.pdgId());
if (et_e/GeV < 35) {
// 25 < ET < 35
if (chg_e < 0) {
_h_dsigminus_deta_25_35->fill(eta_e, weight);
} else {
_h_dsigplus_deta_25_35->fill(eta_e, weight);
}
} else {
// ET > 35
if (chg_e < 0) {
_h_dsigminus_deta_35->fill(eta_e, weight);
} else {
_h_dsigplus_deta_35->fill(eta_e, weight);
}
}
// Inclusive: ET > 25
if (chg_e < 0) {
_h_dsigminus_deta_25->fill(eta_e, weight);
} else {
_h_dsigplus_deta_25->fill(eta_e, weight);
}
}
// Finalize
void D0_2008_S7837160::finalize() {
// Construct asymmetry: (dsig+/deta - dsig-/deta) / (dsig+/deta + dsig-/deta) for each Et region
AIDA::IHistogramFactory& hf = histogramFactory();
const string basetitle = "W charge asymmetry for ";
const string xlabel = "$|\\eta|$ of leading electron";
const string ylabel = "A = "
- "$(\\frac{\\d{\\sigma^+}}{\\d{|\\eta|}} - \\frac{\\d{\\sigma^-}}{\\d{|\\eta|}}) / "
- "(\\frac{\\d{\\sigma^+}}{\\d{|\\eta|}} + \\frac{\\d{\\sigma^-}}{\\d{|\\eta|}})$";
+ "$(\\frac{\\mathrm{d}{\\sigma^+}}{\\mathrm{d}{|\\eta|}} - \\frac{\\mathrm{d}{\\sigma^-}}{\\mathrm{d}{|\\eta|}}) / "
+ "(\\frac{\\mathrm{d}{\\sigma^+}}{\\mathrm{d}{|\\eta|}} + \\frac{\\mathrm{d}{\\sigma^-}}{\\mathrm{d}{|\\eta|}})$";
IHistogram1D* num25_35 = hf.subtract("/num25_35", *_h_dsigplus_deta_25_35, *_h_dsigminus_deta_25_35);
IHistogram1D* denom25_35 = hf.add("/denom25_35", *_h_dsigplus_deta_25_35, *_h_dsigminus_deta_25_35);
assert(num25_35 && denom25_35);
IDataPointSet* tot25_35 = hf.divide(histoDir() + "/d01-x01-y01", *num25_35, *denom25_35);
tot25_35->setTitle(basetitle + "$25 < E_\\perp < 35$ GeV");
tot25_35->setXTitle(xlabel);
tot25_35->setYTitle(ylabel);
hf.destroy(num25_35);
hf.destroy(denom25_35);
//
IHistogram1D* num35 = hf.subtract("/num35", *_h_dsigplus_deta_35, *_h_dsigminus_deta_35);
IHistogram1D* denom35 = hf.add("/denom35", *_h_dsigplus_deta_35, *_h_dsigminus_deta_35);
assert(num35 && denom35);
IDataPointSet* tot35 = hf.divide(histoDir() + "/d02-x01-y01", *num35, *denom35);
tot35->setTitle(basetitle + "$E_\\perp > 35$ GeV");
tot35->setXTitle(xlabel);
tot35->setYTitle(ylabel);
hf.destroy(num35);
hf.destroy(denom35);
//
IHistogram1D* num25 = hf.subtract("/num25", *_h_dsigplus_deta_25, *_h_dsigminus_deta_25);
IHistogram1D* denom25 = hf.add("/denom25", *_h_dsigplus_deta_25, *_h_dsigminus_deta_25);
assert(num25 && denom25);
IDataPointSet* tot25 = hf.divide(histoDir() + "/d03-x01-y01", *num25, *denom25);
tot25->setTitle(basetitle + "$E_\\perp > 35$ GeV");
tot25->setXTitle(xlabel);
tot25->setYTitle(ylabel);
hf.destroy(num25);
hf.destroy(denom25);
}
}
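For each \(E_\perp\) region, the subtract/add/divide sequence above constructs the charge asymmetry

\[ A(|\eta|) = \frac{\mathrm{d}\sigma^+/\mathrm{d}|\eta| - \mathrm{d}\sigma^-/\mathrm{d}|\eta|}{\mathrm{d}\sigma^+/\mathrm{d}|\eta| + \mathrm{d}\sigma^-/\mathrm{d}|\eta|}, \]

matching the y-axis label defined in the code.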
diff --git a/src/Analyses/ExampleAnalysis.cc b/src/Analyses/ExampleAnalysis.cc
--- a/src/Analyses/ExampleAnalysis.cc
+++ b/src/Analyses/ExampleAnalysis.cc
@@ -1,100 +1,100 @@
// -*- C++ -*-
#include "Rivet/Tools/Logging.hh"
#include "Rivet/RivetAIDA.hh"
#include "Rivet/Analyses/ExampleAnalysis.hh"
#include "Rivet/Projections/FinalState.hh"
#include "Rivet/Projections/ChargedFinalState.hh"
#include "Rivet/Projections/Multiplicity.hh"
#include "Rivet/Projections/Thrust.hh"
#include "Rivet/Projections/Sphericity.hh"
namespace Rivet {
// Constructor
ExampleAnalysis::ExampleAnalysis() {
const FinalState cnfs;
const ChargedFinalState cfs;
addProjection(cnfs, "FS");
addProjection(cfs, "CFS");
addProjection(Multiplicity(cfs), "CMult");
addProjection(Multiplicity(cnfs), "CNMult");
addProjection(Thrust(cfs), "Thrust");
addProjection(Sphericity(cfs), "Sphericity");
}
// Book histograms
void ExampleAnalysis::init() {
// Using histogram auto-booking is preferable if there are comparison datasets in HepData.
// Since this is just a demo analysis, there is no associated paper!
_histTot = bookHistogram1D("TotalMult", "Total multiplicity",
- "$N_\\text{tot}$", "$1/\\sigma \\, \\d{\\sigma}/\\d{N_\\text{tot}}$",
+ "$N_\\text{tot}$", "$1/\\sigma \\, \\mathrm{d}{\\sigma}/\\mathrm{d}{N_\\text{tot}}$",
100, -0.5, 99.5);
_histChTot = bookHistogram1D("TotalChMult", "Total charged multiplicity",
- "$N_\\text{ch}$", "$1/\\sigma \\, \\d{\\sigma}/\\d{N_\\text{ch}}$",
+ "$N_\\text{ch}$", "$1/\\sigma \\, \\mathrm{d}{\\sigma}/\\mathrm{d}{N_\\text{ch}}$",
50, -1.0, 99.0);
_histHadrTot = bookHistogram1D("HadrTotalMult", "Total hadronic multiplicity",
- "$N_\\text{H}$", "$1/\\sigma \\, \\d{\\sigma}/\\d{N_\\text{H}}$",
+ "$N_\\text{H}$", "$1/\\sigma \\, \\mathrm{d}{\\sigma}/\\mathrm{d}{N_\\text{H}}$",
100, -0.5, 99.5);
_histHadrChTot = bookHistogram1D("HadrTotalChMult", "Total hadronic charged multiplicity",
- "$N_\\text{Hch}$", "$1/\\sigma \\, \\d{\\sigma}/\\d{N_\\text{Hch}}$",
+ "$N_\\text{Hch}$", "$1/\\sigma \\, \\mathrm{d}{\\sigma}/\\mathrm{d}{N_\\text{Hch}}$",
50, -1.0, 99.0);
double edges[11] = { 0.5, 0.6, 0.7, 0.80, 0.85, 0.9, 0.92, 0.94, 0.96, 0.98, 1.0 };
vector<double> vedges(edges, edges+11);
_histThrust = bookHistogram1D("Thrust", "Thrust",
- "$T$", "$1/\\sigma \\, \\d{\\sigma}/\\d{T}$", vedges);
+ "$T$", "$1/\\sigma \\, \\mathrm{d}{\\sigma}/\\mathrm{d}{T}$", vedges);
_histMajor = bookHistogram1D("Major", "Thrust major",
- "$M$", "$1/\\sigma \\, \\d{\\sigma}/\\d{M}$", 10, 0.0, 0.6);
+ "$M$", "$1/\\sigma \\, \\mathrm{d}{\\sigma}/\\mathrm{d}{M}$", 10, 0.0, 0.6);
_histSphericity = bookHistogram1D("Sphericity", "Sphericity",
- "$S$", "$1/\\sigma \\, \\d{\\sigma}/\\d{S}$", 10, 0.0, 0.8);
+ "$S$", "$1/\\sigma \\, \\mathrm{d}{\\sigma}/\\mathrm{d}{S}$", 10, 0.0, 0.8);
_histAplanarity = bookHistogram1D("Aplanarity", "Aplanarity",
- "$A$", "$1/\\sigma \\, \\d{\\sigma}/\\d{A}$", 10, 0.0, 0.3);
+ "$A$", "$1/\\sigma \\, \\mathrm{d}{\\sigma}/\\mathrm{d}{A}$", 10, 0.0, 0.3);
}
// Do the analysis
void ExampleAnalysis::analyze(const Event& event) {
// Analyse and print some info
const Multiplicity& cm = applyProjection<Multiplicity>(event, "CMult");
const Multiplicity& cnm = applyProjection<Multiplicity>(event, "CNMult");
getLog() << Log::DEBUG << "Total multiplicity = " << cnm.totalMultiplicity() << endl;
getLog() << Log::DEBUG << "Total charged multiplicity = " << cm.totalMultiplicity() << endl;
getLog() << Log::DEBUG << "Hadron multiplicity = " << cnm.hadronMultiplicity() << endl;
getLog() << Log::DEBUG << "Hadron charged multiplicity = " << cm.hadronMultiplicity() << endl;
const Thrust& t = applyProjection<Thrust>(event, "Thrust");
getLog() << Log::DEBUG << "Thrust = " << t.thrust() << endl;
const Sphericity& s = applyProjection<Sphericity>(event, "Sphericity");
getLog() << Log::DEBUG << "Sphericity = " << s.sphericity() << endl;
getLog() << Log::DEBUG << "Aplanarity = " << s.aplanarity() << endl;
// Fill histograms
const double weight = event.weight();
_histTot->fill(cnm.totalMultiplicity(), weight);
_histChTot->fill(cm.totalMultiplicity(), weight);
_histHadrTot->fill(cnm.hadronMultiplicity(), weight);
_histHadrChTot->fill(cm.hadronMultiplicity(), weight);
_histThrust->fill(t.thrust(), weight);
_histMajor->fill(t.thrustMajor(), weight);
_histSphericity->fill(s.sphericity(), weight);
_histAplanarity->fill(s.aplanarity(), weight);
}
// Finalize
void ExampleAnalysis::finalize() {
normalize(_histTot);
normalize(_histChTot);
normalize(_histHadrTot);
normalize(_histHadrChTot);
normalize(_histThrust);
normalize(_histMajor);
normalize(_histSphericity);
normalize(_histAplanarity);
}
}
diff --git a/src/Analyses/H1_1994_S2919893.cc b/src/Analyses/H1_1994_S2919893.cc
--- a/src/Analyses/H1_1994_S2919893.cc
+++ b/src/Analyses/H1_1994_S2919893.cc
@@ -1,228 +1,228 @@
// -*- C++ -*-
#include "Rivet/Rivet.hh"
#include "Rivet/RivetAIDA.hh"
#include "Rivet/Math/Constants.hh"
#include "Rivet/Analyses/H1_1994_S2919893.hh"
#include "Rivet/Tools/ParticleIDMethods.hh"
#include "Rivet/Projections/FinalState.hh"
#include "Rivet/Projections/DISKinematics.hh"
namespace Rivet {
// Constructor
H1_1994_S2919893::H1_1994_S2919893() {
setBeams(ELECTRON, PROTON);
addProjection(DISLepton(), "Lepton");
addProjection(DISKinematics(), "Kinematics");
addProjection(FinalState(), "FS");
}
void H1_1994_S2919893::analyze(const Event& event) {
const FinalState& fs = applyProjection<FinalState>(event, "FS");
const DISKinematics& dk = applyProjection<DISKinematics>(event, "Kinematics");
const DISLepton& dl = applyProjection<DISLepton>(event,"Lepton");
// Get the DIS kinematics
double x = dk.x();
double w2 = dk.W2();
double w = sqrt(w2);
// Momentum of the scattered lepton
FourMomentum leptonMom = dl.out().momentum();
double ptel = pT(leptonMom);
double enel = leptonMom.E();
double thel = leptonMom.angle(dk.beamHadron().momentum())/degree;
// Extract the particles other than the lepton
ParticleVector particles;
particles.reserve(fs.particles().size());
const GenParticle& dislepGP = dl.out().genParticle();
foreach (const Particle& p, fs.particles()) {
const GenParticle& loopGP = p.genParticle();
if (&loopGP == &dislepGP) continue;
particles.push_back(p);
}
// Cut on the forward energy
double efwd = 0.0;
foreach (const Particle& p, particles) {
double th = p.momentum().angle(dk.beamHadron().momentum())/degree;
if (th > 4.4 && th < 15.) {
efwd += p.momentum().E();
}
}
// Apply the cuts
// Lepton energy and angle, w2 and forward energy
getLog() << Log::DEBUG << "enel/GeV = " << enel/GeV << ", thel = " << thel
<< ", w2 = " << w2 << ", efwd/GeV = " << efwd/GeV << endl;
bool cut = enel/GeV > 14. && thel > 157. && thel < 172.5 && w2 >= 3000. && efwd/GeV > 0.5;
if (!cut) vetoEvent(event);
// Weight of the event
const double weight = event.weight();
// weights for x<1e-3 and x>1e-3
if (x < 1e-3) {
_wEnergy.first += weight;
} else {
_wEnergy.second += weight;
}
// Boost to hadronic CM
const LorentzTransform hcmboost = dk.boostHCM();
// Loop over the particles
long ncharged(0);
for (size_t ip1 = 0; ip1 < particles.size(); ++ip1) {
const Particle& p = particles[ip1];
double th = p.momentum().angle(dk.beamHadron().momentum()) / degree;
// Boost momentum from the lab to the hadronic CM frame
const FourMomentum hcmMom = hcmboost.transform(p.momentum());
// Angular cut
if (th <= 4.4) continue;
// Energy flow histogram
double et = fabs(Et(hcmMom));
double eta = -hcmMom.pseudorapidity();
if (x < 1e-3) {
_histEnergyFlowLowX ->fill(eta, et*weight);
} else {
_histEnergyFlowHighX->fill(eta, et*weight);
}
if (PID::threeCharge(p.pdgId()) != 0) {
/// @todo Use units in w comparisons... what are the units?
if (w > 50. && w <= 200.) {
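// Feynman-x: longitudinal momentum fraction in the hadronic CM, xF = -2 pz / W with this code's sign convention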
double xf = -2 * hcmMom.z() / w;
double pt2 = pT2(hcmMom);
if (w > 50. && w <= 100.) {
_histSpectraW77 ->fill(xf, weight);
} else if (w > 100. && w <= 150.) {
_histSpectraW122->fill(xf, weight);
} else if (w > 150. && w <= 200.) {
_histSpectraW169->fill(xf, weight);
}
_histSpectraW117->fill(xf, weight);
/// @todo Is this profile meant to be filled with 2 weight factors?
_histPT2->fill(xf, pt2*weight/GeV2, weight);
++ncharged;
}
}
// Energy-energy correlation
if (th <= 8.) continue;
double phi1 = p.momentum().azimuthalAngle(ZERO_2PI);
double eta1 = p.momentum().pseudorapidity();
double et1 = fabs(Et(p.momentum()));
for (size_t ip2 = ip1+1; ip2 < particles.size(); ++ip2) {
const Particle& p2 = particles[ip2];
//double th2 = beamAngle(p2.momentum(), order);
double th2 = p2.momentum().angle(dk.beamHadron().momentum()) / degree;
if (th2 <= 8.) continue;
double phi2 = p2.momentum().azimuthalAngle(ZERO_2PI);
/// @todo Use angle function
double deltaphi = phi1 - phi2;
if (fabs(deltaphi) > PI)
deltaphi = fabs(fabs(deltaphi) - TWOPI);
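// EEC variable: omega = sqrt(d_eta^2 + d_phi^2), filled with weight Et1*Et2 / pT(lepton)^2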
double eta2 = p2.momentum().pseudorapidity();
double omega = sqrt(sqr(eta1-eta2) + sqr(deltaphi));
double et2 = fabs(Et(p2.momentum()));
double wt = et1*et2 / sqr(ptel) * weight;
if (x < 1e-3) {
_histEECLowX ->fill(omega, wt);
} else {
_histEECHighX->fill(omega, wt);
}
}
}
// Factors for normalization
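// Accumulate sum(n_ch * w) and sum(w) per W range; their ratio in finalize() gives the
// mean charged multiplicity used to normalise the xF spectra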
if (w > 50. && w <= 200.) {
if (w <= 100.) {
_w77.first += ncharged*weight;
_w77.second += weight;
} else if (w <= 150.) {
_w122.first += ncharged*weight;
_w122.second += weight;
} else {
_w169.first += ncharged*weight;
_w169.second += weight;
}
_w117.first += ncharged*weight;
_w117.second += weight;
}
}
void H1_1994_S2919893::init() {
_w77 = make_pair(0.0, 0.0);
_w122 = make_pair(0.0, 0.0);
_w169 = make_pair(0.0, 0.0);
_w117 = make_pair(0.0, 0.0);
_wEnergy = make_pair(0.0, 0.0);
string xlabel = "$\\eta$";
/// @todo What is "N"?
- string ylabel = "$1/N \\, \\d{E_\\perp}/\\d{\\eta}$ / GeV";
+ string ylabel = "$1/N \\, \\mathrm{d}{E_\\perp}/\\mathrm{d}{\\eta}$ / GeV";
string basetitle = "Transverse energy flow as a function of rapidity, ";
_histEnergyFlowLowX =
bookHistogram1D(1, 1, 1, basetitle + "$x < 10^{-3}$", xlabel, ylabel);
_histEnergyFlowHighX =
bookHistogram1D(1, 1, 2, basetitle + "$x > 10^{-3}$", xlabel, ylabel);
xlabel = "$\\omega$";
- ylabel = "$\\d{\\text{EEC}_\\perp}/\\d{\\omega}$";
+ ylabel = "$\\mathrm{d}{\\text{EEC}_\\perp}/\\mathrm{d}{\\omega}$";
basetitle = "Transverse energy--energy correlation for ";
_histEECLowX =
bookHistogram1D(2, 1, 1, basetitle + "$x < 10^{-3}$", xlabel, ylabel);
_histEECHighX =
bookHistogram1D(2, 1, 2, basetitle + "$x > 10^{-3}$", xlabel, ylabel);
xlabel = "$x_L$";
/// @todo Add cross-section units to label
- ylabel = "$1/\\sigma \\, \\d{\\sigma}/\\d{x_L}$";
+ ylabel = "$1/\\sigma \\, \\mathrm{d}{\\sigma}/\\mathrm{d}{x_L}$";
basetitle = "Charged particle spectra for ";
_histSpectraW77 =
bookHistogram1D(3, 1, 1, basetitle + "$50 < W < 100$", xlabel, ylabel);
_histSpectraW122 =
bookHistogram1D(3, 1, 2, basetitle + "$100 < W < 150$", xlabel, ylabel);
_histSpectraW169 =
bookHistogram1D(3, 1, 3, basetitle + "$150 < W < 200$", xlabel, ylabel);
_histSpectraW117 =
bookHistogram1D(3, 1, 4, basetitle + "all $W$", xlabel, ylabel);
_histPT2 =
bookProfile1D (4, 1, 1, "$\\langle p_\\perp^2 \\rangle$ as a function of $x_L$",
"$x_L$", "$\\langle p_\\perp^2 \\rangle$");
}
// Finalize
void H1_1994_S2919893::finalize() {
// Normalize inclusive single particle distributions to the average number
// of charged particles per event.
double avgNumParts = _w77.first/_w77.second;
normalize(_histSpectraW77, avgNumParts);
avgNumParts = _w122.first/_w122.second;
normalize(_histSpectraW122, avgNumParts);
avgNumParts = _w169.first/_w169.second;
normalize(_histSpectraW169, avgNumParts);
avgNumParts = _w117.first/_w117.second;
normalize(_histSpectraW117, avgNumParts);
scale(_histEnergyFlowLowX , 1./_wEnergy.first );
scale(_histEnergyFlowHighX, 1./_wEnergy.second);
scale(_histEECLowX , 1./_wEnergy.first );
scale(_histEECHighX, 1./_wEnergy.second);
}
}
diff --git a/src/Analyses/H1_2000_S4129130.cc b/src/Analyses/H1_2000_S4129130.cc
--- a/src/Analyses/H1_2000_S4129130.cc
+++ b/src/Analyses/H1_2000_S4129130.cc
@@ -1,306 +1,306 @@
// -*- C++ -*-
#include "Rivet/Rivet.hh"
#include "Rivet/RivetAIDA.hh"
#include "Rivet/Analyses/H1_2000_S4129130.hh"
#include "Rivet/Math/Constants.hh"
#include "Rivet/Tools/ParticleIDMethods.hh"
#include "Rivet/Projections/FinalState.hh"
#include "Rivet/Projections/DISKinematics.hh"
namespace Rivet {
// Constructor
H1_2000_S4129130::H1_2000_S4129130() {
setBeams(ELECTRON, PROTON);
addProjection(DISLepton(), "Lepton");
addProjection(DISKinematics(), "Kinematics");
addProjection(FinalState(), "FS");
}
void H1_2000_S4129130::analyze(const Event& event) {
// Get the projections
const FinalState & fs = applyProjection<FinalState>(event, "FS");
const DISKinematics& dk = applyProjection<DISKinematics>(event, "Kinematics");
const DISLepton & dl = applyProjection<DISLepton>(event,"Lepton");
// Get the DIS kinematics
double q2 = dk.Q2();
double x = dk.x();
double y = dk.y();
double w2 = dk.W2();
// Momentum of the scattered lepton
FourMomentum leptonMom = dl.out().momentum();
// Energy and polar angle of the scattered lepton
double enel = leptonMom.E();
double thel = 180. - leptonMom.angle(dl.in().momentum())/degree;
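// (i.e. the polar angle w.r.t. the proton beam, since the beams are back-to-back)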
// Extract the particles other than the lepton
ParticleVector particles;
particles.reserve(fs.particles().size());
const GenParticle& dislepGP = dl.out().genParticle();
for (ParticleVector::const_iterator p = fs.particles().begin();
p != fs.particles().end(); ++p) {
const GenParticle& loopGP = p->genParticle();
if (&loopGP == &dislepGP) continue;
particles.push_back(*p);
}
// Cut on the forward energy
double efwd = 0.;
foreach (const Particle& p, particles) {
double th = 180. - p.momentum().angle(dl.in().momentum())/degree;
// double th = beamAngle(p.momentum(), order);
if (th > 4.4 && th < 15.0) efwd += p.momentum().E();
}
// There are four possible selections for events
bool evcut[4];
// Low Q2 selection a
/// @todo Units and inRange
evcut[0] = enel/GeV > 12. && w2 >= 4400. && efwd/GeV > 0.5 &&
thel > 157. && thel < 176.0;
// Low Q2 selection b
/// @todo Units and inRange
evcut[1] = enel/GeV > 12. && y > 0.3 && y < 0.5;
// High Q2 selection a
/// @todo Units and inRange
evcut[2] = thel > 12. && thel < 150.0 && y > 0.05 && y < 0.6 &&
w2 >= 4400. && efwd/GeV > 0.5;
// High Q2 selection b
/// @todo Units and inRange
evcut[3] = thel > 12. && thel < 150.0 && y > 0.05 && y < 0.6 &&
w2 > 27110. && w2 < 45182.;
// Veto if fails all cuts
if (! (evcut[0] || evcut[1] || evcut[2] || evcut[3]) ) {
vetoEvent(event);
}
// Find the bins
int bin[4] = {-1,-1,-1,-1};
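// A bin index stays -1 if the event misses the (x, Q2) grid of its selection;
// the corresponding evcut flag is then cleared again below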
// For the low Q2 selection a)
/// @todo Units
if (q2 > 2.5 && q2 <= 5.) {
if (x > 0.00005 && x <= 0.0001 ) bin[0] = 0;
if (x > 0.0001 && x <= 0.0002 ) bin[0] = 1;
if (x > 0.0002 && x <= 0.00035) bin[0] = 2;
if (x > 0.00035 && x <= 0.0010 ) bin[0] = 3;
}
/// @todo Units
else if (q2 > 5. && q2 <= 10.) {
if (x > 0.0001 && x <= 0.0002 ) bin[0] = 4;
if (x > 0.0002 && x <= 0.00035) bin[0] = 5;
if (x > 0.00035 && x <= 0.0007 ) bin[0] = 6;
if (x > 0.0007 && x <= 0.0020 ) bin[0] = 7;
}
/// @todo Units
else if (q2 > 10. && q2 <= 20.) {
if (x > 0.0002 && x <= 0.0005) bin[0] = 8;
if (x > 0.0005 && x <= 0.0008) bin[0] = 9;
if (x > 0.0008 && x <= 0.0015) bin[0] = 10;
if (x > 0.0015 && x <= 0.040 ) bin[0] = 11;
}
/// @todo Units
else if (q2 > 20. && q2 <= 50.) {
if (x > 0.0005 && x <= 0.0014) bin[0] = 12;
if (x > 0.0014 && x <= 0.0030) bin[0] = 13;
if (x > 0.0030 && x <= 0.0100) bin[0] = 14;
}
/// @todo Units
else if (q2 > 50. && q2 <= 100.) {
if (x > 0.0008 && x <= 0.0030) bin[0] = 15;
if (x > 0.0030 && x <= 0.0200) bin[0] = 16;
}
/// @todo Huh?
evcut[0] &= bin[0] >= 0;
// For the low Q2 selection b)
if (q2 > 2.5 && q2 <= 5. ) bin[1] = 0;
if (q2 > 5. && q2 <= 10. ) bin[1] = 1;
if (q2 > 10. && q2 <= 20. ) bin[1] = 2;
if (q2 > 20. && q2 <= 50. ) bin[1] = 3;
if (q2 > 50. && q2 <= 100.) bin[1] = 4;
evcut[1] &= bin[1] >= 0;
// for the high Q2 selection a)
/// @todo Units
if (q2 > 100. && q2 <= 400.) {
if (x > 0.00251 && x <= 0.00631) bin[2] = 0;
if (x > 0.00631 && x <= 0.0158 ) bin[2] = 1;
if (x > 0.0158 && x <= 0.0398 ) bin[2] = 2;
}
/// @todo Units
else if (q2 > 400 && q2 <= 1100.) {
if (x > 0.00631 && x <= 0.0158 ) bin[2] = 3;
if (x > 0.0158 && x <= 0.0398 ) bin[2] = 4;
if (x > 0.0398 && x <= 1. ) bin[2] = 5;
}
/// @todo Units
else if (q2 > 1100. && q2 <= 100000.) {
if (x > 0. && x <= 1.) bin[2] = 6;
}
evcut[2] &= bin[2] >= 0;
// for the high Q2 selection b)
/// @todo Units
if (q2 > 100. && q2 <= 220.) bin[3] = 0;
else if (q2 > 220. && q2 <= 400.) bin[3] = 1;
else if (q2 > 400. ) bin[3] = 2;
evcut[3] &= bin[3] >= 0;
// Veto if fails all cuts after bin selection
if (! (evcut[0] || evcut[1] || evcut[2] || evcut[3]) ) {
vetoEvent(event);
}
// Increment the count for normalisation
const double weight = event.weight();
if (evcut[0]) _weightETLowQa [bin[0]] += weight;
if (evcut[1]) _weightETLowQb [bin[1]] += weight;
if (evcut[2]) _weightETHighQa[bin[2]] += weight;
if (evcut[3]) _weightETHighQb[bin[3]] += weight;
// Boost to hadronicCM
const LorentzTransform hcmboost = dk.boostHCM();
// Loop over the particles
double etcent = 0;
double etfrag = 0;
foreach (const Particle& p, particles) {
// Boost momentum to the hadronic CM frame
const FourMomentum hcmMom = hcmboost.transform(p.momentum());
double et = fabs(Et(hcmMom));
double eta = -hcmMom.pseudorapidity();
// Averages in central and forward region
if (fabs(eta) < .5 ) etcent += et;
if (eta > 2 && eta <= 3.) etfrag += et;
// Histograms of Et flow
if (evcut[0]) _histETLowQa [bin[0]]->fill(eta, et*weight);
if (evcut[1]) _histETLowQb [bin[1]]->fill(eta, et*weight);
if (evcut[2]) _histETHighQa[bin[2]]->fill(eta, et*weight);
if (evcut[3]) _histETHighQb[bin[3]]->fill(eta, et*weight);
}
// Fill histograms for the average quantities
if (evcut[1] || evcut[3]) {
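// Profiles of <Et> vs Q2 in the central (|eta| < 0.5) and forward (2 < eta <= 3) regions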
_histAverETCentral->fill(q2, etcent*weight,weight);
_histAverETFrag ->fill(q2, etfrag*weight,weight);
}
}
void H1_2000_S4129130::init() {
string t = "Transverse energy flow for ";
IHistogram1D* h = 0;
string xlabel = "$\\eta$";
/// @todo What is "N"?
- string ylabel = "$1/N \\, \\d{E_\\perp}/\\d{\\eta}$ / GeV";
+ string ylabel = "$1/N \\, \\mathrm{d}{E_\\perp}/\\mathrm{d}{\\eta}$ / GeV";
const string xt = "\\langle x \\rangle";
const string Q2t = "\\langle Q^2 \\rangle";
// Histograms and weight vectors for low Q^2 a
_histETLowQa.reserve(17);
_weightETLowQa.reserve(17);
for (size_t ix = 0; ix < 17; ++ix) {
string title = t + "$" + xt;
if (ix == 0) title += " = 0.08\\cdot 10^{-3}, " + Q2t + " = 3.2";
else if (ix == 1) title += " = 0.14\\cdot 10^{-3}, " + Q2t + " = 3.8";
else if (ix == 2) title += " = 0.26\\cdot 10^{-3}, " + Q2t + " = 3.9";
else if (ix == 3) title += " = 0.57\\cdot 10^{-3}, " + Q2t + " = 4.2";
else if (ix == 4) title += " = 0.16\\cdot 10^{-3}, " + Q2t + " = 6.3";
else if (ix == 5) title += " = 0.27\\cdot 10^{-3}, " + Q2t + " = 7.0";
else if (ix == 6) title += " = 0.50\\cdot 10^{-3}, " + Q2t + " = 7.0";
else if (ix == 7) title += " = 1.10\\cdot 10^{-3}, " + Q2t + " = 7.3";
else if (ix == 8) title += " = 0.36\\cdot 10^{-3}, " + Q2t + " = 13.1";
else if (ix == 9) title += " = 0.63\\cdot 10^{-3}, " + Q2t + " = 14.1";
else if (ix == 10) title += " = 1.10\\cdot 10^{-3}, " + Q2t + " = 14.1";
else if (ix == 11) title += " = 2.30\\cdot 10^{-3}, " + Q2t + " = 14.9";
else if (ix == 12) title += " = 0.93\\cdot 10^{-3}, " + Q2t + " = 28.8";
else if (ix == 13) title += " = 2.10\\cdot 10^{-3}, " + Q2t + " = 31.2";
else if (ix == 14) title += " = 4.70\\cdot 10^{-3}, " + Q2t + " = 33.2";
else if (ix == 15) title += " = 2.00\\cdot 10^{-3}, " + Q2t + " = 59.4";
else if (ix == 16) title += " = 7.00\\cdot 10^{-3}, " + Q2t + " = 70.2";
title += " \\text{ GeV}^2$";
h = bookHistogram1D(ix+1, 1, 1, title, xlabel, ylabel);
_histETLowQa.push_back(h);
_weightETLowQa.push_back(0.);
}
// Histograms and weight vectors for high Q^2 a
_histETHighQa.reserve(7);
_weightETHighQa.reserve(7);
for (size_t ix = 0; ix < 7; ++ix) {
string title = t + "$" + xt;
if (ix == 0) title += " = 0.0043, " + Q2t + " = 175";
else if (ix == 1) title += " = 0.01, " + Q2t + " = 253";
else if (ix == 2) title += " = 0.026, " + Q2t + " = 283";
else if (ix == 3) title += " = 0.012, " + Q2t + " = 511";
else if (ix == 4) title += " = 0.026, " + Q2t + " = 617";
else if (ix == 5) title += " = 0.076, " + Q2t + " = 682";
else if (ix == 6) title += " = 0.11, " + Q2t + " = 2200";
title += " \\text{ GeV}^2$";
h = bookHistogram1D(ix+18, 1, 1, title, xlabel, ylabel);
_histETHighQa.push_back(h);
_weightETHighQa.push_back(0.);
}
// Histograms and weight vectors for low Q^2 b
_histETLowQb.reserve(5);
_weightETLowQb.reserve(5);
for (size_t ix = 0; ix < 5; ++ix) {
string title = t + "$" + Q2t;
if (ix == 0) title += " = 2.5-5";
else if (ix == 1) title += " = 5-10";
else if (ix == 2) title += " = 10-20";
else if (ix == 3) title += " = 20-50";
else if (ix == 4) title += " = 50-100";
title += " \\text{ GeV}^2$";
h = bookHistogram1D(ix+25, 1, 1, title, xlabel, ylabel);
_histETLowQb.push_back(h);
_weightETLowQb.push_back(0.);
}
// Histograms and weight vectors for high Q^2 b
_histETHighQb.reserve(3);
_weightETHighQb.reserve(3);
for (size_t ix = 0; ix < 3; ++ix) {
string title = t + "$" + Q2t;
if (ix == 0) title += " = 100-220";
else if (ix == 1) title += " = 220-400";
else if (ix == 1) title += " > 400";
title += " \\text{ GeV}^2$";
h = bookHistogram1D(30+ix, 1, 1, title, xlabel, ylabel);
_histETHighQb.push_back(h);
_weightETHighQb.push_back(0.0);
}
// Histograms for the averages
xlabel = "$Q^2$ / $\\text{GeV}^2$";
ylabel = "$\\langle E_\\perp \\rangle$ / GeV";
_histAverETCentral =
bookProfile1D(33, 1, 1,
"Average $E_\\perp$ in the central region", xlabel, ylabel);
_histAverETFrag =
bookProfile1D(34, 1, 1,
"Average $E_\\perp$ in the forward region", xlabel, ylabel);
}
// Finalize
void H1_2000_S4129130::finalize() {
// Normalization of the Et distributions
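// Divide each Et-flow histogram by the summed weight of the events entering it,
// giving the per-event 1/N dEt/deta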
for (size_t ix = 0; ix < 17; ++ix) {
scale(_histETLowQa[ix], 1./_weightETLowQa[ix]);
}
for (size_t ix = 0; ix < 7; ++ix) {
scale(_histETHighQa[ix], 1./_weightETHighQa[ix]);
}
for (size_t ix = 0; ix < 5; ++ix) {
scale(_histETLowQb[ix], 1./_weightETLowQb[ix]);
}
for (size_t ix = 0; ix < 3; ++ix) {
scale(_histETHighQb[ix], 1./_weightETHighQb[ix]);
}
}
}
diff --git a/src/Analyses/MC_LHC_LEADINGJETS.cc b/src/Analyses/MC_LHC_LEADINGJETS.cc
--- a/src/Analyses/MC_LHC_LEADINGJETS.cc
+++ b/src/Analyses/MC_LHC_LEADINGJETS.cc
@@ -1,172 +1,172 @@
// -*- C++ -*-
#include "Rivet/Rivet.hh"
#include "Rivet/Tools/Logging.hh"
#include "Rivet/Analyses/MC_LHC_LEADINGJETS.hh"
#include "Rivet/RivetAIDA.hh"
#include "Rivet/Projections/FinalState.hh"
#include "Rivet/Projections/ChargedFinalState.hh"
#include "Rivet/Projections/FastJets.hh"
namespace Rivet {
MC_LHC_LEADINGJETS::MC_LHC_LEADINGJETS()
{
setBeams(PROTON, PROTON);
// Final state for the jet finding
const FinalState fsj(-4.0, 4.0, 0.0*GeV);
addProjection(fsj, "FSJ");
addProjection(FastJets(fsj, FastJets::KT, 0.7), "Jets");
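// (kT jet algorithm with R = 0.7, run on all final-state particles within |eta| < 4)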
// Charged final state for the distributions
const ChargedFinalState cfs(-1.0, 1.0, 0.5*GeV);
addProjection(cfs, "CFS");
}
// Book histograms
void MC_LHC_LEADINGJETS::init() {
const string xlabel = "Leading jet $p_\\perp$ / GeV";
- const string pdensityTeX = "$\\d{N_\\text{ch}}/\\d{\\phi}$";
- const string ptsumdensityTeX = "$\\d{\\sum p_\\perp^\\text{sum}}/\\d{\\phi}$";
+ const string pdensityTeX = "$\\mathrm{d}{N_\\text{ch}}/\\mathrm{d}{\\phi}$";
+ const string ptsumdensityTeX = "$\\mathrm{d}{\\sum p_\\perp^\\text{sum}}/\\mathrm{d}{\\phi}$";
const string ptavgTeX = "$\\langle p_\\perp \\rangle$";
const double maxpt1 = 500.0/GeV;
_hist_pnchg = bookProfile1D(histoPath("trans-nchg"),
"Transverse region charged particle density",
xlabel, pdensityTeX, 50, 0.0, maxpt1);
_hist_pmaxnchg = bookProfile1D(histoPath("trans-maxnchg"),
"TransMAX region charged particle density",
xlabel, pdensityTeX, 50, 0.0, maxpt1);
_hist_pminnchg = bookProfile1D(histoPath("trans-minnchg"),
"TransMIN region charged particle density",
xlabel, pdensityTeX, 50, 0.0, maxpt1);
_hist_pcptsum = bookProfile1D(histoPath("trans-ptsum"),
"Transverse region charged pT sum density",
xlabel, ptsumdensityTeX, 50, 0.0, maxpt1);
_hist_pmaxcptsum = bookProfile1D(histoPath("trans-maxptsum"),
"TransMAX region charged pT sum density",
xlabel, ptsumdensityTeX, 50, 0.0, maxpt1);
_hist_pmincptsum = bookProfile1D(histoPath("trans-minptsum"),
"TransMIN region charged pT sum density",
xlabel, ptsumdensityTeX, 50, 0.0, maxpt1);
_hist_pcptave = bookProfile1D(histoPath("trans-ptavg"),
"Transverse region charged pT average",
xlabel, ptavgTeX, 50, 0.0, maxpt1);
}
// Do the analysis
void MC_LHC_LEADINGJETS::analyze(const Event& e) {
const FinalState& fsj = applyProjection<FinalState>(e, "FSJ");
if (fsj.particles().empty()) {
getLog() << Log::DEBUG << "Failed multiplicity cut" << endl;
vetoEvent(e);
}
const FastJets& jetpro = applyProjection<FastJets>(e, "Jets");
const Jets jets = jetpro.jetsByPt();
getLog() << Log::DEBUG << "Jet multiplicity = " << jets.size() << endl;
// We require the leading jet to be within |eta| < 2
if (jets.size() < 1 || fabs(jets[0].momentum().pseudorapidity()) > 2) {
getLog() << Log::DEBUG << "Failed jet cut" << endl;
vetoEvent(e);
}
const double jetphi = jets[0].momentum().azimuthalAngle();
const double jetpT = jets[0].momentum().pT();
getLog() << Log::DEBUG << "Leading jet: pT = " << jetpT
<< ", eta = " << jets[0].momentum().pseudorapidity()
<< ", phi = " << jetphi << endl;
// Get the event weight
const double weight = e.weight();
// Get the final states to work with for filling the distributions
const FinalState& cfs = applyProjection<ChargedFinalState>(e, "CFS");
size_t numOverall(0), numToward(0), numTrans1(0), numTrans2(0), numAway(0);
double ptSumOverall(0.0), ptSumToward(0.0), ptSumTrans1(0.0), ptSumTrans2(0.0), ptSumAway(0.0);
double ptMaxOverall(0.0), ptMaxToward(0.0), ptMaxTrans1(0.0), ptMaxTrans2(0.0), ptMaxAway(0.0);
// Calculate all the charged stuff
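// Regions w.r.t. the leading jet: |dphi| < pi/3 is "toward", pi/3 < |dphi| < 2pi/3 is
// "transverse" (split into its two azimuthal sides), and |dphi| > 2pi/3 is "away"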
foreach (const Particle& p, cfs.particles()) {
const double deltaPhi = delta_phi(p.momentum().azimuthalAngle(), jetphi);
const double pT = p.momentum().pT();
const double phi = p.momentum().azimuthalAngle();
// TODO: FIX!
// Jets come with phi in [0 .. 2*Pi]. Particles come in [-Pi .. Pi].
// Lovely, isn't it?
/// @todo Use fastjet PseudoJet::phi_pi_pi (or whatever it's called)
double rotatedphi = phi - jetphi;
while (rotatedphi < 0) rotatedphi += 2*PI;
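// rotatedphi is now in [0, 2pi): values <= pi define one transverse side, the rest the other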
ptSumOverall += pT;
++numOverall;
if (pT > ptMaxOverall) ptMaxOverall = pT;
if (deltaPhi < PI/3.0) {
ptSumToward += pT;
++numToward;
if (pT > ptMaxToward) ptMaxToward = pT;
}
else if (deltaPhi < 2*PI/3.0) {
if (rotatedphi <= PI) {
ptSumTrans1 += pT;
++numTrans1;
if (pT > ptMaxTrans1) ptMaxTrans1 = pT;
} else {
ptSumTrans2 += pT;
++numTrans2;
if (pT > ptMaxTrans2) ptMaxTrans2 = pT;
}
}
else {
ptSumAway += pT;
++numAway;
if (pT > ptMaxAway) ptMaxAway = pT;
}
}
// Fill the histograms
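// Density normalisation: the full transverse region covers dphi x deta = (2pi/3) x 2 = 4pi/3;
// each single side covers (pi/3) x 2 = 2pi/3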
//_hist_tnchg->fill(jetpT, numToward/(4*PI/3), weight);
_hist_pnchg->fill(jetpT, (numTrans1+numTrans2)/(4*PI/3), weight);
_hist_pmaxnchg->fill(jetpT, (numTrans1>numTrans2 ? numTrans1 : numTrans2)/(2*PI/3), weight);
_hist_pminnchg->fill(jetpT, (numTrans1<numTrans2 ? numTrans1 : numTrans2)/(2*PI/3), weight);
//_hist_pdifnchg->fill(jetpT, abs(numTrans1-numTrans2)/(2*PI/3), weight);
//_hist_anchg->fill(jetpT, numAway/(4*PI/3), weight);
//_hist_tcptsum->fill(jetpT, ptSumToward/(4*PI/3), weight);
_hist_pcptsum->fill(jetpT, (ptSumTrans1+ptSumTrans2)/(4*PI/3), weight);
_hist_pmaxcptsum->fill(jetpT, (ptSumTrans1>ptSumTrans2 ? ptSumTrans1 : ptSumTrans2)/(2*PI/3), weight);
_hist_pmincptsum->fill(jetpT, (ptSumTrans1<ptSumTrans2 ? ptSumTrans1 : ptSumTrans2)/(2*PI/3), weight);
//_hist_pdifcptsum->fill(jetpT, fabs(ptSumTrans1-ptSumTrans2)/(2*PI/3), weight);
//_hist_acptsum->fill(jetpT, ptSumAway/(4*PI/3), weight);
//if (numToward > 0) {
// _hist_tcptave->fill(jetpT, ptSumToward/numToward, weight);
// _hist_tcptmax->fill(jetpT, ptMaxToward, weight);
//}
if ((numTrans1+numTrans2) > 0) {
_hist_pcptave->fill(jetpT, (ptSumTrans1+ptSumTrans2)/(numTrans1+numTrans2), weight);
//_hist_pcptmax->fill(jetpT, (ptMaxTrans1 > ptMaxTrans2 ? ptMaxTrans1 : ptMaxTrans2), weight);
}
//if (numAway > 0) {
// _hist_acptave->fill(jetpT, ptSumAway/numAway, weight);
// _hist_acptmax->fill(jetpT, ptMaxAway, weight);
//}
}
void MC_LHC_LEADINGJETS::finalize() { }
}
