diff --git a/src/Analyses/ALICE_2014_I1300380.cc b/src/Analyses/ALICE_2014_I1300380.cc --- a/src/Analyses/ALICE_2014_I1300380.cc +++ b/src/Analyses/ALICE_2014_I1300380.cc @@ -1,120 +1,120 @@ // -*- C++ -*- #include "Rivet/Analysis.hh" #include "Rivet/Projections/UnstableFinalState.hh" namespace Rivet { class ALICE_2014_I1300380 : public Analysis { public: ALICE_2014_I1300380() : Analysis("ALICE_2014_I1300380") {} public: void init() { const UnstableFinalState cfs(Cuts::absrap<0.5); declare(cfs, "CFS"); // Plots from the paper _histPtSigmaStarPlus = bookHisto1D("d01-x01-y01"); // Sigma*+ - _histPtSigmaStarMinus = bookHisto1D("d01-x01-y02"); // Sigma*- + _histPtSigmaStarMinus = bookHisto1D("d01-x01-y02"); // Sigma*- _histPtSigmaStarPlusAnti = bookHisto1D("d01-x01-y03"); // anti Sigma*- _histPtSigmaStarMinusAnti = bookHisto1D("d01-x01-y04"); // anti Sigma*+ _histPtXiStar = bookHisto1D("d02-x01-y01"); // 0.5 * (xi star + anti xi star) _histAveragePt = bookProfile1D("d03-x01-y01"); // profile } void analyze(const Event& event) { const double weight = event.weight(); const UnstableFinalState& cfs = apply(event, "CFS"); foreach (const Particle& p, cfs.particles()) { // protections against mc generators decaying long-lived particles if ( !(p.hasAncestor(310) || p.hasAncestor(-310) || // K0s p.hasAncestor(130) || p.hasAncestor(-130) || // K0l p.hasAncestor(3322) || p.hasAncestor(-3322) || // Xi0 p.hasAncestor(3122) || p.hasAncestor(-3122) || // Lambda p.hasAncestor(3222) || p.hasAncestor(-3222) || // Sigma+/- p.hasAncestor(3312) || p.hasAncestor(-3312) || // Xi-/+ - p.hasAncestor(3334) || p.hasAncestor(-3334) )) // Omega-/+ - { - int aid = abs(p.pdgId()); - if (aid == 211 || // pi+ + p.hasAncestor(3334) || p.hasAncestor(-3334) )) // Omega-/+ + { + int aid = abs(p.pid()); + if (aid == 211 || // pi+ aid == 321 || // K+ aid == 313 || // K*(892)0 aid == 2212 || // proton aid == 333 ) { // phi(1020) _histAveragePt->fill(p.mass()/GeV, p.pT()/GeV, weight); } } // end if "rejection 
of long-lived particles" - - - switch (p.pdgId()) { + + + switch (p.pid()) { case 3224: _histPtSigmaStarPlus->fill(p.pT()/GeV, weight); _histAveragePt->fill(p.mass()/GeV, p.pT()/GeV, weight); break; case -3224: _histPtSigmaStarPlusAnti->fill(p.pT()/GeV, weight); _histAveragePt->fill(p.mass()/GeV, p.pT()/GeV, weight); break; case 3114: _histPtSigmaStarMinus->fill(p.pT()/GeV, weight); _histAveragePt->fill(p.mass()/GeV, p.pT()/GeV, weight); break; case -3114: _histPtSigmaStarMinusAnti->fill(p.pT()/GeV, weight); _histAveragePt->fill(p.mass()/GeV, p.pT()/GeV, weight); break; case 3324: _histPtXiStar->fill(p.pT()/GeV, weight); _histAveragePt->fill(p.mass()/GeV, p.pT()/GeV, weight); break; case -3324: _histPtXiStar->fill(p.pT()/GeV, weight); _histAveragePt->fill(p.mass()/GeV, p.pT()/GeV, weight); break; case 3312: _histAveragePt->fill(p.mass()/GeV, p.pT()/GeV, weight); break; case -3312: _histAveragePt->fill(p.mass()/GeV, p.pT()/GeV, weight); break; case 3334: _histAveragePt->fill(p.mass()/GeV, p.pT()/GeV, weight); break; case -3334: _histAveragePt->fill(p.mass()/GeV, p.pT()/GeV, weight); break; } } } void finalize() { scale(_histPtSigmaStarPlus, 1./sumOfWeights()); scale(_histPtSigmaStarPlusAnti, 1./sumOfWeights()); scale(_histPtSigmaStarMinus, 1./sumOfWeights()); scale(_histPtSigmaStarMinusAnti, 1./sumOfWeights()); - scale(_histPtXiStar, 1./sumOfWeights()/ 2.); + scale(_histPtXiStar, 1./sumOfWeights()/ 2.); } private: // plots from the paper Histo1DPtr _histPtSigmaStarPlus; Histo1DPtr _histPtSigmaStarPlusAnti; Histo1DPtr _histPtSigmaStarMinus; Histo1DPtr _histPtSigmaStarMinusAnti; Histo1DPtr _histPtXiStar; Profile1DPtr _histAveragePt; }; // The hook for the plugin system DECLARE_RIVET_PLUGIN(ALICE_2014_I1300380); } diff --git a/src/Analyses/ATLAS_2011_I944826.cc b/src/Analyses/ATLAS_2011_I944826.cc --- a/src/Analyses/ATLAS_2011_I944826.cc +++ b/src/Analyses/ATLAS_2011_I944826.cc @@ -1,260 +1,260 @@ // -*- C++ -*- #include "Rivet/Analysis.hh" #include 
"Rivet/Projections/ChargedFinalState.hh" #include "Rivet/Projections/IdentifiedFinalState.hh" #include "Rivet/Projections/UnstableFinalState.hh" namespace Rivet { class ATLAS_2011_I944826 : public Analysis { public: /// Constructor ATLAS_2011_I944826() : Analysis("ATLAS_2011_I944826") { _sum_w_ks = 0.0; _sum_w_lambda = 0.0; _sum_w_passed = 0.0; } /// Book histograms and initialise projections before the run void init() { UnstableFinalState ufs(Cuts::pT > 100*MeV); declare(ufs, "UFS"); ChargedFinalState mbts(Cuts::absetaIn(2.09, 3.84)); declare(mbts, "MBTS"); IdentifiedFinalState nstable(Cuts::abseta < 2.5 && Cuts::pT >= 100*MeV); nstable.acceptIdPair(PID::ELECTRON) .acceptIdPair(PID::MUON) .acceptIdPair(PID::PIPLUS) .acceptIdPair(PID::KPLUS) .acceptIdPair(PID::PROTON); declare(nstable, "nstable"); if (fuzzyEquals(sqrtS()/GeV, 7000, 1e-3)) { _hist_Ks_pT = bookHisto1D(1, 1, 1); _hist_Ks_y = bookHisto1D(2, 1, 1); _hist_Ks_mult = bookHisto1D(3, 1, 1); _hist_L_pT = bookHisto1D(7, 1, 1); _hist_L_y = bookHisto1D(8, 1, 1); _hist_L_mult = bookHisto1D(9, 1, 1); _hist_Ratio_v_y = bookScatter2D(13, 1, 1); _hist_Ratio_v_pT = bookScatter2D(14, 1, 1); // _temp_lambda_v_y = Histo1D(10, 0.0, 2.5); _temp_lambdabar_v_y = Histo1D(10, 0.0, 2.5); _temp_lambda_v_pT = Histo1D(18, 0.5, 4.1); _temp_lambdabar_v_pT = Histo1D(18, 0.5, 4.1); } else if (fuzzyEquals(sqrtS()/GeV, 900, 1E-3)) { _hist_Ks_pT = bookHisto1D(4, 1, 1); _hist_Ks_y = bookHisto1D(5, 1, 1); _hist_Ks_mult = bookHisto1D(6, 1, 1); _hist_L_pT = bookHisto1D(10, 1, 1); _hist_L_y = bookHisto1D(11, 1, 1); _hist_L_mult = bookHisto1D(12, 1, 1); _hist_Ratio_v_y = bookScatter2D(15, 1, 1); _hist_Ratio_v_pT = bookScatter2D(16, 1, 1); // _temp_lambda_v_y = Histo1D(5, 0.0, 2.5); _temp_lambdabar_v_y = Histo1D(5, 0.0, 2.5); _temp_lambda_v_pT = Histo1D(8, 0.5, 3.7); _temp_lambdabar_v_pT = Histo1D(8, 0.5, 3.7); } } // This function is required to impose the flight time cuts on Kaons and Lambdas double getPerpFlightDistance(const 
Rivet::Particle& p) { - const HepMC::GenParticle* genp = p.genParticle(); - const HepMC::GenVertex* prodV = genp->production_vertex(); - const HepMC::GenVertex* decV = genp->end_vertex(); + const HepMC::GenParticlePtr genp = p.genParticle(); + const HepMC::GenVertexPtr prodV = genp->production_vertex(); + const HepMC::GenVertexPtr decV = genp->end_vertex(); const HepMC::ThreeVector prodPos = prodV->point3d(); if (decV) { const HepMC::ThreeVector decPos = decV->point3d(); double dy = prodPos.y() - decPos.y(); double dx = prodPos.x() - decPos.x(); return add_quad(dx, dy); } return numeric_limits::max(); } bool daughtersSurviveCuts(const Rivet::Particle& p) { // We require the Kshort or Lambda to decay into two charged // particles with at least pT = 100 MeV inside acceptance region - const HepMC::GenParticle* genp = p.genParticle(); - const HepMC::GenVertex* decV = genp->end_vertex(); + const HepMC::GenParticlePtr genp = p.genParticle(); + const HepMC::GenVertexPtr decV = genp->end_vertex(); bool decision = true; if (!decV) return false; if (decV->particles_out_size() == 2) { std::vector pTs; std::vector charges; std::vector etas; - foreach (const HepMC::GenParticle* gp, particles(decV, HepMC::children)) { + foreach (const HepMC::GenParticlePtr gp, particles(decV, HepMC::children)) { pTs.push_back(gp->momentum().perp()); etas.push_back(fabs(gp->momentum().eta())); charges.push_back( Rivet::PID::threeCharge(gp->pdg_id()) ); // gp->print(); } if ( (pTs[0]/Rivet::GeV < 0.1) || (pTs[1]/Rivet::GeV < 0.1) ) { decision = false; MSG_DEBUG("Failed pT cut: " << pTs[0]/Rivet::GeV << " " << pTs[1]/Rivet::GeV); } if ( etas[0] > 2.5 || etas[1] > 2.5 ) { decision = false; MSG_DEBUG("Failed eta cut: " << etas[0] << " " << etas[1]); } if ( charges[0] * charges[1] >= 0 ) { decision = false; MSG_DEBUG("Failed opposite charge cut: " << charges[0] << " " << charges[1]); } } else { decision = false; MSG_DEBUG("Failed nDaughters cut: " << decV->particles_out_size()); } return decision; } 
/// Perform the per-event analysis void analyze(const Event& event) { const double weight = event.weight(); // ATLAS MBTS trigger requirement of at least one hit in either hemisphere if (apply(event, "MBTS").size() < 1) { MSG_DEBUG("Failed trigger cut"); vetoEvent; } // Veto event also when we find less than 2 particles in the acceptance region of type 211,2212,11,13,321 if (apply(event, "nstable").size() < 2) { MSG_DEBUG("Failed stable particle cut"); vetoEvent; } _sum_w_passed += weight; // This ufs holds all the Kaons and Lambdas const UnstableFinalState& ufs = apply(event, "UFS"); // Some conters int n_KS0 = 0; int n_LAMBDA = 0; // Particle loop foreach (const Particle& p, ufs.particles()) { // General particle quantities const double pT = p.pT(); const double y = p.rapidity(); const PdgId apid = p.abspid(); double flightd = 0.0; // Look for Kaons, Lambdas switch (apid) { case PID::K0S: flightd = getPerpFlightDistance(p); if (!inRange(flightd/mm, 4., 450.) ) { MSG_DEBUG("Kaon failed flight distance cut:" << flightd); break; } if (daughtersSurviveCuts(p) ) { _hist_Ks_y ->fill(y, weight); _hist_Ks_pT->fill(pT/GeV, weight); _sum_w_ks += weight; n_KS0++; } break; case PID::LAMBDA: if (pT < 0.5*GeV) { // Lambdas have an additional pT cut of 500 MeV MSG_DEBUG("Lambda failed pT cut:" << pT/GeV << " GeV"); break; } flightd = getPerpFlightDistance(p); if (!inRange(flightd/mm, 17., 450.)) { MSG_DEBUG("Lambda failed flight distance cut:" << flightd/mm << " mm"); break; } if ( daughtersSurviveCuts(p) ) { if (p.pid() == PID::LAMBDA) { _temp_lambda_v_y.fill(fabs(y), weight); _temp_lambda_v_pT.fill(pT/GeV, weight); _hist_L_y->fill(y, weight); _hist_L_pT->fill(pT/GeV, weight); _sum_w_lambda += weight; n_LAMBDA++; } else if (p.pid() == -PID::LAMBDA) { _temp_lambdabar_v_y.fill(fabs(y), weight); _temp_lambdabar_v_pT.fill(pT/GeV, weight); } } break; } } // Fill multiplicity histos _hist_Ks_mult->fill(n_KS0, weight); _hist_L_mult->fill(n_LAMBDA, weight); } /// Normalise histograms 
etc., after the run void finalize() { MSG_DEBUG("# Events that pass the trigger: " << _sum_w_passed); MSG_DEBUG("# Kshort events: " << _sum_w_ks); MSG_DEBUG("# Lambda events: " << _sum_w_lambda); /// @todo Replace with normalize()? scale(_hist_Ks_pT, 1.0/_sum_w_ks); scale(_hist_Ks_y, 1.0/_sum_w_ks); scale(_hist_Ks_mult, 1.0/_sum_w_passed); /// @todo Replace with normalize()? scale(_hist_L_pT, 1.0/_sum_w_lambda); scale(_hist_L_y, 1.0/_sum_w_lambda); scale(_hist_L_mult, 1.0/_sum_w_passed); // Division of histograms to obtain lambda_bar/lambda ratios divide(_temp_lambdabar_v_y, _temp_lambda_v_y, _hist_Ratio_v_y); divide(_temp_lambdabar_v_pT, _temp_lambda_v_pT, _hist_Ratio_v_pT); } private: /// Counters double _sum_w_ks, _sum_w_lambda, _sum_w_passed; /// @name Persistent histograms //@{ Histo1DPtr _hist_Ks_pT, _hist_Ks_y, _hist_Ks_mult; Histo1DPtr _hist_L_pT, _hist_L_y, _hist_L_mult; Scatter2DPtr _hist_Ratio_v_pT, _hist_Ratio_v_y; //@} /// @name Temporary histograms //@{ Histo1D _temp_lambda_v_y, _temp_lambdabar_v_y; Histo1D _temp_lambda_v_pT, _temp_lambdabar_v_pT; //@} }; // The hook for the plugin system DECLARE_RIVET_PLUGIN(ATLAS_2011_I944826); } diff --git a/src/Analyses/LHCB_2015_I1333223.cc b/src/Analyses/LHCB_2015_I1333223.cc --- a/src/Analyses/LHCB_2015_I1333223.cc +++ b/src/Analyses/LHCB_2015_I1333223.cc @@ -1,113 +1,110 @@ // -*- C++ -*- #include "Rivet/Analysis.hh" #include "Rivet/Tools/Logging.hh" #include "Rivet/Projections/ChargedFinalState.hh" #include "Rivet/Math/Units.hh" #include using namespace std; namespace Rivet { class LHCB_2015_I1333223 : public Analysis { public: /// @name Constructors etc. 
//@{ /// Constructor LHCB_2015_I1333223() : Analysis("LHCB_2015_I1333223") { } //@} public: /// @name Analysis methods //@{ /// Book histograms and initialise projections before the run void init() { // Charged particles declare(ChargedFinalState(Cuts::eta> 2.0 && Cuts::eta <4.5 && Cuts::pT >0.2*GeV), "CFS"); // Reproducing only measurement for prompt charged particles _hInelasticXs = bookHisto1D(1, 1, 1); } /// Perform the per-event analysis void analyze(const Event& event) { const double weight = event.weight(); const ChargedFinalState &cfs = apply (event, "CFS"); // eliminate non-inelastic events and empty events in LHCb if (cfs.particles().size() == 0) vetoEvent; // See if this event has at least one prompt particle foreach (const Particle &myp, cfs.particles()){ double dPV = getPVDCA(myp); // if IP > 200 microns the particle is not considered prompt if ((dPV < 0.) || (dPV > 0.2 * millimeter)) { MSG_DEBUG(" Vetoing " << myp.pdgId() << " at " << dPV); continue; } // histo gets filled only for inelastic events (at least one prompt charged particle) _hInelasticXs->fill(sqrtS(), weight); break; } //end loop on particles } /// Normalise histograms etc., after the run void finalize() { scale(_hInelasticXs, crossSection()/sumOfWeights()/millibarn); } //@} private: - /* - * Compute Distance of Closest Approach in z range for one particle. - * Assuming length unit is mm. - * Returns -1. if unable to compute the DCA to PV. - */ + /// Compute distance of closest approach in z range for one particle. + /// Returns -1 if unable to compute the DCA to PV. 
double getPVDCA(const Particle& p) { - const HepMC::GenVertex* vtx = p.genParticle()->production_vertex(); + const GenVertexPtr vtx = p.genParticle()->production_vertex(); if ( 0 == vtx ) return -1.; - + // Unit vector of particle's MOMENTUM three vector const Vector3 u = p.momentum().p3().unit(); - + // The interaction point is always at (0, 0,0,0) hence the // vector pointing from the PV to the particle production vertex is: Vector3 d(vtx->position().x(), vtx->position().y(), vtx->position().z()); // Subtract projection of d onto u from d double proj = d.dot(u); d -= (u * proj); - // d should be orthogonal to u and it's length give the distance of + // d should be orthogonal to u and its length gives the distance of // closest approach return d.mod(); } /// @name Histograms //@{ Histo1DPtr _hInelasticXs; //@} // }; // The hook for the plugin system DECLARE_RIVET_PLUGIN(LHCB_2015_I1333223); }