diff --git a/analyses/pluginALICE/ALICE_2012_I1127497.cc b/analyses/pluginALICE/ALICE_2012_I1127497.cc
--- a/analyses/pluginALICE/ALICE_2012_I1127497.cc
+++ b/analyses/pluginALICE/ALICE_2012_I1127497.cc
@@ -1,212 +1,212 @@
 // -*- C++ -*-
 #include "Rivet/Analysis.hh"
 #include "Rivet/Projections/ChargedFinalState.hh"
 #include "Rivet/Tools/Cuts.hh"
 #include "Rivet/Projections/SingleValueProjection.hh"
 #include "Rivet/Tools/AliceCommon.hh"
 #include "Rivet/Projections/AliceCommon.hh"
 #include "Rivet/Projections/HepMCHeavyIon.hh"

 namespace Rivet {

   /// @brief ALICE PbPb at 2.76 TeV R_AA analysis.
   class ALICE_2012_I1127497 : public Analysis {
+
   public:

     /// Constructor
     DEFAULT_RIVET_ANALYSIS_CTOR(ALICE_2012_I1127497);
-
     /// @name Analysis methods
     //@{

     /// Book histograms and initialise projections before the run
     void init() {

       // Access the HepMC heavy ion info
       declare(HepMCHeavyIon(), "HepMC");

       // Declare centrality projection
       declareCentrality(ALICE::V0MMultiplicity(), "ALICE_2015_PBPBCentrality", "V0M", "V0M");

       // Charged, primary particles with |eta| < 0.5 and pT > 150 MeV
       declare(ALICE::PrimaryParticles(Cuts::abseta < 0.5 &&
         Cuts::pT > 150*MeV && Cuts::abscharge > 0), "APRIM");

       // Loop over all histograms
       for (size_t ihist = 0; ihist < NHISTOS; ++ihist) {

         // Initialize PbPb objects
         book(_histNch[PBPB][ihist], ihist+1, 1, 1);

         std::string nameCounterPbPb = "counter.pbpb." + std::to_string(ihist);
         book(_counterSOW[PBPB][ihist], nameCounterPbPb); // Sum of weights counter for PbPb

         std::string nameCounterNcoll = "counter.ncoll." + std::to_string(ihist);
         book(_counterNcoll[ihist], nameCounterNcoll); // Ncoll counter for PbPb

         // Initialize pp objects. In principle, only one pp histogram would be
         // needed since centrality does not make any difference here. However,
         // in some cases in this analysis the binnings differ from each other,
         // so this is an easy-to-implement way to account for that.
         std::string namePP = _histNch[PBPB][ihist]->name() + "-pp";

         // The binning is taken from the reference data
         book(_histNch[PP][ihist], namePP, refData(ihist+1, 1, 1));

         std::string nameCounterpp = "counter.pp." + std::to_string(ihist);
         book(_counterSOW[PP][ihist], nameCounterpp); // Sum of weights counter for pp

         // Book ratios, to be used in finalize
         book(_histRAA[ihist], ihist+16, 1, 1);
       }

       // Centrality regions keeping boundaries for a certain region.
       // Note that some regions overlap with other regions.
       _centrRegions.clear();
       _centrRegions = {{0., 5.}, {5., 10.}, {10., 20.}, {20., 30.}, {30., 40.},
                        {40., 50.}, {50., 60.}, {60., 70.}, {70., 80.},
                        {0., 10.}, {0., 20.}, {20., 40.}, {40., 60.}, {40., 80.}, {60., 80.}};

       // Find out the beam type, also specified from option.
       string beamOpt = getOption("beam","NONE");
       if (beamOpt != "NONE") {
         MSG_WARNING("You are using a specified beam type, instead of using what"
-          "is provided by the generator. "
-          "Only do this if you are completely sure what you are doing.");
-        if (beamOpt=="PP") isHI = false;
-        else if (beamOpt=="HI") isHI = true;
-        else {
-          MSG_ERROR("Beam error (option)!");
-          return;
+                    "is provided by the generator. "
+                    "Only do this if you are completely sure what you are doing.");
+        if (beamOpt=="PP") isHI = false;
+        else if (beamOpt=="HI") isHI = true;
+        else {
+          MSG_ERROR("Beam error (option)!");
+          return;
         }
       } else {
         const ParticlePair& beam = beams();
         if (beam.first.pid() == PID::PROTON && beam.second.pid() == PID::PROTON) isHI = false;
-        else if (beam.first.pid() == PID::LEAD && beam.second.pid() == PID::LEAD)
-          isHI = true;
-        else {
-          MSG_ERROR("Beam error (found)!");
-          return;
-        }
+        else if (beam.first.pid() == PID::LEAD && beam.second.pid() == PID::LEAD)
+          isHI = true;
+        else {
+          MSG_ERROR("Beam error (found)!");
+          return;
+        }
       }
     }

     /// Perform the per-event analysis
     void analyze(const Event& event) {

       // Charged, primary particles with at least pT = 150 MeV
       // in eta range of |eta| < 0.5
       Particles chargedParticles = apply<ALICE::PrimaryParticles>(event, "APRIM").particlesByPt();

       // Check type of event.
       if ( isHI ) {

         const HepMCHeavyIon& hi = apply<HepMCHeavyIon>(event, "HepMC");

         // Prepare centrality projection and value
         const CentralityProjection& centrProj = apply<CentralityProjection>(event, "V0M");
         double centr = centrProj();

         // Veto event for too large centralities since those are not used
         // in the analysis at all
         if ((centr < 0.) || (centr > 80.)) vetoEvent;

         // Fill PbPb histograms and add weights based on centrality value
         for (size_t ihist = 0; ihist < NHISTOS; ++ihist) {
           if (inRange(centr, _centrRegions[ihist].first, _centrRegions[ihist].second)) {
             _counterSOW[PBPB][ihist]->fill();
             _counterNcoll[ihist]->fill(hi.Ncoll());
             for (const Particle& p : chargedParticles) {
               double pT = p.pT()/GeV;
               if (pT < 50.) {
                 const double pTAtBinCenter = _histNch[PBPB][ihist]->binAt(pT).xMid();
                 _histNch[PBPB][ihist]->fill(pT, 1/pTAtBinCenter);
               }
             }
           }
         }

       } else {

         // Fill all pp histograms and add weights
         for (size_t ihist = 0; ihist < NHISTOS; ++ihist) {
           _counterSOW[PP][ihist]->fill();
           for (const Particle& p : chargedParticles) {
             double pT = p.pT()/GeV;
             if (pT < 50.) {
               const double pTAtBinCenter = _histNch[PP][ihist]->binAt(pT).xMid();
               _histNch[PP][ihist]->fill(pT, 1/pTAtBinCenter);
             }
           }
         }

       }
     }

     /// Normalise histograms etc., after the run
     void finalize() {

       // Right scaling of the histograms with their individual weights.
       for (size_t itype = 0; itype < EVENT_TYPES; ++itype ) {
         for (size_t ihist = 0; ihist < NHISTOS; ++ihist) {
           if (_counterSOW[itype][ihist]->sumW() > 0.) {
             scale(_histNch[itype][ihist],
                   (1. / _counterSOW[itype][ihist]->sumW() / 2. / M_PI));
           }
         }
       }

       // Postprocessing of the histograms
       for (size_t ihist = 0; ihist < NHISTOS; ++ihist) {
         // If there are entries in histograms for both beam types
         if (_histNch[PP][ihist]->numEntries() > 0 &&
             _histNch[PBPB][ihist]->numEntries() > 0) {
           // Initialize and fill R_AA histograms
           divide(_histNch[PBPB][ihist], _histNch[PP][ihist], _histRAA[ihist]);
           // Scale by Ncoll. Unfortunately some generators do not provide the
           // Ncoll value (eg. JEWEL), so the following scaling will be done
           // only if there are entries in the counters
           double ncoll = _counterNcoll[ihist]->sumW();
           double sow = _counterSOW[PBPB][ihist]->sumW();
           if (ncoll > 1e-6 && sow > 1e-6)
             _histRAA[ihist]->scaleY(1. / (ncoll / sow));
         }
       }
     }

     //@}

   private:

     bool isHI;
     static const int NHISTOS = 15;
     static const int EVENT_TYPES = 2;
     static const int PP = 0;
     static const int PBPB = 1;

     /// @name Histograms
     //@{
     Histo1DPtr _histNch[EVENT_TYPES][NHISTOS];
     CounterPtr _counterSOW[EVENT_TYPES][NHISTOS];
     CounterPtr _counterNcoll[NHISTOS];
     Scatter2DPtr _histRAA[NHISTOS];
     //@}

     std::vector<std::pair<double, double>> _centrRegions;

   };

   // The hook for the plugin system
   DECLARE_RIVET_PLUGIN(ALICE_2012_I1127497);

 }
diff --git a/analyses/pluginLEP/ALEPH_1996_S3486095.cc b/analyses/pluginLEP/ALEPH_1996_S3486095.cc
--- a/analyses/pluginLEP/ALEPH_1996_S3486095.cc
+++ b/analyses/pluginLEP/ALEPH_1996_S3486095.cc
@@ -1,478 +1,479 @@
 // -*- C++ -*-
 #include "Rivet/Analysis.hh"
 #include "Rivet/Projections/Beam.hh"
 #include "Rivet/Projections/Sphericity.hh"
 #include "Rivet/Projections/Thrust.hh"
 #include "Rivet/Projections/FastJets.hh"
 #include "Rivet/Projections/ParisiTensor.hh"
 #include "Rivet/Projections/Hemispheres.hh"
 #include "Rivet/Projections/FinalState.hh"
 #include "Rivet/Projections/ChargedFinalState.hh"
 #include "Rivet/Projections/UnstableParticles.hh"

 namespace Rivet {

   /// @brief ALEPH QCD study with event shapes and identified particles
   /// @author Holger Schulz
   class ALEPH_1996_S3486095 : public Analysis {
   public:

     /// Constructor
     DEFAULT_RIVET_ANALYSIS_CTOR(ALEPH_1996_S3486095);

     /// @name Analysis methods
     //@{

     void init() {
       // Set up projections
       declare(Beam(), "Beams");
       const ChargedFinalState cfs;
       declare(cfs, "FS");
       declare(UnstableParticles(), "UFS");
       declare(FastJets(cfs, FastJets::DURHAM, 0.7), "DurhamJets");
       declare(Sphericity(cfs), "Sphericity");
       declare(ParisiTensor(cfs), "Parisi");
       const Thrust thrust(cfs);
       declare(thrust, "Thrust");
       declare(Hemispheres(thrust), "Hemispheres");

       // Book histograms
       book(_histSphericity   ,1, 1, 1);
       book(_histAplanarity   ,2, 1, 1);
       book(_hist1MinusT      ,3, 1, 1);
       book(_histTMinor       ,4, 1, 1);
       book(_histY3           ,5, 1, 1);
       book(_histHeavyJetMass ,6, 1, 1);
       book(_histCParam       ,7, 1, 1);
       book(_histOblateness   ,8, 1, 1);
       book(_histScaledMom    ,9, 1, 1);
       book(_histRapidityT    ,10, 1, 1);
       book(_histPtSIn        ,11, 1, 1);
       book(_histPtSOut       ,12, 1, 1);
       book(_histLogScaledMom ,17, 1, 1);
       book(_histChMult       ,18, 1, 1);
       book(_histMeanChMult   ,19, 1, 1);
       book(_histMeanChMultRapt05,20, 1, 1);
       book(_histMeanChMultRapt10,21, 1, 1);
       book(_histMeanChMultRapt15,22, 1, 1);
       book(_histMeanChMultRapt20,23, 1, 1);

       // Particle spectra
       book(_histMultiPiPlus        ,25, 1, 1);
       book(_histMultiKPlus         ,26, 1, 1);
       book(_histMultiP             ,27, 1, 1);
       book(_histMultiPhoton        ,28, 1, 1);
       book(_histMultiPi0           ,29, 1, 1);
       book(_histMultiEta           ,30, 1, 1);
       book(_histMultiEtaPrime      ,31, 1, 1);
       book(_histMultiK0            ,32, 1, 1);
       book(_histMultiLambda0       ,33, 1, 1);
       book(_histMultiXiMinus       ,34, 1, 1);
       book(_histMultiSigma1385Plus ,35, 1, 1);
       book(_histMultiXi1530_0      ,36, 1, 1);
       book(_histMultiRho           ,37, 1, 1);
       book(_histMultiOmega782      ,38, 1, 1);
       book(_histMultiKStar892_0    ,39, 1, 1);
       book(_histMultiPhi           ,40, 1, 1);
       book(_histMultiKStar892Plus  ,43, 1, 1);

       // Mean multiplicities
       book(_histMeanMultiPi0           ,44, 1, 2);
       book(_histMeanMultiEta           ,44, 1, 3);
       book(_histMeanMultiEtaPrime      ,44, 1, 4);
       book(_histMeanMultiK0            ,44, 1, 5);
       book(_histMeanMultiRho           ,44, 1, 6);
       book(_histMeanMultiOmega782      ,44, 1, 7);
       book(_histMeanMultiPhi           ,44, 1, 8);
       book(_histMeanMultiKStar892Plus  ,44, 1, 9);
       book(_histMeanMultiKStar892_0    ,44, 1, 10);
       book(_histMeanMultiLambda0       ,44, 1, 11);
       book(_histMeanMultiSigma0        ,44, 1, 12);
       book(_histMeanMultiXiMinus       ,44, 1, 13);
       book(_histMeanMultiSigma1385Plus ,44, 1, 14);
       book(_histMeanMultiXi1530_0      ,44, 1, 15);
       book(_histMeanMultiOmegaOmegaBar ,44, 1, 16);

+      book(_weightedTotalPartNum, "/TMP/TotalPartNum");
-      book(_weightedTotalPartNum, "/TMP/weightedTotalPartNum");
     }
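    // Note on the Durham y23 observable filled in analyze() below: the
    // FastJets cluster sequence's exclusive_ymerge_max(2) returns the y value
    // at which the event flips from 3 to 2 jets in the Durham (kT) algorithm,
    // and the histogram is filled with -ln(y23), following the convention of
    // the ALEPH publication.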
     void analyze(const Event& e) {
       // First, veto on leptonic events by requiring at least 2 charged FS particles
       const FinalState& fs = apply<FinalState>(e, "FS");
       const size_t numParticles = fs.particles().size();

       // Even if we only generate hadronic events, we still need a cut on numCharged >= 2.
       if (numParticles < 2) {
         MSG_DEBUG("Failed leptonic event cut");
         vetoEvent;
       }
       MSG_DEBUG("Passed leptonic event cut");

       _weightedTotalPartNum->fill(numParticles);

       // Get beams and average beam momentum
       const ParticlePair& beams = apply<Beam>(e, "Beams").beams();
       const double meanBeamMom = ( beams.first.p3().mod() + beams.second.p3().mod() ) / 2.0;
       MSG_DEBUG("Avg beam momentum = " << meanBeamMom);

       // Thrusts
       MSG_DEBUG("Calculating thrust");
       const Thrust& thrust = apply<Thrust>(e, "Thrust");
       _hist1MinusT->fill(1 - thrust.thrust());
       _histTMinor->fill(thrust.thrustMinor());
       _histOblateness->fill(thrust.oblateness());

       // Jets
       MSG_DEBUG("Calculating differential jet rate plots:");
       const FastJets& durjet = apply<FastJets>(e, "DurhamJets");
       if (durjet.clusterSeq()) {
         double y3 = durjet.clusterSeq()->exclusive_ymerge_max(2);
         if (y3>0.0) _histY3->fill(-1. * std::log(y3));
       }

       // Sphericities
       MSG_DEBUG("Calculating sphericity");
       const Sphericity& sphericity = apply<Sphericity>(e, "Sphericity");
       _histSphericity->fill(sphericity.sphericity());
       _histAplanarity->fill(sphericity.aplanarity());

       // C param
       MSG_DEBUG("Calculating Parisi params");
       const ParisiTensor& parisi = apply<ParisiTensor>(e, "Parisi");
       _histCParam->fill(parisi.C());

       // Hemispheres
       MSG_DEBUG("Calculating hemisphere variables");
       const Hemispheres& hemi = apply<Hemispheres>(e, "Hemispheres");
       _histHeavyJetMass->fill(hemi.scaledM2high());

       // Iterate over all the charged final state particles.
       double Evis = 0.0;
       double rapt05 = 0.;
       double rapt10 = 0.;
       double rapt15 = 0.;
       double rapt20 = 0.;
       MSG_DEBUG("About to iterate over charged FS particles");
       for (const Particle& p : fs.particles()) {
         // Get momentum and energy of each particle.
         const Vector3 mom3 = p.p3();
         const double energy = p.E();
         Evis += energy;

         // Scaled momenta.
         const double mom = mom3.mod();
         const double scaledMom = mom/meanBeamMom;
         const double logInvScaledMom = -std::log(scaledMom);
         _histLogScaledMom->fill(logInvScaledMom);
         _histScaledMom->fill(scaledMom);

         // Get momenta components w.r.t. thrust and sphericity.
         const double momT = dot(thrust.thrustAxis(), mom3);
         const double pTinS = dot(mom3, sphericity.sphericityMajorAxis());
         const double pToutS = dot(mom3, sphericity.sphericityMinorAxis());
         _histPtSIn->fill(fabs(pTinS/GeV));
         _histPtSOut->fill(fabs(pToutS/GeV));

         // Calculate rapidities w.r.t. thrust.
         const double rapidityT = 0.5 * std::log((energy + momT) / (energy - momT));
         _histRapidityT->fill(fabs(rapidityT));
         if (std::fabs(rapidityT) <= 0.5) { rapt05 += 1.0; }
         if (std::fabs(rapidityT) <= 1.0) { rapt10 += 1.0; }
         if (std::fabs(rapidityT) <= 1.5) { rapt15 += 1.0; }
         if (std::fabs(rapidityT) <= 2.0) { rapt20 += 1.0; }
       }
       _histChMult->fill(numParticles);

       _histMeanChMultRapt05->fill(_histMeanChMultRapt05->bin(0).xMid(), rapt05);
       _histMeanChMultRapt10->fill(_histMeanChMultRapt10->bin(0).xMid(), rapt10);
       _histMeanChMultRapt15->fill(_histMeanChMultRapt15->bin(0).xMid(), rapt15);
       _histMeanChMultRapt20->fill(_histMeanChMultRapt20->bin(0).xMid(), rapt20);
       _histMeanChMult->fill(_histMeanChMult->bin(0).xMid(), numParticles);

       //// Final state of unstable particles to get particle spectra
       const UnstableParticles& ufs = apply<UnstableParticles>(e, "UFS");
       for (Particles::const_iterator p = ufs.particles().begin(); p != ufs.particles().end(); ++p) {
         const Vector3 mom3 = p->momentum().p3();
         int id = abs(p->pid());
         const double mom = mom3.mod();
         const double energy = p->momentum().E();
         const double scaledMom = mom/meanBeamMom;
         const double scaledEnergy = energy/meanBeamMom;  // meanBeamMom is approximately beam energy
         switch (id) {
         case 22:
           _histMultiPhoton->fill(-1.*std::log(scaledMom));
           break;
         case -321:
         case 321:
           _histMultiKPlus->fill(scaledMom);
           break;
         case 211:
         case -211:
           _histMultiPiPlus->fill(scaledMom);
           break;
         case 2212:
         case -2212:
           _histMultiP->fill(scaledMom);
           break;
         case 111:
           _histMultiPi0->fill(scaledMom);
           _histMeanMultiPi0->fill(_histMeanMultiPi0->bin(0).xMid());
           break;
         case 221:
           if (scaledMom >= 0.1) {
             _histMultiEta->fill(scaledEnergy);
             _histMeanMultiEta->fill(_histMeanMultiEta->bin(0).xMid());
           }
           break;
         case 331:
           if (scaledMom >= 0.1) {
             _histMultiEtaPrime->fill(scaledEnergy);
             _histMeanMultiEtaPrime->fill(_histMeanMultiEtaPrime->bin(0).xMid());
           }
           break;
         case 130: // K_L
         case 310: // K_S
           _histMultiK0->fill(scaledMom);
           _histMeanMultiK0->fill(_histMeanMultiK0->bin(0).xMid());
           break;
         case 113:
           _histMultiRho->fill(scaledMom);
           _histMeanMultiRho->fill(_histMeanMultiRho->bin(0).xMid());
           break;
         case 223:
           _histMultiOmega782->fill(scaledMom);
           _histMeanMultiOmega782->fill(_histMeanMultiOmega782->bin(0).xMid());
           break;
         case 333:
           _histMultiPhi->fill(scaledMom);
           _histMeanMultiPhi->fill(_histMeanMultiPhi->bin(0).xMid());
           break;
         case 313:
         case -313:
           _histMultiKStar892_0->fill(scaledMom);
           _histMeanMultiKStar892_0->fill(_histMeanMultiKStar892_0->bin(0).xMid());
           break;
         case 323:
         case -323:
           _histMultiKStar892Plus->fill(scaledEnergy);
           _histMeanMultiKStar892Plus->fill(_histMeanMultiKStar892Plus->bin(0).xMid());
           break;
         case 3122:
         case -3122:
           _histMultiLambda0->fill(scaledMom);
           _histMeanMultiLambda0->fill(_histMeanMultiLambda0->bin(0).xMid());
           break;
         case 3212:
         case -3212:
           _histMeanMultiSigma0->fill(_histMeanMultiSigma0->bin(0).xMid());
           break;
         case 3312:
         case -3312:
           _histMultiXiMinus->fill(scaledEnergy);
           _histMeanMultiXiMinus->fill(_histMeanMultiXiMinus->bin(0).xMid());
           break;
         case 3114:
         case -3114:
         case 3224:
         case -3224:
           _histMultiSigma1385Plus->fill(scaledEnergy);
           _histMeanMultiSigma1385Plus->fill(_histMeanMultiSigma1385Plus->bin(0).xMid());
           break;
         case 3324:
         case -3324:
           _histMultiXi1530_0->fill(scaledEnergy);
           _histMeanMultiXi1530_0->fill(_histMeanMultiXi1530_0->bin(0).xMid());
           break;
         case 3334:
           _histMeanMultiOmegaOmegaBar->fill(_histMeanMultiOmegaOmegaBar->bin(0).xMid());
           break;
         }
       }
     }
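    // Note: the fills at bin(0).xMid() above exploit single-bin "histograms":
    // filling the lone bin at its centre with weight w accumulates sum(w), so
    // after the 1/sumOfWeights() scaling in finalize() the bin height becomes
    // the per-event mean. A minimal sketch of the idiom (hypothetical
    // histogram h, not part of this analysis):
    //
    //   Histo1DPtr h;                            // booked with a single bin
    //   h->fill(h->bin(0).xMid(), nPerEvent);    // accumulate per-event count
    //   // in finalize(): scale(h, 1.0/sumOfWeights());  -> mean per event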
     /// Finalize
     void finalize() {
       // Normalize inclusive single particle distributions to the average number
       // of charged particles per event.
       const double avgNumParts = _weightedTotalPartNum->sumW() / sumOfWeights();

       normalize(_histPtSIn, avgNumParts);
       normalize(_histPtSOut, avgNumParts);
       normalize(_histRapidityT, avgNumParts);
       normalize(_histY3);
       normalize(_histLogScaledMom, avgNumParts);
       normalize(_histScaledMom, avgNumParts);

       // particle spectra
       scale(_histMultiPiPlus        ,1./sumOfWeights());
       scale(_histMultiKPlus         ,1./sumOfWeights());
       scale(_histMultiP             ,1./sumOfWeights());
       scale(_histMultiPhoton        ,1./sumOfWeights());
       scale(_histMultiPi0           ,1./sumOfWeights());
       scale(_histMultiEta           ,1./sumOfWeights());
       scale(_histMultiEtaPrime      ,1./sumOfWeights());
       scale(_histMultiK0            ,1./sumOfWeights());
       scale(_histMultiLambda0       ,1./sumOfWeights());
       scale(_histMultiXiMinus       ,1./sumOfWeights());
       scale(_histMultiSigma1385Plus ,1./sumOfWeights());
       scale(_histMultiXi1530_0      ,1./sumOfWeights());
       scale(_histMultiRho           ,1./sumOfWeights());
       scale(_histMultiOmega782      ,1./sumOfWeights());
       scale(_histMultiKStar892_0    ,1./sumOfWeights());
       scale(_histMultiPhi           ,1./sumOfWeights());
       scale(_histMultiKStar892Plus  ,1./sumOfWeights());

       // event shape
       normalize(_hist1MinusT);
       normalize(_histTMinor);
       normalize(_histOblateness);
       normalize(_histSphericity);
       normalize(_histAplanarity);
       normalize(_histHeavyJetMass);
       normalize(_histCParam);

       // mean multiplicities
       scale(_histChMult              , 2.0/sumOfWeights()); // taking into account the binwidth of 2
       scale(_histMeanChMult          , 1.0/sumOfWeights());
       scale(_histMeanChMultRapt05    , 1.0/sumOfWeights());
       scale(_histMeanChMultRapt10    , 1.0/sumOfWeights());
       scale(_histMeanChMultRapt15    , 1.0/sumOfWeights());
       scale(_histMeanChMultRapt20    , 1.0/sumOfWeights());

       scale(_histMeanMultiPi0          , 1.0/sumOfWeights());
       scale(_histMeanMultiEta          , 1.0/sumOfWeights());
       scale(_histMeanMultiEtaPrime     , 1.0/sumOfWeights());
       scale(_histMeanMultiK0           , 1.0/sumOfWeights());
       scale(_histMeanMultiRho          , 1.0/sumOfWeights());
       scale(_histMeanMultiOmega782     , 1.0/sumOfWeights());
       scale(_histMeanMultiPhi          , 1.0/sumOfWeights());
       scale(_histMeanMultiKStar892Plus , 1.0/sumOfWeights());
       scale(_histMeanMultiKStar892_0   , 1.0/sumOfWeights());
       scale(_histMeanMultiLambda0      , 1.0/sumOfWeights());
       scale(_histMeanMultiSigma0       , 1.0/sumOfWeights());
       scale(_histMeanMultiXiMinus      , 1.0/sumOfWeights());
       scale(_histMeanMultiSigma1385Plus, 1.0/sumOfWeights());
       scale(_histMeanMultiXi1530_0     , 1.0/sumOfWeights());
       scale(_histMeanMultiOmegaOmegaBar, 1.0/sumOfWeights());
     }

     //@}
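    // Note on the two normalisation idioms used in finalize() above: in
    // Rivet, scale(h, k) multiplies every bin of h by the constant k, while
    // normalize(h, N) rescales h so that its integral equals N (N defaults
    // to 1). Hence the per-event spectra use scale(h, 1./sumOfWeights()),
    // and the unit-area event-shape distributions use normalize(h).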
   private:

     /// Store the weighted sums of numbers of charged / charged+neutral
     /// particles - used to calculate average number of particles for the
     /// inclusive single particle distributions' normalisations.
     CounterPtr _weightedTotalPartNum;

     /// @name Histograms
     //@{
     Histo1DPtr _histSphericity;
     Histo1DPtr _histAplanarity;
     Histo1DPtr _hist1MinusT;
     Histo1DPtr _histTMinor;
     Histo1DPtr _histY3;
     Histo1DPtr _histHeavyJetMass;
     Histo1DPtr _histCParam;
     Histo1DPtr _histOblateness;
     Histo1DPtr _histScaledMom;
     Histo1DPtr _histRapidityT;
     Histo1DPtr _histPtSIn;
     Histo1DPtr _histPtSOut;
     Histo1DPtr _histJetRate2Durham;
     Histo1DPtr _histJetRate3Durham;
     Histo1DPtr _histJetRate4Durham;
     Histo1DPtr _histJetRate5Durham;
     Histo1DPtr _histLogScaledMom;
     Histo1DPtr _histChMult;

     Histo1DPtr _histMultiPiPlus;
     Histo1DPtr _histMultiKPlus;
     Histo1DPtr _histMultiP;
     Histo1DPtr _histMultiPhoton;
     Histo1DPtr _histMultiPi0;
     Histo1DPtr _histMultiEta;
     Histo1DPtr _histMultiEtaPrime;
     Histo1DPtr _histMultiK0;
     Histo1DPtr _histMultiLambda0;
     Histo1DPtr _histMultiXiMinus;
     Histo1DPtr _histMultiSigma1385Plus;
     Histo1DPtr _histMultiXi1530_0;
     Histo1DPtr _histMultiRho;
     Histo1DPtr _histMultiOmega782;
     Histo1DPtr _histMultiKStar892_0;
     Histo1DPtr _histMultiPhi;
     Histo1DPtr _histMultiKStar892Plus;

     // mean multiplicities
     Histo1DPtr _histMeanChMult;
     Histo1DPtr _histMeanChMultRapt05;
     Histo1DPtr _histMeanChMultRapt10;
     Histo1DPtr _histMeanChMultRapt15;
     Histo1DPtr _histMeanChMultRapt20;

     Histo1DPtr _histMeanMultiPi0;
     Histo1DPtr _histMeanMultiEta;
     Histo1DPtr _histMeanMultiEtaPrime;
     Histo1DPtr _histMeanMultiK0;
     Histo1DPtr _histMeanMultiRho;
     Histo1DPtr _histMeanMultiOmega782;
     Histo1DPtr _histMeanMultiPhi;
     Histo1DPtr _histMeanMultiKStar892Plus;
     Histo1DPtr _histMeanMultiKStar892_0;
     Histo1DPtr _histMeanMultiLambda0;
     Histo1DPtr _histMeanMultiSigma0;
     Histo1DPtr _histMeanMultiXiMinus;
     Histo1DPtr _histMeanMultiSigma1385Plus;
     Histo1DPtr _histMeanMultiXi1530_0;
     Histo1DPtr _histMeanMultiOmegaOmegaBar;
     //@}

   };

   // The hook for the plugin system
   DECLARE_RIVET_PLUGIN(ALEPH_1996_S3486095);

 }
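Both _histLogScaledMom above and _h_xi in L3_2004_I652683 below histogram the log of the scaled momentum; in the usual LEP notation (standard definition, stated here for orientation),

    \xi = -\ln x_p, \qquad x_p = \frac{p}{p_\mathrm{beam}},

which the code evaluates as -std::log(mom/meanBeamMom), so soft particles (small x_p) sit at large \xi.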
const FinalState& cfs = apply(event, "CFS"); if (cfs.size() < 2) vetoEvent; int flavour = 0; const InitialQuarks& iqf = apply(event, "IQF"); // If we only have two quarks (qqbar), just take the flavour. // If we have more than two quarks, look for the highest energetic q-qbar pair. if (iqf.particles().size() == 2) { flavour = iqf.particles().front().abspid(); } else { map quarkmap; for (const Particle& p : iqf.particles()) { if (quarkmap[p.pid()] < p.E()) { quarkmap[p.pid()] = p.E(); } } double maxenergy = 0.; for (int i = 1; i <= 5; ++i) { if (quarkmap[i]+quarkmap[-i] > maxenergy) { flavour = i; } } } const size_t numParticles = cfs.particles().size(); switch (flavour) { case 1: case 2: case 3: _wLight->fill(); _cLight->fill(numParticles); break; case 4: _wCharm->fill(); _cCharm->fill(numParticles); break; case 5: _wBottom->fill(); _cBottom->fill(numParticles); break; } } void finalize() { // calculate the averages and diffs if(_wLight->val() != 0.) scale(_cLight, 1./(*_wLight)); if(_wCharm->val() != 0.) scale(_cCharm, 1./(*_wCharm)); if(_wBottom->val() != 0.) scale(_cBottom, 1./(*_wBottom)); Counter _cDiff = *_cBottom - *_cLight; // fill the histograms for (unsigned int ix=1; ix < 5; ++ix) { double val(0.), err(0.0); if(ix==1) { val = _cBottom->val(); err = _cBottom->err(); } else if(ix==2) { val = _cCharm->val(); err = _cCharm->err(); } else if(ix==3) { val = _cLight->val(); err = _cLight->err(); } else if(ix==4) { val = _cDiff.val(); err = _cDiff.err(); } Scatter2D temphisto(refData(1, 1, ix)); for (size_t b = 0; b < temphisto.numPoints(); b++) { const double x = temphisto.point(b).x(); pair ex = temphisto.point(b).xErrs(); pair ex2 = ex; if(ex2.first ==0.) ex2. first=0.0001; if(ex2.second==0.) ex2.second=0.0001; if (inRange(sqrtS()/GeV, x-ex2.first, x+ex2.second)) { _mult[ix-1]->addPoint(x, val, ex, make_pair(err,err)); } else { _mult[ix-1]->addPoint(x, 0., ex, make_pair(0.,.0)); } } } } //@} private: vector _mult; /// @name Multiplicities //@{ CounterPtr _cLight; CounterPtr _cCharm; CounterPtr _cBottom; //@} /// @name Weights //@{ CounterPtr _wLight; CounterPtr _wCharm; CounterPtr _wBottom; //@} }; // The hook for the plugin system DECLARE_RIVET_PLUGIN(DELPHI_2000_S4328825); } diff --git a/analyses/pluginLEP/L3_2004_I652683.cc b/analyses/pluginLEP/L3_2004_I652683.cc --- a/analyses/pluginLEP/L3_2004_I652683.cc +++ b/analyses/pluginLEP/L3_2004_I652683.cc @@ -1,212 +1,404 @@ // -*- C++ -*- #include "Rivet/Analysis.hh" #include "Rivet/Projections/Beam.hh" #include "Rivet/Projections/FinalState.hh" #include "Rivet/Projections/ChargedFinalState.hh" +#include "Rivet/Projections/InitialQuarks.hh" #include "Rivet/Projections/Thrust.hh" #include "Rivet/Projections/ParisiTensor.hh" #include "Rivet/Projections/Hemispheres.hh" -#define I_KNOW_THE_INITIAL_QUARKS_PROJECTION_IS_DODGY_BUT_NEED_TO_USE_IT -#include "Rivet/Projections/InitialQuarks.hh" - namespace Rivet { /// Jet rates and event shapes at LEP I+II class L3_2004_I652683 : public Analysis { public: /// Constructor DEFAULT_RIVET_ANALYSIS_CTOR(L3_2004_I652683); - // L3_2004_I652683() : Analysis("L3_2004_I652683") - // { } - /// Book histograms and initialise projections before the run void init() { // Projections to use const FinalState FS; declare(FS, "FS"); declare(Beam(), "beams"); const ChargedFinalState CFS; declare(CFS, "CFS"); const Thrust thrust(FS); declare(thrust, "thrust"); declare(ParisiTensor(FS), "Parisi"); declare(Hemispheres(thrust), "Hemispheres"); declare(InitialQuarks(), "initialquarks"); // Book the histograms - 
diff --git a/analyses/pluginLEP/L3_2004_I652683.cc b/analyses/pluginLEP/L3_2004_I652683.cc
--- a/analyses/pluginLEP/L3_2004_I652683.cc
+++ b/analyses/pluginLEP/L3_2004_I652683.cc
@@ -1,212 +1,404 @@
 // -*- C++ -*-
 #include "Rivet/Analysis.hh"
 #include "Rivet/Projections/Beam.hh"
 #include "Rivet/Projections/FinalState.hh"
 #include "Rivet/Projections/ChargedFinalState.hh"
+#include "Rivet/Projections/InitialQuarks.hh"
 #include "Rivet/Projections/Thrust.hh"
 #include "Rivet/Projections/ParisiTensor.hh"
 #include "Rivet/Projections/Hemispheres.hh"

-#define I_KNOW_THE_INITIAL_QUARKS_PROJECTION_IS_DODGY_BUT_NEED_TO_USE_IT
-#include "Rivet/Projections/InitialQuarks.hh"
-
 namespace Rivet {

   /// Jet rates and event shapes at LEP I+II
   class L3_2004_I652683 : public Analysis {
   public:

     /// Constructor
     DEFAULT_RIVET_ANALYSIS_CTOR(L3_2004_I652683);

-    // L3_2004_I652683() : Analysis("L3_2004_I652683")
-    // { }
-
     /// Book histograms and initialise projections before the run
     void init() {
       // Projections to use
       const FinalState FS;
       declare(FS, "FS");
       declare(Beam(), "beams");
       const ChargedFinalState CFS;
       declare(CFS, "CFS");
       const Thrust thrust(FS);
       declare(thrust, "thrust");
       declare(ParisiTensor(FS), "Parisi");
       declare(Hemispheres(thrust), "Hemispheres");
       declare(InitialQuarks(), "initialquarks");

       // Book the histograms
-      book(_h_Thrust_udsc          , 47, 1, 1);
-      book(_h_Thrust_bottom        , 47, 1, 2);
-      book(_h_heavyJetmass_udsc    , 48, 1, 1);
-      book(_h_heavyJetmass_bottom  , 48, 1, 2);
-      book(_h_totalJetbroad_udsc   , 49, 1, 1);
-      book(_h_totalJetbroad_bottom , 49, 1, 2);
-      book(_h_wideJetbroad_udsc    , 50, 1, 1);
-      book(_h_wideJetbroad_bottom  , 50, 1, 2);
-      book(_h_Cparameter_udsc      , 51, 1, 1);
-      book(_h_Cparameter_bottom    , 51, 1, 2);
-      book(_h_Dparameter_udsc      , 52, 1, 1);
-      book(_h_Dparameter_bottom    , 52, 1, 2);
-      book(_h_Ncharged             , 59, 1, 1);
-      book(_h_Ncharged_udsc        , 59, 1, 2);
-      book(_h_Ncharged_bottom      , 59, 1, 3);
-      book(_h_scaledMomentum       , 65, 1, 1);
-      book(_h_scaledMomentum_udsc  , 65, 1, 2);
-      book(_h_scaledMomentum_bottom, 65, 1, 3);
+      if (fuzzyEquals(sqrtS()/GeV, 91.2, 1e-3)) {
+        // Z pole
+        book(_h_Thrust_udsc           , 47, 1, 1);
+        book(_h_Thrust_bottom         , 47, 1, 2);
+        book(_h_heavyJetmass_udsc     , 48, 1, 1);
+        book(_h_heavyJetmass_bottom   , 48, 1, 2);
+        book(_h_totalJetbroad_udsc    , 49, 1, 1);
+        book(_h_totalJetbroad_bottom  , 49, 1, 2);
+        book(_h_wideJetbroad_udsc     , 50, 1, 1);
+        book(_h_wideJetbroad_bottom   , 50, 1, 2);
+        book(_h_Cparameter_udsc       , 51, 1, 1);
+        book(_h_Cparameter_bottom     , 51, 1, 2);
+        book(_h_Dparameter_udsc       , 52, 1, 1);
+        book(_h_Dparameter_bottom     , 52, 1, 2);
+        book(_h_Ncharged              , "/TMP/NCHARGED"     , 28, 1, 57);
+        book(_h_Ncharged_udsc         , "/TMP/NCHARGED_UDSC", 28, 1, 57);
+        book(_h_Ncharged_bottom       , "/TMP/NCHARGED_B"   , 27, 3, 57);
+        book(_h_scaledMomentum        , 65, 1, 1);
+        book(_h_scaledMomentum_udsc   , 65, 1, 2);
+        book(_h_scaledMomentum_bottom , 65, 1, 3);
+      }
+      else if (sqrtS()/GeV < 90) {
+        int i1(-1), i2(-1);
+        if      (fuzzyEquals(sqrtS()/GeV, 41.4, 1e-2)) { i1 = 0; i2 = 1; }
+        else if (fuzzyEquals(sqrtS()/GeV, 55.3, 1e-2)) { i1 = 0; i2 = 2; }
+        else if (fuzzyEquals(sqrtS()/GeV, 65.4, 1e-2)) { i1 = 0; i2 = 3; }
+        else if (fuzzyEquals(sqrtS()/GeV, 75.7, 1e-2)) { i1 = 1; i2 = 1; }
+        else if (fuzzyEquals(sqrtS()/GeV, 82.3, 1e-2)) { i1 = 1; i2 = 2; }
+        else if (fuzzyEquals(sqrtS()/GeV, 85.1, 1e-2)) { i1 = 1; i2 = 3; }
+        else
+          MSG_ERROR("Beam energy not supported!");
+        book(_h_thrust, 21+i1, 1, i2);
+        book(_h_rho   , 26+i1, 1, i2);
+        book(_h_B_T   , 31+i1, 1, i2);
+        book(_h_B_W   , 36+i1, 1, i2);
+      }
+      else if (sqrtS()/GeV > 120) {
+        int i1(-1), i2(-1);
+        if      (fuzzyEquals(sqrtS()/GeV, 130.1, 1e-2)) { i1 = 0; i2 = 1; }
+        else if (fuzzyEquals(sqrtS()/GeV, 136.1, 1e-2)) { i1 = 0; i2 = 2; }
+        else if (fuzzyEquals(sqrtS()/GeV, 161.3, 1e-2)) { i1 = 0; i2 = 3; }
+        else if (fuzzyEquals(sqrtS()/GeV, 172.3, 1e-2)) { i1 = 1; i2 = 1; }
+        else if (fuzzyEquals(sqrtS()/GeV, 182.8, 1e-2)) { i1 = 1; i2 = 2; }
+        else if (fuzzyEquals(sqrtS()/GeV, 188.6, 1e-2)) { i1 = 1; i2 = 3; }
+        else if (fuzzyEquals(sqrtS()/GeV, 194.4, 1e-2)) { i1 = 2; i2 = 1; }
+        else if (fuzzyEquals(sqrtS()/GeV, 200.2, 1e-2)) { i1 = 2; i2 = 2; }
+        else if (fuzzyEquals(sqrtS()/GeV, 206.2, 1e-2)) { i1 = 2; i2 = 3; }
+        else
+          MSG_ERROR("Beam energy not supported!");
+        book(_h_thrust, 23+i1, 1, i2);
+        book(_h_rho   , 28+i1, 1, i2);
+        book(_h_B_T   , 33+i1, 1, i2);
+        book(_h_B_W   , 38+i1, 1, i2);
+        book(_h_C     , 41+i1, 1, i2);
+        book(_h_D     , 44+i1, 1, i2);
+        book(_h_N     , "/TMP/NCHARGED", 22, 9, 53);
+        book(_h_xi    , 66+i1, 1, i2);
+        // todo add the jets
+        // int i3 = 3*i1+i2;
+        // _h_y_2_JADE = bookHisto1D( i3,1,1);
+        // _h_y_3_JADE = bookHisto1D( i3,1,2);
+        // _h_y_4_JADE = bookHisto1D( i3,1,3);
+        // _h_y_5_JADE = bookHisto1D( i3,1,4);
+        // _h_y_2_Durham = bookHisto1D( 9+i3,1,1);
+        // _h_y_3_Durham = bookHisto1D( 9+i3,1,2);
+        // _h_y_4_Durham = bookHisto1D( 9+i3,1,3);
+        // _h_y_5_Durham = bookHisto1D( 9+i3,1,4);
+        // if(i3==8||i3==9) {
+        //   _h_y_2_Cambridge = bookHisto1D(10+i3,1,1);
+        //   _h_y_3_Cambridge = bookHisto1D(10+i3,1,2);
+        //   _h_y_4_Cambridge = bookHisto1D(10+i3,1,3);
+        //   _h_y_5_Cambridge = bookHisto1D(10+i3,1,4);
+        // }
+      }

       book(_sumW_udsc, "_sumW_udsc");
       book(_sumW_b, "_sumW_b");
       book(_sumW_ch, "_sumW_ch");
       book(_sumW_ch_udsc, "_sumW_ch_udsc");
       book(_sumW_ch_b, "_sumW_ch_b");
     }
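    // Note: the fuzzyEquals ladders above could equally be driven by a small
    // lookup table; a sketch of the alternative (hypothetical, not used by
    // this analysis):
    //
    //   static const map<double, pair<int,int>> offsets = {
    //     {130.1, {0,1}}, {136.1, {0,2}}, {161.3, {0,3}},
    //     {172.3, {1,1}}, {182.8, {1,2}}, {188.6, {1,3}},
    //     {194.4, {2,1}}, {200.2, {2,2}}, {206.2, {2,3}} };
    //   for (const auto& kv : offsets)
    //     if (fuzzyEquals(sqrtS()/GeV, kv.first, 1e-2)) tie(i1, i2) = kv.second;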
     /// Perform the per-event analysis
     void analyze(const Event& event) {
-
       // Get beam average momentum
       const ParticlePair& beams = apply<Beam>(event, "beams").beams();
       const double beamMomentum = ( beams.first.p3().mod() + beams.second.p3().mod() ) / 2.0;

       // InitialQuarks projection to have udsc events separated from b events
       /// @todo Yuck!!! Eliminate when possible...
-      int flavour = 0;
-      const InitialQuarks& iqf = apply<InitialQuarks>(event, "initialquarks");
-      Particles quarks;
-      if ( iqf.particles().size() == 2 ) {
-        flavour = iqf.particles().front().abspid();
-        quarks = iqf.particles();
-      } else {
-        map<int, Particle> quarkmap;
-        for (const Particle& p : iqf.particles()) {
-          if (quarkmap.find(p.pid()) == quarkmap.end()) quarkmap[p.pid()] = p;
-          else if (quarkmap[p.pid()].E() < p.E()) quarkmap[p.pid()] = p;
-        }
-        double max_energy = 0.;
-        for (int i = 1; i <= 5; ++i) {
-          double energy = 0.;
-          if (quarkmap.find(i) != quarkmap.end())
-            energy += quarkmap[ i].E();
-          if (quarkmap.find(-i) != quarkmap.end())
-            energy += quarkmap[-i].E();
-          if (energy > max_energy)
-            flavour = i;
-        }
-        if (quarkmap.find(flavour) != quarkmap.end())
-          quarks.push_back(quarkmap[flavour]);
-        if (quarkmap.find(-flavour) != quarkmap.end())
-          quarks.push_back(quarkmap[-flavour]);
+      int iflav = 0;
+      // only need the flavour at the Z pole
+      if (_h_Thrust_udsc) {
+        int flavour = 0;
+        const InitialQuarks& iqf = apply<InitialQuarks>(event, "initialquarks");
+        Particles quarks;
+        if ( iqf.particles().size() == 2 ) {
+          flavour = iqf.particles().front().abspid();
+          quarks = iqf.particles();
+        } else {
+          map<int, Particle> quarkmap;
+          for (const Particle& p : iqf.particles()) {
+            if (quarkmap.find(p.pid()) == quarkmap.end()) quarkmap[p.pid()] = p;
+            else if (quarkmap[p.pid()].E() < p.E()) quarkmap[p.pid()] = p;
+          }
+          double max_energy = 0.;
+          for (int i = 1; i <= 5; ++i) {
+            double energy = 0.;
+            if (quarkmap.find(i) != quarkmap.end())
+              energy += quarkmap[ i].E();
+            if (quarkmap.find(-i) != quarkmap.end())
+              energy += quarkmap[-i].E();
+            if (energy > max_energy) {
+              max_energy = energy; // track the running maximum
+              flavour = i;
+            }
+          }
+          if (quarkmap.find(flavour) != quarkmap.end())
+            quarks.push_back(quarkmap[flavour]);
+          if (quarkmap.find(-flavour) != quarkmap.end())
+            quarks.push_back(quarkmap[-flavour]);
+        }
+        // Flavour label
+        /// @todo Change to a bool?
+        iflav = (flavour == PID::DQUARK || flavour == PID::UQUARK || flavour == PID::SQUARK || flavour == PID::CQUARK) ? 1 : (flavour == PID::BQUARK) ? 5 : 0;
       }
-
-      // Flavour label
-      /// @todo Change to a bool?
-      const int iflav = (flavour == PID::DQUARK || flavour == PID::UQUARK || flavour == PID::SQUARK || flavour == PID::CQUARK) ? 1 : (flavour == PID::BQUARK) ? 5 : 0;
-
       // Update weight sums
       if (iflav == 1) {
         _sumW_udsc->fill();
       } else if (iflav == 5) {
         _sumW_b->fill();
       }
       _sumW_ch->fill();

       // Charged multiplicity
       const FinalState& cfs = applyProjection<FinalState>(event, "CFS");
-      _h_Ncharged->fill(cfs.size());
+      if (_h_Ncharged) _h_Ncharged->fill(cfs.size());
       if (iflav == 1) {
         _sumW_ch_udsc->fill();
         _h_Ncharged_udsc->fill(cfs.size());
       } else if (iflav == 5) {
         _sumW_ch_b->fill();
         _h_Ncharged_bottom->fill(cfs.size());
       }
+      else if (_h_N) {
+        _h_N->fill(cfs.size());
+      }

       // Scaled momentum
       const Particles& chparticles = cfs.particlesByPt();
       for (const Particle& p : chparticles) {
         const Vector3 momentum3 = p.p3();
         const double mom = momentum3.mod();
         const double scaledMom = mom/beamMomentum;
         const double logScaledMom = std::log(scaledMom);
-        _h_scaledMomentum->fill(-logScaledMom);
+        if (_h_scaledMomentum) _h_scaledMomentum->fill(-logScaledMom);
         if (iflav == 1) {
           _h_scaledMomentum_udsc->fill(-logScaledMom);
         } else if (iflav == 5) {
           _h_scaledMomentum_bottom->fill(-logScaledMom);
         }
+        else if (_h_xi) {
+          _h_xi->fill(-logScaledMom);
+        }
       }

       // Thrust
       const Thrust& thrust = applyProjection<Thrust>(event, "thrust");
       if (iflav == 1) {
         _h_Thrust_udsc->fill(thrust.thrust());
       } else if (iflav == 5) {
         _h_Thrust_bottom->fill(thrust.thrust());
       }
+      else if (_h_thrust) {
+        _h_thrust->fill(1.-thrust.thrust());
+      }

       // C and D Parisi parameters
       const ParisiTensor& parisi = applyProjection<ParisiTensor>(event, "Parisi");
       if (iflav == 1) {
         _h_Cparameter_udsc->fill(parisi.C());
         _h_Dparameter_udsc->fill(parisi.D());
       } else if (iflav == 5) {
         _h_Cparameter_bottom->fill(parisi.C());
         _h_Dparameter_bottom->fill(parisi.D());
       }
+      else if (_h_C) {
+        _h_C->fill(parisi.C());
+        _h_D->fill(parisi.D());
+      }

       // The hemisphere variables
       const Hemispheres& hemisphere = applyProjection<Hemispheres>(event, "Hemispheres");
       if (iflav == 1) {
         _h_heavyJetmass_udsc->fill(hemisphere.scaledM2high());
         _h_totalJetbroad_udsc->fill(hemisphere.Bsum());
         _h_wideJetbroad_udsc->fill(hemisphere.Bmax());
       } else if (iflav == 5) {
         _h_heavyJetmass_bottom->fill(hemisphere.scaledM2high());
         _h_totalJetbroad_bottom->fill(hemisphere.Bsum());
         _h_wideJetbroad_bottom->fill(hemisphere.Bmax());
       }
+      else if (_h_rho) {
+        _h_rho->fill(hemisphere.scaledM2high());
+        _h_B_T->fill(hemisphere.Bsum());
+        _h_B_W->fill(hemisphere.Bmax());
+      }
     }

+    Scatter2DPtr convertHisto(unsigned int ix, unsigned int iy, unsigned int iz, Histo1DPtr histo) {
+      Scatter2D temphisto(refData(ix, iy, iz));
+      Scatter2DPtr mult;
+      book(mult, ix, iy, iz);
+      for (size_t b = 0; b < temphisto.numPoints(); b++) {
+        const double x = temphisto.point(b).x();
+        pair<double,double> ex = temphisto.point(b).xErrs();
+        double y    = histo->bins()[b].area();
+        double yerr = histo->bins()[b].areaErr();
+        mult->addPoint(x, y, ex, make_pair(yerr, yerr));
+      }
+      return mult;
+    }

     /// Normalise histograms etc., after the run
     void finalize() {
-      scale({_h_Thrust_udsc, _h_heavyJetmass_udsc, _h_totalJetbroad_udsc,
-             _h_wideJetbroad_udsc, _h_Cparameter_udsc, _h_Dparameter_udsc}, 1/ *_sumW_udsc);
-      scale({_h_Thrust_bottom, _h_heavyJetmass_bottom, _h_totalJetbroad_bottom,
-             _h_wideJetbroad_bottom, _h_Cparameter_bottom, _h_Dparameter_bottom}, 1./ *_sumW_b);
-      scale(_h_Ncharged, 2/ *_sumW_ch);
-      scale(_h_Ncharged_udsc, 2/ *_sumW_ch_udsc);
-      scale(_h_Ncharged_bottom, 2/ *_sumW_ch_b);
-      scale(_h_scaledMomentum, 1/ *_sumW_ch);
-      scale(_h_scaledMomentum_udsc, 1/ *_sumW_ch_udsc);
-      scale(_h_scaledMomentum_bottom, 1/ *_sumW_ch_b);
+      // Z pole plots
+      if (_h_Thrust_udsc) {
+        scale({_h_Thrust_udsc, _h_heavyJetmass_udsc, _h_totalJetbroad_udsc,
+               _h_wideJetbroad_udsc, _h_Cparameter_udsc, _h_Dparameter_udsc}, 1/_sumW_udsc->sumW());
+        scale({_h_Thrust_bottom, _h_heavyJetmass_bottom, _h_totalJetbroad_bottom,
+               _h_wideJetbroad_bottom, _h_Cparameter_bottom, _h_Dparameter_bottom}, 1./_sumW_b->sumW());
+        scale(_h_Ncharged,        1./_sumW_ch->sumW());
+        scale(_h_Ncharged_udsc,   1./_sumW_ch_udsc->sumW());
+        scale(_h_Ncharged_bottom, 1./_sumW_ch_b->sumW());
+        convertHisto(59, 1, 1, _h_Ncharged);
+        convertHisto(59, 1, 2, _h_Ncharged_udsc);
+        convertHisto(59, 1, 3, _h_Ncharged_bottom);
+        scale(_h_scaledMomentum,        1/_sumW_ch->sumW());
+        scale(_h_scaledMomentum_udsc,   1/_sumW_ch_udsc->sumW());
+        scale(_h_scaledMomentum_bottom, 1/_sumW_ch_b->sumW());
+      }
+      else {
+        if (_h_thrust) normalize(_h_thrust);
+        if (_h_rho)    normalize(_h_rho);
+        if (_h_B_T)    normalize(_h_B_T);
+        if (_h_B_W)    normalize(_h_B_W);
+        if (_h_C)      normalize(_h_C);
+        if (_h_D)      normalize(_h_D);
+        if (_h_N)      normalize(_h_N);
+        if (_h_xi)     scale(_h_xi, 1./sumOfWeights());
+
+        if (_h_N) {
+          if      (fuzzyEquals(sqrtS()/GeV, 130.1, 1e-2)) { convertHisto(60, 1, 1, _h_N); }
+          else if (fuzzyEquals(sqrtS()/GeV, 136.1, 1e-2)) { convertHisto(60, 1, 2, _h_N); }
+          else if (fuzzyEquals(sqrtS()/GeV, 161.3, 1e-2)) { convertHisto(60, 1, 3, _h_N); }
+          else if (fuzzyEquals(sqrtS()/GeV, 172.3, 1e-2)) { convertHisto(61, 1, 1, _h_N); }
+          else if (fuzzyEquals(sqrtS()/GeV, 182.8, 1e-2)) { convertHisto(61, 1, 2, _h_N); }
+          else if (fuzzyEquals(sqrtS()/GeV, 188.6, 1e-2)) { convertHisto(61, 1, 3, _h_N); }
+          else if (fuzzyEquals(sqrtS()/GeV, 194.4, 1e-2)) { convertHisto(62, 1, 1, _h_N); }
+          else if (fuzzyEquals(sqrtS()/GeV, 200.2, 1e-2)) { convertHisto(62, 1, 2, _h_N); }
+          else if (fuzzyEquals(sqrtS()/GeV, 206.2, 1e-2)) { convertHisto(62, 1, 3, _h_N); }
+        }
+        // todo add the jets
+        // Histo1DPtr _h_y_2_JADE,_h_y_3_JADE,_h_y_4_JADE,_h_y_5_JADE;
+        // Histo1DPtr _h_y_2_Durham,_h_y_3_Durham,_h_y_4_Durham,_h_y_5_Durham;
+        // Histo1DPtr _h_y_2_Cambridge,_h_y_3_Cambridge,_h_y_4_Cambridge,_h_y_5_Cambridge;
+      }
     }

     /// Weight counters
     CounterPtr _sumW_udsc, _sumW_b, _sumW_ch, _sumW_ch_udsc, _sumW_ch_b;

     /// @name Histograms
     //@{
+    // at the Z pole
     Histo1DPtr _h_Thrust_udsc, _h_Thrust_bottom;
     Histo1DPtr _h_heavyJetmass_udsc, _h_heavyJetmass_bottom;
     Histo1DPtr _h_totalJetbroad_udsc, _h_totalJetbroad_bottom;
     Histo1DPtr _h_wideJetbroad_udsc, _h_wideJetbroad_bottom;
     Histo1DPtr _h_Cparameter_udsc, _h_Cparameter_bottom;
     Histo1DPtr _h_Dparameter_udsc, _h_Dparameter_bottom;
     Histo1DPtr _h_Ncharged, _h_Ncharged_udsc, _h_Ncharged_bottom;
     Histo1DPtr _h_scaledMomentum, _h_scaledMomentum_udsc, _h_scaledMomentum_bottom;
+    // at other energies
+    Histo1DPtr _h_thrust, _h_rho, _h_B_T, _h_B_W, _h_C, _h_D, _h_N, _h_xi;
+    // todo add the jets
+    // Histo1DPtr _h_y_2_JADE,_h_y_3_JADE,_h_y_4_JADE,_h_y_5_JADE;
+    // Histo1DPtr _h_y_2_Durham,_h_y_3_Durham,_h_y_4_Durham,_h_y_5_Durham;
+    // Histo1DPtr _h_y_2_Cambridge,_h_y_3_Cambridge,_h_y_4_Cambridge,_h_y_5_Cambridge;
     //@}

   };

   // The hook for the plugin system
   DECLARE_RIVET_PLUGIN(L3_2004_I652683);

 }
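The Hemispheres-based fills in L3_2004_I652683 (scaledM2high, Bsum, Bmax) correspond to the standard LEP event shapes; for orientation (definitions from the usual conventions, not extracted from the code):

    \rho = \frac{M_h^2}{E_\mathrm{vis}^2}, \qquad
    B_T = B_+ + B_-, \qquad
    B_W = \max(B_+, B_-), \qquad
    B_\pm = \frac{\sum_{i \in H_\pm} |\vec{p}_i \times \vec{n}_T|}{2 \sum_i |\vec{p}_i|},

where the hemispheres H_\pm are defined by the plane normal to the thrust axis \vec{n}_T and M_h is the larger of the two hemisphere invariant masses.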