diff --git a/analyses/pluginALICE/ALICE_2010_I880049.cc b/analyses/pluginALICE/ALICE_2010_I880049.cc
--- a/analyses/pluginALICE/ALICE_2010_I880049.cc
+++ b/analyses/pluginALICE/ALICE_2010_I880049.cc
@@ -1,104 +1,115 @@
 // -*- C++ -*-
 #include "Rivet/Analysis.hh"
 #include "Rivet/Projections/ChargedFinalState.hh"
 #include "Rivet/Tools/Cuts.hh"
 #include "Rivet/Projections/SingleValueProjection.hh"
 #include "Rivet/Tools/AliceCommon.hh"
 #include "Rivet/Projections/AliceCommon.hh"
 #include <fstream>
 #define _USE_MATH_DEFINES
 #include <math.h>
 
 namespace Rivet {
-
-  // Analysis
+
+  /// @brief ALICE PbPb at 2.76 TeV multiplicity at mid-rapidity
   class ALICE_2010_I880049 : public Analysis {
-
+
   public:
-
+
+    /// Constructor
     DEFAULT_RIVET_ANALYSIS_CTOR(ALICE_2010_I880049);
-
+
+    /// @name Analysis methods
+    //@{
+
     /// Book histograms and initialise projections before the run
     void init() {
-
+
      // Declare centrality projection
      declareCentrality(ALICE::V0MMultiplicity(), "ALICE_2015_PBPBCentrality", "V0M", "V0M");
-
-      // Charged final states with |eta| < 0.5 and pT > 50 MeV
-      const Cut& cut = Cuts::abseta < 0.5 && Cuts::pT > 50*MeV;
-      const ChargedFinalState cfs(cut);
-      addProjection(cfs,"CFS");
-
+
      // Trigger projections
-      declare(ChargedFinalState((Cuts::eta > 2.8 && Cuts::eta < 5.1) &&
-          Cuts::pT > 0.1*GeV), "VZERO1");
+      declare(ChargedFinalState((Cuts::eta > 2.8 && Cuts::eta < 5.1) &&
+        Cuts::pT > 0.1*GeV), "VZERO1");
      declare(ChargedFinalState((Cuts::eta > -3.7 && Cuts::eta < -1.7) &&
-          Cuts::pT > 0.1*GeV), "VZERO2");
+        Cuts::pT > 0.1*GeV), "VZERO2");
      declare(ChargedFinalState(Cuts::abseta < 1. && Cuts::pT > 0.15*GeV), "SPD");
-
-      // Primary particles
-      declare(ALICE::PrimaryParticles(Cuts::abseta < 5.6),"APRIM");
-
+
+      // Charged, primary particles with |eta| < 0.5 and pT > 50 MeV
+      declare(ALICE::PrimaryParticles(Cuts::abseta < 0.5 &&
+        Cuts::pT > 50*MeV && Cuts::abscharge > 0), "APRIM");
+
      // Histograms and variables initialization
-      _histNchVsCentr = bookProfile1D(1, 1, 1);
-      _histNpartVsCentr = bookProfile1D(1, 1, 2);
-
+      _histNchVsCentr = bookProfile1D(1, 1, 1);
+      _histNpartVsCentr = bookProfile1D(1, 1, 2);
+
    }
 
    /// Perform the per-event analysis
    void analyze(const Event& event) {
-
+
+      const double weight = event.weight();
+
+      // Charged, primary particles with at least pT = 50 MeV
+      // in eta range of |eta| < 0.5
+      Particles chargedParticles =
+        applyProjection<ALICE::PrimaryParticles>(event,"APRIM").particles();
+
      // Trigger projections
-      const ChargedFinalState& vz1 =
+      const ChargedFinalState& vz1 =
        applyProjection<ChargedFinalState>(event,"VZERO1");
-      const ChargedFinalState& vz2 =
+      const ChargedFinalState& vz2 =
        applyProjection<ChargedFinalState>(event,"VZERO2");
-      const ChargedFinalState& spd =
+      const ChargedFinalState& spd =
        applyProjection<ChargedFinalState>(event,"SPD");
      int fwdTrig = (vz1.particles().size() > 0 ? 1 : 0);
      int bwdTrig = (vz2.particles().size() > 0 ? 1 : 0);
      int cTrig = (spd.particles().size() > 0 ? 1 : 0);
-
+
      if (fwdTrig + bwdTrig + cTrig < 2) vetoEvent;
-
-      const CentralityProjection& centrProj =
+
+      const CentralityProjection& centrProj =
        apply<CentralityProjection>(event, "V0M");
      double centr = centrProj();
      if (centr > 80.) vetoEvent;
 
      // Calculate number of charged particles and fill histogram
-      double nch = 0.;
-      //primary.particles().size();
-      for (const auto& p : applyProjection<ALICE::PrimaryParticles>(event,"APRIM").particles()) {
-        if(p.abscharge() > 0)
-          nch++;
+      double nch = chargedParticles.size();
+      _histNchVsCentr->fill(centr, nch, weight);
+
+      // Attempt to extract Npart from GenEvent.
+      // TODO: Unclear how to handle this in HepMC3
+      const HepMC::HeavyIon* hi = event.genEvent()->heavy_ion();
+      if (hi && hi->is_valid()) {
+        _histNpartVsCentr->fill(centr, hi->Npart_proj() + hi->Npart_targ(),
+          weight);
      }
-      _histNchVsCentr->fill(centr, nch, event.weight());
-
-      // Attempt to extract Npart form GenEvent. TODO: Unclear how to handle this
-      // in HepMC3
-      const HepMC::HeavyIon* hi = event.genEvent()->heavy_ion();
-      if (hi && hi->is_valid())
-        _histNpartVsCentr->fill(centr, hi->Npart_proj() + hi->Npart_targ(),
-          event.weight());
    }
-
+
+    /// Normalise histograms etc., after the run
    void finalize() {
-
+
    }
 
+    //@}
+
  private:
-
+
+    /// @name Histograms
+    //@{
    Profile1DPtr _histNchVsCentr;
    Profile1DPtr _histNpartVsCentr;
-
+    //@}
+
  };
 
  // The hook for the plugin system
  DECLARE_RIVET_PLUGIN(ALICE_2010_I880049);
+
 }
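Note on the TODO above: HepMC3 no longer exposes the heavy-ion record through accessor methods on a raw HepMC::HeavyIon pointer; it becomes a GenHeavyIon event attribute with public data members. A minimal sketch of how the Npart extraction could look once this code is built against HepMC3 (the helper name npartFromHepMC3 is illustrative, not part of this patch):

    // Illustrative only: Npart extraction against HepMC3, where the
    // heavy-ion record is an event attribute with public data members
    // instead of the HepMC2-style accessor methods used above.
    #include "HepMC3/GenEvent.h"
    #include "HepMC3/GenHeavyIon.h"

    double npartFromHepMC3(const HepMC3::GenEvent& evt) {
      // heavy_ion() returns a shared pointer that is null when no
      // heavy-ion record is attached to the event.
      HepMC3::ConstGenHeavyIonPtr hi = evt.heavy_ion();
      if (!hi) return -1.;                     // no record: signal "not available"
      return hi->Npart_proj + hi->Npart_targ;  // public fields, not methods
    }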
diff --git a/analyses/pluginALICE/ALICE_2012_I1127497.cc b/analyses/pluginALICE/ALICE_2012_I1127497.cc
--- a/analyses/pluginALICE/ALICE_2012_I1127497.cc
+++ b/analyses/pluginALICE/ALICE_2012_I1127497.cc
@@ -1,180 +1,197 @@
 // -*- C++ -*-
+#include "Rivet/Analysis.hh"
 #include "Rivet/Projections/ChargedFinalState.hh"
 #include "Rivet/Tools/Cuts.hh"
 #include "Rivet/Projections/SingleValueProjection.hh"
 #include "Rivet/Tools/AliceCommon.hh"
 #include "Rivet/Projections/AliceCommon.hh"
 #include <fstream>
 #define _USE_MATH_DEFINES
 #include <math.h>
 
 namespace Rivet {
 
-  /// Analysis
+  /// @brief ALICE PbPb at 2.76 TeV R_AA analysis.
  class ALICE_2012_I1127497 : public Analysis {
-
+
  public:
-
+
+    /// Constructor
    DEFAULT_RIVET_ANALYSIS_CTOR(ALICE_2012_I1127497);
-
+
+    /// @name Analysis methods
+    //@{
+
    /// Book histograms and initialise projections before the run
    void init() {
-
+
      // Declare centrality projection
-      if (getOption("cent") == "REF") {
-        _mode = 1;
-        declareCentrality(ALICE::V0MMultiplicity(), "ALICE_2015_PBPBCentrality", "V0M", "V0M");
-      }
-      else if (getOption("cent") == "IMP") {
-        _mode = 2;
-        declareCentrality(ALICE::V0MMultiplicity(), "ALICE_2015_PBPBCentrality", "V0M", "V0M_IMP");
-      }
-
-      // Charged final states with |eta| < 0.5 and pT > 150 MeV
-      const Cut& cut = Cuts::abseta < 0.5 && Cuts::pT > 150*MeV;
-      const ChargedFinalState cfs(cut);
-      addProjection(cfs,"CFS");
-
+      declareCentrality(ALICE::V0MMultiplicity(),
+        "ALICE_2015_PBPBCentrality", "V0M", "V0M");
+
+      // Charged, primary particles with |eta| < 0.5 and pT > 150 MeV
+      declare(ALICE::PrimaryParticles(Cuts::abseta < 0.5 &&
+        Cuts::pT > 150*MeV && Cuts::abscharge > 0), "APRIM");
+
      // Loop over all histograms
      for (size_t ihist = 0; ihist < NHISTOS; ++ihist) {
-
-        // Initialize PbPb objects
-        _histNch[PBPB][ihist] = bookHisto1D(ihist+1, 1, 1);
-
-        std::string nameCounterPbPb = "Counter_PbPb_" + std::to_string(ihist);
-        _counterSOW[PBPB][ihist] = bookCounter(nameCounterPbPb, "Sum of weights counter for PbPb");
-
-        std::string nameCounterNcoll = "Counter_Ncoll_" + std::to_string(ihist);
-        _counterNcoll[ihist] = bookCounter(nameCounterNcoll, "Ncoll counter for PbPb");
-
-        // Initialize pp objects. In principle, only one pp histogram would be needed since
-        // centrality does not make any difference here. However, in some cases in this analysis
-        // the binning differ from each other, so this is easy-to-implement way to account for that.
-        std::string namePP = _histNch[PBPB][ihist]->name() + "-pp";
-        _histNch[PP][ihist] = bookHisto1D(namePP, refData(ihist+1, 1, 1)); // binning taken from ref data
-
-        std::string nameCounterpp = "Counter_pp_" + std::to_string(ihist);
-        _counterSOW[PP][ihist] = bookCounter(nameCounterpp, "Sum of weights counter for pp");
-
+
+        // Initialize PbPb objects
+        _histNch[PBPB][ihist] = bookHisto1D(ihist+1, 1, 1);
+
+        std::string nameCounterPbPb = "counter.pbpb." + std::to_string(ihist);
+        _counterSOW[PBPB][ihist] = bookCounter(nameCounterPbPb,
+          "Sum of weights counter for PbPb");
+
+        std::string nameCounterNcoll = "counter.ncoll." + std::to_string(ihist);
+        _counterNcoll[ihist] = bookCounter(nameCounterNcoll,
+          "Ncoll counter for PbPb");
+
+        // Initialize pp objects. In principle, only one pp histogram would be
+        // needed since centrality does not make any difference here. However,
+        // in some cases in this analysis the binnings differ from each other,
+        // so this is an easy-to-implement way to account for that.
+        std::string namePP = _histNch[PBPB][ihist]->name() + "-pp";
+        // The binning is taken from the reference data
+        _histNch[PP][ihist] = bookHisto1D(namePP, refData(ihist+1, 1, 1));
+
+        std::string nameCounterpp = "counter.pp." + std::to_string(ihist);
+        _counterSOW[PP][ihist] = bookCounter(nameCounterpp,
+          "Sum of weights counter for pp");
+
      }
-
-      // Centrality regions keeping boundaries for a certain region.
+
+      // Centrality regions keeping boundaries for a certain region.
      // Note, that some regions overlap with other regions.
      _centrRegions.clear();
      _centrRegions = {{0., 5.}, {5., 10.}, {10., 20.},
-        {20., 30.}, {30., 40.}, {40., 50.},
-        {50., 60.}, {60., 70.}, {70., 80.},
-        {0., 10.}, {0., 20.}, {20., 40.},
-        {40., 60.}, {40., 80.}, {60., 80.}};
+                       {20., 30.}, {30., 40.}, {40., 50.},
+                       {50., 60.}, {60., 70.}, {70., 80.},
+                       {0., 10.}, {0., 20.}, {20., 40.},
+                       {40., 60.}, {40., 80.}, {60., 80.}};
+
    }
 
    /// Perform the per-event analysis
    void analyze(const Event& event) {
-
+
      const double weight = event.weight();
-
-      // Final state particles with at least pT = 150 MeV in eta range of |eta| < 0.5
-      const ChargedFinalState& charged = applyProjection<ChargedFinalState>(event, "CFS");
-      Particles chargedParticles = charged.particlesByPt();
-
-      // Check type of event. This may be not the perfect way to check for the type of event as
-      // there might be some weird conditions hidden inside. For example some HepMC versions check
-      // if number of hard collisions is equal to 0 and assign 'false' in that case, which is usually wrong.
+
+      // Charged, primary particles with at least pT = 150 MeV
+      // in eta range of |eta| < 0.5
+      Particles chargedParticles =
+        applyProjection<ALICE::PrimaryParticles>(event,"APRIM").particlesByPt();
+
+      // Check type of event. This may not be a perfect way to check for the
+      // type of event as there might be some weird conditions hidden inside.
+      // For example some HepMC versions check if number of hard collisions
+      // is equal to 0 and assign 'false' in that case, which is usually wrong.
      // This might be changed in the future
-      if (event.genEvent()->heavy_ion()) {
-
-        // Prepare centrality projection and value
-        const CentralityProjection& centrProj = apply<CentralityProjection>(event, (_mode == 1 ? "V0M" : "V0M_IMP"));
-        double centr = centrProj();
-        // Veto event for too large centralities since those are not used in the analysis at all
-        if ((centr < 0.) || (centr > 80.))
-          vetoEvent;
-
-        // Fill the right PbPb histograms and add weights based on centrality value
-        for (size_t ihist = 0; ihist < NHISTOS; ++ihist) {
+      const HepMC::HeavyIon* hi = event.genEvent()->heavy_ion();
+      if (hi && hi->is_valid()) {
+
+        // Prepare centrality projection and value
+        const CentralityProjection& centrProj =
+          apply<CentralityProjection>(event, "V0M");
+        double centr = centrProj();
+        // Veto event for too large centralities since those are not used
+        // in the analysis at all
+        if ((centr < 0.) || (centr > 80.))
+          vetoEvent;
+
+        // Fill PbPb histograms and add weights based on centrality value
+        for (size_t ihist = 0; ihist < NHISTOS; ++ihist) {
          if (inRange(centr, _centrRegions[ihist].first, _centrRegions[ihist].second)) {
-          _counterSOW[PBPB][ihist]->fill(weight);
-          _counterNcoll[ihist]->fill(event.genEvent()->heavy_ion()->Ncoll(), weight);
-          foreach (const Particle& p, chargedParticles) {
-            float pT = p.pT()/GeV;
-            if (pT < 50.) {
-              double pTAtBinCenter = _histNch[PBPB][ihist]->binAt(pT).xMid();
-              _histNch[PBPB][ihist]->fill(pT, weight/pTAtBinCenter);
-            }
-          }
-        }
-      }
-
+            _counterSOW[PBPB][ihist]->fill(weight);
+            _counterNcoll[ihist]->fill(event.genEvent()->heavy_ion()->Ncoll(), weight);
+            foreach (const Particle& p, chargedParticles) {
+              float pT = p.pT()/GeV;
+              if (pT < 50.) {
+                double pTAtBinCenter = _histNch[PBPB][ihist]->binAt(pT).xMid();
+                _histNch[PBPB][ihist]->fill(pT, weight/pTAtBinCenter);
+              }
+            }
+          }
+        }
+
      } else {
-
-        // Fill all pp histograms and add weights
-        for (size_t ihist = 0; ihist < NHISTOS; ++ihist) {
-          _counterSOW[PP][ihist]->fill(weight);
-          foreach (const Particle& p, chargedParticles) {
-            float pT = p.pT()/GeV;
-            if (pT < 50.) {
-              double pTAtBinCenter = _histNch[PP][ihist]->binAt(pT).xMid();
-              _histNch[PP][ihist]->fill(pT, weight/pTAtBinCenter);
-            }
-          }
-        }
-
+
+        // Fill all pp histograms and add weights
+        for (size_t ihist = 0; ihist < NHISTOS; ++ihist) {
+          _counterSOW[PP][ihist]->fill(weight);
+          foreach (const Particle& p, chargedParticles) {
+            float pT = p.pT()/GeV;
+            if (pT < 50.) {
+              double pTAtBinCenter = _histNch[PP][ihist]->binAt(pT).xMid();
+              _histNch[PP][ihist]->fill(pT, weight/pTAtBinCenter);
+            }
+          }
+        }
+
      }
-
+
    }
 
    /// Normalise histograms etc., after the run
    void finalize() {
-
+
      // Right scaling of the histograms with their individual weights.
      for (size_t itype = 0; itype < EVENT_TYPES; ++itype) {
-        for (size_t ihist = 0; ihist < NHISTOS; ++ihist) {
-          if (_counterSOW[itype][ihist]->sumW() > 0.) {
-            scale(_histNch[itype][ihist], (1./_counterSOW[itype][ihist]->sumW() / 2. / M_PI));
-          }
-        }
+        for (size_t ihist = 0; ihist < NHISTOS; ++ihist) {
+          if (_counterSOW[itype][ihist]->sumW() > 0.) {
+            scale(_histNch[itype][ihist],
+              (1. / _counterSOW[itype][ihist]->sumW() / 2. / M_PI));
+          }
+        }
      }
-
+
      // Postprocessing of the histograms
      for (size_t ihist = 0; ihist < NHISTOS; ++ihist) {
-        // If there are entires in histograms for both beam types
-        if (_histNch[PP][ihist]->numEntries() > 0 && _histNch[PBPB][ihist]->numEntries() > 0) {
-          // Initialize and fill R_AA histograms
-          _histRAA[ihist] = bookScatter2D(ihist+16, 1, 1);
-          divide(_histNch[PBPB][ihist], _histNch[PP][ihist], _histRAA[ihist]);
-          // Scale by Ncoll. Unfortunately some generators does not provide Ncoll (eg. JEWEL),
-          // so the following scaling will be done only if there are entries in the counters
-          if (_counterNcoll[ihist]->sumW() > 1e-6 && _counterSOW[PBPB][ihist]->sumW() > 1e-6) {
-            _histRAA[ihist]->scaleY(1. / (_counterNcoll[ihist]->sumW() / _counterSOW[PBPB][ihist]->sumW()));
-          }
-        }
+        // If there are entries in histograms for both beam types
+        if (_histNch[PP][ihist]->numEntries() > 0 && _histNch[PBPB][ihist]->numEntries() > 0) {
+          // Initialize and fill R_AA histograms
+          _histRAA[ihist] = bookScatter2D(ihist+16, 1, 1);
+          divide(_histNch[PBPB][ihist], _histNch[PP][ihist], _histRAA[ihist]);
+          // Scale by Ncoll. Unfortunately some generators do not provide
+          // the Ncoll value (e.g. JEWEL), so the following scaling will be
+          // done only if there are entries in the counters
+          double ncoll = _counterNcoll[ihist]->sumW();
+          double sow = _counterSOW[PBPB][ihist]->sumW();
+          if (ncoll > 1e-6 && sow > 1e-6)
+            _histRAA[ihist]->scaleY(1. / (ncoll / sow));
+
+        }
      }
-
+
    }
-
+
+    //@}
+
  private:
-
+
    static const int NHISTOS = 15;
    static const int EVENT_TYPES = 2;
    static const int PP = 0;
    static const int PBPB = 1;
-
+
+    /// @name Histograms
+    //@{
    Histo1DPtr _histNch[EVENT_TYPES][NHISTOS];
    CounterPtr _counterSOW[EVENT_TYPES][NHISTOS];
    CounterPtr _counterNcoll[NHISTOS];
-    Scatter2DPtr _histRAA[NHISTOS];
+    Scatter2DPtr _histRAA[NHISTOS];
+    //@}
+
    std::vector<std::pair<double, double>> _centrRegions;
-
-    int _mode;
-
  };
 
  // The hook for the plugin system
  DECLARE_RIVET_PLUGIN(ALICE_2012_I1127497);
 
 }
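For reference, the divide() plus scaleY() sequence in finalize() above implements the standard nuclear modification factor

    R_{AA}(p_T) = \frac{(1/N_{evt}^{PbPb})\,\mathrm{d}N^{PbPb}/\mathrm{d}p_T}
                       {\langle N_{coll}\rangle\,(1/N_{evt}^{pp})\,\mathrm{d}N^{pp}/\mathrm{d}p_T},
    \qquad
    \langle N_{coll}\rangle = \frac{\sum_i w_i\,N_{coll,i}}{\sum_i w_i}

where <Ncoll> is the weighted mean accumulated in the counters: the Ncoll counter holds sum(w*Ncoll), the PbPb sum-of-weights counter holds sum(w), so scaleY(1. / (ncoll / sow)) divides the PbPb/pp ratio by <Ncoll>.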
diff --git a/analyses/pluginALICE/ALICE_2012_I930312.cc b/analyses/pluginALICE/ALICE_2012_I930312.cc
--- a/analyses/pluginALICE/ALICE_2012_I930312.cc
+++ b/analyses/pluginALICE/ALICE_2012_I930312.cc
@@ -1,316 +1,320 @@
 // -*- C++ -*-
+#include "Rivet/Analysis.hh"
 #include "Rivet/Projections/ChargedFinalState.hh"
-#include "Rivet/Projections/SingleValueProjection.hh"
 #include "Rivet/Tools/AliceCommon.hh"
 #include "Rivet/Projections/AliceCommon.hh"
 
 namespace Rivet {
 
+  /// @brief ALICE PbPb at 2.76 TeV azimuthal di-hadron correlations
+  class ALICE_2012_I930312 : public Analysis {
-  class ALICE_2012_I930312 : public Analysis {
 
  public:
 
    // Constructor
    DEFAULT_RIVET_ANALYSIS_CTOR(ALICE_2012_I930312);
 
+    /// @name Analysis methods
+    //@{
+
    /// Book histograms and initialise projections before the run
    void init() {
 
      // Declare centrality projection
-      declareCentrality(ALICE::V0MMultiplicity(), "ALICE_2015_PBPBCentrality", "V0M", "V0M");
+      declareCentrality(ALICE::V0MMultiplicity(),
+        "ALICE_2015_PBPBCentrality", "V0M", "V0M");
 
-      // Charged final states with |eta| < 1.0 and 8 < pT < 15 GeV/c for trigger particles
-      const Cut& cutTrigger = Cuts::abseta < 1.0 && Cuts::pT > 8*GeV && Cuts::pT < 15*GeV;
-      const ChargedFinalState cfsTrigger(cutTrigger);
-      addProjection(cfsTrigger,"CFSTrigger");
+      // Projection for trigger particles: charged, primary particles
+      // with |eta| < 1.0 and 8 < pT < 15 GeV/c
+      declare(ALICE::PrimaryParticles(Cuts::abseta < 1.0 && Cuts::abscharge > 0
+        && Cuts::ptIn(8.*GeV, 15.*GeV)), "APRIMTrig");
 
-      // Set limit values of pT bins
-      pt_limits[0] = 3.;
-      pt_limits[1] = 4.;
-      pt_limits[2] = 6.;
-      pt_limits[3] = 8.;
-      pt_limits[4] = 10.;
+      // pT bin edges
+      vector<double> ptBins = { 3., 4., 6., 8., 10. };
 
-      // Charged final states with |eta| < 1.0 and different pT bins for associated particles.
-      for (int ipt = 0; ipt < PT_BINS; ipt++) {
-        Cut mycut = Cuts::abseta < 1.0 && Cuts::pT > pt_limits[ipt]*GeV && Cuts::pT < pt_limits[ipt + 1]*GeV;
-        declare(ChargedFinalState(mycut), "CFSAssoc" + std::to_string(ipt));
+      // Projections for associated particles: charged, primary particles
+      // with |eta| < 1.0 and different pT bins
+      for (int ipt = 0; ipt < PT_BINS; ++ipt) {
+        Cut cut = Cuts::abseta < 1.0 && Cuts::abscharge > 0 &&
+          Cuts::ptIn(ptBins[ipt]*GeV, ptBins[ipt+1]*GeV);
+        declare(ALICE::PrimaryParticles(cut), "APRIMAssoc" + toString(ipt));
      }
 
      // Create event strings
-      event_string[0] = "pp";
-      event_string[1] = "central";
-      event_string[2] = "peripheral";
-      event_string[3] = "other";
+      vector<string> evString = { "pp", "central", "peripheral" };
 
-      // For each event type
-      for (int itype = 0; itype < EVENT_TYPES; itype++) {
-        // For each pT range
-        for (int ipt = 0; ipt < PT_BINS; ipt++) {
-          // Initialize yield histograms
-          _histYield[itype][ipt] = bookHisto1D("Yield_" + event_string[itype]
-            + "_" + std::to_string(ipt), 36, -0.5 * M_PI, 1.5 * M_PI,
-            "Associated particle per trigger particle yield",
-            "$\\Delta\\eta$ (rad)", "$1 / N_{trig} dN_{assoc} / d\\Delta\\eta$ (rad$^-1$)");
-          _histYieldBkgRemoved[itype][ipt] = bookHisto1D("Yield_" +
-            event_string[itype] + "_nobkg_" + std::to_string(ipt), 36, -0.5*M_PI,
-            1.5 * M_PI, "Associated particle per trigger particle yield no bkg",
-            "$\\Delta\\eta$ (rad)", "$1 / N_{trig} dN_{assoc} / d\\Delta\\eta$ (rad$^-1$)");
+      // Initialize trigger counters and yield histograms
+      string title = "Per trigger particle yield";
+      string xtitle = "$\\Delta\\phi$ (rad)";
+      string ytitle =
+        "$1 / N_{trig} {\\rm d}N_{assoc} / {\\rm d}\\Delta\\phi$ (rad$^{-1}$)";
+      for (int itype = 0; itype < EVENT_TYPES; ++itype) {
+        _counterTrigger[itype] = bookCounter("counter." + toString(itype));
+        for (int ipt = 0; ipt < PT_BINS; ++ipt) {
+          string name = "yield." + evString[itype] + ".pt" + toString(ipt);
+          _histYield[itype][ipt] = bookHisto1D(name, 36,
+            -0.5*M_PI, 1.5*M_PI, title, xtitle, ytitle);
        }
      }
-
-      // Histogram for counting trigger particles for each event type
-      _histTriggerCounter = bookHisto1D("Trigger", EVENT_TYPES, 0.0,
-        EVENT_TYPES, "Trigger counter", "event type", "N");
-
    }
 
    /// Perform the per-event analysis
    void analyze(const Event& event) {
 
      const double weight = event.weight();
 
-      // Create charged final state for trigger particle
-      const ChargedFinalState& triggerFinalState = apply<ChargedFinalState>(event, "CFSTrigger");
-      Particles triggerParticles = triggerFinalState.particlesByPt();
+      // Trigger particles
+      Particles trigParticles =
+        applyProjection<ALICE::PrimaryParticles>(event,"APRIMTrig").particles();
 
-      // Create charged final state for associated particle
-      ChargedFinalState associatedFinalState[PT_BINS];
-      Particles associatedParticles[PT_BINS];
-      for (int ipt = 0; ipt < PT_BINS; ipt++) {
-        associatedFinalState[ipt] = apply<ChargedFinalState>(event, "CFSAssoc" + std::to_string(ipt));
-        associatedParticles[ipt] = associatedFinalState[ipt].particlesByPt();
+      // Associated particles
+      Particles assocParticles[PT_BINS];
+      for (int ipt = 0; ipt < PT_BINS; ++ipt) {
+        string pname = "APRIMAssoc" + toString(ipt);
+        assocParticles[ipt] =
+          applyProjection<ALICE::PrimaryParticles>(event,pname).particles();
      }
 
-      // Check event type
-      if (event.genEvent()->heavy_ion()) {
+      // Check type of event. This may not be a perfect way to check for the
+      // type of event as there might be some weird conditions hidden inside.
+      // For example some HepMC versions check if number of hard collisions
+      // is equal to 0 and assign 'false' in that case, which is usually wrong.
+      // This might be changed in the future
+      int ev_type = 0; // pp
+      const HepMC::HeavyIon* hi = event.genEvent()->heavy_ion();
+      if (hi && hi->is_valid()) {
 
        // Prepare centrality projection and value
-        const CentralityProjection& centrProj = apply<CentralityProjection>(event, "V0M");
+        const CentralityProjection& centrProj =
+          apply<CentralityProjection>(event, "V0M");
        double centr = centrProj();
-        // Set the flag for the type of the event
        if (centr > 0.0 && centr < 5.0)
-          event_type = 1; // PbPb, central
+          ev_type = 1; // PbPb, central
        else if (centr > 60.0 && centr < 90.0)
-          event_type = 2; // PbPb, peripherial
+          ev_type = 2; // PbPb, peripheral
        else
-          event_type = 3; // PbPb, other
-      }
-      else {
-        event_type = 0; // pp
+          vetoEvent; // PbPb, other: not used in the analysis at all
      }
 
-      // Veto event if not valid event type
-      if (event_type == 3) vetoEvent;
-
      // Fill trigger histogram for a proper event type
-      _histTriggerCounter->fill(event_type, triggerParticles.size());
+      _counterTrigger[ev_type]->fill(trigParticles.size());
 
      // Loop over trigger particles
-      for (const Particle& triggerParticle : triggerParticles) {
+      for (const Particle& trigParticle : trigParticles) {
        // For each pt bin
-        for (int ipt = 0; ipt < PT_BINS; ipt++) {
+        for (int ipt = 0; ipt < PT_BINS; ++ipt) {
          // Loop over associated particles
-          for (const Particle& associatedParticle : associatedParticles[ipt]) {
+          for (const Particle& assocParticle : assocParticles[ipt]) {
            // If associated and trigger particle are not the same particles.
-            if (!isSame(triggerParticle, associatedParticle)) {
+            if (!isSame(trigParticle, assocParticle)) {
              // Test trigger particle.
-              if (triggerParticle.pt() > associatedParticle.pt()) {
+              if (trigParticle.pt() > assocParticle.pt()) {
                // Calculate delta phi in range (-0.5*PI, 1.5*PI).
-                const double dPhi = deltaPhi(triggerParticle, associatedParticle, true);
+                double dPhi = deltaPhi(trigParticle, assocParticle, true);
+                if (dPhi < -0.5 * M_PI) dPhi += 2 * M_PI;
                // Fill yield histogram for calculated delta phi
-                _histYield[event_type][ipt]->fill(dPhi, weight);
+                _histYield[ev_type][ipt]->fill(dPhi, weight);
              }
            }
          }
        }
      }
    }
 
    /// Normalise histograms etc., after the run
    void finalize() {
 
      // Check for the reentrant finalize
      bool pp_available = false, PbPb_available = false;
-      reentrant_flag = false;
-      // For each event type
-      for (int itype = 0; itype < EVENT_TYPES; itype++) {
-        // For each pT range
-        for (int ipt = 0; ipt < PT_BINS; ipt++) {
+      for (int itype = 0; itype < EVENT_TYPES; ++itype) {
+        for (int ipt = 0; ipt < PT_BINS; ++ipt) {
          if (_histYield[itype][ipt]->numEntries() > 0)
            itype == 0 ?
              pp_available = true : PbPb_available = true;
        }
      }
-      reentrant_flag = (pp_available && PbPb_available);
+      // Skip postprocessing if pp or PbPb histograms are not available
+      if (!(pp_available && PbPb_available))
+        return;
 
-      // Postprocessing of the histograms
-      if (reentrant_flag == true) {
+      // Initialize IAA and ICP histograms
+      _histIAA[0] = bookScatter2D(1, 1, 1);
+      _histIAA[1] = bookScatter2D(2, 1, 1);
+      _histIAA[2] = bookScatter2D(5, 1, 1);
+      _histIAA[3] = bookScatter2D(3, 1, 1);
+      _histIAA[4] = bookScatter2D(4, 1, 1);
+      _histIAA[5] = bookScatter2D(6, 1, 1);
 
-        // Initialize IAA and ICP histograms
-        _histIAA[0] = bookScatter2D(1, 1, 1);
-        _histIAA[1] = bookScatter2D(2, 1, 1);
-        _histIAA[2] = bookScatter2D(5, 1, 1);
-        _histIAA[3] = bookScatter2D(3, 1, 1);
-        _histIAA[4] = bookScatter2D(4, 1, 1);
-        _histIAA[5] = bookScatter2D(6, 1, 1);
+      // Initialize background-subtracted yield histograms
+      for (int itype = 0; itype < EVENT_TYPES; ++itype) {
+        for (int ipt = 0; ipt < PT_BINS; ++ipt) {
+          string newname = _histYield[itype][ipt]->name() + ".nobkg";
+          string newtitle = _histYield[itype][ipt]->title() +
+            ", background subtracted";
+          _histYieldNoBkg[itype][ipt] =
+            bookHisto1D(newname, 36, -0.5*M_PI, 1.5*M_PI, newtitle,
+              _histYield[itype][ipt]->annotation("XLabel"),
+              _histYield[itype][ipt]->annotation("YLabel"));
+        }
+      }
 
-        // Variables for near and away side peak calculation
-        double nearSide[EVENT_TYPES][PT_BINS] = { {0.0} };
-        double awaySide[EVENT_TYPES][PT_BINS] = { {0.0} };
+      // Variable for near and away side peak integral calculation
+      double integral[EVENT_TYPES][PT_BINS][2] = { { {0.0} } };
 
-        // Variables for background error calculation
-        double background[EVENT_TYPES][PT_BINS] = { {0.0} };
-        double backgroundError[EVENT_TYPES][PT_BINS] = { {0.0} };
+      // Variables for background calculation
+      double bkg = 0.0;
+      double bkgErr[EVENT_TYPES][PT_BINS] = { {0.0} };
 
-        // Variables for integration error calculation
-        double scalingFactor[EVENT_TYPES] = {0.0};
-        double numberOfEntries[EVENT_TYPES][PT_BINS][2] = { { {0.0} } };
-        int numberOfBins[EVENT_TYPES][PT_BINS][2] = { { {0} } };
+      // Variables for integration error calculation
+      double norm[EVENT_TYPES] = {0.0};
+      double numEntries[EVENT_TYPES][PT_BINS][2] = { { {0.0} } };
+      int numBins[EVENT_TYPES][PT_BINS][2] = { { {0} } };
 
-        // For each event type
-        for (int itype = 0; itype < EVENT_TYPES; itype++) {
-          // For each pT range
-          for (int ipt = 0; ipt < PT_BINS; ipt++) {
-            // Check if histograms are fine
-            if (_histTriggerCounter->numEntries() == 0 || _histYield[itype][ipt]->numEntries() == 0) {
-              MSG_WARNING("There are no entries in one of the histograms");
-              continue;
+      // For each event type
+      for (int itype = 0; itype < EVENT_TYPES; ++itype) {
+        // Get counter
+        CounterPtr counter = _counterTrigger[itype];
+        // For each pT range
+        for (int ipt = 0; ipt < PT_BINS; ++ipt) {
+
+          // Get yield histograms
+          Histo1DPtr hYield = _histYield[itype][ipt];
+          Histo1DPtr hYieldNoBkg = _histYieldNoBkg[itype][ipt];
+
+          // Check if histograms are fine
+          if (counter->sumW() == 0 || hYield->numEntries() == 0) {
+            MSG_WARNING("There are no entries in one of the histograms");
+            continue;
+          }
+
+          // Scale yield histogram
+          norm[itype] = 1. / counter->sumW();
+          scale(hYield, norm[itype]);
+
+          // Calculate background
+          double sum = 0.0;
+          int nbins = 0;
+          for (size_t ibin = 0; ibin < hYield->numBins(); ++ibin) {
+            double xmid = hYield->bin(ibin).xMid();
+            if (inRange(xmid, -0.5 * M_PI, -0.5 * M_PI + 0.4) ||
+                inRange(xmid, 0.5 * M_PI - 0.4, 0.5 * M_PI + 0.4) ||
+                inRange(xmid, 1.5 * M_PI - 0.4, 1.5 * M_PI)) {
+              sum += hYield->bin(ibin).sumW();
+              nbins += 1;
            }
+          }
+          if (nbins == 0) {
+            MSG_WARNING("Failed to estimate background!");
+            continue;
+          }
+          bkg = sum / nbins;
 
-            // Scale yield histogram
-            if ((_histTriggerCounter->bin(itype).sumW() != 0)) {
-              scalingFactor[itype] = 1. / _histTriggerCounter->bin(itype).sumW();
-              scale(_histYield[itype][ipt], (1. / _histTriggerCounter->bin(itype).sumW()));
+          // Calculate background error
+          sum = 0.0;
+          nbins = 0;
+          for (size_t ibin = 0; ibin < hYield->numBins(); ++ibin) {
+            double xmid = hYield->bin(ibin).xMid();
+            if (inRange(xmid, 0.5 * M_PI - 0.4, 0.5 * M_PI + 0.4)) {
+              sum += (hYield->bin(ibin).sumW() - bkg) *
+                (hYield->bin(ibin).sumW() - bkg);
+              nbins++;
            }
+          }
+          if (nbins < 2) {
+            MSG_WARNING("Failed to estimate background error!");
+            continue;
+          }
+          bkgErr[itype][ipt] = sqrt(sum / (nbins - 1));
 
-            // Calculate background
-            double sum = 0.0;
-            int nbins = 0;
-            for (size_t ibin = 0; ibin < _histYield[itype][ipt]->numBins(); ibin++) {
-              if ((_histYield[itype][ipt]->bin(ibin).xMid() > (-0.5 * M_PI) &&
-                  _histYield[itype][ipt]->bin(ibin).xMid() < (-0.5 * M_PI + 0.4)) ||
-                  (_histYield[itype][ipt]->bin(ibin).xMid() > (0.5 * M_PI - 0.4) &&
-                  _histYield[itype][ipt]->bin(ibin).xMid() < (0.5 * M_PI + 0.4)) ||
-                  (_histYield[itype][ipt]->bin(ibin).xMid() > (1.5 * M_PI - 0.4) &&
-                  _histYield[itype][ipt]->bin(ibin).xMid() < (1.5 * M_PI))) {
-                sum += _histYield[itype][ipt]->bin(ibin).sumW();
-                nbins += 1;
-              }
-            }
-            if (nbins == 0) {
-              MSG_WARNING("Failed to estimate background!");
-              continue;
-            }
-            background[itype][ipt] = sum / nbins;
+          // Fill histograms with removed background
+          for (size_t ibin = 0; ibin < hYield->numBins(); ++ibin) {
+            hYieldNoBkg->fillBin(ibin, hYield->bin(ibin).sumW() - bkg);
+          }
 
-            // Calculate background error
-            sum = 0.0;
-            nbins = 0;
-            for (size_t ibin = 0; ibin < _histYield[itype][ipt]->numBins(); ibin++) {
-              if (_histYield[itype][ipt]->bin(ibin).xMid() > (0.5 * M_PI - 0.4) &&
-                  _histYield[itype][ipt]->bin(ibin).xMid() < (0.5 * M_PI + 0.4)) {
-                sum += (_histYield[itype][ipt]->bin(ibin).sumW() - background[itype][ipt]) *
-                  (_histYield[itype][ipt]->bin(ibin).sumW() - background[itype][ipt]);
-                nbins++;
-              }
-            }
-            backgroundError[itype][ipt] = sqrt(sum / (nbins - 1));
+          // Integrate near-side yield
+          size_t lowerBin = hYield->binIndexAt(-0.7 + 0.02);
+          size_t upperBin = hYield->binIndexAt( 0.7 - 0.02) + 1;
+          nbins = upperBin - lowerBin;
+          numBins[itype][ipt][NEAR] = nbins;
+          integral[itype][ipt][NEAR] =
+            hYield->integralRange(lowerBin, upperBin) - nbins * bkg;
+          numEntries[itype][ipt][NEAR] =
+            hYield->integralRange(lowerBin, upperBin) * counter->sumW();
 
-            // Fill histograms with removed background
-            for (size_t ibin = 0; ibin < _histYield[itype][ipt]->numBins(); ibin++) {
-              _histYieldBkgRemoved[itype][ipt]->fillBin(ibin,
-                _histYield[itype][ipt]->bin(ibin).sumW() - background[itype][ipt]);
-            }
+          // Integrate away-side yield
+          lowerBin = hYield->binIndexAt(M_PI - 0.7 + 0.02);
+          upperBin = hYield->binIndexAt(M_PI + 0.7 - 0.02) + 1;
+          nbins = upperBin - lowerBin;
+          numBins[itype][ipt][AWAY] = nbins;
+          integral[itype][ipt][AWAY] =
+            hYield->integralRange(lowerBin, upperBin) - nbins * bkg;
+          numEntries[itype][ipt][AWAY] =
+            hYield->integralRange(lowerBin, upperBin) * counter->sumW();
 
-            // Integrate near-side yield
-            size_t lowerBin = _histYield[itype][ipt]->binIndexAt(-0.7 + 0.02);
-            size_t upperBin = _histYield[itype][ipt]->binIndexAt( 0.7 - 0.02) + 1;
-            nbins = upperBin - lowerBin;
-            numberOfBins[itype][ipt][0] = nbins;
-            nearSide[itype][ipt] = _histYield[itype][ipt]->integralRange(lowerBin, upperBin) - nbins * background[itype][ipt];
-            numberOfEntries[itype][ipt][0] = _histYield[itype][ipt]->integralRange(lowerBin, upperBin) * _histTriggerCounter->bin(itype).sumW();
+        }
+      }
 
-            // Integrate away-side yield
-            lowerBin = _histYield[itype][ipt]->binIndexAt(M_PI - 0.7 + 0.02);
-            upperBin = _histYield[itype][ipt]->binIndexAt(M_PI + 0.7 - 0.02) + 1;
-            nbins = upperBin - lowerBin;
-            numberOfBins[itype][ipt][1] = nbins;
-            awaySide[itype][ipt] = _histYield[itype][ipt]->integralRange(lowerBin, upperBin) - nbins * background[itype][ipt];
-            numberOfEntries[itype][ipt][1] = _histYield[itype][ipt]->integralRange(lowerBin, upperBin) * _histTriggerCounter->bin(itype).sumW();
+      // Variables for IAA/ICP plots
+      double yval[2] = { 0.0, 0.0 };
+      double yerr[2] = { 0.0, 0.0 };
+      double xval[PT_BINS] = { 3.5, 5.0, 7.0, 9.0 };
+      double xerr[PT_BINS] = { 0.5, 1.0, 1.0, 1.0 };
+      int types1[3] = {1, 2, 1};
+      int types2[3] = {0, 0, 2};
+
+      // Fill IAA/ICP plots for near and away side peak
+      for (int ihist = 0; ihist < 3; ++ihist) {
+        int type1 = types1[ihist];
+        int type2 = types2[ihist];
+        double norm1 = norm[type1];
+        double norm2 = norm[type2];
+        for (int ipt = 0; ipt < PT_BINS; ++ipt) {
+          double bkgErr1 = bkgErr[type1][ipt];
+          double bkgErr2 = bkgErr[type2][ipt];
+          for (int ina = 0; ina < 2; ++ina) {
+            double integ1 = integral[type1][ipt][ina];
+            double integ2 = integral[type2][ipt][ina];
+            double numEntries1 = numEntries[type1][ipt][ina];
+            double numEntries2 = numEntries[type2][ipt][ina];
+            double numBins1 = numBins[type1][ipt][ina];
+            double numBins2 = numBins[type2][ipt][ina];
+            yval[ina] = integ1 / integ2;
+            yerr[ina] = norm1 * norm1 * numEntries1 +
+              norm2 * norm2 * numEntries2 * integ1 * integ1 / (integ2 * integ2) +
+              numBins1 * numBins1 * bkgErr1 * bkgErr1 +
+              numBins2 * numBins2 * bkgErr2 * bkgErr2 * integ1 * integ1 / (integ2 * integ2);
+            yerr[ina] = sqrt(yerr[ina])/integ2;
          }
-        }
-
-        // Variables for IAA/ICP plots
-        double dI = 0.0;
-        int near = 0;
-        int away = 1;
-        double xval[PT_BINS] = { 3.5, 5.0, 7.0, 9.0 };
-        double xerr[PT_BINS] = { 0.5, 1.0, 1.0, 1.0 };
-
-        int types1[3] = {1, 2, 1};
-        int types2[3] = {0, 0, 2};
-
-        // Fill IAA/ICP plots for near side peak
-        for (int ihist = 0; ihist < 3; ihist++) {
-          int type1 = types1[ihist];
-          int type2 = types2[ihist];
-          for (int ipt = 0; ipt < PT_BINS; ipt++) {
-            dI = scalingFactor[type1] * scalingFactor[type1] * numberOfEntries[type1][ipt][near] +
-              scalingFactor[type2] * scalingFactor[type2] * numberOfEntries[type2][ipt][near] *
-              nearSide[type1][ipt] * nearSide[type1][ipt] / (nearSide[type2][ipt] * nearSide[type2][ipt]) +
-              numberOfBins[type1][ipt][near] * numberOfBins[type1][ipt][near] * backgroundError[type1][ipt] *
-              backgroundError[type1][ipt] + numberOfBins[type2][ipt][near] * numberOfBins[type2][ipt][near] *
-              backgroundError[type2][ipt] * backgroundError[type2][ipt] * nearSide[type1][ipt] *
-              nearSide[type1][ipt] / (nearSide[type2][ipt] * nearSide[type2][ipt]);
-
-            dI = sqrt(dI)/nearSide[type2][ipt];
-            _histIAA[ihist]->addPoint(xval[ipt], nearSide[type1][ipt] / nearSide[type2][ipt], xerr[ipt], dI);
-          }
-        }
-
-        // Fill IAA/ICP plots for away side peak
-        for (int ihist = 0; ihist < 3; ihist++) {
-          int type1 = types1[ihist];
-          int type2 = types2[ihist];
-          for (int ipt = 0; ipt < PT_BINS; ipt++) {
-            dI = scalingFactor[type1] * scalingFactor[type1] * numberOfEntries[type1][ipt][away] +
-              scalingFactor[type2] * scalingFactor[type2] * numberOfEntries[type2][ipt][away] *
-              awaySide[type1][ipt] * awaySide[type1][ipt] / (awaySide[type2][ipt] * awaySide[type2][ipt]) +
-              numberOfBins[type1][ipt][away] * numberOfBins[type1][ipt][away] * backgroundError[type1][ipt] *
-              backgroundError[type1][ipt] + numberOfBins[type2][ipt][away] * numberOfBins[type2][ipt][away] *
-              backgroundError[type2][ipt] * backgroundError[type2][ipt] * awaySide[type1][ipt] *
-              awaySide[type1][ipt] / (awaySide[type2][ipt] * awaySide[type2][ipt]);
-
-            dI = sqrt(dI)/awaySide[type2][ipt];
-            _histIAA[ihist + 3]->addPoint(xval[ipt], awaySide[type1][ipt] / awaySide[type2][ipt], xerr[ipt], dI);
-          }
+          _histIAA[ihist]->addPoint(xval[ipt], yval[NEAR], xerr[ipt], yerr[NEAR]);
+          _histIAA[ihist + 3]->addPoint(xval[ipt], yval[AWAY], xerr[ipt], yerr[AWAY]);
        }
      }
    }
 
+    //@}
+
  private:
 
    static const int PT_BINS = 4;
    static const int EVENT_TYPES = 3;
+    static const int NEAR = 0;
+    static const int AWAY = 1;
 
+    /// @name Histograms
+    //@{
    Histo1DPtr _histYield[EVENT_TYPES][PT_BINS];
-    Histo1DPtr _histYieldBkgRemoved[EVENT_TYPES][PT_BINS];
-    Histo1DPtr _histTriggerCounter;
+    Histo1DPtr _histYieldNoBkg[EVENT_TYPES][PT_BINS];
+    CounterPtr _counterTrigger[EVENT_TYPES];
    Scatter2DPtr _histIAA[6];
-
-    double pt_limits[5];
-    int event_type;
-    string event_string[EVENT_TYPES + 1];
-    bool reentrant_flag;
+    //@}
+
  };
-
+
  // The hook for the plugin system
  DECLARE_RIVET_PLUGIN(ALICE_2012_I930312);
 
 }
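The yerr expression in the IAA/ICP loop above is standard Gaussian error propagation for a ratio I = Y_1/Y_2 of background-subtracted integrals. Writing c_i = 1/N_{trig,i} for the per-trigger normalisation (norm), N_i for the raw entries in the integration window (numEntries), n_i for the number of integrated bins (numBins) and sigma_{b,i} for the background error (bkgErr), and assuming roughly unit event weights so that c_i^2 N_i approximates the statistical variance of the scaled yield:

    \sigma_{Y_i}^2 = c_i^2\,N_i + n_i^2\,\sigma_{b,i}^2,
    \qquad
    \sigma_I = \frac{1}{Y_2}\,\sqrt{\sigma_{Y_1}^2 + \frac{Y_1^2}{Y_2^2}\,\sigma_{Y_2}^2}

which is exactly what yerr[ina] = sqrt(yerr[ina])/integ2 computes.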