diff --git a/analyses/pluginALICE/ALICE_2010_I880049.cc b/analyses/pluginALICE/ALICE_2010_I880049.cc
--- a/analyses/pluginALICE/ALICE_2010_I880049.cc
+++ b/analyses/pluginALICE/ALICE_2010_I880049.cc
@@ -1,83 +1,77 @@
 // -*- C++ -*-
-#include "Rivet/Analysis.hh"
+#include "pluginALICE/HeavyIonAnalysis.hh"
 #include "Rivet/Projections/ChargedFinalState.hh"
-#include "Rivet/Projections/Centrality.hh"
 #include "Rivet/Tools/Cuts.hh"
 #include <iostream>
-//#include "Centrality.hh"
 #define _USE_MATH_DEFINES
 #include <cmath>

 namespace Rivet {

-  class ALICE_2010_I880049 : public Analysis {
+  class ALICE_2010_I880049 : public HeavyIonAnalysis {
   public:
     ALICE_2010_I880049() :
-      Analysis("ALICE_2010_I880049")
+      HeavyIonAnalysis("ALICE_2010_I880049")
     { }

     void init() {
-
+      HeavyIonAnalysis::init();
+
+      // Set centrality method
+      addCentralityMethod(HeavyIonAnalysis::ImpactParameter, 10000, "method1");
+
       // Charged final states with |eta| < 0.5 and pT > 50 MeV
       const Cut& cut = Cuts::abseta < 0.5 && Cuts::pT > 50*MeV;
       const ChargedFinalState cfs(cut);
       addProjection(cfs, "CFS");

-      // Centrality projection for this analysis
-      Centrality centrality(cfs, Centrality::Method::ImpactParameter, 10000);
-      addProjection(centrality, "CENTR");
-
       // Histogram and variable initialization. Do it for each centrality range.
       _histNchVsCentr = bookHisto1D(1, 1, 1);
     }

     void analyze(const Event& event) {
       const ChargedFinalState& charged = applyProjection<ChargedFinalState>(event, "CFS");
       Particles chargedParticles = charged.particlesByPt();

-      const Centrality& centralityProjection = applyProjection<Centrality>(event, "CENTR");
-      if (!centralityProjection.isValid())
-        vetoEvent;
-
-      const double c = centralityProjection.getCentrality();
+      const double c = centrality(event, "method1");
       cout << "Centrality: " << c << endl;
       if ((c < 0.) || (c > 80.))
         vetoEvent;

       _histNchVsCentr->fill(c, charged.particles().size() * event.weight());
     }

     void finalize() {
       // Finishing the centrality calculations
       //const Centrality& centralityProjection = getProjection<Centrality>("CENTR");
       //centralityProjection.finalize();

       // Proper scaling of the histogram bins with their individual weights.
       for (size_t ii = 0; ii < _histNchVsCentr->numBins(); ii++) {
         double scale = _histNchVsCentr->bin(ii).xWidth() / _histNchVsCentr->bin(ii).numEntries();
         _histNchVsCentr->bin(ii).scaleW( scale );
       }
     }

     // @note First time a post() method is implemented and used by Rivet. This is only available on the
     // mcplots-alice-dev.cern.ch and mcplots-alice-dev2.cern.ch machines, because the Rivet installation
     // there is the only one which implements Rivet::Analysis::post() and Rivet::AnalysisHandler::post().
     void post() { }

   private:
     Histo1DPtr _histNchVsCentr;
   };

   // The hook for the plugin system
   DECLARE_RIVET_PLUGIN(ALICE_2010_I880049);
 }
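Editor's sketch, not part of the patch: the register-then-query pattern that HeavyIonAnalysis introduces, condensed from the analyses in this patch. The analysis name MyHeavyIonAnalysis is hypothetical; the calls (HeavyIonAnalysis::init(), addCentralityMethod(), centrality(), vetoEvent) are the ones the patch itself uses.

// Minimal illustrative subclass (hypothetical), showing only the centrality plumbing.
#include "pluginALICE/HeavyIonAnalysis.hh"

namespace Rivet {

  class MyHeavyIonAnalysis : public HeavyIonAnalysis {
  public:
    MyHeavyIonAnalysis() : HeavyIonAnalysis("MyHeavyIonAnalysis") { }

    void init() {
      HeavyIonAnalysis::init();
      // Register a centrality estimator once, under a user-chosen ID:
      // impact-parameter based, calibrated with 10000 events.
      addCentralityMethod(HeavyIonAnalysis::ImpactParameter, 10000, "method1");
    }

    void analyze(const Event& event) {
      // Query the centrality percentile by the same ID and veto events
      // outside the range covered by the analysis.
      const double c = centrality(event, "method1");
      if (c < 0. || c > 80.) vetoEvent;
    }
  };

  DECLARE_RIVET_PLUGIN(MyHeavyIonAnalysis);
}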
diff --git a/analyses/pluginALICE/ALICE_2012_I1127497.cc b/analyses/pluginALICE/ALICE_2012_I1127497.cc
--- a/analyses/pluginALICE/ALICE_2012_I1127497.cc
+++ b/analyses/pluginALICE/ALICE_2012_I1127497.cc
@@ -1,174 +1,176 @@
 // -*- C++ -*-
-#include "Rivet/Analysis.hh"
+#include "pluginALICE/HeavyIonAnalysis.hh"
 #include "Rivet/Projections/ChargedFinalState.hh"
 #include <fstream>

 #define NHISTOS 15
 #define _USE_MATH_DEFINES
 #include <cmath> // maybe use boost

 namespace Rivet {

-  class ALICE_2012_I1127497 : public Analysis {
+  class ALICE_2012_I1127497 : public HeavyIonAnalysis {
   public:
-    ALICE_2012_I1127497() : Analysis("ALICE_2012_I1127497") { }
+    ALICE_2012_I1127497() :
+      HeavyIonAnalysis("ALICE_2012_I1127497")
+    { }

     void init() {
       // Charged final states
       const ChargedFinalState cfs(-0.8, 0.8, 150*MeV);
       addProjection(cfs, "CFS");

       // @debug Member "log" is declared below.
       //log.open("/data/mcplots/mcplots/scripts/mcprod/testarea/logs/log.log");

       // @note and @todo In principle, only ONE pp histogram is needed, since centrality does not make a
       // difference here. However, in some cases it had to be re-binned, because some of the binnings of
       // the histograms in this analysis differ from each other. This is therefore the "brute-force" but
       // easy-to-implement way; making it more efficient could be part of the optimization procedure in
       // the end. Now, initialize the AA and pp dummy histograms.
       for (size_t d = 0; d < NHISTOS; ++d) {
         m_sumOfWeights[1][d] = 0;
         _histNch[1][d] = bookHisto1D(d + 1, 1, 1);
         _histRAA[d] = bookScatter2D(d + 16, 1, 1);

         m_sumOfWeights[0][d] = 0;
         // @note Give a proper name to the dummy histograms. It has to incorporate at least the name of
         // the analysis, ALICE_2012_I1127497, and must of course not have the same path as another
         // analysis object used in this analysis.
         std::string name = _histNch[1][d]->name();
         //cout << "PbPb name: " << _histNch[1][d]->name() << endl;
         std::string namePP = name + "-pp";
         _histNch[0][d] = bookHisto1D(namePP, refData(d + 1, 1, 1)); // binning is also taken from ref data
         //cout << "pp name: " << _histNch[0][d]->name() << endl;
       }

       // Centrality regions: each pair of consecutive numbers gives the boundaries of one region. Note
       // that some regions overlap with others. The current implementation of the centrality bins may not
       // be the most efficient and flexible one; take this into account during the optimization procedure.
       m_centrRegions.clear();
       m_centrRegions += {{0., 0.05,  0.05, 0.1,  0.1, 0.2,  0.2, 0.3,  0.3, 0.4,
                           0.4, 0.5,  0.5, 0.6,  0.6, 0.7,  0.7, 0.8,
                           0., 0.1,   0., 0.2,   0.2, 0.4,  0.4, 0.6,  0.4, 0.8,  0.6, 0.8}};
     }

     void analyze(const Event& event) {
       const double weight = event.weight();

       // Final-state particles with at least pT = 150 MeV in the eta range |eta| < 0.8
       const ChargedFinalState& charged = applyProjection<ChargedFinalState>(event, "CFS");
       Particles chargedParticles = charged.particlesByPt();

       // @note This convention is chosen here because JEWEL returns a centrality value of -1 for a vacuum run.
       float centr = -1;
       // @note It has to be checked explicitly that the pointer is sane and not a null pointer. The
       // standard constructor of HepMC does not initialize the pointer and returns a null pointer in case
       // no heavy-ion record was written to the HepMC file, which is e.g. the case for the current pp
       // generators. Accessing it naively may therefore lead to a segmentation fault.
       if (event.genEvent()->heavy_ion()) {
         // JEWEL saves the centrality as the impact parameter. This should be changed later in JEWEL
         // directly. Note that changes to HepMC may be required as well. However, given the current
         // implementation, the centrality can be accessed via
         centr = event.genEvent()->heavy_ion()->impact_parameter();
       }

       // Veto the event for too large centralities, since those are not used in the analysis at all.
       if (centr > 0.8)
         vetoEvent;

       size_t pp_AA;
       float pT;
       vector< size_t > indices;
       indices.clear();

       if (centr < 0.) {
         // Use this as a flag to decide later which histograms should be filled. pp_AA = 0 corresponds
         // to a pp beam (and, in the case of JEWEL, to a vacuum run).
         pp_AA = 0;
         for (size_t i = 0; i < NHISTOS; ++i)
           // Push all indices in the case of the pp dummy histograms.
           indices.push_back(i);
       }
       else {
         pp_AA = 1;
         for (size_t i = 0; i < NHISTOS; ++i) {
           // Check the centrality bins and push the corresponding indices of the AA histograms.
           if (inRange(centr, m_centrRegions[2 * i], m_centrRegions[2 * i + 1])) {
             indices.push_back(i);
           }
         }
       }

       // Fill the right histograms and add weights based on the flag pp_AA and the indices pushed to "indices".
       for (size_t i = 0; i < indices.size(); ++i) {
         m_sumOfWeights[pp_AA][indices.at(i)] += weight;
         foreach (const Particle& p, chargedParticles) {
           pT = p.pT()/GeV;
           if (pT < 50.) {
             _histNch[pp_AA][indices.at(i)]->fill(pT, weight * (1 / pT));
             //cout << "Weight: " << weight << endl;
             //cout << "pT: " << pT << endl;
           }
         }
       }
     }

     void finalize() {
       // Proper scaling of the histograms with their individual weights.
       for (size_t pp_AA = 0; pp_AA < 2; ++pp_AA) {
         for (size_t i = 0; i < NHISTOS; ++i) {
           if (m_sumOfWeights[pp_AA][i] > 0) // @note Is this necessary, or can it be handled by the scaling method?
             scale(_histNch[pp_AA][i], (1. / m_sumOfWeights[pp_AA][i] / 2. / M_PI / 1.6)); // TODO PI
         }
       }
     }

     // @note First time a post() method is implemented and used by Rivet. This is only available on the
     // mcplots-alice-dev.cern.ch machine, because the Rivet installation there is the only one which
     // implements Rivet::Analysis::post() and Rivet::AnalysisHandler::post().
     void post() {
       // Divide the AA and pp histograms to obtain the R_AA histograms.
       for (size_t i = 0; i < NHISTOS; ++i) {
         //cout << "_histNch[1][" << i << "]->bin(0).numEntries(): " << _histNch[1][i]->bin(0).numEntries() << endl;
         //cout << "_histNch[0][" << i << "]->bin(0).numEntries(): " << _histNch[0][i]->bin(0).numEntries() << endl;
         //cout << "_histRAA[" << i << "]->bin(0): " << _histRAA[i]->bin(0) << endl << endl;
         divide(_histNch[1][i], _histNch[0][i], _histRAA[i]);
         //cout << "_histRAA[" << i << "]->bin(0): " << _histRAA[i]->bin(0) << endl << endl;
       }
       // @debug
       //log.close();
     }

   private:
     //std::ofstream validationFile;

     Histo1DPtr _histNch[2][NHISTOS];
     double m_sumOfWeights[2][NHISTOS];
     Scatter2DPtr _histRAA[NHISTOS];
     std::vector<float> m_centrRegions;

     // @debug
     //std::ofstream log;
   };

   // The hook for the plugin system
   DECLARE_RIVET_PLUGIN(ALICE_2012_I1127497);
 }
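Editor's sketch, not part of the patch: the two event-classification steps the @note comments above describe — guarding HepMC's heavy_ion() pointer, which is null for pp events, and collecting every overlapping centrality region an AA event falls into. Helper and variable names are hypothetical; the boundary list is copied from m_centrRegions, and the half-open [low, high) check mirrors Rivet's default inRange semantics (an assumption).

// Standalone sketch (hypothetical names, not part of the patch).
#include "HepMC/GenEvent.h"
#include <iostream>
#include <vector>

// Guarded access: heavy_ion() returns a null pointer for pp events, so check
// before dereferencing. JEWEL stores its centrality in the impact-parameter
// field; -1 is the analysis' flag for a pp / vacuum run.
float centralityOrMinusOne(const HepMC::GenEvent* ge) {
  if (ge && ge->heavy_ion())
    return ge->heavy_ion()->impact_parameter();
  return -1.f;
}

int main() {
  HepMC::GenEvent ge; // a default-constructed event has no heavy-ion record
  std::cout << "pp-like event flag: " << centralityOrMinusOne(&ge) << "\n"; // -1

  // Overlapping centrality regions, flattened (low, high) pairs as in m_centrRegions.
  const std::vector<float> regions = {
    0., 0.05, 0.05, 0.1, 0.1, 0.2, 0.2, 0.3, 0.3, 0.4, 0.4, 0.5, 0.5, 0.6,
    0.6, 0.7, 0.7, 0.8, 0., 0.1, 0., 0.2, 0.2, 0.4, 0.4, 0.6, 0.4, 0.8, 0.6, 0.8
  };
  const float centr = 0.45f; // example AA centrality value
  std::vector<size_t> indices;
  for (size_t i = 0; i < regions.size() / 2; ++i)
    if (centr >= regions[2*i] && centr < regions[2*i + 1]) // assumes [low, high)
      indices.push_back(i);
  for (size_t i : indices)
    std::cout << "fill _histNch[1][" << i << "]\n"; // prints 5, 12, 13
  return 0;
}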
diff --git a/analyses/pluginALICE/ALICE_2012_I930312.cc b/analyses/pluginALICE/ALICE_2012_I930312.cc
--- a/analyses/pluginALICE/ALICE_2012_I930312.cc
+++ b/analyses/pluginALICE/ALICE_2012_I930312.cc
@@ -1,355 +1,355 @@
 // -*- C++ -*-
 #include "pluginALICE/HeavyIonAnalysis.hh"
 #include "Rivet/Projections/ChargedFinalState.hh"
 #include "Rivet/Tools/Cuts.hh"
 #include <iostream>

 #define _USE_MATH_DEFINES
 #include <cmath>

 namespace Rivet {

   class ALICE_2012_I930312 : public HeavyIonAnalysis {
   public:
     ALICE_2012_I930312() :
       HeavyIonAnalysis("ALICE_2012_I930312")
     { }

     void init() {
       HeavyIonAnalysis::init();

       // Set centrality method
       addCentralityMethod(HeavyIonAnalysis::ImpactParameter, 10000, "method1");

       // Charged final states with |eta| < 1.0 and 8 < pT < 15 GeV/c for trigger particles
       const Cut& cutTrigger = Cuts::abseta < 1.0 && Cuts::pT > 8*GeV && Cuts::pT < 15*GeV;
       const ChargedFinalState cfsTrigger(cutTrigger);
       addProjection(cfsTrigger, "CFSTrigger");

       // Set the limit values of the pT bins
       pt_limits[0] = 3;
       pt_limits[1] = 4;
       pt_limits[2] = 6;
       pt_limits[3] = 8;
       pt_limits[4] = 10;

       // Charged final states with |eta| < 1.0 and different pT bins for associated particles
       for (int ipt = 0; ipt < PT_BINS; ipt++) {
         Cut mycut = Cuts::abseta < 1.0 && Cuts::pT > pt_limits[ipt]*GeV && Cuts::pT < pt_limits[ipt + 1]*GeV;
         addProjection(ChargedFinalState(mycut), "CFSAssoc" + std::to_string(ipt));
       }

       // Create event strings
       event_string[0] = "pp";
       event_string[1] = "central";
       event_string[2] = "peripheral";
       event_string[3] = "other";

       // For each event type
       for (int itype = 0; itype < EVENT_TYPES; itype++) {
         // For each pT range
         for (int ipt = 0; ipt < PT_BINS; ipt++) {
           // Initialize the yield histograms
           _histYield[itype][ipt] = bookHisto1D("Yield_" + event_string[itype] + "_" + std::to_string(ipt),
             36, -0.5 * M_PI, 1.5 * M_PI, "Associated particle per trigger particle yield",
             "#Delta#phi (rad)", "1 / N_trig dN_assoc / d#Delta#phi (rad^-1)");
           _histYieldBkgRemoved[itype][ipt] = bookHisto1D("Yield_" + event_string[itype] + "_nobkg_" + std::to_string(ipt),
             36, -0.5 * M_PI, 1.5 * M_PI, "Associated particle per trigger particle yield, no bkg",
             "#Delta#phi (rad)", "1 / N_trig dN_assoc / d#Delta#phi (rad^-1)");
         }
       }

       // Histogram for counting trigger particles for each event type
       _histTriggerCounter = bookHisto1D("Trigger", EVENT_TYPES, 0.0, EVENT_TYPES,
         "Trigger counter", "event type", "N");

       // Initialize the IAA and ICP histograms
       _histIAA[0] = bookScatter2D(1, 1, 1);
       _histIAA[1] = bookScatter2D(2, 1, 1);
       _histIAA[2] = bookScatter2D(5, 1, 1);
       _histIAA[3] = bookScatter2D(3, 1, 1);
       _histIAA[4] = bookScatter2D(4, 1, 1);
       _histIAA[5] = bookScatter2D(6, 1, 1);
     }

     void analyze(const Event& event) {
       // Check whether this is a heavy-ion event
       if (HeavyIonAnalysis::is_heavy_ion(event)) {
         std::cout << "HI EVENT: ";
       } else {
         std::cout << "PP EVENT: ";
       }
       std::cout << event.genEvent()->heavy_ion();
EVENT: "; } else { std::cout << "PP EVENT: "; } std::cout << event.genEvent()->heavy_ion(); // Create charged final state for trigger particle const ChargedFinalState& triggerFinalState = applyProjection(event, "CFSTrigger"); Particles triggerParticles = triggerFinalState.particlesByPt(); std::cout << "trig: " << triggerParticles.size(); // Create charged final state for associated particle ChargedFinalState associatedFinalState[PT_BINS]; Particles associatedParticles[PT_BINS]; std::cout << ", assoc: "; for (int ipt = 0; ipt < PT_BINS; ipt++) { associatedFinalState[ipt] = applyProjection(event, "CFSAssoc" + std::to_string(ipt)); associatedParticles[ipt] = associatedFinalState[ipt].particlesByPt(); std::cout << associatedParticles[ipt].size() << " "; } std::cout << std::endl; // Check event type if (HeavyIonAnalysis::is_heavy_ion(event)) { double centr = centrality(event, "method1"); std::cout << "centrality: " << centr << std::endl; if (centr > 0.0 && centr < 5.0) event_type = 1; // PbPb, central else if (centr > 60.0 && centr < 90.0) event_type = 2; // PbPb, peripherial else event_type = 3; // PbPb, other } else { event_type = 0; // pp } std::cout << "ev type: " << event_string[event_type] << " (" << event_type << ")" << std::endl; // Veto event if not valid event type if (event_type == 3) vetoEvent; // Fill trigger histogram for a proper event type _histTriggerCounter->fill(event_type, triggerParticles.size()); // Loop over trigger particles foreach (const Particle& triggerParticle, triggerParticles) { std::cout << "Trigger particle" << std::endl; // For each pt bin for (int ipt = 0; ipt < PT_BINS; ipt++) { std::cout << "pt bin " << ipt << std::endl; // Loop over associated particles foreach (const Particle& associatedParticle, associatedParticles[ipt]) { std::cout << "Associated particle" << std::endl; // If associated and trigger particle are not the same particles if (associatedParticle != triggerParticle) { // If pt of trigger particle is greater than pt of associated particle if (triggerParticle.pt() > associatedParticle.pt()) { // Calculate delta phi in range (-0.5*PI, 1.5*PI) double dPhi = triggerParticle.phi() - associatedParticle.phi(); while (dPhi > 1.5 * M_PI) { dPhi -= 2 * M_PI; } while (dPhi < -0.5 * M_PI) { dPhi += 2 * M_PI; } // Fill yield histogram for calculated delta phi std::cout << "Filling yield histogram for pt = " << pt_limits[ipt] << "-" << pt_limits[ipt + 1] << " GeV/c for delta phi = " << dPhi << std::endl; _histYield[event_type][ipt]->fill(dPhi, 1); } } } } } std::cout << std::endl; } void finalize() { std::cout << "Finalizing..." 

     void finalize() {
       std::cout << "Finalizing..." << std::endl;
     }

     void post() {
       // Variables for the near- and away-side peak calculation
       double nearSide[EVENT_TYPES][PT_BINS] = { {0.0} };
       double awaySide[EVENT_TYPES][PT_BINS] = { {0.0} };

       // Variables for the background error calculation
       double background[EVENT_TYPES][PT_BINS] = { {0.0} };
       double backgroundError[EVENT_TYPES][PT_BINS] = { {0.0} };

       // Variables for the integration error calculation
       double scalingFactor[EVENT_TYPES] = {0.0};
       double numberOfEntries[EVENT_TYPES][PT_BINS][2] = { { {0.0} } };
       int numberOfBins[EVENT_TYPES][PT_BINS][2] = { { {0} } };

       std::cout << "Trigger counter histogram entries: " << _histTriggerCounter->bin(0).sumW() << " "
                 << _histTriggerCounter->bin(1).sumW() << " " << _histTriggerCounter->bin(2).sumW() << std::endl;
       std::cout << "pp yield pt bin 0, entries: " << _histYield[0][0]->numEntries() << std::endl;
       std::cout << "pp yield pt bin 1, entries: " << _histYield[0][1]->numEntries() << std::endl;
       std::cout << "pp yield pt bin 2, entries: " << _histYield[0][2]->numEntries() << std::endl;
       std::cout << "pp yield pt bin 3, entries: " << _histYield[0][3]->numEntries() << std::endl;
       std::cout << "Central yield pt bin 0, entries: " << _histYield[1][0]->numEntries() << std::endl;
       std::cout << "Central yield pt bin 1, entries: " << _histYield[1][1]->numEntries() << std::endl;
       std::cout << "Central yield pt bin 2, entries: " << _histYield[1][2]->numEntries() << std::endl;
       std::cout << "Central yield pt bin 3, entries: " << _histYield[1][3]->numEntries() << std::endl;
       std::cout << "Peripheral yield pt bin 0, entries: " << _histYield[2][0]->numEntries() << std::endl;
       std::cout << "Peripheral yield pt bin 1, entries: " << _histYield[2][1]->numEntries() << std::endl;
       std::cout << "Peripheral yield pt bin 2, entries: " << _histYield[2][2]->numEntries() << std::endl;
       std::cout << "Peripheral yield pt bin 3, entries: " << _histYield[2][3]->numEntries() << std::endl;

       // For each event type
       for (int itype = 0; itype < EVENT_TYPES; itype++) {
         // For each pT range
         for (int ipt = 0; ipt < PT_BINS; ipt++) {
           // Check that the histograms are fine
           if (_histTriggerCounter->numEntries() == 0 || _histYield[itype][ipt]->numEntries() == 0) {
             std::cout << _histTriggerCounter->numEntries() << std::endl;
             std::cout << _histYield[itype][ipt]->numEntries() << std::endl;
             std::cout << "There are no entries in one of the histograms" << std::endl;
             continue;
           }

           // Scale the yield histogram
           std::cout << "Scaling yield histograms..." << std::endl;
           if (_histTriggerCounter->bin(itype).sumW() != 0) {
             std::cout << "Scaling histogram type " << itype << " pt bin " << ipt << "..." << std::endl;
             std::cout << "Scaling factor: " << _histTriggerCounter->bin(itype).sumW() << std::endl;
             scalingFactor[itype] = 1. / _histTriggerCounter->bin(itype).sumW();
             std::cout << "Bin 1 before scaling: " << _histYield[itype][ipt]->bin(1).sumW() << std::endl;
             scale(_histYield[itype][ipt], (1. / _histTriggerCounter->bin(itype).sumW()));
             std::cout << "Bin 1 after scaling: " << _histYield[itype][ipt]->bin(1).sumW() << std::endl;
           }

           // Calculate the background
           std::cout << "Calculating background" << std::endl;
           double sum = 0.0;
           int nbins = 0;
           for (unsigned int ibin = 0; ibin < _histYield[itype][ipt]->numBins(); ibin++) {
             std::cout << "Bin " << ibin << std::endl;
             if (_histYield[itype][ipt]->bin(ibin).xMid() > (0.5 * M_PI - 0.4) &&
                 _histYield[itype][ipt]->bin(ibin).xMid() < (0.5 * M_PI + 0.4)) {
               std::cout << "Adding " << _histYield[itype][ipt]->bin(ibin).sumW() << std::endl;
               sum += _histYield[itype][ipt]->bin(ibin).sumW();
               nbins++;
             }
           }
           if (nbins == 0) {
             std::cout << "Failed to estimate background!" << std::endl;
             continue;
           }
           std::cout << "Dividing " << sum << " / " << nbins << std::endl;
           background[itype][ipt] = sum / nbins;
           std::cout << "background: " << background[itype][ipt] << std::endl;

           // Calculate the background error
           sum = 0.0;
           nbins = 0;
           for (unsigned int ibin = 0; ibin < _histYield[itype][ipt]->numBins(); ibin++) {
             if (_histYield[itype][ipt]->bin(ibin).xMid() > (0.5 * M_PI - 0.4) &&
                 _histYield[itype][ipt]->bin(ibin).xMid() < (0.5 * M_PI + 0.4)) {
               sum += (_histYield[itype][ipt]->bin(ibin).sumW() - background[itype][ipt]) *
                      (_histYield[itype][ipt]->bin(ibin).sumW() - background[itype][ipt]);
               nbins++;
             }
           }
           backgroundError[itype][ipt] = sqrt(sum / (nbins - 1));
           std::cout << "backgroundError: " << backgroundError[itype][ipt] << std::endl;

           // Fill the histograms with the background removed
           std::cout << "Filling histogram with removed background..." << std::endl;
           for (unsigned int ibin = 0; ibin < _histYield[itype][ipt]->numBins(); ibin++) {
             std::cout << "Bin " << ibin << ", value "
                       << _histYield[itype][ipt]->bin(ibin).sumW() - background[itype][ipt] << std::endl;
             _histYieldBkgRemoved[itype][ipt]->fillBin(ibin,
               _histYield[itype][ipt]->bin(ibin).sumW() - background[itype][ipt]);
           }
           std::cout << "Integrating the whole histogram: " << _histYieldBkgRemoved[itype][ipt]->integral() << std::endl;

           // Integrate the near-side yield
           std::cout << "Integrating near-side yield..." << std::endl;
-          unsigned int lowerBin = _histYield[itype][ipt]->binIndexAt(-0.7);
-          unsigned int upperBin = _histYield[itype][ipt]->binIndexAt(0.7) + 1;
+          unsigned int lowerBin = _histYield[itype][ipt]->binIndexAt(-0.7 + 0.02);
+          unsigned int upperBin = _histYield[itype][ipt]->binIndexAt( 0.7 - 0.02) + 1;
           nbins = upperBin - lowerBin;
           numberOfBins[itype][ipt][0] = nbins;
           std::cout << _histYield[itype][ipt]->integralRange(1, 1) << " "
                     << _histYield[itype][ipt]->bin(1).sumW() << std::endl;
           std::cout << _histYield[itype][ipt]->integralRange(1, 2) << " "
                     << _histYield[itype][ipt]->bin(1).sumW() + _histYield[itype][ipt]->bin(2).sumW() << std::endl;
           nearSide[itype][ipt] = _histYield[itype][ipt]->integralRange(lowerBin, upperBin) -
             nbins * background[itype][ipt];
           numberOfEntries[itype][ipt][0] = _histYield[itype][ipt]->integralRange(lowerBin, upperBin) *
             _histTriggerCounter->bin(itype).sumW();
           std::cout << "_histYield[" << itype << "][" << ipt << "]->integralRange(" << lowerBin << ", "
                     << upperBin << ") - nbins * bkg = "
                     << _histYield[itype][ipt]->integralRange(lowerBin, upperBin) << " - " << nbins << " * "
                     << background[itype][ipt] << " = " << nearSide[itype][ipt] << std::endl;

           // Integrate the away-side yield
           std::cout << "Integrating away-side yield..." << std::endl;
           lowerBin = _histYield[itype][ipt]->binIndexAt(M_PI - 0.7);
           upperBin = _histYield[itype][ipt]->binIndexAt(M_PI + 0.7) + 1;
           nbins = upperBin - lowerBin;
           numberOfBins[itype][ipt][1] = nbins;
           awaySide[itype][ipt] = _histYield[itype][ipt]->integralRange(lowerBin, upperBin) -
             nbins * background[itype][ipt];
           numberOfEntries[itype][ipt][1] = _histYield[itype][ipt]->integralRange(lowerBin, upperBin) *
             _histTriggerCounter->bin(itype).sumW();
           std::cout << "_histYield[" << itype << "][" << ipt << "]->integralRange(" << lowerBin << ", "
                     << upperBin << ") - nbins * bkg = "
                     << _histYield[itype][ipt]->integralRange(lowerBin, upperBin) << " - " << nbins << " * "
                     << background[itype][ipt] << " = " << awaySide[itype][ipt] << std::endl;
         }
       }

       // Variables for the IAA/ICP plots
       double dI = 0.0;
       int near = 0;
       int away = 1;
       double xval[PT_BINS] = { 3.5, 5.0, 7.0, 9.0 };
       double xerr[PT_BINS] = { 0.5, 1.0, 1.0, 1.0 };

       int types1[3] = {1, 2, 1};
       int types2[3] = {0, 0, 2};
       string type_string[3] = {"I_AA 0-5%/pp", "I_AA 60-90%/pp", "I_CP 0-5%/60-90%"};

       // Fill the IAA/ICP plots for the near-side peak
       std::cout << "Near side" << std::endl;
       for (int ihist = 0; ihist < 3; ihist++) {
         int type1 = types1[ihist];
         int type2 = types2[ihist];
         for (int ipt = 0; ipt < PT_BINS; ipt++) {
           std::cout << type_string[ihist] << ", pt=(" << pt_limits[ipt] << ", " << pt_limits[ipt+1] << "): "
                     << nearSide[type1][ipt] / nearSide[type2][ipt];
           dI = scalingFactor[type1] * sqrt(numberOfEntries[type1][ipt][near]) / nearSide[type2][ipt] +
                scalingFactor[type2] * sqrt(numberOfEntries[type2][ipt][near]) * nearSide[type1][ipt] /
                  (nearSide[type2][ipt] * nearSide[type2][ipt]) +
                numberOfBins[type1][ipt][near] * backgroundError[type1][ipt] / nearSide[type2][ipt] +
                numberOfBins[type2][ipt][near] * backgroundError[type2][ipt] * nearSide[type1][ipt] /
                  (nearSide[type2][ipt] * nearSide[type2][ipt]);
           std::cout << " +- " << dI << std::endl;
           _histIAA[ihist]->addPoint(xval[ipt], nearSide[type1][ipt] / nearSide[type2][ipt], xerr[ipt], dI);
         }
       }

       // Fill the IAA/ICP plots for the away-side peak
       std::cout << "Away side" << std::endl;
       for (int ihist = 0; ihist < 3; ihist++) {
         int type1 = types1[ihist];
         int type2 = types2[ihist];
         for (int ipt = 0; ipt < PT_BINS; ipt++) {
           std::cout << type_string[ihist] << ", pt=(" << pt_limits[ipt] << ", " << pt_limits[ipt+1] << "): "
                     << awaySide[type1][ipt] / awaySide[type2][ipt];
           dI = scalingFactor[type1] * sqrt(numberOfEntries[type1][ipt][away]) / awaySide[type2][ipt] +
                scalingFactor[type2] * sqrt(numberOfEntries[type2][ipt][away]) * awaySide[type1][ipt] /
                  (awaySide[type2][ipt] * awaySide[type2][ipt]) +
                numberOfBins[type1][ipt][away] * backgroundError[type1][ipt] / awaySide[type2][ipt] +
                numberOfBins[type2][ipt][away] * backgroundError[type2][ipt] * awaySide[type1][ipt] /
                  (awaySide[type2][ipt] * awaySide[type2][ipt]);
           std::cout << " +- " << dI << std::endl;
           _histIAA[ihist + 3]->addPoint(xval[ipt], awaySide[type1][ipt] / awaySide[type2][ipt], xerr[ipt], dI);
         }
       }
     }

   private:
     static const int PT_BINS = 4;
     static const int EVENT_TYPES = 3;

     Histo1DPtr _histYield[EVENT_TYPES][PT_BINS];
     Histo1DPtr _histYieldBkgRemoved[EVENT_TYPES][PT_BINS];
     Histo1DPtr _histTriggerCounter;
     Scatter2DPtr _histIAA[6];

     int pt_limits[5];
     int event_type;
     string event_string[EVENT_TYPES + 1];
   };

   // The hook for the plugin system
   DECLARE_RIVET_PLUGIN(ALICE_2012_I930312);
 }
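Editor's sketch, not part of the patch: the delta-phi convention and the flat-background (pedestal) estimate used by the analysis above, reduced to a standalone program. Function and variable names are hypothetical; the wrap range (-pi/2, 3*pi/2), the plateau window (pi/2 - 0.4, pi/2 + 0.4), and the mean/spread formulas follow the code in analyze() and post().

// Standalone sketch (hypothetical names, not part of the patch).
#define _USE_MATH_DEFINES
#include <cmath>
#include <iostream>
#include <vector>

// Wrap an azimuthal difference into (-pi/2, 3*pi/2), putting the near-side
// peak at 0 and the away-side peak at pi, as in analyze().
double wrapDeltaPhi(double dPhi) {
  while (dPhi >  1.5 * M_PI) dPhi -= 2. * M_PI;
  while (dPhi < -0.5 * M_PI) dPhi += 2. * M_PI;
  return dPhi;
}

int main() {
  std::cout << wrapDeltaPhi(5.0)  << "\n"; // 5 - 2*pi ~ -1.28
  std::cout << wrapDeltaPhi(-2.0) << "\n"; // -2 + 2*pi ~ 4.28

  // Pedestal estimate, as in post(): mean of the plateau bins between the two
  // peaks (bin centers within pi/2 +- 0.4) and a sample-spread error, here on
  // toy per-trigger yields.
  const std::vector<double> plateau = {0.52, 0.48, 0.50, 0.51, 0.49};
  double sum = 0.;
  for (double y : plateau) sum += y;
  const double background = sum / plateau.size();
  double var = 0.;
  for (double y : plateau) var += (y - background) * (y - background);
  const double backgroundError = std::sqrt(var / (plateau.size() - 1));
  std::cout << "background = " << background << " +- " << backgroundError
            << "\n"; // 0.5 +- ~0.016
  return 0;
}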
diff --git a/analyses/pluginALICE/HeavyIonAnalysis.hh b/analyses/pluginALICE/HeavyIonAnalysis.hh
--- a/analyses/pluginALICE/HeavyIonAnalysis.hh
+++ b/analyses/pluginALICE/HeavyIonAnalysis.hh
@@ -1,62 +1,68 @@
 // -*- C++ -*-
+#ifndef RIVET_HeavyIonAnalysis_HH
+#define RIVET_HeavyIonAnalysis_HH
+
 #include "Rivet/Analysis.hh"
+#include "Rivet/Projections/ChargedFinalState.hh"

 namespace Rivet {

   class HeavyIonAnalysis : public Analysis {
   public:
     HeavyIonAnalysis(const std::string &name = "");

     // Methods specific to heavy-ion analyses
     enum CentralityMethod {
       ImpactParameter,
       Multiplicity,
       Undefined
     };

     double quantile(double value, const Histo1DPtr hist) const;

     void addCentralityMethod(const CentralityMethod method,
                              const size_t numEventsRequired,
                              const string methodID,
                              const FinalState *fs = 0x0);

     double centrality(const Event& event, const string methodID = "") const;

     bool is_heavy_ion(const Event& event) const;

   private:
     void centrality_method_not_found() const;

     float calculateObservable(const Event &event, const int index) const;

     void checkObservable(const float observable, const int index) const;

     string trimString(const string& str) const;

     /// Histograms for centrality calibration
     std::vector<Histo1DPtr> _histCentralityCalibrationVector;
     /// Histograms for centrality control. They may be used to compare the distribution
     /// in the current run to the one provided in the calibration histogram.
     std::vector<Histo1DPtr> _histCentralityControlVector;

     /// Strings with the cuts
     std::vector<string> _cutStringVector;

     /// Methods of the centrality calibration
     std::vector<CentralityMethod> _centmethodVector;

     /// Number of events required for a selected method of centrality calibration
     std::vector<size_t> _numEventsRequiredVector;

     /// Names of the centrality calibration methods
     std::vector<string> _methodNameVector;

     /// IDs of the centrality methods
     std::vector<string> _methodIDVector;
   };
 }
+
+#endif
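Editor's sketch, not part of the patch: the header's quantile() interface suggests the usual percentile definition of centrality, i.e. the fraction of the calibration distribution lying below (for an impact-parameter estimator) the observed value. The implementation file is not included in this patch, so the following standalone reduction of that idea is an assumption about its semantics, with hypothetical names throughout.

// Hypothetical sketch of a percentile ("quantile") lookup over a calibration
// sample. The real HeavyIonAnalysis::quantile() operates on a Histo1DPtr and
// may differ from this.
#include <algorithm>
#include <iostream>
#include <vector>

// Fraction of calibration events with an observable below 'value'.
double quantile(double value, std::vector<double> calibrationSample) {
  std::sort(calibrationSample.begin(), calibrationSample.end());
  const auto pos = std::lower_bound(calibrationSample.begin(),
                                    calibrationSample.end(), value);
  return double(pos - calibrationSample.begin()) / calibrationSample.size();
}

int main() {
  // For an impact-parameter estimator, small b means a central collision, so
  // the centrality in percent is 100 * quantile(b, sample).
  const std::vector<double> sample = {1., 2., 3., 4., 5., 6., 7., 8., 9., 10.};
  std::cout << "centrality = " << 100. * quantile(4.5, sample) << "%\n"; // 40%
  return 0;
}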