diff --git a/analyses/pluginATLAS/ATLAS_2010_S8817804.info b/analyses/pluginATLAS/ATLAS_2010_S8817804.info
--- a/analyses/pluginATLAS/ATLAS_2010_S8817804.info
+++ b/analyses/pluginATLAS/ATLAS_2010_S8817804.info
@@ -1,40 +1,41 @@
Name: ATLAS_2010_S8817804
Year: 2010
Summary: Inclusive jet cross section and di-jet mass and chi spectra at 7 TeV in ATLAS
Experiment: ATLAS
Collider: LHC 7TeV
SpiresID: 8817804
InspireID: 871366
Status: VALIDATED
+Reentrant: True
Authors:
 - James Monk
References:
 - arXiv:1009.5908
RunInfo: pp QCD jet production with a minimum jet pT of 60 GeV (inclusive) or 30 GeV (di-jets) at 7 TeV.
NumEvents: 10000000
Beams: [p+, p+]
Energies: [7000]
PtCuts: [30, 60]
Description:
  The first jet cross section measurement made with the ATLAS detector at the
  LHC. Anti-kt jets with $R=0.4$ and $R=0.6$ are reconstructed within
  $|y|<2.8$ and above 60~GeV for the inclusive jet cross section plots. For
  the di-jet plots the second jet must have pT>30~GeV. Jet pT and di-jet mass
  spectra are plotted in bins of rapidity between $|y| = $ 0.3, 0.8, 1.2, 2.1,
  and 2.8. Di-jet $\chi$ spectra are plotted in bins of di-jet mass between
  340 GeV, 520 GeV, 800 GeV and 1200 GeV.
NeedCrossSection: yes
BibKey: Aad:2010wv
BibTeX: '@Article{Aad:2010wv,
     author = "Aad, G. and others",
     collaboration = "ATLAS",
     title = "{Measurement of inclusive jet and dijet cross sections in
              proton-proton collisions at 7 TeV centre-of-mass energy with
              the ATLAS detector}",
     year = "2010",
     eprint = "1009.5908",
     archivePrefix = "arXiv",
     primaryClass = "hep-ex",
     SLACcitation = "%%CITATION = 1009.5908;%%"
}'
diff --git a/analyses/pluginMC/MC_Cent_pPb_Calib.cc b/analyses/pluginMC/MC_Cent_pPb_Calib.cc
new file mode 100644
--- /dev/null
+++ b/analyses/pluginMC/MC_Cent_pPb_Calib.cc
@@ -0,0 +1,81 @@
+// -*- C++ -*-
+#include "Rivet/Analyses/MC_Cent_pPb.hh"
+#include "Rivet/Projections/ImpactParameterProjection.hh"
+
+namespace Rivet {
+
+
+/// Generic analysis looking at various distributions of final state particles
+class MC_Cent_pPb_Calib : public Analysis {
+
+public:
+
+  DEFAULT_RIVET_ANALYSIS_CTOR(MC_Cent_pPb_Calib);
+
+  /// Book histograms and initialise projections before the run
+  void init() {
+
+    // One projection for the actual observable, and one for the
+    // generated impact parameter.
+    declare(MC_SumETFwdPbCentrality(), "Centrality");
+    declare(ImpactParameterProjection(), "IMP");
+    declare(MC_pPbMinBiasTrigger(), "Trigger");
+
+    // The calibration histogram:
+    _calib = bookHisto1D("SumETPb", 100, 0.0, 200.0);
+
+    // If the histogram was pre-loaded, the calibration is done.
+    _done = ( _calib->numEntries() > 0 );
+
+    // The alternative histogram based on impact parameter. Note that
+    // it MUST be named the same as the histogram for the experimental
+    // observable with an added _IMP suffix for the Percentile<>
+    // binning to work properly.
+    _impcalib = bookHisto1D("SumETPb_IMP", 400, 0.0, 20.0);
+
+
+  }
+
+  /// Perform the per-event analysis
+  void analyze(const Event& event) {
+
+    if ( _done ) return;
+
+    const double weight = event.weight();
+
+    // The alternative centrality, based on the generated impact
+    // parameter, assumes that the generator does not describe the
+    // full final state, and should therefore be filled even if the
+    // event is not triggered.
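+    // (The value filled below is the generated impact parameter from the
+    // ImpactParameterProjection declared in init(); analyses such as
+    // MC_Cent_pPb_Eta can presumably select this alternative calibration
+    // via their cent=IMP option instead of the SumETPb observable above.)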
+ _impcalib->fill(apply(event, "IMP")(), weight); + + if ( !apply(event, "Trigger")() ) vetoEvent; + + _calib->fill(apply(event, "Centrality")(), weight); + + } + + /// Finalize + void finalize() { + + _calib->normalize(); + _impcalib->normalize(); + + } + +private: + + /// The calibration histograms. + Histo1DPtr _calib; + Histo1DPtr _impcalib; + + /// Safeguard from adding to a pre-loaded histogram. + bool _done; + +}; + + +// The hook for the plugin system +DECLARE_RIVET_PLUGIN(MC_Cent_pPb_Calib); + +} diff --git a/analyses/pluginMC/MC_Cent_pPb_Calib.info b/analyses/pluginMC/MC_Cent_pPb_Calib.info new file mode 100644 --- /dev/null +++ b/analyses/pluginMC/MC_Cent_pPb_Calib.info @@ -0,0 +1,39 @@ +Name: MC_Cent_pPb_Calib +Summary: Template analysis for generating calibration histograms + for centrality +Status: UNVALIDATED +Authors: + - Leif Lönnblad +NumEvents: 50000 +References: + - arXiv:1508.00848 [hep-ex], Eur.Phys.J. C76 (2016) no.4, 199 +RunInfo: Any! +Description: + Template analysis for generating calibration histograms to be used + together with the CentralityProjection and Percentile<> + classes. The example is pPb collisions at 5 TeV and is based on the + ATLAS analysis arXiv:1508.00848 [hep-ex]. The reference YODA file + contains the experimental centrality calibration, but extracted + from the plot in the paper. Note that this has not been unfolded + for detector effects. + +BibKey: Aad:2015zza +BibTeX: '@article{Aad:2015zza, + author = "Aad, Georges and others", + title = "{Measurement of the centrality dependence of the + charged-particle pseudorapidity distribution in + proton–lead collisions at $\sqrt{s_{_\text {NN}}} = + 5.02$ TeV with the ATLAS detector}", + collaboration = "ATLAS", + journal = "Eur. Phys. J.", + volume = "C76", + year = "2016", + number = "4", + pages = "199", + doi = "10.1140/epjc/s10052-016-4002-3", + eprint = "1508.00848", + archivePrefix = "arXiv", + primaryClass = "hep-ex", + reportNumber = "CERN-PH-EP-2015-160", + SLACcitation = "%%CITATION = ARXIV:1508.00848;%%" +}' diff --git a/analyses/pluginMC/MC_Cent_pPb_Calib.plot b/analyses/pluginMC/MC_Cent_pPb_Calib.plot new file mode 100644 --- /dev/null +++ b/analyses/pluginMC/MC_Cent_pPb_Calib.plot @@ -0,0 +1,6 @@ +# BEGIN PLOT /MC_Centrality/SumETPb +Title=Sum $E_T^{Pb}$ distribution, p+Pb $\sqrt{s_{NN}}=5.02$ TeV +XLabel=$\sum E_T^{Pb}$ +YLabel=$(1/N_{evt}) \; \mathrm{d}N/\mathrm{d}\sum E_T^{Pb}$ +# END PLOT + diff --git a/analyses/pluginMC/MC_Cent_pPb_Calib.yoda b/analyses/pluginMC/MC_Cent_pPb_Calib.yoda new file mode 100644 --- /dev/null +++ b/analyses/pluginMC/MC_Cent_pPb_Calib.yoda @@ -0,0 +1,105 @@ +# BEGIN YODA_SCATTER2D /REF/MC_Cent_pPb_Calib/SumETPb +Path=/REF/MC_Cent_pPb_Calib/SumETPb +Type=Scatter2D +# xval xerr- xerr+ yval yerr- yerr+ +1 1.0 1.0 0.026433905291155 6.0e-7 6.0e-7 +3 1.0 1.0 0.0356724593141198 6.0e-7 6.0e-7 +5 1.0 1.0 0.0329659708829365 6.0e-7 6.0e-7 +7 1.0 1.0 0.0292277573876039 6.0e-7 6.0e-7 +9 1.0 1.0 0.0266992277205129 6.0e-7 6.0e-7 +11 1.0 1.0 0.0247287959369707 6.0e-7 6.0e-7 +13 1.0 1.0 0.0231605480909041 6.0e-7 6.0e-7 +15 1.0 1.0 0.0215676779843033 6.0e-7 6.0e-7 +17 1.0 1.0 0.0202422148507814 6.0e-7 6.0e-7 +19 1.0 1.0 0.0191019905081114 6.0e-7 6.0e-7 +21 1.0 1.0 0.0179312930096975 6.0e-7 6.0e-7 +23 1.0 1.0 0.0169304004601292 6.0e-7 6.0e-7 +25 1.0 1.0 0.0158904991267403 6.0e-7 6.0e-7 +27 1.0 1.0 0.014999190244734 6.0e-7 6.0e-7 +29 1.0 1.0 0.0141849733133324 6.0e-7 6.0e-7 +31 1.0 1.0 0.0133146626796707 6.0e-7 6.0e-7 +33 1.0 1.0 0.0123949194577669 6.0e-7 6.0e-7 +35 1.0 
1.0 0.0116996800120865 6.0e-7 6.0e-7 +37 1.0 1.0 0.0109320444338458 6.0e-7 6.0e-7 +39 1.0 1.0 0.0100626105356297 6.0e-7 6.0e-7 +41 1.0 1.0 0.00935197743407604 6.0e-7 6.0e-7 +43 1.0 1.0 0.00864243794000489 6.0e-7 6.0e-7 +45 1.0 1.0 0.00799855436037713 6.0e-7 6.0e-7 +47 1.0 1.0 0.00730026559389592 6.0e-7 6.0e-7 +49 1.0 1.0 0.00672015474402399 6.0e-7 6.0e-7 +51 1.0 1.0 0.00614188724849189 6.0e-7 6.0e-7 +53 1.0 1.0 0.00563387143007573 6.0e-7 6.0e-7 +55 1.0 1.0 0.00520079450100681 6.0e-7 6.0e-7 +57 1.0 1.0 0.00470515268017545 6.0e-7 6.0e-7 +59 1.0 1.0 0.0042735188361277 6.0e-7 6.0e-7 +61 1.0 1.0 0.00395028220117227 6.0e-7 6.0e-7 +63 1.0 1.0 0.0035243909516945 6.0e-7 6.0e-7 +65 1.0 1.0 0.00315259491593193 6.0e-7 6.0e-7 +67 1.0 1.0 0.00286390373331309 6.0e-7 6.0e-7 +69 1.0 1.0 0.00257903279345153 6.0e-7 6.0e-7 +71 1.0 1.0 0.00228856121374107 6.0e-7 6.0e-7 +73 1.0 1.0 0.00206315111747963 6.0e-7 6.0e-7 +75 1.0 1.0 0.00185050505624057 6.0e-7 6.0e-7 +77 1.0 1.0 0.00165242698106285 6.0e-7 6.0e-7 +79 1.0 1.0 0.00142552591969422 6.0e-7 6.0e-7 +81 1.0 1.0 0.00129643602017987 6.0e-7 6.0e-7 +83 1.0 1.0 0.00114326285625885 6.0e-7 6.0e-7 +85 1.0 1.0 0.0010073507668534 6.0e-7 6.0e-7 +87 1.0 1.0 0.00086984314473827 6.0e-7 6.0e-7 +89 1.0 1.0 0.000800375037434358 6.0e-7 6.0e-7 +91 1.0 1.0 0.000701419565466503 6.0e-7 6.0e-7 +93 1.0 1.0 0.0006034263019753 6.0e-7 6.0e-7 +95 1.0 1.0 0.000558046549456325 6.0e-7 6.0e-7 +97 1.0 1.0 0.00048017011878087 6.0e-7 6.0e-7 +99 1.0 1.0 0.000407903552816494 6.0e-7 6.0e-7 +101 1.0 1.0 0.000365333907109052 6.0e-7 6.0e-7 +103 1.0 1.0 0.00031832250942536 6.0e-7 6.0e-7 +105 1.0 1.0 0.000273416231149758 6.0e-7 6.0e-7 +107 1.0 1.0 0.000231781021653951 6.0e-7 6.0e-7 +109 1.0 1.0 0.000201140886726608 6.0e-7 6.0e-7 +111 1.0 1.0 0.000187814907942984 6.0e-7 6.0e-7 +113 1.0 1.0 0.000157404340006397 6.0e-7 6.0e-7 +115 1.0 1.0 0.000138696978093341 6.0e-7 6.0e-7 +117 1.0 1.0 0.000121386839011078 6.0e-7 6.0e-7 +119 1.0 1.0 9.94028911278862e-05 6.0e-7 6.0e-7 +121 1.0 1.0 8.11599192653393e-05 6.0e-7 6.0e-7 +123 1.0 1.0 7.48454921075343e-05 6.0e-7 6.0e-7 +125 1.0 1.0 6.36178083202589e-05 6.0e-7 6.0e-7 +127 1.0 1.0 4.93500481892389e-05 6.0e-7 6.0e-7 +129 1.0 1.0 4.30353487437446e-05 6.0e-7 6.0e-7 +131 1.0 1.0 3.46150893566469e-05 6.0e-7 6.0e-7 +133 1.0 1.0 3.53177359385025e-05 6.0e-7 6.0e-7 +135 1.0 1.0 3.34463685169656e-05 6.0e-7 6.0e-7 +137 1.0 1.0 2.17513144352331e-05 6.0e-7 6.0e-7 +139 1.0 1.0 2.2921143608273e-05 6.0e-7 6.0e-7 +141 1.0 1.0 1.63723538208324e-05 6.0e-7 6.0e-7 +143 1.0 1.0 1.70739371347754e-05 6.0e-7 6.0e-7 +145 1.0 1.0 1.33314862326709e-05 6.0e-7 6.0e-7 +147 1.0 1.0 9.35568960183511e-06 6.0e-7 6.0e-7 +149 1.0 1.0 8.65398238805364e-06 6.0e-7 6.0e-7 +151 1.0 1.0 6.78275450054245e-06 6.0e-7 6.0e-7 +153 1.0 1.0 4.21001034057646e-06 6.0e-7 6.0e-7 +155 1.0 1.0 6.0810323235536e-06 6.0e-7 6.0e-7 +157 1.0 1.0 4.21001034057646e-06 6.0e-7 6.0e-7 +159 1.0 1.0 4.91173578953359e-06 6.0e-7 6.0e-7 +161 1.0 1.0 3.97616258743694e-06 6.0e-7 6.0e-7 +163 1.0 1.0 2.57278761203589e-06 6.0e-7 6.0e-7 +165 1.0 1.0 1.63722947523766e-06 6.0e-7 6.0e-7 +167 1.0 1.0 1.40334349974549e-06 6.0e-7 6.0e-7 +169 1.0 1.0 9.35562209472569e-07 6.0e-7 6.0e-7 +171 1.0 1.0 1.63722947523766e-06 6.0e-7 6.0e-7 +173 1.0 1.0 0 6.0e-7 6.0e-7 +175 1.0 1.0 7.01672670918585e-07 6.0e-7 6.0e-7 +177 1.0 1.0 0 6.0e-7 6.0e-7 +179 1.0 1.0 0 6.0e-7 6.0e-7 +181 1.0 1.0 0 6.0e-7 6.0e-7 +183 1.0 1.0 0 6.0e-7 6.0e-7 +185 1.0 1.0 0 6.0e-7 6.0e-7 +187 1.0 1.0 0 6.0e-7 6.0e-7 +189 1.0 1.0 0 6.0e-7 6.0e-7 +191 1.0 1.0 0 6.0e-7 6.0e-7 +193 1.0 1.0 
7.01672670918585e-07 6.0e-7 6.0e-7
+195 1.0 1.0 0 6.0e-7 6.0e-7
+197 1.0 1.0 0 6.0e-7 6.0e-7
+199 1.0 1.0 0 6.0e-7 6.0e-7
+# END YODA_SCATTER2D
diff --git a/analyses/pluginMC/MC_Cent_pPb_Eta.cc b/analyses/pluginMC/MC_Cent_pPb_Eta.cc
new file mode 100644
--- /dev/null
+++ b/analyses/pluginMC/MC_Cent_pPb_Eta.cc
@@ -0,0 +1,78 @@
+// -*- C++ -*-
+#include "Rivet/Analyses/MC_Cent_pPb.hh"
+#include "Rivet/Tools/Percentile.hh"
+
+namespace Rivet {
+
+
+class MC_Cent_pPb_Eta : public Analysis {
+
+public:
+
+  DEFAULT_RIVET_ANALYSIS_CTOR(MC_Cent_pPb_Eta);
+
+  /// Book histograms and initialise projections before the run
+  void init() {
+
+    MSG_INFO("CENT parameter set to " << getOption("cent","REF"));
+
+    // The centrality projection.
+    declareCentrality(MC_SumETFwdPbCentrality(),
+                      "MC_Cent_pPb_Calib", "SumETPb", "CENT");
+
+    // The trigger projection.
+    declare(MC_pPbMinBiasTrigger(), "Trigger");
+
+    // The particles to be analysed.
+    declare(ChargedFinalState(Cuts::eta > -2.7 && Cuts::eta < 2.7 &&
+                              Cuts::pT > 0.1*GeV), "CFS");
+
+    // The centrality bins and the corresponding histograms.
+    std::vector< std::pair > centralityBins =
+      { {0, 1}, {1, 5}, {5, 10}, {10, 20},
+        {20, 30}, {30, 40}, {40, 60}, {60, 90} };
+    // std::vector< std::tuple > refData =
+    //   { {2, 1, 8}, {2, 1, 7}, {2, 1, 6}, {2, 1, 5},
+    //     {2, 1, 4}, {2, 1, 3}, {2, 1, 2}, {2, 1, 1} };
+    std::vector< std::tuple > refData;
+    for ( int i = 8; i > 0; --i )
+      refData.push_back(std::tuple(2, 1, i));
+
+    // The centrality-binned histograms.
+    _hEta = bookPercentile("CENT", centralityBins, refData);
+
+  }
+
+  /// Perform the per-event analysis
+  void analyze(const Event& event) {
+    const double weight = event.weight();
+
+    if ( !apply<MC_pPbMinBiasTrigger>(event, "Trigger")() ) vetoEvent;
+
+    _hEta->init(event);
+    for ( const auto &p : apply<ChargedFinalState>(event,"CFS").particles() )
+      _hEta->fill(p.eta(), weight);
+
+  }
+
+  /// Finalize
+  void finalize() {
+
+    // Scale by the inverse sum of event weights in each centrality
+    // bin.
+    _hEta->normalizePerEvent();
+
+  }
+
+private:
+
+  /// The histograms binned in centrality.
+  Percentile<Histo1D> _hEta;
+
+};
+
+
+// The hook for the plugin system
+DECLARE_RIVET_PLUGIN(MC_Cent_pPb_Eta);
+
+}
diff --git a/analyses/pluginMC/MC_Cent_pPb_Eta.info b/analyses/pluginMC/MC_Cent_pPb_Eta.info
new file mode 100644
--- /dev/null
+++ b/analyses/pluginMC/MC_Cent_pPb_Eta.info
@@ -0,0 +1,43 @@
+Name: MC_Cent_pPb_Eta
+Summary: Template analysis for obtaining eta distributions binned in centrality
+Status: UNVALIDATED
+Authors:
+ - Leif Lönnblad
+Options:
+ - cent=REF,GEN,IMP
+NumEvents: 50000
+Reentrant: True
+References:
+ - arXiv:1508.00848 [hep-ex], Eur.Phys.J. C76 (2016) no.4, 199
+RunInfo: Any!
+Description:
+  Template analysis for obtaining eta distributions binned in
+  centrality using the CentralityProjection and Percentile<>
+  classes. The example is pPb collisions at 5 TeV and is based on the
+  ATLAS analysis arXiv:1508.00848 [hep-ex]. The reference YODA file
+  contains the corresponding plots from HepData. The generator should
+  be run in minimum-bias mode with a cut on the transverse momentum
+  of charged particles of 0.1 GeV, and setting particles with ctau>10
+  fm stable. Note that a calibration histogram for the generated
+  centrality may be preloaded with the output of a corresponding
+  MC_Cent_pPb_Calib analysis.
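A possible two-step workflow, sketched with placeholder file names and
assuming Rivet's usual ANALYSIS:option=value syntax for the cent option, is to
produce the calibration first and then preload it via the -p/--preload-file
option of bin/rivet, running the eta analysis with cent=GEN so that the
generated calibration is used:

  rivet -a MC_Cent_pPb_Calib -o calib.yoda events.hepmc
  rivet -a MC_Cent_pPb_Eta:cent=GEN -p calib.yoda -o eta.yoda events.hepmc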
+BibKey: Aad:2015zza +BibTeX: '@article{Aad:2015zza, + author = "Aad, Georges and others", + title = "{Measurement of the centrality dependence of the + charged-particle pseudorapidity distribution in + proton–lead collisions at $\sqrt{s_{_\text {NN}}} = + 5.02$ TeV with the ATLAS detector}", + collaboration = "ATLAS", + journal = "Eur. Phys. J.", + volume = "C76", + year = "2016", + number = "4", + pages = "199", + doi = "10.1140/epjc/s10052-016-4002-3", + eprint = "1508.00848", + archivePrefix = "arXiv", + primaryClass = "hep-ex", + reportNumber = "CERN-PH-EP-2015-160", + SLACcitation = "%%CITATION = ARXIV:1508.00848;%%" +}' diff --git a/analyses/pluginMC/MC_Cent_pPb_Eta.plot b/analyses/pluginMC/MC_Cent_pPb_Eta.plot new file mode 100644 --- /dev/null +++ b/analyses/pluginMC/MC_Cent_pPb_Eta.plot @@ -0,0 +1,55 @@ +# BEGIN PLOT /MC_Centrality/d02-x01-y01 +Title=Charged particle density. Centrality 60-90\% +YLabel=$(1/N_{evt}) \; dN_{ch}/d\eta$ +LogY=0 +YMax=85 +# END PLOT +# BEGIN PLOT /MC_Centrality/d02-x01-y02 +Title=Charged particle density. Centrality 40-60\% +YLabel=$(1/N_{evt}) \; dN_{ch}/d\eta$ +XLabel=$\eta$ +LogY=0 +YMax=85 +# END PLOT +# BEGIN PLOT /MC_Centrality/d02-x01-y03 +Title=Charged particle density. Centrality 30-40\% +YLabel=$(1/N_{evt}) \; dN_{ch}/d\eta$ +XLabel=$\eta$ +LogY=0 +YMax=85 +# END PLOT +# BEGIN PLOT /MC_Centrality/d02-x01-y04 +Title=Charged particle density. Centrality 20-30\% +YLabel=$(1/N_{evt}) \; dN_{ch}/d\eta$ +XLabel=$\eta$ +LogY=0 +YMax=85 +# END PLOT +# BEGIN PLOT /MC_Centrality/d02-x01-y05 +Title=Charged particle density. Centrality 10-20\% +YLabel=$(1/N_{evt}) \; dN_{ch}/d\eta$ +XLabel=$\eta$ +LogY=0 +YMax=85 +# END PLOT +# BEGIN PLOT /MC_Centrality/d02-x01-y06 +Title=Charged particle density. Centrality 5-10\% +YLabel=$(1/N_{evt}) \; dN_{ch}/d\eta$ +XLabel=$\eta$ +LogY=0 +YMax=85 +# END PLOT +# BEGIN PLOT /MC_Centrality/d02-x01-y07 +Title=Charged particle density. Centrality 1-5\% +YLabel=$(1/N_{evt}) \; dN_{ch}/d\eta$ +XLabel=$\eta$ +LogY=0 +YMax=85 +# END PLOT +# BEGIN PLOT /MC_Centrality/d02-x01-y08 +Title=Charged particle density. 
Centrality 0-1\% +XLabel=$\eta$ +YLabel=$(1/N_{evt}) \; dN_{ch}/d\eta$ +LogY=0 +YMax=85 +# END PLOT diff --git a/analyses/pluginMC/MC_Cent_pPb_Eta.yoda b/analyses/pluginMC/MC_Cent_pPb_Eta.yoda new file mode 100644 --- /dev/null +++ b/analyses/pluginMC/MC_Cent_pPb_Eta.yoda @@ -0,0 +1,486 @@ +# BEGIN YODA_SCATTER2D /REF/MC_Cent_pPb_Eta/d02-x01-y01 +Path=/REF/MC_Cent_pPb_Eta/d02-x01-y01 +Type=Scatter2D +# xval xerr- xerr+ yval yerr- yerr+ +-2.6500000000000004 0.04999999999999982 0.050000000000000266 7.908 0.5645467208300833 0.5645467208300833 +-2.55 0.050000000000000266 0.04999999999999982 7.815 0.5531121043694488 0.5531121043694488 +-2.45 0.04999999999999982 0.050000000000000266 8.035 0.5512839558702938 0.5512839558702938 +-2.3499999999999996 0.050000000000000266 0.04999999999999982 8.135 0.549097441261567 0.549097441261567 +-2.25 0.04999999999999982 0.04999999999999982 8.206 0.5481103903412159 0.5481103903412159 +-2.1500000000000004 0.04999999999999982 0.050000000000000266 8.239 0.5476139150898195 0.5476139150898195 +-2.05 0.050000000000000266 0.04999999999999982 8.286 0.5418671423882426 0.5418671423882426 +-1.95 0.050000000000000044 0.050000000000000044 8.393 0.5418671423882426 0.5418671423882426 +-1.85 0.04999999999999982 0.050000000000000044 8.369 0.5339850185164374 0.5339850185164374 +-1.75 0.050000000000000044 0.050000000000000044 8.427 0.5321888762460185 0.5321888762460185 +-1.65 0.050000000000000044 0.04999999999999982 8.457 0.5312042921513342 0.5312042921513342 +-1.55 0.050000000000000044 0.050000000000000044 8.419 0.5233297239790609 0.5233297239790609 +-1.45 0.050000000000000044 0.050000000000000044 8.418 0.5152795357861595 0.5152795357861595 +-1.35 0.04999999999999982 0.050000000000000044 8.405 0.5113443067053743 0.5113443067053743 +-1.25 0.050000000000000044 0.050000000000000044 8.358 0.5054433697260258 0.5054433697260258 +-1.15 0.050000000000000044 0.04999999999999982 8.338 0.5024937810560445 0.5024937810560445 +-1.05 0.050000000000000044 0.050000000000000044 8.325 0.4975791394341206 0.4975791394341206 +-0.95 0.050000000000000044 0.04999999999999993 8.227 0.4875674312338756 0.4875674312338756 +-0.8500000000000001 0.04999999999999993 0.050000000000000044 8.222 0.4856027182790475 0.4856027182790475 +-0.75 0.050000000000000044 0.050000000000000044 8.149 0.4795216366338436 0.4795216366338436 +-0.6499999999999999 0.050000000000000044 0.04999999999999993 8.1 0.47264786046273394 0.47264786046273394 +-0.55 0.04999999999999993 0.050000000000000044 8.047 0.4687216658103186 0.4687216658103186 +-0.45 0.04999999999999999 0.04999999999999999 7.997 0.4642251608863957 0.4642251608863957 +-0.35 0.050000000000000044 0.04999999999999999 7.94 0.4622607489285674 0.4622607489285674 +-0.25 0.04999999999999999 0.04999999999999999 7.958 0.45931470692761406 0.45931470692761406 +-0.15000000000000002 0.04999999999999999 0.05000000000000002 7.923 0.45656105834816885 0.45656105834816885 +-0.05 0.05 0.05 7.888 0.4544062059435368 0.4544062059435368 +0.05 0.05 0.05 7.863 0.4514620692815732 0.4514620692815732 +0.15000000000000002 0.05000000000000002 0.04999999999999999 7.917 0.45656105834816885 0.45656105834816885 +0.25 0.04999999999999999 0.04999999999999999 7.985 0.4614683087710358 0.4614683087710358 +0.35 0.04999999999999999 0.050000000000000044 8.03 0.46754892792091823 0.46754892792091823 +0.45 0.04999999999999999 0.04999999999999999 8.083 0.4724584637827965 0.4724584637827965 +0.55 0.050000000000000044 0.04999999999999993 8.201 0.4803176032585106 0.4803176032585106 +0.6499999999999999 0.04999999999999993 
0.050000000000000044 8.254 0.4842489029414522 0.4842489029414522 +0.75 0.050000000000000044 0.050000000000000044 8.289 0.4891645939763016 0.4891645939763016 +0.8500000000000001 0.050000000000000044 0.04999999999999993 8.38 0.498196748283246 0.498196748283246 +0.95 0.04999999999999993 0.050000000000000044 8.459 0.506067189215029 0.506067189215029 +1.05 0.050000000000000044 0.050000000000000044 8.492 0.510988258182123 0.510988258182123 +1.15 0.04999999999999982 0.050000000000000044 8.563 0.516895540704309 0.516895540704309 +1.25 0.050000000000000044 0.050000000000000044 8.656 0.5288875116695421 0.5288875116695421 +1.35 0.050000000000000044 0.04999999999999982 8.658 0.5306872902190141 0.5306872902190141 +1.45 0.050000000000000044 0.050000000000000044 8.668 0.5366013417799103 0.5366013417799103 +1.55 0.050000000000000044 0.050000000000000044 8.706 0.5428526503573506 0.5428526503573506 +1.65 0.04999999999999982 0.050000000000000044 8.747 0.5518921996187299 0.5518921996187299 +1.75 0.050000000000000044 0.050000000000000044 8.73 0.5558354072924827 0.5558354072924827 +1.85 0.050000000000000044 0.04999999999999982 8.717 0.563560112144215 0.563560112144215 +1.95 0.050000000000000044 0.050000000000000044 8.702 0.5716161299333671 0.5716161299333671 +2.05 0.04999999999999982 0.050000000000000266 8.702 0.585434027026103 0.585434027026103 +2.1500000000000004 0.050000000000000266 0.04999999999999982 8.604 0.585434027026103 0.585434027026103 +2.25 0.04999999999999982 0.04999999999999982 8.7 0.6205610364823109 0.6205610364823109 +2.3499999999999996 0.04999999999999982 0.050000000000000266 8.693 0.6204167953883905 0.6204167953883905 +2.45 0.050000000000000266 0.04999999999999982 8.544 0.621003220603565 0.621003220603565 +2.55 0.04999999999999982 0.050000000000000266 8.546 0.6260966379082386 0.6260966379082386 +2.6500000000000004 0.050000000000000266 0.04999999999999982 8.42 0.6543699259593155 0.6543699259593155 +# END YODA_SCATTER2D + + +# BEGIN YODA_SCATTER2D /REF/MC_Cent_pPb_Eta/d02-x01-y02 +Path=/REF/MC_Cent_pPb_Eta/d02-x01-y02 +Type=Scatter2D +# xval xerr- xerr+ yval yerr- yerr+ +-2.6500000000000004 0.04999999999999982 0.050000000000000266 16.357 0.7618300335376651 0.7618300335376651 +-2.55 0.050000000000000266 0.04999999999999982 16.275 0.7586145266207338 0.7586145266207338 +-2.45 0.04999999999999982 0.050000000000000266 16.709 0.7371838305334701 0.7371838305334701 +-2.3499999999999996 0.050000000000000266 0.04999999999999982 16.888 0.7302629663347306 0.7302629663347306 +-2.25 0.04999999999999982 0.04999999999999982 16.985 0.7274737108652105 0.7274737108652105 +-2.1500000000000004 0.04999999999999982 0.050000000000000266 17.067 0.7340694790004554 0.7340694790004554 +-2.05 0.050000000000000266 0.04999999999999982 17.205 0.7234141275922111 0.7234141275922111 +-1.95 0.050000000000000044 0.050000000000000044 17.201 0.7106229661360516 0.7106229661360516 +-1.85 0.04999999999999982 0.050000000000000044 17.145 0.700788841235361 0.700788841235361 +-1.75 0.050000000000000044 0.050000000000000044 17.247 0.6970408883272201 0.6970408883272201 +-1.65 0.050000000000000044 0.04999999999999982 17.206 0.6940929332589405 0.6940929332589405 +-1.55 0.050000000000000044 0.050000000000000044 17.137 0.6832876407487553 0.6832876407487553 +-1.45 0.050000000000000044 0.050000000000000044 17.124 0.6664082832618454 0.6664082832618454 +-1.35 0.04999999999999982 0.050000000000000044 17.004 0.6605210064789764 0.6605210064789764 +-1.25 0.050000000000000044 0.050000000000000044 16.955 0.6554220014616537 0.6554220014616537 +-1.15 
0.050000000000000044 0.04999999999999982 16.803 0.6495367580052726 0.6495367580052726 +-1.05 0.050000000000000044 0.050000000000000044 16.755 0.6405154174569102 0.6405154174569102 +-0.95 0.050000000000000044 0.04999999999999993 16.621 0.627774641093442 0.627774641093442 +-0.8500000000000001 0.04999999999999993 0.050000000000000044 16.442 0.6226756780218736 0.6226756780218736 +-0.75 0.050000000000000044 0.050000000000000044 16.225 0.6114981602588841 0.6114981602588841 +-0.6499999999999999 0.050000000000000044 0.04999999999999993 16.126 0.5987695717051761 0.5987695717051761 +-0.55 0.04999999999999993 0.050000000000000044 16.006 0.5966070733740927 0.5966070733740927 +-0.45 0.04999999999999999 0.04999999999999999 15.871 0.5907351352340573 0.5907351352340573 +-0.35 0.050000000000000044 0.04999999999999999 15.702 0.5885720006932031 0.5885720006932031 +-0.25 0.04999999999999999 0.04999999999999999 15.588 0.5748782479795179 0.5748782479795179 +-0.15000000000000002 0.04999999999999999 0.05000000000000002 15.57 0.5717350785110181 0.5717350785110181 +-0.05 0.05 0.05 15.453 0.567824796922431 0.567824796922431 +0.05 0.05 0.05 15.437 0.5629387178015028 0.5629387178015028 +0.15000000000000002 0.05000000000000002 0.04999999999999999 15.388 0.567824796922431 0.567824796922431 +0.25 0.04999999999999999 0.04999999999999999 15.476 0.5727128425310541 0.5727128425310541 +0.35 0.04999999999999999 0.050000000000000044 15.559 0.5846580197004057 0.5846580197004057 +0.45 0.04999999999999999 0.04999999999999999 15.616 0.58955067636294 0.58955067636294 +0.55 0.050000000000000044 0.04999999999999993 15.677 0.5917136131609615 0.5917136131609615 +0.6499999999999999 0.04999999999999993 0.050000000000000044 15.821 0.5987695717051761 0.5987695717051761 +0.75 0.050000000000000044 0.050000000000000044 15.864 0.6046428698000167 0.6046428698000167 +0.8500000000000001 0.050000000000000044 0.04999999999999993 15.982 0.6165971131946694 0.6165971131946694 +0.95 0.04999999999999993 0.050000000000000044 16.02 0.6212970304129901 0.6212970304129901 +1.05 0.050000000000000044 0.050000000000000044 16.071 0.6291406837901997 0.6291406837901997 +1.15 0.04999999999999982 0.050000000000000044 16.117 0.6301214168713836 0.6301214168713836 +1.25 0.050000000000000044 0.050000000000000044 16.147 0.6428763489194481 0.6428763489194481 +1.35 0.050000000000000044 0.04999999999999982 16.184 0.6418948512022822 0.6418948512022822 +1.45 0.050000000000000044 0.050000000000000044 16.151 0.6497484128491581 0.6497484128491581 +1.55 0.050000000000000044 0.050000000000000044 16.141 0.6530742683646324 0.6530742683646324 +1.65 0.04999999999999982 0.050000000000000044 16.135 0.6599462099292639 0.6599462099292639 +1.75 0.050000000000000044 0.050000000000000044 16.057 0.663874235077699 0.663874235077699 +1.85 0.050000000000000044 0.04999999999999982 15.93 0.6744961082170897 0.6744961082170897 +1.95 0.050000000000000044 0.050000000000000044 15.847 0.6853116079565559 0.6853116079565559 +2.05 0.04999999999999982 0.050000000000000266 15.749 0.7099133749972597 0.7099133749972597 +2.1500000000000004 0.050000000000000266 0.04999999999999982 15.684 0.715011188723645 0.715011188723645 +2.25 0.04999999999999982 0.04999999999999982 15.634 0.7783064948977363 0.7783064948977363 +2.3499999999999996 0.04999999999999982 0.050000000000000266 15.448 0.7579294162387419 0.7579294162387419 +2.45 0.050000000000000266 0.04999999999999982 15.356 0.7718633298712927 0.7718633298712927 +2.55 0.04999999999999982 0.050000000000000266 15.1 0.7608449250668627 0.7608449250668627 
+2.6500000000000004 0.050000000000000266 0.04999999999999982 14.731 0.8106719435135276 0.8106719435135276 +# END YODA_SCATTER2D + + +# BEGIN YODA_SCATTER2D /REF/MC_Cent_pPb_Eta/d02-x01-y03 +Path=/REF/MC_Cent_pPb_Eta/d02-x01-y03 +Type=Scatter2D +# xval xerr- xerr+ yval yerr- yerr+ +-2.6500000000000004 0.04999999999999982 0.050000000000000266 22.483 0.9143850392476902 0.9143850392476902 +-2.55 0.050000000000000266 0.04999999999999982 22.227 0.9090374029708569 0.9090374029708569 +-2.45 0.04999999999999982 0.050000000000000266 22.78 0.8682079244052083 0.8682079244052083 +-2.3499999999999996 0.050000000000000266 0.04999999999999982 22.974 0.8571137614109343 0.8571137614109343 +-2.25 0.04999999999999982 0.04999999999999982 23.131 0.8555816734830171 0.8555816734830171 +-2.1500000000000004 0.04999999999999982 0.050000000000000266 23.261 0.8688066528290399 0.8688066528290399 +-2.05 0.050000000000000266 0.04999999999999982 23.249 0.8473470363434334 0.8473470363434334 +-1.95 0.050000000000000044 0.050000000000000044 23.402 0.8356536363829214 0.8356536363829214 +-1.85 0.04999999999999982 0.050000000000000044 23.421 0.827348777723156 0.827348777723156 +-1.75 0.050000000000000044 0.050000000000000044 23.486 0.8207728065670792 0.8207728065670792 +-1.65 0.050000000000000044 0.04999999999999982 23.282 0.8147054682521776 0.8147054682521776 +-1.55 0.050000000000000044 0.050000000000000044 23.243 0.8020735627110521 0.8020735627110521 +-1.45 0.050000000000000044 0.050000000000000044 23.181 0.7775705755749763 0.7775705755749763 +-1.35 0.04999999999999982 0.050000000000000044 23.13 0.7756313557354422 0.7756313557354422 +-1.25 0.050000000000000044 0.050000000000000044 22.967 0.768600026021337 0.768600026021337 +-1.15 0.050000000000000044 0.04999999999999982 22.773 0.7637545417213569 0.7637545417213569 +-1.05 0.050000000000000044 0.050000000000000044 22.577 0.748010695110705 0.748010695110705 +-0.95 0.050000000000000044 0.04999999999999993 22.376 0.731312518694983 0.731312518694983 +-0.8500000000000001 0.04999999999999993 0.050000000000000044 22.163 0.7271897964080629 0.7271897964080629 +-0.75 0.050000000000000044 0.050000000000000044 22.028 0.7201673416644218 0.7201673416644218 +-0.6499999999999999 0.050000000000000044 0.04999999999999993 21.662 0.6955070093104742 0.6955070093104742 +-0.55 0.04999999999999993 0.050000000000000044 21.489 0.694279482629294 0.694279482629294 +-0.45 0.04999999999999999 0.04999999999999999 21.249 0.6853334954604219 0.6853334954604219 +-0.35 0.050000000000000044 0.04999999999999999 21.136 0.6889303302947258 0.6889303302947258 +-0.25 0.04999999999999999 0.04999999999999999 20.873 0.6655238538174271 0.6655238538174271 +-0.15000000000000002 0.04999999999999999 0.05000000000000002 20.736 0.6594763073833662 0.6594763073833662 +-0.05 0.05 0.05 20.639 0.6563177584067035 0.6563177584067035 +0.05 0.05 0.05 20.445 0.6435347698454218 0.6435347698454218 +0.15000000000000002 0.05000000000000002 0.04999999999999999 20.59 0.6575507584970152 0.6575507584970152 +0.25 0.04999999999999999 0.04999999999999999 20.531 0.659207099476333 0.659207099476333 +0.35 0.04999999999999999 0.050000000000000044 20.589 0.673931747286029 0.673931747286029 +0.45 0.04999999999999999 0.04999999999999999 20.696 0.6809478687829195 0.6809478687829195 +0.55 0.050000000000000044 0.04999999999999993 20.775 0.6834047117191979 0.6834047117191979 +0.6499999999999999 0.04999999999999993 0.050000000000000044 20.702 0.6821759303874625 0.6821759303874625 +0.75 0.050000000000000044 0.050000000000000044 20.895 0.6930526675513197 
0.6930526675513197 +0.8500000000000001 0.050000000000000044 0.04999999999999993 20.839 0.7017442554093336 0.7017442554093336 +0.95 0.04999999999999993 0.050000000000000044 20.98 0.710443523441519 0.710443523441519 +1.05 0.050000000000000044 0.050000000000000044 21.037 0.7191501929360793 0.7191501929360793 +1.15 0.04999999999999982 0.050000000000000044 21.019 0.7162471640432512 0.7162471640432512 +1.25 0.050000000000000044 0.050000000000000044 20.968 0.7302773445753332 0.7302773445753332 +1.35 0.050000000000000044 0.04999999999999982 20.969 0.7237409757641196 0.7237409757641196 +1.45 0.050000000000000044 0.050000000000000044 20.938 0.736339595567154 0.736339595567154 +1.55 0.050000000000000044 0.050000000000000044 20.859 0.7361100461208229 0.7361100461208229 +1.65 0.04999999999999982 0.050000000000000044 20.806 0.7428923205956567 0.7428923205956567 +1.75 0.050000000000000044 0.050000000000000044 20.499 0.7392489431849056 0.7392489431849056 +1.85 0.050000000000000044 0.04999999999999982 20.437 0.7579294162387419 0.7579294162387419 +1.95 0.050000000000000044 0.050000000000000044 20.176 0.7664365596707923 0.7664365596707923 +2.05 0.04999999999999982 0.050000000000000266 20.015 0.8002755775356387 0.8002755775356387 +2.1500000000000004 0.050000000000000266 0.04999999999999982 19.817 0.8002755775356387 0.8002755775356387 +2.25 0.04999999999999982 0.04999999999999982 19.652 0.8837024386070234 0.8837024386070234 +2.3499999999999996 0.04999999999999982 0.050000000000000266 19.376 0.8533024082938007 0.8533024082938007 +2.45 0.050000000000000266 0.04999999999999982 19.261 0.8705659079012915 0.8705659079012915 +2.55 0.04999999999999982 0.050000000000000266 18.954 0.8578321514142495 0.8578321514142495 +2.6500000000000004 0.050000000000000266 0.04999999999999982 18.718 0.9458377239251985 0.9458377239251985 +# END YODA_SCATTER2D + + +# BEGIN YODA_SCATTER2D /REF/MC_Cent_pPb_Eta/d02-x01-y04 +Path=/REF/MC_Cent_pPb_Eta/d02-x01-y04 +Type=Scatter2D +# xval xerr- xerr+ yval yerr- yerr+ +-2.6500000000000004 0.04999999999999982 0.050000000000000266 27.382 0.9996504389035199 0.9996504389035199 +-2.55 0.050000000000000266 0.04999999999999982 27.193 1.0033394241232625 1.0033394241232625 +-2.45 0.04999999999999982 0.050000000000000266 27.797 0.9431065687397157 0.9431065687397157 +-2.3499999999999996 0.050000000000000266 0.04999999999999982 27.975 0.9256052074183679 0.9256052074183679 +-2.25 0.04999999999999982 0.04999999999999982 28.062 0.9209695977609684 0.9209695977609684 +-2.1500000000000004 0.04999999999999982 0.050000000000000266 28.25 0.9425179043392227 0.9425179043392227 +-2.05 0.050000000000000266 0.04999999999999982 28.198 0.9143593385535033 0.9143593385535033 +-1.95 0.050000000000000044 0.050000000000000044 28.279 0.8959397301158153 0.8959397301158153 +-1.85 0.04999999999999982 0.050000000000000044 28.202 0.8833510061125192 0.8833510061125192 +-1.75 0.050000000000000044 0.050000000000000044 28.417 0.8821757194572972 0.8821757194572972 +-1.65 0.050000000000000044 0.04999999999999982 28.249 0.8802420121761969 0.8802420121761969 +-1.55 0.050000000000000044 0.050000000000000044 28.176 0.8647832098277579 0.8647832098277579 +-1.45 0.050000000000000044 0.050000000000000044 27.971 0.8292677492824619 0.8292677492824619 +-1.35 0.04999999999999982 0.050000000000000044 27.74 0.8251460476788337 0.8251460476788337 +-1.25 0.050000000000000044 0.050000000000000044 27.593 0.8193668287159299 0.8193668287159299 +-1.15 0.050000000000000044 0.04999999999999982 27.499 0.8203298117220903 0.8203298117220903 +-1.05 
0.050000000000000044 0.050000000000000044 27.122 0.7986144251138969 0.7986144251138969 +-0.95 0.050000000000000044 0.04999999999999993 26.896 0.779400410572127 0.779400410572127 +-0.8500000000000001 0.04999999999999993 0.050000000000000044 26.63 0.7769202018225553 0.7769202018225553 +-0.75 0.050000000000000044 0.050000000000000044 26.331 0.7670410680009252 0.7670410680009252 +-0.6499999999999999 0.050000000000000044 0.04999999999999993 25.999 0.739635045140507 0.739635045140507 +-0.55 0.04999999999999993 0.050000000000000044 25.816 0.7409750333175875 0.7409750333175875 +-0.45 0.04999999999999999 0.04999999999999999 25.482 0.7319808740670756 0.7319808740670756 +-0.35 0.050000000000000044 0.04999999999999999 25.15 0.7320669368302327 0.7320669368302327 +-0.25 0.04999999999999999 0.04999999999999999 25.009 0.7088222626300617 0.7088222626300617 +-0.15000000000000002 0.04999999999999999 0.05000000000000002 24.842 0.7011854248342588 0.7011854248342588 +-0.05 0.05 0.05 24.636 0.6961156513109011 0.6961156513109011 +0.05 0.05 0.05 24.412 0.6802595093050886 0.6802595093050886 +0.15000000000000002 0.05000000000000002 0.04999999999999999 24.464 0.6945631720729224 0.6945631720729224 +0.25 0.04999999999999999 0.04999999999999999 24.3 0.6933108970728789 0.6933108970728789 +0.35 0.04999999999999999 0.050000000000000044 24.395 0.713679199640847 0.713679199640847 +0.45 0.04999999999999999 0.04999999999999999 24.482 0.7203811491148279 0.7203811491148279 +0.55 0.050000000000000044 0.04999999999999993 24.562 0.7215407126420518 0.7215407126420518 +0.6499999999999999 0.04999999999999993 0.050000000000000044 24.535 0.7224970588175429 0.7224970588175429 +0.75 0.050000000000000044 0.050000000000000044 24.643 0.7311094309335641 0.7311094309335641 +0.8500000000000001 0.050000000000000044 0.04999999999999993 24.734 0.7457673900084395 0.7457673900084395 +0.95 0.04999999999999993 0.050000000000000044 24.649 0.7483615436405053 0.7483615436405053 +1.05 0.050000000000000044 0.050000000000000044 24.714 0.7582400675247912 0.7582400675247912 +1.15 0.04999999999999982 0.050000000000000044 24.744 0.7550801281983257 0.7550801281983257 +1.25 0.050000000000000044 0.050000000000000044 24.51 0.7670234676983488 0.7670234676983488 +1.35 0.050000000000000044 0.04999999999999982 24.542 0.7583679845563102 0.7583679845563102 +1.45 0.050000000000000044 0.050000000000000044 24.419 0.7696395260120156 0.7696395260120156 +1.55 0.050000000000000044 0.050000000000000044 24.145 0.7640026177965623 0.7640026177965623 +1.65 0.04999999999999982 0.050000000000000044 24.082 0.7714175263759567 0.7714175263759567 +1.75 0.050000000000000044 0.050000000000000044 23.694 0.7667515894994936 0.7667515894994936 +1.85 0.050000000000000044 0.04999999999999982 23.6 0.7879492369435991 0.7879492369435991 +1.95 0.050000000000000044 0.050000000000000044 23.199 0.796113685349021 0.796113685349021 +2.05 0.04999999999999982 0.050000000000000266 22.971 0.836508218728304 0.836508218728304 +2.1500000000000004 0.050000000000000266 0.04999999999999982 22.66 0.833354666393607 0.833354666393607 +2.25 0.04999999999999982 0.04999999999999982 22.33 0.9304493538070732 0.9304493538070732 +2.3499999999999996 0.04999999999999982 0.050000000000000266 22.056 0.8949687145369943 0.8949687145369943 +2.45 0.050000000000000266 0.04999999999999982 21.842 0.9127349012719959 0.9127349012719959 +2.55 0.04999999999999982 0.050000000000000266 21.473 0.8949513953282602 0.8949513953282602 +2.6500000000000004 0.050000000000000266 0.04999999999999982 21.249 1.0062628881162219 1.0062628881162219 +# END 
YODA_SCATTER2D + + +# BEGIN YODA_SCATTER2D /REF/MC_Cent_pPb_Eta/d02-x01-y05 +Path=/REF/MC_Cent_pPb_Eta/d02-x01-y05 +Type=Scatter2D +# xval xerr- xerr+ yval yerr- yerr+ +-2.6500000000000004 0.04999999999999982 0.050000000000000266 34.017 1.2401951459347034 1.2401951459347034 +-2.55 0.050000000000000266 0.04999999999999982 33.662 1.2396519672875932 1.2396519672875932 +-2.45 0.04999999999999982 0.050000000000000266 34.345 1.1628864948910536 1.1628864948910536 +-2.3499999999999996 0.050000000000000266 0.04999999999999982 34.467 1.1389139563636932 1.1389139563636932 +-2.25 0.04999999999999982 0.04999999999999982 34.719 1.138389212879321 1.138389212879321 +-2.1500000000000004 0.04999999999999982 0.050000000000000266 34.823 1.1601422326594268 1.1601422326594268 +-2.05 0.050000000000000266 0.04999999999999982 34.975 1.13241379362846 1.13241379362846 +-1.95 0.050000000000000044 0.050000000000000044 34.96 1.106932698947863 1.106932698947863 +-1.85 0.04999999999999982 0.050000000000000044 34.907 1.0926307702055622 1.0926307702055622 +-1.75 0.050000000000000044 0.050000000000000044 34.906 1.0815035829806574 1.0815035829806574 +-1.65 0.050000000000000044 0.04999999999999982 34.655 1.0776316624895539 1.0776316624895539 +-1.55 0.050000000000000044 0.050000000000000044 34.772 1.0662762306269422 1.0662762306269422 +-1.45 0.050000000000000044 0.050000000000000044 34.46 1.020106367002971 1.020106367002971 +-1.35 0.04999999999999982 0.050000000000000044 34.229 1.0169488679378134 1.0169488679378134 +-1.25 0.050000000000000044 0.050000000000000044 33.913 1.0063418902142551 1.0063418902142551 +-1.15 0.050000000000000044 0.04999999999999982 33.617 1.0019540907646418 1.0019540907646418 +-1.05 0.050000000000000044 0.050000000000000044 33.44 0.9829572727234892 0.9829572727234892 +-0.95 0.050000000000000044 0.04999999999999993 32.895 0.9523239994875694 0.9523239994875694 +-0.8500000000000001 0.04999999999999993 0.050000000000000044 32.577 0.9488877699707169 0.9488877699707169 +-0.75 0.050000000000000044 0.050000000000000044 32.178 0.9358424012620928 0.9358424012620928 +-0.6499999999999999 0.050000000000000044 0.04999999999999993 31.758 0.9016983974700188 0.9016983974700188 +-0.55 0.04999999999999993 0.050000000000000044 31.519 0.904004977862401 0.904004977862401 +-0.45 0.04999999999999999 0.04999999999999999 31.055 0.8914538686886719 0.8914538686886719 +-0.35 0.050000000000000044 0.04999999999999999 30.732 0.8937477272698375 0.8937477272698375 +-0.25 0.04999999999999999 0.04999999999999999 30.311 0.858438116581504 0.858438116581504 +-0.15000000000000002 0.04999999999999999 0.05000000000000002 30.104 0.8488910413003545 0.8488910413003545 +-0.05 0.05 0.05 29.696 0.8381527307120106 0.8381527307120106 +0.05 0.05 0.05 29.78 0.8289155566159921 0.8289155566159921 +0.15000000000000002 0.05000000000000002 0.04999999999999999 29.557 0.838809871186552 0.838809871186552 +0.25 0.04999999999999999 0.04999999999999999 29.506 0.8416751154691459 0.8416751154691459 +0.35 0.04999999999999999 0.050000000000000044 29.49 0.8627149007638618 0.8627149007638618 +0.45 0.04999999999999999 0.04999999999999999 29.426 0.8653005258290324 0.8653005258290324 +0.55 0.050000000000000044 0.04999999999999993 29.457 0.866083136886985 0.866083136886985 +0.6499999999999999 0.04999999999999993 0.050000000000000044 29.543 0.8699080411169907 0.8699080411169907 +0.75 0.050000000000000044 0.050000000000000044 29.466 0.8746913741429031 0.8746913741429031 +0.8500000000000001 0.050000000000000044 0.04999999999999993 29.451 0.8890556787963283 0.8890556787963283 
+0.95 0.04999999999999993 0.050000000000000044 29.483 0.8957661525197299 0.8957661525197299 +1.05 0.050000000000000044 0.050000000000000044 29.489 0.9050801069518654 0.9050801069518654 +1.15 0.04999999999999982 0.050000000000000044 29.367 0.8964424130974615 0.8964424130974615 +1.25 0.050000000000000044 0.050000000000000044 29.288 0.9163323632831049 0.9163323632831049 +1.35 0.050000000000000044 0.04999999999999982 29.059 0.8982037630738362 0.8982037630738362 +1.45 0.050000000000000044 0.050000000000000044 28.817 0.9085042652624147 0.9085042652624147 +1.55 0.050000000000000044 0.050000000000000044 28.668 0.9075604663051383 0.9075604663051383 +1.65 0.04999999999999982 0.050000000000000044 28.339 0.9086434944465294 0.9086434944465294 +1.75 0.050000000000000044 0.050000000000000044 27.996 0.9071295387098801 0.9071295387098801 +1.85 0.050000000000000044 0.04999999999999982 27.572 0.9217119940632215 0.9217119940632215 +1.95 0.050000000000000044 0.050000000000000044 27.271 0.936873524014848 0.936873524014848 +2.05 0.04999999999999982 0.050000000000000266 26.86 0.9798658071389162 0.9798658071389162 +2.1500000000000004 0.050000000000000266 0.04999999999999982 26.451 0.9745255255764211 0.9745255255764211 +2.25 0.04999999999999982 0.04999999999999982 26.044 1.0856431273673683 1.0856431273673683 +2.3499999999999996 0.04999999999999982 0.050000000000000266 25.823 1.0489775974728917 1.0489775974728917 +2.45 0.050000000000000266 0.04999999999999982 25.181 1.0528784355280527 1.0528784355280527 +2.55 0.04999999999999982 0.050000000000000266 24.803 1.0355529923668803 1.0355529923668803 +2.6500000000000004 0.050000000000000266 0.04999999999999982 24.39 1.1567307378988423 1.1567307378988423 +# END YODA_SCATTER2D + + +# BEGIN YODA_SCATTER2D /REF/MC_Cent_pPb_Eta/d02-x01-y06 +Path=/REF/MC_Cent_pPb_Eta/d02-x01-y06 +Type=Scatter2D +# xval xerr- xerr+ yval yerr- yerr+ +-2.6500000000000004 0.04999999999999982 0.050000000000000266 41.613 1.4765703505082308 1.4765703505082308 +-2.55 0.050000000000000266 0.04999999999999982 41.388 1.4852238888463922 1.4852238888463922 +-2.45 0.04999999999999982 0.050000000000000266 42.104 1.3832848585884254 1.3832848585884254 +-2.3499999999999996 0.050000000000000266 0.04999999999999982 42.089 1.3485076937118305 1.3485076937118305 +-2.25 0.04999999999999982 0.04999999999999982 42.206 1.3419720563409658 1.3419720563409658 +-2.1500000000000004 0.04999999999999982 0.050000000000000266 42.516 1.376962236228721 1.376962236228721 +-2.05 0.050000000000000266 0.04999999999999982 42.509 1.3363760698246583 1.3363760698246583 +-1.95 0.050000000000000044 0.050000000000000044 42.702 1.3115624270312107 1.3115624270312107 +-1.85 0.04999999999999982 0.050000000000000044 42.483 1.290645187493449 1.290645187493449 +-1.75 0.050000000000000044 0.050000000000000044 42.374 1.2745701236103097 1.2745701236103097 +-1.65 0.050000000000000044 0.04999999999999982 42.367 1.2808891443056265 1.2808891443056265 +-1.55 0.050000000000000044 0.050000000000000044 42.012 1.2520850610082368 1.2520850610082368 +-1.45 0.050000000000000044 0.050000000000000044 41.704 1.198547871384368 1.198547871384368 +-1.35 0.04999999999999982 0.050000000000000044 41.355 1.194137764246655 1.194137764246655 +-1.25 0.050000000000000044 0.050000000000000044 41.222 1.1896507890973722 1.1896507890973722 +-1.15 0.050000000000000044 0.04999999999999982 40.643 1.1792107530038896 1.1792107530038896 +-1.05 0.050000000000000044 0.050000000000000044 40.183 1.1493180586765355 1.1493180586765355 +-0.95 0.050000000000000044 0.04999999999999993 39.826 
1.120130349557586 1.120130349557586 +-0.8500000000000001 0.04999999999999993 0.050000000000000044 39.377 1.1166615422767991 1.1166615422767991 +-0.75 0.050000000000000044 0.050000000000000044 38.857 1.1005135165003652 1.1005135165003652 +-0.6499999999999999 0.050000000000000044 0.04999999999999993 38.231 1.0568348972285122 1.0568348972285122 +-0.55 0.04999999999999993 0.050000000000000044 37.665 1.0524077156691698 1.0524077156691698 +-0.45 0.04999999999999999 0.04999999999999999 37.451 1.0467330127592231 1.0467330127592231 +-0.35 0.050000000000000044 0.04999999999999999 36.836 1.045771007439009 1.045771007439009 +-0.25 0.04999999999999999 0.04999999999999999 36.321 1.0019061832327416 1.0019061832327416 +-0.15000000000000002 0.04999999999999999 0.05000000000000002 35.825 0.9835954452924232 0.9835954452924232 +-0.05 0.05 0.05 35.432 0.974109850068256 0.974109850068256 +0.05 0.05 0.05 35.497 0.9612497074121791 0.9612497074121791 +0.15000000000000002 0.05000000000000002 0.04999999999999999 35.189 0.9725044987042477 0.9725044987042477 +0.25 0.04999999999999999 0.04999999999999999 35.197 0.9794432091754989 0.9794432091754989 +0.35 0.04999999999999999 0.050000000000000044 35.132 1.0030872344915969 1.0030872344915969 +0.45 0.04999999999999999 0.04999999999999999 34.876 1.0011803034418925 1.0011803034418925 +0.55 0.050000000000000044 0.04999999999999993 34.929 1.003466491717586 1.003466491717586 +0.6499999999999999 0.04999999999999993 0.050000000000000044 34.937 1.0053561557975361 1.0053561557975361 +0.75 0.050000000000000044 0.050000000000000044 34.655 1.0047034388315788 1.0047034388315788 +0.8500000000000001 0.050000000000000044 0.04999999999999993 34.811 1.0267974483801565 1.0267974483801565 +0.95 0.04999999999999993 0.050000000000000044 34.619 1.0271085629085175 1.0271085629085175 +1.05 0.050000000000000044 0.050000000000000044 34.651 1.0397586258358233 1.0397586258358233 +1.15 0.04999999999999982 0.050000000000000044 34.42 1.0264755233321445 1.0264755233321445 +1.25 0.050000000000000044 0.050000000000000044 34.256 1.0474015466858926 1.0474015466858926 +1.35 0.050000000000000044 0.04999999999999982 33.985 1.0261700638783027 1.0261700638783027 +1.45 0.050000000000000044 0.050000000000000044 33.673 1.0379152181175493 1.0379152181175493 +1.55 0.050000000000000044 0.050000000000000044 33.242 1.029005344981259 1.029005344981259 +1.65 0.04999999999999982 0.050000000000000044 33.024 1.0353327967373582 1.0353327967373582 +1.75 0.050000000000000044 0.050000000000000044 32.426 1.0271202461250581 1.0271202461250581 +1.85 0.050000000000000044 0.04999999999999982 31.986 1.0455783088798276 1.0455783088798276 +1.95 0.050000000000000044 0.050000000000000044 31.598 1.0624598815955357 1.0624598815955357 +2.05 0.04999999999999982 0.050000000000000266 31.154 1.1148403473143587 1.1148403473143587 +2.1500000000000004 0.050000000000000266 0.04999999999999982 30.609 1.1066006506414137 1.1066006506414137 +2.25 0.04999999999999982 0.04999999999999982 29.963 1.229424662189595 1.229424662189595 +2.3499999999999996 0.04999999999999982 0.050000000000000266 29.208 1.166355434676754 1.166355434676754 +2.45 0.050000000000000266 0.04999999999999982 29.015 1.1937922767382942 1.1937922767382942 +2.55 0.04999999999999982 0.050000000000000266 28.366 1.1652231545931448 1.1652231545931448 +2.6500000000000004 0.050000000000000266 0.04999999999999982 27.603 1.3079514516984183 1.3079514516984183 +# END YODA_SCATTER2D + + +# BEGIN YODA_SCATTER2D /REF/MC_Cent_pPb_Eta/d02-x01-y07 +Path=/REF/MC_Cent_pPb_Eta/d02-x01-y07 +Type=Scatter2D +# 
xval xerr- xerr+ yval yerr- yerr+ +-2.6500000000000004 0.04999999999999982 0.050000000000000266 50.659 1.792847176978562 1.792847176978562 +-2.55 0.050000000000000266 0.04999999999999982 50.522 1.807139452283636 1.807139452283636 +-2.45 0.04999999999999982 0.050000000000000266 51.237 1.6783444223400632 1.6783444223400632 +-2.3499999999999996 0.050000000000000266 0.04999999999999982 51.247 1.6392269519502174 1.6392269519502174 +-2.25 0.04999999999999982 0.04999999999999982 51.567 1.6363520403629532 1.6363520403629532 +-2.1500000000000004 0.04999999999999982 0.050000000000000266 51.485 1.6664429183143359 1.6664429183143359 +-2.05 0.050000000000000266 0.04999999999999982 51.509 1.6195854407841532 1.6195854407841532 +-1.95 0.050000000000000044 0.050000000000000044 51.791 1.5916271548324374 1.5916271548324374 +-1.85 0.04999999999999982 0.050000000000000044 51.615 1.5691284204933642 1.5691284204933642 +-1.75 0.050000000000000044 0.050000000000000044 51.208 1.542565395696403 1.542565395696403 +-1.65 0.050000000000000044 0.04999999999999982 51.266 1.5523714117439809 1.5523714117439809 +-1.55 0.050000000000000044 0.050000000000000044 50.95 1.5220831120540035 1.5220831120540035 +-1.45 0.050000000000000044 0.050000000000000044 50.268 1.4477268388753453 1.4477268388753453 +-1.35 0.04999999999999982 0.050000000000000044 50.148 1.4528430748019554 1.4528430748019554 +-1.25 0.050000000000000044 0.050000000000000044 49.398 1.4295121545478373 1.4295121545478373 +-1.15 0.050000000000000044 0.04999999999999982 49.149 1.4307931366902764 1.4307931366902764 +-1.05 0.050000000000000044 0.050000000000000044 48.526 1.394085363239999 1.394085363239999 +-0.95 0.050000000000000044 0.04999999999999993 47.852 1.3537167355100548 1.3537167355100548 +-0.8500000000000001 0.04999999999999993 0.050000000000000044 47.28 1.3472672340705092 1.3472672340705092 +-0.75 0.050000000000000044 0.050000000000000044 46.669 1.3280850876355779 1.3280850876355779 +-0.6499999999999999 0.050000000000000044 0.04999999999999993 46.079 1.2803831457809807 1.2803831457809807 +-0.55 0.04999999999999993 0.050000000000000044 44.869 1.2618339827409943 1.2618339827409943 +-0.45 0.04999999999999999 0.04999999999999999 44.685 1.2599658725536975 1.2599658725536975 +-0.35 0.050000000000000044 0.04999999999999999 44.065 1.2605002975009565 1.2605002975009565 +-0.25 0.04999999999999999 0.04999999999999999 43.376 1.2080769843019112 1.2080769843019112 +-0.15000000000000002 0.04999999999999999 0.05000000000000002 42.865 1.1886193671651157 1.1886193671651157 +-0.05 0.05 0.05 42.451 1.1787726668022125 1.1787726668022125 +0.05 0.05 0.05 42.105 1.1522139558259135 1.1522139558259135 +0.15000000000000002 0.05000000000000002 0.04999999999999999 41.69 1.164077746544448 1.164077746544448 +0.25 0.04999999999999999 0.04999999999999999 41.58 1.167769240903356 1.167769240903356 +0.35 0.04999999999999999 0.050000000000000044 41.586 1.1979252898240358 1.1979252898240358 +0.45 0.04999999999999999 0.04999999999999999 41.319 1.1962508098220876 1.1962508098220876 +0.55 0.050000000000000044 0.04999999999999993 41.132 1.1937692406826372 1.1937692406826372 +0.6499999999999999 0.04999999999999993 0.050000000000000044 40.999 1.1928461761685787 1.1928461761685787 +0.75 0.050000000000000044 0.050000000000000044 40.882 1.1972405773277148 1.1972405773277148 +0.8500000000000001 0.050000000000000044 0.04999999999999993 40.719 1.214279210066614 1.214279210066614 +0.95 0.04999999999999993 0.050000000000000044 40.677 1.2194658666809826 1.2194658666809826 +1.05 0.050000000000000044 
0.050000000000000044 40.524 1.2280052931482013 1.2280052931482013 +1.15 0.04999999999999982 0.050000000000000044 40.377 1.2168635913692216 1.2168635913692216 +1.25 0.050000000000000044 0.050000000000000044 39.985 1.2341815101515659 1.2341815101515659 +1.35 0.050000000000000044 0.04999999999999982 39.772 1.2124099966595459 1.2124099966595459 +1.45 0.050000000000000044 0.050000000000000044 39.213 1.2198905688626338 1.2198905688626338 +1.55 0.050000000000000044 0.050000000000000044 38.63 1.2083219769581284 1.2083219769581284 +1.65 0.04999999999999982 0.050000000000000044 38.231 1.2109207240773443 1.2109207240773443 +1.75 0.050000000000000044 0.050000000000000044 37.687 1.2057267517974377 1.2057267517974377 +1.85 0.050000000000000044 0.04999999999999982 37.029 1.223114058458981 1.223114058458981 +1.95 0.050000000000000044 0.050000000000000044 36.328 1.2325603433503773 1.2325603433503773 +2.05 0.04999999999999982 0.050000000000000266 35.452 1.2780316897479498 1.2780316897479498 +2.1500000000000004 0.050000000000000266 0.04999999999999982 34.818 1.2679341465549383 1.2679341465549383 +2.25 0.04999999999999982 0.04999999999999982 34.233 1.4121320759758982 1.4121320759758982 +2.3499999999999996 0.04999999999999982 0.050000000000000266 33.524 1.3445965937782232 1.3445965937782232 +2.45 0.050000000000000266 0.04999999999999982 32.598 1.351127677164523 1.351127677164523 +2.55 0.04999999999999982 0.050000000000000266 31.902 1.320021969514144 1.320021969514144 +2.6500000000000004 0.050000000000000266 0.04999999999999982 30.884 1.5050083056249224 1.5050083056249224 +# END YODA_SCATTER2D + + +# BEGIN YODA_SCATTER2D /REF/MC_Cent_pPb_Eta/d02-x01-y08 +Path=/REF/MC_Cent_pPb_Eta/d02-x01-y08 +Type=Scatter2D +# xval xerr- xerr+ yval yerr- yerr+ +-2.6500000000000004 0.04999999999999982 0.050000000000000266 67.419 2.798495488651 2.798495488651 +-2.55 0.050000000000000266 0.04999999999999982 67.2 2.8188029374186483 2.8188029374186483 +-2.45 0.04999999999999982 0.050000000000000266 67.497 2.572081064041334 2.572081064041334 +-2.3499999999999996 0.050000000000000266 0.04999999999999982 67.3 2.4863453098875867 2.4863453098875867 +-2.25 0.04999999999999982 0.04999999999999982 67.923 2.4778242471975287 2.4778242471975287 +-2.1500000000000004 0.04999999999999982 0.050000000000000266 67.859 2.509781066148998 2.509781066148998 +-2.05 0.050000000000000266 0.04999999999999982 67.553 2.4126676107578517 2.4126676107578517 +-1.95 0.050000000000000044 0.050000000000000044 67.468 2.3413596477260814 2.3413596477260814 +-1.85 0.04999999999999982 0.050000000000000044 68.18 2.3291135652861583 2.3291135652861583 +-1.75 0.050000000000000044 0.050000000000000044 67.46 2.2725591301438124 2.2725591301438124 +-1.65 0.050000000000000044 0.04999999999999982 66.687 2.248791897886507 2.248791897886507 +-1.55 0.050000000000000044 0.050000000000000044 66.25 2.1923361968457304 2.1923361968457304 +-1.45 0.050000000000000044 0.050000000000000044 65.985 2.0956473940050127 2.0956473940050127 +-1.35 0.04999999999999982 0.050000000000000044 64.829 2.061788058943014 2.061788058943014 +-1.25 0.050000000000000044 0.050000000000000044 63.821 2.020396000787964 2.020396000787964 +-1.15 0.050000000000000044 0.04999999999999982 63.543 2.0173390889981784 2.0173390889981784 +-1.05 0.050000000000000044 0.050000000000000044 61.893 1.930331059688985 1.930331059688985 +-0.95 0.050000000000000044 0.04999999999999993 61.441 1.8794001702670988 1.8794001702670988 +-0.8500000000000001 0.04999999999999993 0.050000000000000044 60.333 1.855080052181037 1.855080052181037 
+-0.75 0.050000000000000044 0.050000000000000044 60.375 1.8497707966123804 1.8497707966123804 +-0.6499999999999999 0.050000000000000044 0.04999999999999993 59.273 1.7673112346160196 1.7673112346160196 +-0.55 0.04999999999999993 0.050000000000000044 58.196 1.7525438653568703 1.7525438653568703 +-0.45 0.04999999999999999 0.04999999999999999 56.825 1.7115846458764463 1.7115846458764463 +-0.35 0.050000000000000044 0.04999999999999999 56.576 1.72874578813659 1.72874578813659 +-0.25 0.04999999999999999 0.04999999999999999 55.315 1.639988414593225 1.639988414593225 +-0.15000000000000002 0.04999999999999999 0.05000000000000002 54.983 1.6216238774759084 1.6216238774759084 +-0.05 0.05 0.05 53.874 1.591258621343495 1.591258621343495 +0.05 0.05 0.05 53.162 1.547651769617442 1.547651769617442 +0.15000000000000002 0.05000000000000002 0.04999999999999999 52.759 1.568807508906048 1.568807508906048 +0.25 0.04999999999999999 0.04999999999999999 52.593 1.572941829820798 1.572941829820798 +0.35 0.04999999999999999 0.050000000000000044 52.464 1.61352099459536 1.61352099459536 +0.45 0.04999999999999999 0.04999999999999999 52.264 1.618029047946915 1.618029047946915 +0.55 0.050000000000000044 0.04999999999999993 51.772 1.6080251863699149 1.6080251863699149 +0.6499999999999999 0.04999999999999993 0.050000000000000044 51.557 1.6058256443337802 1.6058256443337802 +0.75 0.050000000000000044 0.050000000000000044 51.19 1.6109090601272313 1.6109090601272313 +0.8500000000000001 0.050000000000000044 0.04999999999999993 51.073 1.6391622860473578 1.6391622860473578 +0.95 0.04999999999999993 0.050000000000000044 50.812 1.6458845646034839 1.6458845646034839 +1.05 0.050000000000000044 0.050000000000000044 50.392 1.655488447558605 1.655488447558605 +1.15 0.04999999999999982 0.050000000000000044 49.859 1.6320416661347836 1.6320416661347836 +1.25 0.050000000000000044 0.050000000000000044 49.602 1.6725600138709522 1.6725600138709522 +1.35 0.050000000000000044 0.04999999999999982 48.872 1.63004815879777 1.63004815879777 +1.45 0.050000000000000044 0.050000000000000044 48.289 1.65091126351479 1.65091126351479 +1.55 0.050000000000000044 0.050000000000000044 47.612 1.6415267283842807 1.6415267283842807 +1.65 0.04999999999999982 0.050000000000000044 46.932 1.645484123290164 1.645484123290164 +1.75 0.050000000000000044 0.050000000000000044 46.074 1.6395319454039317 1.6395319454039317 +1.85 0.050000000000000044 0.04999999999999982 45.194 1.6698038806997666 1.6698038806997666 +1.95 0.050000000000000044 0.050000000000000044 43.937 1.6778596484807662 1.6778596484807662 +2.05 0.04999999999999982 0.050000000000000266 42.904 1.7532655246710351 1.7532655246710351 +2.1500000000000004 0.050000000000000266 0.04999999999999982 42.284 1.7532655246710351 1.7532655246710351 +2.25 0.04999999999999982 0.04999999999999982 41.173 1.9561722828012875 1.9561722828012875 +2.3499999999999996 0.04999999999999982 0.050000000000000266 40.237 1.8695574342608468 1.8695574342608468 +2.45 0.050000000000000266 0.04999999999999982 39.347 1.8942882568394916 1.8942882568394916 +2.55 0.04999999999999982 0.050000000000000266 37.992 1.8348005341180824 1.8348005341180824 +2.6500000000000004 0.050000000000000266 0.04999999999999982 36.991 2.0822723164850463 2.0822723164850463 +# END YODA_SCATTER2D diff --git a/bin/Makefile.am b/bin/Makefile.am --- a/bin/Makefile.am +++ b/bin/Makefile.am @@ -1,35 +1,35 @@ bin_SCRIPTS = rivet-config rivet-buildplugin dist_bin_SCRIPTS = make-plots make-pgfplots EXTRA_DIST = RIVETPROGS = \ rivet \ rivet-mkanalysis \ rivet-findid rivet-which \ 
rivet-diffhepdata \ - rivet-cmphistos rivet-mkhtml + rivet-cmphistos rivet-mkhtml rivet-merge if ENABLE_PYEXT dist_bin_SCRIPTS += $(RIVETPROGS) else EXTRA_DIST += $(RIVETPROGS) endif ## bash completion if ENABLE_PYEXT dist_pkgdata_DATA = rivet-completion bashcomp_dir = $(DESTDIR)$(prefix)/etc/bash_completion.d install-data-local: if [[ -d "$(bashcomp_dir)" && -w "$(bashcomp_dir)" ]]; then \ $(install_sh_DATA) rivet-completion $(bashcomp_dir)/; fi uninstall-local: rm -f $(bashcomp_dir)/rivet-completion else EXTRA_DIST += rivet-completion endif ## No-Python Rivet program noinst_PROGRAMS = rivet-nopy rivet_nopy_SOURCES = rivet-nopy.cc rivet_nopy_CPPFLAGS = -I$(top_srcdir)/include $(AM_CPPFLAGS) rivet_nopy_LDFLAGS = -L$(HEPMCLIBPATH) rivet_nopy_LDADD = $(top_builddir)/src/libRivet.la -lHepMC diff --git a/bin/rivet b/bin/rivet --- a/bin/rivet +++ b/bin/rivet @@ -1,672 +1,678 @@ #! /usr/bin/env python """\ Run Rivet analyses on HepMC events read from a file or Unix pipe Examples: %prog [options] [ ...] or mkfifo fifo.hepmc my_generator -o fifo.hepmc & %prog [options] fifo.hepmc ENVIRONMENT: * RIVET_ANALYSIS_PATH: list of paths to be searched for plugin analysis libraries at runtime * RIVET_DATA_PATH: list of paths to be searched for data files (defaults to use analysis path) * RIVET_WEIGHT_INDEX: the numerical weight-vector index to use in this run (default = 0; -1 = ignore weights) * See the documentation for more environment variables. """ from __future__ import print_function import os, sys ## Load the rivet module try: import rivet except: ## If rivet loading failed, try to bootstrap the Python path! try: # TODO: Is this a good idea? Maybe just notify the user that their PYTHONPATH is wrong? import commands modname = sys.modules[__name__].__file__ binpath = os.path.dirname(modname) rivetconfigpath = os.path.join(binpath, "rivet-config") rivetpypath = commands.getoutput(rivetconfigpath + " --pythonpath") sys.path.append(rivetpypath) import rivet except: sys.stderr.write("The rivet Python module could not be loaded: is your PYTHONPATH set correctly?\n") sys.exit(5) rivet.util.check_python_version() rivet.util.set_process_name("rivet") import time, datetime, logging, signal ## Parse command line options from optparse import OptionParser, OptionGroup parser = OptionParser(usage=__doc__, version="rivet v%s" % rivet.version()) anagroup = OptionGroup(parser, "Analysis handling") anagroup.add_option("-a", "--analysis", "--analyses", dest="ANALYSES", action="append", default=[], metavar="ANA", help="add an analysis (or comma-separated list of analyses) to the processing list.") anagroup.add_option("--list-analyses", "--list", dest="LIST_ANALYSES", action="store_true", default=False, help="show the list of available analyses' names. 
With -v, it shows the descriptions, too") anagroup.add_option("--list-keywords", "--keywords", dest="LIST_KEYWORDS", action="store_true", default=False, help="show the list of available keywords.") anagroup.add_option("--list-used-analyses", action="store_true", dest="LIST_USED_ANALYSES", default=False, help="list the analyses used by this command (after subtraction of inappropriate ones)") anagroup.add_option("--show-analysis", "--show-analyses", "--show", dest="SHOW_ANALYSES", action="append", default=[], help="show the details of an analysis") anagroup.add_option("--show-bibtex", dest="SHOW_BIBTEX", action="store_true", default=False, help="show BibTeX entries for all used analyses") anagroup.add_option("--analysis-path", dest="ANALYSIS_PATH", metavar="PATH", default=None, help="specify the analysis search path (cf. $RIVET_ANALYSIS_PATH).") # TODO: remove/deprecate the append? anagroup.add_option("--analysis-path-append", dest="ANALYSIS_PATH_APPEND", metavar="PATH", default=None, help="append to the analysis search path (cf. $RIVET_ANALYSIS_PATH).") anagroup.add_option("--pwd", dest="ANALYSIS_PATH_PWD", action="store_true", default=False, help="append the current directory (pwd) to the analysis/data search paths (cf. $RIVET_ANALYSIS_PATH).") # TODO: add control for more paths? parser.add_option_group(anagroup) extragroup = OptionGroup(parser, "Extra run settings") extragroup.add_option("-o", "-H", "--histo-file", dest="HISTOFILE", default="Rivet.yoda", help="specify the output histo file path (default = %default)") extragroup.add_option("-p", "--preload-file", dest="PRELOADFILE", default=None, help="specify and old yoda file to initialize (default = %default)") extragroup.add_option("--no-histo-file", dest="WRITE_DATA", action="store_false", default=True, help="don't write out any histogram file at the end of the run (default = write)") extragroup.add_option("-x", "--cross-section", dest="CROSS_SECTION", default=None, metavar="XS", help="specify the signal process cross-section in pb") extragroup.add_option("-n", "--nevts", dest="MAXEVTNUM", type="int", default=None, metavar="NUM", help="restrict the max number of events to process") extragroup.add_option("--nskip", dest="EVTSKIPNUM", type="int", default=0, metavar="NUM", help="skip NUM events read from input before beginning processing") extragroup.add_option("--runname", dest="RUN_NAME", default=None, metavar="NAME", help="give an optional run name, to be prepended as a 'top level directory' in histo paths") extragroup.add_option("--ignore-beams", dest="IGNORE_BEAMS", action="store_true", default=False, help="ignore input event beams when checking analysis compatibility. " "WARNING: analyses may not work correctly, or at all, with inappropriate beams") +extragroup.add_option("-d", "--dump", dest="DUMP_PERIOD", type="int", + default=None, metavar="NUM", + help="run finalize() and write out the resulting yoda-file at regular intervals") + parser.add_option_group(extragroup) timinggroup = OptionGroup(parser, "Timeouts and periodic operations") timinggroup.add_option("--event-timeout", dest="EVENT_TIMEOUT", type="int", default=21600, metavar="NSECS", help="max time in whole seconds to wait for an event to be generated from the specified source (default = %default)") timinggroup.add_option("--run-timeout", dest="RUN_TIMEOUT", type="int", default=None, metavar="NSECS", help="max time in whole seconds to wait for the run to finish. 
This can be useful on batch systems such " "as the LCG Grid where tokens expire on a fixed wall-clock and can render long Rivet runs unable to write " "out the final histogram file (default = unlimited)") timinggroup.add_option("--histo-interval", dest="HISTO_WRITE_INTERVAL", type=int, default=1000, help="specify the number of events between histogram file updates, default = %default. " "Set to 0 to only write out at the end of the run. Note that intermediate histograms will be those " "from the analyze step only: analysis finalizing is currently not executed until the end of the run.") parser.add_option_group(timinggroup) verbgroup = OptionGroup(parser, "Verbosity control") parser.add_option("-l", dest="NATIVE_LOG_STRS", action="append", default=[], help="set a log level in the Rivet library") verbgroup.add_option("-v", "--verbose", action="store_const", const=logging.DEBUG, dest="LOGLEVEL", default=logging.INFO, help="print debug (very verbose) messages") verbgroup.add_option("-q", "--quiet", action="store_const", const=logging.WARNING, dest="LOGLEVEL", default=logging.INFO, help="be very quiet") parser.add_option_group(verbgroup) opts, args = parser.parse_args() ## Override/modify analysis search path if opts.ANALYSIS_PATH: rivet.setAnalysisLibPaths(opts.ANALYSIS_PATH.split(":")) rivet.setAnalysisDataPaths(opts.ANALYSIS_PATH.split(":")) if opts.ANALYSIS_PATH_APPEND: for ap in opts.ANALYSIS_PATH_APPEND.split(":"): rivet.addAnalysisLibPath(ap) rivet.addAnalysisDataPath(ap) if opts.ANALYSIS_PATH_PWD: rivet.addAnalysisLibPath(os.path.abspath(".")) rivet.addAnalysisDataPath(os.path.abspath(".")) ## Configure logging logging.basicConfig(level=opts.LOGLEVEL, format="%(message)s") for l in opts.NATIVE_LOG_STRS: name, level = None, None try: name, level = l.split("=") except: name = "Rivet" level = l ## Fix name if name != "Rivet" and not name.startswith("Rivet."): name = "Rivet." + name try: ## Get right error type level = rivet.LEVELS.get(level.upper(), None) logging.debug("Setting log level: %s %d" % (name, level)) rivet.setLogLevel(name, level) except: logging.warning("Couldn't process logging string '%s'" % l) ############################ ## Listing available analyses/keywords def getAnalysesByKeyword(alist, kstring): add, veto, ret = [], [], [] bits = [i for i in kstring.replace("^@", "@^").split("@") if len(i) > 0] for b in bits: if b.startswith("^"): veto.append(b.strip("^")) else: add.append(b) add = set(add) veto = set(veto) for a in alist: kwds = set([i.lower() for i in rivet.AnalysisLoader.getAnalysis(a).keywords()]) if kwds.intersection(veto) and len(kwds.intersection(add)) == len(list(add)): ret.append(a) return ret ## List of analyses all_analyses = rivet.AnalysisLoader.analysisNames() if opts.LIST_ANALYSES: ## Treat args as case-insensitive regexes if present regexes = None if args: import re regexes = [re.compile(arg, re.I) for arg in args] # import tempfile, subprocess # tf, tfpath = tempfile.mkstemp(prefix="rivet-list.") names = [] msg = [] for aname in all_analyses: if not regexes: toshow = True else: toshow = False for regex in regexes: if regex.search(aname): toshow = True break if toshow: names.append(aname) msg.append('') if opts.LOGLEVEL <= logging.INFO: a = rivet.AnalysisLoader.getAnalysis(aname) st = "" if a.status() == "VALIDATED" else ("[%s] " % a.status()) # detex will very likely introduce some non-ASCII chars from # greek names in analysis titles. 
# The u"" prefix and explicit print encoding are necessary for # py2 to handle this properly msg[-1] = u"%s%s" % (st, a.summary()) if opts.LOGLEVEL < logging.INFO: if a.keywords(): msg[-1] += u" [%s]" % " ".join(a.keywords()) if a.luminosityfb(): msg[-1] += u" [ \int L = %s fb^{-1} ]" % a.luminosityfb() msg = rivet.util.detex(msg) retlist = '\n'.join([ u"%-25s %s" % (a,m) for a,m in zip(names,msg) ]) if type(u'') is not str: retlist = retlist.encode('utf-8') print(retlist) sys.exit(0) def getKeywords(alist): all_keywords = [] for a in alist: all_keywords.extend(rivet.AnalysisLoader.getAnalysis(a).keywords()) all_keywords = [i.lower() for i in all_keywords] return sorted(list(set(all_keywords))) ## List keywords if opts.LIST_KEYWORDS: # a = rivet.AnalysisLoader.getAnalysis(aname) for k in getKeywords(all_analyses): print(k) sys.exit(0) ## Show analyses' details if len(opts.SHOW_ANALYSES) > 0: toshow = [] for i, a in enumerate(opts.SHOW_ANALYSES): a_up = a.upper() if a_up in all_analyses and a_up not in toshow: toshow.append(a_up) else: ## Treat as a case-insensitive regex import re regex = re.compile(a, re.I) for ana in all_analyses: if regex.search(ana) and a_up not in toshow: toshow.append(ana) msgs = [] for i, name in enumerate(sorted(toshow)): import textwrap ana = rivet.AnalysisLoader.getAnalysis(name) msg = "" msg += name + "\n" msg += (len(name) * "=") + "\n\n" msg += rivet.util.detex(ana.summary()) + "\n\n" msg += "Status: " + ana.status() + "\n\n" # TODO: reduce to only show Inspire in v3 if ana.inspireId(): msg += "Inspire ID: " + ana.inspireId() + "\n" msg += "Inspire URL: http://inspire-hep.net/record/" + ana.inspireId() + "\n" msg += "HepData URL: http://hepdata.net/record/ins" + ana.inspireId() + "\n" elif ana.spiresId(): msg += "Spires ID: " + ana.spiresId() + "\n" msg += "Inspire URL: http://inspire-hep.net/search?p=find+key+" + ana.spiresId() + "\n" msg += "HepData URL: http://hepdata.cedar.ac.uk/view/irn" + ana.spiresId() + "\n" if ana.year(): msg += "Year of publication: " + ana.year() + "\n" if ana.bibKey(): msg += "BibTeX key: " + ana.bibKey() + "\n" msg += "Authors:\n" for a in ana.authors(): msg += " " + a + "\n" msg += "\n" msg += "Description:\n" twrap = textwrap.TextWrapper(width=75, initial_indent=2*" ", subsequent_indent=2*" ") msg += twrap.fill(rivet.util.detex(ana.description())) + "\n\n" if ana.experiment(): msg += "Experiment: " + ana.experiment() if ana.collider(): msg += "(%s)" % ana.collider() msg += "\n" # TODO: move this formatting into Analysis or a helper function? 
if ana.requiredBeams(): def pid_to_str(pid): if pid == 11: return "e-" elif pid == -11: return "e+" elif pid == 2212: return "p+" elif pid == -2212: return "p-" elif pid == 10000: return "*" else: return str(pid) beamstrs = [] for bp in ana.requiredBeams(): beamstrs.append(pid_to_str(bp[0]) + " " + pid_to_str(bp[1])) msg += "Beams:" + ", ".join(beamstrs) + "\n" if ana.requiredEnergies(): msg += "Beam energies:" + "; ".join(["(%0.1f, %0.1f) GeV\n" % (epair[0], epair[1]) for epair in ana.requiredEnergies()]) else: msg += "Beam energies: ANY\n" if ana.runInfo(): msg += "Run details:\n" twrap = textwrap.TextWrapper(width=75, initial_indent=2*" ", subsequent_indent=4*" ") for l in ana.runInfo().split("\n"): msg += twrap.fill(l) + "\n" if ana.luminosityfb(): msg+= "\nIntegrated data luminosity = %s inverse fb.\n"%ana.luminosityfb() if ana.keywords(): msg += "\nAnalysis keywords:" for k in ana.keywords(): msg += " %s"%k msg+= "\n\n" if ana.references(): msg += "\n" + "References:\n" for r in ana.references(): url = None if r.startswith("arXiv:"): code = r.split()[0].replace("arXiv:", "") url = "http://arxiv.org/abs/" + code elif r.startswith("doi:"): code = r.replace("doi:", "") url = "http://dx.doi.org/" + code if url is not None: r += " - " + url msg += " " + r + "\n" ## Add to the output msgs.append(msg) ## Write the combined messages to a temporary file and page it if msgs: try: import tempfile, subprocess tffd, tfpath = tempfile.mkstemp(prefix="rivet-show.") os.write(tffd, u"\n\n".join(msgs).encode('utf-8')) if sys.stdout.isatty(): pager = subprocess.Popen(["less", "-FX", tfpath]) #, stdin=subprocess.PIPE) pager.communicate() else: f = open(tfpath) print(f.read()) f.close() finally: os.unlink(tfpath) #< always clean up sys.exit(0) ############################ ## Actual analysis runs ## We allow comma-separated lists of analysis names -- normalise the list here newanas = [] for a in opts.ANALYSES: if "," in a: newanas += a.split(",") elif "@" in a: #< NB. this bans combination of ana lists and keywords in a single arg temp = getAnalysesByKeyword(all_analyses, a) for i in temp: newanas.append(i) else: newanas.append(a) opts.ANALYSES = newanas ## Parse supplied cross-section if opts.CROSS_SECTION is not None: xsstr = opts.CROSS_SECTION try: opts.CROSS_SECTION = float(xsstr) except: import re suffmatch = re.search(r"[^\d.]", xsstr) if not suffmatch: raise ValueError("Bad cross-section string: %s" % xsstr) factor = base = None suffstart = suffmatch.start() if suffstart != -1: base = xsstr[:suffstart] suffix = xsstr[suffstart:].lower() if suffix == "mb": factor = 1e+9 elif suffix == "mub": factor = 1e+6 elif suffix == "nb": factor = 1e+3 elif suffix == "pb": factor = 1 elif suffix == "fb": factor = 1e-3 elif suffix == "ab": factor = 1e-6 if factor is None or base is None: raise ValueError("Bad cross-section string: %s" % xsstr) xs = float(base) * factor opts.CROSS_SECTION = xs ## Print the available CLI options! 
#if opts.LIST_OPTIONS: # for o in parser.option_list: # print(o.get_opt_string()) # sys.exit(0) ## Set up signal handling RECVD_KILL_SIGNAL = None def handleKillSignal(signum, frame): "Declare us as having been signalled, and return to default handling behaviour" global RECVD_KILL_SIGNAL logging.critical("Signal handler called with signal " + str(signum)) RECVD_KILL_SIGNAL = signum signal.signal(signum, signal.SIG_DFL) ## Signals to handle signal.signal(signal.SIGTERM, handleKillSignal); signal.signal(signal.SIGHUP, handleKillSignal); signal.signal(signal.SIGINT, handleKillSignal); signal.signal(signal.SIGUSR1, handleKillSignal); signal.signal(signal.SIGUSR2, handleKillSignal); try: signal.signal(signal.SIGXCPU, handleKillSignal); except: pass ## Identify HepMC files/streams ## TODO: check readability, deal with stdin if len(args) > 0: HEPMCFILES = args else: HEPMCFILES = ["-"] ## Event number logging def logNEvt(n, starttime, maxevtnum): if n % 10000 == 0: nevtloglevel = logging.CRITICAL elif n % 1000 == 0: nevtloglevel = logging.WARNING elif n % 100 == 0: nevtloglevel = logging.INFO else: nevtloglevel = logging.DEBUG currenttime = datetime.datetime.now().replace(microsecond=0) elapsedtime = currenttime - starttime logging.log(nevtloglevel, "Event %d (%s elapsed)" % (n, str(elapsedtime))) # if maxevtnum is None: # logging.log(nevtloglevel, "Event %d (%s elapsed)" % (n, str(elapsedtime))) # else: # remainingtime = (maxevtnum-n) * elapsedtime.total_seconds() / float(n) # eta = time.strftime("%a %b %d %H:%M", datetime.localtime(currenttime + remainingtime)) # logging.log(nevtloglevel, "Event %d (%d s elapsed / %d s left) -> ETA: %s" % # (n, elapsedtime, remainingtime, eta)) ## Do some checks on output histo file, before we stat the event loop histo_parentdir = os.path.dirname(os.path.abspath(opts.HISTOFILE)) if not os.path.exists(histo_parentdir): logging.error('Parent path of output histogram file does not exist: %s\nExiting.' % histo_parentdir) sys.exit(4) if not os.access(histo_parentdir,os.W_OK): logging.error('Insufficient permissions to write output histogram file to directory %s\nExiting.' % histo_parentdir) sys.exit(4) ## Set up analysis handler RUNNAME = opts.RUN_NAME or "" ah = rivet.AnalysisHandler(RUNNAME) ah.setIgnoreBeams(opts.IGNORE_BEAMS) for a in opts.ANALYSES: ## Print warning message and exit if not a valid analysis name if not rivet.stripOptions(a) in all_analyses: logging.warning("'%s' is not a known Rivet analysis! Do you need to set RIVET_ANALYSIS_PATH or use the --pwd switch?\n" % a) # TODO: lay out more neatly, or even try for a "did you mean XXXX?" heuristic? 
logging.warning("There are %d currently available analyses:\n" % len(all_analyses) + ", ".join(all_analyses)) sys.exit(1) logging.debug("Adding analysis '%s'" % a) ah.addAnalysis(a) if opts.PRELOADFILE is not None: ah.readData(opts.PRELOADFILE) +if opts.DUMP_PERIOD: + ah.dump(opts.HISTOFILE, opts.DUMP_PERIOD) if opts.SHOW_BIBTEX: bibs = [] for aname in sorted(ah.analysisNames()): ana = rivet.AnalysisLoader.getAnalysis(aname) bibs.append("% " + aname + "\n" + ana.bibTeX()) if bibs: print("\nBibTeX for used Rivet analyses:\n") print("% --------------------------\n") print("\n\n".join(bibs) + "\n") print("% --------------------------\n") ## Read and process events run = rivet.Run(ah) if opts.CROSS_SECTION is not None: logging.info("User-supplied cross-section = %e pb" % opts.CROSS_SECTION) run.setCrossSection(opts.CROSS_SECTION) if opts.LIST_USED_ANALYSES is not None: run.setListAnalyses(opts.LIST_USED_ANALYSES) ## Print platform type import platform starttime = datetime.datetime.now().replace(microsecond=0) logging.info("Rivet %s running on machine %s (%s) at %s" % \ (rivet.version(), platform.node(), platform.machine(), str(starttime))) def min_nonnull(a, b): "A version of min which considers None to always be greater than a real number" if a is None: return b if b is None: return a return min(a, b) ## Set up an event timeout handler class TimeoutException(Exception): pass if opts.EVENT_TIMEOUT or opts.RUN_TIMEOUT: def evttimeouthandler(signum, frame): logging.warn("It has taken more than %d secs to get an event! Is the input event stream working?" % min_nonnull(opts.EVENT_TIMEOUT, opts.RUN_TIMEOUT)) raise TimeoutException("Event timeout") signal.signal(signal.SIGALRM, evttimeouthandler) ## Init run based on one event hepmcfile = HEPMCFILES[0] ## Apply a file-level weight derived from the filename hepmcfileweight = 1.0 if ":" in hepmcfile: hepmcfile, hepmcfileweight = hepmcfile.rsplit(":", 1) hepmcfileweight = float(hepmcfileweight) try: if opts.EVENT_TIMEOUT or opts.RUN_TIMEOUT: signal.alarm(min_nonnull(opts.EVENT_TIMEOUT, opts.RUN_TIMEOUT)) init_ok = run.init(hepmcfile, hepmcfileweight) signal.alarm(0) if not init_ok: logging.error("Failed to initialise using event file '%s'... exiting" % hepmcfile) sys.exit(2) except TimeoutException as te: logging.error("Timeout in initialisation from event file '%s'... exiting" % hepmcfile) sys.exit(3) except Exception as ex: logging.warning("Could not read from '%s' (error=%s)" % (hepmcfile, str(ex))) sys.exit(3) ## Event loop evtnum = 0 for fileidx, hepmcfile in enumerate(HEPMCFILES): ## Apply a file-level weight derived from the filename hepmcfileweight = 1.0 if ":" in hepmcfile: hepmcfile, hepmcfileweight = hepmcfile.rsplit(":", 1) hepmcfileweight = float(hepmcfileweight) ## Open next HepMC file (NB. 
this doesn't apply to the first file: it was already used for the run init) if fileidx > 0: try: run.openFile(hepmcfile, hepmcfileweight) except Exception as ex: logging.warning("Could not read from '%s' (error=%s)" % (hepmcfile, ex)) continue if not run.readEvent(): logging.warning("Could not read events from '%s'" % hepmcfile) continue ## Announce new file msg = "Reading events from '%s'" % hepmcfile if hepmcfileweight != 1.0: msg += " (file weight = %e)" % hepmcfileweight logging.info(msg) ## The event loop while opts.MAXEVTNUM is None or evtnum-opts.EVTSKIPNUM < opts.MAXEVTNUM: evtnum += 1 ## Optional event skipping if evtnum <= opts.EVTSKIPNUM: logging.debug("Skipping event #%i" % evtnum) run.skipEvent(); continue ## Only log the event number once we're actually processing logNEvt(evtnum, starttime, opts.MAXEVTNUM) ## Process this event processed_ok = run.processEvent() if not processed_ok: logging.warn("Event processing failed for evt #%i!" % evtnum) break ## Set flag to exit event loop if run timeout exceeded if opts.RUN_TIMEOUT and (time.time() - starttime) > opts.RUN_TIMEOUT: logging.warning("Run timeout of %d secs exceeded... exiting gracefully" % opts.RUN_TIMEOUT) RECVD_KILL_SIGNAL = True ## Exit the loop if signalled if RECVD_KILL_SIGNAL is not None: break ## Read next event (with timeout handling if requested) try: if opts.EVENT_TIMEOUT: signal.alarm(opts.EVENT_TIMEOUT) read_ok = run.readEvent() signal.alarm(0) if not read_ok: break except TimeoutException as te: logging.error("Timeout in reading event from '%s'... exiting" % hepmcfile) sys.exit(3) ## Write a histo file snapshot if appropriate if opts.HISTO_WRITE_INTERVAL is not None and opts.HISTO_WRITE_INTERVAL > 0: if evtnum % opts.HISTO_WRITE_INTERVAL == 0: ah.writeData(opts.HISTOFILE) ## Print end-of-loop messages loopendtime = datetime.datetime.now().replace(microsecond=0) logging.info("Finished event loop at %s" % str(loopendtime)) logging.info("Cross-section = %e pb" % ah.crossSection()) print() ## Finalize and write out data file run.finalize() if opts.WRITE_DATA: ah.writeData(opts.HISTOFILE) print() endtime = datetime.datetime.now().replace(microsecond=0) logging.info("Rivet run completed at %s, time elapsed = %s" % (str(endtime), str(endtime-starttime))) print() logging.info("Histograms written to %s" % os.path.abspath(opts.HISTOFILE)) diff --git a/bin/rivet-cmphistos b/bin/rivet-cmphistos --- a/bin/rivet-cmphistos +++ b/bin/rivet-cmphistos @@ -1,490 +1,490 @@ #! /usr/bin/env python """\ %prog - generate histogram comparison plots USAGE: %prog [options] yodafile1[:'PlotOption1=Value':'PlotOption2=Value':...] [path/to/yodafile2 ...] [PLOT:Key1=Val1:...] where the plot options are described in the make-plots manual in the HISTOGRAM section. 
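For example (purely illustrative file names and option values, using the
make-plots HISTOGRAM keys that this script itself sets as annotations):

  %prog mc1.yoda:'Title=Generator A' mc2.yoda:'LineStyle=dashed' PLOT:LogY=0

Per-file strings after the colons are attached to that file's histograms as
plot annotations, while the special PLOT "file" sets keys for the combined plot.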
ENVIRONMENT: * RIVET_ANALYSIS_PATH: list of paths to be searched for plugin analysis libraries at runtime * RIVET_DATA_PATH: list of paths to be searched for data files """ from __future__ import print_function import rivet, yoda, sys, os rivet.util.check_python_version() rivet.util.set_process_name(os.path.basename(__file__)) class Plot(dict): "A tiny Plot object to help writing out the head in the .dat file" def __repr__(self): return "# BEGIN PLOT\n" + "\n".join("%s=%s" % (k,v) for k,v in self.items()) + "\n# END PLOT\n\n" def sanitiseString(s): #s = s.replace('_','\\_') #s = s.replace('^','\\^{}') #s = s.replace('$','\\$') s = s.replace('#','\\#') s = s.replace('%','\\%') return s def getCommandLineOptions(): "Parse command line options" from optparse import OptionParser, OptionGroup parser = OptionParser(usage=__doc__) parser.add_option('-o', '--outdir', dest='OUTDIR', default='.', help='write data files into this directory') parser.add_option("--hier-out", action="store_true", dest="HIER_OUTPUT", default=False, help="write output dat files into a directory hierarchy which matches the analysis paths") parser.add_option('--plotinfodir', dest='PLOTINFODIRS', action='append', default=['.'], help='directory which may contain plot header information (in addition ' 'to standard Rivet search paths)') parser.add_option("--no-rivet-refs", dest="RIVETREFS", action="store_false", default=True, help="don't use Rivet reference data files") # parser.add_option("--refid", dest="REF_ID", # default="REF", help="ID of reference data set (file path for non-REF data)") parser.add_option("--reftitle", dest="REFTITLE", default='Data', help="Reference data legend entry") parser.add_option("--pwd", dest="PATH_PWD", action="store_true", default=False, help="append the current directory (pwd) to the analysis/data search paths (cf. $RIVET_ANALYSIS/DATA_PATH)") parser.add_option("-v", "--verbose", dest="VERBOSE", action="store_true", default=False, help="produce debug output to the terminal") stygroup = OptionGroup(parser, "Plot style") stygroup.add_option("--linear", action="store_true", dest="LINEAR", default=False, help="plot with linear scale") stygroup.add_option("--errs", "--mcerrs", "--mc-errs", action="store_true", dest="MC_ERRS", default=False, help="show vertical error bars on the MC lines") stygroup.add_option("--no-ratio", action="store_false", dest="RATIO", default=True, help="disable the ratio plot") stygroup.add_option("--rel-ratio", action="store_true", dest="RATIO_DEVIATION", default=False, help="show the ratio plots scaled to the ref error") stygroup.add_option("--no-plottitle", action="store_true", dest="NOPLOTTITLE", default=False, help="don't show the plot title on the plot " "(useful when the plot description should only be given in a caption)") stygroup.add_option("--style", dest="STYLE", default="default", help="change plotting style: default|bw|talk") stygroup.add_option("-c", "--config", dest="CONFIGFILES", action="append", default=["~/.make-plots"], help="additional plot config file(s). Settings will be included in the output configuration.") parser.add_option_group(stygroup) selgroup = OptionGroup(parser, "Selective plotting") # selgroup.add_option("--show-single", dest="SHOW_SINGLE", choices=("no", "ref", "mc", "all"), # default="mc", help="control if a plot file is made if there is only one dataset to be plotted " # "[default=%default]. 
If the value is 'no', single plots are always skipped, for 'ref' and 'mc', " # "the plot will be written only if the single plot is a reference plot or an MC " # "plot respectively, and 'all' will always create single plot files.\n The 'ref' and 'all' values " # "should be used with great care, as they will also write out plot files for all reference " # "histograms without MC traces: combined with the -R/--rivet-refs flag, this is a great way to " # "write out several thousand irrelevant reference data histograms!") # selgroup.add_option("--show-mc-only", "--all", action="store_true", dest="SHOW_IF_MC_ONLY", # default=False, help="make a plot file even if there is only one dataset to be plotted and " # "it is an MC one. Deprecated and will be removed: use --show-single instead, which overrides this.") # # selgroup.add_option("-l", "--histogram-list", dest="HISTOGRAMLIST", # # default=None, help="specify a file containing a list of histograms to plot, in the format " # # "/ANALYSIS_ID/histoname, one per line, e.g. '/DELPHI_1996_S3430090/d01-x01-y01'.") selgroup.add_option("-m", "--match", action="append", help="only write out histograms whose $path/$name string matches these regexes. The argument " "may also be a text file.", dest="PATHPATTERNS") selgroup.add_option("-M", "--unmatch", action="append", help="exclude histograms whose $path/$name string matches these regexes", dest="PATHUNPATTERNS") parser.add_option_group(selgroup) return parser def getHistos(filelist): """Loop over all input files. Only use the first occurrence of any REF-histogram and the first occurrence in each MC file for every MC-histogram.""" refhistos, mchistos = {}, {} for infile in filelist: mchistos.setdefault(infile, {}) analysisobjects = yoda.read(infile, patterns=opts.PATHPATTERNS, unpatterns=opts.PATHUNPATTERNS) #print(analysisobjects) for path, ao in analysisobjects.items(): ## We can't plot non-histograms yet # TODO: support counter plotting with a faked x (or y) position and forced plot width/height if ao.type not in ("Counter", "Histo1D", "Histo2D", "Profile1D", "Profile2D", "Scatter1D", "Scatter2D", "Scatter3D"): continue ## Make a path object and ensure the path is in standard form try: aop = rivet.AOPath(path) except Exception as e: #print(e) print("Found analysis object with non-standard path structure:", path, "... skipping") continue ## We don't plot data objects with path components hidden by an underscore prefix - if aop.istmp(): + if aop.istmp() or aop.israw(): continue ## Add it to the ref or mc paths, if this path isn't already known basepath = aop.basepath(keepref=False) if aop.isref() and basepath not in refhistos: ao.path = aop.varpath(keepref=False, defaultvarid=0) refhistos[basepath] = ao else: #if basepath not in mchistos[infile]: mchistos[infile].setdefault(basepath, {})[aop.varid(0)] = ao return refhistos, mchistos def getRivetRefData(anas=None): "Find all Rivet reference data files" refhistos = {} rivet_data_dirs = rivet.getAnalysisRefPaths() dirlist = [] for d in rivet_data_dirs: if anas is None: import glob dirlist.append(glob.glob(os.path.join(d, '*.yoda'))) else: dirlist.append([os.path.join(d, a+'.yoda') for a in anas]) for filelist in dirlist: # TODO: delegate to getHistos? 
for infile in filelist: analysisobjects = yoda.read(infile, patterns=opts.PATHPATTERNS, unpatterns=opts.PATHUNPATTERNS) for path, ao in analysisobjects.items(): aop = rivet.AOPath(ao.path) if aop.isref(): ao.path = aop.basepath(keepref=False) refhistos[ao.path] = ao return refhistos def parseArgs(args): """Look at the argument list and split it at colons, in order to separate the file names from the plotting options. Store the file names and file specific plotting options.""" filelist = [] plotoptions = {} for a in args: asplit = a.split(':') path = asplit[0] filelist.append(path) plotoptions[path] = [] has_title = False for i in range(1, len(asplit)): ## Add 'Title' if there is no = sign before math mode if '=' not in asplit[i] or ('$' in asplit[i] and asplit[i].index('$') < asplit[i].index('=')): asplit[i] = 'Title=%s' % asplit[i] if asplit[i].startswith('Title='): has_title = True plotoptions[path].append(asplit[i]) if path != "PLOT" and not has_title: plotoptions[path].append('Title=%s' % sanitiseString(os.path.basename( os.path.splitext(path)[0] )) ) return filelist, plotoptions def setStyle(ao, istyle, variation=False): """Set default plot styles (color and line width) colors borrowed from Google Ngrams""" # LINECOLORS = ['{[HTML]{EE3311}}', # red (Google uses 'DC3912') # '{[HTML]{3366FF}}', # blue # '{[HTML]{109618}}', # green # '{[HTML]{FF9900}}', # orange # '{[HTML]{990099}}'] # lilac LINECOLORS = ['red', 'blue', 'green', 'orange', 'lilac'] LINESTYLES = ['solid', 'dashed', 'dashdotted', 'dotted'] if opts.STYLE == 'talk': ao.setAnnotation('LineWidth', '1pt') if opts.STYLE == 'bw': LINECOLORS = ['black!90', 'black!50', 'black!30'] jc = istyle % len(LINECOLORS) c = LINECOLORS[jc] js = (istyle // len(LINECOLORS)) % len(LINESTYLES) s = LINESTYLES[js] ## If plotting a variation (i.e. band), fade the colour if variation: c += "!30" ao.setAnnotation('LineStyle', '%s' % s) ao.setAnnotation('LineColor', '%s' % c) def setOptions(ao, options): "Set arbitrary annotations" for opt in options: key, val = opt.split('=', 1) ao.setAnnotation(key, val) # TODO: move to rivet.utils def mkoutdir(outdir): "Function to make output directories" if not os.path.exists(outdir): try: os.makedirs(outdir) except: msg = "Can't make output directory '%s'" % outdir raise Exception(msg) if not os.access(outdir, os.W_OK): msg = "Can't write to output directory '%s'" % outdir raise Exception(msg) def mkOutput(hpath, aos, plot=None, special=None): """ Make the .dat file string. We can't use "yoda.writeFLAT(anaobjects, 'foobar.dat')" because the PLOT and SPECIAL blocks don't have a corresponding analysis object. 
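    The string is therefore assembled by hand, roughly in this order
    (illustrative layout only):

      # BEGIN PLOT ... # END PLOT                 <- from the Plot object, if given
      # BEGIN SPECIAL <hpath> ... # END SPECIAL   <- if a special block exists
      <flat YODA dump of the analysis objects>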
""" output = '' if plot is not None: output += str(plot) if special is not None: output += "\n" output += "# BEGIN SPECIAL %s\n" % hpath output += special output += "# END SPECIAL\n\n" from io import StringIO sio = StringIO() yoda.writeFLAT(aos, sio) output += sio.getvalue() return output def writeOutput(output, h): "Choose output file name and dir" if opts.HIER_OUTPUT: hparts = h.strip("/").split("/", 1) ana = "_".join(hparts[:-1]) if len(hparts) > 1 else "ANALYSIS" outdir = os.path.join(opts.OUTDIR, ana) outfile = '%s.dat' % hparts[-1].replace("/", "_") else: hparts = h.strip("/").split("/") outdir = opts.OUTDIR outfile = '%s.dat' % "_".join(hparts) mkoutdir(outdir) outfilepath = os.path.join(outdir, outfile) f = open(outfilepath, 'w') f.write(output) f.close() #-------------------------------------------------------------------------------------------- if __name__ == '__main__': ## Command line parsing parser = getCommandLineOptions() opts, args = parser.parse_args() ## Add pwd to search paths if opts.PATH_PWD: rivet.addAnalysisLibPath(os.path.abspath(".")) rivet.addAnalysisDataPath(os.path.abspath(".")) ## Split the input file names and the associated plotting options ## given on the command line into two separate lists filelist, plotoptions = parseArgs(args) ## Remove the PLOT dummy file from the file list if "PLOT" in filelist: filelist.remove("PLOT") ## Check that the files exist for f in filelist: if not os.access(f, os.R_OK): print("Error: cannot read from %s" % f) sys.exit(1) ## Read the .plot files plotdirs = opts.PLOTINFODIRS + [os.path.abspath(os.path.dirname(f)) for f in filelist] plotparser = rivet.mkStdPlotParser(plotdirs, opts.CONFIGFILES) ## Create a list of all histograms to be plotted, and identify if they are 2D histos (which need special plotting) try: refhistos, mchistos = getHistos(filelist) except IOError as e: print("File reading error: ", e.strerror) exit(1) hpaths, h2ds = [], [] for aos in mchistos.values(): for p in aos.keys(): ps = rivet.stripOptions(p) if ps and ps not in hpaths: hpaths.append(ps) firstaop = aos[p][sorted(aos[p].keys())[0]] # TODO: Would be nicer to test via isHisto and dim or similar, or yoda.Scatter/Histo/Profile base classes if type(firstaop) in (yoda.Histo2D, yoda.Profile2D) and ps not in h2ds: h2ds.append(ps) ## Take reference data from the Rivet search paths, if there is not already if opts.RIVETREFS: try: refhistos2 = getRivetRefData() except IOError as e: print("File reading error: ", e.strerror) exit(1) refhistos2.update(refhistos) refhistos = refhistos2 ## Purge unmatched ref data entries to save memory keylist = list(refhistos.keys()) # can't modify for-loop target for refhpath in keylist: if refhpath not in hpaths: del refhistos[refhpath] ## Now loop over all MC histograms and plot them # TODO: factorize much of this into a rivet.utils mkplotfile(mchists, refhist, kwargs, is2d=False) function for hpath in hpaths: #print('Currently looking at', h) ## The analysis objects to be plotted anaobjects = [] ## List of histos to be drawn, to sync the legend and plotted lines mainlines = [] varlines = [] ## Is this a 2D histo? is2d = (hpath in h2ds) ## Will we be drawing a ratio plot? 
showratio = opts.RATIO and not is2d ## A Plot object to represent the PLOT section in the .dat file plot = Plot() if not is2d: plot['Legend'] = '1' plot['LogY'] = '1' headers = plotparser.getHeaders(hpath) if headers: plot.update(headers) # for key, val in headers.items(): # plot[key] = val if "PLOT" in plotoptions: for key_val in plotoptions["PLOT"]: key, val = [s.strip() for s in key_val.split("=",1)] plot[key] = val if opts.LINEAR: plot['LogY'] = '0' if opts.NOPLOTTITLE: plot['Title'] = '' if showratio and opts.RATIO_DEVIATION: plot['RatioPlotMode'] = 'deviation' if opts.STYLE == 'talk': plot['PlotSize'] = '8,6' elif opts.STYLE == 'bw' and showratio: plot['RatioPlotErrorBandColor'] = 'black!10' ## Get a special object, if there is one for this path special = plotparser.getSpecial(hpath) ## Handle reference data histogram, if there is one ratioreference, hasdataref = None, False if hpath in refhistos: hasdataref = True refdata = refhistos[hpath] refdata.setAnnotation('Title', opts.REFTITLE) if not is2d: refdata.setAnnotation('ErrorBars', '1') refdata.setAnnotation('PolyMarker', '*') refdata.setAnnotation('ConnectBins', '0') if showratio: ratioreference = hpath ## For 1D anaobjects.append(refdata) mainlines.append(hpath) ## For 2D if is2d: s = mkOutput(hpath, [refdata], plot, special) writeOutput(s, hpath) ## Loop over the MC files to plot all instances of the histogram styleidx = 0 for infile in filelist: if infile in mchistos: for xpath in sorted(mchistos[infile]): if rivet.stripOptions(xpath) != hpath: continue hmcs = mchistos[infile][xpath] ## For now, just plot all the different variation histograms (reversed, so [0] is on top) # TODO: calculate and plot an appropriate error band, somehow... for i in sorted(hmcs.keys(), reverse=True): iscanonical = (str(i) == "0") hmc = hmcs[i] ## Default linecolor, linestyle if not is2d: setStyle(hmc, styleidx, not iscanonical) if opts.MC_ERRS: hmc.setAnnotation('ErrorBars', '1') ## Plot defaults from .plot files histopts = plotparser.getHistogramOptions(hpath) if histopts: for key, val in histopts.items(): hmc.setAnnotation(key, val) ## Command line plot options setOptions(hmc, plotoptions[infile]) ## Set path attribute fullpath = "/"+infile+xpath if not iscanonical: fullpath += "["+str(i)+"]" hmc.setAnnotation('Path', fullpath) ## Add object / path to appropriate lists if hmc.hasAnnotation("Title"): hmc.setAnnotation("Title", hmc.annotation("Title") + rivet.extractOptionString(xpath)) anaobjects.append(hmc) if iscanonical: mainlines.append(fullpath) else: varlines.append(fullpath) if showratio and ratioreference is None and iscanonical: ratioreference = fullpath ## For 2D, plot each histo now (since overlay makes no sense) if is2d: s = mkOutput(hpath, [hmc], plot, special) writeOutput(s, fullpath) styleidx += 1 ## Finally render the combined plots; only show the first one if it's 2D # TODO: Only show the first *MC* one if 2D? 
if is2d: anaobjects = anaobjects[:1] ## Add final attrs to Plot plot['DrawOnly'] = ' '.join(varlines + mainlines).strip() plot['LegendOnly'] = ' '.join(mainlines).strip() if showratio and len(varlines + mainlines) > 1: plot['RatioPlot'] = '1' plot['RatioPlotReference'] = ratioreference if not hasdataref and "RatioPlotYLabel" not in plot: if plot.get('RatioPlotMode', '') == 'deviation': plot['RatioPlotYLabel'] = 'Deviation' #r'$\text{MC}-\text{MC}_\text{ref}$' else: plot['RatioPlotYLabel'] = 'Ratio' #r'$\text{MC}/\text{MC}_\text{ref}$' ## Make the output and write to file o = mkOutput(hpath, anaobjects, plot, special) writeOutput(o, hpath) diff --git a/bin/rivet-merge b/bin/rivet-merge new file mode 100755 --- /dev/null +++ b/bin/rivet-merge @@ -0,0 +1,66 @@ +#! /usr/bin/env python + +"""\ +Merge together yoda files produced by Rivet. + +Examples: + + %prog [options] [ ...] + +ENVIRONMENT: + * RIVET_ANALYSIS_PATH: list of paths to be searched for plugin + analysis libraries at runtime + * RIVET_DATA_PATH: list of paths to be searched for data files +""" + +import os, sys + +## Load the rivet module +try: + import rivet +except: + ## If rivet loading failed, try to bootstrap the Python path! + try: + # TODO: Is this a good idea? Maybe just notify the user that their PYTHONPATH is wrong? + import commands + modname = sys.modules[__name__].__file__ + binpath = os.path.dirname(modname) + rivetconfigpath = os.path.join(binpath, "rivet-config") + rivetpypath = commands.getoutput(rivetconfigpath + " --pythonpath") + sys.path.append(rivetpypath) + import rivet + except: + sys.stderr.write("The rivet Python module could not be loaded: is your PYTHONPATH set correctly?\n") + sys.exit(5) + +rivet.util.check_python_version() +rivet.util.set_process_name("rivet-merge") + +import time, datetime, logging, signal + +## Parse command line options +from optparse import OptionParser, OptionGroup +parser = OptionParser(usage=__doc__, version="rivet v%s" % rivet.version()) + +extragroup = OptionGroup(parser, "Run settings") +extragroup.add_option("-o", "--output-file", dest="OUTPUTFILE", + default="Rivet.yoda", help="specify the output histo file path (default = %default)") +extragroup.add_option("-e", "--equiv", dest="EQUIV", action="store_true", default=False, + help="assume that the yoda files are equivalent but statistically independent (default= assume that different files contains different processes)") +parser.add_option_group(extragroup) + + +opts, args = parser.parse_args() + + +############################ +## Actual analysis runs + + + +## Set up analysis handler +ah = rivet.AnalysisHandler("Merging") +ah.mergeYodas(args, opts.EQUIV) +ah.writeData(opts.OUTPUTFILE) + + diff --git a/include/Rivet/Analyses/MC_Cent_pPb.hh b/include/Rivet/Analyses/MC_Cent_pPb.hh new file mode 100644 --- /dev/null +++ b/include/Rivet/Analyses/MC_Cent_pPb.hh @@ -0,0 +1,87 @@ +// -*- C++ -*- +#ifndef RIVET_MC_Cent_pPb_HH +#define RIVET_MC_Cent_pPb_HH + +#include "Rivet/Projections/FinalState.hh" +#include "Rivet/Projections/ChargedFinalState.hh" +#include "Rivet/Projections/SingleValueProjection.hh" +#include "Rivet/Projections/TriggerProjection.hh" + +namespace Rivet { + +/// Example of a centrality observable projection for pPb that uses +/// summed Et in the Pb direction. +class MC_SumETFwdPbCentrality: public SingleValueProjection { + +public: + + /// Constructor. 
+ MC_SumETFwdPbCentrality() { + declare(FinalState(Cuts::eta < -3.2 && Cuts::eta > -4.9 && Cuts::pT > 0.1*GeV), + "FSSumETFwdCentrality"); + } + + /// Clone on the heap. + DEFAULT_RIVET_PROJ_CLONE(MC_SumETFwdPbCentrality); + +protected: + + /// Perform the projection on the Event + void project(const Event& e) { + clear(); + const FinalState & fsfwd = + apply(e, "FSSumETFwdCentrality"); + double estimate = 0.0; + for ( const Particle & p : fsfwd.particles() ) { + estimate += p.Et(); + } + set(estimate); + } + + /// Compare projections + int compare(const Projection& p) const { + return mkNamedPCmp(p, "FSSumETFwdCentrality"); + } + +}; + +/// Example of a trigger projection for minimum bias pPb requiring at +/// least one charged particle in both forward and backward direction. +class MC_pPbMinBiasTrigger: public TriggerProjection { + +public: + + /// Constructor. + MC_pPbMinBiasTrigger() { + declare(FinalState(Cuts::eta < -3.2 && Cuts::eta > -4.9 && Cuts::pT > 0.1*GeV), + "FSSumETFwdCentrality"); + declare(ChargedFinalState(Cuts::eta > 2.09 && Cuts::eta < 3.84 && + Cuts::pT > 0.1*GeV), "MBB"); + declare(ChargedFinalState(Cuts::eta < -2.09 && Cuts::eta > -3.84 && + Cuts::pT > 0.1*GeV), "MBF"); + } + + /// Clone on the heap. + DEFAULT_RIVET_PROJ_CLONE(MC_pPbMinBiasTrigger); + +protected: + + /// Perform the projection on the Event + void project(const Event& event) { + pass(); + if ( applyProjection(event,"MBF").particles().empty() || + applyProjection(event,"MBB").particles().empty() ) + fail(); + } + + /// Compare projections + int compare(const Projection& p) const { + return mkNamedPCmp(p, "MBF") || mkNamedPCmp(p, "MBB"); + } + +}; + + +} + +#endif diff --git a/include/Rivet/Analysis.hh b/include/Rivet/Analysis.hh --- a/include/Rivet/Analysis.hh +++ b/include/Rivet/Analysis.hh @@ -1,1150 +1,1209 @@ // -*- C++ -*- #ifndef RIVET_Analysis_HH #define RIVET_Analysis_HH #include "Rivet/Config/RivetCommon.hh" #include "Rivet/AnalysisInfo.hh" #include "Rivet/Event.hh" #include "Rivet/Projection.hh" #include "Rivet/ProjectionApplier.hh" #include "Rivet/ProjectionHandler.hh" #include "Rivet/AnalysisLoader.hh" #include "Rivet/Tools/Cuts.hh" #include "Rivet/Tools/Logging.hh" #include "Rivet/Tools/ParticleUtils.hh" #include "Rivet/Tools/BinnedHistogram.hh" #include "Rivet/Tools/RivetMT2.hh" #include "Rivet/Tools/RivetYODA.hh" +#include "Rivet/Tools/Percentile.hh" #include "Rivet/Projections/CentralityProjection.hh" /// @def vetoEvent /// Preprocessor define for vetoing events, including the log message and return. #define vetoEvent \ do { MSG_DEBUG("Vetoing event on line " << __LINE__ << " of " << __FILE__); return; } while(0) namespace Rivet { // Forward declaration class AnalysisHandler; /// @brief This is the base class of all analysis classes in Rivet. /// /// There are /// three virtual functions which should be implemented in base classes: /// /// void init() is called by Rivet before a run is started. Here the /// analysis class should book necessary histograms. The needed /// projections should probably rather be constructed in the /// constructor. /// /// void analyze(const Event&) is called once for each event. Here the /// analysis class should apply the necessary Projections and fill the /// histograms. /// /// void finalize() is called after a run is finished. Here the analysis /// class should do whatever manipulations are necessary on the /// histograms. Writing the histograms to a file is, however, done by /// the Rivet class. 
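/// As a purely illustrative sketch (the class name, projection name,
/// histogram name and cuts below are placeholders, not part of Rivet
/// itself), a typical derived analysis looks roughly like this:
///
/// @code
/// class MY_TEST_ANALYSIS : public Analysis {
/// public:
///   MY_TEST_ANALYSIS() : Analysis("MY_TEST_ANALYSIS") {}
///
///   void init() {
///     // Project out charged particles in a fiducial region and book a histogram
///     declare(ChargedFinalState(Cuts::abseta < 2.5 && Cuts::pT > 0.5*GeV), "CFS");
///     _h_nch = bookHisto1D("Nch", 50, -0.5, 99.5);
///   }
///
///   void analyze(const Event& event) {
///     const Particles& parts = apply<ChargedFinalState>(event, "CFS").particles();
///     if (parts.empty()) vetoEvent;
///     _h_nch->fill(parts.size(), event.weight());
///   }
///
///   void finalize() {
///     // Normalise to a differential cross-section
///     scale(_h_nch, crossSection()/sumOfWeights());
///   }
///
/// private:
///   Histo1DPtr _h_nch;
/// };
///
/// // In the plugin .cc file:
/// DECLARE_RIVET_PLUGIN(MY_TEST_ANALYSIS);
/// @endcode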
class Analysis : public ProjectionApplier { /// The AnalysisHandler is a friend. friend class AnalysisHandler; public: /// @name Standard constructors and destructors. //@{ // /// The default constructor. // Analysis(); /// Constructor Analysis(const std::string& name); /// The destructor. virtual ~Analysis() {} //@} public: /// @name Main analysis methods //@{ /// Initialize this analysis object. A concrete class should here /// book all necessary histograms. An overridden function must make /// sure it first calls the base class function. virtual void init() { } /// Analyze one event. A concrete class should here apply the /// necessary projections on the \a event and fill the relevant /// histograms. An overridden function must make sure it first calls /// the base class function. virtual void analyze(const Event& event) = 0; /// Finalize this analysis object. A concrete class should here make /// all necessary operations on the histograms. Writing the /// histograms to a file is, however, done by the Rivet class. An /// overridden function must make sure it first calls the base class /// function. virtual void finalize() { } //@} public: /// @name Metadata /// Metadata is used for querying from the command line and also for /// building web pages and the analysis pages in the Rivet manual. //@{ /// Get the actual AnalysisInfo object in which all this metadata is stored. const AnalysisInfo& info() const { assert(_info && "No AnalysisInfo object :O"); return *_info; } /// @brief Get the name of the analysis. /// /// By default this is computed by combining the results of the /// experiment, year and Spires ID metadata methods and you should /// only override it if there's a good reason why those won't /// work. If options has been set for this instance, a /// corresponding string is appended at the end. virtual std::string name() const { return ( (info().name().empty()) ? _defaultname : info().name() ) + _optstring; } // get name of reference data file, which could be different from plugin name virtual std::string getRefDataName() const { return (info().getRefDataName().empty()) ? _defaultname : info().getRefDataName(); } // set name of reference data file, which could be different from plugin name virtual void setRefDataName(const std::string& ref_data="") { info().setRefDataName(!ref_data.empty() ? ref_data : name()); } /// Get the Inspire ID code for this analysis. virtual std::string inspireId() const { return info().inspireId(); } /// Get the SPIRES ID code for this analysis (~deprecated). virtual std::string spiresId() const { return info().spiresId(); } /// @brief Names & emails of paper/analysis authors. /// /// Names and email of authors in 'NAME \' format. The first /// name in the list should be the primary contact person. virtual std::vector authors() const { return info().authors(); } /// @brief Get a short description of the analysis. /// /// Short (one sentence) description used as an index entry. /// Use @a description() to provide full descriptive paragraphs /// of analysis details. virtual std::string summary() const { return info().summary(); } /// @brief Get a full description of the analysis. /// /// Full textual description of this analysis, what it is useful for, /// what experimental techniques are applied, etc. Should be treated /// as a chunk of restructuredText (http://docutils.sourceforge.net/rst.html), /// with equations to be rendered as LaTeX with amsmath operators. 
virtual std::string description() const { return info().description(); } /// @brief Information about the events needed as input for this analysis. /// /// Event types, energies, kinematic cuts, particles to be considered /// stable, etc. etc. Should be treated as a restructuredText bullet list /// (http://docutils.sourceforge.net/rst.html) virtual std::string runInfo() const { return info().runInfo(); } /// Experiment which performed and published this analysis. virtual std::string experiment() const { return info().experiment(); } /// Collider on which the experiment ran. virtual std::string collider() const { return info().collider(); } /// When the original experimental analysis was published. virtual std::string year() const { return info().year(); } /// The luminosity in inverse femtobarn virtual std::string luminosityfb() const { return info().luminosityfb(); } /// Journal, and preprint references. virtual std::vector references() const { return info().references(); } /// BibTeX citation key for this article. virtual std::string bibKey() const { return info().bibKey(); } /// BibTeX citation entry for this article. virtual std::string bibTeX() const { return info().bibTeX(); } /// Whether this analysis is trusted (in any way!) virtual std::string status() const { return (info().status().empty()) ? "UNVALIDATED" : info().status(); } /// Any work to be done on this analysis. virtual std::vector todos() const { return info().todos(); } /// Return the allowed pairs of incoming beams required by this analysis. virtual const std::vector& requiredBeams() const { return info().beams(); } /// Declare the allowed pairs of incoming beams required by this analysis. virtual Analysis& setRequiredBeams(const std::vector& requiredBeams) { info().setBeams(requiredBeams); return *this; } /// Sets of valid beam energy pairs, in GeV virtual const std::vector >& requiredEnergies() const { return info().energies(); } /// Get vector of analysis keywords virtual const std::vector & keywords() const { return info().keywords(); } /// Declare the list of valid beam energy pairs, in GeV virtual Analysis& setRequiredEnergies(const std::vector >& requiredEnergies) { info().setEnergies(requiredEnergies); return *this; } /// Return true if this analysis needs to know the process cross-section. /// @todo Remove this and require HepMC >= 2.06 bool needsCrossSection() const { return info().needsCrossSection(); } /// Declare whether this analysis needs to know the process cross-section from the generator. /// @todo Remove this and require HepMC >= 2.06 Analysis& setNeedsCrossSection(bool needed=true) { info().setNeedsCrossSection(needed); return *this; } //@} /// @name Internal metadata modifying methods //@{ /// Get the actual AnalysisInfo object in which all this metadata is stored (non-const). 
AnalysisInfo& info() { assert(_info && "No AnalysisInfo object :O"); return *_info; } //@} /// @name Run conditions //@{ /// Incoming beams for this run const ParticlePair& beams() const; /// Incoming beam IDs for this run const PdgIdPair beamIds() const; /// Centre of mass energy for this run double sqrtS() const; //@} /// @name Analysis / beam compatibility testing //@{ /// Check if analysis is compatible with the provided beam particle IDs and energies bool isCompatible(const ParticlePair& beams) const; /// Check if analysis is compatible with the provided beam particle IDs and energies bool isCompatible(PdgId beam1, PdgId beam2, double e1, double e2) const; /// Check if analysis is compatible with the provided beam particle IDs and energies bool isCompatible(const PdgIdPair& beams, const std::pair& energies) const; //@} /// Set the cross section from the generator Analysis& setCrossSection(double xs); //, double xserr=0.0); /// Access the controlling AnalysisHandler object. AnalysisHandler& handler() const { return *_analysishandler; } protected: /// Get a Log object based on the name() property of the calling analysis object. Log& getLog() const; /// Get the process cross-section in pb. Throws if this hasn't been set. double crossSection() const; /// Get the process cross-section per generated event in pb. Throws if this /// hasn't been set. double crossSectionPerEvent() const; /// @brief Get the number of events seen (via the analysis handler). /// /// @note Use in the finalize phase only. size_t numEvents() const; /// @brief Get the sum of event weights seen (via the analysis handler). /// /// @note Use in the finalize phase only. double sumW() const; /// Alias double sumOfWeights() const { return sumW(); } /// @brief Get the sum of squared event weights seen (via the analysis handler). /// /// @note Use in the finalize phase only. double sumW2() const; protected: /// @name Histogram paths //@{ /// Get the canonical histogram "directory" path for this analysis. const std::string histoDir() const; /// Get the canonical histogram path for the named histogram in this analysis. const std::string histoPath(const std::string& hname) const; /// Get the canonical histogram path for the numbered histogram in this analysis. const std::string histoPath(unsigned int datasetId, unsigned int xAxisId, unsigned int yAxisId) const; /// Get the internal histogram name for given d, x and y (cf. HepData) const std::string mkAxisCode(unsigned int datasetId, unsigned int xAxisId, unsigned int yAxisId) const; /// Alias /// @deprecated Prefer the "mk" form, consistent with other "making function" names const std::string makeAxisCode(unsigned int datasetId, unsigned int xAxisId, unsigned int yAxisId) const { return mkAxisCode(datasetId, xAxisId, yAxisId); } //@} /// @name Histogram reference data //@{ /// Get reference data for a named histo /// @todo SFINAE to ensure that the type inherits from YODA::AnalysisObject? template const T& refData(const string& hname) const { _cacheRefData(); MSG_TRACE("Using histo bin edges for " << name() << ":" << hname); if (!_refdata[hname]) { MSG_ERROR("Can't find reference histogram " << hname); throw Exception("Reference data " + hname + " not found."); } return dynamic_cast(*_refdata[hname]); } /// Get reference data for a numbered histo /// @todo SFINAE to ensure that the type inherits from YODA::AnalysisObject? 
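/// @note The numbered form builds the histogram name in the HepData-style
/// "dNN-xNN-yNN" convention via makeAxisCode, so e.g. the IDs (2, 1, 8) are
/// expected to resolve to "d02-x01-y08" (cf. the /REF/MC_Cent_pPb_Eta
/// reference paths above). Shown purely as an illustration of the convention.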
template const T& refData(unsigned int datasetId, unsigned int xAxisId, unsigned int yAxisId) const { const string hname = makeAxisCode(datasetId, xAxisId, yAxisId); return refData(hname); } //@} /// @name Counter booking //@{ /// Book a counter. CounterPtr bookCounter(const std::string& name, const std::string& title=""); // const std::string& valtitle="" /// Book a counter, using a path generated from the dataset and axis ID codes /// /// The paper, dataset and x/y-axis IDs will be used to build the histo name in the HepData standard way. CounterPtr bookCounter(unsigned int datasetId, unsigned int xAxisId, unsigned int yAxisId, const std::string& title=""); // const std::string& valtitle="" //@} /// @name 1D histogram booking //@{ /// Book a 1D histogram with @a nbins uniformly distributed across the range @a lower - @a upper . Histo1DPtr bookHisto1D(const std::string& name, size_t nbins, double lower, double upper, const std::string& title="", const std::string& xtitle="", const std::string& ytitle=""); /// Book a 1D histogram with non-uniform bins defined by the vector of bin edges @a binedges . Histo1DPtr bookHisto1D(const std::string& name, const std::vector& binedges, const std::string& title="", const std::string& xtitle="", const std::string& ytitle=""); /// Book a 1D histogram with non-uniform bins defined by the vector of bin edges @a binedges . Histo1DPtr bookHisto1D(const std::string& name, const std::initializer_list& binedges, const std::string& title="", const std::string& xtitle="", const std::string& ytitle=""); /// Book a 1D histogram with binning from a reference scatter. Histo1DPtr bookHisto1D(const std::string& name, const Scatter2D& refscatter, const std::string& title="", const std::string& xtitle="", const std::string& ytitle=""); /// Book a 1D histogram, using the binnings in the reference data histogram. Histo1DPtr bookHisto1D(const std::string& name, const std::string& title="", const std::string& xtitle="", const std::string& ytitle=""); /// Book a 1D histogram, using the binnings in the reference data histogram. /// /// The paper, dataset and x/y-axis IDs will be used to build the histo name in the HepData standard way. Histo1DPtr bookHisto1D(unsigned int datasetId, unsigned int xAxisId, unsigned int yAxisId, const std::string& title="", const std::string& xtitle="", const std::string& ytitle=""); //@} /// @name 2D histogram booking //@{ /// Book a 2D histogram with @a nxbins and @a nybins uniformly /// distributed across the ranges @a xlower - @a xupper and @a /// ylower - @a yupper respectively along the x- and y-axis. Histo2DPtr bookHisto2D(const std::string& name, size_t nxbins, double xlower, double xupper, size_t nybins, double ylower, double yupper, const std::string& title="", const std::string& xtitle="", const std::string& ytitle="", const std::string& ztitle=""); /// Book a 2D histogram with non-uniform bins defined by the /// vectors of bin edges @a xbinedges and @a ybinedges. Histo2DPtr bookHisto2D(const std::string& name, const std::vector& xbinedges, const std::vector& ybinedges, const std::string& title="", const std::string& xtitle="", const std::string& ytitle="", const std::string& ztitle=""); /// Book a 2D histogram with non-uniform bins defined by the /// vectors of bin edges @a xbinedges and @a ybinedges. 
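/// (Illustration only: a call such as bookHisto2D("obs", {0., 1., 2., 5.}, {0., 0.5, 1.0})
/// would use these explicit edge lists; "obs" is a placeholder histogram name.)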
Histo2DPtr bookHisto2D(const std::string& name, const std::initializer_list& xbinedges, const std::initializer_list& ybinedges, const std::string& title="", const std::string& xtitle="", const std::string& ytitle="", const std::string& ztitle=""); /// Book a 2D histogram with binning from a reference scatter. Histo2DPtr bookHisto2D(const std::string& name, const Scatter3D& refscatter, const std::string& title="", const std::string& xtitle="", const std::string& ytitle="", const std::string& ztitle=""); /// Book a 2D histogram, using the binnings in the reference data histogram. Histo2DPtr bookHisto2D(const std::string& name, const std::string& title="", const std::string& xtitle="", const std::string& ytitle="", const std::string& ztitle=""); /// Book a 2D histogram, using the binnings in the reference data histogram. /// /// The paper, dataset and x/y-axis IDs will be used to build the histo name in the HepData standard way. Histo2DPtr bookHisto2D(unsigned int datasetId, unsigned int xAxisId, unsigned int yAxisId, const std::string& title="", const std::string& xtitle="", const std::string& ytitle="", const std::string& ztitle=""); //@} /// @name 1D profile histogram booking //@{ /// Book a 1D profile histogram with @a nbins uniformly distributed across the range @a lower - @a upper . Profile1DPtr bookProfile1D(const std::string& name, size_t nbins, double lower, double upper, const std::string& title="", const std::string& xtitle="", const std::string& ytitle=""); /// Book a 1D profile histogram with non-uniform bins defined by the vector of bin edges @a binedges . Profile1DPtr bookProfile1D(const std::string& name, const std::vector& binedges, const std::string& title="", const std::string& xtitle="", const std::string& ytitle=""); /// Book a 1D profile histogram with non-uniform bins defined by the vector of bin edges @a binedges . Profile1DPtr bookProfile1D(const std::string& name, const std::initializer_list& binedges, const std::string& title="", const std::string& xtitle="", const std::string& ytitle=""); /// Book a 1D profile histogram with binning from a reference scatter. Profile1DPtr bookProfile1D(const std::string& name, const Scatter2D& refscatter, const std::string& title="", const std::string& xtitle="", const std::string& ytitle=""); /// Book a 1D profile histogram, using the binnings in the reference data histogram. Profile1DPtr bookProfile1D(const std::string& name, const std::string& title="", const std::string& xtitle="", const std::string& ytitle=""); /// Book a 1D profile histogram, using the binnings in the reference data histogram. /// /// The paper, dataset and x/y-axis IDs will be used to build the histo name in the HepData standard way. Profile1DPtr bookProfile1D(unsigned int datasetId, unsigned int xAxisId, unsigned int yAxisId, const std::string& title="", const std::string& xtitle="", const std::string& ytitle=""); //@} /// @name 2D profile histogram booking //@{ /// Book a 2D profile histogram with @a nxbins and @a nybins uniformly /// distributed across the ranges @a xlower - @a xupper and @a ylower - @a /// yupper respectively along the x- and y-axis. Profile2DPtr bookProfile2D(const std::string& name, size_t nxbins, double xlower, double xupper, size_t nybins, double ylower, double yupper, const std::string& title="", const std::string& xtitle="", const std::string& ytitle="", const std::string& ztitle=""); /// Book a 2D profile histogram with non-uniform bins defined by the vectorx /// of bin edges @a xbinedges and @a ybinedges. 
Profile2DPtr bookProfile2D(const std::string& name, const std::vector& xbinedges, const std::vector& ybinedges, const std::string& title="", const std::string& xtitle="", const std::string& ytitle="", const std::string& ztitle=""); /// Book a 2D profile histogram with non-uniform bins defined by the vectorx /// of bin edges @a xbinedges and @a ybinedges. Profile2DPtr bookProfile2D(const std::string& name, const std::initializer_list& xbinedges, const std::initializer_list& ybinedges, const std::string& title="", const std::string& xtitle="", const std::string& ytitle="", const std::string& ztitle=""); /// Book a 2D profile histogram with binning from a reference scatter. Profile2DPtr bookProfile2D(const std::string& name, const Scatter3D& refscatter, const std::string& title="", const std::string& xtitle="", const std::string& ytitle="", const std::string& ztitle=""); /// Book a 2D profile histogram, using the binnings in the reference data histogram. Profile2DPtr bookProfile2D(const std::string& name, const std::string& title="", const std::string& xtitle="", const std::string& ytitle="", const std::string& ztitle=""); /// Book a 2D profile histogram, using the binnings in the reference data histogram. /// /// The paper, dataset and x/y-axis IDs will be used to build the histo name in the HepData standard way. Profile2DPtr bookProfile2D(unsigned int datasetId, unsigned int xAxisId, unsigned int yAxisId, const std::string& title="", const std::string& xtitle="", const std::string& ytitle="", const std::string& ztitle=""); //@} /// @name 2D scatter booking //@{ /// @brief Book a 2-dimensional data point set with the given name. /// /// @note Unlike histogram booking, scatter booking by default makes no /// attempt to use reference data to pre-fill the data object. If you want /// this, which is sometimes useful e.g. when the x-position is not really /// meaningful and can't be extracted from the data, then set the @a /// copy_pts parameter to true. This creates points to match the reference /// data's x values and errors, but with the y values and errors zeroed... /// assuming that there is a reference histo with the same name: if there /// isn't, an exception will be thrown. Scatter2DPtr bookScatter2D(const std::string& name, bool copy_pts=false, const std::string& title="", const std::string& xtitle="", const std::string& ytitle=""); /// @brief Book a 2-dimensional data point set, using the binnings in the reference data histogram. /// /// The paper, dataset and x/y-axis IDs will be used to build the histo name in the HepData standard way. /// /// @note Unlike histogram booking, scatter booking by default makes no /// attempt to use reference data to pre-fill the data object. If you want /// this, which is sometimes useful e.g. when the x-position is not really /// meaningful and can't be extracted from the data, then set the @a /// copy_pts parameter to true. This creates points to match the reference /// data's x values and errors, but with the y values and errors zeroed. Scatter2DPtr bookScatter2D(unsigned int datasetId, unsigned int xAxisId, unsigned int yAxisId, bool copy_pts=false, const std::string& title="", const std::string& xtitle="", const std::string& ytitle=""); /// @brief Book a 2-dimensional data point set with equally spaced x-points in a range. /// /// The y values and errors will be set to 0. 
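/// An illustrative sketch of the copy_pts behaviour described above, with hypothetical member names and d/x/y indices:
/// @code
/// // In init(): copy the reference x values and errors, zero the y values
/// _s_ratio = bookScatter2D(2, 1, 1, true);
/// // Later, e.g. in finalize(), fill it from two histograms
/// divide(_h_num, _h_den, _s_ratio);
/// @endcode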
Scatter2DPtr bookScatter2D(const std::string& name, size_t npts, double lower, double upper, const std::string& title="", const std::string& xtitle="", const std::string& ytitle=""); /// @brief Book a 2-dimensional data point set based on provided contiguous "bin edges". /// /// The y values and errors will be set to 0. Scatter2DPtr bookScatter2D(const std::string& hname, const std::vector& binedges, const std::string& title, const std::string& xtitle, const std::string& ytitle); //@} public: /// @name Accessing options for this Analysis instance //@{ /// Get an option for this analysis instance as a string. std::string getOption(std::string optname) { if ( _options.find(optname) != _options.end() ) return _options.find(optname)->second; return ""; } /// Get an option for this analysis instance converted to a /// specific type (given by the specified @a def value). template T getOption(std::string optname, T def) { if (_options.find(optname) == _options.end()) return def; std::stringstream ss; ss << _options.find(optname)->second; T ret; ss >> ret; return ret; } //@} /// @brief Book a CentralityProjection /// /// Using a SingleValueProjection, @a proj, giving the value of an /// experimental observable to be used as a centrality estimator, /// book a CentralityProjection based on the experimentally /// measured percentiles of this observable (as given by the /// reference data for the @a calHistName histogram in the @a /// calAnaName analysis). If a preloaded file with the output of a /// run using the @a calAnaName analysis contains a valid /// generated @a calHistName histogram, it will be used as an /// optional percentile binning. Also, if this preloaded file /// contains a histogram with the name @a calHistName with an /// appended "_IMP", this histogram will be used to add an optional /// centrality percentile based on the generated impact /// parameter. If @a increasing is true, a low (high) value of @a proj /// is assumed to correspond to a more peripheral (central) event. const CentralityProjection& declareCentrality(const SingleValueProjection &proj, string calAnaName, string calHistName, const string projName, bool increasing = false); + /// @brief Book a Percentile wrapper around AnalysisObjects. + /// + /// Based on a previously registered CentralityProjection named @a + /// projName book one AnalysisObject for each @a centralityBin and + /// name them according to the corresponding code in the @a ref + /// vector. + template + Percentile bookPercentile(string projName, + vector > centralityBins, + vector > ref) { + typedef typename ReferenceTraits::RefT RefT; + const CentralityProjection & proj = + getProjection(projName); + Percentile pctl(this, projName, proj.projections().size()); + + const int nCent = centralityBins.size(); + for (int iCent = 0; iCent < nCent; ++iCent) { + const string axisCode = makeAxisCode(std::get<0>(ref[iCent]), + std::get<1>(ref[iCent]), + std::get<2>(ref[iCent])); + const RefT & refscatter = refData(axisCode); + shared_ptr temp = make_shared(refscatter, histoPath(axisCode)); + + vector aos = + pctl.add(proj, *temp, centralityBins[iCent]); + for ( auto ao : aos ) addAnalysisObject(ao); + } + return pctl; + } + + /// @brief Book Percentile wrappers around AnalysisObjects. + /// + /// Based on a previously registered CentralityProjection named @a + /// projName book one (or several) AnalysisObject(s) named + /// according to @a ref where the x-axis will be filled according + /// to the percentile output(s) of @a projName. 
+ template + PercentileXaxis bookPercentileXaxis(string projName, + tuple ref) { + typedef typename ReferenceTraits::RefT RefT; + const CentralityProjection & proj = + getProjection(projName); + PercentileXaxis pctl(this, projName, proj.projections().size()); + + const string axisCode = makeAxisCode(std::get<0>(ref), + std::get<1>(ref), + std::get<2>(ref)); + const RefT& refscatter = refData(axisCode); + + shared_ptr temp = make_shared(refscatter, histoPath(axisCode)); + + vector aos = pctl.add(proj, *temp); + for ( auto ao : aos ) addAnalysisObject(ao); + + return pctl; + } + + /// @name Analysis object manipulation /// @todo Should really be protected: only public to keep BinnedHistogram happy for now... //@{ /// Multiplicatively scale the given counter, @a cnt, by factor @s factor. void scale(CounterPtr cnt, double factor); /// Multiplicatively scale the given counters, @a cnts, by factor @s factor. /// @note Constness intentional, if weird, to allow passing rvalue refs of smart ptrs (argh) /// @todo Use SFINAE for a generic iterable of CounterPtrs void scale(const std::vector& cnts, double factor) { for (auto& c : cnts) scale(c, factor); } /// @todo YUCK! template void scale(const CounterPtr (&cnts)[array_size], double factor) { // for (size_t i = 0; i < std::extent::value; ++i) scale(cnts[i], factor); for (auto& c : cnts) scale(c, factor); } /// Normalize the given histogram, @a histo, to area = @a norm. void normalize(Histo1DPtr histo, double norm=1.0, bool includeoverflows=true); /// Normalize the given histograms, @a histos, to area = @a norm. /// @note Constness intentional, if weird, to allow passing rvalue refs of smart ptrs (argh) /// @todo Use SFINAE for a generic iterable of Histo1DPtrs void normalize(const std::vector& histos, double norm=1.0, bool includeoverflows=true) { for (auto& h : histos) normalize(h, norm, includeoverflows); } /// @todo YUCK! template void normalize(const Histo1DPtr (&histos)[array_size], double norm=1.0, bool includeoverflows=true) { for (auto& h : histos) normalize(h, norm, includeoverflows); } /// Multiplicatively scale the given histogram, @a histo, by factor @s factor. void scale(Histo1DPtr histo, double factor); /// Multiplicatively scale the given histograms, @a histos, by factor @s factor. /// @note Constness intentional, if weird, to allow passing rvalue refs of smart ptrs (argh) /// @todo Use SFINAE for a generic iterable of Histo1DPtrs void scale(const std::vector& histos, double factor) { for (auto& h : histos) scale(h, factor); } /// @todo YUCK! template void scale(const Histo1DPtr (&histos)[array_size], double factor) { for (auto& h : histos) scale(h, factor); } /// Normalize the given histogram, @a histo, to area = @a norm. void normalize(Histo2DPtr histo, double norm=1.0, bool includeoverflows=true); /// Normalize the given histograms, @a histos, to area = @a norm. /// @note Constness intentional, if weird, to allow passing rvalue refs of smart ptrs (argh) /// @todo Use SFINAE for a generic iterable of Histo2DPtrs void normalize(const std::vector& histos, double norm=1.0, bool includeoverflows=true) { for (auto& h : histos) normalize(h, norm, includeoverflows); } /// @todo YUCK! template void normalize(const Histo2DPtr (&histos)[array_size], double norm=1.0, bool includeoverflows=true) { for (auto& h : histos) normalize(h, norm, includeoverflows); } /// Multiplicatively scale the given histogram, @a histo, by factor @s factor. 
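/// An illustrative sketch of how declareCentrality() and bookPercentile() might be used together in init(). The estimator projection MySumETFwd, the calibration analysis and histogram names, the centrality edges and the d/x/y indices are all hypothetical placeholders.
/// @code
/// void MyHeavyIonAnalysis::init() {
///   declareCentrality(MySumETFwd(), "MY_CENT_CALIB", "SumETFwd", "CENT");
///   _p_eta = bookPercentile<Histo1D>("CENT",
///              { {0.0, 10.0}, {10.0, 30.0}, {30.0, 100.0} },
///              { std::make_tuple(1, 1, 1),
///                std::make_tuple(1, 1, 2),
///                std::make_tuple(1, 1, 3) });
/// }
/// @endcode
/// Here _p_eta would be a Percentile<Histo1D> member: one analysis object per centrality bin, each mapped to a reference histogram via its d/x/y code.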
void scale(Histo2DPtr histo, double factor); /// Multiplicatively scale the given histograms, @a histos, by factor @s factor. /// @note Constness intentional, if weird, to allow passing rvalue refs of smart ptrs (argh) /// @todo Use SFINAE for a generic iterable of Histo2DPtrs void scale(const std::vector& histos, double factor) { for (auto& h : histos) scale(h, factor); } /// @todo YUCK! template void scale(const Histo2DPtr (&histos)[array_size], double factor) { for (auto& h : histos) scale(h, factor); } /// Helper for counter division. /// /// @note Assigns to the (already registered) output scatter, @a s. Preserves the path information of the target. void divide(CounterPtr c1, CounterPtr c2, Scatter1DPtr s) const; /// Helper for histogram division with raw YODA objects. /// /// @note Assigns to the (already registered) output scatter, @a s. Preserves the path information of the target. void divide(const YODA::Counter& c1, const YODA::Counter& c2, Scatter1DPtr s) const; /// Helper for histogram division. /// /// @note Assigns to the (already registered) output scatter, @a s. Preserves the path information of the target. void divide(Histo1DPtr h1, Histo1DPtr h2, Scatter2DPtr s) const; /// Helper for histogram division with raw YODA objects. /// /// @note Assigns to the (already registered) output scatter, @a s. Preserves the path information of the target. void divide(const YODA::Histo1D& h1, const YODA::Histo1D& h2, Scatter2DPtr s) const; /// Helper for profile histogram division. /// /// @note Assigns to the (already registered) output scatter, @a s. Preserves the path information of the target. void divide(Profile1DPtr p1, Profile1DPtr p2, Scatter2DPtr s) const; /// Helper for profile histogram division with raw YODA objects. /// /// @note Assigns to the (already registered) output scatter, @a s. Preserves the path information of the target. void divide(const YODA::Profile1D& p1, const YODA::Profile1D& p2, Scatter2DPtr s) const; /// Helper for 2D histogram division. /// /// @note Assigns to the (already registered) output scatter, @a s. Preserves the path information of the target. void divide(Histo2DPtr h1, Histo2DPtr h2, Scatter3DPtr s) const; /// Helper for 2D histogram division with raw YODA objects. /// /// @note Assigns to the (already registered) output scatter, @a s. Preserves the path information of the target. void divide(const YODA::Histo2D& h1, const YODA::Histo2D& h2, Scatter3DPtr s) const; /// Helper for 2D profile histogram division. /// /// @note Assigns to the (already registered) output scatter, @a s. Preserves the path information of the target. void divide(Profile2DPtr p1, Profile2DPtr p2, Scatter3DPtr s) const; /// Helper for 2D profile histogram division with raw YODA objects /// /// @note Assigns to the (already registered) output scatter, @a s. Preserves the path information of the target. void divide(const YODA::Profile2D& p1, const YODA::Profile2D& p2, Scatter3DPtr s) const; /// Helper for histogram efficiency calculation. /// /// @note Assigns to the (already registered) output scatter, @a s. Preserves the path information of the target. void efficiency(Histo1DPtr h1, Histo1DPtr h2, Scatter2DPtr s) const; /// Helper for histogram efficiency calculation. /// /// @note Assigns to the (already registered) output scatter, @a s. Preserves the path information of the target. void efficiency(const YODA::Histo1D& h1, const YODA::Histo1D& h2, Scatter2DPtr s) const; /// Helper for histogram asymmetry calculation. 
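/// An illustrative sketch of a typical finalize() built from the helpers in this group; the member names are hypothetical.
/// @code
/// void MyAnalysis::finalize() {
///   // Turn an event count into a differential cross-section
///   // (crossSection() is returned in pb, cf. the accessor above)
///   scale(_h_pt, crossSection()/sumOfWeights());
///   // Unit-normalise a shape observable
///   normalize(_h_shape);
///   // Fill a pre-booked scatter with the ratio of two histograms
///   divide(_h_num, _h_den, _s_ratio);
/// }
/// @endcode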
/// /// @note Assigns to the (already registered) output scatter, @a s. Preserves the path information of the target. void asymm(Histo1DPtr h1, Histo1DPtr h2, Scatter2DPtr s) const; /// Helper for histogram asymmetry calculation. /// /// @note Assigns to the (already registered) output scatter, @a s. Preserves the path information of the target. void asymm(const YODA::Histo1D& h1, const YODA::Histo1D& h2, Scatter2DPtr s) const; /// Helper for converting a differential histo to an integral one. /// /// @note Assigns to the (already registered) output scatter, @a s. Preserves the path information of the target. void integrate(Histo1DPtr h, Scatter2DPtr s) const; /// Helper for converting a differential histo to an integral one. /// /// @note Assigns to the (already registered) output scatter, @a s. Preserves the path information of the target. void integrate(const Histo1D& h, Scatter2DPtr s) const; //@} public: /// List of registered analysis data objects const vector& analysisObjects() const { return _analysisobjects; } protected: /// @name Data object registration, retrieval, and removal //@{ /// Register a data object in the histogram system void addAnalysisObject(AnalysisObjectPtr ao); /// Get a data object from the histogram system template const std::shared_ptr getAnalysisObject(const std::string& name) const { foreach (const AnalysisObjectPtr& ao, analysisObjects()) { if (ao->path() == histoPath(name)) return dynamic_pointer_cast(ao); } throw Exception("Data object " + histoPath(name) + " not found"); } /// Get a data object from the histogram system (non-const) template std::shared_ptr getAnalysisObject(const std::string& name) { foreach (const AnalysisObjectPtr& ao, analysisObjects()) { if (ao->path() == histoPath(name)) return dynamic_pointer_cast(ao); } throw Exception("Data object " + histoPath(name) + " not found"); } /// Unregister a data object from the histogram system (by name) void removeAnalysisObject(const std::string& path); /// Unregister a data object from the histogram system (by pointer) void removeAnalysisObject(AnalysisObjectPtr ao); /// Get all data object from the AnalysisHandler. vector getAllData(bool includeorphans) const; /// Get a data object from another analysis (e.g. preloaded /// calibration histogram). 
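/// An illustrative sketch of fetching a pre-loaded object from another analysis (e.g. a centrality calibration); the analysis and histogram names are placeholders. The returned pointer is null if nothing was pre-loaded, so it should be checked before use.
/// @code
/// // Typically called from init()
/// Histo1DPtr calib = getAnalysisObject<Histo1D>("MY_CENT_CALIB", "SumETFwd");
/// if ( calib && calib->numEntries() > 0 ) {
///   MSG_INFO("Using pre-loaded calibration " << calib->path());
/// }
/// @endcode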
/// Get a data object from the histogram system (non-const) template std::shared_ptr getAnalysisObject(const std::string & ananame, const std::string& name) { std::string path = "/" + ananame + "/" + name; for ( AnalysisObjectPtr ao : getAllData(true) ) { if ( ao->path() == path ) return dynamic_pointer_cast(ao); } return std::shared_ptr(); } /// Get a named Histo1D object from the histogram system const Histo1DPtr getHisto1D(const std::string& name) const { return getAnalysisObject(name); } /// Get a named Histo1D object from the histogram system (non-const) Histo1DPtr getHisto1D(const std::string& name) { return getAnalysisObject(name); } /// Get a Histo1D object from the histogram system by axis ID codes (non-const) const Histo1DPtr getHisto1D(unsigned int datasetId, unsigned int xAxisId, unsigned int yAxisId) const { return getAnalysisObject(makeAxisCode(datasetId, xAxisId, yAxisId)); } /// Get a Histo1D object from the histogram system by axis ID codes (non-const) Histo1DPtr getHisto1D(unsigned int datasetId, unsigned int xAxisId, unsigned int yAxisId) { return getAnalysisObject(makeAxisCode(datasetId, xAxisId, yAxisId)); } /// Get a named Histo2D object from the histogram system const Histo2DPtr getHisto2D(const std::string& name) const { return getAnalysisObject(name); } /// Get a named Histo2D object from the histogram system (non-const) Histo2DPtr getHisto2D(const std::string& name) { return getAnalysisObject(name); } /// Get a Histo2D object from the histogram system by axis ID codes (non-const) const Histo2DPtr getHisto2D(unsigned int datasetId, unsigned int xAxisId, unsigned int yAxisId) const { return getAnalysisObject(makeAxisCode(datasetId, xAxisId, yAxisId)); } /// Get a Histo2D object from the histogram system by axis ID codes (non-const) Histo2DPtr getHisto2D(unsigned int datasetId, unsigned int xAxisId, unsigned int yAxisId) { return getAnalysisObject(makeAxisCode(datasetId, xAxisId, yAxisId)); } /// Get a named Profile1D object from the histogram system const Profile1DPtr getProfile1D(const std::string& name) const { return getAnalysisObject(name); } /// Get a named Profile1D object from the histogram system (non-const) Profile1DPtr getProfile1D(const std::string& name) { return getAnalysisObject(name); } /// Get a Profile1D object from the histogram system by axis ID codes (non-const) const Profile1DPtr getProfile1D(unsigned int datasetId, unsigned int xAxisId, unsigned int yAxisId) const { return getAnalysisObject(makeAxisCode(datasetId, xAxisId, yAxisId)); } /// Get a Profile1D object from the histogram system by axis ID codes (non-const) Profile1DPtr getProfile1D(unsigned int datasetId, unsigned int xAxisId, unsigned int yAxisId) { return getAnalysisObject(makeAxisCode(datasetId, xAxisId, yAxisId)); } /// Get a named Profile2D object from the histogram system const Profile2DPtr getProfile2D(const std::string& name) const { return getAnalysisObject(name); } /// Get a named Profile2D object from the histogram system (non-const) Profile2DPtr getProfile2D(const std::string& name) { return getAnalysisObject(name); } /// Get a Profile2D object from the histogram system by axis ID codes (non-const) const Profile2DPtr getProfile2D(unsigned int datasetId, unsigned int xAxisId, unsigned int yAxisId) const { return getAnalysisObject(makeAxisCode(datasetId, xAxisId, yAxisId)); } /// Get a Profile2D object from the histogram system by axis ID codes (non-const) Profile2DPtr getProfile2D(unsigned int datasetId, unsigned int xAxisId, unsigned int yAxisId) { return 
getAnalysisObject(makeAxisCode(datasetId, xAxisId, yAxisId)); } /// Get a named Scatter2D object from the histogram system const Scatter2DPtr getScatter2D(const std::string& name) const { return getAnalysisObject(name); } /// Get a named Scatter2D object from the histogram system (non-const) Scatter2DPtr getScatter2D(const std::string& name) { return getAnalysisObject(name); } /// Get a Scatter2D object from the histogram system by axis ID codes (non-const) const Scatter2DPtr getScatter2D(unsigned int datasetId, unsigned int xAxisId, unsigned int yAxisId) const { return getAnalysisObject(makeAxisCode(datasetId, xAxisId, yAxisId)); } /// Get a Scatter2D object from the histogram system by axis ID codes (non-const) Scatter2DPtr getScatter2D(unsigned int datasetId, unsigned int xAxisId, unsigned int yAxisId) { return getAnalysisObject(makeAxisCode(datasetId, xAxisId, yAxisId)); } //@} private: /// Name passed to constructor (used to find .info analysis data file, and as a fallback) string _defaultname; /// Pointer to analysis metadata object unique_ptr _info; /// Storage of all plot objects /// @todo Make this a map for fast lookup by path? vector _analysisobjects; /// @name Cross-section variables //@{ double _crossSection; bool _gotCrossSection; //@} /// The controlling AnalysisHandler object. AnalysisHandler* _analysishandler; /// Collection of cached refdata to speed up many autobookings: the /// reference data file should only be read once. mutable std::map _refdata; /// Options the (this instance of) the analysis map _options; /// The string of options. string _optstring; private: /// @name Utility functions //@{ /// Get the reference data for this paper and cache it. void _cacheRefData() const; //@} /// The assignment operator is private and must never be called. /// In fact, it should not even be implemented. Analysis& operator=(const Analysis&); }; } // Include definition of analysis plugin system so that analyses automatically see it when including Analysis.hh #include "Rivet/AnalysisBuilder.hh" /// @def DECLARE_RIVET_PLUGIN /// Preprocessor define to prettify the global-object plugin hook mechanism. #define DECLARE_RIVET_PLUGIN(clsname) Rivet::AnalysisBuilder plugin_ ## clsname /// @def DECLARE_ALIASED_RIVET_PLUGIN /// Preprocessor define to prettify the global-object plugin hook mechanism, with an extra alias name for this analysis. 
// #define DECLARE_ALIASED_RIVET_PLUGIN(clsname, alias) Rivet::AnalysisBuilder plugin_ ## clsname ## ( ## #alias ## ) #define DECLARE_ALIASED_RIVET_PLUGIN(clsname, alias) DECLARE_RIVET_PLUGIN(clsname)( #alias ) /// @def DEFAULT_RIVET_ANALYSIS_CONSTRUCTOR /// Preprocessor define to prettify the manky constructor with name string argument #define DEFAULT_RIVET_ANALYSIS_CONSTRUCTOR(clsname) clsname() : Analysis(# clsname) {} /// @def DEFAULT_RIVET_ANALYSIS_CTOR /// Slight abbreviation for DEFAULT_RIVET_ANALYSIS_CONSTRUCTOR #define DEFAULT_RIVET_ANALYSIS_CTOR(clsname) DEFAULT_RIVET_ANALYSIS_CONSTRUCTOR(clsname) #endif diff --git a/include/Rivet/AnalysisHandler.hh b/include/Rivet/AnalysisHandler.hh --- a/include/Rivet/AnalysisHandler.hh +++ b/include/Rivet/AnalysisHandler.hh @@ -1,272 +1,308 @@ // -*- C++ -*- #ifndef RIVET_RivetHandler_HH #define RIVET_RivetHandler_HH #include "Rivet/Config/RivetCommon.hh" #include "Rivet/Particle.hh" #include "Rivet/AnalysisLoader.hh" #include "Rivet/Tools/RivetYODA.hh" namespace Rivet { // Forward declaration and smart pointer for Analysis class Analysis; typedef std::shared_ptr AnaHandle; // Needed to make smart pointers compare equivalent in the STL set struct CmpAnaHandle { bool operator() (const AnaHandle& a, const AnaHandle& b) const { return a.get() < b.get(); } }; /// A class which handles a number of analysis objects to be applied to /// generated events. An {@link Analysis}' AnalysisHandler is also responsible /// for handling the final writing-out of histograms. class AnalysisHandler { public: /// @name Constructors and destructors. */ //@{ /// Preferred constructor, with optional run name. AnalysisHandler(const string& runname=""); /// @brief Destructor /// The destructor is not virtual, as this class should not be inherited from. ~AnalysisHandler(); //@} private: /// Get a logger object. Log& getLog() const; public: /// @name Run properties //@{ /// Get the name of this run. string runName() const { return _runname; } /// Get the number of events seen. Should only really be used by external /// steering code or analyses in the finalize phase. size_t numEvents() const { return _eventcounter.numEntries(); } /// @brief Access the sum of the event weights seen /// /// This is the weighted equivalent of the number of events. It should only /// be used by external steering code or analyses in the finalize phase. double sumW() const { return _eventcounter.sumW(); } /// Access to the sum of squared-weights double sumW2() const { return _eventcounter.sumW2(); } /// @brief Compatibility alias for sumOfWeights /// /// @deprecated Prefer sumW double sumOfWeights() const { return sumW(); } /// @brief Set the sum of weights /// /// This is useful if Rivet is steered externally and /// the analyses are run for a sub-contribution of the events /// (but of course have to be normalised to the total sum of weights) /// /// @todo What about the sumW2 term? That needs to be set coherently. Need a /// new version, with all three N,sumW,sumW2 numbers (or a counter) /// supplied. /// /// @deprecated Weight sums are no longer tracked this way... void setSumOfWeights(const double& sum) { //_sumOfWeights = sum; throw Error("Can't set sum of weights independently, since it's now tracked by a Counter. " "Please contact the Rivet authors if you need this."); } /// Is cross-section information required by at least one child analysis? /// @deprecated Should no-longer be an issue: does any generator not write the cross-section? 
bool needCrossSection() const; /// Whether the handler knows about a cross-section. /// @deprecated Should no-longer be an issue: does any generator not write the cross-section? bool hasCrossSection() const; /// Get the cross-section known to the handler. double crossSection() const { return _xs; } /// Set the cross-section for the process being generated. /// @todo What about the xsec uncertainty? Add a second, optional argument? AnalysisHandler& setCrossSection(double xs); /// Set the beam particles for this run AnalysisHandler& setRunBeams(const ParticlePair& beams) { _beams = beams; MSG_DEBUG("Setting run beams = " << beams << " @ " << sqrtS()/GeV << " GeV"); return *this; } /// Get the beam particles for this run, usually determined from the first event. const ParticlePair& beams() const { return _beams; } /// Get beam IDs for this run, usually determined from the first event. /// @deprecated Use standalone beamIds(ah.beams()), to clean AH interface PdgIdPair beamIds() const; /// Get energy for this run, usually determined from the first event. /// @deprecated Use standalone sqrtS(ah.beams()), to clean AH interface double sqrtS() const; /// Setter for _ignoreBeams void setIgnoreBeams(bool ignore=true); //@} /// @name Handle analyses //@{ /// Get a list of the currently registered analyses' names. std::vector analysisNames() const; /// Get the collection of currently registered analyses. const std::set& analyses() const { return _analyses; } /// Get a registered analysis by name. const AnaHandle analysis(const std::string& analysisname) const; /// Add an analysis to the run list by object AnalysisHandler& addAnalysis(Analysis* analysis); /// @brief Add an analysis to the run list using its name. /// /// The actual Analysis to be used will be obtained via /// AnalysisLoader::getAnalysis(string). If no matching analysis is found, /// no analysis is added (i.e. the null pointer is checked and discarded. AnalysisHandler& addAnalysis(const std::string& analysisname); /// @brief Add an analysis with a map of analysis options. AnalysisHandler& addAnalysis(const std::string& analysisname, std::map pars); /// @brief Add analyses to the run list using their names. /// /// The actual {@link Analysis}' to be used will be obtained via /// AnalysisHandler::addAnalysis(string), which in turn uses /// AnalysisLoader::getAnalysis(string). If no matching analysis is found /// for a given name, no analysis is added, but also no error is thrown. AnalysisHandler& addAnalyses(const std::vector& analysisnames); /// Remove an analysis from the run list using its name. AnalysisHandler& removeAnalysis(const std::string& analysisname); /// Remove analyses from the run list using their names. AnalysisHandler& removeAnalyses(const std::vector& analysisnames); //@} /// @name Main init/execute/finalise //@{ /// Initialize a run, with the run beams taken from the example event. void init(const GenEvent& event); /// @brief Analyze the given \a event by reference. /// /// This function will call the AnalysisBase::analyze() function of all /// included analysis objects. void analyze(const GenEvent& event); /// @brief Analyze the given \a event by pointer. /// /// This function will call the AnalysisBase::analyze() function of all /// included analysis objects, after checking the event pointer validity. void analyze(const GenEvent* event); /// Finalize a run. This function calls the AnalysisBase::finalize() /// functions of all included analysis objects. 
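/// An illustrative sketch of the typical external steering sequence, including the periodic dumping added in this patch; the event source and file names are placeholders, and the HepMC I/O details depend on the setup.
/// @code
/// Rivet::AnalysisHandler ah("Run1");
/// ah.addAnalysis("MC_JETS");
/// ah.dump("intermediate.yoda", 1000);   // write partial results every 1000 events
/// bool first = true;
/// for (const HepMC::GenEvent* evt : events) {   // 'events' supplied by the caller
///   if (first) { ah.init(*evt); first = false; }
///   ah.analyze(*evt);
/// }
/// ah.finalize();
/// ah.writeData("Run1.yoda");
/// @endcode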
void finalize(); //@} /// @name Histogram / data object access //@{ /// Add a vector of analysis objects to the current state. void addData(const std::vector& aos); /// Read analysis plots into the histo collection (via addData) from the named file. void readData(const std::string& filename); /// Get all analyses' plots as a vector of analysis objects. - std::vector getData(bool includeorphans = false) const; + std::vector getData(bool includeorphans = false, + bool includetmps = false) const; /// Write all analyses' plots (via getData) to the named file. void writeData(const std::string& filename) const; + /// Tell Rivet to dump intermediate results to a file named @a + /// dumpfile every @a period'th event. If @a period is not positive, + /// no dumping will be done. + void dump(string dumpfile, int period) { + _dumpPeriod = period; + _dumpFile = dumpfile; + } + + /// Take the vector of YODA files and merge them together using + /// the cross-section and weight information provided in each + /// file. Each file in @a aofiles is assumed to have been produced + /// by Rivet. By default the files are assumed to contain + /// different processes (or the same process but mutually + /// exclusive cuts), but if @a equiv is true, the files are + /// assumed to contain output of completely equivalent (but + /// statistically independent) Rivet runs. The corresponding + /// analyses will be loaded and their analysis objects will be + /// filled with the merged result. finalize() will be run on each + /// relevant analysis. The resulting YODA file can then be written + /// out by writeData(). + void mergeYodas(const vector & aofiles, bool equiv = false); + //@} - private: /// The collection of Analysis objects to be used. set _analyses; /// A vector of pre-loaded objects which do not have a valid /// Analysis plugged in. vector _orphanedPreloads; + /// A vector containing copies of analysis objects after + /// finalize() has been run. + vector _finalizedAOs; /// @name Run properties //@{ /// Run name std::string _runname; /// Event counter Counter _eventcounter; /// Cross-section known to AH double _xs, _xserr; /// Beams used by this run. ParticlePair _beams; /// Flag to check if init has been called bool _initialised; /// Flag whether input event beams should be ignored in compatibility check bool _ignoreBeams; + /// Determines how often Rivet runs finalize() and writes the + /// result to a YODA file. + int _dumpPeriod; + + /// The name of a YODA file to which Rivet periodically dumps + /// results. + string _dumpFile; + + /// Flag to indicate periodic dumping is in progress + bool _dumping; + //@} private: /// The assignment operator is private and must never be called. /// In fact, it should not even be implemented. AnalysisHandler& operator=(const AnalysisHandler&); /// The copy constructor is private and must never be called. In /// fact, it should not even be implemented. AnalysisHandler(const AnalysisHandler&); }; } #endif diff --git a/include/Rivet/AnalysisInfo.hh b/include/Rivet/AnalysisInfo.hh --- a/include/Rivet/AnalysisInfo.hh +++ b/include/Rivet/AnalysisInfo.hh @@ -1,289 +1,298 @@ // -*- C++ -*- #ifndef RIVET_AnalysisInfo_HH #define RIVET_AnalysisInfo_HH #include "Rivet/Config/RivetCommon.hh" #include namespace Rivet { class AnalysisInfo { public: /// Static factory method: returns null pointer if no metadata found static unique_ptr make(const std::string& name); /// @name Standard constructors and destructors. //@{ /// The default constructor. 
AnalysisInfo() { clear(); } /// The destructor. ~AnalysisInfo() { } //@} public: /// @name Metadata /// Metadata is used for querying from the command line and also for /// building web pages and the analysis pages in the Rivet manual. //@{ /// Get the name of the analysis. By default this is computed using the /// experiment, year and Inspire/Spires ID metadata methods. std::string name() const { if (!_name.empty()) return _name; if (!experiment().empty() && !year().empty()) { if (!inspireId().empty()) { return experiment() + "_" + year() + "_I" + inspireId(); } else if (!spiresId().empty()) { return experiment() + "_" + year() + "_S" + spiresId(); } } return ""; } /// Set the name of the analysis. void setName(const std::string& name) { _name = name; } /// Get the reference data name of the analysis (if different from plugin name). std::string getRefDataName() const { if (!_refDataName.empty()) return _refDataName; return name(); } /// Set the reference data name of the analysis (if different from plugin name). void setRefDataName(const std::string& name) { _refDataName = name; } /// Get the Inspire (SPIRES replacement) ID code for this analysis. const std::string& inspireId() const { return _inspireId; } /// Set the Inspire (SPIRES replacement) ID code for this analysis. void setInspireId(const std::string& inspireId) { _inspireId = inspireId; } /// Get the SPIRES ID code for this analysis. const std::string& spiresId() const { return _spiresId; } /// Set the SPIRES ID code for this analysis. void setSpiresId(const std::string& spiresId) { _spiresId = spiresId; } /// @brief Names & emails of paper/analysis authors. /// Names and email of authors in 'NAME \' format. The first /// name in the list should be the primary contact person. const std::vector& authors() const { return _authors; } /// Set the author list. void setAuthors(const std::vector& authors) { _authors = authors; } /// @brief Get a short description of the analysis. /// Short (one sentence) description used as an index entry. /// Use @a description() to provide full descriptive paragraphs /// of analysis details. const std::string& summary() const { return _summary; } /// Set the short description for this analysis. void setSummary(const std::string& summary) { _summary = summary; } /// @brief Get a full description of the analysis. /// Full textual description of this analysis, what it is useful for, /// what experimental techniques are applied, etc. Should be treated /// as a chunk of restructuredText (http://docutils.sourceforge.net/rst.html), /// with equations to be rendered as LaTeX with amsmath operators. const std::string& description() const { return _description; } /// Set the full description for this analysis. void setDescription(const std::string& description) { _description = description; } /// @brief Information about the events needed as input for this analysis. /// Event types, energies, kinematic cuts, particles to be considered /// stable, etc. etc. Should be treated as a restructuredText bullet list /// (http://docutils.sourceforge.net/rst.html) const std::string& runInfo() const { return _runInfo; } /// Set the full description for this analysis. 
void setRunInfo(const std::string& runInfo) { _runInfo = runInfo; } /// Beam particle types const std::vector& beams() const { return _beams; } /// Set beam particle types void setBeams(const std::vector& beams) { _beams = beams; } /// Sets of valid beam energies const std::vector >& energies() const { return _energies; } /// Set the valid beam energies void setEnergies(const std::vector >& energies) { _energies = energies; } /// Experiment which performed and published this analysis. const std::string& experiment() const { return _experiment; } /// Set the experiment which performed and published this analysis. void setExperiment(const std::string& experiment) { _experiment = experiment; } /// Collider on which the experiment ran. const std::string& collider() const { return _collider; } /// Set the collider on which the experiment ran. void setCollider(const std::string& collider) { _collider = collider; } /// @brief When the original experimental analysis was published. /// When the refereed paper on which this is based was published, /// according to SPIRES. const std::string& year() const { return _year; } /// Set the year in which the original experimental analysis was published. void setYear(const std::string& year) { _year = year; } /// The integrated data luminosity of the data set const std::string& luminosityfb() const { return _luminosityfb; } /// Set the integrated data luminosity of the data set void setLuminosityfb(const std::string& luminosityfb) { _luminosityfb = luminosityfb; } /// Journal and preprint references. const std::vector& references() const { return _references; } /// Set the journal and preprint reference list. void setReferences(const std::vector& references) { _references = references; } /// Analysis Keywords for grouping etc const std::vector& keywords() const { return _keywords; } /// BibTeX citation key for this article. const std::string& bibKey() const { return _bibKey;} /// Set the BibTeX citation key for this article. void setBibKey(const std::string& bibKey) { _bibKey = bibKey; } /// BibTeX citation entry for this article. const std::string& bibTeX() const { return _bibTeX; } /// Set the BibTeX citation entry for this article. void setBibTeX(const std::string& bibTeX) { _bibTeX = bibTeX; } /// Whether this analysis is trusted (in any way!) const std::string& status() const { return _status; } /// Set the analysis code status. void setStatus(const std::string& status) { _status = status; } /// Any work to be done on this analysis. const std::vector& todos() const { return _todos; } /// Set the to-do list. void setTodos(const std::vector& todos) { _todos = todos; } /// Get the option list. const std::vector& options() const { return _options; } /// Check if the given option is valid. bool validOption(std::string key, std::string val) const; /// Set the option list. void setOptions(const std::vector& opts) { _options = opts; buildOptionMap(); } /// Build a map of options to facilitate checking. void buildOptionMap(); /// Return true if this analysis needs to know the process cross-section. bool needsCrossSection() const { return _needsCrossSection; } /// Return true if this analysis needs to know the process cross-section. void setNeedsCrossSection(bool needXsec) { _needsCrossSection = needXsec; } + /// Return true if finalize() can be run multiple times for this analysis. 
+ bool reentrant() const { return _reentrant; } + + /// setReentrant + void setReentrant(bool ree = true) { _reentrant = ree; } + //@} private: std::string _name; std::string _refDataName; std::string _spiresId, _inspireId; std::vector _authors; std::string _summary; std::string _description; std::string _runInfo; std::string _experiment; std::string _collider; std::vector > _beams; std::vector > _energies; std::string _year; std::string _luminosityfb; std::vector _references; std::vector _keywords; std::string _bibKey; std::string _bibTeX; //std::string _bibTeXBody; ///< Was thinking of avoiding duplication of BibKey... std::string _status; std::vector _todos; bool _needsCrossSection; std::vector _options; std::map< std::string, std::set > _optionmap; + bool _reentrant; + void clear() { _name = ""; _refDataName = ""; _spiresId = ""; _inspireId = ""; _authors.clear(); _summary = ""; _description = ""; _runInfo = ""; _experiment = ""; _collider = ""; _beams.clear(); _energies.clear(); _year = ""; _luminosityfb = ""; _references.clear(); _keywords.clear(); _bibKey = ""; _bibTeX = ""; //_bibTeXBody = ""; _status = ""; _todos.clear(); _needsCrossSection = false; _options.clear(); _optionmap.clear(); + _reentrant = false; } }; /// String representation std::string toString(const AnalysisInfo& ai); /// Stream an AnalysisInfo as a text description inline std::ostream& operator<<(std::ostream& os, const AnalysisInfo& ai) { os << toString(ai); return os; } } #endif diff --git a/include/Rivet/Makefile.am b/include/Rivet/Makefile.am --- a/include/Rivet/Makefile.am +++ b/include/Rivet/Makefile.am @@ -1,183 +1,185 @@ ## Internal headers - not to be installed nobase_dist_noinst_HEADERS = ## Public headers - to be installed nobase_pkginclude_HEADERS = ## Rivet interface nobase_pkginclude_HEADERS += \ Rivet.hh \ Run.hh \ Event.hh \ ParticleBase.hh \ Particle.fhh Particle.hh \ Jet.fhh Jet.hh \ Projection.fhh Projection.hh \ ProjectionApplier.hh \ ProjectionHandler.hh \ Analysis.hh \ AnalysisHandler.hh \ AnalysisInfo.hh \ AnalysisBuilder.hh \ AnalysisLoader.hh ## Build config stuff nobase_pkginclude_HEADERS += \ Config/RivetCommon.hh \ Config/RivetConfig.hh \ Config/BuildOptions.hh ## Projections nobase_pkginclude_HEADERS += \ Projections/AliceCommon.hh \ Projections/AxesDefinition.hh \ Projections/Beam.hh \ Projections/BeamThrust.hh \ Projections/CentralEtHCM.hh \ Projections/CentralityProjection.hh \ Projections/ChargedFinalState.hh \ Projections/ChargedLeptons.hh \ Projections/ConstLossyFinalState.hh \ Projections/DirectFinalState.hh \ Projections/DISFinalState.hh \ Projections/DISKinematics.hh \ Projections/DISLepton.hh \ Projections/DressedLeptons.hh \ Projections/FastJets.hh \ Projections/FinalPartons.hh \ Projections/FinalState.hh \ Projections/FoxWolframMoments.hh \ Projections/FParameter.hh \ Projections/GeneratedPercentileProjection.hh \ Projections/HadronicFinalState.hh \ Projections/HeavyHadrons.hh \ Projections/Hemispheres.hh \ Projections/IdentifiedFinalState.hh \ Projections/ImpactParameterProjection.hh \ Projections/IndirectFinalState.hh \ Projections/InitialQuarks.hh \ Projections/InvMassFinalState.hh \ Projections/JetAlg.hh \ Projections/JetShape.hh \ Projections/LeadingParticlesFinalState.hh \ Projections/LossyFinalState.hh \ Projections/MergedFinalState.hh \ Projections/MissingMomentum.hh \ Projections/MixedFinalState.hh \ Projections/NeutralFinalState.hh \ Projections/NonHadronicFinalState.hh \ Projections/NonPromptFinalState.hh \ Projections/ParisiTensor.hh \ 
Projections/ParticleFinder.hh \ Projections/PartonicTops.hh \ Projections/PercentileProjection.hh \ Projections/PrimaryHadrons.hh \ Projections/PrimaryParticles.hh \ Projections/PromptFinalState.hh \ Projections/SingleValueProjection.hh \ Projections/SmearedParticles.hh \ Projections/SmearedJets.hh \ Projections/SmearedMET.hh \ Projections/Sphericity.hh \ Projections/Spherocity.hh \ Projections/TauFinder.hh \ Projections/Thrust.hh \ Projections/TriggerCDFRun0Run1.hh \ Projections/TriggerCDFRun2.hh \ Projections/TriggerProjection.hh \ Projections/TriggerUA5.hh \ Projections/UnstableFinalState.hh \ Projections/UserCentEstimate.hh \ Projections/VetoedFinalState.hh \ Projections/VisibleFinalState.hh \ Projections/WFinder.hh \ Projections/ZFinder.hh ## Meta-projection convenience headers nobase_pkginclude_HEADERS += \ Projections/FinalStates.hh \ Projections/Smearing.hh ## Analysis base class headers # TODO: Move to Rivet/AnalysisTools header dir? nobase_pkginclude_HEADERS += \ + Analyses/MC_Cent_pPb.hh \ Analyses/MC_ParticleAnalysis.hh \ Analyses/MC_JetAnalysis.hh \ Analyses/MC_JetSplittings.hh ## Tools nobase_pkginclude_HEADERS += \ Tools/AliceCommon.hh \ Tools/AtlasCommon.hh \ Tools/BeamConstraint.hh \ Tools/BinnedHistogram.hh \ Tools/CentralityBinner.hh \ Tools/Cmp.fhh \ Tools/Cmp.hh \ Tools/Cutflow.hh \ Tools/Cuts.fhh \ Tools/Cuts.hh \ Tools/Exceptions.hh \ Tools/JetUtils.hh \ Tools/Logging.hh \ Tools/Random.hh \ Tools/ParticleBaseUtils.hh \ Tools/ParticleIdUtils.hh \ Tools/ParticleUtils.hh \ Tools/ParticleName.hh \ + Tools/Percentile.hh \ Tools/PrettyPrint.hh \ Tools/RivetPaths.hh \ Tools/RivetSTL.hh \ Tools/RivetFastJet.hh \ Tools/RivetHepMC.hh \ Tools/RivetYODA.hh \ Tools/RivetMT2.hh \ Tools/SmearingFunctions.hh \ Tools/MomentumSmearingFunctions.hh \ Tools/ParticleSmearingFunctions.hh \ Tools/JetSmearingFunctions.hh \ Tools/TypeTraits.hh \ Tools/Utils.hh \ Tools/Nsubjettiness/AxesDefinition.hh \ Tools/Nsubjettiness/ExtraRecombiners.hh \ Tools/Nsubjettiness/MeasureDefinition.hh \ Tools/Nsubjettiness/Njettiness.hh \ Tools/Nsubjettiness/NjettinessPlugin.hh \ Tools/Nsubjettiness/Nsubjettiness.hh \ Tools/Nsubjettiness/TauComponents.hh \ Tools/Nsubjettiness/XConePlugin.hh nobase_dist_noinst_HEADERS += \ Tools/osdir.hh ## Maths nobase_pkginclude_HEADERS += \ Math/Matrices.hh \ Math/Vector3.hh \ Math/VectorN.hh \ Math/MatrixN.hh \ Math/MatrixDiag.hh \ Math/MathHeader.hh \ Math/Vectors.hh \ Math/LorentzTrans.hh \ Math/Matrix3.hh \ Math/MathUtils.hh \ Math/Vector4.hh \ Math/Math.hh \ Math/Units.hh \ Math/Constants.hh \ Math/eigen/util.h \ Math/eigen/regressioninternal.h \ Math/eigen/regression.h \ Math/eigen/vector.h \ Math/eigen/ludecompositionbase.h \ Math/eigen/ludecomposition.h \ Math/eigen/linearsolver.h \ Math/eigen/linearsolverbase.h \ Math/eigen/matrix.h \ Math/eigen/vectorbase.h \ Math/eigen/projective.h \ Math/eigen/matrixbase.h diff --git a/include/Rivet/Tools/Percentile.hh b/include/Rivet/Tools/Percentile.hh new file mode 100644 --- /dev/null +++ b/include/Rivet/Tools/Percentile.hh @@ -0,0 +1,720 @@ +#ifndef PERCENTILE_HH +#define PERCENTILE_HH + +#include "Rivet/Event.hh" +#include "Rivet/Projections/CentralityProjection.hh" +#include "Rivet/ProjectionApplier.hh" + +namespace Rivet { + + +/// Forward declaration. +class Analysis; + +/// @brief PercentileBase is the base class of all Percentile classes. +/// +/// This base class contains all non-templated variables and +/// infrastructure needed. 
+class PercentileBase { + +public: + + /// @brief The main constructor, + /// + /// requiring a pointer, @a ana, to the Analysis to which this + /// object belongs, the name of the CentralityProjection, @a + /// projname, to be used and the number, @a nProj, of multiple + /// centrality definitions that are available in that projection. + PercentileBase(Analysis * ana, string projName, int nProj) + : _ana(ana), _projName(projName), _nProj(nProj) {} + + /// @brief Default constructor. + PercentileBase() {} + + /// @brief Initialize the PercentileBase for a new event. + /// + /// This will perform the assigned CentralityProjection and select + /// out the (indices of the) internal AnalysisObjects that are to be + /// active in this event. + void selectBins(const Event &); + + /// @brief Helper function to check if @a x is within @a range. + static bool inRange(double x, pair range) { + return x >= range.first && ( x < range.second || ( x == 100.0 && x == range.second ) ); + } + + /// @brief Helper function to get an analysis object from the analysis assigned to. + AnalysisObjectPtr getObject(string path) const; + + /// @brief Helper function to get an analysis object from the analysis assigned to. + template + std::shared_ptr getAnalysisObject(string path) const { + return dynamic_pointer_cast(getObject(path)); + } + + /// @brief Copy information from @a other PercentileBase. + void copyFrom(const PercentileBase & other) { + _ana = other._ana; + _projName = other._projName; + _cent = other._cent; + _nProj = other._nProj; + } + + /// @brief Check if @a other PercentileBase is compatible with this. + bool compatible(const PercentileBase & other) const { + return ( _ana == other._ana && + _projName == other._projName && + _cent == other._cent && + _nProj == other._nProj ); + } + + /// @brief Return the list of centrality bins. + /// + /// The size of this vector is the same as the number of internal + /// analysis objects in the subclass PercentileTBase. + const vector< pair > & centralities() const { + return _cent; + } + +protected: + + /// The Analysis object to which this object is assigned. + Analysis * _ana; + + /// The name of the CentralityProjection. + string _projName; + + /// The list of indices of the analysis objects that are to be + /// filled in the current event. + vector _activeBins; + + /// The list of centrality intervals, one for each included analysis + /// object. + vector > _cent; + + /// The number of internal optional centrality definitions included + /// in the CentralityProjection. + int _nProj; + +}; + +/// @brief PercentileTBase is the base class of all Percentile classes. +/// +/// This base class contains all template-dependent variables and +/// infrastructure needed for Percentile and PercentileXaxis. +template +class PercentileTBase : public PercentileBase { + +public: + + /// Convenient typedef. + typedef typename T::Ptr TPtr; + + /// @brief The main constructor, + /// + /// requiring a pointer, @a ana, to the Analysis to which this + /// object belongs, the name of the CentralityProjection, @a + /// projname, to be used and the number, @a nProj, of multiple + /// centrality definitions that are available in that projection. + PercentileTBase(Analysis * ana, string projName, int nProj) + : PercentileBase(ana, projName, nProj), _histos() {} + + /// @brief Default constructor. + PercentileTBase() {} + + /// @brief Empty destructor. + ~PercentileTBase() {} + + /// @brief Add a new percentile bin. 
+ /// + /// Add analysis objects which are clones of @a temp that should + /// be active for events in the given centrality bin @a + /// cent. Several analysis objects may be added depending on the + /// number of alternative centrality definitions in the + /// CentralityProjection @a proj. This function is common for + /// Percentile and PercentileXaxis, but for the latter the @a cent + /// argument should be left to its default. + vector + add(const CentralityProjection & proj, const T & temp, + pair cent = {0.0, 100.0} ) { + vector ret; + for ( int i = 0, N = _nProj; i < N; ++i ) { + _cent.push_back(cent); + auto cnt = make_shared(); + string path = temp.path(); + if ( i > 0) path += "[" + proj.projections()[i] + "]"; + auto hist = getAnalysisObject(path); + if ( !hist ) { + hist = make_shared(temp, path); + ret.push_back(hist); + } + _histos.push_back( { hist, cnt } ); + } + return ret; + } + + /// @brief Copy the information from an @a other Percentile object. + /// + /// This function differs from a simple assignment as the @a other + /// analysis objects are not copied, but supplied separately through + /// @a tv. + bool add(const PercentileBase & other, const vector & tv) { + copyFrom(other); + if ( tv.size() != _cent.size() ) return false; + for ( auto t : tv ) + _histos.push_back( { t, make_shared() } ); + return true; + } + + /// @brief Initialize for a new event. Select which AnalysisObjects + /// should be filled for this event. Keeps track of the number of + /// events seen for each centrality bin and AnalysisObject. + void init(const Event & event) { + selectBins(event); + for (const auto bin : _activeBins) + _histos[bin].second->fill(event.weight()); + } + + /// @brief Normalize each AnalysisObject + /// + /// by dividing by the sum of the events seen for each centrality + /// bin. + void normalizePerEvent() { + for (const auto &hist : _histos) + if ( hist.second->numEntries() > 0 && hist.first->numEntries() > 0) + hist.first->scaleW(1./hist.second->val()); + } + + /// Simple scaling of each AnalysisObject. + void scale(float scale) { + for (const auto hist : _histos) + hist.first->scaleW(scale); + } + + /// Execute a function for each AnalysisObject. + void exec(function f) { for ( auto hist : _histos) f(hist); } + + /// @brief Access the underlying AnalysisObjects + /// + /// The returned vector contains a pair, where the first member is + /// the AnalysisObject and the second is a counter keeping track of + /// the sum of event weights for which the AnalysisObject has been + /// active. + const vector, shared_ptr > > & + analysisObjects() const { + return _histos; + } + +protected: + + /// The returned vector contains a pair, where the first member is + /// the AnalysisObject and the second is a counter keeping track of + /// the sum of event weights for which the AnalysisObject has been + /// active. + vector, shared_ptr > > _histos; + +}; + +/// @brief The Percentile class for centrality binning. +/// +/// The Percentile class automatically handles the selection of which +/// AnalysisObject(s) should be filled depending on the centrality of +/// an event. It contains a list of AnalysisObjects, one for each +/// centrality bin requested (note that these bins may be overlapping) +/// and for each centrality definition available in the assigned +/// CentralityProjection. 
+template +class Percentile : public PercentileTBase { + +public: + + /// @brief The main constructor, + /// + /// requiring a pointer, @a ana, to the Analysis to which this + /// object belongs, the name of the CentralityProjection, @a + /// projname, to be used and the number, @a nProj, of multiple + /// centrality definitions that are available in that projection. + Percentile(Analysis * ana, string projName, int nProj) + : PercentileTBase(ana, projName, nProj) {} + + /// @brief Default constructor. + Percentile() {} + + /// @brief Empty destructor. + ~Percentile() {} + + /// Needed to access members of the templated base class. + using PercentileTBase::_histos; + + /// Needed to access members of the templated base class. + using PercentileTBase::_activeBins; + + /// Fill each AnalysisObject selected in the last call to + /// PercentileTBase::init(). + template + void fill(Args... args) { + for (const auto bin : _activeBins) { + _histos[bin].first->fill(args...); + } + } + + /// Subtract the contents from another Percentile. + Percentile &operator-=(const Percentile &rhs) { + const int nCent = _histos.size(); + for (int iCent = 0; iCent < nCent; ++iCent) { + *_histos[iCent].first -= *rhs._histos[iCent].first; + } + return *this; + } + + /// Add the contents from another Percentile. + Percentile &operator+=(const Percentile &rhs) { + const int nCent = _histos.size(); + for (int iCent = 0; iCent < nCent; ++iCent) { + *_histos[iCent].first += *rhs._histos[iCent].first; + /// @todo should this also add the Counter? + } + return *this; + } + + /// Make this object look like a pointer. + Percentile *operator->() { return this; } + + /// Pointer to member operator. + Percentile &operator->*(function f) { exec(f); return *this; } + +}; + +/// @brief The PercentileXaxis class for centrality binning. +/// +/// The PercentileXaxis class automatically handles the x-axis of an +/// AnalysisObject when the x-axis is to be the centrality of an +/// event. This could also be done by e.g. directly filling a Histo1D +/// with the result of a CentralityProjection. However, since the +/// CentralityProjection may handle several centrality definitions at +/// the same time, it is reasonable to instead use +/// PercentileXaxis, which will fill one histogram for each +/// centrality definition. +/// +/// Operationally this class works like the Percentile class, but only +/// one centrality bin (0-100) is included. When fill()ed, the first +/// argument is always given by the assigned CentralityProjection. +template +class PercentileXaxis : public PercentileTBase { + +public: + + /// @brief The main constructor, + /// + /// requiring a pointer, @a ana, to the Analysis to which this + /// object belongs, the name of the CentralityProjection, @a + /// projname, to be used and the number, @a nProj, of multiple + /// centrality definitions that are available in that projection. + PercentileXaxis(Analysis * ana, string projName, int nProj) + : PercentileTBase(ana, projName, nProj) {} + + /// @brief Default constructor. + PercentileXaxis() {} + + /// @brief Empty destructor. + ~PercentileXaxis() {} + + /// Needed to access members of the templated base class. + using PercentileTBase::_histos; + + /// Needed to access members of the templated base class. + using PercentileTBase::_activeBins; + + /// Fill each AnalysisObject selected in the last call to + /// PercentileTBase::init(). + template + void fill(Args... args) { + for (const auto bin : _activeBins) { + _histos[bin].first->fill(bin, args...); + } + } + + /// Subtract the contents from another PercentileXaxis. 
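/// An illustrative sketch of how a Percentile<Histo1D> booked with Analysis::bookPercentile() might be used, assuming a CentralityProjection registered as "CENT" and a charged final state declared as "CFS"; the member and analysis names are hypothetical.
/// @code
/// void MyHeavyIonAnalysis::analyze(const Event& event) {
///   _p_eta.init(event);   // pick the centrality bin(s) for this event
///   for (const Particle& p : apply<ChargedFinalState>(event, "CFS").particles())
///     _p_eta.fill(p.eta(), event.weight());
/// }
///
/// void MyHeavyIonAnalysis::finalize() {
///   _p_eta.normalizePerEvent();   // divide by the events seen in each bin
/// }
/// @endcode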
+  PercentileXaxis & operator-=(const PercentileXaxis & rhs) {
+    const int nCent = _histos.size();
+    for (int iCent = 0; iCent < nCent; ++iCent) {
+      *_histos[iCent].first -= *rhs._histos[iCent].first;
+    }
+    return *this;
+  }
+
+  /// Add the contents from another PercentileXaxis.
+  PercentileXaxis & operator+=(const PercentileXaxis & rhs) {
+    const int nCent = this->_histos.size();
+    for (int iCent = 0; iCent < nCent; ++iCent) {
+      *_histos[iCent].first += *rhs._histos[iCent].first;
+    }
+    return *this;
+  }
+
+  /// Make this object look like a pointer.
+  PercentileXaxis * operator->() { return this; }
+
+  /// Pointer to member operator.
+  PercentileXaxis & operator->*(function<void(pair<shared_ptr<T>, shared_ptr<Counter>>)> f) {
+    this->exec(f);
+    return *this;
+  }
+
+};
+
+/// @name Combining Percentiles following the naming of functions for
+/// the underlying AnalysisObjects: global operators
+/// @{
+
+template <class T>
+Percentile<typename ReferenceTraits<T>::RefT>
+divide(const Percentile<T> numer, const Percentile<T> denom) {
+  typedef typename ReferenceTraits<T>::RefT ScatT;
+  Percentile<ScatT> ret;
+  vector<shared_ptr<ScatT>> scatters;
+  assert( numer.compatible(denom) );
+  for ( int i = 0, N = numer.analysisObjects().size(); i < N; ++i )
+    scatters.push_back(make_shared<ScatT>(divide(*numer.analysisObjects()[i].first,
+                                                 *denom.analysisObjects()[i].first)));
+  ret.add(numer, scatters);
+  return ret;
+}
+
+template <class T>
+Percentile<typename ReferenceTraits<T>::RefT>
+divide(const Percentile<T> numer,
+       const Percentile<typename ReferenceTraits<T>::RefT> denom) {
+  typedef typename ReferenceTraits<T>::RefT ScatT;
+  Percentile<ScatT> ret;
+  vector<shared_ptr<ScatT>> scatters;
+  assert( numer.compatible(denom) );
+  for ( int i = 0, N = numer.analysisObjects().size(); i < N; ++i )
+    scatters.push_back(make_shared<ScatT>(divide(*numer.analysisObjects()[i].first,
+                                                 *denom.analysisObjects()[i].first)));
+  ret.add(numer, scatters);
+  return ret;
+}
+
+template <class T>
+Percentile<typename ReferenceTraits<T>::RefT>
+divide(const Percentile<typename ReferenceTraits<T>::RefT> numer,
+       const Percentile<T> denom) {
+  typedef typename ReferenceTraits<T>::RefT ScatT;
+  Percentile<typename ReferenceTraits<T>::RefT> ret;
+  vector<shared_ptr<ScatT>> scatters;
+  assert( numer.compatible(denom) );
+  for ( int i = 0, N = numer.analysisObjects().size(); i < N; ++i )
+    scatters.push_back(make_shared<ScatT>(divide(*numer.analysisObjects()[i].first,
+                                                 *denom.analysisObjects()[i].first)));
+  ret.add(numer, scatters);
+  return ret;
+}
+
+template <class T>
+Percentile<T> add(const Percentile<T> pctla, const Percentile<T> pctlb) {
+  Percentile<T> ret;
+  vector<shared_ptr<T>> aos;
+  assert( pctla.compatible(pctlb) );
+  for ( int i = 0, N = pctla.analysisObjects().size(); i < N; ++i )
+    aos.push_back(make_shared<T>(add(*pctla.analysisObjects()[i].first,
+                                     *pctlb.analysisObjects()[i].first)));
+  ret.add(pctla, aos);
+  return ret;
+}
+
+template <class T>
+Percentile<typename ReferenceTraits<T>::RefT>
+add(const Percentile<T> pctla,
+    const Percentile<typename ReferenceTraits<T>::RefT> pctlb) {
+  typedef typename ReferenceTraits<T>::RefT ScatT;
+  Percentile<ScatT> ret;
+  vector<shared_ptr<ScatT>> scatters;
+  assert( pctla.compatible(pctlb) );
+  for ( int i = 0, N = pctla.analysisObjects().size(); i < N; ++i )
+    scatters.push_back(make_shared<ScatT>(add(*pctla.analysisObjects()[i].first,
+                                              *pctlb.analysisObjects()[i].first)));
+  ret.add(pctla, scatters);
+  return ret;
+}
+
+template <class T>
+Percentile<typename ReferenceTraits<T>::RefT>
+add(const Percentile<typename ReferenceTraits<T>::RefT> pctla,
+    const Percentile<T> pctlb) {
+  typedef typename ReferenceTraits<T>::RefT ScatT;
+  Percentile<ScatT> ret;
+  vector<shared_ptr<ScatT>> scatters;
+  assert( pctla.compatible(pctlb) );
+  for ( int i = 0, N = pctla.analysisObjects().size(); i < N; ++i )
+    scatters.push_back(make_shared<ScatT>(add(*pctla.analysisObjects()[i].first,
+                                              *pctlb.analysisObjects()[i].first)));
+  ret.add(pctla, scatters);
+  return ret;
+}
+
+template <class T>
+Percentile<T> subtract(const Percentile<T> pctla, const Percentile<T> pctlb) {
+  Percentile<T> ret;
+  vector<shared_ptr<T>> aos;
+  assert( pctla.compatible(pctlb) );
+
for ( int i = 0, N = pctla.analysisObjects().size(); i < N; ++i ) + aos.push_back(make_shared(subtract(*pctla.analysisObjects()[i].first, + *pctlb.analysisObjects()[i].first))); + ret.add(pctla, aos); + return ret; +} + +template +Percentile::RefT> +subtract(const Percentile pctla, + const Percentile::RefT> pctlb) { + typedef typename ReferenceTraits::RefT ScatT; + Percentile ret; + vector scatters; + assert( pctla.compatible(pctlb) ); + for ( int i = 0, N = pctla.analysisObjects().size(); i < N; ++i ) + scatters.push_back(make_shared(subtract(*pctla.analysisObjects()[i].first, + *pctlb.analysisObjects()[i].first))); + ret.add(pctla, scatters); + return ret; +} + +template +Percentile::RefT> +subtract(const Percentile::RefT> pctla, + const Percentile pctlb) { + typedef typename ReferenceTraits::RefT ScatT; + Percentile ret; + vector scatters; + assert( pctla.compatible(pctlb) ); + for ( int i = 0, N = pctla.analysisObjects().size(); i < N; ++i ) + scatters.push_back(make_shared(subtract(*pctla.analysisObjects()[i].first, + *pctlb.analysisObjects()[i].first))); + ret.add(pctla, scatters); + return ret; +} + +template +Percentile::RefT> +multiply(const Percentile pctla, + const Percentile::RefT> pctlb) { + typedef typename ReferenceTraits::RefT ScatT; + Percentile ret; + vector scatters; + assert( pctla.compatible(pctlb) ); + for ( int i = 0, N = pctla.analysisObjects().size(); i < N; ++i ) + scatters.push_back(make_shared(multiply(*pctla.analysisObjects()[i].first, + *pctlb.analysisObjects()[i].first))); + ret.add(pctla, scatters); + return ret; +} + +template +Percentile::RefT> +multiply(const Percentile::RefT> pctla, + const Percentile pctlb) { + typedef typename ReferenceTraits::RefT ScatT; + Percentile ret; + vector scatters; + assert( pctla.compatible(pctlb) ); + for ( int i = 0, N = pctla.analysisObjects().size(); i < N; ++i ) + scatters.push_back(make_shared(multiply(*pctla.analysisObjects()[i].first, + *pctlb.analysisObjects()[i].first))); + ret.add(pctla, scatters); + return ret; +} + + + + +template +PercentileXaxis::RefT> +divide(const PercentileXaxis numer, const PercentileXaxis denom) { + typedef typename ReferenceTraits::RefT ScatT; + PercentileXaxis ret; + vector scatters; + assert( numer.compatible(denom) ); + for ( int i = 0, N = numer.analysisObjects().size(); i < N; ++i ) + scatters.push_back(make_shared(divide(*numer.analysisObjects()[i].first, + *denom.analysisObjects()[i].first))); + ret.add(numer, scatters); + return ret; +} + +template +PercentileXaxis::RefT> +divide(const PercentileXaxis numer, + const PercentileXaxis::RefT> denom) { + typedef typename ReferenceTraits::RefT ScatT; + PercentileXaxis ret; + vector scatters; + assert( numer.compatible(denom) ); + for ( int i = 0, N = numer.analysisObjects().size(); i < N; ++i ) + scatters.push_back(make_shared(divide(*numer.analysisObjects()[i].first, + *denom.analysisObjects()[i].first))); + ret.add(numer, scatters); + return ret; +} + +template +PercentileXaxis::RefT> +divide(const PercentileXaxis::RefT> numer, + const PercentileXaxis denom) { + typedef typename ReferenceTraits::RefT ScatT; + PercentileXaxis::RefT> ret; + vector scatters; + assert( numer.compatible(denom) ); + for ( int i = 0, N = numer.analysisObjects().size(); i < N; ++i ) + scatters.push_back(make_shared(divide(*numer.analysisObjects()[i].first, + *denom.analysisObjects()[i].first))); + ret.add(numer, scatters); + return ret; +} + +template +PercentileXaxis add(const PercentileXaxis pctla, const PercentileXaxis pctlb) { + PercentileXaxis ret; + 
vector aos; + assert( pctla.compatible(pctlb) ); + for ( int i = 0, N = pctla.analysisObjects().size(); i < N; ++i ) + aos.push_back(make_shared(add(*pctla.analysisObjects()[i].first, + *pctlb.analysisObjects()[i].first))); + ret.add(pctla, aos); + return ret; +} + +template +PercentileXaxis::RefT> +add(const PercentileXaxis pctla, + const PercentileXaxis::RefT> pctlb) { + typedef typename ReferenceTraits::RefT ScatT; + PercentileXaxis ret; + vector scatters; + assert( pctla.compatible(pctlb) ); + for ( int i = 0, N = pctla.analysisObjects().size(); i < N; ++i ) + scatters.push_back(make_shared(add(*pctla.analysisObjects()[i].first, + *pctlb.analysisObjects()[i].first))); + ret.add(pctla, scatters); + return ret; +} + +template +PercentileXaxis::RefT> +add(const PercentileXaxis::RefT> pctla, + const PercentileXaxis pctlb) { + typedef typename ReferenceTraits::RefT ScatT; + PercentileXaxis ret; + vector scatters; + assert( pctla.compatible(pctlb) ); + for ( int i = 0, N = pctla.analysisObjects().size(); i < N; ++i ) + scatters.push_back(make_shared(add(*pctla.analysisObjects()[i].first, + *pctlb.analysisObjects()[i].first))); + ret.add(pctla, scatters); + return ret; +} + +template +PercentileXaxis subtract(const PercentileXaxis pctla, const PercentileXaxis pctlb) { + PercentileXaxis ret; + vector aos; + assert( pctla.compatible(pctlb) ); + for ( int i = 0, N = pctla.analysisObjects().size(); i < N; ++i ) + aos.push_back(make_shared(subtract(*pctla.analysisObjects()[i].first, + *pctlb.analysisObjects()[i].first))); + ret.add(pctla, aos); + return ret; +} + +template +PercentileXaxis::RefT> +subtract(const PercentileXaxis pctla, + const PercentileXaxis::RefT> pctlb) { + typedef typename ReferenceTraits::RefT ScatT; + PercentileXaxis ret; + vector scatters; + assert( pctla.compatible(pctlb) ); + for ( int i = 0, N = pctla.analysisObjects().size(); i < N; ++i ) + scatters.push_back(make_shared(subtract(*pctla.analysisObjects()[i].first, + *pctlb.analysisObjects()[i].first))); + ret.add(pctla, scatters); + return ret; +} + +template +PercentileXaxis::RefT> +subtract(const PercentileXaxis::RefT> pctla, + const PercentileXaxis pctlb) { + typedef typename ReferenceTraits::RefT ScatT; + PercentileXaxis ret; + vector scatters; + assert( pctla.compatible(pctlb) ); + for ( int i = 0, N = pctla.analysisObjects().size(); i < N; ++i ) + scatters.push_back(make_shared(subtract(*pctla.analysisObjects()[i].first, + *pctlb.analysisObjects()[i].first))); + ret.add(pctla, scatters); + return ret; +} + +template +PercentileXaxis::RefT> +multiply(const PercentileXaxis pctla, + const PercentileXaxis::RefT> pctlb) { + typedef typename ReferenceTraits::RefT ScatT; + PercentileXaxis ret; + vector scatters; + assert( pctla.compatible(pctlb) ); + for ( int i = 0, N = pctla.analysisObjects().size(); i < N; ++i ) + scatters.push_back(make_shared(multiply(*pctla.analysisObjects()[i].first, + *pctlb.analysisObjects()[i].first))); + ret.add(pctla, scatters); + return ret; +} + +template +PercentileXaxis::RefT> +multiply(const PercentileXaxis::RefT> pctla, + const PercentileXaxis pctlb) { + typedef typename ReferenceTraits::RefT ScatT; + PercentileXaxis ret; + vector scatters; + assert( pctla.compatible(pctlb) ); + for ( int i = 0, N = pctla.analysisObjects().size(); i < N; ++i ) + scatters.push_back(make_shared(multiply(*pctla.analysisObjects()[i].first, + *pctlb.analysisObjects()[i].first))); + ret.add(pctla, scatters); + return ret; +} + +template +Percentile +operator+(const Percentile pctla, const Percentile pctlb) { + 
return add(pctla, pctlb); +} + +template +Percentile +operator-(const Percentile pctla, const Percentile pctlb) { + return subtract(pctla, pctlb); +} + +template +Percentile::RefT> +operator/(const Percentile numer, const Percentile denom) { + return divide(numer, denom); +} + +template +PercentileXaxis +operator+(const PercentileXaxis pctla, const PercentileXaxis pctlb) { + return add(pctla, pctlb); +} + +template +PercentileXaxis +operator-(const PercentileXaxis pctla, const PercentileXaxis pctlb) { + return subtract(pctla, pctlb); +} + +template +PercentileXaxis::RefT> +operator/(const PercentileXaxis numer, const PercentileXaxis denom) { + return divide(numer, denom); +} + +} + +#endif diff --git a/include/Rivet/Tools/RivetYODA.hh b/include/Rivet/Tools/RivetYODA.hh --- a/include/Rivet/Tools/RivetYODA.hh +++ b/include/Rivet/Tools/RivetYODA.hh @@ -1,54 +1,103 @@ #ifndef RIVET_RIVETYODA_HH #define RIVET_RIVETYODA_HH #include "Rivet/Config/RivetCommon.hh" #include "YODA/AnalysisObject.h" #include "YODA/Counter.h" #include "YODA/Histo1D.h" #include "YODA/Histo2D.h" #include "YODA/Profile1D.h" #include "YODA/Profile2D.h" #include "YODA/Scatter1D.h" #include "YODA/Scatter2D.h" #include "YODA/Scatter3D.h" namespace Rivet { typedef std::shared_ptr AnalysisObjectPtr; typedef std::shared_ptr CounterPtr; typedef std::shared_ptr Histo1DPtr; typedef std::shared_ptr Histo2DPtr; typedef std::shared_ptr Profile1DPtr; typedef std::shared_ptr Profile2DPtr; typedef std::shared_ptr Scatter1DPtr; typedef std::shared_ptr Scatter2DPtr; typedef std::shared_ptr Scatter3DPtr; using YODA::AnalysisObject; using YODA::Counter; using YODA::Histo1D; using YODA::HistoBin1D; using YODA::Histo2D; using YODA::HistoBin2D; using YODA::Profile1D; using YODA::ProfileBin1D; using YODA::Profile2D; using YODA::ProfileBin2D; using YODA::Scatter1D; using YODA::Point1D; using YODA::Scatter2D; using YODA::Point2D; using YODA::Scatter3D; using YODA::Point3D; /// Function to get a map of all the refdata in a paper with the given @a papername. map getRefData(const string& papername); /// Get the file system path to the reference file for this paper. string getDatafilePath(const string& papername); + /// Traits class to access the type of the AnalysisObject in the + /// reference files. + template struct ReferenceTraits {}; + template<> struct ReferenceTraits { typedef Counter RefT; }; + template<> struct ReferenceTraits { typedef Scatter1D RefT; }; + template<> struct ReferenceTraits { typedef Scatter2D RefT; }; + template<> struct ReferenceTraits { typedef Scatter2D RefT; }; + template<> struct ReferenceTraits { typedef Scatter2D RefT; }; + template<> struct ReferenceTraits { typedef Scatter3D RefT; }; + template<> struct ReferenceTraits { typedef Scatter3D RefT; }; + template<> struct ReferenceTraits { typedef Scatter3D RefT; }; + + + // If @a dst and @a src both are of same subclass T, copy the + // contents of @a src into @a dst and return true. Otherwise return + // false. + template + bool aocopy(AnalysisObjectPtr src, AnalysisObjectPtr dst) { + shared_ptr tsrc = dynamic_pointer_cast(src); + if ( !tsrc ) return false; + shared_ptr tdst = dynamic_pointer_cast(dst); + if ( !tdst ) return false; + *tdst = *tsrc; + return true; + } + + // If @a dst and @a src both are of same subclass T, add the + // contents of @a src into @a dst and return true. Otherwise return + // false. 
+ template + bool aoadd(AnalysisObjectPtr dst, AnalysisObjectPtr src, double scale) { + shared_ptr tsrc = dynamic_pointer_cast(src); + if ( !tsrc ) return false; + shared_ptr tdst = dynamic_pointer_cast(dst); + if ( !tdst ) return false; + tsrc->scaleW(scale); + *tdst += *tsrc; + return true; + } + + // If @a dst is the same subclass as @a src, copy the contents of @a + // src into @a dst and return true. Otherwise return false. + bool copyao(AnalysisObjectPtr src, AnalysisObjectPtr dst); + + // If @a dst is the same subclass as @a src, scale the contents of + // @a src with @a scale and add it to @a dst and return true. Otherwise + // return false. + bool addaos(AnalysisObjectPtr dst, AnalysisObjectPtr src, double scale); + } #endif diff --git a/pyext/rivet/aopaths.py b/pyext/rivet/aopaths.py --- a/pyext/rivet/aopaths.py +++ b/pyext/rivet/aopaths.py @@ -1,102 +1,112 @@ def isRefPath(path): return path.startswith("/REF") +def isRawPath(path): + return path.startswith("/RAW") + +def isRawAO(ao): + return isRawPath(ao.path) + def stripOptions(path): import re return re.sub(r':\w+=[^:/]+', "", path) def extractOptionString(path): import re re_opts = re.compile(r"^.*(:\w+=[^:/]+)+") m = re_opts.match(path) if not m: return "" opts = list(m.groups()) for i in range(len(opts)): opts[i] = opts[i].strip(':') return " [" + ",".join(opts) + "]" def isRefAO(ao): return int(ao.annotation("IsRef")) == 1 or isRefPath(ao.path) def isTmpPath(path): return "/_" in path #< match *any* underscore-prefixed path component def isTmpAO(ao): return isTmpPath(ao.path) class AOPath(object): """ Object representation of analysis object path structures. TODO: move to YODA? """ import re re_aopath = re.compile(r"^(/[^\[\]\@\#]+)(\[[A-Za-z\d\._]+\])?(#\d+|@[\d\.]+)?$") def __init__(self, path): import os self.origpath = path m = self.re_aopath.match(path) if not m: raise Exception("Supplied path '%s' does not meet required structure" % path) self._basepath = m.group(1) self._varid = m.group(2).lstrip("[").rstrip("]") if m.group(2) else None self._binid = int(m.group(3).lstrip("#")) if m.group(3) else None self._isref = isRefPath(self._basepath) def basepath(self, keepref=False): "Main 'Unix-like' part of the AO path, optionally including a /REF prefix" p = self._basepath.rstrip("/") if not keepref and p.startswith("/REF"): p = p[4:] return p def varpath(self, keepref=False, defaultvarid=None): "The basepath, plus any bracketed variation identifier" p = self.basepath(keepref) if self.varid(defaultvarid) is not None: p += "[%s]" % str(self.varid(defaultvarid)) return p def binpath(self, keepref=False, defaultbinid=None, defaultvarid=None): "The varpath, plus any #-prefixed bin number identifier" p = self.varpath(keepref, defaultvarid) if self.binid(defaultbinid) is not None: p += "#%d" % self.binid(defaultbinid) return p def basepathparts(self, keepref=False): "List of basepath components, split by forward slashes" return self.basepath(keepref).strip("/").split("/") # TODO: basepathhead, basepathtail def dirname(self, keepref=False): "The non-final (i.e. dir-like) part of the basepath" return os.path.dirname(self.basepath(keepref)) def dirnameparts(self, keepref=False): "List of dirname components, split by forward slashes" return self.dirname(keepref).strip("/").split("/") def basename(self): "The final (i.e. 
file-like) part of the basepath" return os.path.basename(self._basepath) def varid(self, default=None): "The variation identifier (without brackets) if there is one, otherwise None" return self._varid if self._varid is not None else default def binid(self, default=None): "The bin identifier (without #) if there is one, otherwise None" return self._binid if self._binid is not None else default def isref(self): "Is there a /REF prefix in the original path?" return self._isref def istmp(self): "Do any basepath components start with an underscore, used to hide them from plotting?" return isTmpPath(self.basepath()) + + def israw(self): + "Do any basepath components start with /RAW, used to hide them from plotting?" + return isRawPath(self.basepath()) diff --git a/pyext/rivet/core.pyx b/pyext/rivet/core.pyx --- a/pyext/rivet/core.pyx +++ b/pyext/rivet/core.pyx @@ -1,232 +1,238 @@ # distutils: language = c++ cimport rivet as c from cython.operator cimport dereference as deref # Need to be careful with memory management -- perhaps use the base object that # we used in YODA? cdef extern from "" namespace "std" nogil: cdef c.unique_ptr[c.Analysis] move(c.unique_ptr[c.Analysis]) cdef class AnalysisHandler: cdef c.AnalysisHandler *_ptr def __cinit__(self): self._ptr = new c.AnalysisHandler() def __del__(self): del self._ptr def setIgnoreBeams(self, ignore=True): self._ptr.setIgnoreBeams(ignore) def addAnalysis(self, name): self._ptr.addAnalysis(name.encode('utf-8')) return self def analysisNames(self): anames = self._ptr.analysisNames() return [ a.decode('utf-8') for a in anames ] # def analysis(self, aname): # cdef c.Analysis* ptr = self._ptr.analysis(aname) # cdef Analysis pyobj = Analysis.__new__(Analysis) # if not ptr: # return None # pyobj._ptr = ptr # return pyobj def readData(self, name): self._ptr.readData(name.encode('utf-8')) def writeData(self, name): self._ptr.writeData(name.encode('utf-8')) def crossSection(self): return self._ptr.crossSection() def finalize(self): self._ptr.finalize() + def dump(self, file, period): + self._ptr.dump(file, period) + + def mergeYodas(self, filelist, equiv): + self._ptr.mergeYodas(filelist, equiv) + cdef class Run: cdef c.Run *_ptr def __cinit__(self, AnalysisHandler h): self._ptr = new c.Run(h._ptr[0]) def __del__(self): del self._ptr def setCrossSection(self, double x): self._ptr.setCrossSection(x) return self def setListAnalyses(self, choice): self._ptr.setListAnalyses(choice) return self def init(self, name, weight=1.0): return self._ptr.init(name.encode('utf-8'), weight) def openFile(self, name, weight=1.0): return self._ptr.openFile(name.encode('utf-8'), weight) def readEvent(self): return self._ptr.readEvent() def skipEvent(self): return self._ptr.skipEvent() def processEvent(self): return self._ptr.processEvent() def finalize(self): return self._ptr.finalize() cdef class Analysis: cdef c.unique_ptr[c.Analysis] _ptr def __init__(self): raise RuntimeError('This class cannot be instantiated') def requiredBeams(self): return deref(self._ptr).requiredBeams() def requiredEnergies(self): return deref(self._ptr).requiredEnergies() def keywords(self): kws = deref(self._ptr).keywords() return [ k.decode('utf-8') for k in kws ] def authors(self): auths = deref(self._ptr).authors() return [ a.decode('utf-8') for a in auths ] def bibKey(self): return deref(self._ptr).bibKey().decode('utf-8') def name(self): return deref(self._ptr).name().decode('utf-8') def bibTeX(self): return deref(self._ptr).bibTeX().decode('utf-8') def references(self): refs = 
deref(self._ptr).references() return [ r.decode('utf-8') for r in refs ] def collider(self): return deref(self._ptr).collider().decode('utf-8') def description(self): return deref(self._ptr).description().decode('utf-8') def experiment(self): return deref(self._ptr).experiment().decode('utf-8') def inspireId(self): return deref(self._ptr).inspireId().decode('utf-8') def spiresId(self): return deref(self._ptr).spiresId().decode('utf-8') def runInfo(self): return deref(self._ptr).runInfo().decode('utf-8') def status(self): return deref(self._ptr).status().decode('utf-8') def summary(self): return deref(self._ptr).summary().decode('utf-8') def year(self): return deref(self._ptr).year().decode('utf-8') def luminosityfb(self): return deref(self._ptr).luminosityfb().decode('utf-8') #cdef object LEVELS = dict(TRACE = 0, DEBUG = 10, INFO = 20, WARN = 30, WARNING = 30, ERROR = 40, CRITICAL = 50, ALWAYS = 50) cdef class AnalysisLoader: @staticmethod def analysisNames(): names = c.AnalysisLoader_analysisNames() return [ n.decode('utf-8') for n in names ] @staticmethod def getAnalysis(name): name = name.encode('utf-8') cdef c.unique_ptr[c.Analysis] ptr = c.AnalysisLoader_getAnalysis(name) cdef Analysis pyobj = Analysis.__new__(Analysis) if not ptr: return None pyobj._ptr = move(ptr) # Create python object return pyobj def getAnalysisLibPaths(): ps = c.getAnalysisLibPaths() return [ p.decode('utf-8') for p in ps ] def setAnalysisLibPaths(xs): bs = [ x.encode('utf-8') for x in xs ] c.setAnalysisLibPaths(bs) def addAnalysisLibPath(path): c.addAnalysisLibPath(path.encode('utf-8')) def setAnalysisDataPaths(xs): bs = [ x.encode('utf-8') for x in xs ] c.setAnalysisDataPaths(bs) def addAnalysisDataPath(path): c.addAnalysisDataPath(path.encode('utf-8')) def getAnalysisDataPaths(): ps = c.getAnalysisDataPaths() return [ p.decode('utf-8') for p in ps ] def findAnalysisDataFile(q): f = c.findAnalysisDataFile(q.encode('utf-8')) return f.decode('utf-8') def getAnalysisRefPaths(): ps = c.getAnalysisRefPaths() return [ p.decode('utf-8') for p in ps ] def findAnalysisRefFile(q): f = c.findAnalysisRefFile(q.encode('utf-8')) return f.decode('utf-8') def getAnalysisInfoPaths(): ps = c.getAnalysisInfoPaths() return [ p.decode('utf-8') for p in ps ] def findAnalysisInfoFile(q): f = c.findAnalysisInfoFile(q.encode('utf-8')) return f.decode('utf-8') def getAnalysisPlotPaths(): ps = c.getAnalysisPlotPaths() return [ p.decode('utf-8') for p in ps ] def findAnalysisPlotFile(q): f = c.findAnalysisPlotFile(q.encode('utf-8')) return f.decode('utf-8') def version(): return c.version().decode('utf-8') def setLogLevel(name, level): c.setLogLevel(name.encode('utf-8'), level) diff --git a/pyext/rivet/rivet.pxd b/pyext/rivet/rivet.pxd --- a/pyext/rivet/rivet.pxd +++ b/pyext/rivet/rivet.pxd @@ -1,92 +1,94 @@ from libcpp.map cimport map from libcpp.pair cimport pair from libcpp.vector cimport vector from libcpp cimport bool from libcpp.string cimport string from libcpp.memory cimport unique_ptr ctypedef int PdgId ctypedef pair[PdgId,PdgId] PdgIdPair cdef extern from "Rivet/AnalysisHandler.hh" namespace "Rivet": cdef cppclass AnalysisHandler: void setIgnoreBeams(bool) AnalysisHandler& addAnalysis(string) vector[string] analysisNames() const # Analysis* analysis(string) void writeData(string&) void readData(string&) double crossSection() void finalize() + void dump(string, int) + void mergeYodas(vector[string], bool) cdef extern from "Rivet/Run.hh" namespace "Rivet": cdef cppclass Run: Run(AnalysisHandler) Run& setCrossSection(double) # For 
chaining? Run& setListAnalyses(bool) bool init(string, double) except + # $2=1.0 bool openFile(string, double) except + # $2=1.0 bool readEvent() except + bool skipEvent() except + bool processEvent() except + bool finalize() except + cdef extern from "Rivet/Analysis.hh" namespace "Rivet": cdef cppclass Analysis: vector[PdgIdPair]& requiredBeams() vector[pair[double, double]] requiredEnergies() vector[string] authors() vector[string] references() vector[string] keywords() string name() string bibTeX() string bibKey() string collider() string description() string experiment() string inspireId() string spiresId() string runInfo() string status() string summary() string year() string luminosityfb() # Might need to translate the following errors, although I believe 'what' is now # preserved. But often, we need the exception class name. #Error #RangeError #LogicError #PidError #InfoError #WeightError #UserError cdef extern from "Rivet/AnalysisLoader.hh": vector[string] AnalysisLoader_analysisNames "Rivet::AnalysisLoader::analysisNames" () unique_ptr[Analysis] AnalysisLoader_getAnalysis "Rivet::AnalysisLoader::getAnalysis" (string) cdef extern from "Rivet/Tools/RivetPaths.hh" namespace "Rivet": vector[string] getAnalysisLibPaths() void setAnalysisLibPaths(vector[string]) void addAnalysisLibPath(string) vector[string] getAnalysisDataPaths() void setAnalysisDataPaths(vector[string]) void addAnalysisDataPath(string) string findAnalysisDataFile(string) vector[string] getAnalysisRefPaths() string findAnalysisRefFile(string) vector[string] getAnalysisInfoPaths() string findAnalysisInfoFile(string) vector[string] getAnalysisPlotPaths() string findAnalysisPlotFile(string) cdef extern from "Rivet/Rivet.hh" namespace "Rivet": string version() cdef extern from "Rivet/Tools/Logging.hh": void setLogLevel "Rivet::Log::setLevel" (string, int) diff --git a/src/Core/AnalysisInfo.cc b/src/Core/AnalysisInfo.cc --- a/src/Core/AnalysisInfo.cc +++ b/src/Core/AnalysisInfo.cc @@ -1,182 +1,183 @@ #include "Rivet/Config/RivetCommon.hh" #include "Rivet/AnalysisInfo.hh" #include "Rivet/Tools/RivetPaths.hh" #include "Rivet/Tools/Utils.hh" #include "Rivet/Tools/Logging.hh" #include "yaml-cpp/yaml.h" #include #include #include #ifdef YAML_NAMESPACE #define YAML YAML_NAMESPACE #endif namespace Rivet { namespace { Log& getLog() { return Log::getLog("Rivet.AnalysisInfo"); } } /// Static factory method unique_ptr AnalysisInfo::make(const std::string& ananame) { // Returned AI, in semi-null state unique_ptr ai( new AnalysisInfo ); ai->_beams += make_pair(PID::ANY, PID::ANY); ai->_name = ananame; /// If no ana data file found, return null AI const string datapath = findAnalysisInfoFile(ananame + ".info"); if (datapath.empty()) { MSG_DEBUG("No datafile " << ananame + ".info found"); return ai; } // Read data from YAML document MSG_DEBUG("Reading analysis data from " << datapath); YAML::Node doc; try { doc = YAML::LoadFile(datapath); } catch (const YAML::ParserException& ex) { MSG_ERROR("Parse error when reading analysis data from " << datapath << " (" << ex.what() << ")"); return ai; } #define THROW_INFOERR(KEY) throw InfoError("Problem in info parsing while accessing key " + string(KEY) + " in file " + datapath) // Simple scalars (test for nullness before casting) #define TRY_GETINFO(KEY, VAR) try { if (doc[KEY] && !doc[KEY].IsNull()) ai->_ ## VAR = doc[KEY].as(); } catch (...) 
{ THROW_INFOERR(KEY); } TRY_GETINFO("Name", name); TRY_GETINFO("Summary", summary); TRY_GETINFO("Status", status); TRY_GETINFO("RunInfo", runInfo); TRY_GETINFO("Description", description); TRY_GETINFO("Experiment", experiment); TRY_GETINFO("Collider", collider); TRY_GETINFO("Year", year); TRY_GETINFO("Luminosity_fb", luminosityfb); TRY_GETINFO("SpiresID", spiresId); TRY_GETINFO("InspireID", inspireId); TRY_GETINFO("BibKey", bibKey); TRY_GETINFO("BibTeX", bibTeX); #undef TRY_GETINFO // Sequences (test the seq *and* each entry for nullness before casting) #define TRY_GETINFO_SEQ(KEY, VAR) try { \ if (doc[KEY] && !doc[KEY].IsNull()) { \ const YAML::Node& VAR = doc[KEY]; \ for (size_t i = 0; i < VAR.size(); ++i) \ if (!VAR[i].IsNull()) ai->_ ## VAR += VAR[i].as(); \ } } catch (...) { THROW_INFOERR(KEY); } TRY_GETINFO_SEQ("Authors", authors); TRY_GETINFO_SEQ("References", references); TRY_GETINFO_SEQ("ToDo", todos); TRY_GETINFO_SEQ("Keywords", keywords); TRY_GETINFO_SEQ("Options", options); #undef TRY_GETINFO_SEQ // Build the option map ai->buildOptionMap(); // A boolean with some name flexibility try { if (doc["NeedsCrossSection"]) ai->_needsCrossSection = doc["NeedsCrossSection"].as(); else if (doc["NeedCrossSection"]) ai->_needsCrossSection = doc["NeedCrossSection"].as(); + if (doc["Reentrant"]) ai->_reentrant = doc["Reentrant"].as(); } catch (...) { - THROW_INFOERR("NeedsCrossSection|NeedCrossSection"); + THROW_INFOERR("NeedsCrossSection|NeedCrossSection|Reentrant"); } // Beam particle identities try { if (doc["Beams"]) { const YAML::Node& beams = doc["Beams"]; vector beam_pairs; if (beams.size() == 2 && beams[0].IsScalar() && beams[0].IsScalar()) { beam_pairs += PID::make_pdgid_pair(beams[0].as(), beams[1].as()); } else { for (size_t i = 0; i < beams.size(); ++i) { const YAML::Node& bp = beams[i]; if (bp.size() != 2 || !bp[0].IsScalar() || !bp[0].IsScalar()) throw InfoError("Beam ID pairs have to be either a 2-tuple or a list of 2-tuples of particle names"); beam_pairs += PID::make_pdgid_pair(bp[0].as(), bp[1].as()); } } ai->_beams = beam_pairs; } } catch (...) { THROW_INFOERR("Beams"); } // Beam energies try { if (doc["Energies"]) { vector< pair > beam_energy_pairs; for (size_t i = 0; i < doc["Energies"].size(); ++i) { const YAML::Node& be = doc["Energies"][i]; if (be.IsScalar()) { // If beam energy is a scalar, then assume symmetric beams each with half that energy beam_energy_pairs += make_pair(be.as()/2.0, be.as()/2.0); } else if (be.IsSequence()) { if (be.size() != 2) throw InfoError("Beam energies have to be a list of either numbers or pairs of numbers"); beam_energy_pairs += make_pair(be[0].as(), be[1].as()); } else { throw InfoError("Beam energies have to be a list of either numbers or pairs of numbers"); } } ai->_energies = beam_energy_pairs; } } catch (...) 
{ THROW_INFOERR("Energies"); } #undef THROW_INFOERR MSG_TRACE("AnalysisInfo pointer = " << ai.get()); return ai; } string toString(const AnalysisInfo& ai) { std::stringstream ss; ss << ai.name(); ss << " - " << ai.summary(); // ss << " - " << ai.beams(); // ss << " - " << ai.energies(); ss << " (" << ai.status() << ")"; return ss.str(); } void AnalysisInfo::buildOptionMap() { _optionmap.clear(); for ( auto opttag : _options ) { std::vector optv = split(opttag, "="); std::string optname = optv[0]; for ( auto opt : split(optv[1], ",") ) _optionmap[optname].insert(opt); } } bool AnalysisInfo::validOption(std::string key, std::string val) const { auto opt = _optionmap.find(key); // The option is required to be defined in the .info file. if ( opt == _optionmap.end() ) return false; // If the selection option is among the range of given options, // we are fine. if ( opt->second.find(val) != opt->second.end() ) return true; // Wild card selection option for value types is #. if ( opt->second.size() == 1 && *opt->second.begin() == "#" ) { std::istringstream ss(val); double test; if ( ss >> test ) return true; } // Wild card selection option for any type is *. if ( opt->second.size() == 1 && *opt->second.begin() == "*" ) return true; return false; } } diff --git a/src/Tools/Makefile.am b/src/Tools/Makefile.am --- a/src/Tools/Makefile.am +++ b/src/Tools/Makefile.am @@ -1,21 +1,22 @@ SUBDIRS = Nsubjettiness noinst_LTLIBRARIES = libRivetTools.la libRivetTools_la_SOURCES = \ BinnedHistogram.cc \ Cuts.cc \ JetUtils.cc \ Random.cc \ Logging.cc \ ParticleUtils.cc \ ParticleName.cc \ + Percentile.cc \ RivetYODA.cc \ RivetMT2.cc \ RivetPaths.cc \ Utils.cc \ binreloc.c dist_noinst_HEADERS = binreloc.h lester_mt2_bisect.hh libRivetTools_la_CPPFLAGS = $(AM_CPPFLAGS) -DENABLE_BINRELOC -DDEFAULTDATADIR=\"$(datadir)\" -DDEFAULTLIBDIR=\"$(libdir)\" diff --git a/src/Tools/Percentile.cc b/src/Tools/Percentile.cc new file mode 100644 --- /dev/null +++ b/src/Tools/Percentile.cc @@ -0,0 +1,26 @@ +#include "Rivet/Tools/RivetYODA.hh" +#include "Rivet/Tools/Percentile.hh" +#include "Rivet/Analysis.hh" + +using namespace std; + +namespace Rivet { + +void PercentileBase::selectBins(const Event & ev) { + const CentralityProjection & proj = + _ana->apply(ev, _projName); + _activeBins.clear(); + const int nCent = _cent.size(); + for (int iCent = 0; iCent < nCent; ++iCent) { + if ( inRange(proj[iCent % _nProj], _cent[iCent]) ) + _activeBins.push_back(iCent); + } +} + +AnalysisObjectPtr PercentileBase::getObject(string path) const { + foreach (const AnalysisObjectPtr & ao, _ana->analysisObjects()) + if (ao->path() == path ) return ao; + return AnalysisObjectPtr(); +} + +} diff --git a/src/Tools/RivetYODA.cc b/src/Tools/RivetYODA.cc --- a/src/Tools/RivetYODA.cc +++ b/src/Tools/RivetYODA.cc @@ -1,48 +1,71 @@ #include "Rivet/Config/RivetCommon.hh" #include "Rivet/Tools/RivetYODA.hh" #include "Rivet/Tools/RivetPaths.hh" #include "YODA/ReaderYODA.h" #include "YODA/ReaderAIDA.h" using namespace std; namespace Rivet { string getDatafilePath(const string& papername) { /// Try to find YODA otherwise fall back to try AIDA const string path1 = findAnalysisRefFile(papername + ".yoda"); if (!path1.empty()) return path1; const string path2 = findAnalysisRefFile(papername + ".aida"); if (!path2.empty()) return path2; throw Rivet::Error("Couldn't find ref data file '" + papername + ".yoda" + " in data path, '" + getRivetDataPath() + "', or '.'"); } map getRefData(const string& papername) { const string datafile = getDatafilePath(papername); // 
Make an appropriate data file reader and read the data objects /// @todo Remove AIDA support some day... YODA::Reader& reader = (datafile.find(".yoda") != string::npos) ? \ YODA::ReaderYODA::create() : YODA::ReaderAIDA::create(); vector aovec; reader.read(datafile, aovec); // Return value, to be populated map rtn; foreach ( YODA::AnalysisObject* ao, aovec ) { AnalysisObjectPtr refdata(ao); if (!refdata) continue; const string plotpath = refdata->path(); // Split path at "/" and only return the last field, i.e. the histogram ID const size_t slashpos = plotpath.rfind("/"); const string plotname = (slashpos+1 < plotpath.size()) ? plotpath.substr(slashpos+1) : ""; rtn[plotname] = refdata; } return rtn; } + bool copyao(AnalysisObjectPtr src, AnalysisObjectPtr dst) { + for (const std::string& a : src->annotations()) + dst->setAnnotation(a, src->annotation(a)); + if ( aocopy(src,dst) ) return true; + if ( aocopy(src,dst) ) return true; + if ( aocopy(src,dst) ) return true; + if ( aocopy(src,dst) ) return true; + if ( aocopy(src,dst) ) return true; + if ( aocopy(src,dst) ) return true; + if ( aocopy(src,dst) ) return true; + if ( aocopy(src,dst) ) return true; + return false; + } + + bool addaos(AnalysisObjectPtr dst, AnalysisObjectPtr src, double scale) { + if ( aoadd(dst,src,scale) ) return true; + if ( aoadd(dst,src,scale) ) return true; + if ( aoadd(dst,src,scale) ) return true; + if ( aoadd(dst,src,scale) ) return true; + if ( aoadd(dst,src,scale) ) return true; + return false; + } + }
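
For reference, the sketch below (not part of the patch) illustrates how an analysis might use the Percentile<Histo1D> helper introduced above. The analysis name, the projection name "CMULT", the observable, the binning and the centrality edges are all invented for the example; the CentralityProjection registered under that name is assumed to be declared earlier in init() by machinery not shown in this patch, and the registration of the histogram clones returned by add() with the analysis is likewise omitted.

// -*- C++ -*-
#include "Rivet/Analysis.hh"
#include "Rivet/Projections/ChargedFinalState.hh"
#include "Rivet/Projections/CentralityProjection.hh"
#include "Rivet/Tools/Percentile.hh"

namespace Rivet {

  /// Hypothetical example analysis: all names and numbers are placeholders.
  class MY_CENT_EXAMPLE : public Analysis {
  public:

    DEFAULT_RIVET_ANALYSIS_CTOR(MY_CENT_EXAMPLE);

    void init() {
      declare(ChargedFinalState(Cuts::abseta < 2.5), "CFS");

      // Assumed: a CentralityProjection has already been declared in
      // this analysis under the name "CMULT" with a single centrality
      // definition (the declaration itself is not shown here).
      const CentralityProjection& cproj =
        getProjection<CentralityProjection>("CMULT");

      // One Percentile object handling two (possibly overlapping)
      // centrality bins, each getting its own clone of the template
      // histogram.
      _pctl = Percentile<Histo1D>(this, "CMULT", 1);
      Histo1D temp(50, 0.0, 5.0, "/MY_CENT_EXAMPLE/d01-x01-y01");
      _pctl.add(cproj, temp, {0.0, 10.0});
      _pctl.add(cproj, temp, {10.0, 20.0});
    }

    void analyze(const Event& event) {
      // Select which centrality bins are active for this event and
      // count the event weight for each of them.
      _pctl.init(event);
      // Fill the histograms of the selected bins as usual.
      for (const Particle& p : apply<ChargedFinalState>(event, "CFS").particles())
        _pctl.fill(p.pT()/GeV, event.weight());
    }

    void finalize() {
      // Divide each histogram by the sum of event weights seen in its
      // centrality bin.
      _pctl.normalizePerEvent();
    }

  private:

    Percentile<Histo1D> _pctl;

  };

  DECLARE_RIVET_PLUGIN(MY_CENT_EXAMPLE);

}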