diff --git a/analyses/pluginATLAS/ATLAS_2016_I1426523.cc b/analyses/pluginATLAS/ATLAS_2016_I1426523.cc
--- a/analyses/pluginATLAS/ATLAS_2016_I1426523.cc
+++ b/analyses/pluginATLAS/ATLAS_2016_I1426523.cc
@@ -1,426 +1,426 @@
 // -*- C++ -*-
 #include "Rivet/Analysis.hh"
 #include "Rivet/Projections/FinalState.hh"
 #include "Rivet/Projections/FastJets.hh"
 #include "Rivet/Projections/IdentifiedFinalState.hh"
 #include "Rivet/Projections/PromptFinalState.hh"
 #include "Rivet/Projections/DressedLeptons.hh"
 #include "Rivet/Projections/VetoedFinalState.hh"
 
 namespace Rivet {
 
 
   /// @brief Measurement of the WZ production cross section at 8 TeV
   class ATLAS_2016_I1426523 : public Analysis {
   public:
 
     /// Constructor
     DEFAULT_RIVET_ANALYSIS_CTOR(ATLAS_2016_I1426523);
 
 
     /// @name Analysis methods
     //@{
 
     /// Book histograms and initialise projections before the run
     /// Book histograms and initialise projections before the run.
     ///
     /// Declares dressed leptons in fiducial and total phase space, prompt
     /// neutrinos, and anti-kt R=0.4 jets built from the lepton-subtracted
     /// final state, then books all cross-section histograms and the
     /// scatter/histogram pairs handled by bookHandler().
     void init() {
 
       // Lepton cuts (fiducial phase space)
       Cut FS_Zlept = Cuts::abseta < 2.5 && Cuts::pT > 15*GeV;
 
       // NB: jet kinematic cuts (|eta| < 4.5, pT > 25 GeV) are applied at
       // retrieval time in analyze(), so no jet Cut is needed here.
       Cut fs_z = Cuts::abseta < 2.5 && Cuts::pT > 15*GeV;
 
       // Get photons to dress leptons
       PromptFinalState photons(Cuts::abspid == PID::PHOTON);
 
       // Electrons and muons in Fiducial PS (prompt, no tau feed-down)
       PromptFinalState leptons(fs_z && (Cuts::abspid == PID::ELECTRON || Cuts::abspid == PID::MUON));
       leptons.acceptTauDecays(false);
       DressedLeptons dressedleptons(photons, leptons, 0.1, FS_Zlept, true);
       declare(dressedleptons, "DressedLeptons");
 
       // Electrons and muons in Total PS (no kinematic cuts)
       PromptFinalState leptons_total(Cuts::abspid == PID::ELECTRON || Cuts::abspid == PID::MUON);
       leptons_total.acceptTauDecays(false);
       DressedLeptons dressedleptonsTotal(photons, leptons_total, 0.1, Cuts::open(), true);
       declare(dressedleptonsTotal, "DressedLeptonsTotal");
 
       // Neutrinos (prompt, no tau feed-down) — used for the W reconstruction
       IdentifiedFinalState nu_id;
       nu_id.acceptNeutrinos();
       PromptFinalState neutrinos(nu_id);
       neutrinos.acceptTauDecays(false);
       declare(neutrinos, "Neutrinos");
       MSG_WARNING("\033[91;1mLIMITED VALIDITY - check info file for details!\033[m");
 
       // Jets: cluster the final state with the dressed fiducial leptons removed
       VetoedFinalState veto;
       veto.addVetoOnThisFinalState(dressedleptons);
       FastJets jets(veto, FastJets::ANTIKT, 0.4);
       declare(jets, "Jets");
 
       // Book histograms: (d, x, y) triplets index the HepData tables.
       // Tables 1-3: per-channel and combined fiducial cross-sections
       // (inclusive / W+ / W-); table 5: total cross-section; 27/41: jet counts.
       book(_h["eee"]       , 1, 1, 1);
       book(_h["mee"]       , 1, 1, 2);
       book(_h["emm"]       , 1, 1, 3);
       book(_h["mmm"]       , 1, 1, 4);
       book(_h["fid"]       , 1, 1, 5);
       book(_h["eee_Plus"]  , 2, 1, 1);
       book(_h["mee_Plus"]  , 2, 1, 2);
       book(_h["emm_Plus"]  , 2, 1, 3);
       book(_h["mmm_Plus"]  , 2, 1, 4);
       book(_h["fid_Plus"]  , 2, 1, 5);
       book(_h["eee_Minus"] , 3, 1, 1);
       book(_h["mee_Minus"] , 3, 1, 2);
       book(_h["emm_Minus"] , 3, 1, 3);
       book(_h["mmm_Minus"] , 3, 1, 4);
       book(_h["fid_Minus"] , 3, 1, 5);
       book(_h["total"]     , 5, 1, 1);
       book(_h["Njets"]     , 27, 1, 1);
       book(_h["Njets_norm"], 41, 1, 1);
 
       // Differential distributions: each gets a Scatter2D output plus a
       // hidden working histogram (see bookHandler/finalize).
       bookHandler("ZpT",               12);
       bookHandler("ZpT_Plus",          13);
       bookHandler("ZpT_Minus",         14);
       bookHandler("WpT",               15);
       bookHandler("WpT_Plus",          16);
       bookHandler("WpT_Minus",         17);
       bookHandler("mTWZ",              18);
       bookHandler("mTWZ_Plus",         19);
       bookHandler("mTWZ_Minus",        20);
       bookHandler("pTv",               21);
       bookHandler("pTv_Plus",          22);
       bookHandler("pTv_Minus",         23);
       bookHandler("Deltay",            24);
       bookHandler("Deltay_Plus",       25);
       bookHandler("Deltay_Minus",      26);
       bookHandler("mjj",               28);
       bookHandler("Deltayjj",          29);
       bookHandler("ZpT_norm",          30);
       bookHandler("ZpT_Plus_norm",     31);
       bookHandler("ZpT_Minus_norm",    32);
       bookHandler("WpT_norm",          33);
       bookHandler("mTWZ_norm",         34);
       bookHandler("pTv_norm",          35);
       bookHandler("pTv_Plus_norm",     36);
       bookHandler("pTv_Minus_norm",    37);
       bookHandler("Deltay_norm",       38);
       bookHandler("Deltay_Minus_norm", 39);
       bookHandler("Deltay_Plus_norm",  40);
       bookHandler("mjj_norm",          42);
       bookHandler("Deltayjj_norm",     43);
     }
 
     /// Book the public Scatter2D for table @a ID together with a hidden
     /// working histogram bound to the same reference binning; finalize()
     /// converts the histogram into the scatter without bin-width division.
     void bookHandler(const string& tag, size_t ID) {
       book(_s[tag], ID, 1, 1);
-      const string code1 = makeAxisCode(ID, 1, 1);
-      const string code2 = makeAxisCode(ID, 1, 2);
+      const string code1 = mkAxisCode(ID, 1, 1);
+      const string code2 = mkAxisCode(ID, 1, 2);
       // code2 names the hidden histogram; code1 supplies the binning.
       book(_h[tag], code2, refData(code1));
     }
 
 
     /// Perform the per-event analysis
     /// Perform the per-event analysis.
     ///
     /// First measures the total-phase-space cross-section, then applies the
     /// fiducial WZ selection: leptons are assigned to the W and Z bosons with
     /// a Breit-Wigner "resonant shape" weight (SM masses/widths), so the
     /// result must NOT be reinterpreted in BSM scenarios.
     void analyze(const Event& event) {
 
       const vector<DressedLepton>& dressedleptons = apply<DressedLeptons>(event, "DressedLeptons").dressedLeptons();
       const vector<DressedLepton>& dressedleptonsTotal = apply<DressedLeptons>(event, "DressedLeptonsTotal").dressedLeptons();
       const Particles& neutrinos = apply<PromptFinalState>(event, "Neutrinos").particlesByPt();
       Jets jets = apply<JetAlg>(event, "Jets").jetsByPt( (Cuts::abseta < 4.5) && (Cuts::pT > 25*GeV) );
 
       // Need at least three charged leptons and one neutrino for WZ -> lll nu
       if ((dressedleptonsTotal.size()<3) || (neutrinos.size()<1)) vetoEvent;
 
       //---Total PS: assign leptons to W and Z bosons using Resonant shape algorithm
       // NB: This resonant shape algorithm assumes the Standard Model and can therefore
       // NOT be used for reinterpretation in terms of new-physics models.
 
       // i, j index the Z-candidate lepton pair; k is the W-candidate lepton.
       // NOTE(review): i, j, k are not initialised. If none of the three
       // pairings below fires its condition (all M's equal, all masses zero),
       // they are read uninitialised — UB. Confirm all reachable events
       // satisfy at least one pairing, or default-initialise to {0,1,2}.
       int i, j, k;
       double MassZ01 = 0., MassZ02 = 0., MassZ12 = 0.;
       double MassW0 = 0., MassW1 = 0., MassW2 = 0.;
       double WeightZ1, WeightZ2, WeightZ3;
       double WeightW1, WeightW2, WeightW3;
       double M1, M2, M3;
       double WeightTotal1, WeightTotal2, WeightTotal3;
 
       // Each pairing requires an opposite-sign same-flavour Z pair and a W
       // lepton flavour-matched to the neutrino (|nu pid| - 1 = |lepton pid|).
       //try Z pair of leptons 01
       if ( (dressedleptonsTotal[0].pid()==-(dressedleptonsTotal[1].pid())) && (dressedleptonsTotal[2].abspid()==neutrinos[0].abspid()-1)){
         MassZ01 = (dressedleptonsTotal[0].momentum()+dressedleptonsTotal[1].momentum()).mass();
         MassW2 = (dressedleptonsTotal[2].momentum()+neutrinos[0].momentum()).mass();
       }
       //try Z pair of leptons 02
       if ( (dressedleptonsTotal[0].pid()==-(dressedleptonsTotal[2].pid())) && (dressedleptonsTotal[1].abspid()==neutrinos[0].abspid()-1)){
         MassZ02 = (dressedleptonsTotal[0].momentum()+dressedleptonsTotal[2].momentum()).mass();
         MassW1 = (dressedleptonsTotal[1].momentum()+neutrinos[0].momentum()).mass();
       }
       //try Z pair of leptons 12
       if ( (dressedleptonsTotal[1].pid()==-(dressedleptonsTotal[2].pid())) && (dressedleptonsTotal[0].abspid()==neutrinos[0].abspid()-1)){
         MassZ12 = (dressedleptonsTotal[1].momentum()+dressedleptonsTotal[2].momentum()).mass();
         MassW0 = (dressedleptonsTotal[0].momentum()+neutrinos[0].momentum()).mass();
       }
       // Breit-Wigner weight of each hypothesis; the pairing with the largest
       // combined weight (smallest M = -weight) wins.
       WeightZ1 = 1/(pow(MassZ01*MassZ01 - MZ_PDG*MZ_PDG,2) + pow(MZ_PDG*GammaZ_PDG,2));
       WeightW1 = 1/(pow(MassW2*MassW2 - MW_PDG*MW_PDG,2) + pow(MW_PDG*GammaW_PDG,2));
       WeightTotal1 = WeightZ1*WeightW1;
       M1 = -1*WeightTotal1;
 
       WeightZ2 = 1/(pow(MassZ02*MassZ02- MZ_PDG*MZ_PDG,2) + pow(MZ_PDG*GammaZ_PDG,2));
       WeightW2 = 1/(pow(MassW1*MassW1- MW_PDG*MW_PDG,2) + pow(MW_PDG*GammaW_PDG,2));
       WeightTotal2 = WeightZ2*WeightW2;
       M2 = -1*WeightTotal2;
 
       WeightZ3 = 1/(pow(MassZ12*MassZ12 - MZ_PDG*MZ_PDG,2) + pow(MZ_PDG*GammaZ_PDG,2));
       WeightW3 = 1/(pow(MassW0*MassW0 - MW_PDG*MW_PDG,2) + pow(MW_PDG*GammaW_PDG,2));
       WeightTotal3 = WeightZ3*WeightW3;
       M3 = -1*WeightTotal3;
 
       // The "!= 0 / == 0" fallbacks cover the case of a single valid pairing.
       if( (M1 < M2 && M1 < M3) || (MassZ01 != 0 && MassW2 != 0 && MassZ02 == 0 && MassZ12 == 0) ){
         i = 0; j = 1; k = 2;
       }
       if( (M2 < M1 && M2 < M3) || (MassZ02 != 0 && MassW1 != 0 && MassZ01 == 0 && MassZ12 == 0) ){
         i = 0; j = 2; k = 1;
       }
       if( (M3 < M1 && M3 < M2) || (MassZ12 != 0 && MassW0 != 0 && MassZ01 == 0 && MassZ02 == 0) ){
         i = 1; j = 2; k = 0;
       }
 
       FourMomentum ZbosonTotal   = dressedleptonsTotal[i].momentum()+dressedleptonsTotal[j].momentum();
 
       // Total cross-section: only a Z-mass-window requirement, filled at
       // sqrt(s) = 8000 GeV (the histogram's single bin).
       if ( (ZbosonTotal.mass() >= 66*GeV) && (ZbosonTotal.mass() <= 116*GeV) ) _h["total"]->fill(8000);
 
       //---end Total PS
 
 
       //---Fiducial PS: assign leptons to W and Z bosons using Resonant shape algorithm
       if (dressedleptons.size() < 3 || neutrinos.size() < 1)  vetoEvent;
 
       // Channel classification by lepton flavour content: eee/mee/emm/mmm
       int EventType = -1;
       int Nel = 0, Nmu = 0;
 
       for (const DressedLepton& l : dressedleptons) {
         if (l.abspid() == 11)  ++Nel;
         if (l.abspid() == 13)  ++Nmu;
       }
 
       if ( Nel == 3  && Nmu==0 )  EventType = 3;
       if ( Nel == 2  && Nmu==1 )  EventType = 2;
       if ( Nel == 1  && Nmu==2 )  EventType = 1;
       if ( Nel == 0  && Nmu==3 )  EventType = 0;
 
       // The Z pair is opposite-sign, so q0*q1*q2 = -qW; the minus sign makes
       // EventCharge equal to the W-lepton charge (+1 for W+, -1 for W-).
       int EventCharge = -dressedleptons[0].charge()*dressedleptons[1].charge()*dressedleptons[2].charge();
 
       // Re-run the resonant-shape assignment on the fiducial lepton set
       MassZ01 = 0; MassZ02 = 0; MassZ12 = 0;
       MassW0 = 0;  MassW1 = 0;  MassW2 = 0;
 
       //try Z pair of leptons 01
       if ( (dressedleptons[0].pid()==-(dressedleptons[1].pid())) && (dressedleptons[2].abspid()==neutrinos[0].abspid()-1)){
         MassZ01 = (dressedleptons[0].momentum()+dressedleptons[1].momentum()).mass();
         MassW2 = (dressedleptons[2].momentum()+neutrinos[0].momentum()).mass();
       }
       //try Z pair of leptons 02
       if ( (dressedleptons[0].pid()==-(dressedleptons[2].pid())) && (dressedleptons[1].abspid()==neutrinos[0].abspid()-1)){
         MassZ02 = (dressedleptons[0].momentum()+dressedleptons[2].momentum()).mass();
         MassW1 = (dressedleptons[1].momentum()+neutrinos[0].momentum()).mass();
       }
       //try Z pair of leptons 12
       if ( (dressedleptons[1].pid()==-(dressedleptons[2].pid())) && (dressedleptons[0].abspid()==neutrinos[0].abspid()-1)){
         MassZ12 = (dressedleptons[1].momentum()+dressedleptons[2].momentum()).mass();
         MassW0 = (dressedleptons[0].momentum()+neutrinos[0].momentum()).mass();
       }
       WeightZ1 = 1/(pow(MassZ01*MassZ01 - MZ_PDG*MZ_PDG,2) + pow(MZ_PDG*GammaZ_PDG,2));
       WeightW1 = 1/(pow(MassW2*MassW2 - MW_PDG*MW_PDG,2) + pow(MW_PDG*GammaW_PDG,2));
       WeightTotal1 = WeightZ1*WeightW1;
       M1 = -1*WeightTotal1;
 
       WeightZ2 = 1/(pow(MassZ02*MassZ02- MZ_PDG*MZ_PDG,2) + pow(MZ_PDG*GammaZ_PDG,2));
       WeightW2 = 1/(pow(MassW1*MassW1- MW_PDG*MW_PDG,2) + pow(MW_PDG*GammaW_PDG,2));
       WeightTotal2 = WeightZ2*WeightW2;
       M2 = -1*WeightTotal2;
 
       WeightZ3 = 1/(pow(MassZ12*MassZ12 - MZ_PDG*MZ_PDG,2) + pow(MZ_PDG*GammaZ_PDG,2));
       WeightW3 = 1/(pow(MassW0*MassW0 - MW_PDG*MW_PDG,2) + pow(MW_PDG*GammaW_PDG,2));
       WeightTotal3 = WeightZ3*WeightW3;
       M3 = -1*WeightTotal3;
 
       if( (M1 < M2 && M1 < M3) || (MassZ01 != 0 && MassW2 != 0 && MassZ02 == 0 && MassZ12 == 0) ){
         i = 0; j = 1; k = 2;
       }
       if( (M2 < M1 && M2 < M3) || (MassZ02 != 0 && MassW1 != 0 && MassZ01 == 0 && MassZ12 == 0) ){
         i = 0; j = 2; k = 1;
       }
       if( (M3 < M1 && M3 < M2) || (MassZ12 != 0 && MassW0 != 0 && MassZ01 == 0 && MassZ02 == 0) ){
         i = 1; j = 2; k = 0;
       }
 
       FourMomentum Zlepton1 = dressedleptons[i].momentum();
       FourMomentum Zlepton2 = dressedleptons[j].momentum();
       FourMomentum Wlepton  = dressedleptons[k].momentum();
       FourMomentum Zboson   = dressedleptons[i].momentum()+dressedleptons[j].momentum();
       FourMomentum Wboson   = dressedleptons[k].momentum()+neutrinos[0].momentum();
 
       // Transverse mass of the W candidate from lepton + neutrino
       double Wboson_mT = sqrt( 2 * Wlepton.pT() * neutrinos[0].pt() * (1 - cos(deltaPhi(Wlepton, neutrinos[0]))) )/GeV;
 
       // Fiducial selection: Z mass window, W mT, W-lepton pT, lepton isolation
       if (fabs(Zboson.mass()-MZ_PDG)>=10.)  vetoEvent;
       if (Wboson_mT<=30.)                   vetoEvent;
       if (Wlepton.pT()<=20.)                vetoEvent;
       if (deltaR(Zlepton1,Zlepton2) < 0.2)  vetoEvent;
       if (deltaR(Zlepton1,Wlepton)  < 0.3)  vetoEvent;
       if (deltaR(Zlepton2,Wlepton)  < 0.3)  vetoEvent;
 
       // WZ-system transverse mass from the scalar/vector pT sums of the
       // three leptons and the neutrino
       double WZ_pt = Zlepton1.pt() + Zlepton2.pt() + Wlepton.pt() + neutrinos[0].pt();
       double WZ_px = Zlepton1.px() + Zlepton2.px() + Wlepton.px() + neutrinos[0].px();
       double WZ_py = Zlepton1.py() + Zlepton2.py() + Wlepton.py() + neutrinos[0].py();
       double mTWZ = sqrt( pow(WZ_pt, 2) - ( pow(WZ_px, 2) + pow(WZ_py,2) ) )/GeV;
 
       double AbsDeltay = fabs(Zboson.rapidity()-Wlepton.rapidity());
 
       // Per-channel and combined fiducial cross-section fills (single bin at 8 TeV)
       if (EventType == 3) _h["eee"]->fill(8000.);
       if (EventType == 2) _h["mee"]->fill(8000.);
       if (EventType == 1) _h["emm"]->fill(8000.);
       if (EventType == 0) _h["mmm"]->fill(8000.);
       _h["fid"]->fill(8000.);
 
       if (EventCharge == 1) {
 
         // W+ events
         if (EventType == 3) _h["eee_Plus"]->fill(8000.);
         if (EventType == 2) _h["mee_Plus"]->fill(8000.);
         if (EventType == 1) _h["emm_Plus"]->fill(8000.);
         if (EventType == 0) _h["mmm_Plus"]->fill(8000.);
         _h["fid_Plus"]->fill(8000.);
 
         _h["Deltay_Plus"]->fill(AbsDeltay);
         _h["Deltay_Plus_norm"]->fill(AbsDeltay);
         fillWithOverflow("ZpT_Plus", Zboson.pT()/GeV, 220);
         fillWithOverflow("WpT_Plus", Wboson.pT()/GeV, 220);
         fillWithOverflow("mTWZ_Plus", mTWZ, 600);
         // NOTE(review): missing /GeV here, unlike every sibling pTv fill —
         // a no-op if GeV == 1 in this unit convention, but confirm and
         // harmonise for consistency.
         fillWithOverflow("pTv_Plus", neutrinos[0].pt(), 90);
         fillWithOverflow("ZpT_Plus_norm", Zboson.pT()/GeV, 220);
         fillWithOverflow("pTv_Plus_norm", neutrinos[0].pt()/GeV, 90);
 
       } else {
 
         // W- events
         if (EventType == 3) _h["eee_Minus"]->fill(8000.);
         if (EventType == 2) _h["mee_Minus"]->fill(8000.);
         if (EventType == 1) _h["emm_Minus"]->fill(8000.);
         if (EventType == 0) _h["mmm_Minus"]->fill(8000.);
         _h["fid_Minus"]->fill(8000.);
 
         _h["Deltay_Minus"]->fill(AbsDeltay);
         _h["Deltay_Minus_norm"]->fill(AbsDeltay);
         fillWithOverflow("ZpT_Minus", Zboson.pT()/GeV, 220);
         fillWithOverflow("WpT_Minus", Wboson.pT()/GeV, 220);
         fillWithOverflow("mTWZ_Minus", mTWZ, 600);
         fillWithOverflow("pTv_Minus", neutrinos[0].pt()/GeV, 90);
         fillWithOverflow("ZpT_Minus_norm", Zboson.pT()/GeV, 220);
         fillWithOverflow("pTv_Minus_norm", neutrinos[0].pt()/GeV, 90);
 
       }
 
       // Charge-inclusive differential distributions
       fillWithOverflow("ZpT", Zboson.pT()/GeV, 220);
       fillWithOverflow("WpT", Wboson.pT()/GeV, 220);
       fillWithOverflow("mTWZ", mTWZ, 600);
       fillWithOverflow("pTv", neutrinos[0].pt()/GeV, 90);
 
       _h["Deltay"]->fill(AbsDeltay);
 
       fillWithOverflow("Njets", jets.size(), 5);
       fillWithOverflow("Njets_norm", jets.size(), 5);
       fillWithOverflow("ZpT_norm", Zboson.pT()/GeV, 220);
       fillWithOverflow("WpT_norm", Wboson.pT()/GeV, 220);
       fillWithOverflow("mTWZ_norm", mTWZ, 600);
       fillWithOverflow("pTv_norm", neutrinos[0].pt()/GeV, 90);
 
       _h["Deltay_norm"]->fill(AbsDeltay);
 
       // Dijet observables for events with at least two jets
       if (jets.size()>1) {
         double mjj = (jets[0].momentum()+jets[1].momentum()).mass()/GeV;
         fillWithOverflow("mjj",      mjj, 800);
         fillWithOverflow("mjj_norm", mjj, 800);
         double DeltaYjj = fabs(jets[0].rapidity()-jets[1].rapidity());
         fillWithOverflow("Deltayjj",      DeltaYjj, 5);
         fillWithOverflow("Deltayjj_norm", DeltaYjj, 5);
       }
 
     }
 
 
     /// Fill the histogram keyed by @a tag, folding overflow into the last bin:
     /// values at or above @a overflow are entered at overflow - 0.45 so they
     /// land inside the final bin instead of YODA's overflow bucket.
     void fillWithOverflow(const string& tag, const double value, const double overflow){
       const double entry = (value < overflow) ? value : overflow - 0.45;
       _h[tag]->fill(entry);
     }
 
 
     /// Normalise histograms etc., after the run
     void finalize() {
 
       const double xs_pb(crossSection() / picobarn);
       const double xs_fb(crossSection() / femtobarn);
       const double sumw(sumOfWeights());
       MSG_INFO("Cross-Section/pb: " << xs_pb      );
       MSG_INFO("Cross-Section/fb: " << xs_fb      );
       MSG_INFO("Sum of weights  : " << sumw       );
       MSG_INFO("nEvents         : " << numEvents());
 
       const double sf_pb(xs_pb / sumw);
       const double sf_fb(xs_fb / sumw);
 
       MSG_INFO("sf_pb         : " << sf_pb);
       MSG_INFO("sf_fb         : " << sf_fb);
 
       float totalBR= 4*0.1086*0.033658; // W and Z leptonic branching fractions
 
       for (map<string, Histo1DPtr>::iterator it = _h.begin(); it != _h.end(); ++it) {
         if (it->first.find("total") != string::npos)        scale(it->second, sf_pb/totalBR);
         else if (it->first.find("norm") != string::npos)    normalize(it->second);
         else if (it->first.find("fid") != string::npos)     scale(it->second, sf_fb/4.);
         else if (it->first.find("Njets") != string::npos)   scale(it->second, sf_fb/4.);
         else if (it->first.find("ZpT") != string::npos)     scale(it->second, sf_fb/4.);
         else if (it->first.find("WpT") != string::npos)     scale(it->second, sf_fb/4.);
         else if (it->first.find("mTWZ") != string::npos)    scale(it->second, sf_fb/4.);
         else if (it->first.find("pTv") != string::npos)     scale(it->second, sf_fb/4.);
         else if (it->first.find("Deltay") != string::npos)  scale(it->second, sf_fb/4.);
         else if (it->first.find("mjj") != string::npos)     scale(it->second, sf_fb/4.);
         else                                                scale(it->second, sf_fb);
       }
       for (map<string, Scatter2DPtr>::iterator it = _s.begin(); it != _s.end(); ++it) {
         makeScatterWithoutDividingByBinwidth(it->first);
         removeAnalysisObject(_h[it->first]);
       }
     }
 
     void makeScatterWithoutDividingByBinwidth(const string& tag) {
       vector<Point2D> points;
       //size_t nBins = _dummy->numBins();
       for (const HistoBin1D &bin : _h[tag]->bins()) {
         double  x = bin.midpoint();
         double  y = bin.sumW();
         double ex = bin.xWidth()/2;
         double ey = sqrt(bin.sumW2());
         points.push_back(Point2D(x, y, ex, ey));
       }
       _s[tag]->addPoints(points);
     }
 
 
     //@}
 
 
   private:
 
     /// @name Histograms
     //@{
 
      // Working histograms (keyed by observable tag) and their output scatters
      map<string, Histo1DPtr> _h;
      map<string, Scatter2DPtr> _s;
 
      //@}
 
      // Boson masses and widths (GeV) used by the resonant-shape assignment
      double MZ_PDG = 91.1876;
      // NOTE(review): the PDG world-average W mass is 80.385 GeV; 83.385 looks
      // like a transcription slip. It only affects the W/Z pairing weights,
      // but confirm against the paper's procedure before changing.
      double MW_PDG = 83.385;
      double GammaZ_PDG = 2.4952;
      double GammaW_PDG = 2.085;
   };
 
   // The hook for the plugin system
   DECLARE_RIVET_PLUGIN(ATLAS_2016_I1426523);
 
 }
diff --git a/analyses/pluginATLAS/ATLAS_2017_I1609448.cc b/analyses/pluginATLAS/ATLAS_2017_I1609448.cc
--- a/analyses/pluginATLAS/ATLAS_2017_I1609448.cc
+++ b/analyses/pluginATLAS/ATLAS_2017_I1609448.cc
@@ -1,285 +1,285 @@
 // -*- C++ -*-
 #include "Rivet/Analysis.hh"
 #include "Rivet/Projections/FastJets.hh"
 #include "Rivet/Projections/FinalState.hh"
 #include "Rivet/Projections/DressedLeptons.hh"
 #include "Rivet/Projections/VetoedFinalState.hh"
 #include "Rivet/Projections/PromptFinalState.hh"
 #include "Rivet/Projections/MissingMomentum.hh"
 
 namespace Rivet {
 
 
   /// ATLAS pTmiss+jets cross-section ratios at 13 TeV
   class ATLAS_2017_I1609448 : public Analysis {
   public:
 
     /// Constructor
     ATLAS_2017_I1609448(string name="ATLAS_2017_I1609448")
       : Analysis(name)
     {
       // The name parameter lets the _Znunu/_Zmumu/_Zee specialisations below
       // reuse this class with a different registered analysis name.
       _mode = 0; // using Z -> nunu channel by default
 
     }
 
 
     /// Bundle of the hidden working histogram, the Rmiss ratio scatter it
     /// feeds, and the (d,x,y) reference-data coordinates that constructRmiss()
     /// needs to look up the SM numerator/denominator tables.
     struct HistoHandler {
       Histo1DPtr histo;
       Scatter2DPtr scatter;
       unsigned int d, x, y;
 
       // Forward fills to the working histogram
       void fill(double value) {
         histo->fill(value);
       }
     };
 
 
     /// Initialize
     /// Initialize: declare lepton/photon/MET/jet projections and book the
     /// four measurement regions (monojet MET, VBF MET, VBF mjj, VBF dphijj).
     void init() {
 
       // Prompt photons
       PromptFinalState photon_fs(Cuts::abspid == PID::PHOTON && Cuts::abseta < 4.9);
       // Prompt electrons
       PromptFinalState el_fs(Cuts::abseta < 4.9 && Cuts::abspid == PID::ELECTRON);
       // Prompt muons
       PromptFinalState mu_fs(Cuts::abseta < 4.9 && Cuts::abspid == PID::MUON);
 
       // Dressed leptons: electrons in Zee mode (_mode == 2), muons otherwise
       Cut lep_cuts = Cuts::pT > 7*GeV && Cuts::abseta < 2.5;
       DressedLeptons dressed_leps(photon_fs, (_mode == 2 ? el_fs : mu_fs), 0.1, lep_cuts);
       declare(dressed_leps, "DressedLeptons");
 
       // In-acceptance leptons for lepton veto (including tau/muon decay products)
       PromptFinalState veto_lep_fs(Cuts::abseta < 4.9 && (Cuts::abspid == PID::ELECTRON || Cuts::abspid == PID::MUON));
       veto_lep_fs.acceptTauDecays();
       veto_lep_fs.acceptMuonDecays();
       DressedLeptons veto_lep(photon_fs, veto_lep_fs, 0.1, lep_cuts);
       declare(veto_lep, "VetoLeptons");
 
       // MET proxy: everything except forward (|eta| > 2.5) muons; in the
       // Z->ll modes the selected dressed leptons are also removed so the
       // MET mimics the Z->nunu signature.
       VetoedFinalState met_fs(!(Cuts::abseta > 2.5 && Cuts::abspid == PID::MUON));
       if (_mode) met_fs.addVetoOnThisFinalState(dressed_leps);
       declare(MissingMomentum(met_fs), "MET");
 
       // Jet collection: anti-kt R=0.4, excluding muons and invisibles
       FastJets jets(FinalState(Cuts::abseta < 4.9), FastJets::ANTIKT, 0.4, JetAlg::Muons::NONE, JetAlg::Invisibles::NONE);
       declare(jets, "Jets");
 
       // One handler per observable; see bookHandler() for mode-dependent booking
       _h["met_mono"] = bookHandler(1, 1, 2);
       _h["met_vbf" ] = bookHandler(2, 1, 2);
       _h["mjj_vbf" ] = bookHandler(3, 1, 2);
       _h["dphijj_vbf"] = bookHandler(4, 1, 2);
     }
 
 
     /// Book the objects for one observable. In numerator modes (_mode < 2)
     /// this is a hidden working histogram plus the pre-loaded Rmiss ratio
     /// scatter; in denominator mode (_mode == 2) just the plain histogram.
     HistoHandler bookHandler(unsigned int id_d, unsigned int id_x, unsigned int id_y) {
       HistoHandler dummy;
       if (_mode < 2) {  // numerator mode
-        const string histName = "_" + makeAxisCode(id_d, id_x, id_y);
+        const string histName = "_" + mkAxisCode(id_d, id_x, id_y);
         book(dummy.histo, histName, refData(id_d, id_x, id_y)); // hidden auxiliary output
         book(dummy.scatter, id_d, id_x, id_y - 1, true); // ratio
         // Remember the reference-table coordinates for constructRmiss()
         dummy.d = id_d;
         dummy.x = id_x;
         dummy.y = id_y;
       } else {
         book(dummy.histo, id_d, id_x, 4); // denominator mode
       }
       return dummy;
     }
 
 
     /// True if the probe jet's rapidity lies strictly between the rapidities
     /// of the two boundary jets (in either order).
     bool isBetweenJets(const Jet& probe, const Jet& boundary1, const Jet& boundary2) {
       const double lo = std::min(boundary1.rapidity(), boundary2.rapidity());
       const double hi = std::max(boundary1.rapidity(), boundary2.rapidity());
       const double y = probe.rapidity();
       return lo < y && y < hi;
     }
 
 
     /// Count "gap jets": jets beyond the two leading ones with pT > 25 GeV
     /// whose rapidity lies between the two pT-leading jets.
     /// Returns 0 if fewer than two jets are present.
     int centralJetVeto(Jets& jets) {
       if (jets.size() < 2) return 0;
       // Bind the boundary jets by const reference — the originals copied
       // each Jet by value, which is needless work per call/iteration.
       const Jet& bj1 = jets.at(0);
       const Jet& bj2 = jets.at(1);
 
       // Start loop at the 3rd hardest pT jet
       int n_between = 0;
       for (size_t i = 2; i < jets.size(); ++i) {
         const Jet& j = jets.at(i);
         if (isBetweenJets(j, bj1, bj2) && j.pT() > 25*GeV)  ++n_between;
       }
       return n_between;
     }
 
 
     /// Perform the per-event analysis
     /// Perform the per-event analysis: apply the Znunu (0-lepton) or Zll
     /// (2-lepton) selection, build the MET proxy, and fill the monojet and
     /// VBF regions.
     void analyze(const Event& event) {
 
       // Require 0 (Znunu) or 2 (Zll) dressed leptons
       bool isZll = bool(_mode);
       const vector<DressedLepton> &vetoLeptons = applyProjection<DressedLeptons>(event, "VetoLeptons").dressedLeptons();
       const vector<DressedLepton> &all_leps = applyProjection<DressedLeptons>(event, "DressedLeptons").dressedLeptons();
       if (!isZll && vetoLeptons.size())    vetoEvent;
       if ( isZll && all_leps.size() != 2)  vetoEvent;
 
       vector<DressedLepton> leptons;
       bool pass_Zll = true;
       if (isZll) {
         // Sort dressed leptons by pT
         if (all_leps[0].pt() > all_leps[1].pt()) {
           leptons.push_back(all_leps[0]);
           leptons.push_back(all_leps[1]);
         } else {
           leptons.push_back(all_leps[1]);
           leptons.push_back(all_leps[0]);
         }
         // Leading lepton pT cut
         pass_Zll &= leptons[0].pT() > 80*GeV;
         // Opposite-charge requirement
         pass_Zll &= charge3(leptons[0]) + charge3(leptons[1]) == 0;
         // Z-mass requirement
         const double Zmass = (leptons[0].mom() + leptons[1].mom()).mass();
         pass_Zll &= (Zmass >= 66*GeV && Zmass <= 116*GeV);
       }
       if (!pass_Zll)  vetoEvent;
 
 
       // Get jets and remove those within dR = 0.5 of a dressed lepton
       Jets jets = applyProjection<FastJets>(event, "Jets").jetsByPt(Cuts::pT > 25*GeV && Cuts::absrap < 4.4);
       for (const DressedLepton& lep : leptons)
         ifilter_discard(jets, deltaRLess(lep, 0.5));
 
       const size_t njets = jets.size();
       if (!njets)  vetoEvent;
       // Number of central "gap" jets between the two leading jets
       const int njets_gap = centralJetVeto(jets);
 
       // Leading/subleading jet kinematics; dijet quantities default to 0
       double jpt1 = jets[0].pT();
       double jeta1 = jets[0].eta();
       double mjj = 0., jpt2 = 0., dphijj = 0.;
       if (njets >= 2) {
         mjj = (jets[0].momentum() + jets[1].momentum()).mass();
         jpt2 = jets[1].pT();
         dphijj = deltaPhi(jets[0], jets[1]);
       }
 
       // MET
       Vector3 met_vec = apply<MissingMomentum>(event, "MET").vectorMPT();
       double met = met_vec.mod();
 
       // Cut on deltaPhi between MET and first 4 jets, but only if jet pT > 30 GeV
       bool dphi_fail = false;
       for (size_t i = 0; i < jets.size() && i < 4; ++i) {
         dphi_fail |= (deltaPhi(jets[i], met_vec) < 0.4 && jets[i].pT() > 30*GeV);
       }
 
       // Region definitions: VBF requires a dijet system with no gap jets;
       // monojet requires a hard central leading jet.
       const bool pass_met_dphi = met > 200*GeV && !dphi_fail;
       const bool pass_vbf = pass_met_dphi && mjj > 200*GeV && jpt1 > 80*GeV && jpt2 > 50*GeV && njets >= 2 && !njets_gap;
       const bool pass_mono = pass_met_dphi && jpt1 > 120*GeV && fabs(jeta1) < 2.4;
       // NOTE(review): this fill lacks the /GeV applied to the vbf fills below
       // — a no-op if GeV == 1 in this unit convention, but confirm and
       // harmonise for consistency.
       if (pass_mono)  _h["met_mono"].fill(met);
       if (pass_vbf) {
         _h["met_vbf"].fill(met/GeV);
         _h["mjj_vbf"].fill(mjj/GeV);
         _h["dphijj_vbf"].fill(dphijj);
       }
     }
 
 
     /// Normalise, scale and otherwise manipulate histograms here
     /// Normalise, scale and otherwise manipulate histograms here.
     /// Scales every working histogram to cross-section (fb) per sum of
     /// weights; in numerator modes the Rmiss scatters are then rebuilt.
     void finalize() {
       const double sf = crossSection() / femtobarn / sumOfWeights();
       for (auto& entry : _h) {
         scale(entry.second.histo, sf);
         if (_mode < 2)  constructRmiss(entry.second);
       }
     }
 
 
     /// Rebuild the Rmiss ratio scatter by adding the (possibly BSM) filled
     /// histogram on top of the Standard-Model numerator from the reference
     /// data: Rmiss = (SM_num + BSM) / SM_denom, point by point.
     void constructRmiss(HistoHandler& handler) {
       // Load transfer function from reference data file
       const YODA::Scatter2D& rmiss = refData(handler.d, handler.x, handler.y);
       const YODA::Scatter2D& numer = refData(handler.d, handler.x, handler.y + 1);
       const YODA::Scatter2D& denom = refData(handler.d, handler.x, handler.y + 2);
       for (size_t i = 0; i < handler.scatter->numPoints(); ++i) {
         const Point2D& r = rmiss.point(i); // SM Rmiss
         const Point2D& n = numer.point(i); // SM numerator
         const Point2D& d = denom.point(i); // SM denominator
         const HistoBin1D& b = handler.histo->bin(i); // BSM
         // Empty bins make height()/heightErr() throw; treat them as zero.
         double bsmy;
         try {
           bsmy = b.height();
         } catch (const Exception&) { // LowStatsError or WeightError
           bsmy = 0;
         }
         double bsmey;
         try {
           bsmey = b.heightErr();
         } catch (const Exception&) { // LowStatsError or WeightError
           bsmey = 0;
         }
         // Combined numerator
         double sm_plus_bsm = n.y() + bsmy;
         // Rmiss central value (safediv guards against an empty denominator)
         double rmiss_y = safediv(sm_plus_bsm, d.y());
         // Ratio error: Rmiss = SM_num/SM_denom + BSM/SM_denom ~ Rmiss_SM + BSM/SM_denom,
         // so the SM Rmiss error is combined in quadrature with the scaled BSM error
         double rmiss_p = sqrt(r.yErrPlus()*r.yErrPlus()   + safediv(bsmey*bsmey, d.y()*d.y()));
         double rmiss_m = sqrt(r.yErrMinus()*r.yErrMinus() + safediv(bsmey*bsmey, d.y()*d.y()));
         // Set new values
         Point2D& p = handler.scatter->point(i); // (SM + BSM) Rmiss
         p.setY(rmiss_y);
         p.setYErrMinus(rmiss_m);
         p.setYErrPlus(rmiss_p);
       }
     }
 
 
   protected:
 
     // Analysis-mode switch: 0 = Znunu (default), 1 = Zmumu, 2 = Zee
     // (modes 0/1 are "numerator" modes, mode 2 is the denominator mode —
     // see bookHandler and the subclasses below)
     size_t _mode;
 
     /// Histograms, keyed by observable name
     map<string, HistoHandler> _h;
 
   };
 
 
 
   /// ATLAS pTmiss+jets specialisation for Znunu channel
   class ATLAS_2017_I1609448_Znunu : public ATLAS_2017_I1609448 {
   public:
     // Same as the base analysis default, registered under an explicit name
     ATLAS_2017_I1609448_Znunu()
       : ATLAS_2017_I1609448("ATLAS_2017_I1609448_Znunu")
     {
       _mode = 0; // Z -> nunu numerator channel
     }
   };
 
   /// ATLAS pTmiss+jets specialisation for Zmumu channel
   class ATLAS_2017_I1609448_Zmumu : public ATLAS_2017_I1609448 {
   public:
     ATLAS_2017_I1609448_Zmumu()
       : ATLAS_2017_I1609448("ATLAS_2017_I1609448_Zmumu")
     {
       _mode = 1; // Z -> mumu numerator channel (selected muons removed from MET)
     }
   };
 
   /// ATLAS pTmiss+jets specialisation for Zee channel
   class ATLAS_2017_I1609448_Zee : public ATLAS_2017_I1609448 {
   public:
     ATLAS_2017_I1609448_Zee()
       : ATLAS_2017_I1609448("ATLAS_2017_I1609448_Zee")
     {
       _mode = 2; // Z -> ee denominator channel (plain histograms, no Rmiss)
     }
   };
 
 
   // Hooks for the plugin system
   DECLARE_RIVET_PLUGIN(ATLAS_2017_I1609448);
   DECLARE_RIVET_PLUGIN(ATLAS_2017_I1609448_Znunu);
   DECLARE_RIVET_PLUGIN(ATLAS_2017_I1609448_Zmumu);
   DECLARE_RIVET_PLUGIN(ATLAS_2017_I1609448_Zee);
 
 }
diff --git a/include/Rivet/Analysis.hh b/include/Rivet/Analysis.hh
--- a/include/Rivet/Analysis.hh
+++ b/include/Rivet/Analysis.hh
@@ -1,961 +1,956 @@
 // -*- C++ -*-
 #ifndef RIVET_Analysis_HH
 #define RIVET_Analysis_HH
 
 #include "Rivet/Config/RivetCommon.hh"
 #include "Rivet/AnalysisInfo.hh"
 #include "Rivet/Event.hh"
 #include "Rivet/Projection.hh"
 #include "Rivet/ProjectionApplier.hh"
 #include "Rivet/ProjectionHandler.hh"
 #include "Rivet/AnalysisLoader.hh"
 #include "Rivet/Tools/Cuts.hh"
 #include "Rivet/Tools/Logging.hh"
 #include "Rivet/Tools/ParticleUtils.hh"
 #include "Rivet/Tools/BinnedHistogram.hh"
 #include "Rivet/Tools/RivetMT2.hh"
 #include "Rivet/Tools/RivetYODA.hh"
 
 
 /// @def vetoEvent
 /// Preprocessor define for vetoing events, including the log message and return.
 #define vetoEvent                                                       \
   do { MSG_DEBUG("Vetoing event on line " << __LINE__ << " of " << __FILE__); return; } while(0)
 
 
 namespace Rivet {
 
   // convenience for analysis writers
   using std::cout;
   using std::cerr;
   using std::endl;
   using std::stringstream;
   using std::swap;
   using std::numeric_limits;
 
   // Forward declaration
   class AnalysisHandler;
 
   /// @brief This is the base class of all analysis classes in Rivet.
   ///
   /// There are
   /// three virtual functions which should be implemented in base classes:
   ///
   /// void init() is called by Rivet before a run is started. Here the
   /// analysis class should book necessary histograms. The needed
   /// projections should probably rather be constructed in the
   /// constructor.
   ///
   /// void analyze(const Event&) is called once for each event. Here the
   /// analysis class should apply the necessary Projections and fill the
   /// histograms.
   ///
   /// void finalize() is called after a run is finished. Here the analysis
   /// class should do whatever manipulations are necessary on the
   /// histograms. Writing the histograms to a file is, however, done by
   /// the Rivet class.
   class Analysis : public ProjectionApplier {
 
     /// The AnalysisHandler is a friend.
     friend class AnalysisHandler;
 
 
   public:
 
     /// @name Standard constructors and destructors.
     //@{
 
     // /// The default constructor.
     // Analysis();
 
     /// Constructor
     Analysis(const std::string& name);
 
     /// The destructor.
     virtual ~Analysis() {}
 
     //@}
 
 
   public:
 
     /// @name Main analysis methods
     //@{
 
     /// Initialize this analysis object. A concrete class should here
     /// book all necessary histograms. An overridden function must make
     /// sure it first calls the base class function.
     virtual void init() { }
 
     /// Analyze one event. A concrete class should here apply the
     /// necessary projections on the \a event and fill the relevant
     /// histograms. An overridden function must make sure it first calls
     /// the base class function.
     virtual void analyze(const Event& event) = 0;
 
     /// Finalize this analysis object. A concrete class should here make
     /// all necessary operations on the histograms. Writing the
     /// histograms to a file is, however, done by the Rivet class. An
     /// overridden function must make sure it first calls the base class
     /// function.
     virtual void finalize() { }
 
     //@}
 
 
   public:
 
     /// @name Metadata
     /// Metadata is used for querying from the command line and also for
     /// building web pages and the analysis pages in the Rivet manual.
     //@{
 
     /// Get the actual AnalysisInfo object in which all this metadata is stored.
     const AnalysisInfo& info() const {
       assert(_info && "No AnalysisInfo object :O");
       return *_info;
     }
 
     /// @brief Get the name of the analysis.
     ///
     /// By default this is computed by combining the results of the experiment,
     /// year and Spires ID metadata methods and you should only override it if
     /// there's a good reason why those won't work.
     virtual std::string name() const {
       return (info().name().empty()) ? _defaultname : info().name();
     }
 
     /// Get the Inspire ID code for this analysis.
     virtual std::string inspireId() const {
       return info().inspireId();
     }
 
     /// Get the SPIRES ID code for this analysis (~deprecated).
     virtual std::string spiresId() const {
       return info().spiresId();
     }
 
     /// @brief Names & emails of paper/analysis authors.
     ///
     /// Names and email of authors in 'NAME \<EMAIL\>' format. The first
     /// name in the list should be the primary contact person.
     virtual std::vector<std::string> authors() const {
       return info().authors();
     }
 
     /// @brief Get a short description of the analysis.
     ///
     /// Short (one sentence) description used as an index entry.
     /// Use @a description() to provide full descriptive paragraphs
     /// of analysis details.
     virtual std::string summary() const {
       return info().summary();
     }
 
     /// @brief Get a full description of the analysis.
     ///
     /// Full textual description of this analysis, what it is useful for,
     /// what experimental techniques are applied, etc. Should be treated
     /// as a chunk of restructuredText (http://docutils.sourceforge.net/rst.html),
     /// with equations to be rendered as LaTeX with amsmath operators.
     virtual std::string description() const {
       return info().description();
     }
 
     /// @brief Information about the events needed as input for this analysis.
     ///
     /// Event types, energies, kinematic cuts, particles to be considered
     /// stable, etc. etc. Should be treated as a restructuredText bullet list
     /// (http://docutils.sourceforge.net/rst.html)
     virtual std::string runInfo() const {
       return info().runInfo();
     }
 
     /// Experiment which performed and published this analysis.
     virtual std::string experiment() const {
       return info().experiment();
     }
 
     /// Collider on which the experiment ran.
     virtual std::string collider() const {
       return info().collider();
     }
 
     /// When the original experimental analysis was published.
     virtual std::string year() const {
       return info().year();
     }
 
     /// The luminosity in inverse femtobarn
     virtual std::string luminosityfb() const {
       return info().luminosityfb();
     }
 
     /// Journal, and preprint references.
     virtual std::vector<std::string> references() const {
       return info().references();
     }
 
     /// BibTeX citation key for this article.
     virtual std::string bibKey() const {
       return info().bibKey();
     }
 
     /// BibTeX citation entry for this article.
     virtual std::string bibTeX() const {
       return info().bibTeX();
     }
 
     /// Whether this analysis is trusted (in any way!)
     virtual std::string status() const {
       return (info().status().empty()) ? "UNVALIDATED" : info().status();
     }
 
     /// Any work to be done on this analysis.
     virtual std::vector<std::string> todos() const {
       return info().todos();
     }
 
 
     /// Return the allowed pairs of incoming beams required by this analysis.
     virtual const std::vector<PdgIdPair>& requiredBeams() const {
       return info().beams();
     }
     /// Declare the allowed pairs of incoming beams required by this analysis.
     virtual Analysis& setRequiredBeams(const std::vector<PdgIdPair>& requiredBeams) {
       info().setBeams(requiredBeams);
       return *this;
     }
 
 
     /// Sets of valid beam energy pairs, in GeV
     virtual const std::vector<std::pair<double, double> >& requiredEnergies() const {
       return info().energies();
     }
 
     /// Get vector of analysis keywords
     virtual const std::vector<std::string> & keywords() const {
       return info().keywords();
     }
 
     /// Declare the list of valid beam energy pairs, in GeV
     virtual Analysis& setRequiredEnergies(const std::vector<std::pair<double, double> >& requiredEnergies) {
       info().setEnergies(requiredEnergies);
       return *this;
     }
 
 
     //@}
 
 
     /// @name Internal metadata modifying methods
     //@{
 
     /// Get the actual AnalysisInfo object in which all this metadata is stored (non-const).
     AnalysisInfo& info() {
       assert(_info && "No AnalysisInfo object :O");
       return *_info;
     }
 
     //@}
 
 
     /// @name Run conditions
     //@{
 
     /// Incoming beams for this run
     const ParticlePair& beams() const;
 
     /// Incoming beam IDs for this run
     const PdgIdPair beamIds() const;
 
     /// Centre of mass energy for this run
     double sqrtS() const;
 
     //@}
 
 
     /// @name Analysis / beam compatibility testing
     //@{
 
     /// Check if analysis is compatible with the provided beam particle IDs and energies
     bool isCompatible(const ParticlePair& beams) const;
 
     /// Check if analysis is compatible with the provided beam particle IDs and energies
     bool isCompatible(PdgId beam1, PdgId beam2, double e1, double e2) const;
 
     /// Check if analysis is compatible with the provided beam particle IDs and energies
     bool isCompatible(const PdgIdPair& beams, const std::pair<double,double>& energies) const;
 
     //@}
 
     /// Access the controlling AnalysisHandler object.
     AnalysisHandler& handler() const { return *_analysishandler; }
 
 
   protected:
 
     /// Get a Log object based on the name() property of the calling analysis object.
     Log& getLog() const;
 
     /// Get the process cross-section in pb. Throws if this hasn't been set.
     double crossSection() const;
 
     /// Get the process cross-section per generated event in pb. Throws if this
     /// hasn't been set.
     double crossSectionPerEvent() const;
 
     /// @brief Get the number of events seen (via the analysis handler).
     ///
     /// @note Use in the finalize phase only.
     size_t numEvents() const;
 
     /// @brief Get the sum of event weights seen (via the analysis handler).
     ///
     /// @note Use in the finalize phase only.
     double sumOfWeights() const;
 
   protected:
 
     /// @name Histogram paths
     //@{
 
     /// Get the canonical histogram "directory" path for this analysis.
     const std::string histoDir() const;
 
     /// Get the canonical histogram path for the named histogram in this analysis.
     const std::string histoPath(const std::string& hname) const;
 
     /// Get the canonical histogram path for the numbered histogram in this analysis.
     const std::string histoPath(unsigned int datasetId, unsigned int xAxisId, unsigned int yAxisId) const;
 
     /// Get the internal histogram name for given d, x and y (cf. HepData)
     const std::string mkAxisCode(unsigned int datasetId, unsigned int xAxisId, unsigned int yAxisId) const;
-    /// Alias
-    /// @deprecated Prefer the "mk" form, consistent with other "making function" names
-    const std::string makeAxisCode(unsigned int datasetId, unsigned int xAxisId, unsigned int yAxisId) const {
-      return mkAxisCode(datasetId, xAxisId, yAxisId);
-    }
 
     //@}
 
 
     /// @name Histogram reference data
     //@{
 
    /// Get reference data for a named histo
    ///
    /// Lazily fills the reference-data cache on first use, then looks up
    /// @a hname in it. Throws a Rivet::Exception if no reference object of
    /// that name is found; a std::bad_cast escapes if the stored object is
    /// not of type @a T.
    /// @todo SFINAE to ensure that the type inherits from YODA::AnalysisObject?
    template <typename T=YODA::Scatter2D>
    const T& refData(const string& hname) const {
      _cacheRefData();
      MSG_TRACE("Using histo bin edges for " << name() << ":" << hname);
      // A null entry means there is no reference data under this key
      // (NOTE(review): _refdata[] presumably default-inserts on miss — map-like)
      if (!_refdata[hname]) {
        MSG_ERROR("Can't find reference histogram " << hname);
        throw Exception("Reference data " + hname + " not found.");
      }
      // dynamic_cast of a reference throws std::bad_cast on type mismatch
      return dynamic_cast<T&>(*_refdata[hname]);
    }
 
     /// Get reference data for a numbered histo
     /// @todo SFINAE to ensure that the type inherits from YODA::AnalysisObject?
     template <typename T=YODA::Scatter2D>
     const T& refData(unsigned int datasetId, unsigned int xAxisId, unsigned int yAxisId) const {
-      const string hname = makeAxisCode(datasetId, xAxisId, yAxisId);
+      const string hname = mkAxisCode(datasetId, xAxisId, yAxisId);
       return refData(hname);
     }
 
     //@}
 
 
     /// @name Counter booking
     //@{
 
     /// Book a counter.
     CounterPtr & book(CounterPtr &, const std::string& name,
                            const std::string& title="");
                            // const std::string& valtitle=""
 
     /// Book a counter, using a path generated from the dataset and axis ID codes
     ///
     /// The paper, dataset and x/y-axis IDs will be used to build the histo name in the HepData standard way.
     CounterPtr & book(CounterPtr &, unsigned int datasetId, unsigned int xAxisId, unsigned int yAxisId,
                            const std::string& title="");
                            // const std::string& valtitle=""
 
     //@}
 
 
     /// @name 1D histogram booking
     //@{
 
     /// Book a 1D histogram with @a nbins uniformly distributed across the range @a lower - @a upper .
     Histo1DPtr & book(Histo1DPtr &,const std::string& name,
                            size_t nbins, double lower, double upper,
                            const std::string& title="",
                            const std::string& xtitle="",
                            const std::string& ytitle="");
 
     /// Book a 1D histogram with non-uniform bins defined by the vector of bin edges @a binedges .
     Histo1DPtr & book(Histo1DPtr &,const std::string& name,
                            const std::vector<double>& binedges,
                            const std::string& title="",
                            const std::string& xtitle="",
                            const std::string& ytitle="");
 
     /// Book a 1D histogram with non-uniform bins defined by the vector of bin edges @a binedges .
     Histo1DPtr & book(Histo1DPtr &,const std::string& name,
                            const std::initializer_list<double>& binedges,
                            const std::string& title="",
                            const std::string& xtitle="",
                            const std::string& ytitle="");
 
     /// Book a 1D histogram with binning from a reference scatter.
     Histo1DPtr & book(Histo1DPtr &,const std::string& name,
                            const Scatter2D& refscatter,
                            const std::string& title="",
                            const std::string& xtitle="",
                            const std::string& ytitle="");
 
     /// Book a 1D histogram, using the binnings in the reference data histogram.
     Histo1DPtr & book(Histo1DPtr &,const std::string& name,
                            const std::string& title="",
                            const std::string& xtitle="",
                            const std::string& ytitle="");
 
     /// Book a 1D histogram, using the binnings in the reference data histogram.
     ///
     /// The paper, dataset and x/y-axis IDs will be used to build the histo name in the HepData standard way.
     Histo1DPtr & book(Histo1DPtr &,unsigned int datasetId, unsigned int xAxisId, unsigned int yAxisId,
                            const std::string& title="",
                            const std::string& xtitle="",
                            const std::string& ytitle="");
     //@}
 
 
     /// @name 2D histogram booking
     //@{
 
     /// Book a 2D histogram with @a nxbins and @a nybins uniformly
     /// distributed across the ranges @a xlower - @a xupper and @a
     /// ylower - @a yupper respectively along the x- and y-axis.
     Histo2DPtr & book(Histo2DPtr &,const std::string& name,
                            size_t nxbins, double xlower, double xupper,
                            size_t nybins, double ylower, double yupper,
                            const std::string& title="",
                            const std::string& xtitle="",
                            const std::string& ytitle="",
                            const std::string& ztitle="");
 
     /// Book a 2D histogram with non-uniform bins defined by the
     /// vectors of bin edges @a xbinedges and @a ybinedges.
     Histo2DPtr & book(Histo2DPtr &,const std::string& name,
                            const std::vector<double>& xbinedges,
                            const std::vector<double>& ybinedges,
                            const std::string& title="",
                            const std::string& xtitle="",
                            const std::string& ytitle="",
                            const std::string& ztitle="");
 
     /// Book a 2D histogram with non-uniform bins defined by the
     /// vectors of bin edges @a xbinedges and @a ybinedges.
     Histo2DPtr & book(Histo2DPtr &,const std::string& name,
                            const std::initializer_list<double>& xbinedges,
                            const std::initializer_list<double>& ybinedges,
                            const std::string& title="",
                            const std::string& xtitle="",
                            const std::string& ytitle="",
                            const std::string& ztitle="");
 
     /// Book a 2D histogram with binning from a reference scatter.
     Histo2DPtr & book(Histo2DPtr &,const std::string& name,
                            const Scatter3D& refscatter,
                            const std::string& title="",
                            const std::string& xtitle="",
                            const std::string& ytitle="",
                            const std::string& ztitle="");
 
     /// Book a 2D histogram, using the binnings in the reference data histogram.
     Histo2DPtr & book(Histo2DPtr &,const std::string& name,
                            const std::string& title="",
                            const std::string& xtitle="",
                            const std::string& ytitle="",
                            const std::string& ztitle="");
 
     /// Book a 2D histogram, using the binnings in the reference data histogram.
     ///
     /// The paper, dataset and x/y-axis IDs will be used to build the histo name in the HepData standard way.
     Histo2DPtr & book(Histo2DPtr &,unsigned int datasetId, unsigned int xAxisId, unsigned int yAxisId,
                            const std::string& title="",
                            const std::string& xtitle="",
                            const std::string& ytitle="",
                            const std::string& ztitle="");
 
     //@}
 
 
     /// @name 1D profile histogram booking
     //@{
 
     /// Book a 1D profile histogram with @a nbins uniformly distributed across the range @a lower - @a upper .
     Profile1DPtr & book(Profile1DPtr &,  const std::string& name,
                                size_t nbins, double lower, double upper,
                                const std::string& title="",
                                const std::string& xtitle="",
                                const std::string& ytitle="");
 
     /// Book a 1D profile histogram with non-uniform bins defined by the vector of bin edges @a binedges .
     Profile1DPtr & book(Profile1DPtr &,  const std::string& name,
                                const std::vector<double>& binedges,
                                const std::string& title="",
                                const std::string& xtitle="",
                                const std::string& ytitle="");
 
     /// Book a 1D profile histogram with non-uniform bins defined by the vector of bin edges @a binedges .
     Profile1DPtr & book(Profile1DPtr &,  const std::string& name,
                                const std::initializer_list<double>& binedges,
                                const std::string& title="",
                                const std::string& xtitle="",
                                const std::string& ytitle="");
 
     /// Book a 1D profile histogram with binning from a reference scatter.
     Profile1DPtr & book(Profile1DPtr &,  const std::string& name,
                                const Scatter2D& refscatter,
                                const std::string& title="",
                                const std::string& xtitle="",
                                const std::string& ytitle="");
 
     /// Book a 1D profile histogram, using the binnings in the reference data histogram.
     Profile1DPtr & book(Profile1DPtr &,  const std::string& name,
                                const std::string& title="",
                                const std::string& xtitle="",
                                const std::string& ytitle="");
 
     /// Book a 1D profile histogram, using the binnings in the reference data histogram.
     ///
     /// The paper, dataset and x/y-axis IDs will be used to build the histo name in the HepData standard way.
     Profile1DPtr & book(Profile1DPtr &,  unsigned int datasetId, unsigned int xAxisId, unsigned int yAxisId,
                                const std::string& title="",
                                const std::string& xtitle="",
                                const std::string& ytitle="");
 
     //@}
 
 
     /// @name 2D profile histogram booking
     //@{
 
     /// Book a 2D profile histogram with @a nxbins and @a nybins uniformly
     /// distributed across the ranges @a xlower - @a xupper and @a ylower - @a
     /// yupper respectively along the x- and y-axis.
     Profile2DPtr & book(Profile2DPtr &,  const std::string& name,
                                size_t nxbins, double xlower, double xupper,
                                size_t nybins, double ylower, double yupper,
                                const std::string& title="",
                                const std::string& xtitle="",
                                const std::string& ytitle="",
                                const std::string& ztitle="");
 
    /// Book a 2D profile histogram with non-uniform bins defined by the vectors
     /// of bin edges @a xbinedges and @a ybinedges.
     Profile2DPtr & book(Profile2DPtr &,  const std::string& name,
                                const std::vector<double>& xbinedges,
                                const std::vector<double>& ybinedges,
                                const std::string& title="",
                                const std::string& xtitle="",
                                const std::string& ytitle="",
                                const std::string& ztitle="");
 
    /// Book a 2D profile histogram with non-uniform bins defined by the vectors
     /// of bin edges @a xbinedges and @a ybinedges.
     Profile2DPtr & book(Profile2DPtr &,  const std::string& name,
                                const std::initializer_list<double>& xbinedges,
                                const std::initializer_list<double>& ybinedges,
                                const std::string& title="",
                                const std::string& xtitle="",
                                const std::string& ytitle="",
                                const std::string& ztitle="");
 
     /// Book a 2D profile histogram with binning from a reference scatter.
     // Profile2DPtr bookProfile2D(const std::string& name,
     //                            const Scatter3D& refscatter,
     //                            const std::string& title="",
     //                            const std::string& xtitle="",
     //                            const std::string& ytitle="",
     //                            const std::string& ztitle="");
 
     // /// Book a 2D profile histogram, using the binnings in the reference data histogram.
     // Profile2DPtr bookProfile2D(const std::string& name,
     //                            const std::string& title="",
     //                            const std::string& xtitle="",
     //                            const std::string& ytitle="",
     //                            const std::string& ztitle="");
 
     // /// Book a 2D profile histogram, using the binnings in the reference data histogram.
     // ///
     // /// The paper, dataset and x/y-axis IDs will be used to build the histo name in the HepData standard way.
     // Profile2DPtr bookProfile2D(unsigned int datasetId, unsigned int xAxisId, unsigned int yAxisId,
     //                            const std::string& title="",
     //                            const std::string& xtitle="",
     //                            const std::string& ytitle="",
     //                            const std::string& ztitle="");
 
     //@}
 
 
     /// @name 2D scatter booking
     //@{
 
     /// @brief Book a 2-dimensional data point set with the given name.
     ///
     /// @note Unlike histogram booking, scatter booking by default makes no
     /// attempt to use reference data to pre-fill the data object. If you want
     /// this, which is sometimes useful e.g. when the x-position is not really
     /// meaningful and can't be extracted from the data, then set the @a
     /// copy_pts parameter to true. This creates points to match the reference
     /// data's x values and errors, but with the y values and errors zeroed...
     /// assuming that there is a reference histo with the same name: if there
     /// isn't, an exception will be thrown.
 
     Scatter2DPtr & book(Scatter2DPtr & s2d, const string& hname,
                                bool copy_pts=false,
                                const std::string& title="",
                                const std::string& xtitle="",
                                const std::string& ytitle="");
 
     /// @brief Book a 2-dimensional data point set, using the binnings in the reference data histogram.
     ///
     /// The paper, dataset and x/y-axis IDs will be used to build the histo name in the HepData standard way.
     ///
     /// @note Unlike histogram booking, scatter booking by default makes no
     /// attempt to use reference data to pre-fill the data object. If you want
     /// this, which is sometimes useful e.g. when the x-position is not really
     /// meaningful and can't be extracted from the data, then set the @a
     /// copy_pts parameter to true. This creates points to match the reference
     /// data's x values and errors, but with the y values and errors zeroed.
     Scatter2DPtr & book(Scatter2DPtr & s2d, unsigned int datasetId, unsigned int xAxisId, unsigned int yAxisId,
                                bool copy_pts=false,
                                const std::string& title="",
                                const std::string& xtitle="",
                                const std::string& ytitle="");
 
     /// @brief Book a 2-dimensional data point set with equally spaced x-points in a range.
     ///
     /// The y values and errors will be set to 0.
     Scatter2DPtr & book(Scatter2DPtr & s2d, const string& hname,
                                size_t npts, double lower, double upper,
                                const std::string& title="",
                                const std::string& xtitle="",
                                const std::string& ytitle="");
 
     /// @brief Book a 2-dimensional data point set based on provided contiguous "bin edges".
     ///
     /// The y values and errors will be set to 0.
     Scatter2DPtr & book(Scatter2DPtr & s2d, const string& hname,
                                const std::vector<double>& binedges,
                                const std::string& title,
                                const std::string& xtitle,
                                const std::string& ytitle);
 
     //@}
 
 
   private:
     /// to be used in finalize context only
     class CounterAdapter {
     public:
     	CounterAdapter(double          x) : x_(x                ) {}
 
 	CounterAdapter(const YODA::Counter   & c) : x_(c.val()          ) {}
 
 	// CounterAdapter(CounterPtr     cp) : x_(cp->val()        ) {}
 
     	CounterAdapter(const YODA::Scatter1D & s) : x_(s.points()[0].x()) {
     		assert( s.numPoints() == 1 || "Can only scale by a single value.");
     	}
 
     	// CounterAdapter(Scatter1DPtr   sp) : x_(sp->points()[0].x()) {
     	// 	assert( sp->numPoints() == 1 || "Can only scale by a single value.");
     	// }
 
 	operator double() const { return x_; }
 
     private:
     	double x_;
     };
 
   public:
 
     double dbl(double          x) { return x; }
     double dbl(const YODA::Counter   & c) { return c.val(); }
     double dbl(const YODA::Scatter1D & s) {  
     	assert( s.numPoints() == 1 ); 
     	return s.points()[0].x();
     }
 
     /// @name Analysis object manipulation
     /// @todo Should really be protected: only public to keep BinnedHistogram happy for now...
     //@{
 
    /// Multiplicatively scale the given counter, @a cnt, by factor @a factor.
     void scale(CounterPtr cnt, CounterAdapter factor);
 
    /// Multiplicatively scale the given counters, @a cnts, by factor @a factor.
     /// @note Constness intentional, if weird, to allow passing rvalue refs of smart ptrs (argh)
     /// @todo Use SFINAE for a generic iterable of CounterPtrs
     void scale(const std::vector<CounterPtr>& cnts, CounterAdapter factor) {
       for (auto& c : cnts) scale(c, factor);
     }
     /// @todo YUCK!
     template <std::size_t array_size>
     void scale(const CounterPtr (&cnts)[array_size], CounterAdapter factor) {
       // for (size_t i = 0; i < std::extent<decltype(cnts)>::value; ++i) scale(cnts[i], factor);
       for (auto& c : cnts) scale(c, factor);
     }
 
 
     /// Normalize the given histogram, @a histo, to area = @a norm.
     void normalize(Histo1DPtr histo, CounterAdapter norm=1.0, bool includeoverflows=true);
 
     /// Normalize the given histograms, @a histos, to area = @a norm.
     /// @note Constness intentional, if weird, to allow passing rvalue refs of smart ptrs (argh)
     /// @todo Use SFINAE for a generic iterable of Histo1DPtrs
     void normalize(const std::vector<Histo1DPtr>& histos, CounterAdapter norm=1.0, bool includeoverflows=true) {
       for (auto& h : histos) normalize(h, norm, includeoverflows);
     }
     /// @todo YUCK!
     template <std::size_t array_size>
     void normalize(const Histo1DPtr (&histos)[array_size], CounterAdapter norm=1.0, bool includeoverflows=true) {
       for (auto& h : histos) normalize(h, norm, includeoverflows);
     }
 
    /// Multiplicatively scale the given histogram, @a histo, by factor @a factor.
     void scale(Histo1DPtr histo, CounterAdapter factor);
 
    /// Multiplicatively scale the given histograms, @a histos, by factor @a factor.
     /// @note Constness intentional, if weird, to allow passing rvalue refs of smart ptrs (argh)
     /// @todo Use SFINAE for a generic iterable of Histo1DPtrs
     void scale(const std::vector<Histo1DPtr>& histos, CounterAdapter factor) {
       for (auto& h : histos) scale(h, factor);
     }
     /// @todo YUCK!
     template <std::size_t array_size>
     void scale(const Histo1DPtr (&histos)[array_size], CounterAdapter factor) {
       for (auto& h : histos) scale(h, factor);
     }
 
 
     /// Normalize the given histogram, @a histo, to area = @a norm.
     void normalize(Histo2DPtr histo, CounterAdapter norm=1.0, bool includeoverflows=true);
 
     /// Normalize the given histograms, @a histos, to area = @a norm.
     /// @note Constness intentional, if weird, to allow passing rvalue refs of smart ptrs (argh)
     /// @todo Use SFINAE for a generic iterable of Histo2DPtrs
     void normalize(const std::vector<Histo2DPtr>& histos, CounterAdapter norm=1.0, bool includeoverflows=true) {
       for (auto& h : histos) normalize(h, norm, includeoverflows);
     }
     /// @todo YUCK!
     template <std::size_t array_size>
     void normalize(const Histo2DPtr (&histos)[array_size], CounterAdapter norm=1.0, bool includeoverflows=true) {
       for (auto& h : histos) normalize(h, norm, includeoverflows);
     }
 
    /// Multiplicatively scale the given histogram, @a histo, by factor @a factor.
     void scale(Histo2DPtr histo, CounterAdapter factor);
 
    /// Multiplicatively scale the given histograms, @a histos, by factor @a factor.
     /// @note Constness intentional, if weird, to allow passing rvalue refs of smart ptrs (argh)
     /// @todo Use SFINAE for a generic iterable of Histo2DPtrs
     void scale(const std::vector<Histo2DPtr>& histos, CounterAdapter factor) {
       for (auto& h : histos) scale(h, factor);
     }
     /// @todo YUCK!
     template <std::size_t array_size>
     void scale(const Histo2DPtr (&histos)[array_size], CounterAdapter factor) {
       for (auto& h : histos) scale(h, factor);
     }
 
 
     /// Helper for counter division.
     ///
     /// @note Assigns to the (already registered) output scatter, @a s. Preserves the path information of the target.
     void divide(CounterPtr c1, CounterPtr c2, Scatter1DPtr s) const;
 
     /// Helper for histogram division with raw YODA objects.
     ///
     /// @note Assigns to the (already registered) output scatter, @a s. Preserves the path information of the target.
     void divide(const YODA::Counter& c1, const YODA::Counter& c2, Scatter1DPtr s) const;
 
 
     /// Helper for histogram division.
     ///
     /// @note Assigns to the (already registered) output scatter, @a s. Preserves the path information of the target.
     void divide(Histo1DPtr h1, Histo1DPtr h2, Scatter2DPtr s) const;
 
     /// Helper for histogram division with raw YODA objects.
     ///
     /// @note Assigns to the (already registered) output scatter, @a s. Preserves the path information of the target.
     void divide(const YODA::Histo1D& h1, const YODA::Histo1D& h2, Scatter2DPtr s) const;
 
 
     /// Helper for profile histogram division.
     ///
     /// @note Assigns to the (already registered) output scatter, @a s. Preserves the path information of the target.
     void divide(Profile1DPtr p1, Profile1DPtr p2, Scatter2DPtr s) const;
 
     /// Helper for profile histogram division with raw YODA objects.
     ///
     /// @note Assigns to the (already registered) output scatter, @a s. Preserves the path information of the target.
     void divide(const YODA::Profile1D& p1, const YODA::Profile1D& p2, Scatter2DPtr s) const;
 
 
     /// Helper for 2D histogram division.
     ///
     /// @note Assigns to the (already registered) output scatter, @a s. Preserves the path information of the target.
     void divide(Histo2DPtr h1, Histo2DPtr h2, Scatter3DPtr s) const;
 
     /// Helper for 2D histogram division with raw YODA objects.
     ///
     /// @note Assigns to the (already registered) output scatter, @a s. Preserves the path information of the target.
     void divide(const YODA::Histo2D& h1, const YODA::Histo2D& h2, Scatter3DPtr s) const;
 
 
     /// Helper for 2D profile histogram division.
     ///
     /// @note Assigns to the (already registered) output scatter, @a s. Preserves the path information of the target.
     void divide(Profile2DPtr p1, Profile2DPtr p2, Scatter3DPtr s) const;
 
     /// Helper for 2D profile histogram division with raw YODA objects
     ///
    /// @note Assigns to the (already registered) output scatter, @a s. Preserves the path information of the target.
     void divide(const YODA::Profile2D& p1, const YODA::Profile2D& p2, Scatter3DPtr s) const;
 
 
     /// Helper for histogram efficiency calculation.
     ///
     /// @note Assigns to the (already registered) output scatter, @a s. Preserves the path information of the target.
     void efficiency(Histo1DPtr h1, Histo1DPtr h2, Scatter2DPtr s) const;
 
     /// Helper for histogram efficiency calculation.
     ///
     /// @note Assigns to the (already registered) output scatter, @a s. Preserves the path information of the target.
     void efficiency(const YODA::Histo1D& h1, const YODA::Histo1D& h2, Scatter2DPtr s) const;
 
 
     /// Helper for histogram asymmetry calculation.
     ///
     /// @note Assigns to the (already registered) output scatter, @a s. Preserves the path information of the target.
     void asymm(Histo1DPtr h1, Histo1DPtr h2, Scatter2DPtr s) const;
 
     /// Helper for histogram asymmetry calculation.
     ///
     /// @note Assigns to the (already registered) output scatter, @a s. Preserves the path information of the target.
     void asymm(const YODA::Histo1D& h1, const YODA::Histo1D& h2, Scatter2DPtr s) const;
 
 
     /// Helper for converting a differential histo to an integral one.
     ///
     /// @note Assigns to the (already registered) output scatter, @a s. Preserves the path information of the target.
     void integrate(Histo1DPtr h, Scatter2DPtr s) const;
 
     /// Helper for converting a differential histo to an integral one.
     ///
     /// @note Assigns to the (already registered) output scatter, @a s. Preserves the path information of the target.
     void integrate(const Histo1D& h, Scatter2DPtr s) const;
 
     //@}
 
 
   public:
 
     /// List of registered analysis data objects
     const vector<MultiweightAOPtr>& analysisObjects() const {
       return _analysisobjects;
     }
 
 
   protected:
 
     /// @name Data object registration, and removal
     //@{
 
     /// Register a data object in the histogram system
     void addAnalysisObject(const MultiweightAOPtr & ao);
 
     /// Unregister a data object from the histogram system (by name)
     void removeAnalysisObject(const std::string& path);
 
     /// Unregister a data object from the histogram system (by pointer)
     void removeAnalysisObject(const MultiweightAOPtr & ao);
 
     //@}
 
 
   private:
 
     /// Name passed to constructor (used to find .info analysis data file, and as a fallback)
     string _defaultname;
 
     /// Pointer to analysis metadata object
     unique_ptr<AnalysisInfo> _info;
 
     /// Storage of all plot objects
     /// @todo Make this a map for fast lookup by path?
     vector<MultiweightAOPtr> _analysisobjects;
 
     /// @name Cross-section variables
     //@{
     double _crossSection;
     //@}
 
     /// The controlling AnalysisHandler object.
     AnalysisHandler* _analysishandler;
 
     /// Collection of cached refdata to speed up many autobookings: the
     /// reference data file should only be read once.
     mutable std::map<std::string, YODA::AnalysisObjectPtr> _refdata;
 
 
   private:
 
     /// @name Utility functions
     //@{
 
     /// Get the reference data for this paper and cache it.
     void _cacheRefData() const;
 
     //@}
 
 
     /// The assignment operator is private and must never be called.
     /// In fact, it should not even be implemented.
     Analysis& operator=(const Analysis&);
 
   };
 
 
 }
 
 
 // Include definition of analysis plugin system so that analyses automatically see it when including Analysis.hh
 #include "Rivet/AnalysisBuilder.hh"
 
 /// @def DECLARE_RIVET_PLUGIN
 /// Preprocessor define to prettify the global-object plugin hook mechanism.
 #define DECLARE_RIVET_PLUGIN(clsname) Rivet::AnalysisBuilder<clsname> plugin_ ## clsname
 
 /// @def DECLARE_ALIASED_RIVET_PLUGIN
 /// Preprocessor define to prettify the global-object plugin hook mechanism, with an extra alias name for this analysis.
 // #define DECLARE_ALIASED_RIVET_PLUGIN(clsname, alias) Rivet::AnalysisBuilder<clsname> plugin_ ## clsname ## ( ## #alias ## )
 #define DECLARE_ALIASED_RIVET_PLUGIN(clsname, alias) DECLARE_RIVET_PLUGIN(clsname)( #alias )
 
 /// @def DEFAULT_RIVET_ANALYSIS_CONSTRUCTOR
 /// Preprocessor define to prettify the manky constructor with name string argument
 #define DEFAULT_RIVET_ANALYSIS_CONSTRUCTOR(clsname) clsname() : Analysis(# clsname) {}
 
 /// @def DEFAULT_RIVET_ANALYSIS_CTOR
 /// Slight abbreviation for DEFAULT_RIVET_ANALYSIS_CONSTRUCTOR
 #define DEFAULT_RIVET_ANALYSIS_CTOR(clsname) DEFAULT_RIVET_ANALYSIS_CONSTRUCTOR(clsname)
 
 
 
 #endif