diff --git a/bin/rivet-mkhtml b/bin/rivet-mkhtml --- a/bin/rivet-mkhtml +++ b/bin/rivet-mkhtml @@ -1,508 +1,508 @@ #! /usr/bin/env python """\ %prog [options] [ ...] [PLOT:Key1=Val1:...] Make web pages from histogram files written out by Rivet. You can specify multiple Monte Carlo YODA files to be compared in the same syntax as for rivet-cmphistos, i.e. including plotting options. Reference data, analysis metadata, and plot style information should be found automatically (if not, set the RIVET_ANALYSIS_PATH or similar variables appropriately). Any existing output directory will be overwritten. ENVIRONMENT: * RIVET_ANALYSIS_PATH: list of paths to be searched for plugin analysis libraries at runtime * RIVET_DATA_PATH: list of paths to be searched for data files """ from __future__ import print_function import rivet, sys, os rivet.util.check_python_version() rivet.util.set_process_name(os.path.basename(__file__)) import glob, shutil from subprocess import Popen,PIPE from optparse import OptionParser, OptionGroup parser = OptionParser(usage=__doc__) parser.add_option("-o", "--outputdir", dest="OUTPUTDIR", default="./rivet-plots", help="directory for Web page output") parser.add_option("-c", "--config", dest="CONFIGFILES", action="append", default=["~/.make-plots"], help="plot config file(s) to be used with rivet-cmphistos") parser.add_option("-n", "--num-threads", metavar="NUMTHREADS", dest="NUMTHREADS", type=int, default=None, help="request make-plots to use a specific number of threads") parser.add_option("--ignore-missing", dest="IGNORE_MISSING", action="store_true", default=False, help="ignore missing YODA files") parser.add_option("-i", "--ignore-unvalidated", dest="IGNORE_UNVALIDATED", action="store_true", default=False, help="ignore unvalidated analyses") # parser.add_option("--ref", "--refid", dest="REF_ID", # default=None, help="ID of reference data set (file path for non-REF data)") parser.add_option("--dry-run", help="don't actually do any plotting or HTML building", dest="DRY_RUN", action="store_true", default=False) parser.add_option("--no-cleanup", dest="NO_CLEANUP", action="store_true", default=False, help="keep plotting temporary directory") parser.add_option("--no-subproc", dest="NO_SUBPROC", action="store_true", default=False, help="don't use subprocesses to render the plots in parallel -- useful for debugging") parser.add_option("--pwd", dest="PATH_PWD", action="store_true", default=False, help="append the current directory (pwd) to the analysis/data search paths (cf. 
$RIVET_ANALYSIS_PATH)") stygroup = OptionGroup(parser, "Style options") stygroup.add_option("-t", "--title", dest="TITLE", default="Plots from Rivet analyses", help="title to be displayed on the main web page") stygroup.add_option("--reftitle", dest="REFTITLE", default="Data", help="legend entry for reference data") stygroup.add_option("--no-plottitle", dest="NOPLOTTITLE", action="store_true", default=False, help="don't show the plot title on the plot " "(useful when the plot description should only be given in a caption)") stygroup.add_option("-s", "--single", dest="SINGLE", action="store_true", default=False, help="display plots on single webpage.") stygroup.add_option("--no-ratio", dest="SHOW_RATIO", action="store_false", default=True, help="don't draw a ratio plot under each main plot.") stygroup.add_option("--errs", "--mcerrs", "--mc-errs", dest="MC_ERRS", action="store_true", default=False, help="plot error bars.") stygroup.add_option("--offline", dest="OFFLINE", action="store_true", default=False, help="generate HTML that does not use external URLs.") stygroup.add_option("--pdf", dest="VECTORFORMAT", action="store_const", const="PDF", default="PDF", help="use PDF as the vector plot format.") stygroup.add_option("--ps", dest="VECTORFORMAT", action="store_const", const="PS", default="PDF", help="use PostScript as the vector plot format. DEPRECATED") stygroup.add_option("--booklet", dest="BOOKLET", action="store_true", default=False, help="create booklet (currently only available for PDF with pdftk or pdfmerge).") stygroup.add_option("--font", dest="OUTPUT_FONT", choices="palatino,cm,times,helvetica,minion".split(","), default="palatino", help="choose the font to be used in the plots") stygroup.add_option("--palatino", dest="OUTPUT_FONT", action="store_const", const="palatino", default="palatino", help="use Palatino as font (default). DEPRECATED: Use --font") stygroup.add_option("--cm", dest="OUTPUT_FONT", action="store_const", const="cm", default="palatino", help="use Computer Modern as font. DEPRECATED: Use --font") stygroup.add_option("--times", dest="OUTPUT_FONT", action="store_const", const="times", default="palatino", help="use Times as font. DEPRECATED: Use --font") stygroup.add_option("--helvetica", dest="OUTPUT_FONT", action="store_const", const="helvetica", default="palatino", help="use Helvetica as font. DEPRECATED: Use --font") stygroup.add_option("--minion", dest="OUTPUT_FONT", action="store_const", const="minion", default="palatino", help="use Adobe Minion Pro as font. 
DEPRECATED: Use --font") parser.add_option_group(stygroup) selgroup = OptionGroup(parser, "Selective plotting") selgroup.add_option("-m", "--match", action="append", dest="PATHPATTERNS", default=[], help="only write out histograms whose $path/$name string matches any of these regexes") selgroup.add_option("-M", "--unmatch", action="append", dest="PATHUNPATTERNS", default=[], help="exclude histograms whose $path/$name string matches any of these regexes") selgroup.add_option(#"-a", #< these were confusing, and -m should be enough "--ana-match", action="append", dest="ANAPATTERNS", default=[], help="only write out histograms from analyses whose name matches any of these regexes") selgroup.add_option(#"-A", #< these were confusing, and -M should be enough "--ana-unmatch", action="append", dest="ANAUNPATTERNS", default=[], help="exclude histograms from analyses whose name matches any of these regexes") parser.add_option_group(selgroup) vrbgroup = OptionGroup(parser, "Verbosity control") vrbgroup.add_option("-v", "--verbose", help="add extra debug messages", dest="VERBOSE", action="store_true", default=False) parser.add_option_group(vrbgroup) opts, yodafiles = parser.parse_args() ## Add pwd to search paths if opts.PATH_PWD: rivet.addAnalysisLibPath(os.path.abspath(".")) rivet.addAnalysisDataPath(os.path.abspath(".")) ## Check that there are some arguments! if not yodafiles: print("Error: You need to specify some YODA files to be plotted!") sys.exit(1) ## Make output directory if not opts.DRY_RUN: if os.path.exists(opts.OUTPUTDIR) and not os.path.realpath(opts.OUTPUTDIR)==os.getcwd(): import shutil shutil.rmtree(opts.OUTPUTDIR) try: os.makedirs(opts.OUTPUTDIR) except: print("Error: failed to make new directory '%s'" % opts.OUTPUTDIR) sys.exit(1) ## Get set of analyses involved in the runs plotarg = None analyses = set() blocked_analyses = set() import yoda for yodafile in yodafiles: if yodafile.startswith("PLOT:"): plotarg = yodafile continue yodafilepath = os.path.abspath(yodafile.split(":")[0]) if not os.access(yodafilepath, os.R_OK): print("Error: cannot read from %s" % yodafilepath) if opts.IGNORE_MISSING: continue else: sys.exit(2) try: ## Note: we use -m/-M flags here as well as when calling rivet-cmphistos, to potentially speed this initial loading analysisobjects = yoda.read(yodafilepath, patterns=opts.PATHPATTERNS, unpatterns=opts.PATHUNPATTERNS) except IOError as e: print("File reading error: ", e.strerror) sys.exit(1) for path, ao in analysisobjects.items(): ## Make a path object and ensure the path is in standard form try: aop = rivet.AOPath(path) except Exception as e: #print(e) print("Found analysis object with non-standard path structure:", path, "... skipping") continue ## We don't plot data objects with path components hidden by an underscore prefix - if aop.istmp(): + if aop.istmp() or aop.israw(): continue ## Identify analysis/histo name parts analysis = "ANALYSIS" if aop.basepathparts(keepref=False): analysis = rivet.stripOptions(aop.basepathparts(keepref=False)[0]) #< TODO: for compatibility with rivet-cmphistos... generalise? #analysis = "_".join(aop.dirnameparts(keepref=False)[:-1]) #< TODO: would this be nicer? 
Currently incompatible with rivet-cmphistos ## Optionally veto on analysis name pattern matching if analysis in analyses.union(blocked_analyses): continue import re matched = True if opts.ANAPATTERNS: matched = False for patt in opts.ANAPATTERNS: if re.match(patt, analysis) is not None: matched = True break if matched and opts.ANAUNPATTERNS: for patt in opts.ANAUNPATTERNS: if re.match(patt, analysis) is not None: matched = False break if matched: analyses.add(analysis) else: blocked_analyses.add(analysis) ## Sort analyses: group ascending by analysis name (could specialise grouping by collider), then ## descending by year, and finally descending by bibliographic archive ID code (INSPIRE first). def anasort(name): rtn = (1, name) if name.startswith("MC"): rtn = (99999999, name) else: stdparts = name.split("_") try: year = int(stdparts[1]) rtn = (0, stdparts[0], -year, 0) idcode = (0 if stdparts[2][0] == "I" else 1e10) - int(stdparts[2][1:]) rtn = (0, stdparts[0], -year, idcode) if len(stdparts) > 3: rtn += stdparts[3:] except: pass return rtn analyses = sorted(analyses, key=anasort) ## Uncomment to test analysis ordering on index page # print(analyses) # sys.exit(0) ## Run rivet-cmphistos to get plain .dat files from .yoda ## We do this here since it also makes the necessary directories ch_cmd = ["rivet-cmphistos"] if opts.MC_ERRS: ch_cmd.append("--mc-errs") if not opts.SHOW_RATIO: ch_cmd.append("--no-ratio") if opts.NOPLOTTITLE: ch_cmd.append("--no-plottitle") # if opts.REF_ID is not None: # ch_cmd.append("--refid=%s" % os.path.abspath(opts.REF_ID)) if opts.REFTITLE: ch_cmd.append("--reftitle=%s" % opts.REFTITLE ) if opts.PATHPATTERNS: for patt in opts.PATHPATTERNS: ch_cmd += ["-m", patt] #"'"+patt+"'"] if opts.PATHUNPATTERNS: for patt in opts.PATHUNPATTERNS: ch_cmd += ["-M", patt] #"'"+patt+"'"] ch_cmd.append("--hier-out") # TODO: Need to be able to override this: provide a --plotinfodir cmd line option? 
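## Illustrative sketch only (not generated or checked by this script): the assembled
## rivet-cmphistos call typically ends up looking something like
##   rivet-cmphistos --mc-errs --reftitle=Data --hier-out --plotinfodir=/abs/path \
##       mc1.yoda:'Title=Generator A' mc2.yoda:'LineColor=blue'
## where the file names and per-file plot keys are hypothetical examples.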
ch_cmd.append("--plotinfodir=%s" % os.path.abspath("../")) for af in yodafiles: yodafilepath = os.path.abspath(af.split(":")[0]) if af.startswith("PLOT:"): yodafilepath = "PLOT" elif not os.access(yodafilepath, os.R_OK): continue newarg = yodafilepath if ":" in af: newarg += ":" + af.split(":", 1)[1] # print(newarg) ch_cmd.append(newarg) ## Pass rivet-mkhtml -c args to rivet-cmphistos for configfile in opts.CONFIGFILES: configfile = os.path.abspath(os.path.expanduser(configfile)) if os.access(configfile, os.R_OK): ch_cmd += ["-c", configfile] if opts.VERBOSE: ch_cmd.append("--verbose") print("Calling rivet-cmphistos with the following command:") print(" ".join(ch_cmd)) ## Run rivet-cmphistos in a subdir, after fixing any relative paths in Rivet env vars if not opts.DRY_RUN: for var in ("RIVET_ANALYSIS_PATH", "RIVET_DATA_PATH", "RIVET_REF_PATH", "RIVET_INFO_PATH", "RIVET_PLOT_PATH"): if var in os.environ: abspaths = [os.path.abspath(p) for p in os.environ[var].split(":")] os.environ[var] = ":".join(abspaths) subproc = Popen(ch_cmd, cwd=opts.OUTPUTDIR, stdout=PIPE, stderr=PIPE) out, err = subproc.communicate() retcode = subproc.returncode if opts.VERBOSE or retcode != 0: print('Output from rivet-cmphistos:\n', out) if err : print('Errors from rivet-cmphistos:\n', err) if retcode != 0: print('Crash in rivet-cmphistos code = ', retcode, ' exiting') exit(retcode) ## Write web page containing all (matched) plots ## Make web pages first so that we can load it locally in ## a browser to view the output before all plots are made if not opts.DRY_RUN: style = ''' ''' ## Include MathJax configuration script = '' if not opts.OFFLINE: script = '''\ ''' ## A helper function for metadata LaTeX -> HTML conversion from rivet.util import htmlify ## A timestamp HTML fragment to be used on each page: import datetime timestamp = '

Generated at %s

\n' % datetime.datetime.now().strftime("%A, %d. %B %Y %I:%M%p") index = open(os.path.join(opts.OUTPUTDIR, "index.html"), "w") index.write('\n\n%s\n%s\n' % (opts.TITLE, style + script)) if opts.BOOKLET and opts.VECTORFORMAT == "PDF": index.write('

%s

\n\n' % opts.TITLE) else: index.write('

%s

\n\n' % opts.TITLE) if opts.SINGLE: ## Write table of contents index.write('
    \n') for analysis in analyses: summary = analysis ana = rivet.AnalysisLoader.getAnalysis(analysis) if ana: summary = "%s (%s)" % (ana.summary(), analysis) if opts.IGNORE_UNVALIDATED and ana.status() != "VALIDATED": continue index.write('
  • %s\n' % (analysis, htmlify(summary)) ) index.write('
\n') for analysis in analyses: references = [] summary = htmlify(analysis) description, inspireid, spiresid = None, None, None if analysis.find("_I") > 0: inspireid = analysis[analysis.rfind('_I')+2:len(analysis)] elif analysis.find("_S") > 0: spiresid = analysis[analysis.rfind('_S')+2:len(analysis)] ana = rivet.AnalysisLoader.getAnalysis(analysis) if ana: if ana.summary(): summary = htmlify("%s (%s)" % (ana.summary(), analysis)) references = ana.references() description = htmlify(ana.description()) spiresid = ana.spiresId() if opts.IGNORE_UNVALIDATED and ana.status().upper() != "VALIDATED": continue if opts.SINGLE: index.write('\n

%s

\n' % (analysis.encode("utf-8"), summary.encode("utf-8"))) else: index.write('\n

%s

\n' % (analysis.encode("utf-8"), summary.encode("utf-8"))) reflist = [] if inspireid: reflist.append('Inspire' % inspireid) reflist.append('HepData' % inspireid) elif spiresid: # elif ana.spiresId(): reflist.append('Inspire' % spiresid) reflist.append('HepData' % spiresid) reflist += references index.write('

%s

\n' % " | ".join(reflist)) if description: try: index.write('

%s

\n' % description) except UnicodeEncodeError as ue: print("Unicode error in analysis description for " + analysis + ": " + str(ue)) anapath = os.path.join(opts.OUTPUTDIR, analysis) if not opts.SINGLE: if not os.path.exists(anapath): os.makedirs(anapath) anaindex = open(os.path.join(anapath, "index.html"), 'w') anaindex.write('\n\n%s – %s\n%s\n\n' % (htmlify(opts.TITLE), analysis, style + script)) anaindex.write('

%s

\n' % htmlify(analysis)) anaindex.write('

Back to index

\n') if description: try: anaindex.write('

\n %s\n

\n' % description) except UnicodeEncodeError as ue: print("Unicode error in analysis description for " + analysis + ": " + str(ue)) else: anaindex = index datfiles = glob.glob("%s/*.dat" % anapath) #print(datfiles) anaindex.write('
\n') for datfile in sorted(datfiles): obsname = os.path.basename(datfile).replace(".dat", "") pngfile = obsname+".png" vecfile = obsname+"."+opts.VECTORFORMAT.lower() srcfile = obsname+".dat" if opts.SINGLE: pngfile = os.path.join(analysis, pngfile) vecfile = os.path.join(analysis, vecfile) srcfile = os.path.join(analysis, srcfile) anaindex.write('
\n') anaindex.write(' %s:
\n' % (analysis, obsname, srcfile, os.path.splitext(vecfile)[0]) ) anaindex.write(' \n' % (analysis, obsname, vecfile) ) anaindex.write(' \n' % pngfile ) anaindex.write(' \n') anaindex.write('
\n') anaindex.write('
\n') if not opts.SINGLE: anaindex.write('
%s\n
\n' % timestamp) anaindex.close() index.write('
%s\n' % timestamp) index.close() # http://stackoverflow.com/questions/377017/test-if-executable-exists-in-python def which(program): import os def is_exe(fpath): return os.path.isfile(fpath) and os.access(fpath, os.X_OK) fpath, fname = os.path.split(program) if fpath: if is_exe(program): return program else: for path in os.environ["PATH"].split(os.pathsep): path = path.strip('"') exe_file = os.path.join(path, program) if is_exe(exe_file): return exe_file return None ## Run make-plots on all generated .dat files # sys.exit(0) mp_cmd = ["make-plots"] if opts.NUMTHREADS: mp_cmd.append("--num-threads=%d" % opts.NUMTHREADS) if opts.NO_CLEANUP: mp_cmd.append("--no-cleanup") if opts.NO_SUBPROC: mp_cmd.append("--no-subproc") if opts.VECTORFORMAT == "PDF": mp_cmd.append("--pdfpng") elif opts.VECTORFORMAT == "PS": mp_cmd.append("--pspng") if opts.OUTPUT_FONT: mp_cmd.append("--font=%s" % opts.OUTPUT_FONT) # if opts.OUTPUT_FONT.upper() == "PALATINO": # mp_cmd.append("--palatino") # if opts.OUTPUT_FONT.upper() == "CM": # mp_cmd.append("--cm") # elif opts.OUTPUT_FONT.upper() == "TIMES": # mp_cmd.append("--times") # elif opts.OUTPUT_FONT.upper() == "HELVETICA": # mp_cmd.append("--helvetica") # elif opts.OUTPUT_FONT.upper() == "MINION": # mp_cmd.append("--minion") datfiles = [] for analysis in analyses: anapath = os.path.join(opts.OUTPUTDIR, analysis) #print(anapath) anadatfiles = glob.glob("%s/*.dat" % anapath) datfiles += sorted(anadatfiles) if datfiles: mp_cmd += datfiles if opts.VERBOSE: mp_cmd.append("--verbose") print("Calling make-plots with the following options:") print(" ".join(mp_cmd)) if not opts.DRY_RUN: Popen(mp_cmd).wait() if opts.BOOKLET and opts.VECTORFORMAT=="PDF": if which("pdftk") is not None: bookletcmd = ["pdftk"] for analysis in analyses: anapath = os.path.join(opts.OUTPUTDIR, analysis) bookletcmd += sorted(glob.glob("%s/*.pdf" % anapath)) bookletcmd += ["cat", "output", "%s/booklet.pdf" % opts.OUTPUTDIR] print(bookletcmd) Popen(bookletcmd).wait() elif which("pdfmerge") is not None: bookletcmd = ["pdfmerge"] for analysis in analyses: anapath = os.path.join(opts.OUTPUTDIR, analysis) bookletcmd += sorted(glob.glob("%s/*.pdf" % anapath)) bookletcmd += ["%s/booklet.pdf" % opts.OUTPUTDIR] print(bookletcmd) Popen(bookletcmd).wait() else: print("Neither pdftk nor pdfmerge available --- not booklet output possible") diff --git a/include/Rivet/Analysis.hh b/include/Rivet/Analysis.hh --- a/include/Rivet/Analysis.hh +++ b/include/Rivet/Analysis.hh @@ -1,1209 +1,1226 @@ // -*- C++ -*- #ifndef RIVET_Analysis_HH #define RIVET_Analysis_HH #include "Rivet/Config/RivetCommon.hh" #include "Rivet/AnalysisInfo.hh" #include "Rivet/Event.hh" #include "Rivet/Projection.hh" #include "Rivet/ProjectionApplier.hh" #include "Rivet/ProjectionHandler.hh" #include "Rivet/AnalysisLoader.hh" #include "Rivet/Tools/Cuts.hh" #include "Rivet/Tools/Logging.hh" #include "Rivet/Tools/ParticleUtils.hh" #include "Rivet/Tools/BinnedHistogram.hh" #include "Rivet/Tools/RivetMT2.hh" #include "Rivet/Tools/RivetYODA.hh" #include "Rivet/Tools/Percentile.hh" #include "Rivet/Projections/CentralityProjection.hh" /// @def vetoEvent /// Preprocessor define for vetoing events, including the log message and return. #define vetoEvent \ do { MSG_DEBUG("Vetoing event on line " << __LINE__ << " of " << __FILE__); return; } while(0) namespace Rivet { // Forward declaration class AnalysisHandler; /// @brief This is the base class of all analysis classes in Rivet. 
/// /// There are /// three virtual functions which should be implemented in base classes: /// /// void init() is called by Rivet before a run is started. Here the /// analysis class should book necessary histograms. The needed /// projections should probably rather be constructed in the /// constructor. /// /// void analyze(const Event&) is called once for each event. Here the /// analysis class should apply the necessary Projections and fill the /// histograms. /// /// void finalize() is called after a run is finished. Here the analysis /// class should do whatever manipulations are necessary on the /// histograms. Writing the histograms to a file is, however, done by /// the Rivet class. class Analysis : public ProjectionApplier { /// The AnalysisHandler is a friend. friend class AnalysisHandler; public: /// @name Standard constructors and destructors. //@{ // /// The default constructor. // Analysis(); /// Constructor Analysis(const std::string& name); /// The destructor. virtual ~Analysis() {} //@} public: /// @name Main analysis methods //@{ /// Initialize this analysis object. A concrete class should here /// book all necessary histograms. An overridden function must make /// sure it first calls the base class function. virtual void init() { } /// Analyze one event. A concrete class should here apply the /// necessary projections on the \a event and fill the relevant /// histograms. An overridden function must make sure it first calls /// the base class function. virtual void analyze(const Event& event) = 0; /// Finalize this analysis object. A concrete class should here make /// all necessary operations on the histograms. Writing the /// histograms to a file is, however, done by the Rivet class. An /// overridden function must make sure it first calls the base class /// function. virtual void finalize() { } //@} public: /// @name Metadata /// Metadata is used for querying from the command line and also for /// building web pages and the analysis pages in the Rivet manual. //@{ /// Get the actual AnalysisInfo object in which all this metadata is stored. const AnalysisInfo& info() const { assert(_info && "No AnalysisInfo object :O"); return *_info; } /// @brief Get the name of the analysis. /// /// By default this is computed by combining the results of the /// experiment, year and Spires ID metadata methods and you should /// only override it if there's a good reason why those won't /// work. If options has been set for this instance, a /// corresponding string is appended at the end. virtual std::string name() const { return ( (info().name().empty()) ? _defaultname : info().name() ) + _optstring; } // get name of reference data file, which could be different from plugin name virtual std::string getRefDataName() const { return (info().getRefDataName().empty()) ? _defaultname : info().getRefDataName(); } // set name of reference data file, which could be different from plugin name virtual void setRefDataName(const std::string& ref_data="") { info().setRefDataName(!ref_data.empty() ? ref_data : name()); } /// Get the Inspire ID code for this analysis. virtual std::string inspireId() const { return info().inspireId(); } /// Get the SPIRES ID code for this analysis (~deprecated). virtual std::string spiresId() const { return info().spiresId(); } /// @brief Names & emails of paper/analysis authors. /// /// Names and email of authors in 'NAME \' format. The first /// name in the list should be the primary contact person. 
virtual std::vector authors() const { return info().authors(); } /// @brief Get a short description of the analysis. /// /// Short (one sentence) description used as an index entry. /// Use @a description() to provide full descriptive paragraphs /// of analysis details. virtual std::string summary() const { return info().summary(); } /// @brief Get a full description of the analysis. /// /// Full textual description of this analysis, what it is useful for, /// what experimental techniques are applied, etc. Should be treated /// as a chunk of restructuredText (http://docutils.sourceforge.net/rst.html), /// with equations to be rendered as LaTeX with amsmath operators. virtual std::string description() const { return info().description(); } /// @brief Information about the events needed as input for this analysis. /// /// Event types, energies, kinematic cuts, particles to be considered /// stable, etc. etc. Should be treated as a restructuredText bullet list /// (http://docutils.sourceforge.net/rst.html) virtual std::string runInfo() const { return info().runInfo(); } /// Experiment which performed and published this analysis. virtual std::string experiment() const { return info().experiment(); } /// Collider on which the experiment ran. virtual std::string collider() const { return info().collider(); } /// When the original experimental analysis was published. virtual std::string year() const { return info().year(); } /// The luminosity in inverse femtobarn virtual std::string luminosityfb() const { return info().luminosityfb(); } /// Journal, and preprint references. virtual std::vector references() const { return info().references(); } /// BibTeX citation key for this article. virtual std::string bibKey() const { return info().bibKey(); } /// BibTeX citation entry for this article. virtual std::string bibTeX() const { return info().bibTeX(); } /// Whether this analysis is trusted (in any way!) virtual std::string status() const { return (info().status().empty()) ? "UNVALIDATED" : info().status(); } /// Any work to be done on this analysis. virtual std::vector todos() const { return info().todos(); } /// Return the allowed pairs of incoming beams required by this analysis. virtual const std::vector& requiredBeams() const { return info().beams(); } /// Declare the allowed pairs of incoming beams required by this analysis. virtual Analysis& setRequiredBeams(const std::vector& requiredBeams) { info().setBeams(requiredBeams); return *this; } /// Sets of valid beam energy pairs, in GeV virtual const std::vector >& requiredEnergies() const { return info().energies(); } /// Get vector of analysis keywords virtual const std::vector & keywords() const { return info().keywords(); } /// Declare the list of valid beam energy pairs, in GeV virtual Analysis& setRequiredEnergies(const std::vector >& requiredEnergies) { info().setEnergies(requiredEnergies); return *this; } /// Return true if this analysis needs to know the process cross-section. /// @todo Remove this and require HepMC >= 2.06 bool needsCrossSection() const { return info().needsCrossSection(); } /// Declare whether this analysis needs to know the process cross-section from the generator. /// @todo Remove this and require HepMC >= 2.06 Analysis& setNeedsCrossSection(bool needed=true) { info().setNeedsCrossSection(needed); return *this; } //@} /// @name Internal metadata modifying methods //@{ /// Get the actual AnalysisInfo object in which all this metadata is stored (non-const). 
AnalysisInfo& info() { assert(_info && "No AnalysisInfo object :O"); return *_info; } //@} /// @name Run conditions //@{ /// Incoming beams for this run const ParticlePair& beams() const; /// Incoming beam IDs for this run const PdgIdPair beamIds() const; /// Centre of mass energy for this run double sqrtS() const; //@} /// @name Analysis / beam compatibility testing //@{ /// Check if analysis is compatible with the provided beam particle IDs and energies bool isCompatible(const ParticlePair& beams) const; /// Check if analysis is compatible with the provided beam particle IDs and energies bool isCompatible(PdgId beam1, PdgId beam2, double e1, double e2) const; /// Check if analysis is compatible with the provided beam particle IDs and energies bool isCompatible(const PdgIdPair& beams, const std::pair& energies) const; //@} /// Set the cross section from the generator Analysis& setCrossSection(double xs); //, double xserr=0.0); /// Access the controlling AnalysisHandler object. AnalysisHandler& handler() const { return *_analysishandler; } protected: /// Get a Log object based on the name() property of the calling analysis object. Log& getLog() const; /// Get the process cross-section in pb. Throws if this hasn't been set. double crossSection() const; /// Get the process cross-section per generated event in pb. Throws if this /// hasn't been set. double crossSectionPerEvent() const; /// @brief Get the number of events seen (via the analysis handler). /// /// @note Use in the finalize phase only. size_t numEvents() const; /// @brief Get the sum of event weights seen (via the analysis handler). /// /// @note Use in the finalize phase only. double sumW() const; /// Alias double sumOfWeights() const { return sumW(); } /// @brief Get the sum of squared event weights seen (via the analysis handler). /// /// @note Use in the finalize phase only. double sumW2() const; protected: /// @name Histogram paths //@{ /// Get the canonical histogram "directory" path for this analysis. const std::string histoDir() const; /// Get the canonical histogram path for the named histogram in this analysis. const std::string histoPath(const std::string& hname) const; /// Get the canonical histogram path for the numbered histogram in this analysis. const std::string histoPath(unsigned int datasetId, unsigned int xAxisId, unsigned int yAxisId) const; /// Get the internal histogram name for given d, x and y (cf. HepData) const std::string mkAxisCode(unsigned int datasetId, unsigned int xAxisId, unsigned int yAxisId) const; /// Alias /// @deprecated Prefer the "mk" form, consistent with other "making function" names const std::string makeAxisCode(unsigned int datasetId, unsigned int xAxisId, unsigned int yAxisId) const { return mkAxisCode(datasetId, xAxisId, yAxisId); } //@} /// @name Histogram reference data //@{ /// Get reference data for a named histo /// @todo SFINAE to ensure that the type inherits from YODA::AnalysisObject? template const T& refData(const string& hname) const { _cacheRefData(); MSG_TRACE("Using histo bin edges for " << name() << ":" << hname); if (!_refdata[hname]) { MSG_ERROR("Can't find reference histogram " << hname); throw Exception("Reference data " + hname + " not found."); } return dynamic_cast(*_refdata[hname]); } /// Get reference data for a numbered histo /// @todo SFINAE to ensure that the type inherits from YODA::AnalysisObject? 
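    /// @par Example (illustrative sketch only; the d/x/y codes are made up)
    /// @code
    ///   // Resolves to the reference entry booked as "d02-x01-y03" for this analysis
    ///   const Scatter2D& ref = refData<Scatter2D>(2, 1, 3);
    /// @endcode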
template const T& refData(unsigned int datasetId, unsigned int xAxisId, unsigned int yAxisId) const { const string hname = makeAxisCode(datasetId, xAxisId, yAxisId); return refData(hname); } //@} /// @name Counter booking //@{ /// Book a counter. CounterPtr bookCounter(const std::string& name, const std::string& title=""); // const std::string& valtitle="" /// Book a counter, using a path generated from the dataset and axis ID codes /// /// The paper, dataset and x/y-axis IDs will be used to build the histo name in the HepData standard way. CounterPtr bookCounter(unsigned int datasetId, unsigned int xAxisId, unsigned int yAxisId, const std::string& title=""); // const std::string& valtitle="" //@} /// @name 1D histogram booking //@{ /// Book a 1D histogram with @a nbins uniformly distributed across the range @a lower - @a upper . Histo1DPtr bookHisto1D(const std::string& name, size_t nbins, double lower, double upper, const std::string& title="", const std::string& xtitle="", const std::string& ytitle=""); /// Book a 1D histogram with non-uniform bins defined by the vector of bin edges @a binedges . Histo1DPtr bookHisto1D(const std::string& name, const std::vector& binedges, const std::string& title="", const std::string& xtitle="", const std::string& ytitle=""); /// Book a 1D histogram with non-uniform bins defined by the vector of bin edges @a binedges . Histo1DPtr bookHisto1D(const std::string& name, const std::initializer_list& binedges, const std::string& title="", const std::string& xtitle="", const std::string& ytitle=""); /// Book a 1D histogram with binning from a reference scatter. Histo1DPtr bookHisto1D(const std::string& name, const Scatter2D& refscatter, const std::string& title="", const std::string& xtitle="", const std::string& ytitle=""); /// Book a 1D histogram, using the binnings in the reference data histogram. Histo1DPtr bookHisto1D(const std::string& name, const std::string& title="", const std::string& xtitle="", const std::string& ytitle=""); /// Book a 1D histogram, using the binnings in the reference data histogram. /// /// The paper, dataset and x/y-axis IDs will be used to build the histo name in the HepData standard way. Histo1DPtr bookHisto1D(unsigned int datasetId, unsigned int xAxisId, unsigned int yAxisId, const std::string& title="", const std::string& xtitle="", const std::string& ytitle=""); //@} /// @name 2D histogram booking //@{ /// Book a 2D histogram with @a nxbins and @a nybins uniformly /// distributed across the ranges @a xlower - @a xupper and @a /// ylower - @a yupper respectively along the x- and y-axis. Histo2DPtr bookHisto2D(const std::string& name, size_t nxbins, double xlower, double xupper, size_t nybins, double ylower, double yupper, const std::string& title="", const std::string& xtitle="", const std::string& ytitle="", const std::string& ztitle=""); /// Book a 2D histogram with non-uniform bins defined by the /// vectors of bin edges @a xbinedges and @a ybinedges. Histo2DPtr bookHisto2D(const std::string& name, const std::vector& xbinedges, const std::vector& ybinedges, const std::string& title="", const std::string& xtitle="", const std::string& ytitle="", const std::string& ztitle=""); /// Book a 2D histogram with non-uniform bins defined by the /// vectors of bin edges @a xbinedges and @a ybinedges. 
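    /// @par Example (sketch only; the histogram name, member and bin edges are made up)
    /// @code
    ///   _h_etaphi = bookHisto2D("d03-x01-y01", {-2.5, 0.0, 2.5}, {0.0, 1.6, 3.2});
    /// @endcode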
Histo2DPtr bookHisto2D(const std::string& name, const std::initializer_list& xbinedges, const std::initializer_list& ybinedges, const std::string& title="", const std::string& xtitle="", const std::string& ytitle="", const std::string& ztitle=""); /// Book a 2D histogram with binning from a reference scatter. Histo2DPtr bookHisto2D(const std::string& name, const Scatter3D& refscatter, const std::string& title="", const std::string& xtitle="", const std::string& ytitle="", const std::string& ztitle=""); /// Book a 2D histogram, using the binnings in the reference data histogram. Histo2DPtr bookHisto2D(const std::string& name, const std::string& title="", const std::string& xtitle="", const std::string& ytitle="", const std::string& ztitle=""); /// Book a 2D histogram, using the binnings in the reference data histogram. /// /// The paper, dataset and x/y-axis IDs will be used to build the histo name in the HepData standard way. Histo2DPtr bookHisto2D(unsigned int datasetId, unsigned int xAxisId, unsigned int yAxisId, const std::string& title="", const std::string& xtitle="", const std::string& ytitle="", const std::string& ztitle=""); //@} /// @name 1D profile histogram booking //@{ /// Book a 1D profile histogram with @a nbins uniformly distributed across the range @a lower - @a upper . Profile1DPtr bookProfile1D(const std::string& name, size_t nbins, double lower, double upper, const std::string& title="", const std::string& xtitle="", const std::string& ytitle=""); /// Book a 1D profile histogram with non-uniform bins defined by the vector of bin edges @a binedges . Profile1DPtr bookProfile1D(const std::string& name, const std::vector& binedges, const std::string& title="", const std::string& xtitle="", const std::string& ytitle=""); /// Book a 1D profile histogram with non-uniform bins defined by the vector of bin edges @a binedges . Profile1DPtr bookProfile1D(const std::string& name, const std::initializer_list& binedges, const std::string& title="", const std::string& xtitle="", const std::string& ytitle=""); /// Book a 1D profile histogram with binning from a reference scatter. Profile1DPtr bookProfile1D(const std::string& name, const Scatter2D& refscatter, const std::string& title="", const std::string& xtitle="", const std::string& ytitle=""); /// Book a 1D profile histogram, using the binnings in the reference data histogram. Profile1DPtr bookProfile1D(const std::string& name, const std::string& title="", const std::string& xtitle="", const std::string& ytitle=""); /// Book a 1D profile histogram, using the binnings in the reference data histogram. /// /// The paper, dataset and x/y-axis IDs will be used to build the histo name in the HepData standard way. Profile1DPtr bookProfile1D(unsigned int datasetId, unsigned int xAxisId, unsigned int yAxisId, const std::string& title="", const std::string& xtitle="", const std::string& ytitle=""); //@} /// @name 2D profile histogram booking //@{ /// Book a 2D profile histogram with @a nxbins and @a nybins uniformly /// distributed across the ranges @a xlower - @a xupper and @a ylower - @a /// yupper respectively along the x- and y-axis. Profile2DPtr bookProfile2D(const std::string& name, size_t nxbins, double xlower, double xupper, size_t nybins, double ylower, double yupper, const std::string& title="", const std::string& xtitle="", const std::string& ytitle="", const std::string& ztitle=""); /// Book a 2D profile histogram with non-uniform bins defined by the vectorx /// of bin edges @a xbinedges and @a ybinedges. 
Profile2DPtr bookProfile2D(const std::string& name, const std::vector& xbinedges, const std::vector& ybinedges, const std::string& title="", const std::string& xtitle="", const std::string& ytitle="", const std::string& ztitle=""); /// Book a 2D profile histogram with non-uniform bins defined by the vectorx /// of bin edges @a xbinedges and @a ybinedges. Profile2DPtr bookProfile2D(const std::string& name, const std::initializer_list& xbinedges, const std::initializer_list& ybinedges, const std::string& title="", const std::string& xtitle="", const std::string& ytitle="", const std::string& ztitle=""); /// Book a 2D profile histogram with binning from a reference scatter. Profile2DPtr bookProfile2D(const std::string& name, const Scatter3D& refscatter, const std::string& title="", const std::string& xtitle="", const std::string& ytitle="", const std::string& ztitle=""); /// Book a 2D profile histogram, using the binnings in the reference data histogram. Profile2DPtr bookProfile2D(const std::string& name, const std::string& title="", const std::string& xtitle="", const std::string& ytitle="", const std::string& ztitle=""); /// Book a 2D profile histogram, using the binnings in the reference data histogram. /// /// The paper, dataset and x/y-axis IDs will be used to build the histo name in the HepData standard way. Profile2DPtr bookProfile2D(unsigned int datasetId, unsigned int xAxisId, unsigned int yAxisId, const std::string& title="", const std::string& xtitle="", const std::string& ytitle="", const std::string& ztitle=""); //@} /// @name 2D scatter booking //@{ /// @brief Book a 2-dimensional data point set with the given name. /// /// @note Unlike histogram booking, scatter booking by default makes no /// attempt to use reference data to pre-fill the data object. If you want /// this, which is sometimes useful e.g. when the x-position is not really /// meaningful and can't be extracted from the data, then set the @a /// copy_pts parameter to true. This creates points to match the reference /// data's x values and errors, but with the y values and errors zeroed... /// assuming that there is a reference histo with the same name: if there /// isn't, an exception will be thrown. Scatter2DPtr bookScatter2D(const std::string& name, bool copy_pts=false, const std::string& title="", const std::string& xtitle="", const std::string& ytitle=""); /// @brief Book a 2-dimensional data point set, using the binnings in the reference data histogram. /// /// The paper, dataset and x/y-axis IDs will be used to build the histo name in the HepData standard way. /// /// @note Unlike histogram booking, scatter booking by default makes no /// attempt to use reference data to pre-fill the data object. If you want /// this, which is sometimes useful e.g. when the x-position is not really /// meaningful and can't be extracted from the data, then set the @a /// copy_pts parameter to true. This creates points to match the reference /// data's x values and errors, but with the y values and errors zeroed. Scatter2DPtr bookScatter2D(unsigned int datasetId, unsigned int xAxisId, unsigned int yAxisId, bool copy_pts=false, const std::string& title="", const std::string& xtitle="", const std::string& ytitle=""); /// @brief Book a 2-dimensional data point set with equally spaced x-points in a range. /// /// The y values and errors will be set to 0. 
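    /// @par Example (sketch only; the name and range are made up)
    /// @code
    ///   _s_res = bookScatter2D("d05-x01-y01", 20, 0.0, 100.0);  // 20 evenly spaced x points, y zeroed
    /// @endcode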
Scatter2DPtr bookScatter2D(const std::string& name, size_t npts, double lower, double upper, const std::string& title="", const std::string& xtitle="", const std::string& ytitle=""); /// @brief Book a 2-dimensional data point set based on provided contiguous "bin edges". /// /// The y values and errors will be set to 0. Scatter2DPtr bookScatter2D(const std::string& hname, const std::vector& binedges, const std::string& title, const std::string& xtitle, const std::string& ytitle); //@} public: /// @name accessing options for this Analysis instance. //@{ /// Get an option for this analysis instance as a string. std::string getOption(std::string optname) { if ( _options.find(optname) != _options.end() ) return _options.find(optname)->second; return ""; } /// Get an option for this analysis instance converted to a /// specific type (given by the specified @a def value). template T getOption(std::string optname, T def) { if (_options.find(optname) == _options.end()) return def; std::stringstream ss; ss << _options.find(optname)->second; T ret; ss >> ret; return ret; } //@} /// @brief Book a CentralityProjection /// /// Using a SingleValueProjection, @a proj, giving the value of an /// experimental observable to be used as a centrality estimator, /// book a CentralityProjection based on the experimentally /// measured pecentiles of this observable (as given by the /// reference data for the @a calHistName histogram in the @a /// calAnaName analysis. If a preloaded file with the output of a /// run using the @a calAnaName analysis contains a valid /// generated @a calHistName histogram, it will be used as an /// optional percentile binning. Also if this preloaded file /// contains a histogram with the name @a calHistName with an /// appended "_IMP" This histogram will be used to add an optional /// centrality percentile based on the generated impact /// parameter. If @increasing is true, a low (high) value of @proj /// is assumed to correspond to a more peripheral (central) event. const CentralityProjection& declareCentrality(const SingleValueProjection &proj, string calAnaName, string calHistName, const string projName, bool increasing = false); /// @brief Book a Pecentile wrapper around AnalysisObjects. /// /// Based on a previously registered CentralityProjection named @a /// projName book one AnalysisObject for each @a centralityBin and /// name them according to the corresponding code in the @a ref /// vector. 
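    /// @par Example (sketch only; the projection name, centrality bins and d/x/y codes are made up)
    /// @code
    ///   Percentile<Histo1D> _p_mult = bookPercentile<Histo1D>("CMULT",
    ///       {{0., 10.}, {10., 20.}}, {{1, 1, 1}, {1, 1, 2}});
    /// @endcode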
template Percentile bookPercentile(string projName, vector > centralityBins, vector > ref) { typedef typename ReferenceTraits::RefT RefT; - const CentralityProjection & proj = - getProjection(projName); - Percentile pctl(this, projName, proj.projections().size()); + Percentile pctl(this, projName); const int nCent = centralityBins.size(); for (int iCent = 0; iCent < nCent; ++iCent) { const string axisCode = makeAxisCode(std::get<0>(ref[iCent]), std::get<1>(ref[iCent]), std::get<2>(ref[iCent])); - const RefT & refscatter = refData(axisCode); - shared_ptr temp = make_shared(refscatter, histoPath(axisCode)); - - vector aos = - pctl.add(proj, *temp, centralityBins[iCent]); - for ( auto ao : aos ) addAnalysisObject(ao); + shared_ptr ao; + CounterPtr cnt; + try { + ao = getAnalysisObject(axisCode); + MSG_TRACE("Found old " << histoPath(axisCode)); + } + catch (Exception) { + const RefT & refscatter = refData(axisCode); + ao = make_shared(refscatter, histoPath(axisCode)); + addAnalysisObject(ao); + MSG_TRACE("Created new " << histoPath(axisCode)); + } + try { + cnt = getAnalysisObject("TMP/COUNTER/" + axisCode); + MSG_TRACE("Found old " << histoPath("TMP/COUNTER/" + axisCode)); + } + catch (Exception) { + cnt = make_shared(histoPath("TMP/COUNTER/" + axisCode)); + addAnalysisObject(cnt); + MSG_TRACE("Created new " << histoPath("TMP/COUNTER/" + axisCode)); + } + pctl.add(ao, cnt, centralityBins[iCent]); } return pctl; } /// @brief Book Pecentile wrappers around AnalysisObjects. /// /// Based on a previously registered CentralityProjection named @a /// projName book one (or several) AnalysisObject(s) named /// according to @a ref where the x-axis will be filled according /// to the percentile output(s) of the @projName. template PercentileXaxis bookPercentileXaxis(string projName, tuple ref) { typedef typename ReferenceTraits::RefT RefT; - const CentralityProjection & proj = - getProjection(projName); - PercentileXaxis pctl(this, projName, proj.projections().size()); + PercentileXaxis pctl(this, projName); const string axisCode = makeAxisCode(std::get<0>(ref), std::get<1>(ref), std::get<2>(ref)); - const RefT& refscatter = refData(axisCode); + shared_ptr ao; + CounterPtr cnt; + try { + ao = getAnalysisObject(histoPath(axisCode)); + } + catch (Exception) { + const RefT & refscatter = refData(axisCode); + ao = make_shared(refscatter, axisCode); + addAnalysisObject(ao); + } + pctl.add(proj, ao, make_shared()); - shared_ptr temp = make_shared(refscatter, histoPath(axisCode)); - - vector aos = pctl.add(proj, *temp); - for ( auto ao : aos ) addAnalysisObject(ao); - return pctl; } /// @name Analysis object manipulation /// @todo Should really be protected: only public to keep BinnedHistogram happy for now... //@{ /// Multiplicatively scale the given counter, @a cnt, by factor @s factor. void scale(CounterPtr cnt, double factor); /// Multiplicatively scale the given counters, @a cnts, by factor @s factor. /// @note Constness intentional, if weird, to allow passing rvalue refs of smart ptrs (argh) /// @todo Use SFINAE for a generic iterable of CounterPtrs void scale(const std::vector& cnts, double factor) { for (auto& c : cnts) scale(c, factor); } /// @todo YUCK! template void scale(const CounterPtr (&cnts)[array_size], double factor) { // for (size_t i = 0; i < std::extent::value; ++i) scale(cnts[i], factor); for (auto& c : cnts) scale(c, factor); } /// Normalize the given histogram, @a histo, to area = @a norm. 
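    /// @par Example (sketch only; "_h_pT" and "_h_eta" are hypothetical member pointers)
    /// @code
    ///   void finalize() {
    ///     scale(_h_pT, crossSection()/sumOfWeights());  // convert to a differential cross-section in pb
    ///     normalize(_h_eta);                            // normalise to unit area
    ///   }
    /// @endcode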
void normalize(Histo1DPtr histo, double norm=1.0, bool includeoverflows=true); /// Normalize the given histograms, @a histos, to area = @a norm. /// @note Constness intentional, if weird, to allow passing rvalue refs of smart ptrs (argh) /// @todo Use SFINAE for a generic iterable of Histo1DPtrs void normalize(const std::vector& histos, double norm=1.0, bool includeoverflows=true) { for (auto& h : histos) normalize(h, norm, includeoverflows); } /// @todo YUCK! template void normalize(const Histo1DPtr (&histos)[array_size], double norm=1.0, bool includeoverflows=true) { for (auto& h : histos) normalize(h, norm, includeoverflows); } /// Multiplicatively scale the given histogram, @a histo, by factor @s factor. void scale(Histo1DPtr histo, double factor); /// Multiplicatively scale the given histograms, @a histos, by factor @s factor. /// @note Constness intentional, if weird, to allow passing rvalue refs of smart ptrs (argh) /// @todo Use SFINAE for a generic iterable of Histo1DPtrs void scale(const std::vector& histos, double factor) { for (auto& h : histos) scale(h, factor); } /// @todo YUCK! template void scale(const Histo1DPtr (&histos)[array_size], double factor) { for (auto& h : histos) scale(h, factor); } /// Normalize the given histogram, @a histo, to area = @a norm. void normalize(Histo2DPtr histo, double norm=1.0, bool includeoverflows=true); /// Normalize the given histograms, @a histos, to area = @a norm. /// @note Constness intentional, if weird, to allow passing rvalue refs of smart ptrs (argh) /// @todo Use SFINAE for a generic iterable of Histo2DPtrs void normalize(const std::vector& histos, double norm=1.0, bool includeoverflows=true) { for (auto& h : histos) normalize(h, norm, includeoverflows); } /// @todo YUCK! template void normalize(const Histo2DPtr (&histos)[array_size], double norm=1.0, bool includeoverflows=true) { for (auto& h : histos) normalize(h, norm, includeoverflows); } /// Multiplicatively scale the given histogram, @a histo, by factor @s factor. void scale(Histo2DPtr histo, double factor); /// Multiplicatively scale the given histograms, @a histos, by factor @s factor. /// @note Constness intentional, if weird, to allow passing rvalue refs of smart ptrs (argh) /// @todo Use SFINAE for a generic iterable of Histo2DPtrs void scale(const std::vector& histos, double factor) { for (auto& h : histos) scale(h, factor); } /// @todo YUCK! template void scale(const Histo2DPtr (&histos)[array_size], double factor) { for (auto& h : histos) scale(h, factor); } /// Helper for counter division. /// /// @note Assigns to the (already registered) output scatter, @a s. Preserves the path information of the target. void divide(CounterPtr c1, CounterPtr c2, Scatter1DPtr s) const; /// Helper for histogram division with raw YODA objects. /// /// @note Assigns to the (already registered) output scatter, @a s. Preserves the path information of the target. void divide(const YODA::Counter& c1, const YODA::Counter& c2, Scatter1DPtr s) const; /// Helper for histogram division. /// /// @note Assigns to the (already registered) output scatter, @a s. Preserves the path information of the target. void divide(Histo1DPtr h1, Histo1DPtr h2, Scatter2DPtr s) const; /// Helper for histogram division with raw YODA objects. /// /// @note Assigns to the (already registered) output scatter, @a s. Preserves the path information of the target. void divide(const YODA::Histo1D& h1, const YODA::Histo1D& h2, Scatter2DPtr s) const; /// Helper for profile histogram division. 
/// /// @note Assigns to the (already registered) output scatter, @a s. Preserves the path information of the target. void divide(Profile1DPtr p1, Profile1DPtr p2, Scatter2DPtr s) const; /// Helper for profile histogram division with raw YODA objects. /// /// @note Assigns to the (already registered) output scatter, @a s. Preserves the path information of the target. void divide(const YODA::Profile1D& p1, const YODA::Profile1D& p2, Scatter2DPtr s) const; /// Helper for 2D histogram division. /// /// @note Assigns to the (already registered) output scatter, @a s. Preserves the path information of the target. void divide(Histo2DPtr h1, Histo2DPtr h2, Scatter3DPtr s) const; /// Helper for 2D histogram division with raw YODA objects. /// /// @note Assigns to the (already registered) output scatter, @a s. Preserves the path information of the target. void divide(const YODA::Histo2D& h1, const YODA::Histo2D& h2, Scatter3DPtr s) const; /// Helper for 2D profile histogram division. /// /// @note Assigns to the (already registered) output scatter, @a s. Preserves the path information of the target. void divide(Profile2DPtr p1, Profile2DPtr p2, Scatter3DPtr s) const; /// Helper for 2D profile histogram division with raw YODA objects /// /// @note Assigns to the (already registered) output scatter, @a s. Preserves the path information of the target. void divide(const YODA::Profile2D& p1, const YODA::Profile2D& p2, Scatter3DPtr s) const; /// Helper for histogram efficiency calculation. /// /// @note Assigns to the (already registered) output scatter, @a s. Preserves the path information of the target. void efficiency(Histo1DPtr h1, Histo1DPtr h2, Scatter2DPtr s) const; /// Helper for histogram efficiency calculation. /// /// @note Assigns to the (already registered) output scatter, @a s. Preserves the path information of the target. void efficiency(const YODA::Histo1D& h1, const YODA::Histo1D& h2, Scatter2DPtr s) const; /// Helper for histogram asymmetry calculation. /// /// @note Assigns to the (already registered) output scatter, @a s. Preserves the path information of the target. void asymm(Histo1DPtr h1, Histo1DPtr h2, Scatter2DPtr s) const; /// Helper for histogram asymmetry calculation. /// /// @note Assigns to the (already registered) output scatter, @a s. Preserves the path information of the target. void asymm(const YODA::Histo1D& h1, const YODA::Histo1D& h2, Scatter2DPtr s) const; /// Helper for converting a differential histo to an integral one. /// /// @note Assigns to the (already registered) output scatter, @a s. Preserves the path information of the target. void integrate(Histo1DPtr h, Scatter2DPtr s) const; /// Helper for converting a differential histo to an integral one. /// /// @note Assigns to the (already registered) output scatter, @a s. Preserves the path information of the target. 
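    /// @par Example (sketch only; the members are hypothetical)
    /// @code
    ///   integrate(*_h_dsigdpt, _s_sigcum);  // fill _s_sigcum with the running integral of _h_dsigdpt
    /// @endcode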
void integrate(const Histo1D& h, Scatter2DPtr s) const; //@} public: /// List of registered analysis data objects const vector& analysisObjects() const { return _analysisobjects; } protected: /// @name Data object registration, retrieval, and removal //@{ /// Register a data object in the histogram system void addAnalysisObject(AnalysisObjectPtr ao); /// Get a data object from the histogram system template const std::shared_ptr getAnalysisObject(const std::string& name) const { foreach (const AnalysisObjectPtr& ao, analysisObjects()) { if (ao->path() == histoPath(name)) return dynamic_pointer_cast(ao); } throw Exception("Data object " + histoPath(name) + " not found"); } /// Get a data object from the histogram system (non-const) template std::shared_ptr getAnalysisObject(const std::string& name) { foreach (const AnalysisObjectPtr& ao, analysisObjects()) { if (ao->path() == histoPath(name)) return dynamic_pointer_cast(ao); } throw Exception("Data object " + histoPath(name) + " not found"); } /// Unregister a data object from the histogram system (by name) void removeAnalysisObject(const std::string& path); /// Unregister a data object from the histogram system (by pointer) void removeAnalysisObject(AnalysisObjectPtr ao); /// Get all data object from the AnalysisHandler. vector getAllData(bool includeorphans) const; /// Get a data object from another analysis (e.g. preloaded /// calibration histogram). /// Get a data object from the histogram system (non-const) template std::shared_ptr getAnalysisObject(const std::string & ananame, const std::string& name) { std::string path = "/" + ananame + "/" + name; for ( AnalysisObjectPtr ao : getAllData(true) ) { if ( ao->path() == path ) return dynamic_pointer_cast(ao); } return std::shared_ptr(); } /// Get a named Histo1D object from the histogram system const Histo1DPtr getHisto1D(const std::string& name) const { return getAnalysisObject(name); } /// Get a named Histo1D object from the histogram system (non-const) Histo1DPtr getHisto1D(const std::string& name) { return getAnalysisObject(name); } /// Get a Histo1D object from the histogram system by axis ID codes (non-const) const Histo1DPtr getHisto1D(unsigned int datasetId, unsigned int xAxisId, unsigned int yAxisId) const { return getAnalysisObject(makeAxisCode(datasetId, xAxisId, yAxisId)); } /// Get a Histo1D object from the histogram system by axis ID codes (non-const) Histo1DPtr getHisto1D(unsigned int datasetId, unsigned int xAxisId, unsigned int yAxisId) { return getAnalysisObject(makeAxisCode(datasetId, xAxisId, yAxisId)); } /// Get a named Histo2D object from the histogram system const Histo2DPtr getHisto2D(const std::string& name) const { return getAnalysisObject(name); } /// Get a named Histo2D object from the histogram system (non-const) Histo2DPtr getHisto2D(const std::string& name) { return getAnalysisObject(name); } /// Get a Histo2D object from the histogram system by axis ID codes (non-const) const Histo2DPtr getHisto2D(unsigned int datasetId, unsigned int xAxisId, unsigned int yAxisId) const { return getAnalysisObject(makeAxisCode(datasetId, xAxisId, yAxisId)); } /// Get a Histo2D object from the histogram system by axis ID codes (non-const) Histo2DPtr getHisto2D(unsigned int datasetId, unsigned int xAxisId, unsigned int yAxisId) { return getAnalysisObject(makeAxisCode(datasetId, xAxisId, yAxisId)); } /// Get a named Profile1D object from the histogram system const Profile1DPtr getProfile1D(const std::string& name) const { return getAnalysisObject(name); } /// Get a named 
Profile1D object from the histogram system (non-const) Profile1DPtr getProfile1D(const std::string& name) { return getAnalysisObject(name); } /// Get a Profile1D object from the histogram system by axis ID codes (non-const) const Profile1DPtr getProfile1D(unsigned int datasetId, unsigned int xAxisId, unsigned int yAxisId) const { return getAnalysisObject(makeAxisCode(datasetId, xAxisId, yAxisId)); } /// Get a Profile1D object from the histogram system by axis ID codes (non-const) Profile1DPtr getProfile1D(unsigned int datasetId, unsigned int xAxisId, unsigned int yAxisId) { return getAnalysisObject(makeAxisCode(datasetId, xAxisId, yAxisId)); } /// Get a named Profile2D object from the histogram system const Profile2DPtr getProfile2D(const std::string& name) const { return getAnalysisObject(name); } /// Get a named Profile2D object from the histogram system (non-const) Profile2DPtr getProfile2D(const std::string& name) { return getAnalysisObject(name); } /// Get a Profile2D object from the histogram system by axis ID codes (non-const) const Profile2DPtr getProfile2D(unsigned int datasetId, unsigned int xAxisId, unsigned int yAxisId) const { return getAnalysisObject(makeAxisCode(datasetId, xAxisId, yAxisId)); } /// Get a Profile2D object from the histogram system by axis ID codes (non-const) Profile2DPtr getProfile2D(unsigned int datasetId, unsigned int xAxisId, unsigned int yAxisId) { return getAnalysisObject(makeAxisCode(datasetId, xAxisId, yAxisId)); } /// Get a named Scatter2D object from the histogram system const Scatter2DPtr getScatter2D(const std::string& name) const { return getAnalysisObject(name); } /// Get a named Scatter2D object from the histogram system (non-const) Scatter2DPtr getScatter2D(const std::string& name) { return getAnalysisObject(name); } /// Get a Scatter2D object from the histogram system by axis ID codes (non-const) const Scatter2DPtr getScatter2D(unsigned int datasetId, unsigned int xAxisId, unsigned int yAxisId) const { return getAnalysisObject(makeAxisCode(datasetId, xAxisId, yAxisId)); } /// Get a Scatter2D object from the histogram system by axis ID codes (non-const) Scatter2DPtr getScatter2D(unsigned int datasetId, unsigned int xAxisId, unsigned int yAxisId) { return getAnalysisObject(makeAxisCode(datasetId, xAxisId, yAxisId)); } //@} private: /// Name passed to constructor (used to find .info analysis data file, and as a fallback) string _defaultname; /// Pointer to analysis metadata object unique_ptr _info; /// Storage of all plot objects /// @todo Make this a map for fast lookup by path? vector _analysisobjects; /// @name Cross-section variables //@{ double _crossSection; bool _gotCrossSection; //@} /// The controlling AnalysisHandler object. AnalysisHandler* _analysishandler; /// Collection of cached refdata to speed up many autobookings: the /// reference data file should only be read once. mutable std::map _refdata; /// Options the (this instance of) the analysis map _options; /// The string of options. string _optstring; private: /// @name Utility functions //@{ /// Get the reference data for this paper and cache it. void _cacheRefData() const; //@} /// The assignment operator is private and must never be called. /// In fact, it should not even be implemented. 
Analysis& operator=(const Analysis&); }; } // Include definition of analysis plugin system so that analyses automatically see it when including Analysis.hh #include "Rivet/AnalysisBuilder.hh" /// @def DECLARE_RIVET_PLUGIN /// Preprocessor define to prettify the global-object plugin hook mechanism. #define DECLARE_RIVET_PLUGIN(clsname) Rivet::AnalysisBuilder plugin_ ## clsname /// @def DECLARE_ALIASED_RIVET_PLUGIN /// Preprocessor define to prettify the global-object plugin hook mechanism, with an extra alias name for this analysis. // #define DECLARE_ALIASED_RIVET_PLUGIN(clsname, alias) Rivet::AnalysisBuilder plugin_ ## clsname ## ( ## #alias ## ) #define DECLARE_ALIASED_RIVET_PLUGIN(clsname, alias) DECLARE_RIVET_PLUGIN(clsname)( #alias ) /// @def DEFAULT_RIVET_ANALYSIS_CONSTRUCTOR /// Preprocessor define to prettify the manky constructor with name string argument #define DEFAULT_RIVET_ANALYSIS_CONSTRUCTOR(clsname) clsname() : Analysis(# clsname) {} /// @def DEFAULT_RIVET_ANALYSIS_CTOR /// Slight abbreviation for DEFAULT_RIVET_ANALYSIS_CONSTRUCTOR #define DEFAULT_RIVET_ANALYSIS_CTOR(clsname) DEFAULT_RIVET_ANALYSIS_CONSTRUCTOR(clsname) #endif diff --git a/include/Rivet/Tools/Percentile.hh b/include/Rivet/Tools/Percentile.hh --- a/include/Rivet/Tools/Percentile.hh +++ b/include/Rivet/Tools/Percentile.hh @@ -1,720 +1,687 @@ #ifndef PERCENTILE_HH #define PERCENTILE_HH #include "Rivet/Event.hh" #include "Rivet/Projections/CentralityProjection.hh" #include "Rivet/ProjectionApplier.hh" namespace Rivet { /// Forward declaration. class Analysis; /// @brief PercentileBase is the base class of all Percentile classes. /// /// This base class contains all non-templated variables and /// infrastructure needed. class PercentileBase { public: /// @brief the main constructor /// /// requiring a pointer, @a ana, to the Analysis to which this - /// object belongs, the name of the CentralityProjection, @a - /// projname, to be used and the number, @a nProj, of multiple - /// centrality definitions that are available in that projection. - PercentileBase(Analysis * ana, string projName, int nProj) - : _ana(ana), _projName(projName), _nProj(nProj) {} + /// object belongs and the name of the CentralityProjection, @a + /// projname, to be used. + PercentileBase(Analysis * ana, string projName) + : _ana(ana), _projName(projName) {} /// @brief Default constructor. PercentileBase() {} /// @initialize the PercentileBase for a new event. /// /// This will perform the assigned CentralityProjection and select /// out the (indices) of the internal AnalysisObjects that are to be /// active in this event. void selectBins(const Event &); /// @brief Helper function to check if @a x is within @a range. static bool inRange(double x, pair range) { return x >= range.first && ( x < range.second || ( x == 100.0 && x == range.second ) ); } - - /// @brief Helper function to get an analysis object from the analysis assigned to. - AnalysisObjectPtr getObject(string path) const; - - /// @brief Helper function to get an analysis object from the analysis assigned to. - template - std::shared_ptr getAnalysisObject(string path) const { - return dynamic_pointer_cast(getObject(path)); - } - /// @brief Copy information from @a other PercentileBase void copyFrom(const PercentileBase & other) { _ana = other._ana; _projName = other._projName; _cent = other._cent; - _nProj = other._nProj; } /// @brief check if @a other PercentileBase is compatible with this. 
bool compatible(const PercentileBase & other) const { return ( _ana == other._ana && _projName == other._projName && - _cent == other._cent && - _nProj == other._nProj ); + _cent == other._cent ); } /// @breif return the list of centrality bins. /// /// The size of this vector is the same as number of internal /// analysis objects in the sub class PercentileTBase. const vector< pair > & centralities() const { return _cent; } protected: /// The Analysis object to which This object is assigned. Analysis * _ana; /// The name of the CentralityProjection. string _projName; /// The list of indices of the analysis objects that are to be /// filled in the current event. vector _activeBins; /// The list of centrality intervals, one for each included analysis /// object. vector > _cent; - /// The number of internal optional centrality definitions included - /// in the CentralityProjection. - int _nProj; - }; /// @brief PercentileTBase is the base class of all Percentile classes. /// /// This base class contains all template-dependent variables and /// infrastructure needed for Percentile and PercentileXaxis. template class PercentileTBase : public PercentileBase { public: /// Convenient typedef. typedef typename T::Ptr TPtr; /// @brief the main constructor /// /// requiring a pointer, @a ana, to the Analysis to which this - /// object belongs, the name of the CentralityProjection, @a - /// projname, to be used and the number, @a nProj, of multiple - /// centrality definitions that are available in that projection. - PercentileTBase(Analysis * ana, string projName, int nProj) - : PercentileBase(ana, projName, nProj), _histos() {} + /// object belongs and the name of the CentralityProjection, @a + /// projname, to be used. + PercentileTBase(Analysis * ana, string projName) + : PercentileBase(ana, projName), _histos() {} /// @brief Default constructor. PercentileTBase() {} /// @brief Empty destructor. ~PercentileTBase() {} /// @brief add a new percentile bin. /// /// Add an analysis objects which are clones of @a temp that should /// be active for events in the given centrality bin @a /// cent. Several analysis objects may be added depending on the /// number of alternative centrality definitions in the /// CentralityProjection @a proj. This function is common for /// Percentile and PecentileXaxis, but for the latter the @a cent /// argument should be left to its default. - vector - add(const CentralityProjection & proj, const T & temp, - pair cent = {0.0, 100.0} ) { - vector ret; - for ( int i = 0, N = _nProj; i < N; ++i ) { - _cent.push_back(cent); - auto cnt = make_shared(); - string path = temp.path(); - if ( i > 0) path += "[" + proj.projections()[i] + "]"; - auto hist = getAnalysisObject(path); - if ( !hist ) { - hist = make_shared(temp, path); - ret.push_back(hist); - } - _histos.push_back( { hist, cnt } ); - } - return ret; + void add(shared_ptr ao, CounterPtr cnt, + pair cent = {0.0, 100.0} ) { + _cent.push_back(cent); + _histos.push_back( { ao, cnt } ); } /// @brief Copy the information from an @a other Percentile object. /// /// This function differs from a simple assignement as the @a other /// analysis objects are not copied, but supplied separately through /// @a tv. bool add(const PercentileBase & other, const vector & tv) { copyFrom(other); if ( tv.size() != _cent.size() ) return false; for ( auto t : tv ) _histos.push_back( { t, make_shared() } ); return true; } /// @brief initialize for a new event. Select which AnalysisObjects /// should be filled for this event. 
Keeps track of the number of /// events seen for each centrality bin and AnalysisAbject. void init(const Event & event) { selectBins(event); for (const auto bin : _activeBins) _histos[bin].second->fill(event.weight()); } /// @brief Normalize each AnalysisObject /// /// by dividing by the sum of the events seen for each centrality /// bin. void normalizePerEvent() { for (const auto &hist : _histos) if ( hist.second->numEntries() > 0 && hist.first->numEntries() > 0) hist.first->scaleW(1./hist.second->val()); } /// Simple scaling of each AnalysisObject. void scale(float scale) { for (const auto hist : _histos) hist.first->scaleW(scale); } /// Execute a function for each AnalysisObject. void exec(function f) { for ( auto hist : _histos) f(hist); } /// @brief Access the underlyng AnalysisObjects /// /// The returned vector contains a pair, where the first member is /// the AnalysisObject and the second is a counter keeping track of /// the sum of event weights for which the AnalysisObject has been /// active. const vector, shared_ptr > > & analysisObjects() const{ return _histos; } protected: /// The returned vector contains a pair, where the first member is /// the AnalysisObject and the second is a counter keeping track of /// the sum of event weights for which the AnalysisObject has been /// active. vector, shared_ptr > > _histos; }; /// @brief The Percentile class for centrality binning. /// /// The Percentile class automatically handles the selection of which /// AnalysisObject(s) should be filled depending on the centrality of /// an event. It cointains a list of AnalysisObjects, one for each /// centrality bin requested (note that these bins may be overlapping) /// and each centrality definition is available in the assigned /// CentralityProjection. template class Percentile : public PercentileTBase { public: /// @brief the main constructor /// /// requiring a pointer, @a ana, to the Analysis to which this - /// object belongs, the name of the CentralityProjection, @a - /// projname, to be used and the number, @a nProj, of multiple - /// centrality definitions that are available in that projection. - Percentile(Analysis * ana, string projName, int nProj) - : PercentileTBase(ana, projName, nProj) {} + /// object belongs and the name of the CentralityProjection, @a + /// projname, to be used. + Percentile(Analysis * ana, string projName) + : PercentileTBase(ana, projName) {} /// @brief Default constructor. Percentile() {} /// @brief Empty destructor. ~Percentile() {} /// Needed to access members of the templated base class. using PercentileTBase::_histos; /// Needed to access members of the templated base class. using PercentileTBase::_activeBins; /// Fill each AnalysisObject selected in the last call to /// PercentileTBaseinit template void fill(Args... args) { for (const auto bin : _activeBins) { _histos[bin].first->fill(args...); } } /// Subtract the contents fro another Pecentile. Percentile &operator-=(const Percentile &rhs) { const int nCent = _histos.size(); for (int iCent = 0; iCent < nCent; ++iCent) { *_histos[iCent].first -= *rhs._histos[iCent].first; } } /// Add the contents fro another Pecentile. Percentile &operator+=(const Percentile &rhs) { const int nCent = _histos.size(); for (int iCent = 0; iCent < nCent; ++iCent) { *_histos[iCent].first += *rhs._histos[iCent].first; /// @todo should this also add the Counter? } } /// Make this object look like a pointer. Percentile *operator->() { return this; } /// Pointer to member operator. 
Percentile &operator->*(function f) { exec(f); return *this; } }; /// @brief The PercentileXaxis class for centrality binning. /// /// The PercentileXaxis class automatically handles the x-axis of an /// AnalysisObject when the x-axis is to be the centrality of an /// event. This could also be done by eg. filling directly a Histo1D /// with the result of a CentralityProjection. However, since the /// CentralityProjection may handle several centrality definitions at /// the same time it is reasonable to instead use /// PercentileXaxis which will fill one histogram for each /// centrality definition. /// /// Operationally this class works like the Percentile class, but only /// one centrality bin (0-100) is included. When fill()ed the first /// argument is always given by the assigned CentralityProjection. template class PercentileXaxis : public PercentileTBase { public: /// @brief the main constructor /// /// requiring a pointer, @a ana, to the Analysis to which this - /// object belongs, the name of the CentralityProjection, @a - /// projname, to be used and the number, @a nProj, of multiple - /// centrality definitions that are available in that projection. - PercentileXaxis(Analysis * ana, string projName, int nProj) - : PercentileTBase(ana, projName, nProj) {} + /// object belongs and the name of the CentralityProjection, @a + /// projname, to be used. + PercentileXaxis(Analysis * ana, string projName) + : PercentileTBase(ana, projName) {} /// @brief Default constructor. PercentileXaxis() {} /// @brief Empty destructor. ~PercentileXaxis() {} /// Needed to access members of the templated base class. using PercentileTBase::_histos; /// Needed to access members of the templated base class. using PercentileTBase::_activeBins; /// Fill each AnalysisObject selected in the last call to /// PercentileTBaseinit template void fill(Args... args) { for (const auto bin : _activeBins) { _histos[bin].first->fill(bin, args...); } } /// Subtract the contents fro another PecentileXaxis. PercentileXaxis &operator-=(const PercentileXaxis &rhs) { const int nCent = _histos.size(); for (int iCent = 0; iCent < nCent; ++iCent) { *_histos[iCent].first -= *rhs._histos[iCent].first; } } /// Add the contents fro another PecentileXaxis. PercentileXaxis &operator+=(const PercentileXaxis &rhs) { const int nCent = this->_histos.size(); for (int iCent = 0; iCent < nCent; ++iCent) { *_histos[iCent].first += *rhs._histos[iCent].first; } } /// Make this object look like a pointer. PercentileXaxis *operator->() { return this; } /// Pointer to member operator. 
PercentileXaxis &operator->*(function f) { exec(f); return *this; } }; /// @name Combining Percentiles following the naming of functions for // the underlying AnalysisObjects: global operators // @{ template Percentile::RefT> divide(const Percentile numer, const Percentile denom) { typedef typename ReferenceTraits::RefT ScatT; Percentile ret; vector scatters; assert( numer.compatible(denom) ); for ( int i = 0, N = numer.analysisObjects().size(); i < N; ++i ) scatters.push_back(make_shared(divide(*numer.analysisObjects()[i].first, *denom.analysisObjects()[i].first))); ret.add(numer, scatters); return ret; } template Percentile::RefT> divide(const Percentile numer, const Percentile::RefT> denom) { typedef typename ReferenceTraits::RefT ScatT; Percentile ret; vector scatters; assert( numer.compatible(denom) ); for ( int i = 0, N = numer.analysisObjects().size(); i < N; ++i ) scatters.push_back(make_shared(divide(*numer.analysisObjects()[i].first, *denom.analysisObjects()[i].first))); ret.add(numer, scatters); return ret; } template Percentile::RefT> divide(const Percentile::RefT> numer, const Percentile denom) { typedef typename ReferenceTraits::RefT ScatT; Percentile::RefT> ret; vector scatters; assert( numer.compatible(denom) ); for ( int i = 0, N = numer.analysisObjects().size(); i < N; ++i ) scatters.push_back(make_shared(divide(*numer.analysisObjects()[i].first, *denom.analysisObjects()[i].first))); ret.add(numer, scatters); return ret; } template Percentile add(const Percentile pctla, const Percentile pctlb) { Percentile ret; vector aos; assert( pctla.compatible(pctlb) ); for ( int i = 0, N = pctla.analysisObjects().size(); i < N; ++i ) aos.push_back(make_shared(add(*pctla.analysisObjects()[i].first, *pctlb.analysisObjects()[i].first))); ret.add(pctla, aos); return ret; } template Percentile::RefT> add(const Percentile pctla, const Percentile::RefT> pctlb) { typedef typename ReferenceTraits::RefT ScatT; Percentile ret; vector scatters; assert( pctla.compatible(pctlb) ); for ( int i = 0, N = pctla.analysisObjects().size(); i < N; ++i ) scatters.push_back(make_shared(add(*pctla.analysisObjects()[i].first, *pctlb.analysisObjects()[i].first))); ret.add(pctla, scatters); return ret; } template Percentile::RefT> add(const Percentile::RefT> pctla, const Percentile pctlb) { typedef typename ReferenceTraits::RefT ScatT; Percentile ret; vector scatters; assert( pctla.compatible(pctlb) ); for ( int i = 0, N = pctla.analysisObjects().size(); i < N; ++i ) scatters.push_back(make_shared(add(*pctla.analysisObjects()[i].first, *pctlb.analysisObjects()[i].first))); ret.add(pctla, scatters); return ret; } template Percentile subtract(const Percentile pctla, const Percentile pctlb) { Percentile ret; vector aos; assert( pctla.compatible(pctlb) ); for ( int i = 0, N = pctla.analysisObjects().size(); i < N; ++i ) aos.push_back(make_shared(subtract(*pctla.analysisObjects()[i].first, *pctlb.analysisObjects()[i].first))); ret.add(pctla, aos); return ret; } template Percentile::RefT> subtract(const Percentile pctla, const Percentile::RefT> pctlb) { typedef typename ReferenceTraits::RefT ScatT; Percentile ret; vector scatters; assert( pctla.compatible(pctlb) ); for ( int i = 0, N = pctla.analysisObjects().size(); i < N; ++i ) scatters.push_back(make_shared(subtract(*pctla.analysisObjects()[i].first, *pctlb.analysisObjects()[i].first))); ret.add(pctla, scatters); return ret; } template Percentile::RefT> subtract(const Percentile::RefT> pctla, const Percentile pctlb) { typedef typename ReferenceTraits::RefT ScatT; 
Percentile ret; vector scatters; assert( pctla.compatible(pctlb) ); for ( int i = 0, N = pctla.analysisObjects().size(); i < N; ++i ) scatters.push_back(make_shared(subtract(*pctla.analysisObjects()[i].first, *pctlb.analysisObjects()[i].first))); ret.add(pctla, scatters); return ret; } template Percentile::RefT> multiply(const Percentile pctla, const Percentile::RefT> pctlb) { typedef typename ReferenceTraits::RefT ScatT; Percentile ret; vector scatters; assert( pctla.compatible(pctlb) ); for ( int i = 0, N = pctla.analysisObjects().size(); i < N; ++i ) scatters.push_back(make_shared(multiply(*pctla.analysisObjects()[i].first, *pctlb.analysisObjects()[i].first))); ret.add(pctla, scatters); return ret; } template Percentile::RefT> multiply(const Percentile::RefT> pctla, const Percentile pctlb) { typedef typename ReferenceTraits::RefT ScatT; Percentile ret; vector scatters; assert( pctla.compatible(pctlb) ); for ( int i = 0, N = pctla.analysisObjects().size(); i < N; ++i ) scatters.push_back(make_shared(multiply(*pctla.analysisObjects()[i].first, *pctlb.analysisObjects()[i].first))); ret.add(pctla, scatters); return ret; } template PercentileXaxis::RefT> divide(const PercentileXaxis numer, const PercentileXaxis denom) { typedef typename ReferenceTraits::RefT ScatT; PercentileXaxis ret; vector scatters; assert( numer.compatible(denom) ); for ( int i = 0, N = numer.analysisObjects().size(); i < N; ++i ) scatters.push_back(make_shared(divide(*numer.analysisObjects()[i].first, *denom.analysisObjects()[i].first))); ret.add(numer, scatters); return ret; } template PercentileXaxis::RefT> divide(const PercentileXaxis numer, const PercentileXaxis::RefT> denom) { typedef typename ReferenceTraits::RefT ScatT; PercentileXaxis ret; vector scatters; assert( numer.compatible(denom) ); for ( int i = 0, N = numer.analysisObjects().size(); i < N; ++i ) scatters.push_back(make_shared(divide(*numer.analysisObjects()[i].first, *denom.analysisObjects()[i].first))); ret.add(numer, scatters); return ret; } template PercentileXaxis::RefT> divide(const PercentileXaxis::RefT> numer, const PercentileXaxis denom) { typedef typename ReferenceTraits::RefT ScatT; PercentileXaxis::RefT> ret; vector scatters; assert( numer.compatible(denom) ); for ( int i = 0, N = numer.analysisObjects().size(); i < N; ++i ) scatters.push_back(make_shared(divide(*numer.analysisObjects()[i].first, *denom.analysisObjects()[i].first))); ret.add(numer, scatters); return ret; } template PercentileXaxis add(const PercentileXaxis pctla, const PercentileXaxis pctlb) { PercentileXaxis ret; vector aos; assert( pctla.compatible(pctlb) ); for ( int i = 0, N = pctla.analysisObjects().size(); i < N; ++i ) aos.push_back(make_shared(add(*pctla.analysisObjects()[i].first, *pctlb.analysisObjects()[i].first))); ret.add(pctla, aos); return ret; } template PercentileXaxis::RefT> add(const PercentileXaxis pctla, const PercentileXaxis::RefT> pctlb) { typedef typename ReferenceTraits::RefT ScatT; PercentileXaxis ret; vector scatters; assert( pctla.compatible(pctlb) ); for ( int i = 0, N = pctla.analysisObjects().size(); i < N; ++i ) scatters.push_back(make_shared(add(*pctla.analysisObjects()[i].first, *pctlb.analysisObjects()[i].first))); ret.add(pctla, scatters); return ret; } template PercentileXaxis::RefT> add(const PercentileXaxis::RefT> pctla, const PercentileXaxis pctlb) { typedef typename ReferenceTraits::RefT ScatT; PercentileXaxis ret; vector scatters; assert( pctla.compatible(pctlb) ); for ( int i = 0, N = pctla.analysisObjects().size(); i < N; ++i ) 
scatters.push_back(make_shared(add(*pctla.analysisObjects()[i].first, *pctlb.analysisObjects()[i].first))); ret.add(pctla, scatters); return ret; } template PercentileXaxis subtract(const PercentileXaxis pctla, const PercentileXaxis pctlb) { PercentileXaxis ret; vector aos; assert( pctla.compatible(pctlb) ); for ( int i = 0, N = pctla.analysisObjects().size(); i < N; ++i ) aos.push_back(make_shared(subtract(*pctla.analysisObjects()[i].first, *pctlb.analysisObjects()[i].first))); ret.add(pctla, aos); return ret; } template PercentileXaxis::RefT> subtract(const PercentileXaxis pctla, const PercentileXaxis::RefT> pctlb) { typedef typename ReferenceTraits::RefT ScatT; PercentileXaxis ret; vector scatters; assert( pctla.compatible(pctlb) ); for ( int i = 0, N = pctla.analysisObjects().size(); i < N; ++i ) scatters.push_back(make_shared(subtract(*pctla.analysisObjects()[i].first, *pctlb.analysisObjects()[i].first))); ret.add(pctla, scatters); return ret; } template PercentileXaxis::RefT> subtract(const PercentileXaxis::RefT> pctla, const PercentileXaxis pctlb) { typedef typename ReferenceTraits::RefT ScatT; PercentileXaxis ret; vector scatters; assert( pctla.compatible(pctlb) ); for ( int i = 0, N = pctla.analysisObjects().size(); i < N; ++i ) scatters.push_back(make_shared(subtract(*pctla.analysisObjects()[i].first, *pctlb.analysisObjects()[i].first))); ret.add(pctla, scatters); return ret; } template PercentileXaxis::RefT> multiply(const PercentileXaxis pctla, const PercentileXaxis::RefT> pctlb) { typedef typename ReferenceTraits::RefT ScatT; PercentileXaxis ret; vector scatters; assert( pctla.compatible(pctlb) ); for ( int i = 0, N = pctla.analysisObjects().size(); i < N; ++i ) scatters.push_back(make_shared(multiply(*pctla.analysisObjects()[i].first, *pctlb.analysisObjects()[i].first))); ret.add(pctla, scatters); return ret; } template PercentileXaxis::RefT> multiply(const PercentileXaxis::RefT> pctla, const PercentileXaxis pctlb) { typedef typename ReferenceTraits::RefT ScatT; PercentileXaxis ret; vector scatters; assert( pctla.compatible(pctlb) ); for ( int i = 0, N = pctla.analysisObjects().size(); i < N; ++i ) scatters.push_back(make_shared(multiply(*pctla.analysisObjects()[i].first, *pctlb.analysisObjects()[i].first))); ret.add(pctla, scatters); return ret; } template Percentile operator+(const Percentile pctla, const Percentile pctlb) { return add(pctla, pctlb); } template Percentile operator-(const Percentile pctla, const Percentile pctlb) { return subtract(pctla, pctlb); } template Percentile::RefT> operator/(const Percentile numer, const Percentile denom) { return divide(numer, denom); } template PercentileXaxis operator+(const PercentileXaxis pctla, const PercentileXaxis pctlb) { return add(pctla, pctlb); } template PercentileXaxis operator-(const PercentileXaxis pctla, const PercentileXaxis pctlb) { return subtract(pctla, pctlb); } template PercentileXaxis::RefT> operator/(const PercentileXaxis numer, const PercentileXaxis denom) { return divide(numer, denom); } } #endif diff --git a/src/Core/AnalysisHandler.cc b/src/Core/AnalysisHandler.cc --- a/src/Core/AnalysisHandler.cc +++ b/src/Core/AnalysisHandler.cc @@ -1,556 +1,556 @@ // -*- C++ -*- #include "Rivet/Config/RivetCommon.hh" #include "Rivet/AnalysisHandler.hh" #include "Rivet/Analysis.hh" #include "Rivet/Tools/ParticleName.hh" #include "Rivet/Tools/BeamConstraint.hh" #include "Rivet/Tools/Logging.hh" #include "Rivet/Projections/Beam.hh" #include "YODA/IO.h" namespace Rivet { AnalysisHandler::AnalysisHandler(const string& 
runname) : _runname(runname), _eventcounter("/_EVTCOUNT"), _xs(NAN), _xserr(NAN), _initialised(false), _ignoreBeams(false), _dumpPeriod(0), _dumping(false) { } AnalysisHandler::~AnalysisHandler() { } Log& AnalysisHandler::getLog() const { return Log::getLog("Rivet.Analysis.Handler"); } void AnalysisHandler::init(const GenEvent& ge) { if (_initialised) throw UserError("AnalysisHandler::init has already been called: cannot re-initialize!"); setRunBeams(Rivet::beams(ge)); MSG_DEBUG("Initialising the analysis handler"); _eventcounter.reset(); // Check that analyses are beam-compatible, and remove those that aren't const size_t num_anas_requested = analysisNames().size(); vector anamestodelete; for (const AnaHandle a : _analyses) { if (!_ignoreBeams && !a->isCompatible(beams())) { //MSG_DEBUG(a->name() << " requires beams " << a->requiredBeams() << " @ " << a->requiredEnergies() << " GeV"); anamestodelete.push_back(a->name()); } } for (const string& aname : anamestodelete) { MSG_WARNING("Analysis '" << aname << "' is incompatible with the provided beams: removing"); removeAnalysis(aname); } if (num_anas_requested > 0 && analysisNames().empty()) { cerr << "All analyses were incompatible with the first event's beams\n" << "Exiting, since this probably wasn't intentional!" << endl; exit(1); } // Warn if any analysis' status is not unblemished for (const AnaHandle a : analyses()) { if (toUpper(a->status()) == "PRELIMINARY") { MSG_WARNING("Analysis '" << a->name() << "' is preliminary: be careful, it may change and/or be renamed!"); } else if (toUpper(a->status()) == "OBSOLETE") { MSG_WARNING("Analysis '" << a->name() << "' is obsolete: please update!"); } else if (toUpper(a->status()).find("UNVALIDATED") != string::npos) { MSG_WARNING("Analysis '" << a->name() << "' is unvalidated: be careful, it may be broken!"); } } // Initialize the remaining analyses for (AnaHandle a : _analyses) { MSG_DEBUG("Initialising analysis: " << a->name()); try { // Allow projection registration in the init phase onwards a->_allowProjReg = true; a->init(); //MSG_DEBUG("Checking consistency of analysis: " << a->name()); //a->checkConsistency(); } catch (const Error& err) { cerr << "Error in " << a->name() << "::init method: " << err.what() << endl; exit(1); } MSG_DEBUG("Done initialising analysis: " << a->name()); } _initialised = true; MSG_DEBUG("Analysis handler initialised"); } void AnalysisHandler::analyze(const GenEvent& ge) { // Call init with event as template if not already initialised if (!_initialised) init(ge); assert(_initialised); // Ensure that beam details match those from the first event (if we're checking beams) if ( !_ignoreBeams ) { const PdgIdPair beams = Rivet::beamIds(ge); const double sqrts = Rivet::sqrtS(ge); if (!compatible(beams, _beams) || !fuzzyEquals(sqrts, sqrtS())) { cerr << "Event beams mismatch: " << PID::toBeamsString(beams) << " @ " << sqrts/GeV << " GeV" << " vs. 
first beams " << this->beams() << " @ " << this->sqrtS()/GeV << " GeV" << endl; exit(1); } } // Create the Rivet event wrapper /// @todo Filter/normalize the event here Event event(ge); // Weights /// @todo Drop this / just report first weight when we support multiweight events _eventcounter.fill(event.weight()); MSG_DEBUG("Event #" << _eventcounter.numEntries() << " weight = " << event.weight()); // Cross-section #ifdef HEPMC_HAS_CROSS_SECTION if (ge.cross_section()) { _xs = ge.cross_section()->cross_section(); _xserr = ge.cross_section()->cross_section_error(); } #endif // Run the analyses for (AnaHandle a : _analyses) { MSG_TRACE("About to run analysis " << a->name()); try { a->analyze(event); } catch (const Error& err) { cerr << "Error in " << a->name() << "::analyze method: " << err.what() << endl; exit(1); } MSG_TRACE("Finished running analysis " << a->name()); } if ( _dumpPeriod > 0 && numEvents()%_dumpPeriod == 0 ) { MSG_INFO("Dumping intermediate results to " << _dumpFile << "."); _dumping = true; finalize(); _dumping = false; writeData(_dumpFile); } } void AnalysisHandler::analyze(const GenEvent* ge) { if (ge == nullptr) { MSG_ERROR("AnalysisHandler received null pointer to GenEvent"); //throw Error("AnalysisHandler received null pointer to GenEvent"); } analyze(*ge); } void AnalysisHandler::finalize() { if (!_initialised) return; // First we make copies of all analysis objects. map backupAOs; for (auto ao : getData(false, true) ) backupAOs[ao->path()] = AnalysisObjectPtr(ao->newclone()); // Now we run the (re-entrant) finalize() functions for all analyses. MSG_INFO("Finalising analyses"); for (AnaHandle a : _analyses) { a->setCrossSection(_xs); try { if ( !_dumping || a->info().reentrant() ) a->finalize(); else if ( _dumping ) MSG_INFO("Skipping periodic dump of " << a->name() << " as it is not declared reentrant."); } catch (const Error& err) { cerr << "Error in " << a->name() << "::finalize method: " << err.what() << endl; exit(1); } } // Now we copy all analysis objects to the list of finalized // ones, and restore the value to their original ones. _finalizedAOs.clear(); for ( auto ao : getData() ) _finalizedAOs.push_back(AnalysisObjectPtr(ao->newclone())); for ( auto ao : getData(false, true) ) { auto aoit = backupAOs.find(ao->path()); if ( aoit == backupAOs.end() ) { AnaHandle ana = analysis(split(ao->path(), "/")[0]); if ( ana ) ana->removeAnalysisObject(ao->path()); } else copyao(aoit->second, ao); } // Print out number of events processed const int nevts = _eventcounter.numEntries(); MSG_INFO("Processed " << nevts << " event" << (nevts != 1 ? "s" : "")); // // Delete analyses // MSG_DEBUG("Deleting analyses"); // _analyses.clear(); // Print out MCnet boilerplate cout << endl; cout << "The MCnet usage guidelines apply to Rivet: see http://www.montecarlonet.org/GUIDELINES" << endl; cout << "Please acknowledge plots made with Rivet analyses, and cite arXiv:1003.0694 (http://arxiv.org/abs/1003.0694)" << endl; } AnalysisHandler& AnalysisHandler::addAnalysis(const string& analysisname, std::map pars) { // Make an option handle. std::string parHandle = ""; for (map::iterator par = pars.begin(); par != pars.end(); ++par) { parHandle +=":"; parHandle += par->first + "=" + par->second; } return addAnalysis(analysisname + parHandle); } AnalysisHandler& AnalysisHandler::addAnalysis(const string& analysisname) { // Check for a duplicate analysis /// @todo Might we want to be able to run an analysis twice, with different params? /// Requires avoiding histo tree clashes, i.e. 
storing the histos on the analysis objects. string ananame = analysisname; vector anaopt = split(analysisname, ":"); if ( anaopt.size() > 1 ) ananame = anaopt[0]; AnaHandle analysis( AnalysisLoader::getAnalysis(ananame) ); if (analysis.get() != 0) { // < Check for null analysis. MSG_DEBUG("Adding analysis '" << analysisname << "'"); map opts; for ( int i = 1, N = anaopt.size(); i < N; ++i ) { vector opt = split(anaopt[i], "="); if ( opt.size() != 2 ) { MSG_WARNING("Error in option specification. Skipping analysis " << analysisname); return *this; } if ( !analysis->info().validOption(opt[0], opt[1]) ) { MSG_WARNING("Cannot set option '" << opt[0] << "' to '" << opt[1] << "'. Skipping analysis " << analysisname); return *this; } opts[opt[0]] = opt[1]; } for ( auto opt: opts) { analysis->_options[opt.first] = opt.second; analysis->_optstring += ":" + opt.first + "=" + opt.second; } for (const AnaHandle& a : _analyses) { if (a->name() == analysis->name() ) { MSG_WARNING("Analysis '" << analysisname << "' already registered: skipping duplicate"); return *this; } } analysis->_analysishandler = this; _analyses.insert(analysis); } else { MSG_WARNING("Analysis '" << analysisname << "' not found."); } // MSG_WARNING(_analyses.size()); // for (const AnaHandle& a : _analyses) MSG_WARNING(a->name()); return *this; } AnalysisHandler& AnalysisHandler::removeAnalysis(const string& analysisname) { std::shared_ptr toremove; for (const AnaHandle a : _analyses) { if (a->name() == analysisname) { toremove = a; break; } } if (toremove.get() != 0) { MSG_DEBUG("Removing analysis '" << analysisname << "'"); _analyses.erase(toremove); } return *this; } ///////////////////////////// void AnalysisHandler::addData(const std::vector& aos) { for (const AnalysisObjectPtr ao : aos) { string path = ao->path(); if ( path.substr(0, 5) != "/RAW/" ) { _orphanedPreloads.push_back(ao); continue; } path = path.substr(4); ao->setPath(path); if (path.size() > 1) { // path > "/" try { const string ananame = split(path, "/")[0]; AnaHandle a = analysis(ananame); a->addAnalysisObject(ao); /// @todo Need to statistically merge... } catch (const Error& e) { MSG_TRACE("Adding analysis object " << path << " to the list of orphans."); _orphanedPreloads.push_back(ao); } } } } void AnalysisHandler:: mergeYodas(const vector & aofiles, bool equiv) { vector< vector > aosv; vector xsecs; vector xsecerrs; vector sows; set ananames; // First scan all files and extract analysis objects and add the // corresponding anayses.. 
for ( auto file : aofiles ) { Scatter1DPtr xsec; CounterPtr sow; // For each file make sure that cross section and sum-of-weights // objects are present and stor all RAW ones in a vector; vector aos; _eventcounter.reset(); try { /// @todo Use new YODA SFINAE to fill the smart ptr vector directly vector aos_raw; YODA::read(file, aos_raw); for (AnalysisObject* aor : aos_raw) { AnalysisObjectPtr ao = AnalysisObjectPtr(aor); if ( ao->path().substr(0, 5) != "/RAW/" ) continue; ao->setPath(ao->path().substr(4)); if ( ao->path() == "/_XSEC" ) xsec = dynamic_pointer_cast(ao); else if ( ao->path() == "/_EVTCOUNT" ) sow = dynamic_pointer_cast(ao); else { string ananame = split(ao->path(), "/")[0]; if ( ananames.insert(ananame).second ) addAnalysis(ananame); aos.push_back(ao); } } if ( !xsec || !sow ) { MSG_ERROR( "Error in AnalysisHandler::mergeYodas: The file " << file << " did not contain weights and cross section info."); exit(1); } xsecs.push_back(xsec->point(0).x()); xsecerrs.push_back(sqr(xsec->point(0).xErrAvg())); _eventcounter += *sow; aosv.push_back(aos); } catch (...) { //< YODA::ReadError& throw UserError("Unexpected error in reading file: " + file); } } // Now calculate the scale to be applied for all bins in a file // and get the common cross section and sum of weights. _xs = _xserr = 0.0; for ( int i = 0, N = sows.size(); i < N; ++i ) { double effnent = sows[i]->effNumEntries(); _xs += (equiv? effnent: 1.0)*xsecs[i]; _xserr += (equiv? sqr(effnent): 1.0)*xsecerrs[i]; } vector scales(sows.size(), 1.0); if ( equiv ) { _xs /= _eventcounter.effNumEntries(); _xserr = sqrt(_xserr)/_eventcounter.effNumEntries(); } else { _xserr = sqrt(_xserr); for ( int i = 0, N = sows.size(); i < N; ++i ) scales[i] = (_eventcounter.sumW()/sows[i]->sumW())*(xsecs[i]/_xs); } // Initialize the analyses allowing them to book analysis objects. for (AnaHandle a : _analyses) { MSG_DEBUG("Initialising analysis: " << a->name()); if ( !a->info().reentrant() ) MSG_WARNING("Analysis " << a->name() << " has not been validated to have " << "a reentrant finalize method. The result is unpredictable."); try { // Allow projection registration in the init phase onwards a->_allowProjReg = true; a->init(); //MSG_DEBUG("Checking consistency of analysis: " << a->name()); //a->checkConsistency(); } catch (const Error& err) { cerr << "Error in " << a->name() << "::init method: " << err.what() << endl; exit(1); } MSG_DEBUG("Done initialising analysis: " << a->name()); } _initialised = true; // Get a list of all anaysis objects to handle. map current; for ( auto ao : getData(false, true) ) current[ao->path()] = ao; // Go through all objects to be merged and add them to current // after appropriate scaling. for ( int i = 0, N = aosv.size(); i < N; ++i) for ( auto ao : aosv[i] ) { if ( ao->path() == "/_XSEC" || ao->path() == "_EVTCOUNT" ) continue; auto aoit = current.find(ao->path()); if ( aoit == current.end() ) { MSG_WARNING("" << ao->path() << " was not properly booked."); continue; } if ( !addaos(aoit->second, ao, scales[i]) ) MSG_WARNING("Cannot merge objects with path " << ao->path() <<" of type " << ao->annotation("Type") ); } // Now we can simply finalize() the analysis, leaving the // controlling program to write it out some yoda-file. finalize(); } void AnalysisHandler::readData(const string& filename) { vector aos; try { /// @todo Use new YODA SFINAE to fill the smart ptr vector directly vector aos_raw; YODA::read(filename, aos_raw); for (AnalysisObject* aor : aos_raw) aos.push_back(AnalysisObjectPtr(aor)); } catch (...) 
{ //< YODA::ReadError& throw UserError("Unexpected error in reading file: " + filename); } if (!aos.empty()) addData(aos); } vector AnalysisHandler:: getData(bool includeorphans, bool includetmps) const { vector rtn; // Event counter rtn.push_back( make_shared(_eventcounter) ); // Cross-section + err as scatter YODA::Scatter1D::Points pts; pts.insert(YODA::Point1D(_xs, _xserr)); rtn.push_back( make_shared(pts, "/_XSEC") ); // Analysis histograms for (const AnaHandle a : analyses()) { vector aos = a->analysisObjects(); // MSG_WARNING(a->name() << " " << aos.size()); for (const AnalysisObjectPtr ao : aos) { // Exclude paths from final write-out if they contain a "TMP" layer (i.e. matching "/TMP/") /// @todo This needs to be much more nuanced for re-entrant histogramming if ( !includetmps && ao->path().find("/TMP/" ) != string::npos) continue; rtn.push_back(ao); } } // Sort histograms alphanumerically by path before write-out sort(rtn.begin(), rtn.end(), [](AnalysisObjectPtr a, AnalysisObjectPtr b) {return a->path() < b->path();}); if ( includeorphans ) rtn.insert(rtn.end(), _orphanedPreloads.begin(), _orphanedPreloads.end()); return rtn; } void AnalysisHandler::writeData(const string& filename) const { vector out = _finalizedAOs; out.reserve(2*out.size()); - vector aos = getData(); + vector aos = getData(false, true); for ( auto ao : aos ) { ao = AnalysisObjectPtr(ao->newclone()); ao->setPath("/RAW" + ao->path()); out.push_back(ao); } try { YODA::write(filename, out.begin(), out.end()); } catch (...) { //< YODA::WriteError& throw UserError("Unexpected error in writing file: " + filename); } } std::vector AnalysisHandler::analysisNames() const { std::vector rtn; for (AnaHandle a : _analyses) { rtn.push_back(a->name()); } return rtn; } const AnaHandle AnalysisHandler::analysis(const std::string& analysisname) const { for (const AnaHandle a : analyses()) if (a->name() == analysisname) return a; throw Error("No analysis named '" + analysisname + "' registered in AnalysisHandler"); } AnalysisHandler& AnalysisHandler::addAnalyses(const std::vector& analysisnames) { for (const string& aname : analysisnames) { //MSG_DEBUG("Adding analysis '" << aname << "'"); addAnalysis(aname); } return *this; } AnalysisHandler& AnalysisHandler::removeAnalyses(const std::vector& analysisnames) { for (const string& aname : analysisnames) { removeAnalysis(aname); } return *this; } bool AnalysisHandler::needCrossSection() const { bool rtn = false; for (const AnaHandle a : _analyses) { if (!rtn) rtn = a->needsCrossSection(); if (rtn) break; } return rtn; } AnalysisHandler& AnalysisHandler::setCrossSection(double xs) { _xs = xs; return *this; } bool AnalysisHandler::hasCrossSection() const { return (!std::isnan(crossSection())); } AnalysisHandler& AnalysisHandler::addAnalysis(Analysis* analysis) { analysis->_analysishandler = this; _analyses.insert(AnaHandle(analysis)); return *this; } PdgIdPair AnalysisHandler::beamIds() const { return Rivet::beamIds(beams()); } double AnalysisHandler::sqrtS() const { return Rivet::sqrtS(beams()); } void AnalysisHandler::setIgnoreBeams(bool ignore) { _ignoreBeams=ignore; } } diff --git a/src/Tools/Percentile.cc b/src/Tools/Percentile.cc --- a/src/Tools/Percentile.cc +++ b/src/Tools/Percentile.cc @@ -1,26 +1,20 @@ #include "Rivet/Tools/RivetYODA.hh" #include "Rivet/Tools/Percentile.hh" #include "Rivet/Analysis.hh" using namespace std; namespace Rivet { void PercentileBase::selectBins(const Event & ev) { const CentralityProjection & proj = _ana->apply(ev, _projName); _activeBins.clear(); 
   const int nCent = _cent.size();
   for (int iCent = 0; iCent < nCent; ++iCent) {
-    if ( inRange(proj[iCent % _nProj], _cent[iCent]) )
+    if ( inRange(proj(), _cent[iCent]) )
       _activeBins.push_back(iCent);
   }
 }
 
-AnalysisObjectPtr PercentileBase::getObject(string path) const {
-  foreach (const AnalysisObjectPtr & ao, _ana->analysisObjects())
-    if (ao->path() == path ) return ao;
-  return AnalysisObjectPtr();
-}
-
 }
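
For orientation, here is a minimal, hypothetical sketch (not part of the patch) of how an analysis could drive the revised Percentile interface after this change. The analysis name MyAnalysis, the projection label "CMULT" and the members _h, _c and _pct are illustrative assumptions only; booking of the Histo1D, the Counter and the CentralityProjection is taken as already done in init() through the usual helpers and is not shown.

// Illustrative usage sketch only -- not part of this patch.
#include "Rivet/Analysis.hh"
#include "Rivet/Tools/Percentile.hh"

class MyAnalysis : public Rivet::Analysis {
public:

  DEFAULT_RIVET_ANALYSIS_CTOR(MyAnalysis);

  void init() {
    // Booking of _h (Histo1D), _c (Counter) and declaration of a
    // CentralityProjection registered under "CMULT" is assumed here (omitted).
    _pct = Rivet::Percentile<YODA::Histo1D>(this, "CMULT");
    // Attach the pre-booked objects; fill _h only for 0-10% central events.
    _pct.add(_h, _c, {0.0, 10.0});
  }

  void analyze(const Rivet::Event& event) {
    _pct.init(event);              // run the projection, select the active bins
    const double x = 1.0;          // placeholder observable
    _pct.fill(x, event.weight());  // forwarded to Histo1D::fill(x, w) per active bin
  }

  void finalize() {
    _pct.normalizePerEvent();      // divide each histogram by its summed event weight
  }

private:

  Rivet::Histo1DPtr _h;
  Rivet::CounterPtr _c;
  Rivet::Percentile<YODA::Histo1D> _pct;
};

DECLARE_RIVET_PLUGIN(MyAnalysis);

The point exercised above is the API change in this patch: add() no longer clones a template object once per internal centrality definition of the projection; the caller supplies one pre-booked analysis object plus its event counter per centrality bin, and selectBins() now tests the projection's single percentile value (proj()) instead of indexing over _nProj alternative definitions.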