diff --git a/analyses/pluginMC/MC_REENTRANT.cc b/analyses/pluginMC/MC_REENTRANT.cc new file mode 100644 --- /dev/null +++ b/analyses/pluginMC/MC_REENTRANT.cc @@ -0,0 +1,87 @@ +// -*- C++ -*- +#include "Rivet/Analysis.hh" +#include "Rivet/Projections/FinalState.hh" +#include "Rivet/Projections/ChargedFinalState.hh" + +namespace Rivet { + + + /// Generic analysis looking at pp pseudorapidity distributions at + /// two collision energies. It makes use of the ability to read in a + /// pre-existing YODA file, and if histograms for both energies are + /// filled when finalize() is called, a ratio plot is produced. + class MC_REENTRANT : public Analysis { + public: + + /// Constructor + DEFAULT_RIVET_ANALYSIS_CTOR(MC_REENTRANT); + + + /// @name Analysis methods + //@{ + + /// Book histograms and initialise projections before the run + void init() { + + // Projections + const FinalState fs(Cuts::abseta < 5 && Cuts::pT > 500*MeV); + declare(fs, "FS"); + declare(ChargedFinalState(fs), "CFS"); + + // Histograms + /// @todo Choose E/pT ranges based on input energies... can't do anything about kin. cuts, though + _histEta70 = bookHisto1D("Eta70", 50, -5, 5); + _histEta09 = bookHisto1D("Eta09", 50, -5, 5); + _histEtaR = bookScatter2D("EtaR", 50, -5, 5); + fill70 = fill09 = false; + } + + + /// Perform the per-event analysis + void analyze(const Event& event) { + const double weight = event.weight(); + + if (fuzzyEquals(sqrtS()/GeV, 900)) + fill09 = true; + else if (fuzzyEquals(sqrtS()/GeV, 7000)) + fill70 = true; + + // Fill the eta histogram appropriate to this beam energy with the charged FS particles + const FinalState& cfs = apply<FinalState>(event, "CFS"); + for (const Particle& p : cfs.particles()) { + if (fuzzyEquals(sqrtS()/GeV, 900)) + _histEta09->fill(p.eta(), weight); + else if (fuzzyEquals(sqrtS()/GeV, 7000)) + _histEta70->fill(p.eta(), weight); + } + } + + + /// Finalize + void finalize() { + if ( fill70 ) scale(_histEta70, 1.0/sumOfWeights()); + if ( fill09 ) scale(_histEta09, 1.0/sumOfWeights()); + if ( _histEta70->numEntries() > 0 && _histEta09->numEntries() > 0 ) + divide(_histEta70, _histEta09, _histEtaR); + } + + //@} + + + private: + + /// @name Histograms + //@{ + Histo1DPtr _histEta09, _histEta70; + Scatter2DPtr _histEtaR; + //@} + + bool fill09, fill70; + + }; + + + // The hook for the plugin system + DECLARE_RIVET_PLUGIN(MC_REENTRANT); + +} diff --git a/bin/rivet b/bin/rivet --- a/bin/rivet +++ b/bin/rivet @@ -1,661 +1,667 @@ #! /usr/bin/env python """\ Run Rivet analyses on HepMC events read from a file or Unix pipe Examples: %prog [options] <hepmcfile> [<hepmcfile2> ...] or mkfifo fifo.hepmc my_generator -o fifo.hepmc & %prog [options] fifo.hepmc ENVIRONMENT: * RIVET_ANALYSIS_PATH: list of paths to be searched for plugin analysis libraries at runtime * RIVET_DATA_PATH: list of paths to be searched for data files * See the documentation for more environment variables. """ from __future__ import print_function import os, sys ## Load the rivet module try: import rivet except: ## If rivet loading failed, try to bootstrap the Python path! try: # TODO: Is this a good idea? Maybe just notify the user that their PYTHONPATH is wrong?
import commands modname = sys.modules[__name__].__file__ binpath = os.path.dirname(modname) rivetconfigpath = os.path.join(binpath, "rivet-config") rivetpypath = commands.getoutput(rivetconfigpath + " --pythonpath") sys.path.append(rivetpypath) import rivet except: sys.stderr.write("The rivet Python module could not be loaded: is your PYTHONPATH set correctly?\n") sys.exit(5) rivet.util.check_python_version() rivet.util.set_process_name("rivet") import time, datetime, logging, signal ## Parse command line options from optparse import OptionParser, OptionGroup parser = OptionParser(usage=__doc__, version="rivet v%s" % rivet.version()) anagroup = OptionGroup(parser, "Analysis handling") anagroup.add_option("-a", "--analysis", "--analyses", dest="ANALYSES", action="append", default=[], metavar="ANA", help="add an analysis (or comma-separated list of analyses) to the processing list.") anagroup.add_option("--list-analyses", "--list", dest="LIST_ANALYSES", action="store_true", default=False, help="show the list of available analyses' names. With -v, it shows the descriptions, too") anagroup.add_option("--list-keywords", "--keywords", dest="LIST_KEYWORDS", action="store_true", default=False, help="show the list of available keywords.") anagroup.add_option("--list-used-analyses", action="store_true", dest="LIST_USED_ANALYSES", default=False, help="list the analyses used by this command (after subtraction of inappropriate ones)") anagroup.add_option("--show-analysis", "--show-analyses", "--show", dest="SHOW_ANALYSES", action="append", default=[], help="show the details of an analysis") anagroup.add_option("--show-bibtex", dest="SHOW_BIBTEX", action="store_true", default=False, help="show BibTeX entries for all used analyses") anagroup.add_option("--analysis-path", dest="ANALYSIS_PATH", metavar="PATH", default=None, help="specify the analysis search path (cf. $RIVET_ANALYSIS_PATH).") # TODO: remove/deprecate the append? anagroup.add_option("--analysis-path-append", dest="ANALYSIS_PATH_APPEND", metavar="PATH", default=None, help="append to the analysis search path (cf. $RIVET_ANALYSIS_PATH).") anagroup.add_option("--pwd", dest="ANALYSIS_PATH_PWD", action="store_true", default=False, help="append the current directory (pwd) to the analysis/data search paths (cf. $RIVET_ANALYSIS_PATH).") # TODO: add control for more paths? 
parser.add_option_group(anagroup) extragroup = OptionGroup(parser, "Extra run settings") extragroup.add_option("-o", "-H", "--histo-file", dest="HISTOFILE", default="Rivet.yoda", help="specify the output histo file path (default = %default)") +extragroup.add_option("-p", "--preload-file", dest="PRELOADFILE", + default=None, help="specify an old yoda file to initialise histograms from (default = %default)") extragroup.add_option("--no-histo-file", dest="WRITE_DATA", action="store_false", default=True, help="don't write out any histogram file at the end of the run (default = write)") extragroup.add_option("-x", "--cross-section", dest="CROSS_SECTION", default=None, metavar="XS", help="specify the signal process cross-section in pb") extragroup.add_option("-n", "--nevts", dest="MAXEVTNUM", type="int", default=None, metavar="NUM", help="restrict the max number of events to process") extragroup.add_option("--nskip", dest="EVTSKIPNUM", type="int", default=0, metavar="NUM", help="skip NUM events read from input before beginning processing") extragroup.add_option("--runname", dest="RUN_NAME", default=None, metavar="NAME", help="give an optional run name, to be prepended as a 'top level directory' in histo paths") extragroup.add_option("--ignore-beams", dest="IGNORE_BEAMS", action="store_true", default=False, help="ignore input event beams when checking analysis compatibility. " "WARNING: analyses may not work correctly, or at all, with inappropriate beams") parser.add_option_group(extragroup) timinggroup = OptionGroup(parser, "Timeouts and periodic operations") timinggroup.add_option("--event-timeout", dest="EVENT_TIMEOUT", type="int", default=21600, metavar="NSECS", help="max time in whole seconds to wait for an event to be generated from the specified source (default = %default)") timinggroup.add_option("--run-timeout", dest="RUN_TIMEOUT", type="int", default=None, metavar="NSECS", help="max time in whole seconds to wait for the run to finish. This can be useful on batch systems such " "as the LCG Grid where tokens expire on a fixed wall-clock and can render long Rivet runs unable to write " "out the final histogram file (default = unlimited)") timinggroup.add_option("--histo-interval", dest="HISTO_WRITE_INTERVAL", type=int, default=1000, help="specify the number of events between histogram file updates, default = %default. " "Set to 0 to only write out at the end of the run.
Note that intermediate histograms will be those " "from the analyze step only: analysis finalizing is currently not executed until the end of the run.") parser.add_option_group(timinggroup) verbgroup = OptionGroup(parser, "Verbosity control") parser.add_option("-l", dest="NATIVE_LOG_STRS", action="append", default=[], help="set a log level in the Rivet library") verbgroup.add_option("-v", "--verbose", action="store_const", const=logging.DEBUG, dest="LOGLEVEL", default=logging.INFO, help="print debug (very verbose) messages") verbgroup.add_option("-q", "--quiet", action="store_const", const=logging.WARNING, dest="LOGLEVEL", default=logging.INFO, help="be very quiet") parser.add_option_group(verbgroup) opts, args = parser.parse_args() ## Override/modify analysis search path if opts.ANALYSIS_PATH: rivet.setAnalysisLibPaths(opts.ANALYSIS_PATH.split(":")) rivet.setAnalysisDataPaths(opts.ANALYSIS_PATH.split(":")) if opts.ANALYSIS_PATH_APPEND: for ap in opts.ANALYSIS_PATH_APPEND.split(":"): rivet.addAnalysisLibPath(ap) rivet.addAnalysisDataPath(ap) if opts.ANALYSIS_PATH_PWD: rivet.addAnalysisLibPath(os.path.abspath(".")) rivet.addAnalysisDataPath(os.path.abspath(".")) ## Configure logging logging.basicConfig(level=opts.LOGLEVEL, format="%(message)s") for l in opts.NATIVE_LOG_STRS: name, level = None, None try: name, level = l.split("=") except: name = "Rivet" level = l ## Fix name if name != "Rivet" and not name.startswith("Rivet."): name = "Rivet." + name try: ## Get right error type level = rivet.LEVELS.get(level.upper(), None) logging.debug("Setting log level: %s %d" % (name, level)) rivet.setLogLevel(name, level) except: logging.warning("Couldn't process logging string '%s'" % l) ############################ ## Listing available analyses/keywords def getAnalysesByKeyword(alist, kstring): add, veto, ret = [], [], [] bits = [i for i in kstring.replace("^@", "@^").split("@") if len(i) > 0] for b in bits: if b.startswith("^"): veto.append(b.strip("^")) else: add.append(b) add = set(add) veto = set(veto) for a in alist: kwds = set([i.lower() for i in rivet.AnalysisLoader.getAnalysis(a).keywords()]) if kwds.intersection(veto) and len(kwds.intersection(add)) == len(list(add)): ret.append(a) return ret ## List of analyses all_analyses = rivet.AnalysisLoader.analysisNames() if opts.LIST_ANALYSES: ## Treat args as case-insensitive regexes if present regexes = None if args: import re regexes = [re.compile(arg, re.I) for arg in args] try: # import tempfile, subprocess # tf, tfpath = tempfile.mkstemp(prefix="rivet-list.") for aname in all_analyses: if not regexes: toshow = True else: toshow = False for regex in regexes: if regex.search(aname): toshow = True break if toshow: msg = aname if opts.LOGLEVEL <= logging.INFO: a = rivet.AnalysisLoader.getAnalysis(aname) st = "" if a.status() == "VALIDATED" else ("[" + a.status() + "] ") msg = "%-25s %s" % (aname, st + rivet.util.detex(a.summary())) if opts.LOGLEVEL < logging.INFO: if a.keywords(): msg += " [" + " ".join(a.keywords()) + "]" if a.luminosityfb(): msg += " [ \int L = %s fb^{-1} ]"%a.luminosityfb() print(msg) #os.write(tf, msg + "\n") # if os.path.getsize(tfpath) > 0: # pager = subprocess.Popen(["less", "-FX", tfpath]) #, stdin=subprocess.PIPE) # pager.communicate() finally: # os.unlink(tfpath) #< always clean up pass sys.exit(0) def getKeywords(alist): all_keywords = [] for a in alist: all_keywords.extend(rivet.AnalysisLoader.getAnalysis(a).keywords()) all_keywords = [i.lower() for i in all_keywords] return sorted(list(set(all_keywords))) ## List 
keywords if opts.LIST_KEYWORDS: # a = rivet.AnalysisLoader.getAnalysis(aname) for k in getKeywords(all_analyses): print(k) sys.exit(0) ## Show analyses' details if len(opts.SHOW_ANALYSES) > 0: toshow = [] for i, a in enumerate(opts.SHOW_ANALYSES): a_up = a.upper() if a_up in all_analyses and a_up not in toshow: toshow.append(a_up) else: ## Treat as a case-insensitive regex import re regex = re.compile(a, re.I) for ana in all_analyses: if regex.search(ana) and a_up not in toshow: toshow.append(ana) msgs = [] for i, name in enumerate(sorted(toshow)): import textwrap ana = rivet.AnalysisLoader.getAnalysis(name) msg = "" msg += name + "\n" msg += (len(name) * "=") + "\n\n" msg += rivet.util.detex(ana.summary()) + "\n\n" msg += "Status: " + ana.status() + "\n\n" # TODO: reduce to only show Inspire in v3 if ana.inspireId(): msg += "Inspire ID: " + ana.inspireId() + "\n" msg += "Inspire URL: http://inspire-hep.net/record/" + ana.inspireId() + "\n" msg += "HepData URL: http://hepdata.net/record/ins" + ana.inspireId() + "\n" elif ana.spiresId(): msg += "Spires ID: " + ana.spiresId() + "\n" msg += "Inspire URL: http://inspire-hep.net/search?p=find+key+" + ana.spiresId() + "\n" msg += "HepData URL: http://hepdata.cedar.ac.uk/view/irn" + ana.spiresId() + "\n" if ana.year(): msg += "Year of publication: " + ana.year() + "\n" if ana.bibKey(): msg += "BibTeX key: " + ana.bibKey() + "\n" msg += "Authors:\n" for a in ana.authors(): msg += " " + a + "\n" msg += "\n" msg += "Description:\n" twrap = textwrap.TextWrapper(width=75, initial_indent=2*" ", subsequent_indent=2*" ") msg += twrap.fill(rivet.util.detex(ana.description())) + "\n\n" if ana.experiment(): msg += "Experiment: " + ana.experiment() if ana.collider(): msg += "(%s)" % ana.collider() msg += "\n" # TODO: move this formatting into Analysis or a helper function? 
if ana.requiredBeams(): def pid_to_str(pid): if pid == 11: return "e-" elif pid == -11: return "e+" elif pid == 2212: return "p+" elif pid == -2212: return "p-" elif pid == 10000: return "*" else: return str(pid) beamstrs = [] for bp in ana.requiredBeams(): beamstrs.append(pid_to_str(bp[0]) + " " + pid_to_str(bp[1])) msg += "Beams:" + ", ".join(beamstrs) + "\n" if ana.requiredEnergies(): msg += "Beam energies:" + "; ".join(["(%0.1f, %0.1f) GeV\n" % (epair[0], epair[1]) for epair in ana.requiredEnergies()]) else: msg += "Beam energies: ANY\n" if ana.runInfo(): msg += "Run details:\n" twrap = textwrap.TextWrapper(width=75, initial_indent=2*" ", subsequent_indent=4*" ") for l in ana.runInfo().split("\n"): msg += twrap.fill(l) + "\n" if ana.luminosityfb(): msg+= "\nIntegrated data luminosity = %s inverse fb.\n"%ana.luminosityfb() if ana.keywords(): msg += "\nAnalysis keywords:" for k in ana.keywords(): msg += " %s"%k msg+= "\n\n" if ana.references(): msg += "\n" + "References:\n" for r in ana.references(): url = None if r.startswith("arXiv:"): code = r.split()[0].replace("arXiv:", "") url = "http://arxiv.org/abs/" + code elif r.startswith("doi:"): code = r.replace("doi:", "") url = "http://dx.doi.org/" + code if url is not None: r += " - " + url msg += " " + r + "\n" ## Add to the output msgs.append(msg) ## Write the combined messages to a temporary file and page it if msgs: try: import tempfile, subprocess tffd, tfpath = tempfile.mkstemp(prefix="rivet-show.") os.write(tffd, "\n\n".join(msgs)) if sys.stdout.isatty(): pager = subprocess.Popen(["less", "-FX", tfpath]) #, stdin=subprocess.PIPE) pager.communicate() else: f = open(tfpath) print(f.read()) f.close() finally: os.unlink(tfpath) #< always clean up sys.exit(0) ############################ ## Actual analysis runs ## We allow comma-separated lists of analysis names -- normalise the list here newanas = [] for a in opts.ANALYSES: if "," in a: newanas += a.split(",") elif "@" in a: #< NB. this bans combination of ana lists and keywords in a single arg temp = getAnalysesByKeyword(all_analyses, a) for i in temp: newanas.append(i) else: newanas.append(a) opts.ANALYSES = newanas ## Parse supplied cross-section if opts.CROSS_SECTION is not None: xsstr = opts.CROSS_SECTION try: opts.CROSS_SECTION = float(xsstr) except: import re suffmatch = re.search(r"[^\d.]", xsstr) if not suffmatch: raise ValueError("Bad cross-section string: %s" % xsstr) factor = base = None suffstart = suffmatch.start() if suffstart != -1: base = xsstr[:suffstart] suffix = xsstr[suffstart:].lower() if suffix == "mb": factor = 1e+9 elif suffix == "mub": factor = 1e+6 elif suffix == "nb": factor = 1e+3 elif suffix == "pb": factor = 1 elif suffix == "fb": factor = 1e-3 elif suffix == "ab": factor = 1e-6 if factor is None or base is None: raise ValueError("Bad cross-section string: %s" % xsstr) xs = float(base) * factor opts.CROSS_SECTION = xs ## Print the available CLI options! 
#if opts.LIST_OPTIONS: # for o in parser.option_list: # print(o.get_opt_string()) # sys.exit(0) ## Set up signal handling RECVD_KILL_SIGNAL = None def handleKillSignal(signum, frame): "Declare us as having been signalled, and return to default handling behaviour" global RECVD_KILL_SIGNAL logging.critical("Signal handler called with signal " + str(signum)) RECVD_KILL_SIGNAL = signum signal.signal(signum, signal.SIG_DFL) ## Signals to handle signal.signal(signal.SIGTERM, handleKillSignal); signal.signal(signal.SIGHUP, handleKillSignal); signal.signal(signal.SIGINT, handleKillSignal); signal.signal(signal.SIGUSR1, handleKillSignal); signal.signal(signal.SIGUSR2, handleKillSignal); try: signal.signal(signal.SIGXCPU, handleKillSignal); except: pass ## Identify HepMC files/streams ## TODO: check readability, deal with stdin if len(args) > 0: HEPMCFILES = args else: HEPMCFILES = ["-"] ## Event number logging def logNEvt(n, starttime, maxevtnum): if n % 10000 == 0: nevtloglevel = logging.CRITICAL elif n % 1000 == 0: nevtloglevel = logging.WARNING elif n % 100 == 0: nevtloglevel = logging.INFO else: nevtloglevel = logging.DEBUG currenttime = datetime.datetime.now().replace(microsecond=0) elapsedtime = currenttime - starttime logging.log(nevtloglevel, "Event %d (%s elapsed)" % (n, str(elapsedtime))) # if maxevtnum is None: # logging.log(nevtloglevel, "Event %d (%s elapsed)" % (n, str(elapsedtime))) # else: # remainingtime = (maxevtnum-n) * elapsedtime.total_seconds() / float(n) # eta = time.strftime("%a %b %d %H:%M", datetime.localtime(currenttime + remainingtime)) # logging.log(nevtloglevel, "Event %d (%d s elapsed / %d s left) -> ETA: %s" % # (n, elapsedtime, remainingtime, eta)) ## Do some checks on output histo file, before we stat the event loop histo_parentdir = os.path.dirname(os.path.abspath(opts.HISTOFILE)) if not os.path.exists(histo_parentdir): logging.error('Parent path of output histogram file does not exist: %s\nExiting.' % histo_parentdir) sys.exit(4) if not os.access(histo_parentdir,os.W_OK): logging.error('Insufficient permissions to write output histogram file to directory %s\nExiting.' % histo_parentdir) sys.exit(4) ## Set up analysis handler RUNNAME = opts.RUN_NAME or "" ah = rivet.AnalysisHandler(RUNNAME) ah.setIgnoreBeams(opts.IGNORE_BEAMS) + for a in opts.ANALYSES: ## Print warning message and exit if not a valid analysis name if not a in all_analyses: logging.warning("'%s' is not a known Rivet analysis! Do you need to set RIVET_ANALYSIS_PATH or use the --pwd switch?\n" % a) # TODO: lay out more neatly, or even try for a "did you mean XXXX?" heuristic? 
logging.warning("There are %d currently available analyses:\n" % len(all_analyses) + ", ".join(all_analyses)) sys.exit(1) logging.debug("Adding analysis '%s'" % a) ah.addAnalysis(a) +if opts.PRELOADFILE is not None: + ah.readData(opts.PRELOADFILE) + if opts.SHOW_BIBTEX: bibs = [] for aname in sorted(ah.analysisNames()): ana = rivet.AnalysisLoader.getAnalysis(aname) bibs.append("% " + aname + "\n" + ana.bibTeX()) if bibs: print("\nBibTeX for used Rivet analyses:\n") print("% --------------------------\n") print("\n\n".join(bibs) + "\n") print("% --------------------------\n") ## Read and process events run = rivet.Run(ah) if opts.CROSS_SECTION is not None: logging.info("User-supplied cross-section = %e pb" % opts.CROSS_SECTION) run.setCrossSection(opts.CROSS_SECTION) if opts.LIST_USED_ANALYSES is not None: run.setListAnalyses(opts.LIST_USED_ANALYSES) ## Print platform type import platform starttime = datetime.datetime.now().replace(microsecond=0) logging.info("Rivet %s running on machine %s (%s) at %s" % \ (rivet.version(), platform.node(), platform.machine(), str(starttime))) def min_nonnull(a, b): "A version of min which considers None to always be greater than a real number" rtn = min(a, b) if rtn is not None: return rtn if a is not None: return a return b ## Set up an event timeout handler class TimeoutException(Exception): pass if opts.EVENT_TIMEOUT or opts.RUN_TIMEOUT: def evttimeouthandler(signum, frame): logging.warn("It has taken more than %d secs to get an event! Is the input event stream working?" % min_nonnull(opts.EVENT_TIMEOUT, opts.RUN_TIMEOUT)) raise TimeoutException("Event timeout") signal.signal(signal.SIGALRM, evttimeouthandler) ## Init run based on one event hepmcfile = HEPMCFILES[0] ## Apply a file-level weight derived from the filename hepmcfileweight = 1.0 if ":" in hepmcfile: hepmcfile, hepmcfileweight = hepmcfile.rsplit(":", 1) hepmcfileweight = float(hepmcfileweight) try: if opts.EVENT_TIMEOUT or opts.RUN_TIMEOUT: signal.alarm(min_nonnull(opts.EVENT_TIMEOUT, opts.RUN_TIMEOUT)) init_ok = run.init(hepmcfile, hepmcfileweight) signal.alarm(0) if not init_ok: logging.error("Failed to initialise using event file '%s'... exiting" % hepmcfile) sys.exit(2) except TimeoutException as te: logging.error("Timeout in initialisation from event file '%s'... exiting" % hepmcfile) sys.exit(3) ## Event loop evtnum = 0 for fileidx, hepmcfile in enumerate(HEPMCFILES): ## Apply a file-level weight derived from the filename hepmcfileweight = 1.0 if ":" in hepmcfile: hepmcfile, hepmcfileweight = hepmcfile.rsplit(":", 1) hepmcfileweight = float(hepmcfileweight) ## Open next HepMC file (NB. this doesn't apply to the first file: it was already used for the run init) if fileidx > 0: run.openFile(hepmcfile, hepmcfileweight) if not run.readEvent(): logging.warning("Could not read events from '%s'" % hepmcfile) continue ## Announce new file msg = "Reading events from '%s'" % hepmcfile if hepmcfileweight != 1.0: msg += " (file weight = %e)" % hepmcfileweight logging.info(msg) ## The event loop while opts.MAXEVTNUM is None or evtnum-opts.EVTSKIPNUM < opts.MAXEVTNUM: evtnum += 1 ## Optional event skipping if evtnum <= opts.EVTSKIPNUM: logging.debug("Skipping event #%i" % evtnum) run.skipEvent(); continue ## Only log the event number once we're actually processing logNEvt(evtnum, starttime, opts.MAXEVTNUM) ## Process this event processed_ok = run.processEvent() if not processed_ok: logging.warn("Event processing failed for evt #%i!" 
% evtnum) break ## Set flag to exit event loop if run timeout exceeded if opts.RUN_TIMEOUT and (time.time() - starttime) > opts.RUN_TIMEOUT: logging.warning("Run timeout of %d secs exceeded... exiting gracefully" % opts.RUN_TIMEOUT) RECVD_KILL_SIGNAL = True ## Exit the loop if signalled if RECVD_KILL_SIGNAL is not None: break ## Read next event (with timeout handling if requested) try: if opts.EVENT_TIMEOUT: signal.alarm(opts.EVENT_TIMEOUT) read_ok = run.readEvent() signal.alarm(0) if not read_ok: break except TimeoutException as te: logging.error("Timeout in reading event from '%s'... exiting" % hepmcfile) sys.exit(3) ## Write a histo file snapshot if appropriate if opts.HISTO_WRITE_INTERVAL is not None and opts.HISTO_WRITE_INTERVAL > 0: if evtnum % opts.HISTO_WRITE_INTERVAL == 0: ah.writeData(opts.HISTOFILE) ## Print end-of-loop messages loopendtime = datetime.datetime.now().replace(microsecond=0) logging.info("Finished event loop at %s" % str(loopendtime)) logging.info("Cross-section = %e pb" % ah.crossSection()) print() ## Finalize and write out data file run.finalize() if opts.WRITE_DATA: ah.writeData(opts.HISTOFILE) print() endtime = datetime.datetime.now().replace(microsecond=0) logging.info("Rivet run completed at %s, time elapsed = %s" % (str(endtime), str(endtime-starttime))) print() logging.info("Histograms written to %s" % os.path.abspath(opts.HISTOFILE)) diff --git a/pyext/rivet/core.pyx b/pyext/rivet/core.pyx --- a/pyext/rivet/core.pyx +++ b/pyext/rivet/core.pyx @@ -1,215 +1,218 @@ # distutils: language = c++ cimport rivet as c from cython.operator cimport dereference as deref # Need to be careful with memory management -- perhaps use the base object that # we used in YODA? cdef extern from "<utility>" namespace "std" nogil: cdef c.unique_ptr[c.Analysis] move(c.unique_ptr[c.Analysis]) cdef class AnalysisHandler: cdef c.AnalysisHandler *_ptr def __cinit__(self): self._ptr = new c.AnalysisHandler() def __del__(self): del self._ptr def setIgnoreBeams(self, ignore=True): self._ptr.setIgnoreBeams(ignore) def addAnalysis(self, name): self._ptr.addAnalysis(name) return self def analysisNames(self): anames = self._ptr.analysisNames() return [a for a in anames] # def analysis(self, aname): # cdef c.Analysis* ptr = self._ptr.analysis(aname) # cdef Analysis pyobj = Analysis.__new__(Analysis) # if not ptr: # return None # pyobj._ptr = ptr # return pyobj + def readData(self, name): + self._ptr.readData(name) + def writeData(self, name): self._ptr.writeData(name) def crossSection(self): return self._ptr.crossSection() def finalize(self): self._ptr.finalize() cdef class Run: cdef c.Run *_ptr def __cinit__(self, AnalysisHandler h): self._ptr = new c.Run(h._ptr[0]) def __del__(self): del self._ptr def setCrossSection(self, double x): self._ptr.setCrossSection(x) return self def setListAnalyses(self, choice): self._ptr.setListAnalyses(choice) return self def init(self, name, weight=1.0): return self._ptr.init(name, weight) def openFile(self, name, weight=1.0): return self._ptr.openFile(name, weight) def readEvent(self): return self._ptr.readEvent() def skipEvent(self): return self._ptr.skipEvent() def processEvent(self): return self._ptr.processEvent() def finalize(self): return self._ptr.finalize() cdef class Analysis: cdef c.unique_ptr[c.Analysis] _ptr def __init__(self): raise RuntimeError('This class cannot be instantiated') def requiredBeams(self): return deref(self._ptr).requiredBeams() def requiredEnergies(self): return deref(self._ptr).requiredEnergies() def keywords(self): return 
deref(self._ptr).keywords() def authors(self): return deref(self._ptr).authors() def bibKey(self): return deref(self._ptr).bibKey() def name(self): return deref(self._ptr).name() def bibTeX(self): return deref(self._ptr).bibTeX() def references(self): return deref(self._ptr).references() def collider(self): return deref(self._ptr).collider() def description(self): return deref(self._ptr).description() def experiment(self): return deref(self._ptr).experiment() def inspireId(self): return deref(self._ptr).inspireId() def spiresId(self): return deref(self._ptr).spiresId() def runInfo(self): return deref(self._ptr).runInfo() def status(self): return deref(self._ptr).status() def summary(self): return deref(self._ptr).summary() def year(self): return deref(self._ptr).year() def luminosityfb(self): return deref(self._ptr).luminosityfb() #cdef object LEVELS = dict(TRACE = 0, DEBUG = 10, INFO = 20, WARN = 30, WARNING = 30, ERROR = 40, CRITICAL = 50, ALWAYS = 50) cdef class AnalysisLoader: @staticmethod def analysisNames(): return c.AnalysisLoader_analysisNames() @staticmethod def getAnalysis(name): cdef c.unique_ptr[c.Analysis] ptr = c.AnalysisLoader_getAnalysis(name) cdef Analysis pyobj = Analysis.__new__(Analysis) if not ptr: return None pyobj._ptr = move(ptr) # Create python object return pyobj def getAnalysisLibPaths(): return c.getAnalysisLibPaths() def setAnalysisLibPaths(xs): c.setAnalysisLibPaths(xs) def addAnalysisLibPath(path): c.addAnalysisLibPath(path) def setAnalysisDataPaths(xs): c.setAnalysisDataPaths(xs) def addAnalysisDataPath(path): c.addAnalysisDataPath(path) def getAnalysisDataPaths(): return c.getAnalysisDataPaths() def findAnalysisDataFile(q): return c.findAnalysisDataFile(q) def getAnalysisRefPaths(): return c.getAnalysisRefPaths() def findAnalysisRefFile(q): return c.findAnalysisRefFile(q) def getAnalysisInfoPaths(): return c.getAnalysisInfoPaths() def findAnalysisInfoFile(q): return c.findAnalysisInfoFile(q) def getAnalysisPlotPaths(): return c.getAnalysisPlotPaths() def findAnalysisPlotFile(q): return c.findAnalysisPlotFile(q) def version(): return c.version() def setLogLevel(name, level): c.setLogLevel(name, level) diff --git a/pyext/rivet/rivet.pxd b/pyext/rivet/rivet.pxd --- a/pyext/rivet/rivet.pxd +++ b/pyext/rivet/rivet.pxd @@ -1,91 +1,92 @@ from libcpp.map cimport map from libcpp.pair cimport pair from libcpp.vector cimport vector from libcpp cimport bool from libcpp.string cimport string from libcpp.memory cimport unique_ptr ctypedef int PdgId ctypedef pair[PdgId,PdgId] PdgIdPair cdef extern from "Rivet/AnalysisHandler.hh" namespace "Rivet": cdef cppclass AnalysisHandler: void setIgnoreBeams(bool) AnalysisHandler& addAnalysis(string) vector[string] analysisNames() const # Analysis* analysis(string) void writeData(string&) + void readData(string&) double crossSection() void finalize() cdef extern from "Rivet/Run.hh" namespace "Rivet": cdef cppclass Run: Run(AnalysisHandler) Run& setCrossSection(double) # For chaining? 
Run& setListAnalyses(bool) bool init(string, double) # $2=1.0 bool openFile(string, double) # $2=1.0 bool readEvent() bool skipEvent() bool processEvent() bool finalize() cdef extern from "Rivet/Analysis.hh" namespace "Rivet": cdef cppclass Analysis: vector[PdgIdPair]& requiredBeams() vector[pair[double, double]] requiredEnergies() vector[string] authors() vector[string] references() vector[string] keywords() string name() string bibTeX() string bibKey() string collider() string description() string experiment() string inspireId() string spiresId() string runInfo() string status() string summary() string year() string luminosityfb() # Might need to translate the following errors, although I believe 'what' is now # preserved. But often, we need the exception class name. #Error #RangeError #LogicError #PidError #InfoError #WeightError #UserError cdef extern from "Rivet/AnalysisLoader.hh": vector[string] AnalysisLoader_analysisNames "Rivet::AnalysisLoader::analysisNames" () unique_ptr[Analysis] AnalysisLoader_getAnalysis "Rivet::AnalysisLoader::getAnalysis" (string) cdef extern from "Rivet/Tools/RivetPaths.hh" namespace "Rivet": vector[string] getAnalysisLibPaths() void setAnalysisLibPaths(vector[string]) void addAnalysisLibPath(string) vector[string] getAnalysisDataPaths() void setAnalysisDataPaths(vector[string]) void addAnalysisDataPath(string) string findAnalysisDataFile(string) vector[string] getAnalysisRefPaths() string findAnalysisRefFile(string) vector[string] getAnalysisInfoPaths() string findAnalysisInfoFile(string) vector[string] getAnalysisPlotPaths() string findAnalysisPlotFile(string) cdef extern from "Rivet/Rivet.hh" namespace "Rivet": string version() cdef extern from "Rivet/Tools/Logging.hh": void setLogLevel "Rivet::Log::setLevel" (string, int) diff --git a/src/Core/Analysis.cc b/src/Core/Analysis.cc --- a/src/Core/Analysis.cc +++ b/src/Core/Analysis.cc @@ -1,882 +1,889 @@ // -*- C++ -*- #include "Rivet/Config/RivetCommon.hh" #include "Rivet/Analysis.hh" #include "Rivet/AnalysisHandler.hh" #include "Rivet/AnalysisInfo.hh" #include "Rivet/Tools/BeamConstraint.hh" namespace Rivet { Analysis::Analysis(const string& name) : _crossSection(-1.0), _gotCrossSection(false), _analysishandler(NULL) { ProjectionApplier::_allowProjReg = false; _defaultname = name; unique_ptr<AnalysisInfo> ai = AnalysisInfo::make(name); assert(ai); _info = move(ai); assert(_info); } double Analysis::sqrtS() const { return handler().sqrtS(); } const ParticlePair& Analysis::beams() const { return handler().beams(); } const PdgIdPair Analysis::beamIds() const { return handler().beamIds(); } const string Analysis::histoDir() const { /// @todo Cache in a member variable string _histoDir; if (_histoDir.empty()) { _histoDir = "/" + name(); if (handler().runName().length() > 0) { _histoDir = "/" + handler().runName() + _histoDir; } replace_all(_histoDir, "//", "/"); //< iterates until none } return _histoDir; } const string Analysis::histoPath(const string& hname) const { const string path = histoDir() + "/" + hname; return path; } const string Analysis::histoPath(unsigned int datasetId, unsigned int xAxisId, unsigned int yAxisId) const { return histoDir() + "/" + makeAxisCode(datasetId, xAxisId, yAxisId); } const string Analysis::makeAxisCode(unsigned int datasetId, unsigned int xAxisId, unsigned int yAxisId) const { stringstream axisCode; axisCode << "d"; if (datasetId < 10) axisCode << 0; axisCode << datasetId; axisCode << "-x"; if (xAxisId < 10) axisCode << 0; axisCode << xAxisId; axisCode << "-y"; if 
(yAxisId < 10) axisCode << 0; axisCode << yAxisId; return axisCode.str(); } Log& Analysis::getLog() const { string logname = "Rivet.Analysis." + name(); return Log::getLog(logname); } size_t Analysis::numEvents() const { return handler().numEvents(); } double Analysis::sumOfWeights() const { return handler().sumOfWeights(); } /////////////////////////////////////////// bool Analysis::isCompatible(const ParticlePair& beams) const { return isCompatible(beams.first.pid(), beams.second.pid(), beams.first.energy(), beams.second.energy()); } bool Analysis::isCompatible(PdgId beam1, PdgId beam2, double e1, double e2) const { PdgIdPair beams(beam1, beam2); pair<double,double> energies(e1, e2); return isCompatible(beams, energies); } bool Analysis::isCompatible(const PdgIdPair& beams, const pair<double,double>& energies) const { // First check the beam IDs bool beamIdsOk = false; foreach (const PdgIdPair& bp, requiredBeams()) { if (compatible(beams, bp)) { beamIdsOk = true; break; } } if (!beamIdsOk) return false; // Next check that the energies are compatible (within 1% or 1 GeV, whichever is larger, for a bit of UI forgiveness) /// @todo Use some sort of standard ordering to improve comparisons, esp. when the two beams are different particles bool beamEnergiesOk = requiredEnergies().size() > 0 ? false : true; typedef pair<double,double> DoublePair; foreach (const DoublePair& ep, requiredEnergies()) { if ((fuzzyEquals(ep.first, energies.first, 0.01) && fuzzyEquals(ep.second, energies.second, 0.01)) || (fuzzyEquals(ep.first, energies.second, 0.01) && fuzzyEquals(ep.second, energies.first, 0.01)) || (abs(ep.first - energies.first) < 1*GeV && abs(ep.second - energies.second) < 1*GeV) || (abs(ep.first - energies.second) < 1*GeV && abs(ep.second - energies.first) < 1*GeV)) { beamEnergiesOk = true; break; } } return beamEnergiesOk; /// @todo Need to also check internal consistency of the analysis' /// beam requirements with those of the projections it uses. 
} /////////////////////////////////////////// Analysis& Analysis::setCrossSection(double xs) { _crossSection = xs; _gotCrossSection = true; return *this; } double Analysis::crossSection() const { if (!_gotCrossSection || std::isnan(_crossSection)) { string errMsg = "You did not set the cross section for the analysis " + name(); throw Error(errMsg); } return _crossSection; } double Analysis::crossSectionPerEvent() const { const double sumW = sumOfWeights(); assert(sumW != 0.0); return _crossSection / sumW; } //////////////////////////////////////////////////////////// // Histogramming void Analysis::_cacheRefData() const { if (_refdata.empty()) { MSG_TRACE("Getting refdata cache for paper " << name()); _refdata = getRefData(name()); } } CounterPtr Analysis::bookCounter(const string& cname, const string& title) { // const string& xtitle, // const string& ytitle) { const string path = histoPath(cname); CounterPtr ctr = make_shared<Counter>(path, title); addAnalysisObject(ctr); MSG_TRACE("Made counter " << cname << " for " << name()); // hist->setAnnotation("XLabel", xtitle); // hist->setAnnotation("YLabel", ytitle); return ctr; } CounterPtr Analysis::bookCounter(unsigned int datasetId, unsigned int xAxisId, unsigned int yAxisId, const string& title) { // const string& xtitle, // const string& ytitle) { const string axisCode = makeAxisCode(datasetId, xAxisId, yAxisId); return bookCounter(axisCode, title); } Histo1DPtr Analysis::bookHisto1D(const string& hname, size_t nbins, double lower, double upper, const string& title, const string& xtitle, const string& ytitle) { Histo1DPtr hist; try { // try to bind to pre-existing // AnalysisObjectPtr ao = getAnalysisObject(path); // hist = dynamic_pointer_cast<Histo1D>(ao); hist = getHisto1D(hname); /// @todo Test that cast worked /// @todo Also test that binning is as expected? MSG_TRACE("Bound pre-existing histogram " << hname << " for " << name()); } catch (...) { // binding failed; make it from scratch hist = make_shared<Histo1D>(nbins, lower, upper, histoPath(hname), title); addAnalysisObject(hist); MSG_TRACE("Made histogram " << hname << " for " << name()); } hist->setTitle(title); hist->setAnnotation("XLabel", xtitle); hist->setAnnotation("YLabel", ytitle); return hist; } Histo1DPtr Analysis::bookHisto1D(const string& hname, const vector<double>& binedges, const string& title, const string& xtitle, const string& ytitle) { Histo1DPtr hist; try { // try to bind to pre-existing // AnalysisObjectPtr ao = getAnalysisObject(path); // hist = dynamic_pointer_cast<Histo1D>(ao); hist = getHisto1D(hname); /// @todo Test that cast worked /// @todo Also test that binning is as expected? MSG_TRACE("Bound pre-existing histogram " << hname << " for " << name()); } catch (...) 
{ // binding failed; make it from scratch hist = make_shared<Histo1D>(binedges, histoPath(hname), title); addAnalysisObject(hist); MSG_TRACE("Made histogram " << hname << " for " << name()); } hist->setTitle(title); hist->setAnnotation("XLabel", xtitle); hist->setAnnotation("YLabel", ytitle); return hist; } Histo1DPtr Analysis::bookHisto1D(const string& hname, const initializer_list<double>& binedges, const string& title, const string& xtitle, const string& ytitle) { return bookHisto1D(hname, vector<double>{binedges}, title, xtitle, ytitle); } Histo1DPtr Analysis::bookHisto1D(const string& hname, const Scatter2D& refscatter, const string& title, const string& xtitle, const string& ytitle) { Histo1DPtr hist; try { // try to bind to pre-existing // AnalysisObjectPtr ao = getAnalysisObject(path); // hist = dynamic_pointer_cast<Histo1D>(ao); hist = getHisto1D(hname); /// @todo Test that cast worked /// @todo Also test that binning is as expected? MSG_TRACE("Bound pre-existing histogram " << hname << " for " << name()); } catch (...) { // binding failed; make it from scratch hist = make_shared<Histo1D>(refscatter, histoPath(hname)); if (hist->hasAnnotation("IsRef")) hist->rmAnnotation("IsRef"); addAnalysisObject(hist); MSG_TRACE("Made histogram " << hname << " for " << name()); } hist->setTitle(title); hist->setAnnotation("XLabel", xtitle); hist->setAnnotation("YLabel", ytitle); return hist; } Histo1DPtr Analysis::bookHisto1D(const string& hname, const string& title, const string& xtitle, const string& ytitle) { const Scatter2D& refdata = refData(hname); return bookHisto1D(hname, refdata, title, xtitle, ytitle); } Histo1DPtr Analysis::bookHisto1D(unsigned int datasetId, unsigned int xAxisId, unsigned int yAxisId, const string& title, const string& xtitle, const string& ytitle) { const string axisCode = makeAxisCode(datasetId, xAxisId, yAxisId); return bookHisto1D(axisCode, title, xtitle, ytitle); } /// @todo Add booking methods which take a path, titles and *a reference Scatter from which to book* ///////////////// Histo2DPtr Analysis::bookHisto2D(const string& hname, size_t nxbins, double xlower, double xupper, size_t nybins, double ylower, double yupper, const string& title, const string& xtitle, const string& ytitle, const string& ztitle) { const string path = histoPath(hname); Histo2DPtr hist = make_shared<Histo2D>(nxbins, xlower, xupper, nybins, ylower, yupper, path, title); addAnalysisObject(hist); MSG_TRACE("Made 2D histogram " << hname << " for " << name()); hist->setAnnotation("XLabel", xtitle); hist->setAnnotation("YLabel", ytitle); hist->setAnnotation("ZLabel", ztitle); return hist; } Histo2DPtr Analysis::bookHisto2D(const string& hname, const vector<double>& xbinedges, const vector<double>& ybinedges, const string& title, const string& xtitle, const string& ytitle, const string& ztitle) { const string path = histoPath(hname); Histo2DPtr hist = make_shared<Histo2D>(xbinedges, ybinedges, path, title); addAnalysisObject(hist); MSG_TRACE("Made 2D histogram " << hname << " for " << name()); hist->setAnnotation("XLabel", xtitle); hist->setAnnotation("YLabel", ytitle); hist->setAnnotation("ZLabel", ztitle); return hist; } Histo2DPtr Analysis::bookHisto2D(const string& hname, const initializer_list<double>& xbinedges, const initializer_list<double>& ybinedges, const string& title, const string& xtitle, const string& ytitle, const string& ztitle) { return bookHisto2D(hname, vector<double>{xbinedges}, vector<double>{ybinedges}, title, xtitle, ytitle, ztitle); } Histo2DPtr 
Analysis::bookHisto2D(const string& hname, const Scatter3D& refscatter, const string& title, const string& xtitle, const string& ytitle, const string& ztitle) { const string path = histoPath(hname); Histo2DPtr hist( new Histo2D(refscatter, path) ); addAnalysisObject(hist); MSG_TRACE("Made 2D histogram " << hname << " for " << name()); if (hist->hasAnnotation("IsRef")) hist->rmAnnotation("IsRef"); hist->setTitle(title); hist->setAnnotation("XLabel", xtitle); hist->setAnnotation("YLabel", ytitle); hist->setAnnotation("ZLabel", ztitle); return hist; } Histo2DPtr Analysis::bookHisto2D(const string& hname, const string& title, const string& xtitle, const string& ytitle, const string& ztitle) { const Scatter3D& refdata = refData<Scatter3D>(hname); return bookHisto2D(hname, refdata, title, xtitle, ytitle, ztitle); } Histo2DPtr Analysis::bookHisto2D(unsigned int datasetId, unsigned int xAxisId, unsigned int yAxisId, const string& title, const string& xtitle, const string& ytitle, const string& ztitle) { const string axisCode = makeAxisCode(datasetId, xAxisId, yAxisId); return bookHisto2D(axisCode, title, xtitle, ytitle, ztitle); } ///////////////// Profile1DPtr Analysis::bookProfile1D(const string& hname, size_t nbins, double lower, double upper, const string& title, const string& xtitle, const string& ytitle) { const string path = histoPath(hname); Profile1DPtr prof = make_shared<Profile1D>(nbins, lower, upper, path, title); addAnalysisObject(prof); MSG_TRACE("Made profile histogram " << hname << " for " << name()); prof->setAnnotation("XLabel", xtitle); prof->setAnnotation("YLabel", ytitle); return prof; } Profile1DPtr Analysis::bookProfile1D(const string& hname, const vector<double>& binedges, const string& title, const string& xtitle, const string& ytitle) { const string path = histoPath(hname); Profile1DPtr prof = make_shared<Profile1D>(binedges, path, title); addAnalysisObject(prof); MSG_TRACE("Made profile histogram " << hname << " for " << name()); prof->setAnnotation("XLabel", xtitle); prof->setAnnotation("YLabel", ytitle); return prof; } Profile1DPtr Analysis::bookProfile1D(const string& hname, const initializer_list<double>& binedges, const string& title, const string& xtitle, const string& ytitle) { return bookProfile1D(hname, vector<double>{binedges}, title, xtitle, ytitle); } Profile1DPtr Analysis::bookProfile1D(const string& hname, const Scatter2D& refscatter, const string& title, const string& xtitle, const string& ytitle) { const string path = histoPath(hname); Profile1DPtr prof = make_shared<Profile1D>(refscatter, path); addAnalysisObject(prof); MSG_TRACE("Made profile histogram " << hname << " for " << name()); if (prof->hasAnnotation("IsRef")) prof->rmAnnotation("IsRef"); prof->setTitle(title); prof->setAnnotation("XLabel", xtitle); prof->setAnnotation("YLabel", ytitle); return prof; } Profile1DPtr Analysis::bookProfile1D(const string& hname, const string& title, const string& xtitle, const string& ytitle) { const Scatter2D& refdata = refData(hname); return bookProfile1D(hname, refdata, title, xtitle, ytitle); } Profile1DPtr Analysis::bookProfile1D(unsigned int datasetId, unsigned int xAxisId, unsigned int yAxisId, const string& title, const string& xtitle, const string& ytitle) { const string axisCode = makeAxisCode(datasetId, xAxisId, yAxisId); return bookProfile1D(axisCode, title, xtitle, ytitle); } /////////////////// Profile2DPtr Analysis::bookProfile2D(const string& hname, size_t nxbins, double xlower, double xupper, size_t nybins, double ylower, double yupper, const 
string& title, const string& xtitle, const string& ytitle, const string& ztitle) { const string path = histoPath(hname); Profile2DPtr prof = make_shared<Profile2D>(nxbins, xlower, xupper, nybins, ylower, yupper, path, title); addAnalysisObject(prof); MSG_TRACE("Made 2D profile histogram " << hname << " for " << name()); prof->setAnnotation("XLabel", xtitle); prof->setAnnotation("YLabel", ytitle); prof->setAnnotation("ZLabel", ztitle); return prof; } Profile2DPtr Analysis::bookProfile2D(const string& hname, const vector<double>& xbinedges, const vector<double>& ybinedges, const string& title, const string& xtitle, const string& ytitle, const string& ztitle) { const string path = histoPath(hname); Profile2DPtr prof = make_shared<Profile2D>(xbinedges, ybinedges, path, title); addAnalysisObject(prof); MSG_TRACE("Made 2D profile histogram " << hname << " for " << name()); prof->setAnnotation("XLabel", xtitle); prof->setAnnotation("YLabel", ytitle); prof->setAnnotation("ZLabel", ztitle); return prof; } Profile2DPtr Analysis::bookProfile2D(const string& hname, const initializer_list<double>& xbinedges, const initializer_list<double>& ybinedges, const string& title, const string& xtitle, const string& ytitle, const string& ztitle) { return bookProfile2D(hname, vector<double>{xbinedges}, vector<double>{ybinedges}, title, xtitle, ytitle, ztitle); } Profile2DPtr Analysis::bookProfile2D(const string& hname, const Scatter3D& refscatter, const string& title, const string& xtitle, const string& ytitle, const string& ztitle) { const string path = histoPath(hname); Profile2DPtr prof( new Profile2D(refscatter, path) ); addAnalysisObject(prof); MSG_TRACE("Made 2D profile histogram " << hname << " for " << name()); if (prof->hasAnnotation("IsRef")) prof->rmAnnotation("IsRef"); prof->setTitle(title); prof->setAnnotation("XLabel", xtitle); prof->setAnnotation("YLabel", ytitle); prof->setAnnotation("ZLabel", ztitle); return prof; } Profile2DPtr Analysis::bookProfile2D(const string& hname, const string& title, const string& xtitle, const string& ytitle, const string& ztitle) { const Scatter3D& refdata = refData<Scatter3D>(hname); return bookProfile2D(hname, refdata, title, xtitle, ytitle, ztitle); } Profile2DPtr Analysis::bookProfile2D(unsigned int datasetId, unsigned int xAxisId, unsigned int yAxisId, const string& title, const string& xtitle, const string& ytitle, const string& ztitle) { const string axisCode = makeAxisCode(datasetId, xAxisId, yAxisId); return bookProfile2D(axisCode, title, xtitle, ytitle, ztitle); } ///////////////// Scatter2DPtr Analysis::bookScatter2D(unsigned int datasetId, unsigned int xAxisId, unsigned int yAxisId, bool copy_pts, const string& title, const string& xtitle, const string& ytitle) { const string axisCode = makeAxisCode(datasetId, xAxisId, yAxisId); return bookScatter2D(axisCode, copy_pts, title, xtitle, ytitle); } Scatter2DPtr Analysis::bookScatter2D(const string& hname, bool copy_pts, const string& title, const string& xtitle, const string& ytitle) { Scatter2DPtr s; const string path = histoPath(hname); if (copy_pts) { const Scatter2D& refdata = refData(hname); s = make_shared<Scatter2D>(refdata, path); for (Point2D& p : s->points()) p.setY(0, 0); } else { s = make_shared<Scatter2D>(path); } addAnalysisObject(s); MSG_TRACE("Made scatter " << hname << " for " << name()); if (s->hasAnnotation("IsRef")) s->rmAnnotation("IsRef"); s->setTitle(title); s->setAnnotation("XLabel", xtitle); s->setAnnotation("YLabel", ytitle); return s; } Scatter2DPtr Analysis::bookScatter2D(const 
string& hname, size_t npts, double lower, double upper, const string& title, const string& xtitle, const string& ytitle) { + Scatter2DPtr s; const string path = histoPath(hname); - Scatter2DPtr s = make_shared<Scatter2D>(path); - const double binwidth = (upper-lower)/npts; - for (size_t pt = 0; pt < npts; ++pt) { - const double bincentre = lower + (pt + 0.5) * binwidth; - s->addPoint(bincentre, 0, binwidth/2.0, 0); + try { // try to bind to pre-existing + s = getAnalysisObject<Scatter2D>(hname); + /// @todo Also test that binning is as expected? + MSG_TRACE("Bound pre-existing scatter " << path << " for " << name()); + } catch (...) { // binding failed; make it from scratch + s = make_shared<Scatter2D>(path); + const double binwidth = (upper-lower)/npts; + for (size_t pt = 0; pt < npts; ++pt) { + const double bincentre = lower + (pt + 0.5) * binwidth; + s->addPoint(bincentre, 0, binwidth/2.0, 0); + } + addAnalysisObject(s); + MSG_TRACE("Made scatter " << hname << " for " << name()); } - addAnalysisObject(s); - MSG_TRACE("Made scatter " << hname << " for " << name()); s->setTitle(title); s->setAnnotation("XLabel", xtitle); s->setAnnotation("YLabel", ytitle); return s; } Scatter2DPtr Analysis::bookScatter2D(const string& hname, const vector<double>& binedges, const string& title, const string& xtitle, const string& ytitle) { const string path = histoPath(hname); Scatter2DPtr s = make_shared<Scatter2D>(path); for (size_t pt = 0; pt < binedges.size()-1; ++pt) { const double bincentre = (binedges[pt] + binedges[pt+1]) / 2.0; const double binwidth = binedges[pt+1] - binedges[pt]; s->addPoint(bincentre, 0, binwidth/2.0, 0); } addAnalysisObject(s); MSG_TRACE("Made scatter " << hname << " for " << name()); s->setTitle(title); s->setAnnotation("XLabel", xtitle); s->setAnnotation("YLabel", ytitle); return s; } ///////////////////// void Analysis::divide(CounterPtr c1, CounterPtr c2, Scatter1DPtr s) const { const string path = s->path(); *s = *c1 / *c2; s->setPath(path); } void Analysis::divide(const Counter& c1, const Counter& c2, Scatter1DPtr s) const { const string path = s->path(); *s = c1 / c2; s->setPath(path); } void Analysis::divide(Histo1DPtr h1, Histo1DPtr h2, Scatter2DPtr s) const { const string path = s->path(); *s = *h1 / *h2; s->setPath(path); } void Analysis::divide(const Histo1D& h1, const Histo1D& h2, Scatter2DPtr s) const { const string path = s->path(); *s = h1 / h2; s->setPath(path); } void Analysis::divide(Profile1DPtr p1, Profile1DPtr p2, Scatter2DPtr s) const { const string path = s->path(); *s = *p1 / *p2; s->setPath(path); } void Analysis::divide(const Profile1D& p1, const Profile1D& p2, Scatter2DPtr s) const { const string path = s->path(); *s = p1 / p2; s->setPath(path); } void Analysis::divide(Histo2DPtr h1, Histo2DPtr h2, Scatter3DPtr s) const { const string path = s->path(); *s = *h1 / *h2; s->setPath(path); } void Analysis::divide(const Histo2D& h1, const Histo2D& h2, Scatter3DPtr s) const { const string path = s->path(); *s = h1 / h2; s->setPath(path); } void Analysis::divide(Profile2DPtr p1, Profile2DPtr p2, Scatter3DPtr s) const { const string path = s->path(); *s = *p1 / *p2; s->setPath(path); } void Analysis::divide(const Profile2D& p1, const Profile2D& p2, Scatter3DPtr s) const { const string path = s->path(); *s = p1 / p2; s->setPath(path); } /// @todo Counter and Histo2D efficiencies and asymms void Analysis::efficiency(Histo1DPtr h1, Histo1DPtr h2, Scatter2DPtr s) const { const string path = s->path(); *s = YODA::efficiency(*h1, *h2); s->setPath(path); } void 
Analysis::efficiency(const Histo1D& h1, const Histo1D& h2, Scatter2DPtr s) const { const string path = s->path(); *s = YODA::efficiency(h1, h2); s->setPath(path); } void Analysis::asymm(Histo1DPtr h1, Histo1DPtr h2, Scatter2DPtr s) const { const string path = s->path(); *s = YODA::asymm(*h1, *h2); s->setPath(path); } void Analysis::asymm(const Histo1D& h1, const Histo1D& h2, Scatter2DPtr s) const { const string path = s->path(); *s = YODA::asymm(h1, h2); s->setPath(path); } void Analysis::scale(CounterPtr cnt, double factor) { if (!cnt) { MSG_WARNING("Failed to scale counter=NULL in analysis " << name() << " (scale=" << factor << ")"); return; } if (std::isnan(factor) || std::isinf(factor)) { MSG_WARNING("Failed to scale counter=" << cnt->path() << " in analysis: " << name() << " (invalid scale factor = " << factor << ")"); factor = 0; } MSG_TRACE("Scaling counter " << cnt->path() << " by factor " << factor); try { cnt->scaleW(factor); } catch (YODA::Exception& we) { MSG_WARNING("Could not scale counter " << cnt->path()); return; } } void Analysis::normalize(Histo1DPtr histo, double norm, bool includeoverflows) { if (!histo) { MSG_WARNING("Failed to normalize histo=NULL in analysis " << name() << " (norm=" << norm << ")"); return; } MSG_TRACE("Normalizing histo " << histo->path() << " to " << norm); try { histo->normalize(norm, includeoverflows); } catch (YODA::Exception& we) { MSG_WARNING("Could not normalize histo " << histo->path()); return; } } void Analysis::scale(Histo1DPtr histo, double factor) { if (!histo) { MSG_WARNING("Failed to scale histo=NULL in analysis " << name() << " (scale=" << factor << ")"); return; } if (std::isnan(factor) || std::isinf(factor)) { MSG_WARNING("Failed to scale histo=" << histo->path() << " in analysis: " << name() << " (invalid scale factor = " << factor << ")"); factor = 0; } MSG_TRACE("Scaling histo " << histo->path() << " by factor " << factor); try { histo->scaleW(factor); } catch (YODA::Exception& we) { MSG_WARNING("Could not scale histo " << histo->path()); return; } } void Analysis::normalize(Histo2DPtr histo, double norm, bool includeoverflows) { if (!histo) { MSG_ERROR("Failed to normalize histo=NULL in analysis " << name() << " (norm=" << norm << ")"); return; } MSG_TRACE("Normalizing histo " << histo->path() << " to " << norm); try { histo->normalize(norm, includeoverflows); } catch (YODA::Exception& we) { MSG_WARNING("Could not normalize histo " << histo->path()); return; } } void Analysis::scale(Histo2DPtr histo, double factor) { if (!histo) { MSG_ERROR("Failed to scale histo=NULL in analysis " << name() << " (scale=" << factor << ")"); return; } if (std::isnan(factor) || std::isinf(factor)) { MSG_ERROR("Failed to scale histo=" << histo->path() << " in analysis: " << name() << " (invalid scale factor = " << factor << ")"); factor = 0; } MSG_TRACE("Scaling histo " << histo->path() << " by factor " << factor); try { histo->scaleW(factor); } catch (YODA::Exception& we) { MSG_WARNING("Could not scale histo " << histo->path()); return; } } void Analysis::integrate(Histo1DPtr h, Scatter2DPtr s) const { // preserve the path info const string path = s->path(); *s = toIntegralHisto(*h); s->setPath(path); } void Analysis::integrate(const Histo1D& h, Scatter2DPtr s) const { // preserve the path info const string path = s->path(); *s = toIntegralHisto(h); s->setPath(path); } /// @todo 2D versions of integrate... defined how, exactly?!? 
////////////////////////////////// void Analysis::addAnalysisObject(AnalysisObjectPtr ao) { _analysisobjects.push_back(ao); } void Analysis::removeAnalysisObject(const string& path) { for (vector<AnalysisObjectPtr>::iterator it = _analysisobjects.begin(); it != _analysisobjects.end(); ++it) { if ((*it)->path() == path) { _analysisobjects.erase(it); break; } } } void Analysis::removeAnalysisObject(AnalysisObjectPtr ao) { for (vector<AnalysisObjectPtr>::iterator it = _analysisobjects.begin(); it != _analysisobjects.end(); ++it) { if (*it == ao) { _analysisobjects.erase(it); break; } } } }
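Usage sketch for the re-entrant machinery added above. A first run (for example "rivet -a MC_REENTRANT -o first.yoda events900.hepmc") writes the 900 GeV histograms; a second run preloads them via the new -p/--preload-file option (for example "rivet -a MC_REENTRANT -p first.yoda -o final.yoda events7000.hepmc"), so that MC_REENTRANT::finalize() sees entries at both energies and fills the EtaR ratio. The same flow can be driven directly from the Python bindings: the minimal sketch below mirrors the event loop in bin/rivet and uses the new AnalysisHandler.readData binding. The file names first.yoda, events7000.hepmc and final.yoda are illustrative placeholders, and the run-name argument to AnalysisHandler is used here only because bin/rivet passes one.

# Minimal sketch, assuming an earlier 900 GeV run has already produced first.yoda.
import rivet

ah = rivet.AnalysisHandler("")          # empty run name, as passed by bin/rivet
ah.setIgnoreBeams(False)
ah.addAnalysis("MC_REENTRANT")
ah.readData("first.yoda")               # preload the earlier histograms (new binding in this patch)

run = rivet.Run(ah)
if not run.init("events7000.hepmc"):    # init() also reads the first event
    raise RuntimeError("could not initialise from events7000.hepmc")
while True:
    if not run.processEvent():          # analyse the current event
        break
    if not run.readEvent():             # advance to the next event; stops at end of file
        break
run.finalize()
ah.writeData("final.yoda")              # EtaR now holds the 7 TeV / 900 GeV ratio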