Page Menu
Home
HEPForge
Search
Configure Global Search
Log In
Files
F10881162
No One
Temporary
Actions
View File
Edit File
Delete File
View Transforms
Subscribe
Mute Notifications
Award Token
Flag For Later
Size
8 KB
Subscribers
None
View Options
diff --git a/data/anainfo/ATLAS_2011_S9212353.info b/data/anainfo/ATLAS_2011_S9212353.info
--- a/data/anainfo/ATLAS_2011_S9212353.info
+++ b/data/anainfo/ATLAS_2011_S9212353.info
@@ -1,38 +1,36 @@
Name: ATLAS_2011_S9212353
Year: 2011
Summary: Single lepton search for supersymmetry
Experiment: ATLAS
Collider: LHC
SpiresID: 9212353
Status: UNVALIDATED
-Authors:
- -
References:
- Phys. Rev.D85:012006,2012
- arXiv:1109.6606
RunInfo:
BSM signal events at 7000 GeV.
NumEvents: 25000 for BSM signals
Beams: [p+, p+]
Energies: [7000]
Description:
'Single lepton search for supersymmetric particles by ATLAS at 7 TeV.
Event counts in electron and muon signal regions are implemented as one-bin histograms.
Histograms for missing transverse energy and effective mass are implemented for the two
signal regions.'
BibKey: ATLAS:2011ad
BibTeX: '@Article{ATLAS:2011ad,
author = "Aad, Georges and others",
collaboration = "ATLAS",
title = "{Search for supersymmetry in final states with jets,
missing transverse momentum and one isolated lepton in
sqrt{s} = 7 TeV pp collisions using 1 fb-1 of ATLAS data}",
journal = "Phys. Rev.",
volume = "D85",
year = "2012",
pages = "012006",
eprint = "1109.6606",
archivePrefix = "arXiv",
primaryClass = "hep-ex",
SLACcitation = "%%CITATION = 1109.6606;%%"
}'
diff --git a/src/Core/AnalysisInfo.cc b/src/Core/AnalysisInfo.cc
--- a/src/Core/AnalysisInfo.cc
+++ b/src/Core/AnalysisInfo.cc
@@ -1,158 +1,142 @@
#include "Rivet/Rivet.hh"
#include "Rivet/RivetBoost.hh"
#include "Rivet/AnalysisInfo.hh"
#include "Rivet/Tools/Utils.hh"
#include "Rivet/Tools/RivetPaths.hh"
#include "Rivet/Tools/Logging.hh"
#include "yaml-cpp/yaml.h"
#include <iostream>
#include <fstream>
#include <unistd.h>
namespace Rivet {
namespace {
  // File-local logger accessor: funnels all messages from this translation
  // unit through the shared "Rivet.AnalysisInfo" log channel, so the MSG_*
  // macros below pick up a consistently named logger.
  Log& getLog() {
    return Log::getLog("Rivet.AnalysisInfo");
  }
}
/// Static factory method: load an AnalysisInfo from <ananame>.info (YAML).
/// Returns a semi-null AnalysisInfo (name set, beams = ANY/ANY) if the info
/// file is missing or unparseable, so callers always get a valid object.
AnalysisInfo* AnalysisInfo::make(const std::string& ananame) {
  // Returned AI, in semi-null state
  AnalysisInfo* ai = new AnalysisInfo();
  ai->_beams += make_pair(PID::ANY, PID::ANY);
  ai->_name = ananame;

  /// If no ana data file found, return null AI
  const string datapath = findAnalysisInfoFile(ananame + ".info");
  if (datapath.empty()) {
    MSG_DEBUG("No datafile " << ananame + ".info found");
    return ai;
  }

  // Read data from YAML document
  MSG_DEBUG("Reading analysis data from " << datapath);
  YAML::Node doc;
  try {
    doc = YAML::LoadFile(datapath);
  } catch (const YAML::ParserException& ex) {
    MSG_ERROR("Parse error when reading analysis data from " << datapath << " (" << ex.what() << ")");
    return ai;
  }

  #define THROW_INFOERR(KEY) throw InfoError("Problem in info parsing while accessing key " + string(KEY) + " in file " + datapath)

  // Simple scalars (test for nullness before casting)
  #define TRY_GETINFO(KEY, VAR) try { if (doc[KEY] && !doc[KEY].IsNull()) ai->_ ## VAR = doc[KEY].as<string>(); } catch (...) { THROW_INFOERR(KEY); }
  TRY_GETINFO("Name", name);
  TRY_GETINFO("Summary", summary);
  TRY_GETINFO("Status", status);
  TRY_GETINFO("RunInfo", runInfo);
  TRY_GETINFO("Description", description);
  TRY_GETINFO("Experiment", experiment);
  TRY_GETINFO("Collider", collider);
  TRY_GETINFO("Year", year);
  TRY_GETINFO("SpiresID", spiresId);
  TRY_GETINFO("InspireID", inspireId);
  TRY_GETINFO("BibKey", bibKey);
  TRY_GETINFO("BibTeX", bibTeX);
  #undef TRY_GETINFO

  // Sequences (test the seq *and* each entry for nullness before casting)
  #define TRY_GETINFO_SEQ(KEY, VAR) try { \
    if (doc[KEY] && !doc[KEY].IsNull()) { \
      const YAML::Node& VAR = doc[KEY]; \
      for (size_t i = 0; i < VAR.size(); ++i) \
        if (!VAR[i].IsNull()) ai->_ ## VAR += VAR[i].as<string>(); \
    } } catch (...) { THROW_INFOERR(KEY); }
  TRY_GETINFO_SEQ("Authors", authors);
  TRY_GETINFO_SEQ("References", references);
  TRY_GETINFO_SEQ("ToDo", todos);
  #undef TRY_GETINFO_SEQ

  // A boolean with some name flexibility
  // FIX: the fallback branch previously re-tested "NeedsCrossSection" (making
  // it dead code), so the alternate spelling "NeedCrossSection" was never read.
  try {
    if (doc["NeedsCrossSection"]) ai->_needsCrossSection = doc["NeedsCrossSection"].as<bool>();
    else if (doc["NeedCrossSection"]) ai->_needsCrossSection = doc["NeedCrossSection"].as<bool>();
  } catch (...) {
    THROW_INFOERR("NeedsCrossSection|NeedCrossSection");
  }

  // Beam particle identities: either a single 2-tuple of particle names, or a
  // list of such 2-tuples.
  try {
    if (doc["Beams"]) {
      const YAML::Node& beams = doc["Beams"];
      vector<PdgIdPair> beam_pairs;
      // FIX: both halves of the pair must be scalar; the second test
      // previously re-checked element [0] instead of [1].
      if (beams.size() == 2 && beams[0].IsScalar() && beams[1].IsScalar()) {
        beam_pairs += PID::make_pdgid_pair(beams[0].as<string>(), beams[1].as<string>());
      } else {
        for (size_t i = 0; i < beams.size(); ++i) {
          const YAML::Node& bp = beams[i];
          // FIX: same copy-paste bug — validate bp[1], not bp[0] twice.
          if (bp.size() != 2 || !bp[0].IsScalar() || !bp[1].IsScalar())
            throw InfoError("Beam ID pairs have to be either a 2-tuple or a list of 2-tuples of particle names");
          beam_pairs += PID::make_pdgid_pair(bp[0].as<string>(), bp[1].as<string>());
        }
      }
      ai->_beams = beam_pairs;
    }
  } catch (...) { THROW_INFOERR("Beams"); }  // key name matches the YAML key's case

  // Beam energies: a list of either single numbers (interpreted as a
  // symmetric collider, half the energy per beam) or [E1, E2] pairs.
  try {
    if (doc["Energies"]) {
      vector< pair<double,double> > beam_energy_pairs;
      for (size_t i = 0; i < doc["Energies"].size(); ++i) {
        const YAML::Node& be = doc["Energies"][i];
        if (be.IsScalar()) {
          // If beam energy is a scalar, then assume symmetric beams each with half that energy
          beam_energy_pairs += make_pair(be.as<double>()/2.0, be.as<double>()/2.0);
        } else if (be.IsSequence()) {
          if (be.size() != 2)
            throw InfoError("Beam energies have to be a list of either numbers or pairs of numbers");
          beam_energy_pairs += make_pair(be[0].as<double>(), be[1].as<double>());
        } else {
          throw InfoError("Beam energies have to be a list of either numbers or pairs of numbers");
        }
      }
      ai->_energies = beam_energy_pairs;
    }
  } catch (...) { THROW_INFOERR("Energies"); }

  #undef THROW_INFOERR

  MSG_TRACE("AnalysisInfo pointer = " << ai);
  return ai;
}
/// Render a one-line human-readable summary of an AnalysisInfo:
/// "<name> - <summary> (<status>)".
string toString(const AnalysisInfo& ai) {
  string out = ai.name();
  out += " - " + ai.summary();
  // Beams and energies are deliberately left out of the one-line form:
  // out += " - " + ...ai.beams()... ;
  // out += " - " + ...ai.energies()... ;
  out += " (" + ai.status() + ")";
  return out;
}
}
File Metadata
Details
Attached
Mime Type
text/x-diff
Expires
Sat, May 3, 5:56 AM (16 h, 1 m)
Storage Engine
blob
Storage Format
Raw Data
Storage Handle
4982885
Default Alt Text
(8 KB)
Attached To
rRIVETHG rivethg
Event Timeline
Log In to Comment