
diff --git a/FixedOrderGen/src/config.cc b/FixedOrderGen/src/config.cc
index 48aeaff..f0d6197 100644
--- a/FixedOrderGen/src/config.cc
+++ b/FixedOrderGen/src/config.cc
@@ -1,409 +1,408 @@
/**
* \authors The HEJ collaboration (see AUTHORS for details)
* \date 2019
* \copyright GPLv2 or later
*/
#include "config.hh"
#include <cctype>
#include "Subleading.hh"
#include "HEJ/config.hh"
#include "HEJ/YAMLreader.hh"
namespace HEJFOG{
using HEJ::set_from_yaml;
using HEJ::set_from_yaml_if_defined;
using HEJ::pid::ParticleID;
namespace{
//! Get YAML tree of supported options
/**
* The configuration file is checked against this tree of options
* in assert_all_options_known.
*/
YAML::Node const & get_supported_options(){
const static YAML::Node supported = [](){
YAML::Node supported;
static const auto opts = {
"process", "events", "subleading fraction","subleading channels",
"scales", "scale factors", "max scale ratio", "pdf",
"event output", "analysis", "import scales"
};
// add subnodes to "supported" - the assigned value is irrelevant
for(auto && opt: opts) supported[opt] = "";
for(auto && jet_opt: {"min pt", "peak pt", "algorithm", "R", "max rapidity"}){
supported["jets"][jet_opt] = "";
}
for(auto && particle_type: {"Higgs", "Wp", "W+", "Wm", "W-", "Z"}){
for(auto && particle_opt: {"mass", "width"}){
supported["particle properties"][particle_type][particle_opt] = "";
}
supported["particle properties"][particle_type]["decays"]["into"] = "";
supported["particle properties"][particle_type]["decays"]["branching ratio"] = "";
}
for(auto && opt: {"mt", "use impact factors", "include bottom", "mb"}){
supported["Higgs coupling"][opt] = "";
}
for(auto && beam_opt: {"energy", "particles"}){
supported["beam"][beam_opt] = "";
}
for(auto && unweight_opt: {"sample size", "max deviation"}){
supported["unweight"][unweight_opt] = "";
}
for(auto && opt: {"name", "seed"}){
supported["random generator"][opt] = "";
}
return supported;
}();
return supported;
}
JetParameters get_jet_parameters(
YAML::Node const & node, std::string const & entry
){
const auto p = HEJ::get_jet_parameters(node, entry);
JetParameters result;
result.def = p.def;
result.min_pt = p.min_pt;
set_from_yaml(result.max_y, node, entry, "max rapidity");
set_from_yaml_if_defined(result.peak_pt, node, entry, "peak pt");
if(result.peak_pt && *result.peak_pt <= result.min_pt)
throw std::invalid_argument{
"Value of option 'peak pt' has to be larger than 'min pt'."
};
return result;
}
Beam get_Beam(
YAML::Node const & node, std::string const & entry
){
Beam beam;
std::vector<HEJ::ParticleID> particles;
set_from_yaml(beam.energy, node, entry, "energy");
set_from_yaml_if_defined(particles, node, entry, "particles");
if(! particles.empty()){
for(HEJ::ParticleID particle: particles){
if(particle != HEJ::pid::p && particle != HEJ::pid::p_bar){
throw std::invalid_argument{
"Unsupported value in option " + entry + ": particles:"
" only proton ('p') and antiproton ('p_bar') beams are supported"
};
}
}
if(particles.size() != 2){
throw std::invalid_argument{"Not exactly two beam particles"};
}
beam.particles.front() = particles.front();
beam.particles.back() = particles.back();
}
return beam;
}
std::vector<std::string> split(
std::string const & str, std::string const & delims
){
std::vector<std::string> result;
for(size_t begin, end = 0; end != str.npos;){
begin = str.find_first_not_of(delims, end);
if(begin == str.npos) break;
end = str.find_first_of(delims, begin + 1);
result.emplace_back(str.substr(begin, end - begin));
}
return result;
}
std::invalid_argument invalid_incoming(std::string const & what){
return std::invalid_argument{
"Incoming particle type " + what + " not supported,"
" incoming particles have to be 'p', 'p_bar' or partons"
};
}
std::invalid_argument invalid_outgoing(std::string const & what){
return std::invalid_argument{
"Outgoing particle type " + what + " not supported,"
" outgoing particles have to be 'j', 'photon', 'H', 'Wm', 'Wp', 'e-', 'e+', 'nu_e', 'nu_e_bar'"
};
}
HEJ::ParticleID reconstruct_boson_id(
std::vector<HEJ::ParticleID> const & ids
){
assert(ids.size()==2);
const int pidsum = ids[0] + ids[1];
if(pidsum == +1) {
assert(HEJ::is_antilepton(ids[0]));
if(HEJ::is_antineutrino(ids[0])) {
throw HEJ::not_implemented{"lepton-flavour violating final state"};
}
assert(HEJ::is_neutrino(ids[1]));
// charged antilepton + neutrino means we had a W+
return HEJ::pid::Wp;
}
if(pidsum == -1) {
assert(HEJ::is_antilepton(ids[0]));
if(HEJ::is_neutrino(ids[1])) {
throw HEJ::not_implemented{"lepton-flavour violating final state"};
}
assert(HEJ::is_antineutrino(ids[0]));
// charged lepton + antineutrino means we had a W-
return HEJ::pid::Wm;
}
throw HEJ::not_implemented{
"final state with leptons "+HEJ::name(ids[0])+" and "+HEJ::name(ids[1])
+" not supported"
};
}
Process get_process(
YAML::Node const & node, std::string const & entry
){
Process result;
std::string process_string;
set_from_yaml(process_string, node, entry);
assert(! process_string.empty());
const auto particles = split(process_string, " \n\t\v=>");
if(particles.size() < 3){
throw std::invalid_argument{
"Bad format in option process: '" + process_string
+ "', expected format is 'in1 in2 => out1 ...'"
};
}
result.incoming.front() = HEJ::to_ParticleID(particles[0]);
result.incoming.back() = HEJ::to_ParticleID(particles[1]);
for(size_t i = 0; i < result.incoming.size(); ++i){
const HEJ::ParticleID in = result.incoming[i];
if(
in != HEJ::pid::proton && in != HEJ::pid::p_bar
&& !HEJ::is_parton(in)
){
throw invalid_incoming(particles[i]);
}
}
result.njets = 0;
for(size_t i = result.incoming.size(); i < particles.size(); ++i){
assert(! particles[i].empty());
if(particles[i] == "j") ++result.njets;
else if(std::isdigit(particles[i].front())
&& particles[i].back() == 'j')
result.njets += std::stoi(particles[i]);
else{
const auto pid = HEJ::to_ParticleID(particles[i]);
if(pid==HEJ::pid::Higgs || pid==HEJ::pid::Wp || pid==HEJ::pid::Wm){
if(result.boson)
throw std::invalid_argument{
"More than one outgoing boson is not supported"
};
if(!result.boson_decay.empty())
throw std::invalid_argument{
"Production of a boson together with a lepton is not supported"
};
result.boson = pid;
} else if (HEJ::is_anylepton(pid)){
// Do not accept more leptons if two are already given
if( result.boson_decay.size()>=2 )
throw std::invalid_argument{"Too many leptons provided"};
if(result.boson)
throw std::invalid_argument{
"Production of a lepton together with a boson is not supported"
};
result.boson_decay.emplace_back(pid);
} else {
throw invalid_outgoing(particles[i]);
}
}
}
if(result.njets < 2){
throw std::invalid_argument{
"Process has to include at least two jets ('j')"
};
}
if(!result.boson_decay.empty()){
std::sort(begin(result.boson_decay),end(result.boson_decay));
assert(!result.boson);
result.boson = reconstruct_boson_id(result.boson_decay);
}
return result;
}
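
For reference, a sketch of the process strings this parser accepts (illustrative only, not part of the patch): incoming particles must be 'p', 'p_bar' or partons, jets are requested as 'j' or 'Nj', and decay leptons are reconstructed into the corresponding W boson, so either of the following forms would be valid:

  process: p p => H 2j         # Higgs boson plus at least two jets
  process: p p => e+ nu_e 3j   # reconstructed as W+ production with three jets
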
HEJFOG::Subleading to_subleading_channel(YAML::Node const & yaml){
std::string name;
using namespace HEJFOG::channels;
set_from_yaml(name, yaml);
if(name == "none")
return none;
if(name == "all")
return all;
if(name == "unordered" || name == "uno")
return uno;
if(name == "qqx")
return qqx;
throw HEJ::unknown_option("Unknown subleading channel '"+name+"'");
-
}
unsigned int get_subleading_channels(YAML::Node const & node){
using YAML::NodeType;
using namespace HEJFOG::channels;
// all channels allowed by default
if(!node) return all;
switch(node.Type()){
case NodeType::Undefined:
return all;
case NodeType::Null:
return none;
case NodeType::Scalar:
return to_subleading_channel(node);
case NodeType::Map:
throw HEJ::invalid_type{"map is not a valid option for subleading channels"};
case NodeType::Sequence:
unsigned int channels = HEJFOG::Subleading::none;
for(auto && channel_node: node){
channels |= get_subleading_channels(channel_node);
}
return channels;
}
throw std::logic_error{"unreachable"};
}
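
The 'subleading channels' option can thus be a single keyword, a sequence (the channels are OR-ed together), or left empty; a sketch of the accepted forms (pick one, values illustrative):

  subleading channels: all
  subleading channels: [uno, qqx]
  subleading channels:           # null, equivalent to 'none'
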
Decay get_decay(YAML::Node const & node){
Decay decay;
set_from_yaml(decay.products, node, "into");
decay.branching_ratio=1;
set_from_yaml_if_defined(decay.branching_ratio, node, "branching ratio");
return decay;
}
std::vector<Decay> get_decays(YAML::Node const & node){
using YAML::NodeType;
if(!node) return {};
switch(node.Type()){
case NodeType::Null:
case NodeType::Undefined:
return {};
case NodeType::Scalar:
throw HEJ::invalid_type{"value is not a list of decays"};
case NodeType::Map:
return {get_decay(node)};
case NodeType::Sequence:
std::vector<Decay> result;
for(auto && decay_str: node){
result.emplace_back(get_decay(decay_str));
}
return result;
}
throw std::logic_error{"unreachable"};
}
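
A decay is a map with the products under 'into' and an optional 'branching ratio' (default 1); several decays can be given as a sequence. A hedged sketch, assuming the usual '_bar' naming convention for antiparticles and with placeholder mass and width values:

  particle properties:
    Higgs:
      mass: 125
      width: 0.004
      decays:
        into: [b, b_bar]
        branching ratio: 1
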
ParticleProperties get_particle_properties(
YAML::Node const & node, std::string const & entry
){
ParticleProperties result;
set_from_yaml(result.mass, node, entry, "mass");
set_from_yaml(result.width, node, entry, "width");
try{
result.decays = get_decays(node[entry]["decays"]);
}
catch(HEJ::missing_option const & ex){
throw HEJ::missing_option{entry + ": decays: " + ex.what()};
}
catch(HEJ::invalid_type const & ex){
throw HEJ::invalid_type{entry + ": decays: " + ex.what()};
}
return result;
}
ParticlesPropMap get_all_particles_properties(YAML::Node const & node){
ParticlesPropMap result;
for(auto const & entry: node) {
const auto name = entry.first.as<std::string>();
const auto id = HEJ::to_ParticleID(name);
result.emplace(id, get_particle_properties(node,name));
}
return result;
}
UnweightSettings get_unweight(
YAML::Node const & node, std::string const & entry
){
UnweightSettings result;
set_from_yaml(result.sample_size, node, entry, "sample size");
if(result.sample_size <= 0){
throw std::invalid_argument{
"negative sample size " + std::to_string(result.sample_size)
};
}
set_from_yaml(result.max_dev, node, entry, "max deviation");
return result;
}
Config to_Config(YAML::Node const & yaml){
try{
HEJ::assert_all_options_known(yaml, get_supported_options());
}
catch(HEJ::unknown_option const & ex){
throw HEJ::unknown_option{std::string{"Unknown option '"} + ex.what() + "'"};
}
Config config;
config.process = get_process(yaml, "process");
set_from_yaml(config.events, yaml, "events");
config.jets = get_jet_parameters(yaml, "jets");
config.beam = get_Beam(yaml, "beam");
for(size_t i = 0; i < config.process.incoming.size(); ++i){
const auto & in = config.process.incoming[i];
using namespace HEJ::pid;
if( (in == p || in == p_bar) && in != config.beam.particles[i]){
throw std::invalid_argument{
"Particle type of beam " + std::to_string(i+1) + " incompatible"
+ " with type of incoming particle " + std::to_string(i+1)
};
}
}
set_from_yaml(config.pdf_id, yaml, "pdf");
set_from_yaml(config.subleading_fraction, yaml, "subleading fraction");
if(config.subleading_fraction < 0 || config.subleading_fraction > 1){
throw std::invalid_argument{
"subleading fraction has to be between 0 and 1"
};
}
if(config.subleading_fraction == 0)
config.subleading_channels = Subleading::none;
else
config.subleading_channels = get_subleading_channels(yaml["subleading channels"]);
if(yaml["particle properties"]){
config.particles_properties = get_all_particles_properties(
yaml["particle properties"]);
}
if(config.process.boson
&& config.particles_properties.find(*(config.process.boson))
== config.particles_properties.end())
throw HEJ::missing_option("Process wants to generate boson "
+std::to_string(*(config.process.boson))+", but particle properties are missing");
set_from_yaml_if_defined(config.analysis_parameters, yaml, "analysis");
config.scales = HEJ::to_ScaleConfig(yaml);
set_from_yaml_if_defined(config.output, yaml, "event output");
config.rng = HEJ::to_RNGConfig(yaml, "random generator");
config.Higgs_coupling = HEJ::get_Higgs_coupling(yaml, "Higgs coupling");
if(yaml["unweight"]) config.unweight = get_unweight(yaml, "unweight");
return config;
}
} // namespace anonymous
Config load_config(std::string const & config_file){
try{
return to_Config(YAML::LoadFile(config_file));
}
catch(...){
std::cerr << "Error reading " << config_file << ":\n ";
throw;
}
}
}
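
Putting the options above together, a minimal generator configuration might look as follows; the option keys are taken from get_supported_options above, while all values (PDF id, energies, generator name, file names) are placeholders:

  process: p p => H 2j
  events: 10000
  jets:
    min pt: 30
    peak pt: 40
    R: 0.4
    max rapidity: 5
  beam:
    energy: 6500
    particles: [p, p]
  pdf: 230000
  subleading fraction: 0.05
  particle properties:
    Higgs:
      mass: 125
      width: 0.004
  scales: [max jet pperp]
  random generator:
    name: mixmax
  event output: [HEJFO.lhe]
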
diff --git a/src/YAMLreader.cc b/src/YAMLreader.cc
index 4298e54..c1b7b1d 100644
--- a/src/YAMLreader.cc
+++ b/src/YAMLreader.cc
@@ -1,478 +1,484 @@
/**
* \authors The HEJ collaboration (see AUTHORS for details)
* \date 2019
* \copyright GPLv2 or later
*/
#include "HEJ/YAMLreader.hh"
#include <algorithm>
#include <iostream>
#include <limits>
#include <map>
#include <string>
#include <unordered_map>
#include <vector>
#include <dlfcn.h>
#include "HEJ/ScaleFunction.hh"
#include "HEJ/event_types.hh"
#include "HEJ/output_formats.hh"
#include "HEJ/Constants.hh"
namespace HEJ{
class Event;
namespace{
//! Get YAML tree of supported options
/**
* The configuration file is checked against this tree of options
* in assert_all_options_known.
*/
YAML::Node const & get_supported_options(){
const static YAML::Node supported = [](){
YAML::Node supported;
static const auto opts = {
"trials", "min extparton pt", "max ext soft pt fraction",
- "FKL", "unordered", "extremal qqx", "central qqx", "non-resummable",
"scales", "scale factors", "max scale ratio", "import scales",
"log correction", "event output", "analysis", "regulator parameter"
};
// add subnodes to "supported" - the assigned value is irrelevant
for(auto && opt: opts) supported[opt] = "";
for(auto && jet_opt: {"min pt", "algorithm", "R"}){
supported["resummation jets"][jet_opt] = "";
supported["fixed order jets"][jet_opt] = "";
}
for(auto && opt: {"mt", "use impact factors", "include bottom", "mb"}){
supported["Higgs coupling"][opt] = "";
}
for(auto && opt: {"name", "seed"}){
supported["random generator"][opt] = "";
}
+ for(auto && opt: {"FKL", "unordered", "extremal qqx", "central qqx", "non-resummable"}){
+ supported["event treatment"][opt] = "";
+ }
return supported;
}();
return supported;
}
fastjet::JetAlgorithm to_JetAlgorithm(std::string const & algo){
using namespace fastjet;
static const std::map<std::string, fastjet::JetAlgorithm> known = {
{"kt", kt_algorithm},
{"cambridge", cambridge_algorithm},
{"antikt", antikt_algorithm},
{"cambridge for passive", cambridge_for_passive_algorithm},
{"plugin", plugin_algorithm}
};
const auto res = known.find(algo);
if(res == known.end()){
throw std::invalid_argument("Unknown jet algorithm " + algo);
}
return res->second;
}
EventTreatment to_EventTreatment(std::string const & name){
static const std::map<std::string, EventTreatment> known = {
{"reweight", EventTreatment::reweight},
{"keep", EventTreatment::keep},
{"discard", EventTreatment::discard}
};
const auto res = known.find(name);
if(res == known.end()){
throw std::invalid_argument("Unknown event treatment " + name);
}
return res->second;
}
} // namespace anonymous
namespace detail{
void set_from_yaml(fastjet::JetAlgorithm & setting, YAML::Node const & yaml){
setting = to_JetAlgorithm(yaml.as<std::string>());
}
void set_from_yaml(EventTreatment & setting, YAML::Node const & yaml){
setting = to_EventTreatment(yaml.as<std::string>());
}
void set_from_yaml(ParticleID & setting, YAML::Node const & yaml){
setting = to_ParticleID(yaml.as<std::string>());
}
} // namespace detail
JetParameters get_jet_parameters(
YAML::Node const & node,
std::string const & entry
){
assert(node);
JetParameters result;
fastjet::JetAlgorithm jet_algo = fastjet::antikt_algorithm;
double R;
set_from_yaml_if_defined(jet_algo, node, entry, "algorithm");
set_from_yaml(R, node, entry, "R");
result.def = fastjet::JetDefinition{jet_algo, R};
set_from_yaml(result.min_pt, node, entry, "min pt");
return result;
}
RNGConfig to_RNGConfig(
YAML::Node const & node,
std::string const & entry
){
assert(node);
RNGConfig result;
set_from_yaml(result.name, node, entry, "name");
set_from_yaml_if_defined(result.seed, node, entry, "seed");
return result;
}
HiggsCouplingSettings get_Higgs_coupling(
YAML::Node const & node,
std::string const & entry
){
assert(node);
static constexpr double mt_max = 2e4;
#ifndef HEJ_BUILD_WITH_QCDLOOP
if(node[entry]){
throw std::invalid_argument{
"Higgs coupling settings require building HEJ 2 "
"with QCDloop support"
};
}
#endif
HiggsCouplingSettings settings;
set_from_yaml_if_defined(settings.mt, node, entry, "mt");
set_from_yaml_if_defined(settings.mb, node, entry, "mb");
set_from_yaml_if_defined(settings.include_bottom, node, entry, "include bottom");
set_from_yaml_if_defined(settings.use_impact_factors, node, entry, "use impact factors");
if(settings.use_impact_factors){
if(settings.mt != std::numeric_limits<double>::infinity()){
throw std::invalid_argument{
"Conflicting settings: "
"impact factors may only be used in the infinite top mass limit"
};
}
}
else{
// huge values of the top mass are numerically unstable
settings.mt = std::min(settings.mt, mt_max);
}
return settings;
}
FileFormat to_FileFormat(std::string const & name){
static const std::map<std::string, FileFormat> known = {
{"Les Houches", FileFormat::Les_Houches},
{"HepMC", FileFormat::HepMC},
{"HepMC2", FileFormat::HepMC2},
{"HepMC3", FileFormat::HepMC3}
};
const auto res = known.find(name);
if(res == known.end()){
throw std::invalid_argument("Unknown file format " + name);
}
return res->second;
}
std::string extract_suffix(std::string const & filename){
size_t separator = filename.rfind('.');
if(separator == filename.npos) return {};
return filename.substr(separator + 1);
}
FileFormat format_from_suffix(std::string const & filename){
const std::string suffix = extract_suffix(filename);
if(suffix == "lhe") return FileFormat::Les_Houches;
if(suffix == "hepmc") return FileFormat::HepMC;
if(suffix == "hepmc3") return FileFormat::HepMC3;
if(suffix == "hepmc2") return FileFormat::HepMC2;
throw std::invalid_argument{
"Can't determine format for output file " + filename
};
}
void assert_all_options_known(
YAML::Node const & conf, YAML::Node const & supported
){
if(!conf.IsMap()) return;
if(!supported.IsMap()) throw invalid_type{"must not have sub-entries"};
for(auto const & entry: conf){
const auto name = entry.first.as<std::string>();
if(! supported[name]) throw unknown_option{name};
/* check sub-options, e.g. 'resummation jets: min pt'
* we don't check analysis sub-options
* those depend on the analysis being used and should be checked there
* similarly for "import scales"
*/
if(name != "analysis" && name != "import scales"){
try{
assert_all_options_known(conf[name], supported[name]);
}
catch(unknown_option const & ex){
throw unknown_option{name + ": " + ex.what()};
}
catch(invalid_type const & ex){
throw invalid_type{name + ": " + ex.what()};
}
}
}
}
} // namespace HEJ
namespace YAML {
Node convert<HEJ::OutputFile>::encode(HEJ::OutputFile const & outfile) {
Node node;
node[to_string(outfile.format)] = outfile.name;
return node;
};
bool convert<HEJ::OutputFile>::decode(Node const & node, HEJ::OutputFile & out) {
switch(node.Type()){
case NodeType::Map: {
YAML::const_iterator it = node.begin();
out.format = HEJ::to_FileFormat(it->first.as<std::string>());
out.name = it->second.as<std::string>();
return true;
}
case NodeType::Scalar:
out.name = node.as<std::string>();
out.format = HEJ::format_from_suffix(out.name);
return true;
default:
return false;
}
}
} // namespace YAML
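
For the 'event output' option this means both a bare filename (format deduced from the suffix) and a one-entry map naming the format explicitly are accepted; a sketch with placeholder filenames:

  event output:
    - events.lhe               # suffix -> Les Houches
    - HepMC3: events.hepmc     # format given explicitly
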
namespace HEJ{
namespace detail{
void set_from_yaml(OutputFile & setting, YAML::Node const & yaml){
setting = yaml.as<OutputFile>();
}
}
namespace{
void update_fixed_order_jet_parameters(
JetParameters & fixed_order_jets, YAML::Node const & yaml
){
if(!yaml["fixed order jets"]) return;
set_from_yaml_if_defined(
fixed_order_jets.min_pt, yaml, "fixed order jets", "min pt"
);
fastjet::JetAlgorithm algo = fixed_order_jets.def.jet_algorithm();
set_from_yaml_if_defined(algo, yaml, "fixed order jets", "algorithm");
double R = fixed_order_jets.def.R();
set_from_yaml_if_defined(R, yaml, "fixed order jets", "R");
fixed_order_jets.def = fastjet::JetDefinition{algo, R};
}
// like std::stod, but throws unless the whole string can be converted
double to_double(std::string const & str){
std::size_t pos;
const double result = std::stod(str, &pos);
if(pos < str.size()){
throw std::invalid_argument(str + " is not a valid double value");
}
return result;
}
using EventScale = double (*)(Event const &);
void import_scale_functions(
std::string const & file,
std::vector<std::string> const & scale_names,
std::unordered_map<std::string, EventScale> & known
) {
auto handle = dlopen(file.c_str(), RTLD_NOW);
char * error = dlerror();
if(error != nullptr) throw std::runtime_error{error};
for(auto const & scale: scale_names) {
void * sym = dlsym(handle, scale.c_str());
error = dlerror();
if(error != nullptr) throw std::runtime_error{error};
known.emplace(scale, reinterpret_cast<EventScale>(sym));
}
}
auto get_scale_map(
YAML::Node const & yaml
) {
std::unordered_map<std::string, EventScale> scale_map;
scale_map.emplace("H_T", H_T);
scale_map.emplace("max jet pperp", max_jet_pt);
scale_map.emplace("jet invariant mass", jet_invariant_mass);
scale_map.emplace("m_j1j2", m_j1j2);
if(yaml["import scales"]) {
if(! yaml["import scales"].IsMap()) {
throw invalid_type{"Entry 'import scales' is not a map"};
}
for(auto const & import: yaml["import scales"]) {
const auto file = import.first.as<std::string>();
const auto scale_names =
import.second.IsSequence()
?import.second.as<std::vector<std::string>>()
:std::vector<std::string>{import.second.as<std::string>()};
import_scale_functions(file, scale_names, scale_map);
}
}
return scale_map;
}
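
'import scales' maps a shared library to one or more scale function names that are loaded via dlopen/dlsym; a sketch, where the library and symbol names are purely hypothetical:

  import scales:
    ./libmyscales.so: my_scale
    # or several at once: ./libmyscales.so: [scale_a, scale_b]
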
// simple (as in non-composite) scale functions
/**
* An example of a simple scale function is H_T;
* H_T/2 is then composite (take H_T, then divide by 2)
*/
ScaleFunction parse_simple_ScaleFunction(
std::string const & scale_fun,
std::unordered_map<std::string, EventScale> const & known
) {
assert(
scale_fun.empty() ||
(!std::isspace(scale_fun.front()) && !std::isspace(scale_fun.back()))
);
const auto it = known.find(scale_fun);
if(it != end(known)) return {it->first, it->second};
try{
const double scale = to_double(scale_fun);
return {scale_fun, FixedScale{scale}};
} catch(std::invalid_argument const &){}
throw std::invalid_argument{"Unknown scale choice: " + scale_fun};
}
std::string trim_front(std::string const & str){
const auto new_begin = std::find_if(
begin(str), end(str), [](char c){ return ! std::isspace(c); }
);
return std::string(new_begin, end(str));
}
std::string trim_back(std::string str){
size_t pos = str.size() - 1;
// use guaranteed wrap-around behaviour to check whether we have
// traversed the whole string
for(; pos < str.size() && std::isspace(str[pos]); --pos) {}
str.resize(pos + 1); // note that pos + 1 can be 0
return str;
}
ScaleFunction parse_ScaleFunction(
std::string const & scale_fun,
std::unordered_map<std::string, EventScale> const & known
){
assert(
scale_fun.empty() ||
(!std::isspace(scale_fun.front()) && !std::isspace(scale_fun.back()))
);
// parse from right to left => a/b/c gives (a/b)/c
const size_t delim = scale_fun.find_last_of("*/");
if(delim == scale_fun.npos){
return parse_simple_ScaleFunction(scale_fun, known);
}
const std::string first = trim_back(std::string{scale_fun, 0, delim});
const std::string second = trim_front(std::string{scale_fun, delim+1});
if(scale_fun[delim] == '/'){
return parse_ScaleFunction(first, known)
/ parse_ScaleFunction(second, known);
}
else{
assert(scale_fun[delim] == '*');
return parse_ScaleFunction(first, known)
* parse_ScaleFunction(second, known);
}
}
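
Scale settings are therefore built from the simple functions registered in get_scale_map (H_T, max jet pperp, jet invariant mass, m_j1j2), fixed numbers, and combinations with '*' and '/' parsed right to left; illustrative entries:

  scales: [125, H_T/2, max jet pperp, 0.5*m_j1j2]
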
- EventTreatMap get_event_treatment(
- YAML::Node const & yaml
- ){
+ EventTreatMap get_event_treatment(YAML::Node const & node){
+ using YAML::NodeType;
using namespace event_type;
+ // default treatments, used unless overridden below
EventTreatMap treat {
- {no_2_jets, EventTreatment::discard},
- {bad_final_state, EventTreatment::discard},
- {FKL, EventTreatment::reweight},
- {unob, EventTreatment::keep},
- {unof, EventTreatment::keep},
- {qqxexb, EventTreatment::keep},
- {qqxexf, EventTreatment::keep},
- {qqxmid, EventTreatment::keep},
- {non_resummable, EventTreatment::keep}
- };
- set_from_yaml(treat.at(FKL), yaml, "FKL");
- set_from_yaml(treat.at(unob), yaml, "unordered");
+ {no_2_jets, EventTreatment::discard},
+ {bad_final_state, EventTreatment::discard},
+ {FKL, EventTreatment::reweight},
+ {unob, EventTreatment::reweight},
+ {unof, EventTreatment::reweight},
+ {qqxexb, EventTreatment::reweight},
+ {qqxexf, EventTreatment::reweight},
+ {qqxmid, EventTreatment::reweight},
+ {non_resummable, EventTreatment::keep}
+ };
+ if(!node || node.IsNull()) return treat;
+ if(!node.IsMap()){
+ throw HEJ::invalid_type{"\"event treatment\" has to be a map"};
+ }
+ set_from_yaml_if_defined(treat.at(FKL), node, "FKL");
+ set_from_yaml_if_defined(treat.at(unob), node, "unordered");
treat.at(unof) = treat.at(unob);
- set_from_yaml(treat.at(qqxexb), yaml, "extremal qqx");
- set_from_yaml(treat.at(qqxexf), yaml, "extremal qqx");
- set_from_yaml(treat.at(qqxmid), yaml, "central qqx");
- set_from_yaml(treat.at(non_resummable), yaml, "non-resummable");
+ set_from_yaml_if_defined(treat.at(qqxexb), node, "extremal qqx");
+ treat.at(qqxexf) = treat.at(qqxexb);
+ set_from_yaml_if_defined(treat.at(qqxmid), node, "central qqx");
+ set_from_yaml_if_defined(treat.at(non_resummable), node, "non-resummable");
if(treat[non_resummable] == EventTreatment::reweight){
throw std::invalid_argument{"Cannot reweight Fixed Order events"};
}
return treat;
}
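
With this change, the per-event-type treatments move from top-level options into an 'event treatment' map; each entry takes one of reweight, keep or discard (see to_EventTreatment above), entries that are left out keep the defaults set at the top of the function, and 'unordered' / 'extremal qqx' also set the corresponding forward counterparts. A sketch:

  event treatment:
    FKL: reweight
    unordered: reweight
    extremal qqx: reweight
    central qqx: reweight
    non-resummable: keep      # 'reweight' is rejected for fixed-order events
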
Config to_Config(YAML::Node const & yaml){
try{
assert_all_options_known(yaml, get_supported_options());
}
catch(unknown_option const & ex){
throw unknown_option{std::string{"Unknown option '"} + ex.what() + "'"};
}
Config config;
config.resummation_jets = get_jet_parameters(yaml, "resummation jets");
config.fixed_order_jets = config.resummation_jets;
update_fixed_order_jet_parameters(config.fixed_order_jets, yaml);
set_from_yaml(config.min_extparton_pt, yaml, "min extparton pt");
// Set the default value, then override it if the option is defined
config.regulator_lambda=CLAMBDA;
set_from_yaml_if_defined(config.regulator_lambda, yaml, "regulator parameter");
config.max_ext_soft_pt_fraction = 0.1;
set_from_yaml_if_defined(
config.max_ext_soft_pt_fraction, yaml, "max ext soft pt fraction"
);
set_from_yaml(config.trials, yaml, "trials");
set_from_yaml(config.log_correction, yaml, "log correction");
- config.treat = get_event_treatment(yaml);
+ config.treat = get_event_treatment(yaml["event treatment"]);
set_from_yaml_if_defined(config.output, yaml, "event output");
config.rng = to_RNGConfig(yaml, "random generator");
set_from_yaml_if_defined(config.analysis_parameters, yaml, "analysis");
config.scales = to_ScaleConfig(yaml);
config.Higgs_coupling = get_Higgs_coupling(yaml, "Higgs coupling");
return config;
}
} // namespace anonymous
ScaleConfig to_ScaleConfig(YAML::Node const & yaml){
ScaleConfig config;
auto scale_funs = get_scale_map(yaml);
std::vector<std::string> scales;
set_from_yaml(scales, yaml, "scales");
config.base.reserve(scales.size());
std::transform(
begin(scales), end(scales), std::back_inserter(config.base),
[scale_funs](auto const & entry){
return parse_ScaleFunction(entry, scale_funs);
}
);
set_from_yaml_if_defined(config.factors, yaml, "scale factors");
config.max_ratio = std::numeric_limits<double>::infinity();
set_from_yaml_if_defined(config.max_ratio, yaml, "max scale ratio");
return config;
}
Config load_config(std::string const & config_file){
try{
return to_Config(YAML::LoadFile(config_file));
}
catch(...){
std::cerr << "Error reading " << config_file << ":\n ";
throw;
}
}
} // namespace HEJ
