
diff --git a/Hadronization/ClusterFissioner.cc b/Hadronization/ClusterFissioner.cc
--- a/Hadronization/ClusterFissioner.cc
+++ b/Hadronization/ClusterFissioner.cc
@@ -1,1171 +1,1176 @@
// -*- C++ -*-
//
// ClusterFissioner.cc is a part of Herwig - A multi-purpose Monte Carlo event generator
// Copyright (C) 2002-2019 The Herwig Collaboration
//
// Herwig is licenced under version 3 of the GPL, see COPYING for details.
// Please respect the MCnet academic guidelines, see GUIDELINES for details.
//
//
// This is the implementation of the non-inlined, non-templated member
// functions of the ClusterFissioner class.
//
#include "ClusterFissioner.h"
#include <ThePEG/Interface/ClassDocumentation.h>
#include <ThePEG/Interface/Reference.h>
#include <ThePEG/Interface/Parameter.h>
#include <ThePEG/Interface/Switch.h>
#include <ThePEG/Persistency/PersistentOStream.h>
#include <ThePEG/Persistency/PersistentIStream.h>
#include <ThePEG/PDT/EnumParticles.h>
#include "Herwig/Utilities/Kinematics.h"
#include "CheckId.h"
#include "Cluster.h"
#include "ThePEG/Repository/UseRandom.h"
#include "ThePEG/Repository/EventGenerator.h"
#include <ThePEG/Utilities/DescribeClass.h>
using namespace Herwig;
DescribeClass<ClusterFissioner,Interfaced>
describeClusterFissioner("Herwig::ClusterFissioner","Herwig.so");
ClusterFissioner::ClusterFissioner() :
_clMaxLight(3.35*GeV),
_clMaxBottom(3.35*GeV),
_clMaxCharm(3.35*GeV),
_clMaxExotic(3.35*GeV),
_clPowLight(2.0),
_clPowBottom(2.0),
_clPowCharm(2.0),
_clPowExotic(2.0),
_pSplitLight(1.0),
_pSplitBottom(1.0),
_pSplitCharm(1.0),
_pSplitExotic(1.0),
_fissionPwtUquark(1),
_fissionPwtDquark(1),
_fissionPwtSquark(0.5),
_fissionCluster(0),
_btClM(1.0*GeV),
_iopRem(1),
_kappa(1.0e15*GeV/meter),
_enhanceSProb(0),
_m0Fission(2.*GeV),
_massMeasure(0),
_probPowFactor(4.0),
_probShift(0.0),
_kinThresholdShift(1.0*sqr(GeV))
{}
IBPtr ClusterFissioner::clone() const {
return new_ptr(*this);
}
IBPtr ClusterFissioner::fullclone() const {
return new_ptr(*this);
}
void ClusterFissioner::persistentOutput(PersistentOStream & os) const {
os << _hadronSelector << ounit(_clMaxLight,GeV)
<< ounit(_clMaxBottom,GeV) << ounit(_clMaxCharm,GeV)
<< ounit(_clMaxExotic,GeV) << _clPowLight << _clPowBottom
<< _clPowCharm << _clPowExotic << _pSplitLight
<< _pSplitBottom << _pSplitCharm << _pSplitExotic
<< _fissionCluster << _fissionPwtUquark << _fissionPwtDquark << _fissionPwtSquark
<< ounit(_btClM,GeV)
<< _iopRem << ounit(_kappa, GeV/meter)
<< _enhanceSProb << ounit(_m0Fission,GeV) << _massMeasure
<< _probPowFactor << _probShift << ounit(_kinThresholdShift,sqr(GeV));
}
void ClusterFissioner::persistentInput(PersistentIStream & is, int) {
is >> _hadronSelector >> iunit(_clMaxLight,GeV)
>> iunit(_clMaxBottom,GeV) >> iunit(_clMaxCharm,GeV)
>> iunit(_clMaxExotic,GeV) >> _clPowLight >> _clPowBottom
>> _clPowCharm >> _clPowExotic >> _pSplitLight
>> _pSplitBottom >> _pSplitCharm >> _pSplitExotic
>> _fissionCluster >> _fissionPwtUquark >> _fissionPwtDquark >> _fissionPwtSquark
>> iunit(_btClM,GeV) >> _iopRem
>> iunit(_kappa, GeV/meter)
>> _enhanceSProb >> iunit(_m0Fission,GeV) >> _massMeasure
>> _probPowFactor >> _probShift >> iunit(_kinThresholdShift,sqr(GeV));
}
void ClusterFissioner::Init() {
static ClassDocumentation<ClusterFissioner> documentation
("Class responsibles for chopping up the clusters");
static Reference<ClusterFissioner,HadronSelector>
interfaceHadronSelector("HadronSelector",
"A reference to the HadronSelector object",
&Herwig::ClusterFissioner::_hadronSelector,
false, false, true, false);
// ClMax for light, Bottom, Charm and exotic (e.g. Susy) quarks
static Parameter<ClusterFissioner,Energy>
interfaceClMaxLight ("ClMaxLight","cluster max mass for light quarks (unit [GeV])",
&ClusterFissioner::_clMaxLight, GeV, 3.35*GeV, ZERO, 10.0*GeV,
false,false,false);
static Parameter<ClusterFissioner,Energy>
interfaceClMaxBottom ("ClMaxBottom","cluster max mass for b quarks (unit [GeV])",
&ClusterFissioner::_clMaxBottom, GeV, 3.35*GeV, ZERO, 10.0*GeV,
false,false,false);
static Parameter<ClusterFissioner,Energy>
interfaceClMaxCharm ("ClMaxCharm","cluster max mass for c quarks (unit [GeV])",
&ClusterFissioner::_clMaxCharm, GeV, 3.35*GeV, ZERO, 10.0*GeV,
false,false,false);
static Parameter<ClusterFissioner,Energy>
interfaceClMaxExotic ("ClMaxExotic","cluster max mass for exotic quarks (unit [GeV])",
&ClusterFissioner::_clMaxExotic, GeV, 3.35*GeV, ZERO, 10.0*GeV,
false,false,false);
// ClPow for light, Bottom, Charm and exotic (e.g. Susy) quarks
static Parameter<ClusterFissioner,double>
interfaceClPowLight ("ClPowLight","cluster mass exponent for light quarks",
&ClusterFissioner::_clPowLight, 0, 2.0, 0.0, 10.0,false,false,false);
static Parameter<ClusterFissioner,double>
interfaceClPowBottom ("ClPowBottom","cluster mass exponent for b quarks",
&ClusterFissioner::_clPowBottom, 0, 2.0, 0.0, 10.0,false,false,false);
static Parameter<ClusterFissioner,double>
interfaceClPowCharm ("ClPowCharm","cluster mass exponent for c quarks",
&ClusterFissioner::_clPowCharm, 0, 2.0, 0.0, 10.0,false,false,false);
static Parameter<ClusterFissioner,double>
interfaceClPowExotic ("ClPowExotic","cluster mass exponent for exotic quarks",
&ClusterFissioner::_clPowExotic, 0, 2.0, 0.0, 10.0,false,false,false);
// PSplit for light, Bottom, Charm and exotic (e.g. Susy) quarks
static Parameter<ClusterFissioner,double>
interfacePSplitLight ("PSplitLight","cluster mass splitting param for light quarks",
&ClusterFissioner::_pSplitLight, 0, 1.0, 0.0, 10.0,false,false,false);
static Parameter<ClusterFissioner,double>
interfacePSplitBottom ("PSplitBottom","cluster mass splitting param for b quarks",
&ClusterFissioner::_pSplitBottom, 0, 1.0, 0.0, 10.0,false,false,false);
static Parameter<ClusterFissioner,double>
interfacePSplitCharm ("PSplitCharm","cluster mass splitting param for c quarks",
&ClusterFissioner::_pSplitCharm, 0, 1.0, 0.0, 10.0,false,false,false);
static Parameter<ClusterFissioner,double>
interfacePSplitExotic ("PSplitExotic","cluster mass splitting param for exotic quarks",
&ClusterFissioner::_pSplitExotic, 0, 1.0, 0.0, 10.0,false,false,false);
static Switch<ClusterFissioner,int> interfaceFission
("Fission",
"Option for different Fission options",
&ClusterFissioner::_fissionCluster, 1, false, false);
static SwitchOption interfaceFissionDefault
(interfaceFission,
"default",
"Normal cluster fission which depends on the hadron selector class.",
0);
static SwitchOption interfaceFissionNew
(interfaceFission,
"new",
"Alternative cluster fission which does not depend on the hadron selector class",
1);
static Parameter<ClusterFissioner,double> interfaceFissionPwtUquark
("FissionPwtUquark",
"Weight for fission in U quarks",
&ClusterFissioner::_fissionPwtUquark, 1, 0.0, 1.0,
false, false, Interface::limited);
static Parameter<ClusterFissioner,double> interfaceFissionPwtDquark
("FissionPwtDquark",
"Weight for fission in D quarks",
&ClusterFissioner::_fissionPwtDquark, 1, 0.0, 1.0,
false, false, Interface::limited);
static Parameter<ClusterFissioner,double> interfaceFissionPwtSquark
("FissionPwtSquark",
"Weight for fission in S quarks",
&ClusterFissioner::_fissionPwtSquark, 0.5, 0.0, 1.0,
false, false, Interface::limited);
static Switch<ClusterFissioner,int> interfaceRemnantOption
("RemnantOption",
"Option for the treatment of remnant clusters",
&ClusterFissioner::_iopRem, 1, false, false);
static SwitchOption interfaceRemnantOptionSoft
(interfaceRemnantOption,
"Soft",
"Both clusters produced in the fission of the beam cluster"
" are treated as soft clusters.",
0);
static SwitchOption interfaceRemnantOptionHard
(interfaceRemnantOption,
"Hard",
"Only the cluster containing the remnant is treated as a soft cluster.",
1);
static SwitchOption interfaceRemnantOptionVeryHard
(interfaceRemnantOption,
"VeryHard",
"Even remnant clusters are treated as hard, i.e. all clusters the same",
2);
static Parameter<ClusterFissioner,Energy> interfaceBTCLM
("SoftClusterFactor",
"Parameter for the mass spectrum of remnant clusters",
&ClusterFissioner::_btClM, GeV, 1.*GeV, 0.1*GeV, 10.0*GeV,
false, false, Interface::limited);
static Parameter<ClusterFissioner,Tension> interfaceStringTension
("StringTension",
"String tension used in vertex displacement calculation",
&ClusterFissioner::_kappa, GeV/meter,
1.0e15*GeV/meter, ZERO, ZERO,
false, false, Interface::lowerlim);
static Switch<ClusterFissioner,int> interfaceEnhanceSProb
("EnhanceSProb",
"Option for enhancing strangeness",
&ClusterFissioner::_enhanceSProb, 0, false, false);
static SwitchOption interfaceEnhanceSProbNo
(interfaceEnhanceSProb,
"No",
"No strangeness enhancement.",
0);
static SwitchOption interfaceEnhanceSProbScaled
(interfaceEnhanceSProb,
"Scaled",
"Scaled strangeness enhancement",
1);
static SwitchOption interfaceEnhanceSProbExponential
(interfaceEnhanceSProb,
"Exponential",
"Exponential strangeness enhancement",
2);
static Switch<ClusterFissioner,int> interfaceMassMeasure
("MassMeasure",
"Option to use different mass measures",
&ClusterFissioner::_massMeasure,0,false,false);
static SwitchOption interfaceMassMeasureMass
(interfaceMassMeasure,
"Mass",
"Mass Measure",
0);
static SwitchOption interfaceMassMeasureLambda
(interfaceMassMeasure,
"Lambda",
"Lambda Measure",
1);
static Parameter<ClusterFissioner,Energy> interfaceFissionMassScale
("FissionMassScale",
"Cluster fission mass scale",
&ClusterFissioner::_m0Fission, GeV, 2.0*GeV, 0.1*GeV, 50.*GeV,
false, false, Interface::limited);
static Parameter<ClusterFissioner,double> interfaceProbPowFactor
("ProbablityPowerFactor",
"Power factor in ClausterFissioner bell probablity function",
&ClusterFissioner::_probPowFactor, 2.0, 1.0, 20.0,
false, false, Interface::limited);
static Parameter<ClusterFissioner,double> interfaceProbShift
("ProbablityShift",
"Shifts from the center in ClausterFissioner bell probablity function",
&ClusterFissioner::_probShift, 0.0, -10.0, 10.0,
false, false, Interface::limited);
static Parameter<ClusterFissioner,Energy2> interfaceKineticThresholdShift
("KineticThresholdShift",
"Shifts from the kinetic threshold in ClausterFissioner",
&ClusterFissioner::_kinThresholdShift, sqr(GeV), 0.*sqr(GeV), -10.0*sqr(GeV), 10.0*sqr(GeV),
false, false, Interface::limited);
}
tPVector ClusterFissioner::fission(ClusterVector & clusters, bool softUEisOn) {
// return if no clusters
if (clusters.empty()) return tPVector();
/*****************
* Loop over the (input) collection of cluster pointers, and store on
* the stack splitClusters all the clusters that need to be split
* (these are beam clusters, if the soft underlying event is off, and
* heavy non-beam clusters).
********************/
stack<ClusterPtr> splitClusters;
for(ClusterVector::iterator it = clusters.begin() ;
it != clusters.end() ; ++it) {
/**************
* Skip 3-component clusters that have been redefined (as 2-component
* clusters) or clusters that are not available. The latter check is
* redundant at present, but is kept for possible future extensions in which,
* for some reason, some of the clusters found by ClusterFinder are tagged
* straight away as not available.
**************/
if((*it)->isRedefined() || !(*it)->isAvailable()) continue;
// if the cluster is a beam cluster add it to the vector of clusters
// to be split or if it is heavy
if((*it)->isBeamCluster() || isHeavy(*it)) splitClusters.push(*it);
}
tPVector finalhadrons;
cut(splitClusters, clusters, finalhadrons, softUEisOn);
return finalhadrons;
}
void ClusterFissioner::cut(stack<ClusterPtr> & clusterStack,
ClusterVector &clusters, tPVector & finalhadrons,
bool softUEisOn) {
/**************************************************
* This method does the splitting of the clusters on the input stack
* and, "recursively", of all of their cluster children, if heavy. All of these
* new children clusters are added (indeed the pointers to them) to the
* collection of cluster pointers, clusters. The method works as follows.
* Initially the stack clusterStack contains just the input pointers to the
* clusters to be split. It is then filled "recursively" by all
* of the cluster children that are heavy enough to require, in their turn,
* to be split. In each loop, the top element of the stack is
* considered (only once, because it is then popped from the stack).
* This approach is conceptually recursive, but avoids the overhead of
* a genuine recursive function call. Furthermore, it requires minimal changes
* in the case that the fission of a heavy cluster should ever produce more
* than the two cluster children assumed now.
*
* Draw the masses: for normal, non-beam clusters a power-like mass dist
* is used, whereas for beam clusters a fast-decreasing exponential mass
* dist is used instead (to avoid many iterative splittings, which could
* produce an unphysically large transverse energy from a supposedly soft beam
* remnant process).
****************************/
// Here we recursively loop over clusters in the stack and cut them
while (!clusterStack.empty()) {
// take the top element of the stack
ClusterPtr iCluster = clusterStack.top(); clusterStack.pop();
// split it
cutType ct = iCluster->numComponents() == 2 ?
cutTwo(iCluster, finalhadrons, softUEisOn) :
cutThree(iCluster, finalhadrons, softUEisOn);
// There are cases when we don't want to split, even if it fails mass test
if(!ct.first.first || !ct.second.first) {
// if it is an unsplit beam cluster, leave it for the underlying event
if(iCluster->isBeamCluster() && softUEisOn)
iCluster->isAvailable(false);
continue;
}
// check whether the split products are clusters
ClusterPtr one = dynamic_ptr_cast<ClusterPtr>(ct.first.first);
ClusterPtr two = dynamic_ptr_cast<ClusterPtr>(ct.second.first);
// a beam cluster must be split into two clusters
if(iCluster->isBeamCluster() && (!one||!two) && softUEisOn) {
iCluster->isAvailable(false);
continue;
}
// There should always be intermediate quark(s) from the splitting
assert(ct.first.second && ct.second.second);
/// \todo sort out motherless quark pairs here. Watch out for 'quark in final state' errors
iCluster->addChild(ct.first.first);
// iCluster->addChild(ct.first.second);
// ct.first.second->addChild(ct.first.first);
iCluster->addChild(ct.second.first);
// iCluster->addChild(ct.second.second);
// ct.second.second->addChild(ct.second.first);
// Sometimes the clusters decay C -> H + C' rather than C -> C' + C''
if(one) {
clusters.push_back(one);
if(one->isBeamCluster() && softUEisOn)
one->isAvailable(false);
if(isHeavy(one) && one->isAvailable())
clusterStack.push(one);
}
if(two) {
clusters.push_back(two);
if(two->isBeamCluster() && softUEisOn)
two->isAvailable(false);
if(isHeavy(two) && two->isAvailable())
clusterStack.push(two);
}
}
}
namespace {
/**
* Check if can't make a hadron from the partons
*/
bool cantMakeHadron(tcPPtr p1, tcPPtr p2) {
return ! CheckId::canBeHadron(p1->dataPtr(), p2->dataPtr());
}
/**
* Check if can't make a diquark from the partons
*/
bool cantMakeDiQuark(tcPPtr p1, tcPPtr p2) {
long id1 = p1->id(), id2 = p2->id();
return ! (QuarkMatcher::Check(id1) && QuarkMatcher::Check(id2) && id1*id2>0);
}
}
ClusterFissioner::cutType
ClusterFissioner::cutTwo(ClusterPtr & cluster, tPVector & finalhadrons,
bool softUEisOn) {
// need to make sure only 2-cpt clusters get here
assert(cluster->numComponents() == 2);
tPPtr ptrQ1 = cluster->particle(0);
tPPtr ptrQ2 = cluster->particle(1);
Energy Mc = cluster->mass();
assert(ptrQ1);
assert(ptrQ2);
// And check if those particles are from a beam remnant
bool rem1 = cluster->isBeamRemnant(0);
bool rem2 = cluster->isBeamRemnant(1);
// work out which distribution to use
bool soft1(false),soft2(false);
switch (_iopRem) {
case 0:
soft1 = rem1 || rem2;
soft2 = rem2 || rem1;
break;
case 1:
soft1 = rem1;
soft2 = rem2;
break;
}
// Initialization for the exponential ("soft") mass distribution.
static const int max_loop = 1000;
int counter = 0;
Energy Mc1 = ZERO, Mc2 = ZERO,m1=ZERO,m2=ZERO,m=ZERO;
tcPDPtr toHadron1, toHadron2;
PPtr newPtr1 = PPtr ();
PPtr newPtr2 = PPtr ();
bool succeeded = false;
do
{
succeeded = false;
++counter;
if (_enhanceSProb == 0){
drawNewFlavour(newPtr1,newPtr2);
}
else {
Energy2 mass2 = clustermass(cluster);
drawNewFlavourEnhanced(newPtr1,newPtr2,mass2);
}
// check for right ordering
assert (ptrQ2);
assert (newPtr2);
assert (ptrQ2->dataPtr());
assert (newPtr2->dataPtr());
if(cantMakeHadron(ptrQ1, newPtr1) || cantMakeHadron(ptrQ2, newPtr2)) {
swap(newPtr1, newPtr2);
// check again
if(cantMakeHadron(ptrQ1, newPtr1) || cantMakeHadron(ptrQ2, newPtr2)) {
throw Exception()
<< "ClusterFissioner cannot split the cluster ("
<< ptrQ1->PDGName() << ' ' << ptrQ2->PDGName()
<< ") into hadrons.\n" << Exception::runerror;
}
}
// Check that new clusters can produce particles and there is enough
// phase space to choose the drawn flavour
m1 = ptrQ1->data().constituentMass();
m2 = ptrQ2->data().constituentMass();
m = newPtr1->data().constituentMass();
// Do not split in the case there is no phase space available
if(Mc < m1+m + m2+m) continue;
// power for splitting
double exp1=_pSplitLight;
double exp2=_pSplitLight;
if (CheckId::isExotic(ptrQ1->dataPtr())) exp1 = _pSplitExotic;
else if(CheckId::hasBottom(ptrQ1->dataPtr()))exp1 = _pSplitBottom;
else if(CheckId::hasCharm(ptrQ1->dataPtr())) exp1 = _pSplitCharm;
if (CheckId::isExotic(ptrQ2->dataPtr())) exp2 = _pSplitExotic;
else if(CheckId::hasBottom(ptrQ2->dataPtr())) exp2 = _pSplitBottom;
else if(CheckId::hasCharm(ptrQ2->dataPtr())) exp2 = _pSplitCharm;
// If, during the drawing of candidate masses, too many attempts fail
// (because the phase space available is tiny)
/// \todo run separate loop here?
Mc1 = drawChildMass(Mc,m1,m2,m,exp1,soft1);
Mc2 = drawChildMass(Mc,m2,m1,m,exp2,soft2);
bool C1 = ( Mc1*Mc1 )/( m1*m1 + 2.*m*m + _kinThresholdShift ) < 1.0;
bool C2 = ( Mc2*Mc2 )/( m2*m2 + 2.*m*m + _kinThresholdShift ) < 1.0;
bool C3 = ( Mc1*Mc1 + Mc2*Mc2 )/( Mc*Mc ) > 1.0;
if( C1 || C2 || C3 ) continue;
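// For orientation (a reading of the three checks above, not additional
// logic): C1 and C2 are true when a drawn child mass squared Mc_i^2 falls
// below m_i^2 + 2*m^2 + _kinThresholdShift, i.e. roughly below the squared
// threshold needed to contain the constituents m_i and m, while C3 is true
// when Mc1^2 + Mc2^2 exceeds Mc^2; any of the three rejects this attempt
// and another pair of masses is drawn.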
/**************************
* New (not present in Fortran Herwig):
* check whether the fragment masses Mc1 and Mc2 are above the
* threshold for the production of the lightest pair of hadrons with the
* right flavours. If not, then set by hand the mass to the lightest
* single hadron with the right flavours, in order to solve correctly
* the kinematics, and (later in this method) create directly such hadron
* and add it to the hadron children of the cluster that undergoes the
* fission (i.e. the one pointed to by cluster). Notice that in this special
* case, the heavy cluster that undergoes the fission has one single
* cluster child and one single hadron child. We prefer this approach,
* rather than creating a light cluster, with the mass set equal to
* that of the lightest hadron, and then letting the LightClusterDecayer class
* decay it to that single hadron, for two reasons:
* First, because the sum of the masses of the two constituents can be,
* in this case, greater than the mass of that hadron, hence it would
* be impossible to solve the kinematics for such two components, and
* therefore we would have a cluster whose components are undefined.
* Second, the algorithm is faster, because it avoids the reshuffling
* procedure that would be necessary if we used LightClusterDecayer
* to decay the light cluster to the lightest hadron.
****************************/
toHadron1 = _hadronSelector->chooseSingleHadron(ptrQ1->dataPtr(), newPtr1->dataPtr(),Mc1);
if(toHadron1) Mc1 = toHadron1->mass();
toHadron2 = _hadronSelector->chooseSingleHadron(ptrQ2->dataPtr(), newPtr2->dataPtr(),Mc2);
if(toHadron2) Mc2 = toHadron2->mass();
// if a beam cluster not allowed to decay to hadrons
if(cluster->isBeamCluster() && (toHadron1||toHadron2) && softUEisOn)
continue;
// Check if the decay kinematics is still possible: if not then
// force the one-hadron decay for the other cluster as well.
if(Mc1 + Mc2 > Mc) {
if(!toHadron1) {
toHadron1 = _hadronSelector->chooseSingleHadron(ptrQ1->dataPtr(), newPtr1->dataPtr(),Mc-Mc2);
if(toHadron1) Mc1 = toHadron1->mass();
}
else if(!toHadron2) {
toHadron2 = _hadronSelector->chooseSingleHadron(ptrQ2->dataPtr(), newPtr2->dataPtr(),Mc-Mc1);
if(toHadron2) Mc2 = toHadron2->mass();
}
}
succeeded = (Mc >= Mc1+Mc2);
}
while (!succeeded && counter < max_loop);
if(counter >= max_loop) {
static const PPtr null = PPtr();
return cutType(PPair(null,null),PPair(null,null));
}
// Determine the (5-component) momenta (all in the LAB frame)
Lorentz5Momentum pClu = cluster->momentum(); // known
Lorentz5Momentum p0Q1 = ptrQ1->momentum(); // known (mom Q1 before fission)
Lorentz5Momentum pClu1, pClu2, pQ1, pQone, pQtwo, pQ2; //unknown
pClu1.setMass(Mc1);
pClu2.setMass(Mc2);
pQ1.setMass(m1);
pQ2.setMass(m2);
pQone.setMass(m);
pQtwo.setMass(m);
calculateKinematics(pClu,p0Q1,toHadron1,toHadron2,
pClu1,pClu2,pQ1,pQone,pQtwo,pQ2); // out
/******************
* The previous methods have determined the kinematics and positions
* of C -> C1 + C2.
* In the case that one of the two products is light, meaning that either
* toHadron1 or toHadron2 is non-null, the momenta
* of the components of that light product have not been determined,
* and a (light) cluster will not be created: the heavy father cluster
* decays, in this case, into a single (not-light) cluster and a
* single hadron. In the other, "normal", cases the father cluster
* decays into two clusters, each of which has well defined components.
* Notice that, in the case of components which point to particles, the
* momenta of the components are properly set to the new values, whereas
* we do not change the momenta of the pointed particles, because we
* want to keep all of the information (that is, the new momentum of a
* component after the splitting, which is contained in the _momentum
* member of the Component class, and the (old) momentum of that component
* before the splitting, which is contained in the momentum of the
* pointed particle). Please do not be confused by this apparent
* inconsistency!
********************/
LorentzPoint posC,pos1,pos2;
posC = cluster->vertex();
calculatePositions(pClu, posC, pClu1, pClu2, pos1, pos2);
cutType rval;
if(toHadron1) {
rval.first = produceHadron(toHadron1, newPtr1, pClu1, pos1);
finalhadrons.push_back(rval.first.first);
}
else {
rval.first = produceCluster(ptrQ1, newPtr1, pClu1, pos1, pQ1, pQone, rem1);
}
if(toHadron2) {
rval.second = produceHadron(toHadron2, newPtr2, pClu2, pos2);
finalhadrons.push_back(rval.second.first);
}
else {
rval.second = produceCluster(ptrQ2, newPtr2, pClu2, pos2, pQ2, pQtwo, rem2);
}
return rval;
}
ClusterFissioner::cutType
ClusterFissioner::cutThree(ClusterPtr & cluster, tPVector & finalhadrons,
bool softUEisOn) {
// need to make sure only 3-cpt clusters get here
assert(cluster->numComponents() == 3);
// extract quarks
tPPtr ptrQ[3] = {cluster->particle(0),cluster->particle(1),cluster->particle(2)};
assert( ptrQ[0] && ptrQ[1] && ptrQ[2] );
// find maximum mass pair
Energy mmax(ZERO);
Lorentz5Momentum pDiQuark;
int iq1(-1),iq2(-1);
Lorentz5Momentum psum;
for(int q1=0;q1<3;++q1) {
psum+= ptrQ[q1]->momentum();
for(int q2=q1+1;q2<3;++q2) {
Lorentz5Momentum ptest = ptrQ[q1]->momentum()+ptrQ[q2]->momentum();
ptest.rescaleMass();
Energy mass = ptest.m();
if(mass>mmax) {
mmax = mass;
pDiQuark = ptest;
iq1 = q1;
iq2 = q2;
}
}
}
// and the spectators
int iother(-1);
for(int ix=0;ix<3;++ix) if(ix!=iq1&&ix!=iq2) iother=ix;
assert(iq1>=0&&iq2>=0&&iother>=0);
// And check if those particles are from a beam remnant
bool rem1 = cluster->isBeamRemnant(iq1);
bool rem2 = cluster->isBeamRemnant(iq2);
// work out which distribution to use
bool soft1(false),soft2(false);
switch (_iopRem) {
case 0:
soft1 = rem1 || rem2;
soft2 = rem2 || rem1;
break;
case 1:
soft1 = rem1;
soft2 = rem2;
break;
}
// Initialization for the exponential ("soft") mass distribution.
static const int max_loop = 1000;
int counter = 0;
Energy Mc1 = ZERO, Mc2 = ZERO, m1=ZERO, m2=ZERO, m=ZERO;
tcPDPtr toHadron;
bool toDiQuark(false);
PPtr newPtr1 = PPtr(),newPtr2 = PPtr();
PDPtr diquark;
bool succeeded = false;
do {
succeeded = false;
++counter;
if (_enhanceSProb == 0) {
drawNewFlavour(newPtr1,newPtr2);
}
else {
Energy2 mass2 = clustermass(cluster);
drawNewFlavourEnhanced(newPtr1,newPtr2, mass2);
}
// randomly pick which will be (anti)diquark and which a mesonic cluster
if(UseRandom::rndbool()) {
swap(iq1,iq2);
swap(rem1,rem2);
}
// check first order
if(cantMakeHadron(ptrQ[iq1], newPtr1) || cantMakeDiQuark(ptrQ[iq2], newPtr2)) {
swap(newPtr1,newPtr2);
}
// check again
if(cantMakeHadron(ptrQ[iq1], newPtr1) || cantMakeDiQuark(ptrQ[iq2], newPtr2)) {
throw Exception()
<< "ClusterFissioner cannot split the cluster ("
<< ptrQ[iq1]->PDGName() << ' ' << ptrQ[iq2]->PDGName()
<< ") into a hadron and diquark.\n" << Exception::runerror;
}
// Check that new clusters can produce particles and there is enough
// phase space to choose the drawn flavour
m1 = ptrQ[iq1]->data().constituentMass();
m2 = ptrQ[iq2]->data().constituentMass();
m = newPtr1->data().constituentMass();
// Do not split in the case there is no phase space available
if(mmax < m1+m + m2+m) continue;
// power for splitting
double exp1(_pSplitLight),exp2(_pSplitLight);
if (CheckId::isExotic (ptrQ[iq1]->dataPtr())) exp1 = _pSplitExotic;
else if(CheckId::hasBottom(ptrQ[iq1]->dataPtr())) exp1 = _pSplitBottom;
else if(CheckId::hasCharm (ptrQ[iq1]->dataPtr())) exp1 = _pSplitCharm;
if (CheckId::isExotic (ptrQ[iq2]->dataPtr())) exp2 = _pSplitExotic;
else if(CheckId::hasBottom(ptrQ[iq2]->dataPtr())) exp2 = _pSplitBottom;
else if(CheckId::hasCharm (ptrQ[iq2]->dataPtr())) exp2 = _pSplitCharm;
// If, during the drawing of candidate masses, too many attempts fail
// (because the phase space available is tiny)
/// \todo run separate loop here?
Mc1 = drawChildMass(mmax,m1,m2,m,exp1,soft1);
Mc2 = drawChildMass(mmax,m2,m1,m,exp2,soft2);
if(Mc1 < m1+m || Mc2 < m+m2 || Mc1+Mc2 > mmax) continue;
// check if we need to force the meson cluster to a hadron
toHadron = _hadronSelector->chooseSingleHadron(ptrQ[iq1]->dataPtr(), newPtr1->dataPtr(),Mc1);
if(toHadron) Mc1 = toHadron->mass();
// check if need to force diquark cluster to be on-shell
toDiQuark = false;
diquark = _hadronSelector->makeDiquark(ptrQ[iq2]->dataPtr(),newPtr2->dataPtr());
if(Mc2 < diquark->constituentMass()) {
Mc2 = diquark->constituentMass();
toDiQuark = true;
}
// if a beam cluster not allowed to decay to hadrons
if(cluster->isBeamCluster() && toHadron && softUEisOn)
continue;
// Check if the decay kinematics is still possible: if not then
// force the one-hadron decay for the other cluster as well.
if(Mc1 + Mc2 > mmax) {
if(!toHadron) {
toHadron = _hadronSelector->chooseSingleHadron(ptrQ[iq1]->dataPtr(), newPtr1->dataPtr(),mmax-Mc2);
if(toHadron) Mc1 = toHadron->mass();
}
else if(!toDiQuark) {
Mc2 = _hadronSelector->massLightestHadron(ptrQ[iq2]->dataPtr(), newPtr2->dataPtr());
toDiQuark = true;
}
}
succeeded = (mmax >= Mc1+Mc2);
}
while (!succeeded && counter < max_loop);
// check no of tries
if(counter >= max_loop) return cutType();
// Determine the (5-component) momenta (all in the LAB frame)
Lorentz5Momentum p0Q1 = ptrQ[iq1]->momentum();
// to be determined
Lorentz5Momentum pClu1(Mc1), pClu2(Mc2), pQ1(m1), pQone(m), pQtwo(m), pQ2(m2);
calculateKinematics(pDiQuark,p0Q1,toHadron,toDiQuark,
pClu1,pClu2,pQ1,pQone,pQtwo,pQ2);
// positions of the new clusters
LorentzPoint pos1,pos2;
Lorentz5Momentum pBaryon = pClu2+ptrQ[iother]->momentum();
calculatePositions(cluster->momentum(), cluster->vertex(), pClu1, pBaryon, pos1, pos2);
// first the mesonic cluster/meson
cutType rval;
if(toHadron) {
rval.first = produceHadron(toHadron, newPtr1, pClu1, pos1);
finalhadrons.push_back(rval.first.first);
}
else {
rval.first = produceCluster(ptrQ[iq1], newPtr1, pClu1, pos1, pQ1, pQone, rem1);
}
if(toDiQuark) {
rem2 |= cluster->isBeamRemnant(iother);
PPtr newDiQuark = diquark->produceParticle(pClu2);
rval.second = produceCluster(newDiQuark, ptrQ[iother], pBaryon, pos2, pClu2,
ptrQ[iother]->momentum(), rem2);
}
else {
rval.second = produceCluster(ptrQ[iq2], newPtr2, pBaryon, pos2, pQ2, pQtwo, rem2,
ptrQ[iother],cluster->isBeamRemnant(iother));
}
cluster->isAvailable(false);
return rval;
}
ClusterFissioner::PPair
ClusterFissioner::produceHadron(tcPDPtr hadron, tPPtr newPtr, const Lorentz5Momentum &a,
const LorentzPoint &b) const {
PPair rval;
if(hadron->coloured()) {
rval.first = (_hadronSelector->lightestHadron(hadron,newPtr->dataPtr()))->produceParticle();
}
else
rval.first = hadron->produceParticle();
rval.second = newPtr;
rval.first->set5Momentum(a);
rval.first->setVertex(b);
return rval;
}
ClusterFissioner::PPair ClusterFissioner::produceCluster(tPPtr ptrQ, tPPtr newPtr,
const Lorentz5Momentum & a,
const LorentzPoint & b,
const Lorentz5Momentum & c,
const Lorentz5Momentum & d,
bool isRem,
tPPtr spect, bool remSpect) const {
PPair rval;
rval.second = newPtr;
ClusterPtr cluster = !spect ? new_ptr(Cluster(ptrQ,rval.second)) : new_ptr(Cluster(ptrQ,rval.second,spect));
rval.first = cluster;
cluster->set5Momentum(a);
cluster->setVertex(b);
assert(cluster->particle(0)->id() == ptrQ->id());
cluster->particle(0)->set5Momentum(c);
cluster->particle(1)->set5Momentum(d);
cluster->setBeamRemnant(0,isRem);
if(remSpect) cluster->setBeamRemnant(2,remSpect);
return rval;
}
void ClusterFissioner::drawNewFlavour(PPtr& newPtrPos,PPtr& newPtrNeg) const {
// Flavour is assumed to be only u, d, s, with weights
// (which are not normalized probabilities) given
// by the same weights as used in HadronSelector for
// the decay of clusters into two hadrons.
double prob_d;
double prob_u;
double prob_s;
switch(_fissionCluster){
case 0:
prob_d = _hadronSelector->pwt(1);
prob_u = _hadronSelector->pwt(2);
prob_s = _hadronSelector->pwt(3);
break;
case 1:
prob_d = _fissionPwtDquark;
prob_u = _fissionPwtUquark;
prob_s = _fissionPwtSquark;
break;
default :
assert(false);
}
int choice = UseRandom::rnd3(prob_u, prob_d, prob_s);
long idNew = 0;
switch (choice) {
case 0: idNew = ThePEG::ParticleID::u; break;
case 1: idNew = ThePEG::ParticleID::d; break;
case 2: idNew = ThePEG::ParticleID::s; break;
}
newPtrPos = getParticle(idNew);
newPtrNeg = getParticle(-idNew);
assert (newPtrPos);
assert(newPtrNeg);
assert (newPtrPos->dataPtr());
assert(newPtrNeg->dataPtr());
}
void ClusterFissioner::drawNewFlavourEnhanced(PPtr& newPtrPos,PPtr& newPtrNeg,
Energy2 mass2) const {
// Flavour is assumed to be only u, d, s, with weights
// (which are not normalized probabilities) given
// by the same weights as used in HadronSelector for
// the decay of clusters into two hadrons.
double prob_d;
double prob_u;
double prob_s = 0.;
double scale = abs(double(sqr(_m0Fission)/mass2));
// Choose which splitting weights you wish to use
switch(_fissionCluster){
// 0: ClusterFissioner and ClusterDecayer use the same weights
case 0:
prob_d = _hadronSelector->pwt(1);
prob_u = _hadronSelector->pwt(2);
/* Strangeness enhancement:
Case 1: probability scaling
Case 2: Exponential scaling
*/
if (_enhanceSProb == 1)
prob_s = (_maxScale < scale) ? 0. : pow(_hadronSelector->pwt(3),scale);
else if (_enhanceSProb == 2)
prob_s = (_maxScale < scale) ? 0. : exp(-scale);
break;
/* 1: ClusterFissioner uses its own unique set of weights,
i.e. decoupled from ClusterDecayer */
case 1:
prob_d = _fissionPwtDquark;
prob_u = _fissionPwtUquark;
if (_enhanceSProb == 1)
prob_s = (_maxScale < scale) ? 0. : pow(_fissionPwtSquark,scale);
else if (_enhanceSProb == 2)
prob_s = (_maxScale < scale) ? 0. : exp(-scale);
break;
default:
assert(false);
}
int choice = UseRandom::rnd3(prob_u, prob_d, prob_s);
long idNew = 0;
switch (choice) {
case 0: idNew = ThePEG::ParticleID::u; break;
case 1: idNew = ThePEG::ParticleID::d; break;
case 2: idNew = ThePEG::ParticleID::s; break;
}
newPtrPos = getParticle(idNew);
newPtrNeg = getParticle(-idNew);
assert (newPtrPos);
assert(newPtrNeg);
assert (newPtrPos->dataPtr());
assert(newPtrNeg->dataPtr());
}
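// Illustrative note on the enhancement above (a reading of the code, not
// additional Herwig functionality): with scale = |m0Fission^2 / M^2|, where
// M^2 is the cluster mass (or lambda) measure returned by clustermass(),
// the strange-quark weight is, schematically,
//   EnhanceSProb=Scaled      : prob_s = pwt_s ^ scale
//   EnhanceSProb=Exponential : prob_s = exp(-scale)
// and is set to zero whenever scale exceeds _maxScale. Heavy clusters thus
// have a small scale and a weight close to one, while light clusters are
// given little or no strangeness.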
Energy2 ClusterFissioner::clustermass(const ClusterPtr & cluster){
Lorentz5Momentum pIn = cluster->momentum();
Energy2 endpointmass2 = sqr(cluster->particle(0)->mass() +
cluster->particle(1)->mass());
Energy2 singletm2 = pIn.m2();
// Return either the cluster mass, or the lambda measure
return (_massMeasure == 0) ? singletm2 : singletm2 - endpointmass2;
}
Energy ClusterFissioner::drawChildMass(const Energy M, const Energy m1,
const Energy m2, const Energy m,
const double expt, const bool soft) const {
/***************************
* This method, given as input the mass M of a heavy cluster C,
* made of constituents of masses m1 and m2, draws the mass of one of the
* two children clusters: C1, made of constituent masses m1 and m, or
* C2, made of constituent masses m2 and m (the caller invokes it once for
* each child). The mass is extracted from one of the two following mass
* distributions:
* --- power-like ("normal" distribution)
*       d(Prob) / d(M^exponent) = const
*     where the exponent can be different for the two children C1 (exp1)
*     and C2 (exp2).
* --- exponential ("soft" distribution)
*       d(Prob) / d(M^2) = exp(-b*M)
*     where b = 2.0 / average.
* Such distributions are limited below by the masses of
* the constituent quarks, and above by the mass of the decaying cluster C.
* The choice of which of the two mass distributions to use for each of the
* two cluster children is dictated by the soft flag (set by the caller from
* _iopRem and the beam-remnant status of the constituents).
* If the number of attempts to extract a pair of mass values that are
* kinematically acceptable exceeds some fixed number (max_loop in the
* calling methods), the splitting attempt is abandoned.
*
* These distributions have been modified from HERWIG:
* Before these were:
*    Mclu1 = m1 + (Mclu - m1 - m2)*pow( rnd(), 1.0/exponent1 );
* The version coded here is a more efficient one with the same density,
* but taking the phase-space limits into account beforehand.
***************************/
// hard cluster
if(!soft) {
return pow(UseRandom::rnd(pow((M-m1-m2-m)*UnitRemoval::InvE, expt),
pow(m*UnitRemoval::InvE, expt)), 1./expt
)*UnitRemoval::E + m1;
}
// Otherwise it uses a soft mass distribution
else {
static const InvEnergy b = 2.0 / _btClM;
Energy max = M-m1-m2-2.0*m;
double rmin = b*max;
rmin = ( rmin < 50 ) ? exp(-rmin) : 0.;
double r1;
do {
r1 = UseRandom::rnd(rmin, 1.0) * UseRandom::rnd(rmin, 1.0);
}
while (r1 < rmin);
return m1 + m - log(r1)/b;
}
}
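/* Illustrative sketch (standalone, not part of Herwig): the hard branch above
 * is an inverse-transform sample of d(Prob)/d(M^P) = const on the allowed
 * range, i.e. M_child = m1 + [ U( m^P, (M-m1-m2-m)^P ) ]^(1/P) with U a
 * uniform draw. Stripped of ThePEG units it would read roughly:
 *
 *   #include <cmath>
 *   #include <random>
 *   // all masses in GeV; assumes M > m1 + m2 + 2*m (checked by the caller)
 *   double drawHardChildMass(double M, double m1, double m2, double m,
 *                            double P, std::mt19937 &rng) {
 *     std::uniform_real_distribution<double> flat(std::pow(m, P),
 *                                                 std::pow(M - m1 - m2 - m, P));
 *     return m1 + std::pow(flat(rng), 1.0 / P);
 *   }
 *
 * The soft branch instead returns m1 + m - log(r1)/b with b = 2/_btClM and
 * r1 the product of two uniform numbers in [rmin,1], retried until
 * r1 >= rmin, reproducing the fast-falling "soft" distribution quoted in the
 * comment above.
 */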
void ClusterFissioner::calculateKinematics(const Lorentz5Momentum & pClu,
const Lorentz5Momentum & p0Q1,
const bool toHadron1,
const bool toHadron2,
Lorentz5Momentum & pClu1,
Lorentz5Momentum & pClu2,
Lorentz5Momentum & pQ1,
Lorentz5Momentum & pQbar,
Lorentz5Momentum & pQ,
Lorentz5Momentum & pQ2bar) const {
/******************
* This method solves the kinematics of the two body cluster decay:
* C (Q1 Q2bar) ---> C1 (Q1 Qbar) + C2 (Q Q2bar)
* As input we receive the momentum of C, pClu, and the momentum
* of the quark Q1 (constituent of C), p0Q1, both in the LAB frame.
* Furthermore, two boolean variables indicate whether the two fission
* products (C1, C2) decay immediately into a single hadron (in which
* case the cluster itself is identified with that hadron) and we do
* not have to solve the kinematics of the components (Q1,Qbar) for
* C1 and (Q,Q2bar) for C2.
* The output is given by the following momenta (all 5-components,
* and all in the LAB frame):
* pClu1 , pClu2 respectively of C1 , C2
* pQ1 , pQbar respectively of Q1 , Qbar in C1
* pQ , pQ2bar respectively of Q , Q2bar in C2
* The assumption, suggested from the string model, is that, in C frame,
* C1 and its constituents Q1 and Qbar are collinear, and collinear to
* the direction of Q1 in C (that is before cluster decay); similarly,
* (always in the C frame) C2 and its constituents Q and Q2bar are
* collinear (and therefore anti-collinear with C1,Q1,Qbar).
* The solution is then obtained by using Lorentz boosts, as follows.
* The kinematics of C1 and C2 is solved in their parent C frame,
* and then boosted back in the LAB. The kinematics of Q1 and Qbar
* is solved in their parent C1 frame and then boosted back in the LAB;
* similarly, the kinematics of Q and Q2bar is solved in their parent
* C2 frame and then boosted back in the LAB. In each of the three
* "two-body decay"-like cases, we use the fact that the direction
* of the motion of the decay products is known in the rest frame of
* their parent. This is obvious for the first case in which the
* parent rest frame is C; but it is also true in the other two cases
* where the rest frames are C1 and C2. This is because C1 and C2
* are boosted w.r.t. C in the same direction where their components,
* respectively (Q1,Qbar) and (Q,Q2bar) move in C1 and C2 rest frame
* respectively.
* Of course, although the notation assumes that C = (Q1 Q2bar),
* where Q1 is a quark and Q2bar an antiquark, everything remains
* unchanged in all of the following cases:
* Q1 quark, Q2bar antiquark; --> Q quark;
* Q1 antiquark , Q2bar quark; --> Q antiquark;
* Q1 quark, Q2bar diquark; --> Q quark
* Q1 antiquark, Q2bar anti-diquark; --> Q antiquark
* Q1 diquark, Q2bar quark --> Q antiquark
* Q1 anti-diquark, Q2bar antiquark; --> Q quark
**************************/
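// For reference (standard two-body kinematics, assumed to be what the
// Kinematics::twoBodyDecay calls below implement): in the rest frame of a
// parent of mass M decaying into masses ma and mb, the back-to-back momentum
// of the products is
//   p* = sqrt( (M^2 - (ma+mb)^2) * (M^2 - (ma-mb)^2) ) / (2*M),
// which is real only for M >= ma + mb; hence the mass checks and the
// "Impossible Kinematics" exceptions below.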
// Calculate the unit three-vector, in the C frame, along which
// all of the constituents and children clusters move.
Lorentz5Momentum u(p0Q1);
u.boost( -pClu.boostVector() ); // boost from LAB to C
// the unit three-vector is then u.vect().unit()
// Calculate the momenta of C1 and C2 in the (parent) C frame first,
// where the direction of C1 is u.vect().unit(), and then boost back in the
// LAB frame.
if (pClu.m() < pClu1.mass() + pClu2.mass() ) {
throw Exception() << "Impossible Kinematics in ClusterFissioner::calculateKinematics() (A)"
<< Exception::eventerror;
}
Kinematics::twoBodyDecay(pClu, pClu1.mass(), pClu2.mass(),
u.vect().unit(), pClu1, pClu2);
// In the case that cluster1 does not decay immediately into a single hadron,
// calculate the momenta of Q1 (as constituent of C1) and Qbar in the
// (parent) C1 frame first, where the direction of Q1 is u.vect().unit(),
// and then boost back in the LAB frame.
if(!toHadron1) {
if (pClu1.m() < pQ1.mass() + pQbar.mass() ) {
throw Exception() << "Impossible Kinematics in ClusterFissioner::calculateKinematics() (B)"
<< Exception::eventerror;
}
Kinematics::twoBodyDecay(pClu1, pQ1.mass(), pQbar.mass(),
u.vect().unit(), pQ1, pQbar);
}
// In the case that cluster2 does not decay immediately into a single hadron,
// Calculate the momenta of Q and Q2bar (as constituent of C2) in the
// (parent) C2 frame first, where the direction of Q is u.vect().unit(),
// and then boost back in the LAB frame.
if(!toHadron2) {
if (pClu2.m() < pQ.mass() + pQ2bar.mass() ) {
throw Exception() << "Impossible Kinematics in ClusterFissioner::calculateKinematics() (C)"
<< Exception::eventerror;
}
Kinematics::twoBodyDecay(pClu2, pQ.mass(), pQ2bar.mass(),
u.vect().unit(), pQ, pQ2bar);
}
}
void ClusterFissioner::calculatePositions(const Lorentz5Momentum & pClu,
const LorentzPoint & positionClu,
const Lorentz5Momentum & pClu1,
const Lorentz5Momentum & pClu2,
LorentzPoint & positionClu1,
LorentzPoint & positionClu2) const {
// Determine positions of cluster children.
// See Marc Smith's thesis, page 127, formulas (4.122) and (4.123).
Energy Mclu = pClu.m();
Energy Mclu1 = pClu1.m();
Energy Mclu2 = pClu2.m();
// Calculate the unit three-vector, in the C frame, along which
// children clusters move.
Lorentz5Momentum u(pClu1);
u.boost( -pClu.boostVector() ); // boost from LAB to C frame
+
// the unit three-vector is then u.vect().unit()
Energy pstarChild = Kinematics::pstarTwoBodyDecay(Mclu,Mclu1,Mclu2);
// First, determine the relative positions of the children clusters
// in the parent cluster reference frame.
+
+ Energy2 mag2 = u.vect().mag2();
+ InvEnergy fact = mag2>ZERO ? 1./sqrt(mag2) : 1./GeV;
+
Length x1 = ( 0.25*Mclu + 0.5*( pstarChild + (sqr(Mclu2) - sqr(Mclu1))/(2.0*Mclu)))/_kappa;
Length t1 = Mclu/_kappa - x1;
- LorentzDistance distanceClu1( x1 * u.vect().unit(), t1 );
+ LorentzDistance distanceClu1( x1 * fact * u.vect(), t1 );
Length x2 = (-0.25*Mclu + 0.5*(-pstarChild + (sqr(Mclu2) - sqr(Mclu1))/(2.0*Mclu)))/_kappa;
Length t2 = Mclu/_kappa + x2;
- LorentzDistance distanceClu2( x2 * u.vect().unit(), t2 );
+ LorentzDistance distanceClu2( x2 * fact * u.vect(), t2 );
// Then, transform such relative positions from the parent cluster
// reference frame to the Lab frame.
distanceClu1.boost( pClu.boostVector() );
distanceClu2.boost( pClu.boostVector() );
// Finally, determine the absolute positions in the Lab frame.
positionClu1 = positionClu + distanceClu1;
positionClu2 = positionClu + distanceClu2;
}
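/* Illustrative note (a transcription of the code above, following the quoted
 * thesis formulas): in the parent rest frame the children are displaced along
 * the (normalised) direction of C1 by
 *   x1 = (  M/4 + p*/2 + (M2^2 - M1^2)/(4*M) ) / kappa ,   t1 = M/kappa - x1
 *   x2 = ( -M/4 - p*/2 + (M2^2 - M1^2)/(4*M) ) / kappa ,   t2 = M/kappa + x2
 * with p* the two-body decay momentum and kappa the string tension; these
 * LorentzDistances are then boosted back to the LAB frame and added to the
 * parent cluster vertex.
 */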
bool ClusterFissioner::ProbablityFunction(double scale, double threshold) {
double cut = UseRandom::rnd(0.0,1.0);
return 1./(1.+pow(abs((threshold-_probShift)/scale),_probPowFactor)) > cut;
}
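// Illustrative note (a reading of ProbablityFunction and isHeavy, not extra
// functionality): the acceptance probability of the smooth step above is
//   P(accept) = 1 / ( 1 + | (threshold - _probShift) / scale |^_probPowFactor ).
// isHeavy() below calls it twice: first with scale = (M/GeV)^ClPow and
// threshold = (ClMax/GeV)^ClPow + (sum m_i /GeV)^ClPow, a smoothed version of
// the old hard cutoff kept in the commented-out "regular checks", and then
// with scale = M/GeV and threshold = (sum m_i + 2*m_d)/GeV to check that a
// minimal splitting is kinematically possible at all.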
bool ClusterFissioner::isHeavy(tcClusterPtr clu) {
// default
double clpow = _clPowLight;
Energy clmax = _clMaxLight;
// particle data for constituents
tcPDPtr cptr[3]={tcPDPtr(),tcPDPtr(),tcPDPtr()};
for(unsigned int ix=0;ix<min(clu->numComponents(),3);++ix) {
cptr[ix]=clu->particle(ix)->dataPtr();
}
// different parameters for exotic, bottom and charm clusters
if(CheckId::isExotic(cptr[0],cptr[1],cptr[1])) {
clpow = _clPowExotic;
clmax = _clMaxExotic;
}
else if(CheckId::hasBottom(cptr[0],cptr[1],cptr[1])) {
clpow = _clPowBottom;
clmax = _clMaxBottom;
}
else if(CheckId::hasCharm(cptr[0],cptr[1],cptr[1])) {
clpow = _clPowCharm;
clmax = _clMaxCharm;
}
// some smooth probability function to create a dynamic threshold
double scale = pow(clu->mass()/GeV , clpow);
double threshold = pow(clmax/GeV, clpow)
+ pow(clu->sumConstituentMasses()/GeV, clpow);
bool aboveCutoff = ProbablityFunction(scale,threshold);
//regular checks
// bool aboveCutoff = (
// pow(clu->mass()*UnitRemoval::InvE , clpow)
// >
// pow(clmax*UnitRemoval::InvE, clpow)
// + pow(clu->sumConstituentMasses()*UnitRemoval::InvE, clpow)
// );
// required test for SUSY clusters, since aboveCutoff alone
// cannot guarantee (Mc > m1 + m2 + 2*m) in cut()
static const Energy minmass
= getParticleData(ParticleID::d)->constituentMass();
scale = clu->mass()/GeV;
threshold = clu->sumConstituentMasses()/GeV + 2.0 * minmass/GeV;
bool canSplitMinimally = ProbablityFunction(scale,threshold);
//bool canSplitMinimally = clu->mass() > clu->sumConstituentMasses() + 2.0 * minmass;
return aboveCutoff && canSplitMinimally;
}
diff --git a/Tests/Rivet/EE/EE-Psi2S.in b/Tests/Rivet/EE/EE-Psi2S.in
--- a/Tests/Rivet/EE/EE-Psi2S.in
+++ b/Tests/Rivet/EE/EE-Psi2S.in
@@ -1,33 +1,35 @@
# -*- ThePEG-repository -*-
# e+ e- -> psi(2S)
create Herwig::MEee2VectorMeson /Herwig/MatrixElements/MEPsi2S HwMELepton.so
set /Herwig/MatrixElements/MEPsi2S:VectorMeson /Herwig/Particles/psi(2S)
set /Herwig/MatrixElements/MEPsi2S:Coupling 19.25684
set /Herwig/MatrixElements/SubProcess:MatrixElements 0 /Herwig/MatrixElements/MEPsi2S
set EventGenerator:EventHandler:LuminosityFunction:Energy 3.686097
set /Herwig/Generators/EventGenerator:EventHandler:Cuts:MHatMin 0.2
set /Herwig/Particles/e-:PDF /Herwig/Partons/NoPDF
set /Herwig/Particles/e+:PDF /Herwig/Partons/NoPDF
# psi(2S) -> lambda anti-lambda and sigma anti-sigma
insert /Herwig/Analysis/RivetAnalysis:Analyses 0 BESIII_2017_I1510563
# psi(2S) -> p pbar and n nbar
insert /Herwig/Analysis/RivetAnalysis:Analyses 0 BESIII_2018_I1658762
# psi(2S) -> xi- and Sigma+/-
insert /Herwig/Analysis/RivetAnalysis:Analyses 0 BESIII_2016_I1422780
# psi(2S) -> xi0 and Sigma*0
insert /Herwig/Analysis/RivetAnalysis:Analyses 0 BESIII_2017_I1506414
# psi(2S) -> xi*-
insert /Herwig/Analysis/RivetAnalysis:Analyses 0 BESIII_2019_I1747092
# MARKII psi(2s) -> J/Psi pi+pi-
insert /Herwig/Analysis/RivetAnalysis:Analyses 0 MARKII_1979_I144382
# BES psi(2s) -> J/Psi pi+pi-
insert /Herwig/Analysis/RivetAnalysis:Analyses 0 BES_1999_I507637
# BES psi(2s) -> sigma+ sigmabar-
insert /Herwig/Analysis/RivetAnalysis:Analyses 0 BESIII_2020_I1791570
# psi(2S) -> pi+ pi- pi0
insert /Herwig/Analysis/RivetAnalysis:Analyses 0 BESIII_2012_I1088606
# psi(2S) -> pi+ pi- eta'
insert /Herwig/Analysis/RivetAnalysis:Analyses 0 BESIII_2017_I1621266
# psi(2S) -> xi- xi+bar correlations
insert /Herwig/Analysis/RivetAnalysis:Analyses 0 BESIII_2022_I2099144
-
+# psi(2S) -> gamma chi_c correlations
+insert /Herwig/Analysis/RivetAnalysis:Analyses 0 BESIII_2017_I1507887
+insert /Herwig/Analysis/RivetAnalysis:Analyses 0 CLEO_2009_I832707
diff --git a/Tests/Rivet/EE/EE-Psi3770.in b/Tests/Rivet/EE/EE-Psi3770.in
--- a/Tests/Rivet/EE/EE-Psi3770.in
+++ b/Tests/Rivet/EE/EE-Psi3770.in
@@ -1,114 +1,117 @@
# -*- ThePEG-repository -*-
# e+ e- -> psi(3770)
create Herwig::MEee2VectorMeson /Herwig/MatrixElements/MEPsi3770 HwMELepton.so
set /Herwig/MatrixElements/MEPsi3770:VectorMeson /Herwig/Particles/psi(3770)
set /Herwig/MatrixElements/MEPsi3770:Coupling 58.12041
set /Herwig/MatrixElements/SubProcess:MatrixElements 0 /Herwig/MatrixElements/MEPsi3770
set EventGenerator:EventHandler:LuminosityFunction:Energy 3.7711
set /Herwig/Generators/EventGenerator:EventHandler:Cuts:MHatMin 0.2
set /Herwig/Particles/e-:PDF /Herwig/Partons/NoPDF
set /Herwig/Particles/e+:PDF /Herwig/Partons/NoPDF
########## branching ratios ################
# D0 inclusive
insert /Herwig/Analysis/RivetAnalysis:Analyses 0 PDG_D0
# D+ inclusive
insert /Herwig/Analysis/RivetAnalysis:Analyses 0 PDG_DPLUS
########## semi-leptonic ###################
# CLEO D lepton spectra
insert /Herwig/Analysis/RivetAnalysis:Analyses 0 CLEOC_2009_I823313
insert /Herwig/Analysis/RivetAnalysis:Analyses 0 CLEOC_2006_I715096
insert /Herwig/Analysis/RivetAnalysis:Analyses 0 CLEOC_2008_I769777
# D0 -> K- semi-leptonic
insert /Herwig/Analysis/RivetAnalysis:Analyses 0 BESIII_2018_I1697371
insert /Herwig/Analysis/RivetAnalysis:Analyses 0 BABAR_2007_I1091435
# D+ -> eta
insert /Herwig/Analysis/RivetAnalysis:Analyses 0 CLEOC_2011_I875526
insert /Herwig/Analysis/RivetAnalysis:Analyses 0 BESIII_2018_I1662660
# D0/+ -> pi semi-leptonic
insert /Herwig/Analysis/RivetAnalysis:Analyses 0 BESIII_2018_I1655158
# D0 -> Kbar0 pi- e+ nu_e
insert /Herwig/Analysis/RivetAnalysis:Analyses 0 BESIII_2018_I1705754
# D -> pi pi e+ nu_e
insert /Herwig/Analysis/RivetAnalysis:Analyses 0 BESIII_2019_I1694530
+insert /Herwig/Analysis/RivetAnalysis:Analyses 0 CLEO_2013_I1081165
# D0 -> pi- semi-leptonic
insert /Herwig/Analysis/RivetAnalysis:Analyses 0 BABAR_2015_I1334693
# D0 -> pi-, K- semi-leptonic
insert /Herwig/Analysis/RivetAnalysis:Analyses 0 BESIII_2015_I1391138
-# D+ -> K0,pi0 semi-leptonic
+# D+ -> K0 pi0 semi-leptonic
insert /Herwig/Analysis/RivetAnalysis:Analyses 0 BESIII_2017_I1519425
+# D+ -> K- pi+ semi-leptonic
+insert /Herwig/Analysis/RivetAnalysis:Analyses 0 BESIII_2016_I1411645
# D+ -> omega l nu
insert /Herwig/Analysis/RivetAnalysis:Analyses 0 BESIII_2015_I1386254
######### Two body decays ###################
# D0 -> omega phi
insert /Herwig/Analysis/RivetAnalysis:Analyses 0 BESIII_2022_I1900094
########## 3-body decays ####################
# Dalitz plot analysis of D -> Kpipi decays
insert /Herwig/Analysis/RivetAnalysis:Analyses 0 E691_1992_I342947
insert /Herwig/Analysis/RivetAnalysis:Analyses 0 MARKIII_1987_I247266
# Dalitz plot analysis of D0 -> KS0 pi0 pi0
insert /Herwig/Analysis/RivetAnalysis:Analyses 0 CLEOC_2011_I913909
# Dalitz plot analysis of D+ -> K0S pi+ pi0
insert /Herwig/Analysis/RivetAnalysis:Analyses 0 BESIII_2014_I1277070
# Dalitz plot analysis of D0 -> K0S pi+ pi-
insert /Herwig/Analysis/RivetAnalysis:Analyses 0 CLEO_2003_I633196
# Dalitz plot analysis of D+ -> K- pi+ pi+
insert /Herwig/Analysis/RivetAnalysis:Analyses 0 E791_2002_I585322
insert /Herwig/Analysis/RivetAnalysis:Analyses 0 FOCUS_2007_I750701
insert /Herwig/Analysis/RivetAnalysis:Analyses 0 CLEOC_2008_I780363
# Kinematic distributions in the decay D0 -> K-pi+pi0
insert /Herwig/Analysis/RivetAnalysis:Analyses 0 CLEOII_2001_I537154
# Dalitz decay of D0 -> K0S pi0 eta
insert /Herwig/Analysis/RivetAnalysis:Analyses 0 CLEO_2004_I649917
# Dalitz decay of D0 -> K-pi+eta
insert /Herwig/Analysis/RivetAnalysis:Analyses 0 BELLE_2020_I1785816
# D -> K pi omega
insert /Herwig/Analysis/RivetAnalysis:Analyses 0 BESIII_2021_I1940222
# D -> K pi eta'
insert /Herwig/Analysis/RivetAnalysis:Analyses 0 BESIII_2018_I1693610
# Dalitz plot analysis of D0 -> K0S K+ K-
insert /Herwig/Analysis/RivetAnalysis:Analyses 0 BESIII_2020_I1799437
# Dalitz plot analysis of D0 -> K0S pi+ pi- and D0 -> K0S K+ K-
insert /Herwig/Analysis/RivetAnalysis:Analyses 0 BABAR_2010_I853279
# Dalitz plot analysis of D+ -> K+K+K-
insert /Herwig/Analysis/RivetAnalysis:Analyses 0 LHCB_2019_I1720423
# Dalitz plot analysis of D+ -> K+ K- pi+
insert /Herwig/Analysis/RivetAnalysis:Analyses 0 CLEO_2008_I791716
# Dalitz plot analysis of D+ -> K+ K0S pi0
insert /Herwig/Analysis/RivetAnalysis:Analyses 0 BESIII_2021_I1859124
# Dalitz plot analysis of D0 -> K+ K- pi0
insert /Herwig/Analysis/RivetAnalysis:Analyses 0 BABAR_2007_I749390
# Dalitz plot analysis of D0 -> K0S K+- pi-+
insert /Herwig/Analysis/RivetAnalysis:Analyses 0 CLEO_2012_I1094160
insert /Herwig/Analysis/RivetAnalysis:Analyses 0 LHCB_2016_I1394391
# Dalitz decay of D+ and D+_s -> K+pi+pi-
insert /Herwig/Analysis/RivetAnalysis:Analyses 0 FOCUS_2004_I654030
# Dalitz plot analysis of D+ -> pi+ pi+ pi-
insert /Herwig/Analysis/RivetAnalysis:Analyses 0 E791_2001_I530320
insert /Herwig/Analysis/RivetAnalysis:Analyses 0 CLEOC_2007_I749602
insert /Herwig/Analysis/RivetAnalysis:Analyses 0 FOCUS_2003_I635446
# Dalitz plot analysis of D0 -> pi+ pi- pi0
insert /Herwig/Analysis/RivetAnalysis:Analyses 0 BABAR_2007_I747154
insert /Herwig/Analysis/RivetAnalysis:Analyses 0 BABAR_2016_I1441203
insert /Herwig/Analysis/RivetAnalysis:Analyses 0 CLEO_2005_I679349
# D0 -> eta pi+pi-
insert /Herwig/Analysis/RivetAnalysis:Analyses 0 CLEOC_2008_I779705
# D0 -> pi0 eta eta
insert /Herwig/Analysis/RivetAnalysis:Analyses 0 BESIII_2018_I1662665
########## Four body decays ####################
# D+ -> KS0 pi+pi+pi-
insert /Herwig/Analysis/RivetAnalysis:Analyses 0 BESIII_2019_I1714778
# D0 -> K- pi+ pi0 pi0
insert /Herwig/Analysis/RivetAnalysis:Analyses 0 BESIII_2019_I1725265
# D0 -> K-K-K+pi+
insert /Herwig/Analysis/RivetAnalysis:Analyses 0 FOCUS_2003_I626320
# D0 -> K+ K- pi+ pi-
insert /Herwig/Analysis/RivetAnalysis:Analyses 0 CLEO_2012_I1086166
insert /Herwig/Analysis/RivetAnalysis:Analyses 0 FOCUS_2004_I663820
insert /Herwig/Analysis/RivetAnalysis:Analyses 0 LHCB_2018_I1704426
# D0 -> K+ K- pi+ pi- and 2pi+ 2pi-
insert /Herwig/Analysis/RivetAnalysis:Analyses 0 CLEO_2017_I1519168
# D0 -> 2pi+ 2pi-
insert /Herwig/Analysis/RivetAnalysis:Analyses 0 FOCUS_2007_I741543
########## Spectra ##############################
# CLEO eta, eta' phi spectra in D decays
insert /Herwig/Analysis/RivetAnalysis:Analyses 0 CLEOC_2006_I728043
diff --git a/Tests/python/LowEnergy-EE.py.in b/Tests/python/LowEnergy-EE.py.in
--- a/Tests/python/LowEnergy-EE.py.in
+++ b/Tests/python/LowEnergy-EE.py.in
@@ -1,537 +1,540 @@
#! @PYTHON@
from __future__ import print_function
import yoda,os,math,subprocess,optparse
from string import Template
# get the path for the rivet data
p = subprocess.Popen(["rivet-config", "--datadir"],stdout=subprocess.PIPE)
path=p.communicate()[0].strip().decode("UTF-8")
#Define the arguments
op = optparse.OptionParser(usage=__doc__)
op.add_option("--process" , dest="processes" , default=[], action="append")
op.add_option("--path" , dest="path" , default=path)
op.add_option("--non-perturbative", dest="nonPerturbative" , default=False, action="store_true")
op.add_option("--perturbative" , dest="perturbative" , default=False, action="store_true")
op.add_option("--dest" , dest="dest" , default="Rivet")
op.add_option("--list" , dest="list" , default=False, action="store_true")
op.add_option("--flavour" , dest="flavour" , default="All" )
op.add_option("--plots" , dest="plot" , default=False, action="store_true")
opts, args = op.parse_args()
path=opts.path
thresholds = [0.7,2.*.5,2.*1.87,2.*5.28]
# the list of analyses and processes
analyses = { 'KK' : {}, 'PiPi' : {}, 'PPbar' : {}, "3Pi" : {},
"EtaprimePiPi" : {}, "4Pi" : {}, "EtaPhi" : {}, "EtaOmega" : {},
"2K1Pi" : {}, "2K2Pi" : {}, "4K" : {}, "6m" : {},
"EtaPiPi" : {}, "OmegaPi" : {}, "PiGamma" : {}, "EtaGamma" : {},
"PhiPi" : {}, "OmegaPiPi" : {}, "DD" : {}, "BB" : {},
"5Pi" : {}, "LL" : {}, "Baryon" : {} }
# pi+pi-
analyses["PiPi"]["KLOE_2009_I797438" ] = ["d02-x01-y01"]
analyses["PiPi"]["KLOE_2005_I655225" ] = ["d02-x01-y01"]
analyses["PiPi"]["KLOE2_2017_I1634981" ] = ["d01-x01-y01"]
analyses["PiPi"]["BABAR_2009_I829441" ] = ["d01-x01-y01"]
analyses["PiPi"]["DM1_1978_I134061" ] = ["d01-x01-y01"]
analyses["PiPi"]["DM2_1989_I267118" ] = ["d01-x01-y01"]
analyses["PiPi"]["CMD2_2007_I728302" ] = ["d02-x01-y01"]
analyses["PiPi"]["CMD2_2006_I728191" ] = ["d03-x01-y01"]
analyses["PiPi"]["BESIII_2016_I1385603"] = ["d01-x01-y01"]
analyses["PiPi"]["SND_2005_I686349" ] = ["d01-x01-y01"]
analyses["PiPi"]["CMD_1985_I221309" ] = ["d01-x01-y01","d02-x01-y01"]
analyses["PiPi"]["CMD2_2002_I568807" ] = ["d01-x01-y02"]
analyses["PiPi"]["CMD2_1999_I498859" ] = ["d01-x01-y01"]
analyses['PiPi']["CLEOC_2005_I693873" ] = ["d01-x01-y01"]
analyses['PiPi']["ND_1991_I321108" ] = ["d11-x01-y01"]
analyses['PiPi']["OLYA_1984_I208231" ] = ["d01-x01-y01"]
analyses['PiPi']["SND_2020_I1789269" ] = ["d01-x01-y04"]
# K+K- and K_S^0 K_L^0
analyses['KK']["BESIII_2018_I1704558"] = ["d01-x01-y01"]
analyses['KK']["BABAR_2013_I1238807" ] = ["d01-x01-y01"]
analyses['KK']["DM1_1981_I156053" ] = ["d01-x01-y01"]
analyses['KK']["DM1_1981_I156054" ] = ["d01-x01-y01"]
analyses['KK']["CLEOC_2005_I693873" ] = ["d01-x01-y02"]
analyses['KK']["BABAR_2015_I1383130" ] = ["d01-x01-y04"]
analyses['KK']["DM2_1988_I262690" ] = ["d01-x01-y01"]
analyses['KK']["SND_2007_I755881" ] = ["d01-x01-y01"]
analyses['KK']["SND_2001_I533574" ] = ["d01-x01-y01","d01-x01-y02","d01-x01-y03",
"d02-x01-y01","d02-x01-y02","d02-x01-y03"]
analyses['KK']["SND_2006_I720035" ] = ["d01-x01-y01"]
analyses['KK']["BABAR_2014_I1287920" ] = ["d09-x01-y01"]
analyses['KK']["CMD2_2003_I601222" ] = ["d01-x01-y01"]
analyses['KK']["CMD3_2016_I1444990" ] = ["d01-x01-y06"]
analyses['KK']["CMD2_1995_I406880" ] = ["d01-x01-y01","d01-x01-y02"]
analyses['KK']["CMD2_1999_I502164" ] = ["d01-x01-y01","d02-x01-y01",
"d03-x01-y01","d04-x01-y01"]
analyses['KK']["CMD2_2008_I782516" ] = ["d01-x01-y01","d02-x01-y01"]
analyses['KK']["ND_1991_I321108" ] = ["d12-x01-y01","d13-x01-y01"]
analyses['KK']["OLYA_1981_I173076" ] = ["d01-x01-y01"]
analyses['KK']["SND_2016_I1484677" ] = ["d01-x01-y01","d02-x01-y01"]
analyses['KK']["BABAR_2020_I1769654" ] = ["d01-x01-y01"]
analyses['KK']["BESIII_2021_I1866051"] = ["d01-x01-y01"]
# proton-antiproton
analyses['PPbar']["BESIII_2021_I1966612"] = ["d01-x01-y01"]
analyses['PPbar']["BESIII_2019_I1736235"] = ["d01-x01-y01"]
analyses['PPbar']["BESIII_2019_I1718337"] = ["d01-x01-y01"]
analyses['PPbar']["BESIII_2015_I1358937"] = ["d01-x01-y05"]
analyses['PPbar']["BABAR_2013_I1217421" ] = ["d01-x01-y01"]
analyses['PPbar']["BABAR_2013_I1247058" ] = ["d01-x01-y01"]
analyses['PPbar']["SND_2014_I1321689" ] = ["d01-x01-y01","d02-x01-y01"]
analyses['PPbar']["CMD3_2016_I1385598" ] = ["d01-x01-y06"]
analyses['PPbar']["CLEOC_2005_I693873" ] = ["d01-x01-y03"]
analyses['PPbar']["BABAR_2006_I700020" ] = ["d01-x01-y01","d02-x01-y01"]
analyses['PPbar']["DM2_1983_I190558" ] = ["d01-x01-y01"]
analyses["PPbar"]["DM2_1990_I297706" ] = ["d01-x01-y01"]
analyses["PPbar"]["DM1_1979_I141565" ] = ["d01-x01-y01"]
analyses["PPbar"]["FENICE_1998_I471263" ] = ["d01-x01-y01"]
analyses["PPbar"]["FENICE_1994_I377833" ] = ["d01-x01-y01"]
analyses['PPbar']["BESII_2005_I685906" ] = ["d01-x01-y01"]
analyses['PPbar']["BESIII_2014_I1286898"] = ["d01-x01-y06"]
analyses['PPbar']["BESIII_2021_I1853007"] = ["d01-x01-y01"]
analyses['PPbar']["BESIII_2021_I1847766"] = ["d01-x01-y01"]
# pi0 gamma
analyses["PiGamma"]["SND_2018_I1694988"] = ["d01-x01-y01"]
analyses["PiGamma"]["SND_2016_I1418483"] = ["d01-x01-y05"]
analyses["PiGamma"]["SND_2003_I612867" ] = ["d01-x01-y01"]
analyses["PiGamma"]["CMD2_2005_I658856"] = ["d02-x01-y01"]
analyses["PiGamma"]["SND_2000_I524221" ] = ["d01-x01-y02"]
# eta gamma
analyses["EtaGamma"]["CMD2_2005_I658856" ] = ["d01-x01-y01"]
analyses["EtaGamma"]["SND_2006_I717778" ] = ["d01-x01-y01","d02-x01-y01"]
analyses["EtaGamma"]["SND_2014_I1275333" ] = ["d01-x01-y01"]
analyses["EtaGamma"]["SND_2000_I524221" ] = ["d01-x01-y01"]
analyses["EtaGamma"]["CMD2_1999_I503154" ] = ["d01-x01-y01"]
analyses["EtaGamma"]["CMD2_2001_I554522" ] = ["d01-x01-y01"]
analyses['EtaGamma']["CMD2_1995_I406880" ] = ["d01-x01-y04"]
analyses['EtaGamma']["BABAR_2006_I716277"] = ["d01-x01-y01"]
# 3 pion
analyses["3Pi"]["BABAR_2004_I656680" ] = ["d01-x01-y01"]
analyses["3Pi"]["BABAR_2021_I1937349" ] = ["d01-x01-y01"]
analyses["3Pi"]["BESIII_2019_I1773081" ] = ["d01-x01-y01"]
analyses["3Pi"]["SND_2002_I582183" ] = ["d01-x01-y01"]
analyses["3Pi"]["SND_2003_I619011" ] = ["d01-x01-y01"]
analyses["3Pi"]["SND_1999_I508003" ] = ["d01-x01-y01"]
analyses["3Pi"]["SND_2001_I533574" ] = ["d01-x01-y04","d02-x01-y04"]
analyses["3Pi"]["CMD2_2000_I523691" ] = ["d01-x01-y01"]
analyses["3Pi"]["CMD2_1998_I480170" ] = ["d01-x01-y01"]
analyses['3Pi']["CMD2_1995_I406880" ] = ["d01-x01-y03"]
analyses['3Pi']["DM2_1992_I339265" ] = ["d01-x01-y01"]
analyses['3Pi']["DM1_1980_I140174" ] = ["d01-x01-y01"]
analyses['3Pi']["ND_1991_I321108" ] = ["d05-x01-y01","d10-x01-y04"]
analyses['3Pi']["GAMMAGAMMA_1981_I158474"] = ["d01-x01-y01"]
analyses["3Pi"]["CLEO_2006_I691720" ] = ["d01-x01-y01"]
analyses["3Pi"]["SND_2015_I1389908" ] = ["d01-x01-y01"]
analyses["3Pi"]["SND_2020_I1809286" ] = ["d01-x01-y01","d02-x01-y01",
"d03-x01-y01","d03-x01-y02","d03-x01-y03"]
# eta pipi
analyses["EtaPiPi"]["BABAR_2007_I758568" ] = ["d01-x01-y01","d02-x01-y01"]
analyses["EtaPiPi"]["BABAR_2018_I1647139" ] = ["d01-x01-y01"]
analyses["EtaPiPi"]["BABAR_2018_I1700745" ] = ["d01-x01-y01","d02-x01-y01"]
analyses["EtaPiPi"]["BESIII_2022_I2039027"] = ["d01-x01-y01","d02-x01-y01"]
analyses["EtaPiPi"]["CMD2_2000_I532970" ] = ["d02-x01-y01"]
analyses['EtaPiPi']["CMD3_2019_I1744510" ] = ["d02-x01-y01"]
analyses["EtaPiPi"]["DM2_1988_I264144" ] = ["d01-x01-y01"]
analyses['EtaPiPi']["ND_1991_I321108" ] = ["d06-x01-y01","d14-x01-y01"]
analyses["EtaPiPi"]["SND_2015_I1332929" ] = ["d01-x01-y01"]
analyses["EtaPiPi"]["SND_2018_I1638368" ] = ["d01-x01-y01"]
# eta' pipi
analyses["EtaprimePiPi"]["BABAR_2007_I758568" ] = ["d05-x01-y01","d06-x01-y01"]
analyses["EtaprimePiPi"]["BESIII_2020_I1836509"] = ["d01-x01-y01"]
# Eta Phi
analyses["EtaPhi"]["BABAR_2006_I709730" ] = ["d02-x01-y01"]
analyses["EtaPhi"]["BABAR_2006_I731865" ] = ["d01-x01-y02"]
analyses["EtaPhi"]["BABAR_2007_I758568" ] = ["d08-x01-y01","d09-x01-y01"]
analyses["EtaPhi"]["BABAR_2008_I765258" ] = ["d04-x01-y01","d05-x01-y01"]
analyses["EtaPhi"]["BABAR_2017_I1511276" ] = ["d03-x01-y01"]
+analyses["EtaPhi"]["BABAR_2022_I2120528" ] = ["d04-x01-y01","d05-x01-y01"]
analyses["EtaPhi"]["BELLE_2009_I823878" ] = ["d01-x01-y01"]
analyses["EtaPhi"]["BESII_2008_I801210" ] = ["d01-x01-y03"]
analyses["EtaPhi"]["BESIII_2021_I1857930"] = ["d01-x01-y01"]
analyses["EtaPhi"]["CMD3_2017_I1606078" ] = ["d01-x01-y01"]
analyses["EtaPhi"]["CMD3_2019_I1740541" ] = ["d01-x01-y06","d02-x01-y06","d03-x01-y06"]
analyses["EtaPhi"]["SND_2018_I1693737" ] = ["d01-x01-y01"]
analyses["EtaPhi"]["SND_2019_I1726419" ] = ["d01-x01-y01","d01-x01-y03"]
analyses["EtaPhi"]["SND_2021_I1942539" ] = ["d01-x01-y01"]
# Eta Omega
analyses["EtaOmega"]["BABAR_2006_I709730" ] = ["d02-x01-y01"]
analyses["EtaOmega"]["BABAR_2021_I1938254" ] = ["d01-x01-y01","d02-x01-y01","d03-x01-y01","d04-x01-y01"]
analyses["EtaOmega"]["BESII_2008_I801210" ] = ["d01-x01-y03"]
analyses["EtaOmega"]["BESIII_2020_I1817739"] = ["d01-x01-y01"]
analyses["EtaOmega"]["BESIII_2022_I2047667"] = ["d01-x01-y02"]
analyses["EtaOmega"]["CMD3_2017_I1606078" ] = ["d01-x01-y01","d01-x01-y02"]
analyses["EtaOmega"]["SND_2016_I1473343" ] = ["d01-x01-y01"]
analyses["EtaOmega"]["SND_2019_I1726419" ] = ["d01-x01-y01","d01-x01-y02"]
analyses["EtaOmega"]["SND_2020_I1800477" ] = ["d01-x01-y01","d03-x01-y01"]
# 4 pions
analyses["4Pi"]["BABAR_2017_I1621593" ] = ["d01-x01-y01","d02-x01-y01"]
analyses["4Pi"]["BABAR_2012_I1086164" ] = ["d01-x01-y01"]
analyses["4Pi"]["CMD2_2000_I531667" ] = ["d01-x01-y01"]
analyses["4Pi"]["CMD2_2004_I648023" ] = ["d01-x01-y01"]
analyses["4Pi"]["BABAR_2005_I676691" ] = ["d01-x01-y01"]
analyses["4Pi"]["CMD2_2000_I511375" ] = ["d01-x01-y01"]
analyses["4Pi"]["CMD2_1999_I483994" ] = ["d01-x01-y01","d02-x01-y01","d03-x01-y01"]
analyses["4Pi"]["BESII_2008_I801210" ] = ["d01-x01-y01"]
analyses["4Pi"]["BESIII_2022_I2047667" ] = ["d01-x01-y01"]
analyses["4Pi"]["KLOE_2008_I791841" ] = ["d01-x01-y01"]
analyses['4Pi']["ND_1991_I321108" ] = ["d07-x01-y01","d08-x01-y01","d10-x01-y01","d10-x01-y02",
"d01-x01-y01","d02-x01-y01","d03-x01-y01","d04-x01-y01","d10-x01-y03"]
analyses['4Pi']["BESII_2007_I750713" ] = ["d01-x01-y03"]
analyses['4Pi']["SND_2001_I579319" ] = ["d01-x01-y01","d02-x01-y01"]
analyses['4Pi']["DM1_1982_I168552" ] = ["d01-x01-y01"]
analyses['4Pi']["DM1_1979_I132828" ] = ["d01-x01-y01"]
analyses['4Pi']["GAMMAGAMMA_1980_I153382"] = ["d01-x01-y01"]
analyses['4Pi']["GAMMAGAMMA_1981_I158474"] = ["d01-x01-y02"]
analyses["4Pi"]["BESIII_2020_I1817739" ] = ["d02-x01-y01"]
analyses["4Pi"]["BESIII_2021_I1929314" ] = ["d01-x01-y03"]
# omega pi0 (these are omega(-> pi0 gamma) pi0 measurements)
analyses["OmegaPi"]["CMD2_2003_I616446" ] = ["d01-x01-y01"]
analyses["OmegaPi"]["SND_2000_I503946" ] = ["d01-x01-y01"]
analyses["OmegaPi"]["SND_2000_I527752" ] = ["d01-x01-y01"]
analyses["OmegaPi"]["SND_2016_I1489182" ] = ["d01-x01-y01"]
# non Omega
analyses["OmegaPi"]["SND_2002_I587084" ] = ["d01-x01-y01"]
analyses["OmegaPi"]["CMD2_2004_I630009" ] = ["d01-x01-y01"]
analyses["OmegaPi"]["KLOE_2008_I791841" ] = ["d02-x01-y01"]
# from 4 Pion
analyses["OmegaPi"]["CMD2_1999_I483994" ] = ["d03-x01-y01"]
analyses['OmegaPi']["ND_1991_I321108" ] = ["d01-x01-y01","d02-x01-y01","d03-x01-y01",
"d04-x01-y01","d10-x01-y03"]
analyses["OmegaPi"]["BESIII_2020_I1817739"] = ["d02-x01-y01"]
# omega 2 pi
analyses["OmegaPiPi"]["BABAR_2007_I758568" ] = ["d01-x01-y01","d03-x01-y01","d04-x01-y01"]
analyses["OmegaPiPi"]["BABAR_2018_I1700745" ] = ["d01-x01-y01","d03-x01-y01"]
analyses['OmegaPiPi']["BESIII_2021_I1999208"] = ["d01-x01-y01"]
analyses["OmegaPiPi"]["CMD2_2000_I532970" ] = ["d01-x01-y01"]
analyses["OmegaPiPi"]["DM1_1981_I166964" ] = ["d01-x01-y01"]
analyses["OmegaPiPi"]["DM2_1992_I339265" ] = ["d02-x01-y01"]
analyses['OmegaPiPi']["ND_1991_I321108" ] = ["d14-x01-y01"]
# 5 pion
analyses["5Pi"]["CMD2_2000_I532970" ] = ["d03-x01-y01"]
analyses["5Pi"]["BABAR_2007_I758568" ] = ["d01-x01-y01"]
analyses['5Pi']["ND_1991_I321108" ] = ["d14-x01-y01"]
analyses['5Pi']["GAMMAGAMMA_1981_I158474" ] = ["d01-x01-y03"]
analyses["5Pi"]["BABAR_2018_I1700745" ] = ["d01-x01-y01"]
analyses["5Pi"]["BESIII_2021_I1929314" ] = ["d01-x01-y07"]
# 2K 1 pi
analyses["2K1Pi"]["BABAR_2008_I765258" ] = ["d01-x01-y01","d02-x01-y01"]
analyses["2K1Pi"]["BABAR_2017_I1511276" ] = ["d01-x01-y01"]
analyses["2K1Pi"]["BESII_2008_I801208" ] = ["d01-x01-y01"]
analyses["2K1Pi"]["BESIII_2018_I1691798"] = ["d01-x01-y01"]
analyses["2K1Pi"]["BESIII_2022_I2033007"] = ["d01-x01-y01","d03-x01-y01","d04-x01-y01"]
analyses["2K1Pi"]["DM1_1982_I176801" ] = ["d01-x01-y01"]
analyses["2K1Pi"]["DM2_1991_I318558" ] = ["d01-x01-y01","d02-x01-y01"]
analyses["2K1Pi"]["SND_2018_I1637194" ] = ["d01-x01-y01"]
analyses["2K1Pi"]["SND_2020_I1806118" ] = ["d01-x01-y01"]
# phi pi
analyses["PhiPi"]["BABAR_2008_I765258" ] = ["d02-x01-y01","d03-x01-y01"]
analyses["PhiPi"]["BABAR_2017_I1511276" ] = ["d01-x01-y01","d02-x01-y01"]
analyses["PhiPi"]["BESIII_2022_I2033007"] = ["d01-x01-y01","d02-x01-y01"]
analyses["PhiPi"]["SND_2020_I1806118" ] = ["d02-x01-y01"]
# 2K 2 pi
analyses["2K2Pi"]["BABAR_2005_I676691" ] = ["d02-x01-y01"]
analyses["2K2Pi"]["BABAR_2007_I747875" ] = ["d01-x01-y01","d02-x01-y01","d03-x01-y01",
"d04-x01-y01","d05-x01-y01","d07-x01-y01"]
analyses["2K2Pi"]["BABAR_2012_I892684" ] = ["d01-x01-y01","d02-x01-y01","d03-x01-y01",
"d04-x01-y01","d05-x01-y01",
"d06-x01-y01","d07-x01-y01"]
analyses["2K2Pi"]["BABAR_2014_I1287920" ] = ["d09-x01-y01","d10-x01-y01","d11-x01-y01"]
analyses["2K2Pi"]["BABAR_2017_I1511276" ] = ["d03-x01-y01","d04-x01-y01"]
analyses["2K2Pi"]["BABAR_2017_I1591716" ] = ["d01-x01-y01","d02-x01-y01"]
analyses['2K2Pi']["BESII_2007_I750713" ] = ["d01-x01-y04"]
analyses["2K2Pi"]["BESII_2008_I801210" ] = ["d01-x01-y02"]
analyses["2K2Pi"]["BESII_2008_I801208" ] = ["d01-x01-y02"]
analyses['2K2Pi']["BESIII_2018_I1699641"] = ["d01-x01-y01","d02-x01-y01"]
analyses['2K2Pi']["BESIII_2020_I1775344"] = ["d01-x01-y01","d02-x01-y01","d03-x01-y01",
"d04-x01-y01","d05-x01-y01","d06-x01-y01"]
analyses['2K2Pi']["BESIII_2021_I1997451"] = ["d01-x01-y01"]
analyses["2K2Pi"]["BELLE_2009_I809630" ] = ["d01-x01-y01"]
analyses["2K2Pi"]["CMD3_2016_I1395968" ] = ["d01-x01-y01"]
analyses['2K2Pi']["CMD3_2019_I1770428" ] = ["d01-x01-y06"]
analyses["2K2Pi"]["DM1_1982_I169382" ] = ["d01-x01-y01"]
analyses["2K2Pi"]["BESIII_2021_I1929314"] = ["d01-x01-y01"]
# 4K
analyses["4K"]["BESIII_2019_I1743841"] = ["d01-x01-y01","d02-x01-y01"]
analyses["4K"]["BESIII_2021_I1929314"] = ["d01-x01-y02"]
analyses["4K"]["BABAR_2005_I676691" ] = ["d03-x01-y01"]
analyses["4K"]["BABAR_2014_I1287920" ] = ["d12-x01-y01"]
analyses["4K"]["BABAR_2012_I892684" ] = ["d08-x01-y01"]
analyses["4K"]["BABAR_2007_I747875" ] = ["d07-x01-y01"]
analyses['4K']["BESII_2007_I750713" ] = ["d01-x01-y06","d01-x01-y07"]
# 6 mesons
analyses["6m"]["BESIII_2021_I1929314"] = ["d01-x01-y05","d01-x01-y06"]
analyses["6m"]["CMD3_2013_I1217420" ] = ["d01-x01-y01"]
analyses["6m"]["SND_2019_I1726419" ] = ["d01-x01-y01","d01-x01-y04"]
analyses["6m"]["CMD3_2017_I1606078" ] = ["d01-x01-y03","d01-x01-y04"]
analyses["6m"]["CMD3_2019_I1720610" ] = ["d01-x01-y01","d01-x01-y02","d01-x01-y03"]
analyses["6m"]["BABAR_2018_I1700745"] = ["d04-x01-y01","d05-x01-y01"]
analyses["6m"]["SND_2016_I1471515" ] = ["d01-x01-y06"]
analyses["6m"]["DM1_1981_I166353" ] = ["d01-x01-y01"]
analyses["6m"]["BABAR_2006_I709730" ] = ["d01-x01-y01","d02-x01-y01","d03-x01-y01"]
analyses["6m"]["BABAR_2007_I758568" ] = ["d05-x01-y01","d07-x01-y01",
"d08-x01-y01","d09-x01-y01","d10-x01-y01","d11-x01-y01"]
analyses["6m"]["BESII_2007_I763880" ] = ["d01-x01-y01","d01-x01-y02","d01-x01-y03","d01-x01-y04",
"d01-x01-y05","d01-x01-y06","d01-x01-y07"]
analyses["6m"]["BESII_2007_I762901" ] = ["d01-x01-y01","d01-x01-y02","d01-x01-y03","d01-x01-y04",
"d01-x01-y06","d01-x01-y07","d01-x01-y08","d01-x01-y09","d01-x01-y10"]
analyses["6m"]["CLEO_2006_I691720" ] = ["d01-x01-y02","d01-x01-y03","d01-x01-y04","d01-x01-y05",
"d01-x01-y07","d01-x01-y08","d01-x01-y09","d01-x01-y10","d01-x01-y11",
"d01-x01-y12","d01-x01-y13","d01-x01-y14","d01-x01-y15","d01-x01-y17"]
analyses["6m"]["BESII_2008_I801210" ] = ["d01-x01-y03","d01-x01-y04","d01-x01-y05"]
analyses["6m"]["BESII_2008_I801208" ] = ["d01-x01-y03","d01-x01-y04","d01-x01-y05","d01-x01-y06"]
analyses["6m"]["MARKI_1982_I169326" ] = ["d06-x01-y01"]
analyses["6m"]["MARKI_1975_I100592" ] = ["d01-x01-y01","d02-x01-y01"]
analyses['6m']["BESII_2007_I750713" ] = ["d01-x01-y08","d01-x01-y09","d01-x01-y11",
"d01-x01-y12","d01-x01-y13","d01-x01-y14",
"d01-x01-y15","d01-x01-y16","d01-x01-y17","d01-x01-y18"]
analyses['6m']["SND_2016_I1473343" ] = ["d01-x01-y01"]
analyses['6m']["BESIII_2020_I1788734"] = ["d01-x01-y01"]
analyses['6m']["BABAR_2021_I1844422" ] = ["d01-x01-y01","d02-x01-y01","d03-x01-y01",
"d04-x01-y01","d05-x01-y01","d06-x01-y01"]
analyses['6m']["BESIII_2020_I1837725" ] = ["d01-x01-y01"]
analyses["6m"]["BABAR_2021_I1938254"] = ["d01-x01-y01","d02-x01-y01","d03-x01-y01","d04-x01-y01","d05-x01-y01"]
analyses["6m"]["CMD3_2022_I2108984"] = ["d01-x01-y01","d02-x01-y01","d02-x01-y02"]
+analyses["6m"]["BABAR_2022_I2120528"] = ["d01-x01-y01","d02-x01-y01","d03-x01-y01","d04-x01-y01","d05-x01-y01",
+ "d06-x01-y01","d07-x01-y01","d08-x01-y01","d09-x01-y01","d10-x01-y01"]
# other baryon processes
analyses['Baryon']["BESIII_2017_I1509241" ] = ["d01-x01-y01"]
analyses['Baryon']["BESIII_2021_I1845443" ] = ["d01-x01-y01","d02-x01-y01"]
analyses['Baryon']["BESIII_2021_I1859248" ] = ["d01-x01-y01"]
analyses["Baryon"]["BESIII_2021_I1929314" ] = ["d01-x01-y04","d01-x01-y08"]
# DD
analyses["DD"]["BELLE_2007_I723333" ] = ["d01-x01-y01","d02-x01-y01"]
analyses["DD"]["BELLE_2007_I756012" ] = ["d01-x01-y01"]
analyses["DD"]["BELLE_2007_I756643" ] = ["d01-x01-y01"]
analyses["DD"]["BELLE_2008_I757220" ] = ["d01-x01-y01","d02-x01-y01"]
analyses["DD"]["BELLE_2008_I759073" ] = ["d01-x01-y01"]
analyses["DD"]["BELLE_2020_I1789775" ] = ["d01-x01-y01"]
analyses["DD"]["BELLE_2019_I1762826" ] = ["d01-x01-y01"]
analyses["DD"]["BABAR_2008_I776519" ] = ["d01-x01-y01","d01-x01-y02"]
analyses["DD"]["BELLE_2008_I791660" ] = ["d01-x01-y01"]
analyses["DD"]["BELLE_2013_I1225975" ] = ["d01-x01-y01"]
analyses["DD"]["BELLE_2014_I1282602" ] = ["d01-x01-y01"]
analyses["DD"]["BELLE_2015_I1324785" ] = ["d01-x01-y01"]
analyses["DD"]["BESIII_2022_I1989527" ] = ["d01-x01-y03","d02-x01-y03"]
analyses["DD"]["BESIII_2021_I1933191" ] = ["d01-x01-y03"]
analyses["DD"]["BESIII_2016_I1457597" ] = ["d01-x01-y07"]
analyses["DD"]["BESIII_2015_I1355215" ] = ["d01-x01-y10"]
analyses["DD"]["BESIII_2015_I1377204" ] = ["d01-x01-y10"]
analyses["DD"]["BESIII_2016_I1495838" ] = ["d01-x01-y01","d02-x01-y01"]
analyses["DD"]["CRYSTAL_BALL_1986_I238081"] = ["d02-x01-y01"]
analyses["DD"]["CLEOC_2008_I777917" ] = ["d01-x01-y01","d01-x01-y02","d01-x01-y03",
"d02-x01-y01","d02-x01-y02","d02-x01-y03",
"d03-x01-y01","d03-x01-y02","d03-x01-y03",
"d04-x01-y01","d04-x01-y02",
"d05-x01-y01","d05-x01-y02"]
analyses["DD"]["BELLE_2017_I1613517" ] = ["d01-x01-y01","d01-x01-y02"]
analyses["DD"]["BESIII_2014_I1323621" ] = ["d01-x01-y01"]
analyses["DD"]["BESIII_2015_I1406939" ] = ["d02-x01-y06","d03-x01-y06"]
analyses["DD"]["BESIII_2017_I1628093" ] = ["d01-x01-y01"]
analyses["DD"]["BESIII_2019_I1723934" ] = ["d01-x01-y01"]
analyses["DD"]["BESIII_2019_I1756876" ] = ["d01-x01-y09","d01-x01-y10"]
analyses["DD"]["BABAR_2007_I729388" ] = ["d01-x01-y01"]
analyses["DD"]["BESIII_2015_I1329785" ] = ["d01-x01-y08","d02-x01-y08","d03-x01-y08"]
analyses["DD"]["BESIII_2017_I1494065" ] = ["d01-x01-y01","d02-x01-y01"]
analyses["DD"]["BESIII_2017_I1596897" ] = ["d01-x01-y01"]
analyses["DD"]["BESIII_2018_I1653121" ] = ["d01-x01-y01","d01-x01-y02"]
analyses["DD"]["BESIII_2020_I1762922" ] = ["d01-x01-y01"]
analyses["DD"]["BESIII_2018_I1633425" ] = ["d01-x01-y01"]
analyses["DD"]["BESIII_2018_I1685535" ] = ["d01-x01-y01","d02-x01-y01"]
analyses["DD"]["BELLE_2011_I878228" ] = ["d01-x01-y01","d01-x01-y02","d01-x01-y03"]
analyses["DD"]["BABAR_2010_I864027" ] = ["d01-x01-y01","d01-x01-y02","d01-x01-y03"]
analyses["DD"]["BABAR_2009_I815035" ] = ["d01-x01-y01","d01-x01-y02","d01-x01-y03","d02-x01-y01"]
analyses["DD"]["BES_1999_I508349" ] = ["d01-x01-y01","d01-x01-y02","d01-x01-y03","d01-x01-y04"]
analyses["DD"]["BESIII_2020_I1795949" ] = ["d01-x01-y01","d02-x01-y01"]
analyses["DD"]["BESIII_2021_I1867196" ] = ["d01-x01-y01","d02-x01-y01","d03-x01-y01"]
# BB
analyses["BB"]["BELLE_2008_I764099" ] = ["d01-x01-y01","d02-x01-y01",
"d03-x01-y01","d04-x01-y01"]
analyses["BB"]["BELLE_2016_I1389855" ] = ["d01-x01-y02","d01-x01-y03"]
analyses["BB"]["BELLE_2021_I1859137" ] = ["d01-x01-y01","d01-x01-y02","d01-x01-y03"]
analyses["BB"]["CLEO_1991_I29927" ] = ["d01-x01-y01"]
analyses["BB"]["CLEO_1999_I474676" ] = ["d01-x01-y01","d01-x01-y02"]
analyses["BB"]["CUSB_1991_I325661" ] = ["d01-x01-y01"]
# hyperons
analyses["LL"]["BABAR_2007_I760730" ] = ["d01-x01-y01","d02-x01-y01","d03-x01-y01"]
analyses["LL"]["BESIII_2018_I1627871"] = ["d01-x01-y01"]
analyses["LL"]["BESIII_2019_I1726357"] = ["d01-x01-y01"]
analyses["LL"]["BESIII_2019_I1758883"] = ["d01-x01-y05"]
analyses["LL"]["BESIII_2020_I1814783"] = ["d01-x01-y01","d01-x01-y02",
"d02-x01-y01","d02-x01-y02"]
analyses["LL"]["BESIII_2020_I1823448"] = ["d01-x01-y04"]
analyses["LL"]["BESIII_2021_I1866233"] = ["d01-x01-y01"]
analyses["LL"]["BESIII_2021_I1940960"] = ["d01-x01-y01"]
analyses["LL"]["BESIII_2021_I1900124"] = ["d01-x01-y01"]
analyses["LL"]["DM2_1990_I297706" ] = ["d02-x01-y01"]
# list the analyses if required and quit()
allProcesses=False
if "All" in opts.processes :
allProcesses=True
processes = sorted(list(analyses.keys()))
else :
processes = sorted(list(set(opts.processes)))
if(opts.list) :
for process in processes :
print (" ".join(analyses[process]))
quit()
if(opts.plot) :
output=""
for process in processes:
for analysis in analyses[process] :
if(analysis=="CMD3_2019_I1770428") :
for iy in range(1,3) :
output+= " -m/%s/%s" % (analysis,"d02-x01-y0%s"%iy)
elif(analysis=="BES_1999_I508349") :
for ix in range(2,4) :
for iy in range(1,3) :
output+= " -m/%s/%s" % (analysis,"d0%s-x01-y0%s"%(ix,iy))
elif(analysis=="BESIII_2019_I1726357") :
for ix in range(2,4) :
output+= " -m/%s/%s" % (analysis,"d0%s-x01-y01"% ix)
elif(analysis=="BESIII_2020_I1775344") :
for ix in range(1,6) :
output+= " -m/%s/%s" % (analysis,"d07-x01-y0%s"% ix)
output+= " -m/%s/%s" % (analysis,"d08-x01-y0%s"% ix)
elif(analysis=="BESIII_2020_I1814783") :
for ix in range(1,3) :
output+= " -m/%s/%s" % (analysis,"d03-x01-y0%s"% ix)
elif(analysis=="SND_2020_I1809286") :
for ix in range(1,5) :
output+= " -m/%s/%s" % (analysis,"d04-x01-y0%s"% ix)
for plot in analyses[process][analysis]:
output+= " -m/%s/%s" % (analysis,plot)
print (output)
quit()
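# note: the "-m/ANALYSIS/HISTOGRAM" strings built above are match patterns; the
# assumption is that they are passed on to the Rivet plotting tools (e.g. via
# --match), the actual consumer of this output is not part of this script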
# mapping of process to the matrix element (ME) to use
me = { "PiPi" : "MEee2Pions",
"KK" : "MEee2Kaons",
"3Pi" : "MEee3Pions",
"4Pi" : "MEee4Pions",
"EtaPiPi" : "MEee2EtaPiPi",
"EtaprimePiPi" : "MEee2EtaPrimePiPi",
"EtaPhi" : "MEee2EtaPhi",
"EtaOmega" : "MEee2EtaOmega",
"OmegaPi" : "MEee2OmegaPi",
"OmegaPiPi" : "MEee2OmegaPiPi",
"PhiPi" : "MEee2PhiPi",
"PiGamma" : "MEee2PiGamma",
"EtaGamma" : "MEee2EtaGamma",
"PPbar" : "MEee2PPbar",
"LL" : "MEee2LL" ,
"2K1Pi" : "MEee2KKPi" }
# get the particle masses from Herwig
particles = { "pi+" : 0., "pi0" : 0. ,"eta" : 0. ,"eta'" : 0. ,"phi" : 0. ,"omega" : 0. ,"p+" : 0. ,"K+" : 0.}
for val in particles :
tempTxt = "get /Herwig/Particles/%s:NominalMass\nget /Herwig/Particles/%s:WidthLoCut\n" % (val,val)
with open("temp.in",'w') as f:
f.write(tempTxt)
p = subprocess.Popen(["../src/Herwig", "read","temp.in"],stdout=subprocess.PIPE)
vals = p.communicate()[0].split()
mass = float(vals[0])-float(vals[1])
particles[val]=mass
os.remove("temp.in")
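# each entry of 'particles' now holds NominalMass-WidthLoCut as read from Herwig,
# i.e. the lower edge of the mass distribution the generator will produce; this is
# what is used as the kinematic threshold for the processes below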
# minimum CMS energies for specific processes
minMass = { "PiPi" : 2.*particles["pi+"],
"KK" : 2.*particles["K+"],
"3Pi" : 2.*particles["pi+"]+particles["pi0"],
"4Pi" : 2.*particles["pi+"]+2.*particles["pi0"],
"EtaPiPi" : particles["eta"]+2.*particles["pi+"],
"EtaprimePiPi" : particles["eta'"]+2.*particles["pi+"],
"EtaPhi" : particles["phi"]+particles["eta"],
"EtaOmega" : particles["omega"]+particles["eta"],
"OmegaPi" : particles["omega"]+particles["pi0"],
"OmegaPiPi" : particles["omega"]+2.*particles["pi0"],
"PhiPi" : particles["phi"]+particles["pi0"],
"PiGamma" : particles["pi0"],
"EtaGamma" : particles["eta"],
"PPbar" : 2.*particles["p+"],
"LL" : 0.,
"2K1Pi" : 2.*particles["K+"]+particles["pi0"] }
# energies we need
energies={}
def nearestEnergy(en) :
Emin=0
delta=1e30
anals=[]
for val in energies :
if(abs(val-en)<delta) :
delta = abs(val-en)
Emin = val
anals=energies[val]
return (Emin,delta,anals)
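# nearestEnergy returns the closest energy already tabulated, its distance from
# the requested value and the corresponding [matrix elements, analyses] record;
# below, a reference point is merged into an existing entry when that distance
# is less than 1e-7, otherwise a new entry is created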
for process in processes:
if(process not in analyses) : continue
matrix=""
if( process in me ) :
matrix = me[process]
for analysis in analyses[process] :
aos=yoda.read(os.path.join(os.path.join(os.getcwd(),path),analysis+".yoda"))
if(len(aos)==0) : continue
for plot in analyses[process][analysis] :
histo = aos["/REF/%s/%s" %(analysis,plot)]
for point in histo.points() :
energy = point.x()
if(analysis=="KLOE_2009_I797438" or
analysis=="KLOE_2005_I655225" or
analysis=="KLOE2_2017_I1634981" or
analysis=="FENICE_1994_I377833") :
energy = math.sqrt(energy)
if(energy>200) :
energy *= 0.001
emin,delta,anals = nearestEnergy(energy)
if(energy in energies) :
if(analysis not in energies[energy][1]) :
energies[energy][1].append(analysis)
if(matrix!="" and matrix not in energies[energy][0] and
minMass[process]<=energy) :
energies[energy][0].append(matrix)
elif(delta<1e-7) :
if(analysis not in anals[1]) :
anals[1].append(analysis)
if(matrix!="" and matrix not in anals[0] and
minMass[process]<=energy) :
anals[0].append(matrix)
else :
if(matrix=="") :
energies[energy]=[[],[analysis]]
elif(minMass[process]<=energy) :
energies[energy]=[[matrix],[analysis]]
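# at this point 'energies' maps each centre-of-mass energy to a pair
#   [ list of matrix-element names , list of Rivet analysis names ]
# accumulated from all the reference data points above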
with open("python/LowEnergy-EE-Perturbative.in", 'r') as f:
templateText = f.read()
perturbative=Template( templateText )
with open("python/LowEnergy-EE-NonPerturbative.in", 'r') as f:
templateText = f.read()
nonPerturbative=Template( templateText )
targets=""
for energy in sorted(energies) :
anal=""
for analysis in energies[energy][1]:
anal+= "insert /Herwig/Analysis/Rivet:Analyses 0 %s\n" %analysis
proc=""
matrices = energies[energy][0]
if(allProcesses) : matrices = me.values()
for matrix in matrices:
proc+="insert SubProcess:MatrixElements 0 %s\n" % matrix
proc+="set %s:Flavour %s\n" % (matrix,opts.flavour)
maxflavour =5
if(energy<thresholds[1]) :
maxflavour=2
elif(energy<thresholds[2]) :
maxflavour=3
elif(energy<thresholds[3]) :
maxflavour=4
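    # maxflavour limits the number of quark flavours in the perturbative matrix
    # element, chosen from the CMS energy relative to the 'thresholds' array
    # defined earlier in the script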
# input file for perturbative QCD
if(opts.perturbative and energy> thresholds[0]) :
inputPerturbative = perturbative.substitute({"ECMS" : "%8.6f" % energy, "ANALYSES" : anal,
"lepton" : "", "maxflavour" : maxflavour, 'name' : "EE"})
with open(opts.dest+"/Rivet-LowEnergy-EE-Perturbative-%8.6f.in" % energy ,'w') as f:
f.write(inputPerturbative)
targets += "Rivet-LowEnergy-EE-Perturbative-%8.6f.yoda " % energy
# input file for currents
if(opts.nonPerturbative and proc!="") :
inputNonPerturbative = nonPerturbative.substitute({"ECMS" : "%8.6f" % energy, "ANALYSES" : anal,
"processes" : proc, 'name' : "EE"})
with open(opts.dest+"/Rivet-LowEnergy-EE-NonPerturbative-%8.6f.in" % energy ,'w') as f:
f.write(inputNonPerturbative)
targets += "Rivet-LowEnergy-EE-NonPerturbative-%8.6f.yoda " % energy
print (targets)
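# the space-separated list of expected .yoda outputs is printed so the caller can
# use them as targets; that the caller is the test Makefile is an assumption, it
# is not defined in this script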
