diff --git a/Handlers/StdXCombGroup.cc b/Handlers/StdXCombGroup.cc --- a/Handlers/StdXCombGroup.cc +++ b/Handlers/StdXCombGroup.cc @@ -1,390 +1,402 @@ // -*- C++ -*- // // StdXCombGroup.cc is a part of ThePEG - Toolkit for HEP Event Generation // Copyright (C) 1999-2019 Leif Lonnblad // Copyright (C) 2009-2019 Simon Platzer // // ThePEG is licenced under version 3 of the GPL, see COPYING for details. // Please respect the MCnet academic guidelines, see GUIDELINES for details. // // // This is the implementation of the non-inlined, non-templated member // functions of the StdXCombGroup class. // #include "StdXCombGroup.h" #include "ThePEG/MatrixElement/MEGroup.h" #include "ThePEG/Handlers/StandardEventHandler.h" #include "ThePEG/Handlers/SubProcessHandler.h" #include "ThePEG/Cuts/Cuts.h" #include "ThePEG/PDF/PartonExtractor.h" #include "ThePEG/Utilities/Debug.h" #include "ThePEG/Utilities/Maths.h" #include "ThePEG/PDT/ParticleData.h" #include "ThePEG/Persistency/PersistentOStream.h" #include "ThePEG/Persistency/PersistentIStream.h" #include "ThePEG/Utilities/SimplePhaseSpace.h" #include "ThePEG/Utilities/UtilityBase.h" #include "ThePEG/EventRecord/Particle.h" #include "ThePEG/EventRecord/SubProcessGroup.h" #include "ThePEG/Vectors/LorentzRotation.h" #include "ThePEG/MatrixElement/MEBase.h" #include "ThePEG/MatrixElement/ColourLines.h" #include "ThePEG/Handlers/LuminosityFunction.h" #include "ThePEG/Handlers/CascadeHandler.h" #include "ThePEG/Repository/EventGenerator.h" #include "ThePEG/EventRecord/TmpTransform.h" using namespace ThePEG; StdXCombGroup::StdXCombGroup(Energy newMaxEnergy, const cPDPair & inc, tEHPtr newEventHandler,tSubHdlPtr newSubProcessHandler, tPExtrPtr newExtractor, tCascHdlPtr newCKKW, const PBPair & newPartonBins, tCutsPtr newCuts, tMEGroupPtr newME, const DiagramVector & newDiagrams, bool mir, tStdXCombPtr newHead) : StandardXComb(newMaxEnergy,inc,newEventHandler,newSubProcessHandler, newExtractor, newCKKW, newPartonBins, newCuts, newME, newDiagrams, mir, newHead), theMEGroup(newME), theDependent(), theLastHeadCrossSection(ZERO) {} StdXCombGroup::StdXCombGroup() : StandardXComb(), theDependent() {} void StdXCombGroup::build(const PartonPairVec& allPBins) { for ( MEVector::const_iterator me = theMEGroup->dependent().begin(); me != theMEGroup->dependent().end(); ++me ) { vector dep = theMEGroup->makeDependentXCombs(this,diagrams().front()->partons(),*me,allPBins); copy(dep.begin(),dep.end(),back_inserter(theDependent)); } } StdXCombGroup::~StdXCombGroup() { } void StdXCombGroup::clean() { StandardXComb::clean(); theLastHeadCrossSection = ZERO; for ( vector::const_iterator dep = theDependent.begin(); dep != theDependent.end(); ++dep ) (**dep).clean(); } int StdXCombGroup::nDim() const { if ( meGroup()->willProject() ) return StandardXComb::nDim() + 1; return StandardXComb::nDim(); } CrossSection StdXCombGroup::dSigDR(const pair ll, int nr, const double * r) { if ( matrixElement()->keepRandomNumbers() ) { lastRandomNumbers().resize(nDim()); copy(r,r+nDim(),lastRandomNumbers().begin()); } pExtractor()->select(this); setPartonBinInfo(); lastP1P2(ll); lastS(sqr(maxEnergy())/exp(lastP1() + lastP2())); meMomenta().resize(mePartonData().size()); matrixElement()->setXComb(this); double rProject = 0.0; size_t shift = 0; if ( meGroup()->willProject() ) { rProject = r[partonDims.first]; shift = 1; } PPair partons; if ( !matrixElement()->haveX1X2() ) { if ( !pExtractor()->generateL(partonBinInstances(), r, r + nr - partonDims.second) ) { lastCrossSection(ZERO); return ZERO; } 
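// [Editor's note: illustrative sketch, not part of the patch. dSigDR() above and
//  below works in logarithmic momentum-fraction variables; assuming ThePEG's
//  convention l = ln(1/x) for PartonBinInstance::l(), the partonic invariants it
//  assembles reduce to the expressions below. All names here are hypothetical.]
#include <cmath>

// shat = s * exp(-(l1 + l2)) = s * x1 * x2,   y = 0.5 * (l2 - l1) = 0.5 * ln(x1/x2)
struct PartonKinematics { double shat; double y; };

PartonKinematics partonKinematics(double s, double l1, double l2) {
  PartonKinematics k;
  k.shat = s * std::exp(-(l1 + l2)); // invariant mass squared of the parton pair
  k.y    = 0.5 * (l2 - l1);          // rapidity of the partonic system
  return k;
}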
partons = make_pair(partonBinInstances().first->parton(), partonBinInstances().second->parton()); lastSHat(lastS()/exp(partonBinInstances().first->l() + partonBinInstances().second->l())); meMomenta()[0] = partons.first->momentum(); meMomenta()[1] = partons.second->momentum(); } else { if ( !matrixElement()->generateKinematics(r + partonDims.first + shift) ) { lastCrossSection(ZERO); return ZERO; } lastSHat((meMomenta()[0]+meMomenta()[1]).m2()); matrixElement()->setKinematics(); lastScale(matrixElement()->scale()); partons.first = mePartonData()[0]->produceParticle(meMomenta()[0]); partons.second = mePartonData()[1]->produceParticle(meMomenta()[1]); Direction<0> dir(true); partonBinInstances().first = new_ptr(PartonBinInstance(lastParticles().first,partons.first, partonBins().first,lastScale())); dir.reverse(); partonBinInstances().second = new_ptr(PartonBinInstance(lastParticles().second,partons.second, partonBins().second,lastScale())); } lastPartons(partons); if ( lastSHat() < cuts()->sHatMin() ) { lastCrossSection(ZERO); return ZERO; } lastY(0.5*(partonBinInstances().second->l() - partonBinInstances().first->l())); if ( !cuts()->initSubProcess(lastSHat(), lastY(), mirror()) ) { lastCrossSection(ZERO); return ZERO; } if ( mirror() ) swap(meMomenta()[0], meMomenta()[1]); if ( matrixElement()->wantCMS() && !matrixElement()->haveX1X2() ) SimplePhaseSpace::CMS(meMomenta()[0], meMomenta()[1], lastSHat()); Energy summ = ZERO; if ( meMomenta().size() == 3 ) { if ( !matrixElement()->haveX1X2() ) meMomenta()[2] = Lorentz5Momentum(sqrt(lastSHat())); } else { for ( int i = 2, N = meMomenta().size(); i < N; ++i ) { if ( !matrixElement()->haveX1X2() ) meMomenta()[i] = Lorentz5Momentum(mePartonData()[i]->mass()); summ += mePartonData()[i]->massMin(); } if ( sqr(summ) >= lastSHat() ) { lastCrossSection(ZERO); return ZERO; } } if ( !matrixElement()->haveX1X2() ) lastScale(max(lastSHat()/4.0, cuts()->scaleMin())); lastSHat(pExtractor()->generateSHat(lastS(), partonBinInstances(), r, r + nr - partonDims.second, matrixElement()->haveX1X2())); if ( !cuts()->sHat(lastSHat()) ) { lastCrossSection(ZERO); return ZERO; } r += partonDims.first + shift; lastX1X2(make_pair(lastPartons().first->momentum().plus()/ lastParticles().first->momentum().plus(), lastPartons().second->momentum().minus()/ lastParticles().second->momentum().minus())); if ( !cuts()->x1(lastX1()) || !cuts()->x2(lastX2()) ) { lastCrossSection(ZERO); return ZERO; } lastY((lastPartons().first->momentum() + lastPartons().second->momentum()).rapidity()); if ( !cuts()->yHat(lastY()) ) { lastCrossSection(ZERO); return ZERO; } if ( !cuts()->initSubProcess(lastSHat(), lastY(), mirror()) ) { lastCrossSection(ZERO); return ZERO; } meMomenta()[0] = lastPartons().first->momentum(); meMomenta()[1] = lastPartons().second->momentum(); if ( mirror() ) swap(meMomenta()[0], meMomenta()[1]); if ( matrixElement()->wantCMS() && !matrixElement()->haveX1X2() ) SimplePhaseSpace::CMS(meMomenta()[0], meMomenta()[1], lastSHat()); if ( meMomenta().size() == 3 ) { if ( !matrixElement()->haveX1X2() ) meMomenta()[2] = Lorentz5Momentum(sqrt(lastSHat())); } else { if ( sqr(summ) >= lastSHat() ) { lastCrossSection(ZERO); return ZERO; } } if ( !matrixElement()->haveX1X2() ) { if ( !matrixElement()->generateKinematics(r) ) { lastCrossSection(ZERO); return ZERO; } } lastScale(matrixElement()->scale()); if ( !cuts()->scale(lastScale()) ) { lastCrossSection(ZERO); return ZERO; } pair evalPDFS = make_pair(matrixElement()->havePDFWeight1(), matrixElement()->havePDFWeight2()); if ( 
mirror() ) swap(evalPDFS.first,evalPDFS.second); lastPDFWeight(pExtractor()->fullFn(partonBinInstances(), lastScale(), evalPDFS)); if ( lastPDFWeight() == 0.0 ) { lastCrossSection(ZERO); return ZERO; } matrixElement()->setKinematics(); CrossSection xsec = matrixElement()->dSigHatDR() * lastPDFWeight(); bool noHeadPass = !willPassCuts() || xsec == ZERO; if ( noHeadPass ) { xsec = ZERO; lastCrossSection(ZERO); } xsec *= cutWeight(); lastAlphaS(matrixElement()->alphaS()); lastAlphaEM(matrixElement()->alphaEM()); lastHeadCrossSection(xsec); CrossSection depxsec = ZERO; vector activeXCombs; for ( vector::const_iterator dep = theDependent.begin(); dep != theDependent.end(); ++dep ) { if ( noHeadPass && (**dep).matrixElement()->headCuts() ) continue; depxsec += (**dep).dSigDR(r + theMEGroup->dependentOffset((**dep).matrixElement())); if ( theMEGroup->groupReweighted() ) activeXCombs.push_back(*dep); } + CrossSection headxsec = xsec; + if ( xsec != ZERO && depxsec != ZERO ) { if ( abs(xsec) < abs(depxsec) ) { volatile double rw = (1.+depxsec/xsec); xsec = xsec*rw; } else { volatile double rw = (1.+xsec/depxsec); xsec = depxsec*rw; } } else if ( xsec == ZERO && depxsec != ZERO ) { xsec = depxsec; } lastCrossSection(xsec); if ( xsec != ZERO ) theMEGroup->lastEventStatistics(); matrixElement()->fillProjectors(); if ( !projectors().empty() ) { lastProjector(projectors().select(rProject)); } if ( theMEGroup->groupReweighted() && !activeXCombs.empty() ) { - xsec = theMEGroup->reweightHead(activeXCombs)*lastHeadCrossSection(); + headxsec = theMEGroup->reweightHead(activeXCombs)*lastHeadCrossSection(); + xsec = headxsec; depxsec = ZERO; for ( vector::const_iterator dep = activeXCombs.begin(); dep != activeXCombs.end(); ++dep ) { depxsec += theMEGroup->reweightDependent(*dep,activeXCombs)*(**dep).lastCrossSection(); } if ( xsec != ZERO ) { double rw = 1.0 + depxsec/xsec; xsec *= rw; } else { xsec = depxsec; } } subProcess(SubProPtr()); if ( CKKWHandler() && matrixElement()->maxMultCKKW() > 0 && matrixElement()->maxMultCKKW() > matrixElement()->minMultCKKW() ) { newSubProcess(theMEGroup->subProcessGroups()); CKKWHandler()->setXComb(this); - xsec *= CKKWHandler()->reweightCKKW(matrixElement()->minMultCKKW(), - matrixElement()->maxMultCKKW()); + double factor = CKKWHandler()->reweightCKKW(matrixElement()->minMultCKKW(), + matrixElement()->maxMultCKKW()); + xsec *= factor; + headxsec *= factor; + depxsec *= factor; } if ( matrixElement()->reweighted() ) { newSubProcess(theMEGroup->subProcessGroups()); - xsec *= matrixElement()->reWeight() * matrixElement()->preWeight(); + double factor = matrixElement()->reWeight() * matrixElement()->preWeight(); + xsec *= factor; + headxsec *= factor; + depxsec *= factor; } + if ( theMEGroup->discard(headxsec,depxsec) ) + return ZERO; + return xsec; } void StdXCombGroup::newSubProcess(bool) { StandardXComb::newSubProcess(theMEGroup->subProcessGroups()); if ( !theMEGroup->subProcessGroups() ) return; subProcess()->groupWeight(lastHeadCrossSection()/lastCrossSection()); Ptr::tptr group = dynamic_ptr_cast::tptr>(subProcess()); assert(group); for ( vector::iterator dep = theDependent.begin(); dep != theDependent.end(); ++dep ) { if ( (**dep).lastCrossSection() == ZERO ) continue; tSubProPtr ds; try { ds = (**dep).construct(); } catch(Veto&) { throw Exception() << "A veto was encountered while constructing a dependent sub process.\n" << "This situation should not have happened. Will veto the event." 
<< Exception::eventerror; } if ( ds ) group->add(ds); } } tSubProPtr StdXCombGroup::construct() { matrixElement()->setXComb(this); setPartonBinInfo(); matrixElement()->setKinematics(); newSubProcess(theMEGroup->subProcessGroups()); return subProcess(); } void StdXCombGroup::Init() {} void StdXCombGroup::persistentOutput(PersistentOStream & os) const { os << theDependent << theMEGroup << ounit(theLastHeadCrossSection,nanobarn); } void StdXCombGroup::persistentInput(PersistentIStream & is, int) { is >> theDependent >> theMEGroup >> iunit(theLastHeadCrossSection,nanobarn); } ClassDescription StdXCombGroup::initStdXCombGroup; diff --git a/LesHouches/LesHouchesEventHandler.cc b/LesHouches/LesHouchesEventHandler.cc --- a/LesHouches/LesHouchesEventHandler.cc +++ b/LesHouches/LesHouchesEventHandler.cc @@ -1,621 +1,646 @@ // -*- C++ -*- // // LesHouchesEventHandler.cc is a part of ThePEG - Toolkit for HEP Event Generation // Copyright (C) 1999-2019 Leif Lonnblad // // ThePEG is licenced under version 3 of the GPL, see COPYING for details. // Please respect the MCnet academic guidelines, see GUIDELINES for details. // // // This is the implementation of the non-inlined, non-templated member // functions of the LesHouchesEventHandler class. // #include "LesHouchesEventHandler.h" #include "LesHouchesReader.h" #include "ThePEG/Interface/ClassDocumentation.h" #include "ThePEG/Interface/RefVector.h" #include "ThePEG/Interface/Switch.h" #include "ThePEG/Interface/Parameter.h" #include "ThePEG/Repository/Repository.h" #include "ThePEG/Handlers/LuminosityFunction.h" #include "ThePEG/Handlers/XComb.h" #include "ThePEG/Handlers/CascadeHandler.h" #include "ThePEG/Cuts/Cuts.h" #include "ThePEG/PDF/PartonExtractor.h" #include "ThePEG/Utilities/LoopGuard.h" #include "ThePEG/EventRecord/Event.h" #include "ThePEG/EventRecord/Collision.h" #include "ThePEG/EventRecord/Step.h" #include "ThePEG/EventRecord/SubProcess.h" #include "ThePEG/Utilities/EnumIO.h" #include "ThePEG/Utilities/Maths.h" #include "ThePEG/Repository/UseRandom.h" #include "ThePEG/Utilities/Throw.h" #include "ThePEG/Utilities/Debug.h" #include "ThePEG/Persistency/PersistentOStream.h" #include "ThePEG/Persistency/PersistentIStream.h" using namespace ThePEG; LesHouchesEventHandler::~LesHouchesEventHandler() {} IBPtr LesHouchesEventHandler::clone() const { return new_ptr(*this); } IBPtr LesHouchesEventHandler::fullclone() const { return new_ptr(*this); } void LesHouchesEventHandler::doinit() { EventHandler::doinit(); for ( int i = 0, N = readers().size(); i < N; ++i ) { readers()[i]->init(); } ntries = 0; } void LesHouchesEventHandler::initialize() { if ( lumiFnPtr() ) Repository::clog() << "The LuminosityFunction '" << lumiFnPtr()->name() << "' assigned to the LesHouchesEventHandler '" << name() << "' will not be active in this run. Instead the incoming " << "particles will be determined by the used LesHouchesReader objects.\n" << Exception::warning; if ( readers().empty() ) throw LesHouchesInitError() << "No readers were defined for the LesHouchesEventHandler '" << name() << "'" << Exception::warning; // Go through all the readers and collect information about cross // sections and processes. 
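// [Editor's note: illustrative sketch, not part of the patch. It refers to
//  StdXCombGroup::dSigDR() in the hunk above: the new headxsec/depxsec
//  bookkeeping amounts to the arithmetic below, and the volatile-guarded
//  branch is numerically just head + dependent. Names are hypothetical.]
#include <cmath>

double combineGroupXSec(double head, double dep) {
  // Mirror of the branch in dSigDR(): both forms equal head + dep; the branch
  // only chooses which ratio is formed before the multiplication.
  if ( head != 0.0 && dep != 0.0 )
    return std::abs(head) < std::abs(dep) ? head * (1.0 + dep / head)
                                          : dep  * (1.0 + head / dep);
  return head != 0.0 ? head : dep;
}

// Common factors (the CKKW weight, reWeight()*preWeight()) now scale the head
// and dependent parts alike, so the split passed to the group's discard
// decision is preserved.
double applyCommonFactor(double factor, double & head, double & dep) {
  head *= factor;
  dep  *= factor;
  return combineGroupXSec(head, dep);
}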
typedef map ProcessMap; ProcessMap processes; PDPair incoming; Energy MaxEA = ZERO; Energy MaxEB = ZERO; weightnames.clear(); for ( int i = 0, N = readers().size(); i < N; ++i ) { LesHouchesReader & reader = *readers()[i]; reader.initialize(*this); if ( i == 0 ) weightnames = reader.optWeightsNamesFunc(); else if ( reader.optWeightsNamesFunc() != weightnames ) throw LesHouchesInitError() << "the optional weights names for the LesHouchesEventHandler do not match '" << name() << "'" << Exception::warning; // Check that the incoming particles are consistent between the // readers. if ( !incoming.first ) { incoming.first = getParticleData(reader.heprup.IDBMUP.first); if ( !incoming.first ) Throw() << "Unknown beam PID " << reader.heprup.IDBMUP.first << ". Have you created a matching BeamParticle object?" << Exception::runerror; } if ( !incoming.second ) { incoming.second = getParticleData(reader.heprup.IDBMUP.second); if ( !incoming.second ) Throw() << "Unknown beam PID " << reader.heprup.IDBMUP.first << ". Have you created a matching BeamParticle object?" << Exception::runerror; } if ( incoming.first->id() != reader.heprup.IDBMUP.first || incoming.second->id() != reader.heprup.IDBMUP.second ) Repository::clog() << "The different LesHouchesReader objects in the " << "LesHouchesEventHandler '" << name() << "' have different " << "types of colliding particles." << Exception::warning; MaxEA = max(MaxEA, reader.heprup.EBMUP.first*GeV); MaxEB = max(MaxEB, reader.heprup.EBMUP.second*GeV); // Check that the weighting of the events in the different readers // is consistent with the ones requested for this event // handler. Also collect the sum of the maximum weights. if ( reader.negativeWeights() && weightOption() > 0 ) throw LesHouchesInitError() << "The reader '" << reader.name() << "' contains negatively weighted events, " << "which is not allowed for the LesHouchesEventHandler '" << name() << "'." << Exception::warning; // Check that we do not have the same process numbers in different // readers. for ( int ip = 0; ip < reader.heprup.NPRUP; ++ip ) { if ( reader.heprup.LPRUP[ip] ) { ProcessMap::iterator pit = processes.find(reader.heprup.LPRUP[ip]); if ( pit == processes.end() ) processes[reader.heprup.LPRUP[ip]] = readers()[i]; else if ( warnPNum ) { Throw() << "In the LesHouchesEventHandler '" << name() << "', both the '" << pit->second->name() << "' and '" << reader.name() << "' contains sub-process number " << pit->first << ". This process may be double-counted in this run." << Exception::warning; } } } selector().insert(reader.stats.maxXSec(), i); } stats.maxXSec(selector().sum()); histStats.maxXSec(selector().sum()); for (map::iterator it= opt.begin(); it!=opt.end(); ++it) { (it->second).stats.maxXSec(selector().sum()); (it->second).histStats.maxXSec(selector().sum()); } // Check that we have any cross section at all. if ( stats.maxXSec() <= ZERO ) throw LesHouchesInitError() << "The sum of the cross sections of the readers in the " << "LesHouchesEventHandler '" << name() << "' was zero." << Exception::warning; // We now create a LuminosityFunction object to inform others about // the energy of the beam. 
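// [Editor's note: illustrative sketch, not part of the patch. The selector
//  filled above with reader.stats.maxXSec() picks a reader with probability
//  proportional to its overestimated cross section; a stand-alone version of
//  that selection, with hypothetical names, looks like this.]
#include <cstddef>
#include <vector>

std::size_t selectProportional(const std::vector<double> & weights, double rnd) {
  // rnd is a uniform random number in [0,1); the weights need not be normalised.
  double sum = 0.0;
  for ( double w : weights ) sum += w;
  double target = rnd * sum;
  double running = 0.0;
  for ( std::size_t i = 0; i < weights.size(); ++i ) {
    running += weights[i];
    if ( target < running ) return i;
  }
  return weights.empty() ? 0 : weights.size() - 1; // guard against rounding
}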
theIncoming = incoming;
  lumiFn(new_ptr(LuminosityFunction(MaxEA, MaxEB)));
}

void LesHouchesEventHandler::doinitrun() {
  EventHandler::doinitrun();
  stats.reset();
  histStats.reset();
  weightnames.clear();
  for ( int i = 0, N = readers().size(); i < N; ++i ) {
    readers()[i]->initrun();
    LesHouchesReader & reader = *readers()[i];
    reader.initialize(*this);
    if ( i == 0 )
      weightnames = reader.optWeightsNamesFunc();
    else if ( reader.optWeightsNamesFunc() != weightnames )
      throw LesHouchesInitError()
	<< "the optional weights names for the LesHouchesEventHandler do not match '"
	<< name() << "'" << Exception::warning;
  }
  for(unsigned int ww = 0; ww < weightnames.size(); ++ww) {
    const OptWeight optweight = { XSecStat(), ZERO, XSecStat() };
    opt.insert(make_pair(weightnames[ww], optweight));
  }
  ntries = 0;
}

EventPtr LesHouchesEventHandler::generateEvent() {
  LoopGuard loopGuard(*this, maxLoop());
  while ( true ) {
    loopGuard();
    currentReader(readers()[selector().select(UseRandom::current())]);
    skipEvents();
    currentReader()->reset();
    double weight = currentReader()->getEvent();
    if ( weightOption() == unitweight && weight < 0.0 ) weight = 0.0;
    if ( weightOption() == unitweight || weightOption() == unitnegweight ) {
      CrossSection newmax = selector().reweight(weight);
      if ( newmax > CrossSection() ) increaseMaxXSec(newmax);
    }
    select(weight/currentReader()->preweight);
    histStats.select(weight);
    if ( !weighted() ) {
      if ( weightOption() == unitweight || weightOption() == unitnegweight ) {
	if ( !rndbool(abs(weight)) ) continue;
	weight = Math::sign(1.0, weight);
      }
      else if ( weight == 0.0 ) continue;
    }
    else if ( weight == 0.0 ) continue;
    accept();
    // Divide by the bias introduced by the preweights in the reader.
    weight /= currentReader()->preweight;
    try {
      theLastXComb = currentReader()->getXComb();
      // fact for weight normalization
      const double fact = theNormWeight ? double(selector().sum()/picobarn) : 1.;
-     currentEvent(new_ptr(Event(lastParticles(), this, generator()->runName(),
+     //whether to use the LHE event number or not for the event identification
+     if(UseLHEEvent==0 || currentReader()->LHEEventNum() == -1) {
+       currentEvent(new_ptr(Event(lastParticles(), this, generator()->runName(),
                                   generator()->currentEventNumber(), weight*fact )));
+     }
+     else if(UseLHEEvent==1 && currentReader()->LHEEventNum() != -1) {
+       currentEvent(new_ptr(Event(lastParticles(), this, generator()->runName(),
+                                  currentReader()->LHEEventNum(), weight*fact )));
+     }
+
      currentEvent()->optionalWeights() = currentReader()->optionalEventWeights();
      // normalize the optional weights
      for(map<string,double>::iterator it = currentEvent()->optionalWeights().begin();
	  it != currentEvent()->optionalWeights().end(); ++it)
	it->second *= fact;
      performCollision();
      if ( !currentCollision() ) throw Veto();
      return currentEvent();
    }
    catch (Veto) {
      reject(weight);
    }
    catch (Stop) {
    }
    catch (Exception &) {
      reject(weight);
      throw;
    }
  }
}

void LesHouchesEventHandler::skipEvents() {
  if ( weightOption() == 2 || weightOption() == -2 ) return; //does it make sense to skip events if we are using varying weights?
  // Don't do this for readers which seem to generate events on the fly.
  if ( currentReader()->active() || currentReader()->NEvents() <= 0 ) return;
  // Estimate the fraction of the total events available from
  // currentReader() which will be requested.
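// [Editor's note: illustrative sketch, not part of the patch. The new
//  EventNumbering option used in generateEvent() above reduces to this
//  decision; an LHE event number of -1 means none was found in the file,
//  in which case the incremental generator counter is used. Hypothetical names.]
long chooseEventNumber(int useLHEEvent, long lheNumber, long generatorNumber) {
  if ( useLHEEvent == 1 && lheNumber != -1 ) return lheNumber; // number from the LHE file
  return generatorNumber;                                      // incremental numbering
}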
double frac = currentReader()->stats.maxXSec()/stats.maxXSec(); if ( stats.accepted() > 0 ) frac *= double(stats.attempts())/double(stats.accepted()); else frac *= double(stats.attempts() + 1); double xscan = generator()->N()*frac/currentReader()->NEvents(); // Estimate the number of times we need to go through the events for // the currentReader(), and how many events on average we need to // skip for each attempted event to go through the file an integer // number of times. double nscan = ceil(xscan); double meanskip = nscan/xscan - 1.0; // Skip an average numer of steps with a Poissonian distribution. currentReader()-> skip(UseRandom::rndPoisson(meanskip)%currentReader()->NEvents()); } void LesHouchesEventHandler::select(double weight) { stats.select(weight); currentReader()->select(weight); for (map::const_iterator it = currentReader()->optionalEventWeights().begin(); it != currentReader()->optionalEventWeights().end(); ++it) { const double w = it->second; OptWeight & o = opt[it->first]; o.histStats.select(w); o.stats.select(w); } } tCollPtr LesHouchesEventHandler::performCollision() { lastExtractor()->select(lastXCombPtr()); if ( CKKWHandler() ) CKKWHandler()->setXComb(lastXCombPtr()); currentCollision(new_ptr(Collision(lastParticles(), currentEvent(), this))); if ( currentEvent() ) currentEvent()->addCollision(currentCollision()); currentStep(new_ptr(Step(currentCollision(), this))); currentCollision()->addStep(currentStep()); currentStep()->addSubProcess(currentReader()->getSubProcess()); lastExtractor()->constructRemnants(lastXCombPtr()->partonBinInstances(), subProcess(), currentStep()); if ( !currentReader()->cuts().passCuts(*currentCollision()) ) throw Veto(); initGroups(); if ( ThePEG_DEBUG_ITEM(1) ) { if ( currentEvent() ) generator()->logfile() << *currentEvent(); else generator()->logfile() << *currentCollision(); } return continueCollision(); } EventPtr LesHouchesEventHandler::continueEvent() { try { continueCollision(); } catch (Veto) { const double fact = theNormWeight ? double(selector().sum()/picobarn) : 1.; reject(currentEvent()->weight()/fact); } catch (Stop) { } catch (Exception &) { const double fact = theNormWeight ? double(selector().sum()/picobarn) : 1.; reject(currentEvent()->weight()/fact); throw; } return currentEvent(); } void LesHouchesEventHandler::dofinish() { EventHandler::dofinish(); if ( selector().compensating() ) generator()->log() << "Warning: The run was ended while the LesHouchesEventHandler '" << name() << "' was still trying to compensate for weights larger than 1. " << "The cross section estimates may therefore be statistically " << "inaccurate." << endl; } void LesHouchesEventHandler::statistics(ostream & os) const { if ( statLevel() == 0 ) return; string line = "=======================================" "=======================================\n"; if ( stats.accepted() <= 0 ) { os << line << "No events generated by event handler '" << name() << "'." 
<< endl; return; } os << line << "Statistics for Les Houches event handler \'" << name() << "\':\n" << " " << "generated number of Cross-section\n" << " " << " events attempts (nb)\n"; os << line << "Total:" << setw(42) << stats.accepted() << setw(13) << stats.attempts() << setw(17) << ouniterr(stats.xSec(), stats.xSecErr(), nanobarn) << endl << line; if ( statLevel() == 1 ) return; if ( statLevel() == 2 ) { os << "Per Les Houches Reader breakdown:\n"; for ( int i = 0, N = readers().size(); i < N; ++i ) { LesHouchesReader & reader = *readers()[i]; string n = reader.name(); n.resize(37, ' '); os << n << setw(11) << reader.stats.accepted() << setw(13) << reader.stats.attempts() << setw(17) << ouniterr(reader.stats.xSec(), reader.stats.xSecErr(), nanobarn) << endl; } os << line; } else { os << "Per Les Houches Reader (and process #) breakdown:\n"; for ( int i = 0, N = readers().size(); i < N; ++i ) { LesHouchesReader & reader = *readers()[i]; string n = reader.name() + " (all)"; n.resize(37, ' '); os << n << setw(11) << reader.stats.accepted() << setw(13) << reader.stats.attempts() << setw(17) << ouniterr(reader.stats.xSec(), reader.stats.xSecErr(), nanobarn) << endl; CrossSection xsectot = reader.stats.xSec(); if ( xsectot != ZERO ) xsectot /= reader.stats.sumWeights(); typedef LesHouchesReader::StatMap::const_iterator const_iterator; for ( const_iterator i = reader.statmap.begin(); i != reader.statmap.end(); ++i ) { ostringstream ss; ss << reader.name() << " (" << i->first << ")"; string n = ss.str(); n.resize(37, ' '); os << n << setw(11) << i->second.accepted() << setw(13) << i->second.attempts() << setw(17) << ouniterr(i->second.sumWeights()*xsectot, sqrt(i->second.sumWeights2())*xsectot, nanobarn) << endl; } os << line; } } string warn = "Warning: Result may be statistically incorrect since\n" " the following LesHouchesReaders were oversampled:\n"; for ( int i = 0, N = readers().size(); i < N; ++i ) { LesHouchesReader & reader = *readers()[i]; if ( reader.NEvents() > 0 && reader.stats.attempts() > reader.NEvents() ) { os << warn; warn = ""; os << "'" << reader.name() << "' (by a factor " << double(reader.stats.attempts())/double(reader.NEvents()) << ")" << endl; } } } void LesHouchesEventHandler::increaseMaxXSec(CrossSection maxxsec) { stats.maxXSec(selector().sum()); histStats.maxXSec(selector().sum()); currentReader()->increaseMaxXSec(maxxsec); } void LesHouchesEventHandler::accept() { ntries++; stats.accept(); histStats.accept(); currentReader()->accept(); for (map::iterator it = opt.begin(); it!=opt.end(); ++it) { OptWeight & tmp = it->second; tmp.histStats.accept(); tmp.stats.accept(); } } void LesHouchesEventHandler::reject(double w) { ntries++; stats.reject(w); histStats.reject(w); currentReader()->reject(w); for (map::const_iterator it = currentReader()->optionalEventWeights().begin(); it != currentReader()->optionalEventWeights().end(); ++it) { const double w = it->second; OptWeight & o = opt[it->first]; o.histStats.reject(w); o.stats.reject(w); } } const map & LesHouchesEventHandler::optintegratedXSecMap() const { static map result; result.clear(); for ( map::const_iterator it= opt.begin(); it!=opt.end(); ++it ) { result[it->first] = ( it->second.stats.sumWeights() / it->second.stats.attempts() ) * picobarn; } return result; } CrossSection LesHouchesEventHandler::histogramScale() const { return histStats.xSec()/histStats.sumWeights(); } CrossSection LesHouchesEventHandler::integratedXSec() const { return histStats.xSec(); } CrossSection 
LesHouchesEventHandler::integratedXSecErr() const { return histStats.xSecErr(); } int LesHouchesEventHandler::ntriesinternal() const { return stats.attempts(); } void LesHouchesEventHandler::persistentOutput(PersistentOStream & os) const { os << stats << histStats << theReaders << theSelector << oenum(theWeightOption) << theUnitTolerance << theCurrentReader << warnPNum - << theNormWeight; + << theNormWeight << UseLHEEvent; } void LesHouchesEventHandler::persistentInput(PersistentIStream & is, int) { is >> stats >> histStats >> theReaders >> theSelector >> ienum(theWeightOption) >> theUnitTolerance >> theCurrentReader >> warnPNum - >> theNormWeight; + >> theNormWeight >> UseLHEEvent; } ClassDescription LesHouchesEventHandler::initLesHouchesEventHandler; // Definition of the static class description member. void LesHouchesEventHandler::setUnitTolerance(double x) { theUnitTolerance = x; selector().tolerance(unitTolerance()); } void LesHouchesEventHandler::Init() { static ClassDocumentation documentation ("This is the main class administrating the selection of hard " "subprocesses from a set of ThePEG::LesHouchesReader objects."); static RefVector interfaceLesHouchesReaders ("LesHouchesReaders", "Objects capable of reading events from an event file or an " "external matrix element generator.", &LesHouchesEventHandler::theReaders, -1, false, false, true, false, false); static Switch interfaceWeightOption ("WeightOption", "The different ways to weight events in the Les Houches event handler. " "Whether weighted or not and whether or not negative weights are allowed.", &LesHouchesEventHandler::theWeightOption, unitweight, true, false); static SwitchOption interfaceWeightOptionUnitWeight (interfaceWeightOption, "UnitWeight", "All events have unit weight.", unitweight); static SwitchOption interfaceWeightOptionNegUnitWeight (interfaceWeightOption, "NegUnitWeight", "All events have weight +1 or maybe -1.", unitnegweight); static SwitchOption interfaceWeightOptionVarWeight (interfaceWeightOption, "VarWeight", "Events may have varying but positive weights.", varweight); static SwitchOption interfaceWeightOptionVarNegWeight (interfaceWeightOption, "VarNegWeight", "Events may have varying weights, both positive and negative.", varnegweight); static Switch interfaceWarnPNum ("WarnPNum", "Warn if the same process number is used in more than one " "LesHouchesReader.", &LesHouchesEventHandler::warnPNum, true, true, false); static SwitchOption interfaceWarnPNumWarning (interfaceWarnPNum, "Warning", "Give a warning message.", true); static SwitchOption interfaceWarnPNumNoWarning (interfaceWarnPNum, "NoWarning", "Don't give a warning message.", false); static Parameter interfaceUnitTolerance ("UnitTolerance", "If the WeightOption is set to unit weight, do not start compensating unless the a weight is found to be this much larger than unity.", &LesHouchesEventHandler::theUnitTolerance, 1.0e-6, 0.0, 0, true, false, Interface::lowerlim, &LesHouchesEventHandler::setUnitTolerance, (double(LesHouchesEventHandler::*)()const)(0), (double(LesHouchesEventHandler::*)()const)(0), (double(LesHouchesEventHandler::*)()const)(0), (double(LesHouchesEventHandler::*)()const)(0)); static Switch interfaceWeightNormalization ("WeightNormalization", "How to normalize the output weights", &LesHouchesEventHandler::theNormWeight, 0, false, false); static SwitchOption interfaceWeightNormalizationUnit (interfaceWeightNormalization, "Normalized", "Standard normalization, i.e. 
+/- for unweighted events", 0); static SwitchOption interfaceWeightNormalizationCrossSection (interfaceWeightNormalization, "CrossSection", "Normalize the weights to the max cross section in pb", 1); + static Switch interfaceEventNumbering + ("EventNumbering", + "How to number the events", + &LesHouchesEventHandler::UseLHEEvent, 0, false, false); + static SwitchOption interfaceEventNumberingIncremental + (interfaceEventNumbering, + "Incremental", + "Standard incremental numbering (i.e. as they are generated)", + 0); + static SwitchOption interfaceEventNumberingLHE + (interfaceEventNumbering, + "LHE", + "Corresponding to the LHE event number", + 1); + + interfaceLesHouchesReaders.rank(10); interfaceWeightOption.rank(9); } diff --git a/LesHouches/LesHouchesEventHandler.h b/LesHouches/LesHouchesEventHandler.h --- a/LesHouches/LesHouchesEventHandler.h +++ b/LesHouches/LesHouchesEventHandler.h @@ -1,474 +1,479 @@ // -*- C++ -*- // // LesHouchesEventHandler.h is a part of ThePEG - Toolkit for HEP Event Generation // Copyright (C) 1999-2019 Leif Lonnblad // // ThePEG is licenced under version 3 of the GPL, see COPYING for details. // Please respect the MCnet academic guidelines, see GUIDELINES for details. // #ifndef THEPEG_LesHouchesEventHandler_H #define THEPEG_LesHouchesEventHandler_H // // This is the declaration of the LesHouchesEventHandler class. // #include "ThePEG/Handlers/EventHandler.h" #include "LesHouchesEventHandler.fh" #include "LesHouchesReader.fh" #include "ThePEG/Utilities/CompSelector.h" #include "ThePEG/Utilities/XSecStat.h" namespace ThePEG { /** * The LesHouchesEventHandler inherits from the general EventHandler * class and administers the reading of events generated by external * matrix element generator programs according to the Les Houches * accord. * * The class has a list of LesHouchesReaders which * typically are connected to files with event data produced by * external matrix element generator programs. When an event is * requested by LesHouchesEventHandler, one of the readers are chosen, * an event is read in and then passed to the different * StepHandler defined in the underlying * EventHandler class. * * @see \ref LesHouchesEventHandlerInterfaces "The interfaces" * defined for LesHouchesEventHandler. */ class LesHouchesEventHandler: public EventHandler { public: /** * A vector of LesHouchesReader objects. */ typedef vector ReaderVector; /** * A selector of readers. */ typedef CompSelector ReaderSelector; /** * Enumerate the weighting options. */ enum WeightOpt { unitweight = 1, /**< All events have unit weight. */ unitnegweight = -1, /**< All events have wight +/- 1. */ varweight = 2, /**< Varying positive weights. */ varnegweight = -2 /**< Varying positive or negative weights. */ }; public: /** @name Standard constructors and destructors. */ //@{ /** * The default constructor. */ LesHouchesEventHandler() : theWeightOption(unitweight), theUnitTolerance(1.0e-6), warnPNum(true), - theNormWeight(0) + theNormWeight(0), UseLHEEvent(0) { selector().tolerance(unitTolerance()); } /** * The destructor. */ virtual ~LesHouchesEventHandler(); //@} public: /** @name Initialization and finalization functions. */ //@{ /** * Initialize this event handler and all related objects needed to * generate events. */ virtual void initialize(); /** * Write out accumulated statistics about intergrated cross sections * and stuff. */ virtual void statistics(ostream &) const; /** * Histogram scale. 
A histogram bin which has been filled with the * weights associated with the Event objects should be scaled by * this factor to give the correct cross section. */ virtual CrossSection histogramScale() const; /** * The estimated total integrated cross section of the processes * generated in this run. * @return 0 if no integrated cross section could be estimated. */ virtual CrossSection integratedXSec() const; /** * The number of attempts inside the statistics object */ virtual int ntriesinternal() const; /** * The estimated error in the total integrated cross section of the * processes generated in this run. * @return 0 if no integrated cross section error could be estimated. */ virtual CrossSection integratedXSecErr() const; /** * Map to aid the calculation of the optional weights' integrated cross section */ virtual const map & optintegratedXSecMap() const; //@} /** @name Functions used for the actual generation */ //@{ /** * Generate an event. */ virtual EventPtr generateEvent(); /** * Create the Event and Collision objects. Used by the * generateEvent() function. */ virtual tCollPtr performCollision(); /** * Continue generating an event if the generation has been stopped * before finishing. */ virtual EventPtr continueEvent(); //@} /** @name Functions to manipulate statistics. */ //@{ /** * An event has been selected. Signal that an event has been * selected with the given \a weight. If unit weights are requested, * the event will be accepted with that weight. This also takes care * of the statistics collection of the selected reader object. */ void select(double weight); /** * Accept the current event, taking care of the statistics * collection of the corresponding reader objects. */ void accept(); /** * Reject the current event, taking care of the statistics * collection of the corresponding reader objects. */ void reject(double weight); /** * Increase the overestimated cross section for the selected reader. */ void increaseMaxXSec(CrossSection maxxsec); /** * Skip some events. To ensure a reader file is scanned an even * number of times, skip a number of events for the selected reader. */ void skipEvents(); //@} /** @name Simple access functions. */ //@{ /** * The way weights are to be treated. */ WeightOpt weightOption() const { return theWeightOption; } /** * If the weight option is set to unit weight, do not start * compensating unless the weight is this much larger than unity. */ double unitTolerance() const { return theUnitTolerance; } /** * Access the list of readers. */ const ReaderVector & readers() const { return theReaders; } /** * The selector to choose readers according to their overestimated * cross section. */ const ReaderSelector & selector() const { return theSelector; } /** * The currently selected reader object. */ tLesHouchesReaderPtr currentReader() const { return theCurrentReader; } /** * Set the currently selected reader object. */ void currentReader(tLesHouchesReaderPtr x) { theCurrentReader = x; } //@} public: /** @name Functions used by the persistent I/O system. */ //@{ /** * Function used to write out object persistently. * @param os the persistent output stream written to. */ void persistentOutput(PersistentOStream & os) const; /** * Function used to read in object persistently. * @param is the persistent input stream read from. * @param version the version number of the object when written. */ void persistentInput(PersistentIStream & is, int version); //@} /** * The standard Init function used to initialize the interfaces. 
* Called exactly once for each class by the class description system * before the main function starts or * when this class is dynamically loaded. */ static void Init(); /** * The currently selected reader object. */ tLesHouchesReaderPtr theCurrentReader; protected: /** @name Clone Methods. */ //@{ /** * Make a simple clone of this object. * @return a pointer to the new object. */ virtual IBPtr clone() const; /** Make a clone of this object, possibly modifying the cloned object * to make it sane. * @return a pointer to the new object. */ virtual IBPtr fullclone() const; //@} protected: /** @name Standard Interfaced functions. */ //@{ /** * Initialize this object after the setup phase before saving an * EventGenerator to disk. * @throws InitException if object could not be initialized properly. */ virtual void doinit(); /** * Initialize this object. Called in the run phase just before * a run begins. */ virtual void doinitrun(); /** * Finalize this object. Called in the run phase just after a * run has ended. Used eg. to write out statistics. */ virtual void dofinish(); //@} protected: /** * Access the list of readers. */ ReaderVector & readers() { return theReaders; } /** * The selector to choose readers according to their overestimated * cross section. */ ReaderSelector & selector() { return theSelector; } /** * Helper function for the interface; */ void setUnitTolerance(double); /** * Collect statistics for this event handler. */ XSecStat stats; /** * Collect statistics for this event handler. To be used for * histogram scaling. */ XSecStat histStats; /** * The weight identifiers for the events */ vector weightnames; /** * Collect statistics for this event handler's optional weights. */ struct OptWeight { /** * Collect statistics for the optional weights */ XSecStat stats; /** * Calculate the cross section for the optional weights */ CrossSection xs; /** * Collect statistics for this event handler's optional weights. To be used for * histogram scaling. */ XSecStat histStats; }; /** Map statistics to weight name strings */ map opt; /** * Counter for the number of tries for the purpose of statistics */ int ntries; /** * Return the optional weights' statistics */ const map & optWeights() const { return opt; } private: /** * The list of readers. */ ReaderVector theReaders; /** * The selector to choose readers according to their overestimated * cross section. */ ReaderSelector theSelector; /** * The way weights are to be treated. */ WeightOpt theWeightOption; /** * If the weight option is set to unit weight, do not start * compensating unless the weight is this much larger than unity. */ double theUnitTolerance; /** * Warn if the same process number is used in more than one * LesHouchesReader. */ bool warnPNum; /** * How to normalize the weights */ unsigned int theNormWeight; + /** + * How to number the events + */ + unsigned int UseLHEEvent; + public: /** @cond EXCEPTIONCLASSES */ /** * Exception class used if no readers were assigned. */ class LesHouchesInitError: public InitException {}; /** * Exception class used if the same process number is used by more * than ne reader. */ class LesHouchesPNumException: public InitException {}; /** @endcond */ private: /** * The static object used to initialize the description of this class. * Indicates that this is a concrete class with persistent data. */ static ClassDescription initLesHouchesEventHandler; /** * The assignment operator is private and must never be called. * In fact, it should not even be implemented. 
*/ LesHouchesEventHandler & operator=(const LesHouchesEventHandler &) = delete; }; } // CLASSDOC OFF #include "ThePEG/Utilities/ClassTraits.h" namespace ThePEG { /** @cond TRAITSPECIALIZATIONS */ /** This template specialization informs ThePEG about the * base classes of LesHouchesEventHandler. */ template <> struct BaseClassTrait { /** Typedef of the first base class of LesHouchesEventHandler. */ typedef EventHandler NthBase; }; /** This template specialization informs ThePEG about the name of * the LesHouchesEventHandler class and the shared object where it is defined. */ template <> struct ClassTraits : public ClassTraitsBase { /** Return a platform-independent class name */ static string className() { return "ThePEG::LesHouchesEventHandler"; } /** Return the name of the shared library be loaded to get access to * the LesHouchesEventHandler class and every other class it uses * (except the base class). */ static string library() { return "LesHouches.so"; } }; /** @endcond */ } #endif /* THEPEG_LesHouchesEventHandler_H */ diff --git a/LesHouches/LesHouchesFileReader.cc b/LesHouches/LesHouchesFileReader.cc --- a/LesHouches/LesHouchesFileReader.cc +++ b/LesHouches/LesHouchesFileReader.cc @@ -1,950 +1,966 @@ // -*- C++ -*- // // LesHouchesFileReader.cc is a part of ThePEG - Toolkit for HEP Event Generation // Copyright (C) 1999-2019 Leif Lonnblad // // ThePEG is licenced under version 3 of the GPL, see COPYING for details. // Please respect the MCnet academic guidelines, see GUIDELINES for details. // // // This is the implementation of the non-inlined, non-templated member // functions of the LesHouchesFileReader class. // #include "LesHouchesFileReader.h" #include "ThePEG/Interface/ClassDocumentation.h" #include "ThePEG/Interface/Reference.h" #include "ThePEG/Interface/Switch.h" #include "ThePEG/Interface/Parameter.h" #include "ThePEG/Utilities/Throw.h" #include "ThePEG/PDT/DecayMode.h" #include "ThePEG/Persistency/PersistentOStream.h" #include "ThePEG/Persistency/PersistentIStream.h" #include #include using namespace ThePEG; LesHouchesFileReader:: LesHouchesFileReader(const LesHouchesFileReader & x) : LesHouchesReader(x), neve(x.neve), ieve(0), LHFVersion(x.LHFVersion), outsideBlock(x.outsideBlock), headerBlock(x.headerBlock), initComments(x.initComments), initAttributes(x.initAttributes), eventComments(x.eventComments), eventAttributes(x.eventAttributes), theFileName(x.theFileName), theQNumbers(x.theQNumbers), theIncludeFxFxTags(x.theIncludeFxFxTags), theIncludeCentral(x.theIncludeCentral), theDecayer(x.theDecayer) {} LesHouchesFileReader::~LesHouchesFileReader() {} IBPtr LesHouchesFileReader::clone() const { return new_ptr(*this); } IBPtr LesHouchesFileReader::fullclone() const { return new_ptr(*this); } bool LesHouchesFileReader::preInitialize() const { return true; } void LesHouchesFileReader::doinit() { LesHouchesReader::doinit(); //cout << "theDecayer->fullName() = " << theDecayer->fullName() << endl; //if(theDecayer) { // cout << "DECAYER RETURNS TRUE" << endl; // } // are we using QNUMBERS if(!theQNumbers) return; // parse the header block and create // any new particles needed in QNUMBERS blocks string block = headerBlock; string line = ""; bool readingSLHA = false; int (*pf)(int) = tolower; unsigned int newNumber(0); do { line = StringUtils::car(block,"\r\n"); block = StringUtils::cdr(block,"\r\n"); if(line[0]=='#') continue; // are we reading the SLHA block if(readingSLHA) { // reached the end of slha block ? 
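// [Editor's note: illustrative sketch, not part of the patch. The QNUMBERS
//  parsing that continues below reads, after the "BLOCK QNUMBERS <pdg> # <name>"
//  header, four "index value" lines: 1 = three times the electric charge,
//  2 = 2S+1, 3 = the colour representation, 4 = whether a distinct antiparticle
//  exists. A stripped-down parse of one such line, with hypothetical names:]
#include <sstream>
#include <string>

struct QNumbers { int charge = 0, spin = 0, colour = 0, anti = 0; };

bool parseQNumbersLine(const std::string & line, QNumbers & q) {
  std::istringstream is(line);
  int index = 0, value = 0;
  if ( !(is >> index >> value) ) return false;
  switch ( index ) {
    case 1: q.charge = value; break;
    case 2: q.spin   = value; break;
    case 3: q.colour = value; break;
    case 4: q.anti   = value; break;
    default: return false;
  }
  return true;
}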
if(line.find(" split = StringUtils::split(line,"#"); // check for a qnumbers block transform(split[0].begin(), split[0].end(), split[0].begin(), pf); // if not contine if(split[0].find("block qnumbers")==string::npos) continue; // get name from comment string name; if(split.size()>=2) { name = StringUtils::stripws(split[1]); } else { ++newNumber; ostringstream tname; tname << "NP" << newNumber; name = tname.str(); } // extract the PDG code split = StringUtils::split(split[0]," "); istringstream is(split[2]); long PDGCode(0); is >> PDGCode; // get the charge, spin, colour and whether an antiparticle int charge(0),spin(0),colour(0),anti(0); for(unsigned int ix=0;ix<4;++ix) { line = StringUtils::car(block,"\r\n"); block = StringUtils::cdr(block,"\r\n"); int dummy[2]; istringstream is(line); is >> dummy[0] >> dummy[1]; switch (dummy[0]) { case 1: charge = dummy[1]; break; case 2: spin = dummy[1]; break; case 3: colour = dummy[1]; break; case 4: anti = dummy[1]; break; default: assert(false); } } // check if particles already exist PDPair newParticle; newParticle.first = getParticleData(PDGCode); if(newParticle.first) Throw() << "Particle with PDG code " << PDGCode << " whose creation was requested in a QNUMBERS Block" << " already exists. Retaining the original particle" << Exception::warning; if(anti) { newParticle.second = getParticleData(-PDGCode); if(newParticle.second) Throw() << "Anti-particle with PDG code " << -PDGCode << " whose creation was requested in a QNUMBERS Block" << " already exists. Retaining the original particle" << Exception::warning; if(( newParticle.first && !newParticle.second ) || ( newParticle.second && !newParticle.first ) ) Throw() << "Either particle or anti-particle with PDG code " << PDGCode << " whose creation was requested in a QNUMBERS Block" << " already exists, but not both the particle and antiparticle. " << " Something dodgy here stopping" << Exception::runerror; } // already exists continue if(newParticle.first) continue; // create the particles // particle with no anti particle if( anti == 0 ) { // construct the name if(name=="") { ostringstream temp; temp << PDGCode; name = temp.str(); } // create the ParticleData object newParticle.first = ParticleData::Create(PDGCode,name); } // particle anti-particle pair else { // construct the names string nameAnti; if(name=="") { ostringstream temp; temp << PDGCode; name = temp.str(); ostringstream temp2; temp << -PDGCode; nameAnti = temp2.str(); } else { nameAnti=name; for(string::iterator it=nameAnti.begin();it!=nameAnti.end();++it) { if(*it=='+') nameAnti.replace(it,it+1,"-"); else if(*it=='-') nameAnti.replace(it,it+1,"+"); } if(nameAnti==name) nameAnti += "bar"; } // create the ParticleData objects newParticle = ParticleData::Create(PDGCode,name,nameAnti); } // set the particle properties if(colour==1) colour = 0; newParticle.first->iColour(PDT::Colour(colour)); newParticle.first->iSpin (PDT::Spin (spin )); newParticle.first->iCharge(PDT::Charge(charge)); // register it generator()->preinitRegister(newParticle.first, "/Herwig/Particles/"+newParticle.first->PDGName()); // set the antiparticle properties if(newParticle.second) { if(colour==3||colour==6) colour *= -1; charge = -charge; newParticle.second->iColour(PDT::Colour(colour)); newParticle.second->iSpin (PDT::Spin (spin )); newParticle.second->iCharge(PDT::Charge(charge)); // register it generator()->preinitRegister(newParticle.second, "/Herwig/Particles/"+newParticle.second->PDGName()); } } // start of SLHA block ? 
else if(line.find("> id >> mass; // skip resetting masses on SM particles // as it can cause problems later on in event generation if(abs(id)<=6 || (abs(id)>=11 && abs(id)<=16) || abs(id)==23 || abs(id)==24) { // Throw() << "Standard model mass for PID " // << id // << " will not be changed." // << Exception::warning; block = StringUtils::cdr(block,"\r\n"); line = StringUtils::car(block,"\r\n"); continue; } // magnitude of mass for susy models mass = abs(mass); // set the mass tPDPtr particle = getParticleData(id); if(!particle) throw SetupException() << "LesHouchesFileReader::doinit() - Particle with PDG code not" << id << " not found." << Exception::runerror; const InterfaceBase * ifb = BaseRepository::FindInterface(particle, "NominalMass"); ostringstream os; os << mass; ifb->exec(*particle, "set", os.str()); // read the next line block = StringUtils::cdr(block,"\r\n"); line = StringUtils::car(block,"\r\n"); }; } // found a decay block else if(line.find("decay") == 0) { // get PGD code and width istringstream iss(line); string dummy; long parent(0); Energy width(ZERO); iss >> dummy >> parent >> iunit(width, GeV); // get the ParticleData object PDPtr inpart = getParticleData(parent); if(!inpart) { throw SetupException() << "LesHouchesFileReader::doinit() - A ParticleData object with the PDG code " << parent << " does not exist. " << Exception::runerror; return; } if ( abs(inpart->id()) == 6 || abs(inpart->id()) == 15 || abs(inpart->id()) == 23 || abs(inpart->id()) == 24 || abs(inpart->id()) == 25 ) { Throw() << "\n" "************************************************************************\n" "* Your LHE file changes the width of " << inpart->PDGName() << ".\n" "* This can cause serious problems in the event generation!\n" "************************************************************************\n" "\n" << Exception::warning; } else if (inpart->width() > ZERO && width <= ZERO) { Throw() << "\n" "************************************************************************\n" "* Your LHE file zeroes the non-zero width of " << inpart->PDGName() << ".\n" "* If " << inpart->PDGName() << " is a decaying SM particle,\n" "* this can cause serious problems in the event generation!\n" "************************************************************************\n" "\n" << Exception::warning; } // set the width inpart->width(width); if( width > ZERO ) { inpart->cTau(hbarc/width); inpart->widthCut(5.*width); inpart->stable(false); } // construct prefix for DecayModes string prefix(inpart->name() + "->"), tag(prefix),line(""); unsigned int nmode(0); // read any decay modes line = StringUtils::car(block,"\r\n"); while(line[0] != 'D' && line[0] != 'B' && line[0] != 'd' && line[0] != 'b' && line[0] != '<' && line != "") { // skip comments if(line[0] == '#') { block = StringUtils::cdr(block,"\r\n"); line = StringUtils::car(block,"\r\n"); continue; } // read decay mode and construct the tag istringstream is(line); double brat(0.); unsigned int nda(0),npr(0); is >> brat >> nda; while( true ) { long t; is >> t; if( is.fail() ) break; if( t == abs(parent) ) throw SetupException() << "An error occurred while read a decay of the " << inpart->PDGName() << ". One of its products has the same PDG code " << "as the parent particle in LesHouchesFileReader::doinit()." << " Please check the Les Houches file.\n" << Exception::runerror; tcPDPtr p = getParticleData(t); if( !p ) throw SetupException() << "LesHouchesFileReader::doinit() -" << " An unknown PDG code has been encounterd " << "while reading a decay mode. 
ID: " << t << Exception::runerror; ++npr; tag += p->name() + ","; } if( npr != nda ) throw SetupException() << "LesHouchesFileReader::doinit() - While reading a decay of the " << inpart->PDGName() << " from an SLHA file, an inconsistency " << "between the number of decay products and the value in " << "the 'NDA' column was found. Please check if the spectrum " << "file is correct.\n" << Exception::warning; // create the DecayMode if( npr > 1 ) { if( nmode==0 ) { generator()->preinitInterface(inpart, "VariableRatio" , "set","false"); if(inpart->massGenerator()) { ok = false; Throw() << inpart->PDGName() << " already has a MassGenerator set" << " this is incompatible with using QNUMBERS " << "Use\n" << "set " << inpart->fullName() << ":Mass_generator NULL\n" << "to fix this." << Exception::warning; } if(inpart->widthGenerator()) { ok = false; Throw() << inpart->PDGName() << " already has a WidthGenerator set" << " this is incompatible with using QNUMBERS " << "Use\n" << "set " << inpart->fullName() << ":Width_generator NULL\n" << "to fix this." << Exception::warning; } unsigned int ntemp=0; for(DecaySet::const_iterator dit = inpart->decayModes().begin(); dit != inpart->decayModes().end(); ++dit ) { if((**dit).on()) ++ntemp; } if(ntemp!=0) { ok = false; Throw() << inpart->PDGName() << " already has DecayModes" << " this is incompatible with using QNUMBERS " << "Use\n" << "do " << inpart->fullName() << ":SelectDecayModes none\n" << " to fix this." << Exception::warning; } } inpart->stable(false); tag.replace(tag.size() - 1, 1, ";"); DMPtr dm = generator()->findDecayMode(tag); if(!theDecayer) Throw() << "LesHouchesFileReader::doinit() Decayer must be set using the " << "LesHouchesFileReader:Decayer" << " must be set to allow the creation of new" << " decay modes." << Exception::runerror; if(!dm) { dm = generator()->preinitCreateDecayMode(tag); if(!dm) Throw() << "LesHouchesFileReader::doinit() - Needed to create " << "new decaymode but one could not be created for the tag " << tag << Exception::warning; } generator()->preinitInterface(dm, "Decayer", "set", theDecayer->fullName()); ostringstream br; br << setprecision(13) << brat; generator()->preinitInterface(dm, "BranchingRatio", "set", br.str()); generator()->preinitInterface(dm, "Active", "set", "Yes"); if(dm->CC()) { generator()->preinitInterface(dm->CC(), "BranchingRatio", "set", br.str()); generator()->preinitInterface(dm->CC(), "Active", "set", "Yes"); } ++nmode; } tag=prefix; // read the next line block = StringUtils::cdr(block,"\r\n"); line = StringUtils::car(block,"\r\n"); }; if(nmode>0) { inpart->update(); if(inpart->CC()) inpart->CC()->update(); } } } // start of SLHA block ? else if(line.find("() << "The file associated with '" << name() << "' does not contain a " << "proper formatted Les Houches event file. The events may not be " << "properly sampled." << Exception::warning; } //vector LesHouchesFileReader::optWeightNamesFunc() { return optionalWeightsNames; } vector LesHouchesFileReader::optWeightsNamesFunc() { return optionalWeightsNames; } void LesHouchesFileReader::open() { if ( filename().empty() ) throw LesHouchesFileError() << "No Les Houches file name. " << "Use 'set " << name() << ":FileName'." << Exception::runerror; cfile.open(filename()); if ( !cfile ) throw LesHouchesFileError() << "The LesHouchesFileReader '" << name() << "' could not open the " << "event file called '" << theFileName << "'." 
<< Exception::runerror; cfile.readline(); if ( !cfile.find(" attributes = StringUtils::xmlAttributes("LesHouchesEvents", cfile.getline()); LHFVersion = attributes["version"]; if ( LHFVersion.empty() ) return; bool readingHeader = false; bool readingInit = false; headerBlock = ""; // char (cwgtinfo_weights_info[250][15]); string hs; // int cwgtinfo_nn(0); // Loop over all lines until we hit the tag. bool readingInitWeights = false, readingInitWeights_sc = false; string weightinfo; while ( cfile.readline() ) { // found the init block for multiple weights if(cfile.find(""; erase_substr(sub, str_arrow); scalename = sub; break; } ++ws; } while (isc); /* now get the relevant information * e.g. scales or PDF sets used */ string startDEL = "'>"; //starting delimiter string stopDEL = ""; //end delimiter int firstLim = hs.find(startDEL); //find start of delimiter if(firstLim == -1) { startDEL = ">"; firstLim = hs.find(startDEL); } // unsigned lastLim = hs.find(stopDEL); //find end of delimitr string scinfo = hs.substr(firstLim); //define the information for the scale erase_substr(scinfo,stopDEL); erase_substr(scinfo,startDEL); scinfo = StringUtils::stripws(scinfo); /* fill in the map * indicating the information to be appended to each scale * i.e. scinfo for each scalname */ scalemap[scalename] = scinfo.c_str(); string str_id = "id="; string str_prime = "'"; erase_substr(scalename, str_id); erase_substr(scalename, str_prime); optionalWeightsNames.push_back(scalename); } } if ( cfile.find("") ) { //cout << "found init block" << endl; // We have hit the init block, so we should expect to find the // standard information in the following. But first check for // attributes. initAttributes = StringUtils::xmlAttributes("init", cfile.getline()); readingInit = true; cfile.readline(); if ( !( cfile >> heprup.IDBMUP.first >> heprup.IDBMUP.second >> heprup.EBMUP.first >> heprup.EBMUP.second >> heprup.PDFGUP.first >> heprup.PDFGUP.second >> heprup.PDFSUP.first >> heprup.PDFSUP.second >> heprup.IDWTUP >> heprup.NPRUP ) ) { heprup.NPRUP = -42; LHFVersion = ""; return; } heprup.resize(); for ( int i = 0; i < heprup.NPRUP; ++i ) { cfile.readline(); if ( !( cfile >> heprup.XSECUP[i] >> heprup.XERRUP[i] >> heprup.XMAXUP[i] >> heprup.LPRUP[i] ) ) { heprup.NPRUP = -42; LHFVersion = ""; return; } } } if ( cfile.find("> sub; if(we==2) { npLO = atoi(sub.c_str()); } if(we==5) { npNLO = atoi(sub.c_str()); } ++we; } while (ievat); optionalnpLO = npLO; optionalnpNLO = npNLO; std::stringstream npstringstream; npstringstream << "np " << npLO << " " << npNLO; std::string npstrings = npstringstream.str(); /* the FxFx merging information * becomes part of the optionalWeights, labelled -999 * for future reference */ if(theIncludeFxFxTags) optionalWeights[npstrings.c_str()] = -999; if ( !cfile.readline() ) return false; // The first line determines how many subsequent particle lines we // have. if ( !( cfile >> hepeup.NUP >> hepeup.IDPRUP >> hepeup.XWGTUP >> hepeup.SCALUP >> hepeup.AQEDUP >> hepeup.AQCDUP ) ) return false; hepeup.resize(); // Read all particle lines. 
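// [Editor's note: illustrative sketch, not part of the patch. Each particle
//  line read in the loop below follows the standard Les Houches HEPEUP layout:
//  id, status, two mothers, two colour lines, (px, py, pz, E, m), lifetime and
//  spin. A stand-alone parse of one such line, with hypothetical names:]
#include <array>
#include <sstream>
#include <string>

struct LHEParticle {
  long id = 0;
  int status = 0;
  std::array<int,2> mothers{{0, 0}};
  std::array<int,2> colours{{0, 0}};
  std::array<double,5> p{{0, 0, 0, 0, 0}};
  double lifetime = 0.0, spin = 0.0;
};

bool parseLHEParticle(const std::string & line, LHEParticle & part) {
  std::istringstream is(line);
  return static_cast<bool>( is >> part.id >> part.status
                               >> part.mothers[0] >> part.mothers[1]
                               >> part.colours[0] >> part.colours[1]
                               >> part.p[0] >> part.p[1] >> part.p[2]
                               >> part.p[3] >> part.p[4]
                               >> part.lifetime >> part.spin );
}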
for ( int i = 0; i < hepeup.NUP; ++i ) { if ( !cfile.readline() ) return false; if ( !( cfile >> hepeup.IDUP[i] >> hepeup.ISTUP[i] >> hepeup.MOTHUP[i].first >> hepeup.MOTHUP[i].second >> hepeup.ICOLUP[i].first >> hepeup.ICOLUP[i].second >> hepeup.PUP[i][0] >> hepeup.PUP[i][1] >> hepeup.PUP[i][2] >> hepeup.PUP[i][3] >> hepeup.PUP[i][4] >> hepeup.VTIMUP[i] >> hepeup.SPINUP[i] ) ) return false; //print momenta for debugging // cout << hepeup.PUP[i][0] << " " << hepeup.PUP[i][1] << " " << hepeup.PUP[i][2] << " " << hepeup.PUP[i][3] << " " << hepeup.PUP[i][4] << endl; if(std::isnan(hepeup.PUP[i][0])||std::isnan(hepeup.PUP[i][1])|| std::isnan(hepeup.PUP[i][2])||std::isnan(hepeup.PUP[i][3])|| std::isnan(hepeup.PUP[i][4])) throw Exception() << "nan's as momenta in Les Houches file " << Exception::eventerror; if(hepeup.MOTHUP[i].first -1==i || hepeup.MOTHUP[i].second-1==i) { throw Exception() << "Particle has itself as a mother in Les Houches " << "file, this is not allowed\n" << Exception::eventerror; } } + LHEeventnum = -1; // set the LHEeventnum to -1, this will be the default if the tag is not found + // Now read any additional comments and named weights. // read until the end of rwgt is found bool readingWeights = false, readingaMCFast = false, readingMG5ClusInfo = false; while ( cfile.readline() && !cfile.find("")) { if(cfile.find("> sub; if(wi==1) { string str_arrow = ">" ; erase_substr(sub, str_arrow); weightName = sub; } if(wi==2) weightValue = atof(sub.c_str()); ++wi; } while (iss); // store the optional weights found in the temporary map optionalWeightsTemp[weightName] = weightValue; } /* reading of aMCFast weights */ if(readingaMCFast) { std::stringstream amcfstringstream; amcfstringstream << "aMCFast " << cfile.getline(); std::string amcfstrings = amcfstringstream.str(); string str_newline = "\n"; erase_substr(amcfstrings,str_newline); optionalWeights[amcfstrings.c_str()] = -111; //for the aMCFast we give them a weight -111 for future identification } /* read additional MG5 Clustering information * used in LO merging */ if(readingMG5ClusInfo) { string hs = cfile.getline(); string startDEL = ""; //end delimiter unsigned firstLim = hs.find(startDEL); //find start of delimiter // unsigned lastLim = hs.find(stopDEL); //find end of delimitr string mg5clusinfo = hs.substr(firstLim); //define the information for the scale erase_substr(mg5clusinfo,stopDEL); erase_substr(mg5clusinfo,startDEL); string str_arrow = ">"; erase_substr(mg5clusinfo,str_arrow); string str_quotation = "\""; erase_substr(mg5clusinfo,str_quotation); optionalWeights[mg5clusinfo.c_str()] = -222; //for the mg5 scale info weights we give them a weight -222 for future identification } //store MG5 clustering information if(cfile.find("::const_iterator it=optionalWeightsTemp.begin(); it!=optionalWeightsTemp.end(); ++it){ //to avoid issues with inconsistencies of defining the weight ids, remove "" and '' string id_1 = it->first; erase_substr(id_1, str_quote); erase_substr(id_1, str_doublequote); for (map::const_iterator it2=scalemap.begin(); it2!=scalemap.end(); ++it2){ //find the scale id in the scale information and add this information string id_2 = it2->first; erase_substr(id_2, str_quote); erase_substr(id_2, str_doublequote); if(id_1==id_2) { string info = it2->second; string str_newline = "\n"; erase_substr(info, str_newline); //set the optional weights optionalWeights[info] = it->second; } } } /* additionally, we set the "central" scale * this is actually the default event weight */ string central = "central"; if 
(theIncludeCentral) optionalWeights[central] = hepeup.XWGTUP;
  if ( !cfile ) return false;
  return true;
}

void LesHouchesFileReader::close() {
  cfile.close();
}

void LesHouchesFileReader::persistentOutput(PersistentOStream & os) const {
  os << neve << LHFVersion << outsideBlock << headerBlock << initComments
     << initAttributes << eventComments << eventAttributes << theFileName
     << theQNumbers << theIncludeFxFxTags << theIncludeCentral << theDecayer;
}

void LesHouchesFileReader::persistentInput(PersistentIStream & is, int) {
  is >> neve >> LHFVersion >> outsideBlock >> headerBlock >> initComments
     >> initAttributes >> eventComments >> eventAttributes >> theFileName
     >> theQNumbers >> theIncludeFxFxTags >> theIncludeCentral >> theDecayer;
  ieve = 0;
}

ClassDescription<LesHouchesFileReader>
LesHouchesFileReader::initLesHouchesFileReader;
// Definition of the static class description member.

void LesHouchesFileReader::Init() {

  static ClassDocumentation<LesHouchesFileReader> documentation
    ("ThePEG::LesHouchesFileReader is a base class to be used for objects "
     "which read event files from matrix element generators. This class is "
     "able to read plain event files conforming to the Les Houches Event File "
     "accord.");

  static Parameter<LesHouchesFileReader,string> interfaceFileName
    ("FileName",
     "The name of a file containing events conforming to the Les Houches "
     "protocol to be read into ThePEG. A file name ending in "
     ".gz will be read from a pipe which uses "
     "zcat. If a file name ends in | the "
     "preceding string is interpreted as a command, the output of which "
     "will be read through a pipe.",
     &LesHouchesFileReader::theFileName, "", false, false);
  interfaceFileName.fileType();
  interfaceFileName.rank(11);

  static Switch<LesHouchesFileReader,bool> interfaceQNumbers
    ("QNumbers",
     "Whether or not to search for and read a QNUMBERS"
     " block in the header of the file.",
     &LesHouchesFileReader::theQNumbers, false, false, false);
  static SwitchOption interfaceQNumbersYes
    (interfaceQNumbers, "Yes", "Use QNUMBERS", true);
  static SwitchOption interfaceQNumbersNo
    (interfaceQNumbers, "No", "Don't use QNUMBERS", false);

  static Switch<LesHouchesFileReader,bool> interfaceIncludeFxFxTags
    ("IncludeFxFxTags",
     "Include FxFx tags",
     &LesHouchesFileReader::theIncludeFxFxTags, false, true, false);
  static SwitchOption interfaceIncludeFxFxTagsYes
    (interfaceIncludeFxFxTags, "Yes", "Use the FxFx tags", true);
  static SwitchOption interfaceIncludeFxFxTagsNo
    (interfaceIncludeFxFxTags, "No", "Don't use the FxFx tags", false);

  static Switch<LesHouchesFileReader,bool> interfaceIncludeCentral
    ("IncludeCentral",
     "Include definition of central weight",
     &LesHouchesFileReader::theIncludeCentral, false, true, false);
  static SwitchOption interfaceIncludeCentralYes
    (interfaceIncludeCentral, "Yes", "include definition of central weight", true);
  static SwitchOption interfaceIncludeCentralNo
    (interfaceIncludeCentral, "No", "Don't include definition of central weight", false);

  static Reference<LesHouchesFileReader,Decayer> interfaceDecayer
    ("Decayer",
     "Decayer to use for any decays read from the QNUMBERS Blocks",
     &LesHouchesFileReader::theDecayer, false, false, true, true, false);

}

void LesHouchesFileReader::erase_substr(std::string& subject, const std::string& search) {
  size_t pos = 0;
  while((pos = subject.find(search, pos)) != std::string::npos) {
    subject.erase( pos, search.length() );
  }
}

diff --git a/LesHouches/LesHouchesReader.cc b/LesHouches/LesHouchesReader.cc
--- a/LesHouches/LesHouchesReader.cc
+++ b/LesHouches/LesHouchesReader.cc
@@ -1,1594 +1,1595 @@
// -*- C++ -*-
//
// LesHouchesReader.cc is a part of ThePEG - Toolkit for HEP Event Generation
// Copyright (C) 1999-2019 Leif Lonnblad
//
// ThePEG is licenced
under version 3 of the GPL, see COPYING for details. // Please respect the MCnet academic guidelines, see GUIDELINES for details. // // // This is the implementation of the non-inlined, non-templated member // functions of the LesHouchesReader class. // #include "LesHouchesReader.h" #include "ThePEG/Interface/ClassDocumentation.h" #include "ThePEG/Interface/Parameter.h" #include "ThePEG/Interface/Reference.h" #include "ThePEG/Interface/RefVector.h" #include "ThePEG/Interface/Switch.h" #include "ThePEG/Interface/Command.h" #include "config.h" #include "ThePEG/Persistency/PersistentOStream.h" #include "ThePEG/Persistency/PersistentIStream.h" #include "ThePEG/PDF/PartonExtractor.h" #include "ThePEG/PDF/NoPDF.h" #include "ThePEG/Cuts/Cuts.h" #include "ThePEG/EventRecord/TmpTransform.h" #include "ThePEG/Utilities/UtilityBase.h" #include "ThePEG/Handlers/XComb.h" #include "ThePEG/Handlers/CascadeHandler.h" #include "LesHouchesEventHandler.h" #include "ThePEG/Utilities/Throw.h" #include "ThePEG/Utilities/HoldFlag.h" #include "ThePEG/Utilities/Debug.h" #include "ThePEG/Helicity/WaveFunction/SpinorWaveFunction.h" using namespace ThePEG; LesHouchesReader::LesHouchesReader(bool active) : theNEvents(0), position(0), reopened(0), theMaxScan(-1), scanning(false), isActive(active), theCacheFileName(""), doCutEarly(true), preweight(1.0), reweightPDF(false), doInitPDFs(false), theMaxMultCKKW(0), theMinMultCKKW(0), lastweight(1.0), maxFactor(1.0), optionalnpLO(0), optionalnpNLO(0), weightScale(1.0*picobarn), skipping(false), theMomentumTreatment(0), useWeightWarnings(true),theReOpenAllowed(true), theIncludeSpin(true) {} LesHouchesReader::LesHouchesReader(const LesHouchesReader & x) : HandlerBase(x), LastXCombInfo<>(x), heprup(x.heprup), hepeup(x.hepeup), inData(x.inData), inPDF(x.inPDF), outPDF(x.outPDF), thePartonExtractor(x.thePartonExtractor), thePartonBins(x.thePartonBins), theXCombs(x.theXCombs), theCuts(x.theCuts), theNEvents(x.theNEvents), position(x.position), reopened(x.reopened), theMaxScan(x.theMaxScan), scanning(false), isActive(x.isActive), theCacheFileName(x.theCacheFileName), doCutEarly(x.doCutEarly), stats(x.stats), statmap(x.statmap), thePartonBinInstances(x.thePartonBinInstances), reweights(x.reweights), preweights(x.preweights), preweight(x.preweight), reweightPDF(x.reweightPDF), doInitPDFs(x.doInitPDFs), theMaxMultCKKW(x.theMaxMultCKKW), theMinMultCKKW(x.theMinMultCKKW), lastweight(x.lastweight), maxFactor(x.maxFactor), weightScale(x.weightScale), xSecWeights(x.xSecWeights), maxWeights(x.maxWeights), skipping(x.skipping), theMomentumTreatment(x.theMomentumTreatment), useWeightWarnings(x.useWeightWarnings), theReOpenAllowed(x.theReOpenAllowed), theIncludeSpin(x.theIncludeSpin) {} LesHouchesReader::~LesHouchesReader() {} void LesHouchesReader::doinitrun() { HandlerBase::doinitrun(); stats.reset(); for ( StatMap::iterator i = statmap.begin(); i != statmap.end(); ++i ) i->second.reset(); open(); if ( cacheFileName().length() ) openReadCacheFile(); position = 0; reopened = 0; } bool LesHouchesReader::preInitialize() const { if ( HandlerBase::preInitialize() ) return true; if ( doInitPDFs && ! ( inPDF.first && inPDF.second ) ) return true; return false; } void LesHouchesReader::doinit() { HandlerBase::doinit(); open(); close(); if ( !heprup.IDBMUP.first || !heprup.IDBMUP.second ) Throw() << "No information about incoming particles were found in " << "LesHouchesReader '" << name() << "'." 
<< Exception::warning; inData = make_pair(getParticleData(heprup.IDBMUP.first), getParticleData(heprup.IDBMUP.second)); if ( heprup.EBMUP.first <= 0.0 || heprup.EBMUP.second <= 0.0 ) Throw() << "No information about the energy of incoming particles were found in " << "LesHouchesReader '" << name() << "'." << Exception::warning; if ( doInitPDFs && ! ( inPDF.first && inPDF.second ) ) { initPDFs(); if ( ! ( inPDF.first && inPDF.second ) ) Throw() << "LesHouchesReader '" << name() << "' could not create PDFBase objects in pre-initialization." << Exception::warning; } else if ( !inPDF.first || !inPDF.second ) Throw() << "No information about the PDFs of incoming particles were found in " << "LesHouchesReader '" << name() << "'." << Exception::warning; } void LesHouchesReader::initPDFs() { if ( inPDF.first && inPDF.second ) return; string remhname; if ( heprup.PDFSUP.first && !inPDF.first) { inPDF.first = dynamic_ptr_cast (generator()->preinitCreate("ThePEG::LHAPDF", fullName() + "/PDFA", "ThePEGLHAPDF.so")); if ( !inPDF.first ) { Throw() << "LesHouchesReader '" << name() << "' could not use information " << "about the PDFs used because the LHAPDF library was not properly " "defined." << Exception::warning; return; } remhname = fullName() + "/DummyRemH"; generator()->preinitCreate("ThePEG::NoRemnants", remhname); generator()->preinitInterface(inPDF.first, "RemnantHandler", "set", remhname); if ( heprup.PDFGUP.first > 0 && heprup.PDFGUP.first < 10 ) { ostringstream os; os << heprup.PDFGUP.first << " " << heprup.PDFSUP.first; generator()->preinitInterface(inPDF.first, "PDFLIBNumbers", "set", os.str()); } else { ostringstream os; os << heprup.PDFGUP.first*1000 + heprup.PDFSUP.first; generator()->preinitInterface(inPDF.first, "PDFNumber", "set", os.str()); } generator()->preinitInterface(inPDF.first, "RangeException", "newdef", "Freeze"); } if ( heprup.PDFSUP.second && !inPDF.second) { inPDF.second = dynamic_ptr_cast (generator()->preinitCreate("ThePEG::LHAPDF", fullName() + "/PDFB", "ThePEGLHAPDF.so")); if ( !inPDF.second ) { Throw() << "LesHouchesReader '" << name() << "' could not use information " << "about the PDFs used because the LHAPDF library was not properly " "defined." << Exception::warning; return; } if ( remhname == "" ) { remhname = fullName() + "/DummyRemH"; generator()->preinitCreate("ThePEG::NoRemnants", remhname); } generator()->preinitInterface(inPDF.second, "RemnantHandler", "set", remhname); if ( heprup.PDFGUP.second > 0 && heprup.PDFGUP.second < 10 ) { ostringstream os; os << heprup.PDFGUP.second << " " << heprup.PDFSUP.second; generator()->preinitInterface(inPDF.second, "PDFLIBNumbers", "set", os.str()); } else { ostringstream os; os << heprup.PDFGUP.second*1000 + heprup.PDFSUP.second; generator()->preinitInterface(inPDF.second, "PDFNumber", "set", os.str()); } generator()->preinitInterface(inPDF.second, "RangeException", "newdef", "Freeze"); } if ( ! ( inPDF.first && inPDF.second ) ) Throw() << "LesHouchesReader '" << name() << "' could not find information about the PDFs used." << Exception::warning; } void LesHouchesReader::initialize(LesHouchesEventHandler & eh) { Energy2 Smax = ZERO; double Y = 0.0; if ( !theCuts ) { theCuts = eh.cuts(); if ( !theCuts ) Throw() << "No Cuts object was assigned to the LesHouchesReader '" << name() << "' nor was one\nassigned to the controlling " << "LesHouchesEventHandler '" << eh.name() << "'.\nAt least one of them " << "needs to have a Cuts object." 
<< Exception::runerror; Smax = cuts().SMax(); Y = cuts().Y(); } theCKKW = eh.CKKWHandler(); if ( !partonExtractor() ) { thePartonExtractor = eh.partonExtractor(); if ( !partonExtractor() ) Throw() << "No PartonExtractor object was assigned to the LesHouchesReader '" << name() << "' nor was one\nassigned to the controlling " << "LesHouchesEventHandler '" << eh.name() << "'.\nAt least one of them " << "needs to have a PartonExtractor object." << Exception::runerror; } open(); Energy emax = 2.0*sqrt(heprup.EBMUP.first*heprup.EBMUP.second)*GeV; theCuts->initialize(sqr(emax), 0.5*log(heprup.EBMUP.first/heprup.EBMUP.second)); if ( Smax > ZERO && ( Smax != cuts().SMax() || Y != cuts().Y() ) ) Throw() << "The LesHouchesReader '" << name() << "' uses the same Cuts object " << "as another LesHouchesReader which has not got the same energies of " << "the colliding particles. For the generation to work properly " << "different LesHouchesReader object with different colliding particles " << "must be assigned different (although possibly identical) Cuts " << "objects." << Exception::warning; thePartonBins = partonExtractor()->getPartons(emax, inData, cuts()); for ( int i = 0, N = partonBins().size(); i < N; ++i ) { theXCombs[partonBins()[i]] = new_ptr(XComb(emax, inData, &eh, partonExtractor(), CKKWHandler(), partonBins()[i], theCuts)); partonExtractor()->nDims(partonBins()[i]); } outPDF = make_pair(partonExtractor()->getPDF(inData.first), partonExtractor()->getPDF(inData.second)); close(); if ( !heprup.IDWTUP && useWeightWarnings ) Throw() << "No information about the weighting scheme was found. The events " << "produced by LesHouchesReader " << name() << " may not be sampled correctly." << Exception::warning; if ( abs(heprup.IDWTUP) > 1 && !eh.weighted() && useWeightWarnings ) Throw() << "LesHouchesReader " << name() << " has the IDWTUP flag set to " << heprup.IDWTUP << " which is not supported by this reader, the " << "produced events may not be sampled correctly. It is up to " << "sub-classes of LesHouchesReader to correctly convert to match IDWTUP " << "+/- 1. Will try to make intelligent guesses to get " << "correct statistics.\nIn most cases this should be sufficient. " << "Unset WeightWarnings to avoid this message," << "or set Weighted to on." << Exception::warning; if ( heprup.IDWTUP != eh.weightOption() && abs(heprup.IDWTUP) < 3 && useWeightWarnings ) Throw() << "LesHouchesReader " << name() << " has the IDWTUP flag set to " << heprup.IDWTUP << ", which does not correspond\nto the weight option " << eh.weightOption() << " set in " << "the LesHouchesEventHandler " << eh.name() << ".\n\n" << "Use the following handler setting instead:\n" << " set " << eh.name() << ":WeightOption " << heprup.IDWTUP << "\nWill try to make intelligent guesses to get " << "correct statistics. In most cases this should be sufficient. " << "Unset WeightWarnings to avoid this message" << Exception::warning; scan(); initStat(); } long LesHouchesReader::scan() { open(); // Shall we write the events to a cache file for fast reading? If so // we write to a temporary file if the caches events should be // randomized. if ( cacheFileName().length() ) openWriteCacheFile(); // Keep track of the number of events scanned. long neve = 0; long cuteve = 0; bool negw = false; // If the open() has not already gotten information about subprocesses // and cross sections we have to scan through the events. if ( !heprup.NPRUP || cacheFile() || abs(heprup.IDWTUP) != 1 ) { // why scan if IDWTUP != 1? 
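    // Scan pass: accumulate, per process id (LPRUP), the number of events and
    // the sum and sum of squares of the event weights. When abs(IDWTUP) != 1
    // these are used below to estimate XSECUP ~ <w> = sum(w)/N and
    // XERRUP ~ sqrt((<w^2> - <w>^2)/N), while the largest |weight| seen per
    // process is stored as the new XMAXUP.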
    HoldFlag<> isScanning(scanning);
    double oldsum = 0.0;
    vector<int> lprup;
    vector<double> newmax;
    vector<long> oldeve;
    vector<long> neweve;
    vector<double> sumlprup;
    vector<double> sumsqlprup;
    vector<int> nscanned;
    for ( int i = 0; ( maxScan() < 0 || i < maxScan() ) && readEvent(); ++i ) {
      if ( !checkPartonBin() ) Throw()
        << "Found event in LesHouchesReader '" << name()
        << "' which cannot be handled by the assigned PartonExtractor '"
        << partonExtractor()->name() << "'." << Exception::runerror;
      vector<int>::iterator idit = find(lprup.begin(), lprup.end(), hepeup.IDPRUP);
      int id = lprup.size();
      if ( idit == lprup.end() ) {
        lprup.push_back(hepeup.IDPRUP);
        newmax.push_back(0.0);
        neweve.push_back(0);
        oldeve.push_back(0);
        sumlprup.push_back(0.);
        sumsqlprup.push_back(0.);
        nscanned.push_back(0);
      } else {
        id = idit - lprup.begin();
      }
      ++neve;
      ++oldeve[id];
      oldsum += hepeup.XWGTUP;
      sumlprup[id] += hepeup.XWGTUP;
      sumsqlprup[id] += sqr(hepeup.XWGTUP);
      ++nscanned[id];
      if ( cacheFile() ) {
        if ( eventWeight() == 0.0 ) {
          ++cuteve;
          continue;
        }
        cacheEvent();
      }
      ++neweve[id];
      newmax[id] = max(newmax[id], abs(eventWeight()));
      if ( eventWeight() < 0.0 ) negw = true;
    } //end of scanning events
    xSecWeights.resize(oldeve.size(), 1.0);
    for ( int i = 0, N = oldeve.size(); i < N; ++i )
      if ( oldeve[i] ) xSecWeights[i] = double(neweve[i])/double(oldeve[i]);
    if ( maxScan() < 0 || neve > NEvents() ) NEvents(neve - cuteve);
    if ( lprup.size() == heprup.LPRUP.size() ) {
      for ( int id = 0, N = lprup.size(); id < N; ++id ) {
        vector<int>::iterator idit =
          find(heprup.LPRUP.begin(), heprup.LPRUP.end(), hepeup.IDPRUP);
        if ( idit == heprup.LPRUP.end() ) {
          Throw()
            << "When scanning events, the LesHouchesReader '" << name()
            << "' found undeclared processes." << Exception::warning;
          heprup.NPRUP = 0;
          break;
        }
        int idh = idit - heprup.LPRUP.begin();
        heprup.XMAXUP[idh] = newmax[id];
      }
    }
    if ( heprup.NPRUP == 0 ) {
      // No heprup block was supplied or something went wrong.
      heprup.NPRUP = lprup.size();
      heprup.LPRUP.resize(lprup.size());
      heprup.XMAXUP.resize(lprup.size());
      for ( int id = 0, N = lprup.size(); id < N; ++id ) {
        heprup.LPRUP[id] = lprup[id];
        heprup.XMAXUP[id] = newmax[id];
      }
    }
    if ( abs(heprup.IDWTUP) != 1 ) {
      // Try to fix things if abs(heprup.IDWTUP) != 1.
      double sumxsec = 0.0;
      if(abs(heprup.IDWTUP)==3) {
        for ( int id = 0; id < heprup.NPRUP; ++id ) sumxsec += heprup.XSECUP[id];
      } else {
        for ( int id = 0; id < heprup.NPRUP; ++id ) {
          //set the cross section directly from the event weights read
          heprup.XSECUP[id] = sumlprup[id]/nscanned[id];
          heprup.XERRUP[id] = (sumsqlprup[id]/nscanned[id] -
                               sqr(sumlprup[id]/nscanned[id])) / nscanned[id];
          if(heprup.XERRUP[id] < 0.)
{ if( heprup.XERRUP[id]/(sumsqlprup[id]/nscanned[id])>-1e-10) heprup.XERRUP[id] = 0.; else { Throw() << "Negative error when scanning events in LesHouschesReader '" << name() << Exception::warning; heprup.XERRUP[id] = 0.; } } heprup.XERRUP[id] = sqrt( heprup.XERRUP[id] ); heprup.XMAXUP[id] = newmax[id]; sumxsec += heprup.XSECUP[id]; } } weightScale = picobarn*neve*sumxsec/oldsum; } } if ( cacheFile() ) closeCacheFile(); if ( negw ) heprup.IDWTUP = min(-abs(heprup.IDWTUP), -1); return neve; } void LesHouchesReader::setWeightScale(long) {} void LesHouchesReader::initStat() { stats.reset(); statmap.clear(); if ( heprup.NPRUP <= 0 ) return; double sumx = 0.0; xSecWeights.resize(heprup.NPRUP, 1.0); maxWeights.clear(); for ( int ip = 0; ip < heprup.NPRUP; ++ip ) { sumx = max(heprup.XMAXUP[ip]*xSecWeights[ip], sumx); statmap[heprup.LPRUP[ip]] = XSecStat(heprup.XMAXUP[ip]*weightScale*xSecWeights[ip]); maxWeights[heprup.LPRUP[ip]] = heprup.XMAXUP[ip]; } stats.maxXSec(sumx*weightScale); maxFactor = 1.0; } void LesHouchesReader::increaseMaxXSec(CrossSection maxxsec) { for ( int i = 0; i < heprup.NPRUP; ++i ) statmap[heprup.LPRUP[i]].maxXSec(statmap[heprup.LPRUP[i]].maxXSec()* maxxsec/stats.maxXSec()); maxFactor *= maxxsec/stats.maxXSec(); stats.maxXSec(maxxsec); } tXCombPtr LesHouchesReader::getXComb() { if ( lastXCombPtr() ) return lastXCombPtr(); fillEvent(); connectMothers(); tcPBPair sel = createPartonBinInstances(); tXCombPtr lastXC = xCombs()[sel]; // clean up the old XComb object before switching to a new one if ( theLastXComb && theLastXComb != lastXC ) theLastXComb->clean(); theLastXComb = lastXC; lastXCombPtr()->subProcess(SubProPtr()); lastXCombPtr()->setPartonBinInstances(partonBinInstances(), sqr(hepeup.SCALUP)*GeV2); lastXCombPtr()->lastAlphaS(hepeup.AQCDUP); lastXCombPtr()->lastAlphaEM(hepeup.AQEDUP); return lastXCombPtr(); } tSubProPtr LesHouchesReader::getSubProcess() { getXComb(); if ( subProcess() ) return subProcess(); lastXCombPtr()->subProcess(new_ptr(SubProcess(lastPartons(), tCollPtr(), this))); subProcess()->setOutgoing(outgoing().begin(), outgoing().end()); subProcess()->setIntermediates(intermediates().begin(), intermediates().end()); return subProcess(); } void LesHouchesReader::fillEvent() { if ( !particleIndex.empty() ) return; particleIndex.clear(); colourIndex.clear(); colourIndex(0, tColinePtr()); createParticles(); createBeams(); } void LesHouchesReader::reopen() { // If we didn't know how many events there were, we know now. if ( NEvents() <= 0 ) NEvents(position); ++reopened; // How large fraction of the events have we actually used? And how // large will we have used if we go through the file again? double frac = double(stats.attempts())/double(NEvents()); if ( frac*double(reopened + 1)/double(reopened) > 1.0 && NEvents() - stats.attempts() < generator()->N() - generator()->currentEventNumber() ) { if(theReOpenAllowed) generator()->logWarning(LesHouchesReopenWarning() << "Reopening LesHouchesReader '" << name() << "' after accessing " << stats.attempts() << " events out of " << NEvents() << Exception::warning); else throw LesHouchesReopenWarning() << "More events requested than available in LesHouchesReader " << name() << Exception::runerror; } if ( cacheFile() ) { closeCacheFile(); openReadCacheFile(); if ( !uncacheEvent() ) Throw() << "Could not reopen LesHouchesReader '" << name() << "'." << Exception::runerror; } else { close(); open(); if ( !readEvent() ) Throw() << "Could not reopen LesHouchesReader '" << name() << "'." 
<< Exception::runerror; } } void LesHouchesReader::reset() { particleIndex.clear(); colourIndex.clear(); if ( theLastXComb ) theLastXComb->clean(); theLastXComb = tXCombPtr(); } bool LesHouchesReader::readEvent() { reset(); if ( !doReadEvent() ) return false; // If we are just skipping event we do not need to reweight or do // anything fancy. if ( skipping ) { return true; } if ( cacheFile() && !scanning ) { return true; } // Reweight according to the re- and pre-weights objects in the // LesHouchesReader base class. lastweight = reweight(); if ( !reweightPDF && !cutEarly() ) return true; // We should try to reweight the PDFs or make early cuts here. fillEvent(); double x1 = incoming().first->momentum().plus()/ beams().first->momentum().plus(); if ( reweightPDF && inPDF.first && outPDF.first && inPDF.first != outPDF.first ) { if ( hepeup.XPDWUP.first <= 0.0 ) hepeup.XPDWUP.first = inPDF.first->xfx(inData.first, incoming().first->dataPtr(), sqr(hepeup.SCALUP*GeV), x1); double xf = outPDF.first->xfx(inData.first, incoming().first->dataPtr(), sqr(hepeup.SCALUP*GeV), x1); lastweight *= xf/hepeup.XPDWUP.first; hepeup.XPDWUP.first = xf; } double x2 = incoming().second->momentum().minus()/ beams().second->momentum().minus(); if ( reweightPDF && inPDF.second && outPDF.second && inPDF.second != outPDF.second ) { if ( hepeup.XPDWUP.second <= 0.0 ) hepeup.XPDWUP.second = inPDF.second->xfx(inData.second, incoming().second->dataPtr(), sqr(hepeup.SCALUP*GeV), x2); double xf = outPDF.second->xfx(inData.second, incoming().second->dataPtr(), sqr(hepeup.SCALUP*GeV), x2); lastweight *= xf/hepeup.XPDWUP.second; hepeup.XPDWUP.second = xf; } if ( cutEarly() ) { if ( !cuts().initSubProcess((incoming().first->momentum() + incoming().second->momentum()).m2(), 0.5*log(x1/x2)) ) lastweight = 0.0; tSubProPtr sub = getSubProcess(); TmpTransform tmp(sub, Utilities::getBoostToCM(sub->incoming())); if ( !cuts().passCuts(*sub) ) lastweight = 0.0; } return true; } double LesHouchesReader::getEvent() { if ( cacheFile() ) { if ( !uncacheEvent() ) reopen(); } else { if ( !readEvent() ) reopen(); } ++position; double max = maxWeights[hepeup.IDPRUP]*maxFactor; // normalize all the weights to the max weight for(map::iterator it=optionalWeights.begin(); it!=optionalWeights.end();++it) { it->second = (max != 0.0) ? it->second/max : 0.0; } return (max != 0.0) ? eventWeight()/max : 0.0; } void LesHouchesReader::skip(long n) { HoldFlag<> skipflag(skipping); while ( n-- ) getEvent(); } double LesHouchesReader::reweight() { preweight = 1.0; if ( reweights.empty() && preweights.empty() && !( CKKWHandler() && maxMultCKKW() > 0 && maxMultCKKW() > minMultCKKW() ) ) return 1.0; fillEvent(); getSubProcess(); for ( int i = 0, N = preweights.size(); i < N; ++i ) { preweights[i]->setXComb(lastXCombPtr()); preweight *= preweights[i]->weight(); } double weight = preweight; for ( int i = 0, N = reweights.size(); i < N; ++i ) { reweights[i]->setXComb(lastXCombPtr()); weight *= reweights[i]->weight(); } // If we are caching events we do not want to do CKKW reweighting. if ( cacheFile() ) return weight; if ( CKKWHandler() && maxMultCKKW() > 0 && maxMultCKKW() > minMultCKKW() ) { CKKWHandler()->setXComb(lastXCombPtr()); weight *= CKKWHandler()->reweightCKKW(minMultCKKW(), maxMultCKKW()); } return weight; } bool LesHouchesReader::checkPartonBin() { // First find the positions of the incoming partons. 
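  // Entries with status ISTUP == -9 are the beam particles and ISTUP == -1 the
  // incoming partons; the MOTHUP references are used to attach each incoming
  // parton to the correct beam side before matching against the PartonBin chains.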
pair< vector, vector > inc; for ( int i = 0; i < hepeup.NUP; ++i ) { if ( hepeup.ISTUP[i] == -9 ) { if ( inc.first.empty() ) inc.first.push_back(i); else if ( inc.second.empty() ) inc.second.push_back(i); } else if ( hepeup.ISTUP[i] == -1 ) { if ( inc.first.size() && hepeup.MOTHUP[i].first == inc.first.back() + 1 ) inc.first.push_back(i); else if ( inc.second.size() && hepeup.MOTHUP[i].first == inc.second.back() + 1 ) inc.second.push_back(i); else if ( inc.first.empty() ) { inc.first.push_back(-1); inc.first.push_back(i); } else if ( inc.second.empty() ) { inc.second.push_back(-1); inc.second.push_back(i); } else if ( inc.first.size() <= inc.second.size() ) inc.first.push_back(i); else inc.second.push_back(i); } } // Now store the corresponding id numbers pair< vector, vector > ids; ids.first.push_back(inc.first[0] < 0? heprup.IDBMUP.first: hepeup.IDUP[inc.first[0]]); for ( int i = 1, N = inc.first.size(); i < N; ++i ) ids.first.push_back(hepeup.IDUP[inc.first[i]]); ids.second.push_back(inc.second[0] < 0? heprup.IDBMUP.second: hepeup.IDUP[inc.second[0]]); for ( int i = 1, N = inc.second.size(); i < N; ++i ) ids.second.push_back(hepeup.IDUP[inc.second[i]]); // Find the correct pair of parton bins. PBPair pbp; for ( int i = 0, N = partonBins().size(); i < N; ++i ) { tcPBPtr curr = partonBins()[i].first; int icurr = inc.first.size() - 1; while ( curr && icurr >= 0 ) { if ( curr->parton()->id () != ids.first[icurr] ) break; curr = curr->incoming(); --icurr; } if(!(!partonBins()[i].first->incoming() && !partonBins()[i].first->particle() && partonBins()[i].first->parton()->id () == ids.first[0] && ( inc.first.size()==1 || (inc.first.size()==2 && ids.first[0]==ids.first[1]))) && ( curr || icurr >= 0 ) ) continue; curr = partonBins()[i].second; icurr = inc.second.size() - 1; while ( curr && icurr >= 0 ) { if ( curr->parton()->id () != ids.second[icurr] ) break; curr = curr->incoming(); --icurr; } if(!(!partonBins()[i].second->incoming() && !partonBins()[i].second->particle() && partonBins()[i].second->parton()->id () == ids.second[0] && ( inc.second.size()==1 || (inc.second.size()==2 && ids.second[0]==ids.second[1]))) && ( curr || icurr >= 0 ) ) continue; pbp = partonBins()[i]; } // If we are only checking we return here. return ( pbp.first && pbp.second ); } namespace { bool recursionNotNull(tcPBPtr bin, tcPPtr p) { while ( bin && p ) { if ( p->dataPtr() != bin->parton() ) break; bin = bin->incoming(); p = p->parents().size()? p->parents()[0]: tPPtr(); } return bin || p; } } tcPBPair LesHouchesReader::createPartonBinInstances() { tcPBPair sel; for ( int i = 0, N = partonBins().size(); i < N; ++i ) { tcPBPtr bin = partonBins()[i].first; tcPPtr p = incoming().first; if ( recursionNotNull(bin,p) ) continue; bin = partonBins()[i].second; p = incoming().second; if ( recursionNotNull(bin,p) ) continue; sel = partonBins()[i]; break; } if ( !sel.first || !sel.second ) Throw() << "Could not find appropriate PartonBin objects for event produced by " << "LesHouchesReader '" << name() << "'." << Exception::runerror; Direction<0> dir(true); thePartonBinInstances.first = new_ptr(PartonBinInstance(incoming().first, sel.first, -sqr(hepeup.SCALUP*GeV))); if ( thePartonBinInstances.first->xi() > 1.00001 ) { Throw() << "Found an event with momentum fraction larger than unity (x1=" << thePartonBinInstances.first->xi() << "). The event will be skipped." 
<< Exception::warning; throw Veto(); } dir.reverse(); thePartonBinInstances.second = new_ptr(PartonBinInstance(incoming().second, sel.second, -sqr(hepeup.SCALUP*GeV))); if ( thePartonBinInstances.second->xi() > 1.00001 ) { Throw() << "Found an event with momentum fraction larger than unity (x2=" << thePartonBinInstances.second->xi() << "). The event will be skipped." << Exception::warning; throw Veto(); } return sel; } void LesHouchesReader::createParticles() { theBeams = PPair(); theIncoming = PPair(); theOutgoing = PVector(); theIntermediates = PVector(); for ( int i = 0, N = hepeup.IDUP.size(); i < N; ++i ) { if ( !hepeup.IDUP[i] ) continue; Lorentz5Momentum mom(hepeup.PUP[i][0]*GeV, hepeup.PUP[i][1]*GeV, hepeup.PUP[i][2]*GeV, hepeup.PUP[i][3]*GeV, hepeup.PUP[i][4]*GeV); // cout << hepeup.PUP[i][0] << " " << hepeup.PUP[i][1] << " " << hepeup.PUP[i][2] << " " << hepeup.PUP[i][3] << " " << hepeup.PUP[i][4] << endl; if(theMomentumTreatment == 1) mom.rescaleEnergy(); else if(theMomentumTreatment == 2) mom.rescaleMass(); // cout << hepeup.PUP[i][0] << " " << hepeup.PUP[i][1] << " " << hepeup.PUP[i][2] << " " << hepeup.PUP[i][3] << " " << hepeup.PUP[i][4] << endl; PDPtr pd = getParticleData(hepeup.IDUP[i]); if (!pd) { Throw() << "LesHouchesReader '" << name() << "' found unknown particle ID " << hepeup.IDUP[i] << " in Les Houches common block structure.\n" << "You need to define the new particle in an input file.\n" << Exception::runerror; } if ( ! pd->coloured() && ( hepeup.ICOLUP[i].first != 0 || hepeup.ICOLUP[i].second != 0 ) ) { Throw() << "LesHouchesReader " << name() << ": " << pd->PDGName() << " is not a coloured particle.\nIt should not have " << "(anti-)colour lines " << hepeup.ICOLUP[i].first << ' ' << hepeup.ICOLUP[i].second << " set; the event file needs to be fixed." << Exception::runerror; } PPtr p = pd->produceParticle(mom); if(hepeup.ICOLUP[i].first>=0 && hepeup.ICOLUP[i].second >=0) { tColinePtr c = colourIndex(hepeup.ICOLUP[i].first); if ( c ) { c->addColoured(p); } c = colourIndex(hepeup.ICOLUP[i].second); if ( c ) c->addAntiColoured(p); } else { tColinePtr c1 = colourIndex(abs(hepeup.ICOLUP[i].first )); tColinePtr c2 = colourIndex(abs(hepeup.ICOLUP[i].second)); if(pd->hasColour()) { c1->addColouredIndexed(p,1); c2->addColouredIndexed(p,2); } else { c1->addAntiColouredIndexed(p,1); c2->addAntiColouredIndexed(p,2); } } particleIndex(i + 1, p); switch ( hepeup.ISTUP[i] ) { case -9: if ( !theBeams.first ) theBeams.first = p; else if ( !theBeams.second ) theBeams.second = p; else Throw() << "To many incoming beam particles in the LesHouchesReader '" << name() << "'." << Exception::runerror; break; case -1: if ( !theIncoming.first ) theIncoming.first = p; else if ( !theIncoming.second ) theIncoming.second = p; else if ( particleIndex(theIncoming.first) == hepeup.MOTHUP[i].first ) theIncoming.first = p; else if ( particleIndex(theIncoming.second) == hepeup.MOTHUP[i].first ) theIncoming.second = p; else Throw() << "To many incoming particles to hard subprocess in the " << "LesHouchesReader '" << name() << "'." << Exception::runerror; p->scale(sqr(hepeup.SCALUP*GeV)); break; case 1: theOutgoing.push_back(p); p->scale(sqr(hepeup.SCALUP*GeV)); break; case -2: case 2: case 3: theIntermediates.push_back(p); break; default: Throw() << "Unknown status code (" << hepeup.ISTUP[i] << ") in the LesHouchesReader '" << name() << "'." 
<< Exception::runerror; } // value 9 is defined as "Unknown or unpolarized particles" double spinup = hepeup.SPINUP[i]; if ( abs(spinup - 9) < 1.0e-3 ) spinup = 0.; if ( spinup < -1. || spinup > 1. ) { Throw() << "Polarization must be between -1 and 1, not " << spinup << " as found in the " << "LesHouches event file.\nThe event file needs to be fixed." << Exception::runerror; } if( theIncludeSpin && abs(pd->id()) == ParticleID::tauminus && spinup !=0) { if(pd->iSpin() == PDT::Spin1Half ) { vector wave; Helicity::SpinorWaveFunction(wave,p,Helicity::outgoing,true); RhoDMatrix rho(pd->iSpin(),true); rho(0,0) = 0.5*(1.-spinup); rho(1,1) = 0.5*(1.+spinup); p->spinInfo()->rhoMatrix() = rho; p->spinInfo()-> DMatrix() = rho; } } } // check the colour flows, and if necessary create any sources/sinks // hard process // get the particles in the hard process PVector external; for ( int i = 0, N = hepeup.IDUP.size(); i < N; ++i ) { unsigned int moth; switch ( hepeup.ISTUP[i] ) { case -1: external.push_back(particleIndex.find(i+1)); break; case 1: case 2: case 3: moth = hepeup.MOTHUP[i].first; if(moth!=0 && (hepeup.ISTUP[moth]==-1||hepeup.ISTUP[moth]==-2|| hepeup.ISTUP[moth]==-9)) external.push_back(particleIndex.find(i+1)); moth = hepeup.MOTHUP[i].second; if(moth!=0 && (hepeup.ISTUP[moth]==-1||hepeup.ISTUP[moth]==-2|| hepeup.ISTUP[moth]==-9)) external.push_back(particleIndex.find(i+1)); break; case -2: case -9: default: break; } } // check the incoming/outgoing lines match vector unMatchedColour,unMatchedAntiColour; for(unsigned int ix=0;ix col = external[ix]->colourInfo()-> colourLines(); vector anti = external[ix]->colourInfo()->antiColourLines(); if(hepeup.ISTUP[particleIndex(external[ix])-1]<0) swap(col,anti); if(!col.empty()) { for(unsigned int ic1=0;ic1 col2; if(hepeup.ISTUP[particleIndex(external[iy])-1]<0) { if(external[iy]->colourInfo()->colourLines().empty()) continue; col2 = external[iy]->colourInfo()->colourLines(); } else if(hepeup.ISTUP[particleIndex(external[iy])-1]>0) { if(external[iy]->colourInfo()->antiColourLines().empty()) continue; col2 = external[iy]->colourInfo()->antiColourLines(); } for(unsigned int ic2=0;ic2(col[ic1])); } } if(!anti.empty()) { for(unsigned int ic1=0;ic1 anti2; if(hepeup.ISTUP[particleIndex(external[iy])-1]<0) { if(external[iy]->colourInfo()->antiColourLines().empty()) continue; anti2 = external[iy]->colourInfo()->antiColourLines(); } else if(hepeup.ISTUP[particleIndex(external[iy])-1]>0) { if(external[iy]->colourInfo()->colourLines().empty()) continue; anti2 = external[iy]->colourInfo()->colourLines(); } for(unsigned int ic2=0;ic2(anti[ic1])); } } } // might have source/sink if( unMatchedColour.size() + unMatchedAntiColour.size() != 0) { if(unMatchedColour.size() == 3 ) { unMatchedColour[0]->setSourceNeighbours(unMatchedColour[1], unMatchedColour[2]); } else if(unMatchedColour.size() != 0 && ThePEG_DEBUG_LEVEL) { Throw() << "LesHouchesReader '" << name() << "' found inconsistent colour " << "flow in Les Houches common block structure for hard process.\n" << hepeup << Exception::runerror; } if(unMatchedAntiColour.size() == 3 ) { unMatchedAntiColour[0]->setSinkNeighbours(unMatchedAntiColour[1], unMatchedAntiColour[2]); } else if(unMatchedAntiColour.size() != 0 && ThePEG_DEBUG_LEVEL) { Throw() << "LesHouchesReader '" << name() << "' found inconsistent colour " << "flow in Les Houches common block structure for hard process.\n" << hepeup << Exception::runerror; } } // any subsequent decays for ( int i = 0, N = hepeup.IDUP.size(); i < N; ++i ) { if(hepeup.ISTUP[i] 
!=2 && hepeup.ISTUP[i] !=3) continue; PVector external; external.push_back(particleIndex.find(i+1)); for ( int j = 0; j < N; ++j ) { if(hepeup.MOTHUP[j].first==i+1|| hepeup.MOTHUP[j].second==i+1) external.push_back(particleIndex.find(j+1)); } // check the incoming/outgoing lines match vector unMatchedColour,unMatchedAntiColour; for(unsigned int ix=0;ix col = external[ix]->colourInfo()-> colourLines(); vector anti = external[ix]->colourInfo()->antiColourLines(); if(ix==0) swap(col,anti); if(!col.empty()) { for(unsigned int ic1=0;ic1 col2; if(iy==0) { if(external[iy]->colourInfo()->colourLines().empty()) continue; col2 = external[iy]->colourInfo()->colourLines(); } else { if(external[iy]->colourInfo()->antiColourLines().empty()) continue; col2 = external[iy]->colourInfo()->antiColourLines(); } for(unsigned int ic2=0;ic2(col[ic1])); } } if(!anti.empty()) { for(unsigned int ic1=0;ic1 anti2; if(iy==0) { if(external[iy]->colourInfo()->antiColourLines().empty()) continue; anti2 = external[iy]->colourInfo()->antiColourLines(); } else { if(external[iy]->colourInfo()->colourLines().empty()) continue; anti2 = external[iy]->colourInfo()->colourLines(); } for(unsigned int ic2=0;ic2(anti[ic1])); } } } // might have source/sink if( unMatchedColour.size() + unMatchedAntiColour.size() != 0) { if(unMatchedColour.size() == 3 ) { unMatchedColour[0]->setSourceNeighbours(unMatchedColour[1], unMatchedColour[2]); } else if(unMatchedColour.size() != 0 && ThePEG_DEBUG_LEVEL) { Throw() << "LesHouchesReader '" << name() << "' found inconsistent colour " << "flow in Les Houches common block structure for decay of \n" << *external[0] << "\n" << hepeup << Exception::runerror; } if(unMatchedAntiColour.size() == 3 ) { unMatchedAntiColour[0]->setSinkNeighbours(unMatchedAntiColour[1], unMatchedAntiColour[2]); } else if(unMatchedAntiColour.size() != 0 && ThePEG_DEBUG_LEVEL) { Throw() << "LesHouchesReader '" << name() << "' found inconsistent colour " << "flow in Les Houches common block structure for decay of\n" << *external[0] << "\n" << hepeup << Exception::runerror; } } } } void LesHouchesReader::createBeams() { if ( !theBeams.first && dynamic_ptr_cast::tcp>(inPDF.first) ) { theBeams.first = theIncoming.first; } else if ( !theBeams.first ) { theBeams.first = getParticleData(heprup.IDBMUP.first)->produceParticle(); double m = theBeams.first->mass()/GeV; theBeams.first->set5Momentum (Lorentz5Momentum(ZERO, ZERO, sqrt(sqr(heprup.EBMUP.first) - sqr(m))*GeV, heprup.EBMUP.first*GeV, m*GeV)); hepeup.IDUP.push_back(heprup.IDBMUP.first); hepeup.ISTUP.push_back(-9); hepeup.MOTHUP.push_back(make_pair(0, 0)); hepeup.ICOLUP.push_back(make_pair(0, 0)); hepeup.VTIMUP.push_back(0.0); hepeup.SPINUP.push_back(0.0); particleIndex(hepeup.IDUP.size(), theBeams.first); hepeup.MOTHUP[particleIndex(theIncoming.first) - 1].first = hepeup.IDUP.size(); } if ( !theBeams.second && dynamic_ptr_cast::tcp>(inPDF.second) ) { theBeams.second = theIncoming.second; } else if ( !theBeams.second ) { theBeams.second = getParticleData(heprup.IDBMUP.second)->produceParticle(); double m = theBeams.second->mass()/GeV; theBeams.second->set5Momentum (Lorentz5Momentum(ZERO, ZERO, -sqrt(sqr(heprup.EBMUP.second) - sqr(m))*GeV, heprup.EBMUP.second*GeV, m*GeV)); hepeup.IDUP.push_back(heprup.IDBMUP.second); hepeup.ISTUP.push_back(-9); hepeup.MOTHUP.push_back(make_pair(0, 0)); hepeup.ICOLUP.push_back(make_pair(0, 0)); hepeup.VTIMUP.push_back(0.0); hepeup.SPINUP.push_back(0.0); particleIndex(hepeup.IDUP.size(), theBeams.second); 
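    // Record the freshly added beam entry as the mother of the second incoming
    // parton, so that connectMothers() links it as a child of the beam particle.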
hepeup.MOTHUP[particleIndex(theIncoming.second) - 1].first = hepeup.IDUP.size();
  }
}

void LesHouchesReader::connectMothers() {
  const ObjectIndexer<long,Particle> & pi = particleIndex;
  for ( int i = 0, N = hepeup.IDUP.size(); i < N; ++i ) {
    if ( pi(hepeup.MOTHUP[i].first) )
      pi(hepeup.MOTHUP[i].first)->addChild(pi(i + 1));
    if ( pi(hepeup.MOTHUP[i].second) &&
         hepeup.MOTHUP[i].second != hepeup.MOTHUP[i].first )
      pi(hepeup.MOTHUP[i].second)->addChild(pi(i + 1));
  }
}

void LesHouchesReader::openReadCacheFile() {
  if ( cacheFile() ) closeCacheFile();
  cacheFile().open(cacheFileName(), "r");
  position = 0;
}

void LesHouchesReader::openWriteCacheFile() {
  if ( cacheFile() ) closeCacheFile();
  cacheFile().open(cacheFileName(), "w");
}

void LesHouchesReader::closeCacheFile() {
  cacheFile().close();
}

void LesHouchesReader::cacheEvent() const {
  static vector<char> buff;
  cacheFile().write(&hepeup.NUP, sizeof(hepeup.NUP));
  buff.resize(eventSize(hepeup.NUP));
  char * pos = &buff[0];
  pos = mwrite(pos, hepeup.IDPRUP);
  pos = mwrite(pos, hepeup.XWGTUP);
  pos = mwrite(pos, hepeup.XPDWUP);
  pos = mwrite(pos, hepeup.SCALUP);
  pos = mwrite(pos, hepeup.AQEDUP);
  pos = mwrite(pos, hepeup.AQCDUP);
  pos = mwrite(pos, hepeup.IDUP[0], hepeup.NUP);
  pos = mwrite(pos, hepeup.ISTUP[0], hepeup.NUP);
  pos = mwrite(pos, hepeup.MOTHUP[0], hepeup.NUP);
  pos = mwrite(pos, hepeup.ICOLUP[0], hepeup.NUP);
  for ( int i = 0; i < hepeup.NUP; ++i )
    pos = mwrite(pos, hepeup.PUP[i][0], 5);
  pos = mwrite(pos, hepeup.VTIMUP[0], hepeup.NUP);
  pos = mwrite(pos, hepeup.SPINUP[0], hepeup.NUP);
  pos = mwrite(pos, lastweight);
  pos = mwrite(pos, optionalWeights);
+  pos = mwrite(pos, LHEeventnum);
  for(size_t ff = 0; ff < optionalWeightsNames.size(); ff++) {
    pos = mwrite(pos, optionalWeightsNames[ff]);
  }
  pos = mwrite(pos, optionalnpLO);
  pos = mwrite(pos, optionalnpNLO);
  pos = mwrite(pos, preweight);
  cacheFile().write(&buff[0], buff.size(), 1);
}

bool LesHouchesReader::uncacheEvent() {
  reset();
  static vector<char> buff;
  if ( cacheFile().read(&hepeup.NUP, sizeof(hepeup.NUP)) != 1 ) return false;
  buff.resize(eventSize(hepeup.NUP));
  if ( cacheFile().read(&buff[0], buff.size()) != 1 ) return false;
  const char * pos = &buff[0];
  pos = mread(pos, hepeup.IDPRUP);
  pos = mread(pos, hepeup.XWGTUP);
  pos = mread(pos, hepeup.XPDWUP);
  pos = mread(pos, hepeup.SCALUP);
  pos = mread(pos, hepeup.AQEDUP);
  pos = mread(pos, hepeup.AQCDUP);
  hepeup.IDUP.resize(hepeup.NUP);
  pos = mread(pos, hepeup.IDUP[0], hepeup.NUP);
  hepeup.ISTUP.resize(hepeup.NUP);
  pos = mread(pos, hepeup.ISTUP[0], hepeup.NUP);
  hepeup.MOTHUP.resize(hepeup.NUP);
  pos = mread(pos, hepeup.MOTHUP[0], hepeup.NUP);
  hepeup.ICOLUP.resize(hepeup.NUP);
  pos = mread(pos, hepeup.ICOLUP[0], hepeup.NUP);
  hepeup.PUP.resize(hepeup.NUP);
  for ( int i = 0; i < hepeup.NUP; ++i )
    pos = mread(pos, hepeup.PUP[i][0], 5);
  hepeup.VTIMUP.resize(hepeup.NUP);
  pos = mread(pos, hepeup.VTIMUP[0], hepeup.NUP);
  hepeup.SPINUP.resize(hepeup.NUP);
  pos = mread(pos, hepeup.SPINUP[0], hepeup.NUP);
  pos = mread(pos, lastweight);
  pos = mread(pos, optionalWeights);
+  // read back in the same order as cacheEvent() writes, directly after the
+  // optional weights, so the cached record stays in sync
+  pos = mread(pos, LHEeventnum);
  for(size_t ff = 0; ff < optionalWeightsNames.size(); ff++) {
    pos = mread(pos, optionalWeightsNames[ff]);
  }
  pos = mread(pos, optionalnpLO);
  pos = mread(pos, optionalnpNLO);
  pos = mread(pos, preweight);
-
  // If we are skipping, we do not have to do anything else.
  if ( skipping ) return true;
  if ( CKKWHandler() && maxMultCKKW() > 0 && maxMultCKKW() > minMultCKKW() ) {
    // The cached event has not been submitted to CKKW reweighting, so
    // we do that now.
fillEvent(); getSubProcess(); CKKWHandler()->setXComb(lastXCombPtr()); lastweight *= CKKWHandler()->reweightCKKW(minMultCKKW(), maxMultCKKW()); } return true; } void LesHouchesReader::persistentOutput(PersistentOStream & os) const { os << heprup.IDBMUP << heprup.EBMUP << heprup.PDFGUP << heprup.PDFSUP << heprup.IDWTUP << heprup.NPRUP << heprup.XSECUP << heprup.XERRUP << heprup.XMAXUP << heprup.LPRUP << hepeup.NUP << hepeup.IDPRUP << hepeup.XWGTUP << hepeup.XPDWUP << hepeup.SCALUP << hepeup.AQEDUP << hepeup.AQCDUP << hepeup.IDUP << hepeup.ISTUP << hepeup.MOTHUP << hepeup.ICOLUP << hepeup.PUP << hepeup.VTIMUP << hepeup.SPINUP << inData << inPDF << outPDF << thePartonExtractor << theCKKW << thePartonBins << theXCombs << theCuts << theNEvents << position << reopened << theMaxScan << isActive << theCacheFileName << doCutEarly << stats << statmap << thePartonBinInstances << theBeams << theIncoming << theOutgoing << theIntermediates << reweights << preweights << preweight << reweightPDF << doInitPDFs - << theLastXComb << theMaxMultCKKW << theMinMultCKKW << lastweight << optionalWeights << optionalnpLO << optionalnpNLO + << theLastXComb << theMaxMultCKKW << theMinMultCKKW << lastweight << optionalWeights << optionalnpLO << optionalnpNLO << LHEeventnum << maxFactor << ounit(weightScale, picobarn) << xSecWeights << maxWeights << theMomentumTreatment << useWeightWarnings << theReOpenAllowed << theIncludeSpin; } void LesHouchesReader::persistentInput(PersistentIStream & is, int) { if ( cacheFile() ) closeCacheFile(); is >> heprup.IDBMUP >> heprup.EBMUP >> heprup.PDFGUP >> heprup.PDFSUP >> heprup.IDWTUP >> heprup.NPRUP >> heprup.XSECUP >> heprup.XERRUP >> heprup.XMAXUP >> heprup.LPRUP >> hepeup.NUP >> hepeup.IDPRUP >> hepeup.XWGTUP >> hepeup.XPDWUP >> hepeup.SCALUP >> hepeup.AQEDUP >> hepeup.AQCDUP >> hepeup.IDUP >> hepeup.ISTUP >> hepeup.MOTHUP >> hepeup.ICOLUP >> hepeup.PUP >> hepeup.VTIMUP >> hepeup.SPINUP >> inData >> inPDF >> outPDF >> thePartonExtractor >> theCKKW >> thePartonBins >> theXCombs >> theCuts >> theNEvents >> position >> reopened >> theMaxScan >> isActive >> theCacheFileName >> doCutEarly >> stats >> statmap >> thePartonBinInstances >> theBeams >> theIncoming >> theOutgoing >> theIntermediates >> reweights >> preweights >> preweight >> reweightPDF >> doInitPDFs - >> theLastXComb >> theMaxMultCKKW >> theMinMultCKKW >> lastweight >> optionalWeights >> optionalnpLO >> optionalnpNLO + >> theLastXComb >> theMaxMultCKKW >> theMinMultCKKW >> lastweight >> optionalWeights >> optionalnpLO >> optionalnpNLO >> LHEeventnum >> maxFactor >> iunit(weightScale, picobarn) >> xSecWeights >> maxWeights >> theMomentumTreatment >> useWeightWarnings >> theReOpenAllowed >> theIncludeSpin; } AbstractClassDescription LesHouchesReader::initLesHouchesReader; // Definition of the static class description member. 
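// The simple accessors below back the Parameter/Reference interfaces declared
// in Init(); note that the beam energies are stored in the HEPRUP block in
// units of GeV, so the setters and getters convert to and from ThePEG's Energy.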
void LesHouchesReader::setBeamA(long id) { heprup.IDBMUP.first = id; } long LesHouchesReader::getBeamA() const { return heprup.IDBMUP.first; } void LesHouchesReader::setBeamB(long id) { heprup.IDBMUP.second = id; } long LesHouchesReader::getBeamB() const { return heprup.IDBMUP.second; } void LesHouchesReader::setEBeamA(Energy e) { heprup.EBMUP.first = e/GeV; } Energy LesHouchesReader::getEBeamA() const { return heprup.EBMUP.first*GeV; } void LesHouchesReader::setEBeamB(Energy e) { heprup.EBMUP.second = e/GeV; } Energy LesHouchesReader::getEBeamB() const { return heprup.EBMUP.second*GeV; } void LesHouchesReader::setPDFA(PDFPtr pdf) { inPDF.first = pdf; } PDFPtr LesHouchesReader::getPDFA() const { return inPDF.first; } void LesHouchesReader::setPDFB(PDFPtr pdf) { inPDF.second = pdf; } PDFPtr LesHouchesReader::getPDFB() const { return inPDF.second; } void LesHouchesReader::Init() { static ClassDocumentation documentation ("ThePEG::LesHouchesReader is an abstract base class to be used " "for objects which reads event files or streams from matrix element " "generators."); static Parameter interfaceBeamA ("BeamA", "The PDG id of the incoming particle along the positive z-axis. " "If zero the corresponding information is to be deduced from the " "event stream/file.", 0, 0, 0, 0, true, false, false, &LesHouchesReader::setBeamA, &LesHouchesReader::getBeamA, 0, 0, 0); static Parameter interfaceBeamB ("BeamB", "The PDG id of the incoming particle along the negative z-axis. " "If zero the corresponding information is to be deduced from the " "event stream/file.", 0, 0, 0, 0, true, false, false, &LesHouchesReader::setBeamB, &LesHouchesReader::getBeamB, 0, 0, 0); static Parameter interfaceEBeamA ("EBeamA", "The energy of the incoming particle along the positive z-axis. " "If zero the corresponding information is to be deduced from the " "event stream/file.", 0, GeV, ZERO, ZERO, 1000000000.0*GeV, true, false, true, &LesHouchesReader::setEBeamA, &LesHouchesReader::getEBeamA, 0, 0, 0); static Parameter interfaceEBeamB ("EBeamB", "The energy of the incoming particle along the negative z-axis. " "If zero the corresponding information is to be deduced from the " "event stream/file.", 0, GeV, ZERO, ZERO, 1000000000.0*GeV, true, false, true, &LesHouchesReader::setEBeamB, &LesHouchesReader::getEBeamB, 0, 0, 0); static Reference interfacePDFA ("PDFA", "The PDF used for incoming particle along the positive z-axis. " "If null the corresponding information is to be deduced from the " "event stream/file.", 0, true, false, true, true, false, &LesHouchesReader::setPDFA, &LesHouchesReader::getPDFA, 0); static Reference interfacePDFB ("PDFB", "The PDF used for incoming particle along the negative z-axis. " "If null the corresponding information is to be deduced from the " "event stream/file.", 0, true, false, true, true, false, &LesHouchesReader::setPDFB, &LesHouchesReader::getPDFB, 0); static Parameter interfaceMaxScan ("MaxScan", "The maximum number of events to scan to obtain information about " "processes and cross section in the intialization.", &LesHouchesReader::theMaxScan, -1, 0, 0, true, false, false); static Parameter interfaceCacheFileName ("CacheFileName", "Name of file used to cache the events from the reader in a fast-readable " "form. 
If empty, no cache file will be generated.", &LesHouchesReader::theCacheFileName, "", true, false); interfaceCacheFileName.fileType(); static Switch interfaceCutEarly ("CutEarly", "Determines whether to apply cuts to events before converting to " "ThePEG format.", &LesHouchesReader::doCutEarly, true, true, false); static SwitchOption interfaceCutEarlyYes (interfaceCutEarly, "Yes", "Event are cut before converted.", true); static SwitchOption interfaceCutEarlyNo (interfaceCutEarly, "No", "Events are not cut before converted.", false); static Reference interfacePartonExtractor ("PartonExtractor", "The PartonExtractor object used to construct remnants. If no object is " "provided the LesHouchesEventHandler object must provide one instead.", &LesHouchesReader::thePartonExtractor, true, false, true, true, false); static Reference interfaceCuts ("Cuts", "The Cuts object to be used for this reader. Note that these " "must not be looser cuts than those used in the actual generation. " "If no object is provided the LesHouchesEventHandler object must " "provide one instead.", &LesHouchesReader::theCuts, true, false, true, true, false); static RefVector interfaceReweights ("Reweights", "A list of ThePEG::ReweightBase objects to modify this the weight of " "this reader.", &LesHouchesReader::reweights, 0, false, false, true, false); static RefVector interfacePreweights ("Preweights", "A list of ThePEG::ReweightBase objects to bias the phase space for this " "reader without influencing the actual cross section.", &LesHouchesReader::preweights, 0, false, false, true, false); static Switch interfaceReweightPDF ("ReweightPDF", "If the PDFs used in the generation for this reader is different " "from the ones assumed by the associated PartonExtractor object, " "should the events be reweighted to fit the latter?", &LesHouchesReader::reweightPDF, false, true, false); static SwitchOption interfaceReweightPDFNo (interfaceReweightPDF, "No", "The event weights are kept as they are.", false); static SwitchOption interfaceReweightPDFYes (interfaceReweightPDF, "Yes", "The events are reweighted.", true); static Switch interfaceInitPDFs ("InitPDFs", "If no PDFs were specified in PDFA or " "PDFBfor this reader, try to extract the " "information from the event file and assign the relevant PDFBase" "objects when the reader is initialized.", &LesHouchesReader::doInitPDFs, false, true, false); static SwitchOption interfaceInitPDFsYes (interfaceInitPDFs, "Yes", "Extract PDFs during initialization.", true); static SwitchOption interfaceInitPDFsNo (interfaceInitPDFs, "No", "Do not extract PDFs during initialization.", false); static Parameter interfaceMaxMultCKKW ("MaxMultCKKW", "If this reader is to be used (possibly together with others) for CKKW-" "reweighting and veto, this should give the multiplicity of outgoing " "particles in the highest multiplicity matrix element in the group. " "If set to zero, no CKKW procedure should be applied.", &LesHouchesReader::theMaxMultCKKW, 0, 0, 0, true, false, Interface::lowerlim); static Parameter interfaceMinMultCKKW ("MinMultCKKW", "If this reader is to be used (possibly together with others) for CKKW-" "reweighting and veto, this should give the multiplicity of outgoing " "particles in the lowest multiplicity matrix element in the group. 
If " "larger or equal to MaxMultCKKW, no CKKW " "procedure should be applied.", &LesHouchesReader::theMinMultCKKW, 0, 0, 0, true, false, Interface::lowerlim); static Switch interfaceMomentumTreatment ("MomentumTreatment", "Treatment of the momenta supplied by the interface", &LesHouchesReader::theMomentumTreatment, 0, false, false); static SwitchOption interfaceMomentumTreatmentAccept (interfaceMomentumTreatment, "Accept", "Just accept the momenta given", 0); static SwitchOption interfaceMomentumTreatmentRescaleEnergy (interfaceMomentumTreatment, "RescaleEnergy", "Rescale the energy supplied so it is consistent with the mass", 1); static SwitchOption interfaceMomentumTreatmentRescaleMass (interfaceMomentumTreatment, "RescaleMass", "Rescale the mass supplied so it is consistent with the" " energy and momentum", 2); static Switch interfaceWeightWarnings ("WeightWarnings", "Determines if warnings about possible weight incompatibilities should " "be issued when this reader is initialized.", &LesHouchesReader::useWeightWarnings, true, true, false); static SwitchOption interfaceWeightWarningsWarnAboutWeights (interfaceWeightWarnings, "WarnAboutWeights", "Warn about possible incompatibilities with the weight option in the " "Les Houches common block and the requested weight treatment.", true); static SwitchOption interfaceWeightWarningsDontWarnAboutWeights (interfaceWeightWarnings, "DontWarnAboutWeights", "Do not warn about possible incompatibilities with the weight option " "in the Les Houches common block and the requested weight treatment.", false); static Switch interfaceAllowedTopReOpen ("AllowedToReOpen", "Can the file be reopened if more events are requested than the file contains?", &LesHouchesReader::theReOpenAllowed, true, false, false); static SwitchOption interfaceAllowedTopReOpenYes (interfaceAllowedTopReOpen, "Yes", "Allowed to reopen the file", true); static SwitchOption interfaceAllowedTopReOpenNo (interfaceAllowedTopReOpen, "No", "Not allowed to reopen the file", false); static Switch interfaceIncludeSpin ("IncludeSpin", "Use the spin information present in the event file, for tau leptons" " only as this is the only case which makes any sense", &LesHouchesReader::theIncludeSpin, true, false, false); static SwitchOption interfaceIncludeSpinYes (interfaceIncludeSpin, "Yes", "Use the spin information", true); static SwitchOption interfaceIncludeSpinNo (interfaceIncludeSpin, "No", "Don't use the spin information", false); interfaceCuts.rank(8); interfacePartonExtractor.rank(7); interfaceBeamA.rank(5); interfaceBeamB.rank(4); interfaceEBeamA.rank(3); interfaceEBeamB.rank(2); interfaceMaxMultCKKW.rank(1.5); interfaceMinMultCKKW.rank(1.0); interfaceBeamA.setHasDefault(false); interfaceBeamB.setHasDefault(false); interfaceEBeamA.setHasDefault(false); interfaceEBeamB.setHasDefault(false); interfaceMaxMultCKKW.setHasDefault(false); interfaceMinMultCKKW.setHasDefault(false); } namespace ThePEG { ostream & operator<<(ostream & os, const HEPEUP & h) { os << "\n" << " " << setw(4) << h.NUP << " " << setw(6) << h.IDPRUP << " " << setw(14) << h.XWGTUP << " " << setw(14) << h.SCALUP << " " << setw(14) << h.AQEDUP << " " << setw(14) << h.AQCDUP << "\n"; for ( int i = 0; i < h.NUP; ++i ) os << " " << setw(8) << h.IDUP[i] << " " << setw(2) << h.ISTUP[i] << " " << setw(4) << h.MOTHUP[i].first << " " << setw(4) << h.MOTHUP[i].second << " " << setw(4) << h.ICOLUP[i].first << " " << setw(4) << h.ICOLUP[i].second << " " << setw(14) << h.PUP[i][0] << " " << setw(14) << h.PUP[i][1] << " " << setw(14) << 
h.PUP[i][2] << " " << setw(14) << h.PUP[i][3] << " " << setw(14) << h.PUP[i][4] << " " << setw(1) << h.VTIMUP[i] << " " << setw(1) << h.SPINUP[i] << std::endl; os << "" << std::endl; return os; } } diff --git a/LesHouches/LesHouchesReader.h b/LesHouches/LesHouchesReader.h --- a/LesHouches/LesHouchesReader.h +++ b/LesHouches/LesHouchesReader.h @@ -1,1003 +1,1013 @@ // -*- C++ -*- // // LesHouchesReader.h is a part of ThePEG - Toolkit for HEP Event Generation // Copyright (C) 1999-2019 Leif Lonnblad // // ThePEG is licenced under version 3 of the GPL, see COPYING for details. // Please respect the MCnet academic guidelines, see GUIDELINES for details. // #ifndef THEPEG_LesHouchesReader_H #define THEPEG_LesHouchesReader_H // This is the declaration of the LesHouchesReader class. #include "LesHouches.h" #include "ThePEG/Handlers/HandlerBase.h" #include "ThePEG/Utilities/ObjectIndexer.h" #include "ThePEG/Utilities/Exception.h" #include "ThePEG/Utilities/XSecStat.h" #include "ThePEG/PDF/PartonBinInstance.h" #include "ThePEG/PDF/PartonBin.fh" #include "ThePEG/MatrixElement/ReweightBase.h" #include "LesHouchesEventHandler.fh" #include "LesHouchesReader.fh" #include "ThePEG/Utilities/CFile.h" #include #include namespace ThePEG { /** * LesHouchesReader is an abstract base class to be used for objects * which reads event files or streams from matrix element * generators. Derived classes must at least implement the open() and * doReadEvent() methods to read in information about the whole run into * the HEPRUP variable and next event into the HEPEUP variable * respectively. Also the close() function to close the file or stream * read must be implemented. Although these functions are named as if * we are reading from event files, they could just as well implement * the actual generation of events. * * After filling the HEPRUP and HEPEUP variables, which are protected * and easily accesible from the sub-class, this base class will then * be responsible for transforming this data to the ThePEG Event * record in the getEvent() method. LesHouchesReaders can * only be used inside LesHouchesEventHandler objects. * * In the initialization the virtual open() and scan() functions are * called. Here the derived class must provide the information about * the processes in the variables corresponding to the HEPRUP common * block. Note that the IDWTUP is required to be +/- 1, and sub * classes are required to change the information accordingly to * ensure the correct corss section sampling. Note also that the * controlling LesHouchesEventHandler may choose to generate weighted * events even if IDWTUP is 1. * * Note that the information given per process in e.g. the XSECUP and * XMAXUP vectors is not used by the LesHouchesEventHandler and by * default the LesHouchesReader is not assumed to be able to actively * choose between the sub-processes. Instead, the * LesHouchesEventHandler can handle several LesHouchesReader objects * and choose between them. However, a sub-class of LesHouchesReader * may set the flag isActive, in which case it is assumed to be able * to select between its sub-processes itself. * * The LesHouchesReader may be assigned a number ReweightBase objects * which either completely reweights the events produced (in the * reweights vector), or only biases the selection without influencing * the cross section (in the preweights vector). 
Note that it is the * responsibility of a sub-class to call the reweight() function and * multiply the weight according to its return value (typically done * in the readEvent() function). * * @see \ref LesHouchesReaderInterfaces "The interfaces" * defined for LesHouchesReader. * @see Event * @see LesHouchesEventHandler */ class LesHouchesReader: public HandlerBase, public LastXCombInfo<> { /** * LesHouchesEventHandler should have access to our private parts. */ friend class LesHouchesEventHandler; /** * Map for accumulating statistics of cross sections per process * number. */ typedef map StatMap; /** * Map of XComb objects describing the incoming partons indexed by * the corresponding PartonBin pair. */ typedef map XCombMap; /** * A vector of pointers to ReweightBase objects. */ typedef vector ReweightVector; public: /** @name Standard constructors and destructors. */ //@{ /** * Default constructor. If the optional argument is true, the reader * is assumed to be able to produce events on demand for a given * process. */ LesHouchesReader(bool active = false); /** * Copy-constructor. */ LesHouchesReader(const LesHouchesReader &); /** * Destructor. */ virtual ~LesHouchesReader(); //@} public: /** @name Main virtual functions to be overridden in * sub-classes. They are named as if we are reading from event * files, but could equally well implement the actual generation of * events. */ //@{ /** * Open a file or stream with events and read in the run information * into the heprup variable. */ virtual void open() = 0; /** * Read the next event from the file or stream into the * corresponding protected variables. Return false if there are no * more events. */ virtual bool doReadEvent() = 0; /** * Close the file or stream from which events have been read. */ virtual void close() = 0; /** * Return the weight names. */ virtual vector optWeightsNamesFunc() = 0; /** * Vector with the optional weight names. */ vector optionalWeightsNames; //@} /** @name Other important functions which may be overridden in * sub-classes which want to bypass the basic HEPRUP or HEPEUP * variables or otherwise facilitate the conversion to ThePEG * objects. */ //@{ /** * Initialize. This function is called by the LesHouchesEventHandler * to which this object is assigned. */ virtual void initialize(LesHouchesEventHandler & eh); /** * Calls readEvent() or uncacheEvent() to read information into the * LesHouches common block variables. This function is called by the * LesHouchesEventHandler if this reader has been selected to * produce an event. * * @return the weight associated with this event. If negative weights * are allowed it should be between -1 and 1, otherwise between 0 * and 1. If outside these limits the previously estimated maximum * is violated. Note that the estimated maximum then should be * updated from the outside. */ virtual double getEvent(); /** * Calls doReadEvent() and performs pre-defined reweightings. If a * sub-class overrides this function it must make sure that the * corresponding reweightings are done. */ virtual bool readEvent(); /** * Skip \a n events. Used by LesHouchesEventHandler to make sure * that a file is scanned an even number of times in case the events * are not randomly distributed in the file. */ virtual void skip(long n); /** * Get an XComb object. Converts the information in the Les Houches * common block variables to an XComb object describing the sub * process. This is the way information is conveyed from the reader * to the controlling LesHouchesEventHandler. 
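 *
 * As an orientation only (the actual driving logic lives in
 * LesHouchesEventHandler, and reader below is just a hypothetical
 * pointer to a LesHouchesReader), a per-event sequence might read
 * \code
 * double w = reader->getEvent();             // fill HEPEUP, obtain the weight
 * tXCombPtr xc = reader->getXComb();         // kinematics packaged as an XComb
 * tSubProPtr sub = reader->getSubProcess();  // full sub-process record
 * \endcode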
*/ tXCombPtr getXComb(); /** * Get a SubProcess object corresponding to the information in the * Les Houches common block variables. */ tSubProPtr getSubProcess(); /** * Scan the file or stream to obtain information about cross section * weights and particles etc. This function should fill the * variables corresponding to the /HEPRUP/ common block. The * function returns the number of events scanned. */ virtual long scan(); /** * Take the information corresponding to the HEPRUP common block and * initialize the statistics for this reader. */ virtual void initStat(); /** * Reweights the current event using the reweights and preweights * vectors. It is the responsibility of the sub-class to call this * function after the HEPEUP information has been retrieved. */ double reweight(); /** * Converts the information in the Les Houches common block * variables into Particle objects. */ virtual void fillEvent(); /** * Removes the particles created in the last generated event, * preparing to produce a new one. */ void reset(); /** * Possibility for subclasses to recover from non-conformant * settings of XMAXUP when an event file has been scanned with \a * neve events. Should set weightScale so that the average XMAXUP * times weightScale gives the cross section for a process. (This is * needed for MadEvent). */ virtual void setWeightScale(long neve); //@} /** @name Access information about the current event. */ //@{ /** * Return the size of this event in bytes. To be used for the cache * file. \a N is the number of particles. If \a N is 0, the * number is taken from NUP. */ static size_t eventSize(int N) { return (N + 1)*sizeof(int) + // IDPRUP, ISTUP (7*N + 4)*sizeof(double) + // XWGTUP, SCALUP, AQEDUP, AQCDUP, PUP, // VTIMUP, SPINUP N*sizeof(long) + // IDUP 2*N*sizeof(pair) + // MOTHUP, ICOLUP sizeof(pair) + // XPDWUP. 2*sizeof(double); // lastweight and preweight } /** * The current event weight given by XWGTUP times possible * reweighting. Note that this is not necessarily the same as what * is returned by getEvent(), which is scaled with the maximum * weight. */ double eventWeight() const { return hepeup.XWGTUP*lastweight; } /** * Return the optional named weights associated with the current event. */ const map& optionalEventWeights() const { return optionalWeights; } + /** + * Return the Les Houches event number associated with the current event + */ + const long& LHEEventNum() const { return LHEeventnum; } + /** * Return the optional npLO and npNLO */ const int& optionalEventnpLO() const { return optionalnpLO; } const int& optionalEventnpNLO() const { return optionalnpNLO; } /** * The pair of PartonBinInstance objects describing the current * incoming partons in the event. */ const PBIPair & partonBinInstances() const { return thePartonBinInstances; } /** * Return the instances of the beam particles for the current event. */ const PPair & beams() const { return theBeams; } /** * Return the instances of the incoming particles to the sub process * for the current event. */ const PPair & incoming() const { return theIncoming; } /** * Return the instances of the outgoing particles from the sub process * for the current event. */ const PVector & outgoing() const { return theOutgoing; } /** * Return the instances of the intermediate particles in the sub * process for the current event. 
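 *
 * Purely for illustration (not part of the interface contract): after
 * fillEvent() the per-event accessors can be combined, e.g. to check
 * momentum balance,
 * \code
 * LorentzMomentum pin = incoming().first->momentum()
 *                     + incoming().second->momentum();
 * LorentzMomentum pout;
 * for ( PVector::const_iterator p = outgoing().begin();
 *       p != outgoing().end(); ++p )
 *   pout += (**p).momentum();
 * // pin and pout should agree up to the numerical accuracy of the input
 * \endcode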
*/ const PVector & intermediates() const { return theIntermediates; } /** * If this reader is to be used (possibly together with others) for * CKKW reweighting and veto, this should give the multiplicity of * outgoing particles in the highest multiplicity matrix element in * the group. */ int maxMultCKKW() const { return theMaxMultCKKW; } /** * If this reader is to be used (possibly together with others) for * CKKW reweighting and veto, this should give the multiplicity of * outgoing particles in the lowest multiplicity matrix element in * the group. */ int minMultCKKW() const { return theMinMultCKKW; } //@} /** @name Other inlined access functions. */ //@{ /** * The number of events found in this reader. If less than zero the * number of events is unlimited. */ long NEvents() const { return theNEvents; } /** * The number of events produced so far. Is reset to zero if an * event file is reopened. */ long currentPosition() const { return position; } /** * The maximum number of events to scan to collect information about * processes and cross sections. If less than 0, all events will be * scanned. */ long maxScan() const { return theMaxScan; } /** * Return true if this reader is active. */ bool active() const { return isActive; } /** * True if negative weights may be produced. */ bool negativeWeights() const { return heprup.IDWTUP < 0; } /** * The collected cross section statistics for this reader. */ const XSecStat & xSecStats() const { return stats; } /** * Collected statistics about the individual processes. */ const StatMap & processStats() const { return statmap; } /** * Select the current event. It will later be rejected with a * probability given by \a weight. */ void select(double weight) { stats.select(weight); statmap[hepeup.IDPRUP].select(weight); } /** * Accept the current event assuming it was previously selected. */ void accept() { stats.accept(); statmap[hepeup.IDPRUP].accept(); } /** * Reject the current event assuming it was previously accepted. */ void reject(double w) { stats.reject(w); statmap[hepeup.IDPRUP].reject(w); } /** * Increase the overestimated cross section for this reader. */ virtual void increaseMaxXSec(CrossSection maxxsec); /** * The PartonExtractor object used to construct remnants. */ tPExtrPtr partonExtractor() const { return thePartonExtractor; } /** * Return a possibly null pointer to a CascadeHandler to be used for * CKKW-reweighting. */ tCascHdlPtr CKKWHandler() const { return theCKKW; } /** * The pairs of PartonBin objects describing the partons which can * be extracted by the PartonExtractor object. */ const PartonPairVec & partonBins() const { return thePartonBins; } /** * The map of XComb objects indexed by the corresponding PartonBin * pair. */ const XCombMap & xCombs() const { return theXCombs; } /** * The Cuts object to be used for this reader. */ const Cuts & cuts() const { return *theCuts; } //@} protected: /** @name Functions for manipulating cache files. */ //@{ /** * Name of file used to cache the events from the reader in a * fast-readable form. If empty, no cache file will be generated. */ string cacheFileName() const { return theCacheFileName; } /** * Determines whether to apply cuts to events before converting them to * ThePEG format. */ bool cutEarly() const { return doCutEarly; } /** * File stream for the cache. */ CFile cacheFile() const { return theCacheFile;} /** * Open the cache file for reading. */ void openReadCacheFile(); /** * Open the cache file for writing. 
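 *
 * Illustrative note (assuming a typical LP64 platform with 4-byte int,
 * 8-byte long and double, and hence 8/16-byte pairs of ints/doubles):
 * each record written to the cache file is a flat block of eventSize(NUP)
 * bytes, e.g. for NUP = 4 this is
 * (4+1)*4 + (7*4+4)*8 + 4*8 + 2*4*8 + 16 + 2*8 = 404 bytes.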
*/ void openWriteCacheFile(); /** * Close the cache file. */ void closeCacheFile(); /** * Write the current event to the cache file. */ void cacheEvent() const; /** * Read an event from the cache file. Return false if something went wrong. */ bool uncacheEvent(); /** * Reopen a reader. If we have reached the end of an event file, * reopen it and issue a warning if we have used up a large fraction * of it. */ void reopen(); /** * Helper function to write a variable to a memory location */ template static char * mwrite(char * pos, const T & t, size_t n = 1) { std::memcpy(pos, &t, n*sizeof(T)); return pos + n*sizeof(T); } /** * Helper function to read a variable from a memory location */ template static const char * mread(const char * pos, T & t, size_t n = 1) { std::memcpy(&t, pos, n*sizeof(T)); return pos + n*sizeof(T); } //@} /** @name Auxiliary virtual methods which may be overridden by sub-classes. */ //@{ /** * Check the existence of a pair of PartonBin objects corresponding * to the current event. * * @return false if no pair of suitable PartonBin objects was found. */ virtual bool checkPartonBin(); /** * Create instances of all particles in the event and store them * in particleIndex. */ virtual void createParticles(); /** * Using the already created particles create a pair of * PartonBinInstance objects corresponding to the incoming * partons. Return the corresponding PartonBin objects. */ virtual tcPBPair createPartonBinInstances(); /** * Create instances of the incoming beams in the event and store * them in particleIndex. If no beam particles are included in the * event they are created from the run info. */ virtual void createBeams(); /** * Go through the mother indices and connect up the Particles. */ virtual void connectMothers(); //@} public: /** @name Functions used by the persistent I/O system. */ //@{ /** * Function used to write out object persistently. * @param os the persistent output stream written to. */ void persistentOutput(PersistentOStream & os) const; /** * Function used to read in object persistently. * @param is the persistent input stream read from. * @param version the version number of the object when written. */ void persistentInput(PersistentIStream & is, int version); //@} /** * Standard Init function used to initialize the interfaces. */ static void Init(); protected: /** @name Set functions for some variables not in the Les Houches accord. */ //@{ /** * The number of events in this reader. If less than zero the number * of events is unlimited. */ void NEvents(long x) { theNEvents = x; } /** * The map of XComb objects indexed by the corresponding PartonBin * pair. */ XCombMap & xCombs() { return theXCombs; } //@} /** @name Standard (and non-standard) Interfaced functions. */ //@{ /** * Initialize this object after the setup phase before saving an * EventGenerator to disk. * @throws InitException if object could not be initialized properly. */ virtual void doinit(); /** * Initialize this object. Called in the run phase just before * a run begins. */ virtual void doinitrun(); /** * Finalize this object. Called in the run phase just after a * run has ended. Used e.g. to write out statistics. */ virtual void dofinish() { close(); HandlerBase::dofinish(); } /** * Return true if this object needs to be initialized before all * other objects because it needs to extract PDFs from the event file. */ virtual bool preInitialize() const; /** * Called from doinit() to extract PDFs from the event file and add * the corresponding objects to the current EventGenerator. 
*/ virtual void initPDFs(); //@} protected: /** * The HEPRUP common block. */ HEPRUP heprup; /** * The HEPEUP common block. */ HEPEUP hepeup; /** * The ParticleData objects corresponding to the incoming particles. */ tcPDPair inData; /** * The PDFBase objects which have been used for the beam particles * when generating the events being read. Specified in the interface * or derived from PDFGUP and PDFSUP. */ pair inPDF; /** * The PDFBase object to be used in the subsequent generation. */ pair outPDF; /** * The PartonExtractor object used to construct remnants. */ PExtrPtr thePartonExtractor; /** * A pointer to a CascadeHandler to be used for CKKW-reweighting. */ tCascHdlPtr theCKKW; /** * The pairs of PartonBin objects describing the partons which can * be extracted by the PartonExtractor object. */ PartonPairVec thePartonBins; /** * The map of XComb objects indexed by the corresponding PartonBin * pair. */ XCombMap theXCombs; /** * The Cuts object to be used for this reader. */ CutsPtr theCuts; /** * The number of events in this reader. If less than zero the number * of events is unlimited. */ long theNEvents; /** * The number of events produced by this reader so far. Is reset * every time an event file is reopened. */ long position; /** * The number of times this reader has been reopened. */ int reopened; /** * The maximum number of events to scan to collect information about * processes and cross sections. If less than 0, all events will be * scanned. */ long theMaxScan; /** * Flag to tell whether we are in the process of scanning. */ bool scanning; /** * True if this is an active reader. */ bool isActive; /** * Name of file used to cache the events from the reader in a * fast-readable form. If empty, no cache file will be generated. */ string theCacheFileName; /** * Determines whether to apply cuts to events before converting them * to ThePEG format. */ bool doCutEarly; /** * Collect statistics for this reader. */ XSecStat stats; /** * Collect statistics for each individual process. */ StatMap statmap; /** * The pair of PartonBinInstance objects describing the current * incoming partons in the event. */ PBIPair thePartonBinInstances; /** * Association between ColourLines and colour indices in the current * translation. */ ObjectIndexer colourIndex; /** * Association between Particles and indices in the current * translation. */ ObjectIndexer particleIndex; /** * The instances of the beam particles for the current event. */ PPair theBeams; /** * The instances of the incoming particles to the sub process for * the current event. */ PPair theIncoming; /** * The instances of the outgoing particles from the sub process for * the current event. */ PVector theOutgoing; /** * The instances of the intermediate particles in the sub process for * the current event. */ PVector theIntermediates; /** * File stream for the cache. */ CFile theCacheFile; /** * The reweight objects modifying the weights of this reader. */ ReweightVector reweights; /** * The preweight objects modifying the weights of this reader. */ ReweightVector preweights; /** * The factor with which this reader was last pre-weighted. */ double preweight; /** * Should the event be reweighted by PDFs used by the PartonExtractor? */ bool reweightPDF; /** * Should PDFBase objects be constructed from the information in the * event file in the initialization? 
*/ bool doInitPDFs; /** * If this reader is to be used (possibly together with others) for * CKKW reweighting and veto, this should give the multiplicity of * outgoing particles in the highest multiplicity matrix element in * the group. */ int theMaxMultCKKW; /** * If this reader is to be used (possibly together with others) for * CKKW reweighting and veto, this should give the multiplicity of * outgoing particles in the lowest multiplicity matrix element in * the group. */ int theMinMultCKKW; /** * The weight multiplying the last read event due to PDF * reweighting, CKKW reweighting or assigned reweight and preweight * objects. */ double lastweight; /** * The optional weights associated to the last read events. */ map optionalWeights; + /** + * The event number + */ + long LHEeventnum; + /** * If the maximum cross section of this reader has been increased * with increaseMaxXSec(), this is the total factor with which it * has been increased. */ double maxFactor; /** * npLO for LesHouches merging */ int optionalnpLO; /** * npNLO for LesHouches merging */ int optionalnpNLO; /** * The (reweighted) XWGTUP value should be scaled with this cross * section when compared to the overestimated cross section. */ CrossSection weightScale; /** * Individual scales for different sub-processes if reweighted. */ vector xSecWeights; /** * Individual maximum weights for individual (possibly reweighted) * processes. */ map maxWeights; /** * Is set to true when getEvent() is called from skip(int). */ bool skipping; /** * Option for the treatment of the momenta supplied */ unsigned int theMomentumTreatment; /** * Set to true if warnings about possible weight incompatibilities * should be issued. */ bool useWeightWarnings; /** * Option to allow reopening of the file */ bool theReOpenAllowed; /** * Use the spin information */ bool theIncludeSpin; private: /** Access function for the interface. */ void setBeamA(long id); /** Access function for the interface. */ long getBeamA() const; /** Access function for the interface. */ void setBeamB(long id); /** Access function for the interface. */ long getBeamB() const; /** Access function for the interface. */ void setEBeamA(Energy e); /** Access function for the interface. */ Energy getEBeamA() const; /** Access function for the interface. */ void setEBeamB(Energy e); /** Access function for the interface. */ Energy getEBeamB() const; /** Access function for the interface. */ void setPDFA(PDFPtr); /** Access function for the interface. */ PDFPtr getPDFA() const; /** Access function for the interface. */ void setPDFB(PDFPtr); /** Access function for the interface. */ PDFPtr getPDFB() const; private: /** * Describe an abstract base class with persistent data. */ static AbstractClassDescription initLesHouchesReader; /** * Private and non-existent assignment operator. */ LesHouchesReader & operator=(const LesHouchesReader &) = delete; public: /** @cond EXCEPTIONCLASSES */ /** Exception class used by LesHouchesReader in case inconsistencies * are encountered. */ class LesHouchesInconsistencyError: public Exception {}; /** Exception class used by LesHouchesReader in case more events than available are requested. */ class LesHouchesReopenWarning: public Exception {}; /** Exception class used by LesHouchesReader in case reopening an event file fails. */ class LesHouchesReopenError: public Exception {}; /** Exception class used by LesHouchesReader in case there is information missing in the initialization phase. 
*/ class LesHouchesInitError: public InitException {}; /** @endcond */ }; /// Stream output for HEPEUP ostream & operator<<(ostream & os, const HEPEUP & h); } #include "ThePEG/Utilities/ClassTraits.h" namespace ThePEG { /** @cond TRAITSPECIALIZATIONS */ /** * This template specialization informs ThePEG about the * base class of LesHouchesReader. */ template <> struct BaseClassTrait: public ClassTraitsType { /** Typedef of the base class of LesHouchesReader. */ typedef HandlerBase NthBase; }; /** * This template specialization informs ThePEG about the name of the * LesHouchesReader class and the shared object where it is * defined. */ template <> struct ClassTraits : public ClassTraitsBase { /** * Return the class name. */ static string className() { return "ThePEG::LesHouchesReader"; } /** * Return the name of the shared library to be loaded to get access * to the LesHouchesReader class and every other class it uses * (except the base class). */ static string library() { return "LesHouches.so"; } }; /** @endcond */ } #endif /* THEPEG_LesHouchesReader_H */ diff --git a/MatrixElement/MEGroup.h b/MatrixElement/MEGroup.h --- a/MatrixElement/MEGroup.h +++ b/MatrixElement/MEGroup.h @@ -1,527 +1,535 @@ // -*- C++ -*- // // MEGroup.h is a part of ThePEG - Toolkit for HEP Event Generation // Copyright (C) 1999-2019 Leif Lonnblad // Copyright (C) 2009-2019 Simon Platzer // // ThePEG is licenced under version 3 of the GPL, see COPYING for details. // Please respect the MCnet academic guidelines, see GUIDELINES for details. // #ifndef ThePEG_MEGroup_H #define ThePEG_MEGroup_H // This is the declaration of the MEGroup class. #include "ThePEG/MatrixElement/MEBase.h" #include "ThePEG/Cuts/Cuts.fh" #include "MEGroup.fh" namespace ThePEG { /** * The MEGroup class represents a 'head' matrix element * in association with a group of dependent matrix elements. * It basically acts as a wrapper around its head matrix element, * while supplying additional information to the corresponding * StdXCombGroup object. * * @see StdXCombGroup * */ class MEGroup: public MEBase { public: /** @name Standard constructors and destructors. */ //@{ /** * Default constructor. */ MEGroup(); /** * Destructor. */ virtual ~MEGroup(); //@} public: /** @name Virtual functions from MEBase. */ //@{ /** * Return the order in \f$\alpha_S\f$ in which this matrix element * is given. */ virtual unsigned int orderInAlphaS() const { return head()->orderInAlphaS(); } /** * Return the order in \f$\alpha_{EM}\f$ in which this matrix * element is given. */ virtual unsigned int orderInAlphaEW() const { return head()->orderInAlphaEW(); } /** * Return the matrix element for the kinematical configuration * previously provided by the last call to setKinematics(), suitably * scaled by sHat() to give a dimension-less number. */ virtual double me2() const { return head()->me2(); } /** * Return the scale associated with the phase space point provided * by the last call to setKinematics(). */ virtual Energy2 scale() const { return head()->scale(); } /** * Return the value of \f$\alpha_S\f$ associated with the phase * space point provided by the last call to setKinematics(). This * version forwards to the head matrix element. */ virtual double alphaS() const { return head()->alphaS(); } /** * Return the value of \f$\alpha_{EM}\f$ associated with the phase * space point provided by the last call to setKinematics(). This * version forwards to the head matrix element. 
*/ virtual double alphaEM() const { return head()->alphaEM(); } /** * Set the types and momenta of the incoming and outgoing partons to * be used in subsequent calls to me() and colourGeometries() * according to the associated XComb object. If the function is * overridden in a sub class the new function must call the base * class one first. */ virtual void setKinematics() { MEBase::setKinematics(); head()->setKinematics(); } /** * Construct the spin information for the interaction */ virtual void constructVertex(tSubProPtr sub) { head()->constructVertex(sub); } /** * Construct the spin information for the interaction */ virtual void constructVertex(tSubProPtr sub, const ColourLines* cl) { head()->constructVertex(sub,cl); } /** * The number of internal degrees of freedom used in the matrix * element. */ virtual int nDim() const { return theNDim; } /** * Generate internal degrees of freedom given nDim() uniform random * numbers in the interval ]0,1[. To help the phase space generator, * the 'dSigHatDR' should be a smooth function of these numbers, * although this is not strictly necessary. The return value should * be true if the generation succeeded. If so the generated momenta * should be stored in the meMomenta() vector. */ virtual bool generateKinematics(const double * r); /** * Return true, if this matrix element expects * the incoming partons in their center-of-mass system */ virtual bool wantCMS () const { return head()->wantCMS(); } /** * Return the matrix element squared differential in the variables * given by the last call to generateKinematics(). */ virtual CrossSection dSigHatDR() const { return head()->dSigHatDR(); } /** * Return true, if this matrix element will generate momenta for the * incoming partons itself. The matrix element is required to store * the incoming parton momenta in meMomenta()[0,1]. No mapping in * tau and y is performed by the PartonExtractor object, if a * derived class returns true here. The phase space jacobian is to * include a factor 1/(x1 x2). */ virtual bool haveX1X2() const { return head()->haveX1X2(); } /** * Return true, if this matrix element provides the PDF * weight for the first incoming parton itself. */ virtual bool havePDFWeight1 () const { return head()->havePDFWeight1(); } /** * Return true, if this matrix element provides the PDF * weight for the second incoming parton itself. */ virtual bool havePDFWeight2 () const { return head()->havePDFWeight2(); } /** * Return true, if the XComb steering this matrix element * should keep track of the random numbers used to generate * the last phase space point */ virtual bool keepRandomNumbers() const { return head()->keepRandomNumbers(); } /** * Complete a SubProcess object using the internal degrees of freedom * generated in the last generateKinematics() (and possibly other * degrees of freedom which were integrated over in dSigHatDR()). This * version forwards the call to the head matrix element. */ virtual void generateSubCollision(SubProcess & sub) { head()->generateSubCollision(sub); } /** * Clear the information previously provided by a call to * setKinematics(...). */ virtual void clearKinematics(); /** * Add all possible diagrams with the add() function. 
*/ virtual void getDiagrams() const { head()->diagrams(); useDiagrams(head()); } /** * Return true, if this matrix element does not want to * make use of mirroring processes; in this case all * possible partonic subprocesses with a fixed assignment * of incoming particles need to be provided through the diagrams * added with the add(...) method. */ virtual bool noMirror () const { return head()->noMirror(); } /** * Return a Selector with possible colour geometries for the selected * diagram weighted by their relative probabilities. */ virtual Selector colourGeometries(tcDiagPtr diag) const { return head()->colourGeometries(diag); } /** * Select a ColourLines geometry. The default version returns a * colour geometry selected among the ones returned from * colourGeometries(tcDiagPtr). */ virtual const ColourLines & selectColourGeometry(tcDiagPtr diag) const { return head()->selectColourGeometry(diag); } /** * With the information previously supplied with the * setKinematics(...) method, a derived class may optionally * override this method to weight the given diagrams with their * (although certainly not physical) relative probabilities. */ virtual Selector diagrams(const DiagramVector & dv) const { return head()->diagrams(dv); } /** * Select a diagram. Default version uses diagrams(const * DiagramVector &) to select a diagram according to the * weights. This is the only method used that should be outside of * MEBase. */ virtual DiagramIndex diagram(const DiagramVector & dv) const { DiagramIndex res = head()->diagram(dv); return res; } /** * Set the XComb object to be used in the next call to * generateKinematics() and dSigHatDR(). */ virtual void setXComb(tStdXCombPtr xc) { MEBase::setXComb(xc); head()->setXComb(xc); } /** * If this matrix element is to be used together with others for * CKKW reweighting and veto, this should give the multiplicity of * outgoing particles in the highest multiplicity matrix element in * the group. */ virtual int maxMultCKKW() const { return head()->maxMultCKKW(); } /** * If this matrix element is to be used together with others for * CKKW reweighting and veto, this should give the multiplicity of * outgoing particles in the lowest multiplicity matrix element in * the group. */ virtual int minMultCKKW() const { return head()->minMultCKKW(); } /** * If this matrix element is to be used together with others for * CKKW reweighting and veto, this will set the multiplicity of * outgoing particles in the highest multiplicity matrix element in * the group. */ virtual void maxMultCKKW(int mult) { head()->maxMultCKKW(mult); } /** * If this matrix element is to be used together with others for * CKKW reweighting and veto, this will set the multiplicity of * outgoing particles in the lowest multiplicity matrix element in * the group. */ virtual void minMultCKKW(int mult) { head()->minMultCKKW(mult); } /** * Inform this matrix element that a new phase space * point is about to be generated, so all caches should * be flushed. */ virtual void flushCaches() { head()->flushCaches(); } /** * Collect information on the last evaluated phase space * point for verification or debugging purposes. This is * only called if the StdXCombGroup did accumulate * a non-zero cross section from this ME group. */ virtual void lastEventStatistics() {} + + /** + * Return true, if this configuration of cross sections should not + * be included due to their relative magnitude. Arguments are head + * cross section and dependent cross section, including all + * reweights. 
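+ *
+ * (Illustrative only: an override might, for instance, veto dependent
+ * contributions that dwarf the head cross section,
+ * \code
+ * virtual bool discard(const CrossSection& xsHead, const CrossSection& xsDep) const {
+ *   return abs(xsDep) > 1.0e6*abs(xsHead);  // hypothetical threshold
+ * }
+ * \endcode
+ * whereas the default implementation below never discards.)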
+ */ + virtual bool discard(const CrossSection&, const CrossSection&) const { return false; } //@} public: /** * Return the head matrix element. */ tMEPtr head() const { return theHead; } /** * Visit the dependent matrix elements */ const MEVector& dependent() const { return theDependent; } /** * Set the head matrix element. */ void head(tMEPtr me) { theHead = me; } /** * Access the dependent matrix elements */ MEVector& dependent() { return theDependent; } /** * Return the random number offset to access the random * numbers provided for the given matrix element to generate * dependent kinematics. */ int dependentOffset(tMEPtr dep) const; /** * For the given event generation setup return an xcomb object * appropriate to this matrix element. */ virtual StdXCombPtr makeXComb(Energy newMaxEnergy, const cPDPair & inc, tEHPtr newEventHandler,tSubHdlPtr newSubProcessHandler, tPExtrPtr newExtractor, tCascHdlPtr newCKKW, const PBPair & newPartonBins, tCutsPtr newCuts, const DiagramVector & newDiagrams, bool mir, const PartonPairVec& allPBins, tStdXCombPtr newHead = tStdXCombPtr(), tMEPtr newME = tMEPtr()); /** * Create a dependent xcomb object to be used * for the given process steered by the head object and * dependent matrix element. */ virtual vector makeDependentXCombs(tStdXCombPtr xcHead, const cPDVector& proc, tMEPtr depME, const PartonPairVec& allPBins) const; /** * Fill the projectors object of xcombs to choose subprocesses * different from the one currently integrated. */ virtual void fillProjectors() { head()->fillProjectors(); } /** * Return true, if projectors will be used */ virtual bool willProject() const { return false; } /** * Return true, if this MEGroup will reweight the contributing cross * sections. */ virtual bool groupReweighted() const { return false; } /** * Reweight the head cross section */ virtual double reweightHead(const vector&) { return 1.; } /** * Reweight the dependent cross section */ virtual double reweightDependent(tStdXCombPtr, const vector&) { return 1.; } /** * Return true, if SubProcessGroups should be * set up from this MEGroup. If not, a single SubProcess * is constructed from the data provided by the * head matrix element. */ virtual bool subProcessGroups() const { return true; } public: /** * Return true, if the same additional random numbers * should be presented to any of the dependent * matrix elements. */ virtual bool uniformAdditional() const = 0; /** * Given a process from the head matrix element, * return a list of diagrams which should be considered for * the given dependent matrix element. */ virtual MEBase::DiagramVector dependentDiagrams(const cPDVector& proc, tMEPtr depME) const = 0; public: /** @name Functions used by the persistent I/O system. */ //@{ /** * Function used to write out object persistently. * @param os the persistent output stream written to. */ void persistentOutput(PersistentOStream & os) const; /** * Function used to read in object persistently. * @param is the persistent input stream read from. * @param version the version number of the object when written. */ void persistentInput(PersistentIStream & is, int version); //@} /** * Standard Init function used to initialize the interfaces. */ static void Init(); protected: /** @name Standard Interfaced functions. */ //@{ /** * Initialize this object after the setup phase before saving an * EventGenerator to disk. * @throws InitException if object could not be initialized properly. */ virtual void doinit(); /** * Initialize this object. 
Called in the run phase just before * a run begins. */ virtual void doinitrun(); /** * Rebind pointer to other Interfaced objects. Called in the setup phase * after all objects used in an EventGenerator has been cloned so that * the pointers will refer to the cloned objects afterwards. * @param trans a TranslationMap relating the original objects to * their respective clones. * @throws RebindException if no cloned object was found for a given * pointer. */ virtual void rebind(const TranslationMap & trans); /** * Return a vector of all pointers to Interfaced objects used in this * object. * @return a vector of pointers. */ virtual IVector getReferences(); //@} private: /** * The head matrix element. */ MEPtr theHead; /** * The dependent matrix elements. */ MEVector theDependent; /** * Offsets to access additional random numbers * required by the dependent matrix elements. */ map theNDimMap; /** * The total number of random numbers required. */ int theNDim; private: /** * Describe a class with persistent data. */ static AbstractClassDescription initMEGroup; /** * Private and non-existent assignment operator. */ MEGroup & operator=(const MEGroup &) = delete; }; } namespace ThePEG { /** @cond TRAITSPECIALIZATIONS */ /** * This template specialization informs ThePEG about the base class of * MEGroup. */ template <> struct BaseClassTrait { /** Typedef of the base class of MEGroup. */ typedef MEBase NthBase; }; /** * This template specialization informs ThePEG about the name of the * MEGroup class. */ template <> struct ClassTraits: public ClassTraitsBase { /** Return the class name. */ static string className() { return "ThePEG::MEGroup"; } }; /** @endcond */ } #endif /* ThePEG_MEGroup_H */ diff --git a/Vectors/HepMCTraits.h b/Vectors/HepMCTraits.h --- a/Vectors/HepMCTraits.h +++ b/Vectors/HepMCTraits.h @@ -1,354 +1,362 @@ // -*- C++ -*- // // HepMCTraits.h is a part of ThePEG - Toolkit for HEP Event Generation // Copyright (C) 1999-2019 Leif Lonnblad // // ThePEG is licenced under version 3 of the GPL, see COPYING for details. // Please respect the MCnet academic guidelines, see GUIDELINES for details. // #ifndef ThePEG_HepMCTraits_H #define ThePEG_HepMCTraits_H #ifdef HAVE_HEPMC3 #include "HepMC3/GenEvent.h" namespace HepMC3 { class GenEvent; class GenParticle; class GenVertex; class GenPdfInfo; } namespace HepMC3 { using PdfInfo=GenPdfInfo; using Polarization=std::pair; } namespace HepMC=HepMC3; #else #include "HepMC/GenEvent.h" namespace HepMC { class GenEvent; class GenParticle; class GenVertex; class Polarization; #ifndef HEPMC_GENPDFINFO_H class PdfInfo; #endif } #endif - + namespace ThePEG { /** * HepMCTraitsBase is a convenient base class for specializing the * HepMCTraits class to deal with different flavours of HepMC in the * HepMCConverter class. The default version will work for the CLHEP * implementation of HepMC. To use the HepMCConverter class for any * flavour of HepMC you have to specialize the HepMCTraits class * accordingly, possibly inheriting the functionality from the * HepMCTraitsBase class and only overriding the functions and * typedefs which are different. For the CLHEP flavour of HepMC you * only need to do template<> struct * HepMCTraits<HepMC::GenEvent>: public * HepMCTraitsBase<HepMC::GenEvent,HepMC::GenParticle,HepMC::GenVertex, * HepMC::Polarization> {}; somewhere inside the ThePEG * namespace. The boolean template argument determines whether the * HepMC implementation is specifying units or not. 
*/ template struct HepMCTraitsBase { /** Typedef of the particle class. */ typedef HepMCParticleT ParticleT; /** Typedef of the event class. */ typedef HepMCEventT EventT; /** Typedef of the vertex class. */ typedef HepMCVertexT VertexT; /** Typedef of the polarization class. */ typedef HepMCPolarizationT PolarizationT; /** Typedef of the PdfInfo class. */ typedef HepMCPdfInfoT PdfInfoT; /** Typedef of a particle pointer */ typedef HepMCParticlePtrT ParticlePtrT; /** Typedef of a vertex pointer */ typedef HepMCVertexPtrT VertexPtrT; /** Create an event object with number \a evno and \a weight. */ static EventT * newEvent(long evno, double weight, const map& optionalWeights) { EventT * e = new EventT(); e->set_event_number(evno); std::vector wnames; std::vector wvalues; - + wnames.push_back("Default"); wvalues.push_back(weight); for ( map::const_iterator w = optionalWeights.begin(); w != optionalWeights.end(); ++w ) { wnames.push_back(w->first); wvalues.push_back(w->second); } #ifdef HAVE_HEPMC3 e->run_info()->set_weight_names(wnames); e->weights()=wvalues; -#else +#else #ifdef HEPMC_HAS_NAMED_WEIGHTS - for (size_t i=0;iweights()[wnames[i]] = wvalues[i]; + for (size_t i=0;iweights()[wnames[i]] = wvalues[i]; #else e->weights()=wvalues; #endif #endif - - + + return e; } /** Reset event weight and number of a re-used GenEvent. */ static void resetEvent(EventT * e, long evno, double weight, const map& optionalWeights) { e->set_event_number(evno); e->weights().clear(); std::vector wnames; std::vector wvalues; - + wnames.push_back("Default"); wvalues.push_back(weight); for ( map::const_iterator w = optionalWeights.begin(); w != optionalWeights.end(); ++w ) { wnames.push_back(w->first); wvalues.push_back(w->second); } #ifdef HAVE_HEPMC3 e->run_info()->set_weight_names(wnames); e->weights()=wvalues; -#else +#else #ifdef HEPMC_HAS_NAMED_WEIGHTS - for (size_t i=0;iweights()[wnames[i]] = wvalues[i]; + for (size_t i=0;iweights()[wnames[i]] = wvalues[i]; #else e->weights()=wvalues; #endif #endif } /** * Return true if this version of HepMC accept user-defined units. */ static bool hasUnits() { -#ifdef HEPMC_HAS_UNITS +#ifdef HEPMC_HAS_UNITS return true; #else return false; #endif } /** * Return the energy unit used in the installed version of HepMC. */ static Energy defaultEnergyUnit() { #ifndef HEPMC_HAS_UNITS return GeV; #else return HepMC::Units::default_momentum_unit() == HepMC::Units::GEV? GeV: MeV; #endif } /** * Return the length unit used in the installed version of HepMC. */ static Length defaultLengthUnit() { #ifndef HEPMC_HAS_UNITS return millimeter; #else return HepMC::Units::default_length_unit() == HepMC::Units::MM? millimeter: 10.0*millimeter; #endif } /** * Return the momentum unit used by a given GenEvent object. If * HepMC does not support units this must return GeV. */ static Energy momentumUnit(const EventT & e) { -#ifdef HEPMC_HAS_UNITS +#ifdef HEPMC_HAS_UNITS return e.momentum_unit() == HepMC::Units::MEV? MeV: GeV; #else return GeV; #endif } /** * Return the length unit used by a given GenEvent object. If * HepMC does not support units this must return millimeter. */ static Length lengthUnit(const EventT & e) { -#ifdef HEPMC_HAS_UNITS +#ifdef HEPMC_HAS_UNITS return e.length_unit() == HepMC::Units::CM? centimeter: millimeter; #else return millimeter; #endif } /** * Set the units to be used by the given GenEvent object. If * HepMC does not support units this should be a no-op. 
*/ -#ifdef HEPMC_HAS_UNITS +#ifdef HEPMC_HAS_UNITS static void setUnits(EventT & e, Energy momu, Length lenu) { e.use_units(momu == MeV? HepMC::Units::MEV: HepMC::Units::GEV, lenu == centimeter? HepMC::Units::CM: HepMC::Units::MM); } #else static void setUnits(EventT &, Energy, Length) {} #endif /** Set the \a scale, \f$\alpha_S\f$ (\a aS) and \f$\alpha_{EM}\f$ (\a aEM) for the event \a e. The scale will be scaled with \a unit before given to the GenEvent. */ static void setScaleAndAlphas(EventT & e, Energy2 scale, double aS, double aEM, Energy unit) { e.set_event_scale(sqrt(scale)/unit); e.set_alphaQCD(aS); e.set_alphaQED(aEM); } /** Set the primary vertex, \a v, for the event \a e. */ static void setSignalProcessVertex(EventT & e, VertexPtrT v) { e.set_signal_process_vertex(v); } /** Set a vertex, \a v, for the event \a e. */ static void addVertex(EventT & e, VertexPtrT v) { e.add_vertex(v); } /** Create a new particle object with momentum \a p, PDG number \a id and status code \a status. The momentum will be scaled with \a unit which according to the HepMC documentation should be GeV. */ static ParticlePtrT newParticle(const Lorentz5Momentum & p, long id, int status, Energy unit) { // Note that according to the documentation the momentum is stored in a // HepLorentzVector in GeV (event though the CLHEP standard is MeV). LorentzVector p_scalar = p/unit; ParticlePtrT genp = new ParticleT(p_scalar, id, status); genp->setGeneratedMass(p.mass()/unit); return genp; } /** Set the polarization directions, \a the and \a phi, for particle \a p. */ static void setPolarization(ParticleT & genp, double the, double phi) { genp.set_polarization(PolarizationT(the, phi)); } /** Set the colour line (with index \a indx) to \a coline for particle \a p. */ static void setColourLine(ParticleT & p, int indx, int coline) { p.set_flow(indx, coline); } /** Create a new vertex. */ static VertexPtrT newVertex() { return new VertexT(); } /** Add an incoming particle, \a p, to the vertex, \a v. */ static void addIncoming(VertexT & v, ParticlePtrT p) { v.add_particle_in(p); } /** Add an outgoing particle, \a p, to the vertex, \a v. */ static void addOutgoing(VertexT & v, ParticlePtrT p) { v.add_particle_out(p); } /** Set the position \a p for the vertex, \a v. The length will be scaled with \a unit which normally should be millimeters. */ static void setPosition(VertexT & v, const LorentzPoint & p, Length unit) { LorentzVector p_scaled = p/unit; v.set_position(p_scaled); } /** Set the beam particles for the event.*/ static void setBeamParticles(EventT & e, ParticlePtrT p1, ParticlePtrT p2) { e.set_beam_particles(p1,p2); p1->set_status(4); p2->set_status(4); } /** Set the PDF info for the event. */ #ifdef HEPMC_HAS_PDF_INFO static void setPdfInfo(EventT & e, int id1, int id2, double x1, double x2, double scale, double xf1, double xf2) { #ifdef HAVE_HEPMC3 - + HepMC::GenPdfInfoPtr pdfinfo = std::make_shared(); pdfinfo->set(id1, id2, x1, x2, scale, xf1, xf2); e.set_pdf_info(pdfinfo); #else e.set_pdf_info(PdfInfoT(id1, id2, x1, x2, scale, xf1, xf2)); #endif } #else static void setPdfInfo(EventT &, int, int, double, double, double, double, double) {} #endif - /** Set the cross section info for the event. */ -#ifdef HEPMC_HAS_CROSS_SECTION +/** Set the cross section info for the event. 
*/ +#ifdef HAVE_HEPMC3 + static void setCrossSection(EventT & ev, double xs, double xserr) { -#ifdef HAVE_HEPMC3 - std::shared_ptr x =std::make_shared(); + std::shared_ptr x = std::make_shared(); x->set_cross_section(xs,xserr); ev.set_cross_section(x); + } + #else - HepMC::GenCrossSection x; - x.set_cross_section(xs, xserr); - ev.set_cross_section(x); -#endif - } -#else - static void setCrossSection(EventT &, double, double) {} + #ifdef HEPMC_HAS_CROSS_SECTION + + static void setCrossSection(EventT & ev, double xs, double xserr) { + HepMC::GenCrossSection x; + x.set_cross_section(xs, xserr); + ev.set_cross_section(x); + } + + #else + + static void setCrossSection(EventT &, double, double) {} + + #endif + #endif }; /** * The HepMCTraits class is used to deal with different flavours of * HepMC in the HepMCConverter class. To use the HepMCConverter class * for any flavour of HepMC you have to specialize the * HepMCTraits class accordingly, possibly inheriting the * functionality from the HepMCTraitsBase class and only overriding * the functions and typedefs which are different. For the CLHEP * flavour of HepMC you only need to do template<> struct * HepMCTraits<HepMC::GenEvent>: public * HepMCTraitsBase<HepMC::GenEvent,HepMC::GenParticle,HepMC::GenVertex, * HepMC::Polarization,HepMC::PdfInfo> {}; somewhere inside the ThePEG * namespace. */ template struct HepMCTraits {}; } #endif -