diff --git a/examples/CMakeLists.txt b/examples/CMakeLists.txt
index df151c1..a97558a 100644
--- a/examples/CMakeLists.txt
+++ b/examples/CMakeLists.txt
@@ -1,89 +1,91 @@
add_subdirectory(ProgOpts)

# List of files (without extensions) that need pre-processing before compiling
list(APPEND EXAMPLE_SOURCES_CONFIGURE
    B3piKMatrixMassProj
    B3piKMatrixPlots
    KMatrixDto3pi
    KMatrixExample
    PlotKMatrixTAmp
    )

# List of files (without extensions) that do not need pre-processing before compiling
list(APPEND EXAMPLE_SOURCES
    CalcChiSq
    FlatPhaseSpace
    FlatPhaseSpace-CustomMasses
    FlatSqDalitz
    FlatSqDalitz-CustomMasses
    GenFit3K
    GenFit3KS
    GenFit3pi
    GenFitBelleCPKpipi
    GenFitDpipi
    GenFitDs2KKpi
    GenFitEFKLLM
    GenFitIncoherent_Bs2KSKpi
    GenFitKpipi
    GenFitNoDP
    GenFitNoDPMultiDim
    GenFitTimeDep
    GenFitTimeDep_Bs2KSKpi
    GenFitTimeDep_Bd2Dpipi
+    GenFitTimeDep_Bd2Dpipi_SignalOnly
    MergeDataFiles
    mixedSampleTest
    PlotResults
    point2PointTestSample
    QuasiFlatSqDalitz
    QuasiFlatSqDalitz-CustomMasses
    ResultsExtractor
    SimFitCoordinator
+    SimFitCoordinator_Bd2Dpipi
    SimFitTask
    SimFitTaskRooFit
    )

if(NOT LAURA_BUILD_ROOFIT_TASK)
    list(REMOVE_ITEM EXAMPLE_SOURCES SimFitTaskRooFit)
endif()

# Configure (if appropriate), build and install examples
foreach( _example ${EXAMPLE_SOURCES_CONFIGURE})
    configure_file(${_example}.cc.in ${_example}.cc @ONLY)
    add_executable(${_example} ${CMAKE_CURRENT_BINARY_DIR}/${_example}.cc)
    target_link_libraries(${_example} PRIVATE Laura++ ProgramOptions)
    install(TARGETS ${_example} RUNTIME DESTINATION ${CMAKE_INSTALL_BINDIR})
endforeach()

foreach( _example ${EXAMPLE_SOURCES})
    add_executable(${_example} ${_example}.cc)
    target_link_libraries(${_example} PRIVATE Laura++ ProgramOptions)
    install(TARGETS ${_example} RUNTIME DESTINATION ${CMAKE_INSTALL_BINDIR})
endforeach()

# Also install the python script version of GenFit3pi
configure_file(GenFit3pi.py.in GenFit3pi.py @ONLY)
install(PROGRAMS ${CMAKE_CURRENT_BINARY_DIR}/GenFit3pi.py DESTINATION ${CMAKE_INSTALL_BINDIR})

# Also install the files to configure the models, coeffs, etc.
# Some need pre-processing
list(APPEND EXAMPLE_CONF_CONFIGURE
    KMatrixDto3pi.json
    )

foreach( _json ${EXAMPLE_CONF_CONFIGURE})
    configure_file(${_json}.in ${_json} @ONLY)
endforeach()

list(APPEND EXAMPLE_CONF
    B3piKMNoAdler.json
    B3piKMPoles.json
    B3piKMSVPs.json
    B3piKMatrixCoeff.json
    FOCUSD3pi.json
    GenFit3pi.json
    KMatrixKPiCoeff.json
    KMatrixPiPiCoeff.json
    )

list(TRANSFORM EXAMPLE_CONF PREPEND ${CMAKE_CURRENT_SOURCE_DIR}/)
list(TRANSFORM EXAMPLE_CONF_CONFIGURE PREPEND ${CMAKE_CURRENT_BINARY_DIR}/)
list(APPEND EXAMPLE_CONF ${EXAMPLE_CONF_CONFIGURE})

install(FILES ${EXAMPLE_CONF} DESTINATION ${CMAKE_INSTALL_DATADIR}/${CMAKE_PROJECT_NAME})
diff --git a/examples/GenFitTimeDep_Bd2Dpipi.cc b/examples/GenFitTimeDep_Bd2Dpipi.cc
index f008371..e8a7e4d 100644
--- a/examples/GenFitTimeDep_Bd2Dpipi.cc
+++ b/examples/GenFitTimeDep_Bd2Dpipi.cc
@@ -1,1076 +1,1082 @@
#include "TimeDep_Dpipi_ProgOpts.hh"

#include "Lau1DHistPdf.hh"
#include "Lau1DCubicSpline.hh"
#include "LauBinnedDecayTimeEfficiency.hh"
#include "LauBkgndDPModel.hh"
#include "LauCrystalBallPdf.hh"
#include "LauDaughters.hh"
#include "LauDecayTime.hh"
#include "LauDecayTimePdf.hh"
#include "LauDecayTimePhysicsModel.hh"
#include "LauDecayTimeResolution.hh"
#include "LauEffModel.hh"
#include "LauExponentialPdf.hh"
#include "LauFlavTag.hh"
#include "LauGounarisSakuraiRes.hh"
#include "LauIsobarDynamics.hh"
#include "LauLHCbNR.hh"
#include "LauMagPhaseCoeffSet.hh"
#include "LauRandom.hh"
#include "LauRealImagCoeffSet.hh"
#include "LauResonanceMaker.hh"
#include "LauSplineDecayTimeEfficiency.hh"
#include "LauSumPdf.hh"
#include "LauTimeDepFitModel.hh"
#include "LauVetoes.hh"

#include "TFile.h"
#include "TH2.h"
#include "TRandom.h"
#include "TString.h"
#include "TSystem.h"
#include "TF1.h"
#include "TCanvas.h"

#include "nlohmann/json.hpp"

#include
#include
#include
#include
#include
#include

// Helper function for creating Double-Crystal-Ball PDFs
LauAbsPdf* makeDCBPDF( const TString& varName, const Double_t varMin, const Double_t varMax, const TString& componentName, const std::map paramVals );

// Helper functions for creating sets of DTA splines
using LauCubicSplineDecayTimeEfficiencyPtr = std::unique_ptr>;
using LauSixthSplineDecayTimeEfficiencyPtr = std::unique_ptr>;

auto createSplineDTASet( const TString& jsonFileName, const TString& dModeStr, const TString& runStr, const TString& suffix, const TString& signalName, const std::vector& bkgndNames )
{
    std::cout << "INFO - creating QFS signal DTA model" << std::endl;
    const TString controlSplineName { Form("%s_Kpi_%s%s",signalName.Data(),runStr.Data(),suffix.Data()) };
    const TString controlEffName { Form("dteff_QFS_%s",runStr.Data()) };
    auto controlSpline = Lau1DCubicSpline::readFromJson( jsonFileName, controlSplineName );
    LauCubicSplineDecayTimeEfficiencyPtr controlEff { std::make_unique>( controlEffName, std::move( controlSpline ) ) };

    LauSixthSplineDecayTimeEfficiencyPtr signalEff;
    if ( dModeStr != "Kpi" ) {
        std::cout << "INFO - creating " << dModeStr << " signal DTA model" << std::endl;
        const TString signalSplineName { Form("%s_%s_%s%s",signalName.Data(),dModeStr.Data(),runStr.Data(),suffix.Data()) };
        const TString signalEffName { Form("dteff_CP%s_%s",dModeStr.Data(),runStr.Data()) };
        auto signalSpline = Lau1DCubicSpline::readFromJson( jsonFileName, signalSplineName );
        signalEff = std::make_unique>( signalEffName, *controlEff, std::move( signalSpline ) );
    }

    std::map bkgndEffs;
    for ( auto& name : bkgndNames ) {
        std::cout << "INFO - creating " << dModeStr << " " << name << " background DTA model" <<
            std::endl;
        const TString bkgndSplineName { Form("%s_%s_%s%s",name.Data(),dModeStr.Data(),runStr.Data(),suffix.Data()) };
        const TString bkgndEffName { Form("%s_DTA_ratio_%s_%s",name.Data(),runStr.Data(),dModeStr.Data()) };
        auto bkgndSpline = Lau1DCubicSpline::readFromJson( jsonFileName, bkgndSplineName );
        auto bkgndEff = std::make_unique>( bkgndEffName, *controlEff, std::move( bkgndSpline ) );
        bkgndEffs.emplace( std::make_pair( name, std::move(bkgndEff) ) );
    }

    return std::make_tuple( std::move(controlEff), std::move(signalEff), std::move(bkgndEffs) );
}

auto createSplineDTASetPCBkgnds( const TString& jsonFileName, const TString& dModeStr, const TString& runStr, const TString& suffix, const TString& signalName, const std::vector& bkgndNames )
{
    std::cout << "INFO - creating QFS Dpi DTA model" << std::endl;
    const TString splineNameDpi { Form("Dpi_%s",runStr.Data()) };
    auto splineDpi = Lau1DCubicSpline::readFromJson( jsonFileName, splineNameDpi );

    // used in case we need a ratio that is just unity across the range
    const std::vector& xVals { splineDpi->getXValues() };
    std::vector yVals;
    yVals.assign( xVals.size(), 1.0 );

    const TString effNameDpi { Form("dteff_QFS_Dpi_%s",runStr.Data()) };
    LauCubicSplineDecayTimeEfficiencyPtr effDpi { std::make_unique>( effNameDpi, std::move( splineDpi ) ) };

    std::map bkgndEffs;
    for ( auto& name : bkgndNames ) {
        std::cout << "INFO - creating " << dModeStr << " " << name << " background DTA model" << std::endl;

        std::unique_ptr signalRatioSpline;
        if ( dModeStr == "Kpi" ) {
            signalRatioSpline = std::make_unique( xVals, yVals );
        } else {
            const TString signalRatioSplineName { Form("%s_%s_%s%s",signalName.Data(),dModeStr.Data(),runStr.Data(),suffix.Data()) };
            signalRatioSpline = Lau1DCubicSpline::readFromJson( jsonFileName, signalRatioSplineName );
        }

        const TString bkgndEffName { Form("%s_DTA_ratio_%s_%s",name.Data(),runStr.Data(),dModeStr.Data()) };
        auto bkgndEff = std::make_unique>( bkgndEffName, *effDpi, std::move( signalRatioSpline ) );
        bkgndEffs.emplace( std::make_pair( name, std::move(bkgndEff) ) );
    }

    return std::make_tuple( std::move(effDpi), std::move(bkgndEffs) );
}

class BgTagDecayFlvAsym {
    private:
        const Double_t eff_decayB0_taggedB0 {0.};
        const Double_t eff_decayB0_taggedB0bar {0.};
        const Double_t eff_decayB0bar_taggedB0 {0.};
        const Double_t eff_decayB0bar_taggedB0bar {0.};
        const Double_t eff_decayB0_ave {0.};
        const Double_t eff_decayB0bar_ave {0.};
        const Double_t eff_decayB0_delta {0.};
        const Double_t eff_decayB0bar_delta {0.};
    public:
        BgTagDecayFlvAsym() = default;
        BgTagDecayFlvAsym( // This constructor assumes a tagging efficiency of 100%
            const Double_t n_decayB0_taggedB0,
            const Double_t n_decayB0_taggedB0bar,
            const Double_t n_decayB0bar_taggedB0,
            const Double_t n_decayB0bar_taggedB0bar
            ):
            eff_decayB0_taggedB0      {n_decayB0_taggedB0     /(n_decayB0_taggedB0+n_decayB0_taggedB0bar)},
            eff_decayB0_taggedB0bar   {n_decayB0_taggedB0bar  /(n_decayB0_taggedB0+n_decayB0_taggedB0bar)},
            eff_decayB0bar_taggedB0   {n_decayB0bar_taggedB0  /(n_decayB0bar_taggedB0+n_decayB0bar_taggedB0bar)},
            eff_decayB0bar_taggedB0bar{n_decayB0bar_taggedB0bar/(n_decayB0bar_taggedB0+n_decayB0bar_taggedB0bar)},
            eff_decayB0_ave      {0.5*(eff_decayB0_taggedB0 + eff_decayB0_taggedB0bar)},
            eff_decayB0bar_ave   {0.5*(eff_decayB0bar_taggedB0 + eff_decayB0bar_taggedB0bar)},
            eff_decayB0_delta    {eff_decayB0_taggedB0 - eff_decayB0_taggedB0bar},
            eff_decayB0bar_delta {eff_decayB0bar_taggedB0 - eff_decayB0bar_taggedB0bar}
            {}
        BgTagDecayFlvAsym( const nlohmann::json& j ) :
            BgTagDecayFlvAsym( j[0], j[1], j[2], j[3] )
            {}
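        // Illustrative note added for clarity (not part of the original patch): with
        // hypothetical counts n_decayB0_taggedB0 = 900 and n_decayB0_taggedB0bar = 100,
        // the constructor above gives
        //   eff_decayB0_taggedB0    = 900 / (900 + 100) = 0.90
        //   eff_decayB0_taggedB0bar = 100 / (900 + 100) = 0.10
        //   eff_decayB0_delta       = 0.90 - 0.10       = 0.80
        //   eff_decayB0_ave         = 0.5 * (0.90 + 0.10) = 0.50
        // The average is 0.5 by construction, since the two efficiencies sum to unity
        // under the 100% tagging-efficiency assumption noted above.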
std::pair decayB0() const {return std::make_pair(eff_decayB0_taggedB0 ,eff_decayB0_taggedB0bar );} std::pair decayB0bar() const {return std::make_pair(eff_decayB0bar_taggedB0,eff_decayB0bar_taggedB0bar);} std::pair aveDeltaB0() const {return std::make_pair(eff_decayB0_ave , eff_decayB0_delta) ;} std::pair aveDeltaB0bar() const {return std::make_pair(eff_decayB0bar_ave, eff_decayB0bar_delta);} //TODO add constructor for if tagging eff isn't 100%? }; int main(const int argc, const char ** argv) { const TestDpipi_ProgramSettings settings{argc,argv}; if ( ! settings.parsedOK ) { return EXIT_FAILURE; } if ( settings.helpRequested ) { return EXIT_SUCCESS; } if ( settings.dataFit and settings.command == Command::Generate ) { std::cerr << "I can't generate data! Wait for Run 3" << std::endl; return EXIT_FAILURE; } if ( settings.dataFit and not settings.blindFit and not settings.fixPhiMix ) { std::cerr << "We don't have permission to do unblinded fits yet!" << std::endl; return EXIT_FAILURE; } if ( settings.run == 0 or settings.run > 2 ) { std::cerr << "The Run number must be either 1 or 2" << std::endl; return EXIT_FAILURE; } // check that all required data files are present std::string bkgndsJsonFile { "Bd2D0pipi_Bkgnds" }; std::string signalYieldsJsonFile { "Bd2D0pipi_SignalYields" }; if ( settings.setNegToZero ) { bkgndsJsonFile += "_ZeroNegYields"; signalYieldsJsonFile += "_ZeroNegYields"; } else if ( ! settings.constrainedYields ) { bkgndsJsonFile += "_WithNegYields"; signalYieldsJsonFile += "_WithNegYields"; } if ( ! settings.scaleYields ) { bkgndsJsonFile += "_AddNeglectedToComb"; } bkgndsJsonFile += ".json"; signalYieldsJsonFile += ".json"; const std::string dpModelJsonFile { "Bd2D0pipi_DP_Model_Coeffs_Vetoes.json" }; const std::string dtModelJsonFile { "Bd2D0pipi_DecayTime.json" }; const std::string dtaModelJsonFile { "Bd2D0pipi_DecayTime_Acceptance.json" }; const std::string taggingCalibJsonFile { "Bd2D0pipi_TaggingCalib.json" }; const std::array jsonFiles { bkgndsJsonFile, dpModelJsonFile, dtModelJsonFile, dtaModelJsonFile, signalYieldsJsonFile, taggingCalibJsonFile }; for ( auto& jsonFile : jsonFiles ) { if ( std::filesystem::exists( jsonFile ) ) { continue; } std::cerr << "WARNING : couldn't find the json file \"" << jsonFile << "\" attempting to fetch from eos ..." << std::endl; if ( system( Form("xrdcp %s/%s/%s .", settings.eosRoot.c_str(), settings.eosConfigDir.c_str(), jsonFile.c_str()) ) != 0 ) { std::cerr << "FATAL : couldn't get the json file from remote!" 
<< std::endl; return EXIT_FAILURE; } } const UInt_t runNo = settings.run; const std::string runStr = Form("Run%u", runNo); const TString dModeStr { TString{settings.directory}.ReplaceAll("B2Dhh_","") }; LauRandom::setSeed(settings.RNGseed); LauDaughters* daughtersB0bar = new LauDaughters("B0_bar", "pi+", "pi-", "D0", kTRUE); LauDaughters* daughtersB0 = new LauDaughters("B0", "pi+", "pi-", "D0_bar",kTRUE); // efficiency auto vetoes { LauVetoes::readFromJson( dpModelJsonFile, "Bd2D0pipi_Vetoes" ) }; LauEffModel* effModelB0bar = new LauEffModel(daughtersB0bar, vetoes.get()); LauEffModel* effModelB0 = new LauEffModel(daughtersB0, vetoes.get()); TFile* effFile(nullptr); TH2* effHist(nullptr); if (settings.directory == "B2Dhh_Kpi"){ effFile = TFile::Open(Form("%s/%s/eff_map_mprime_thprime_model_d2kpi_run%u.root",settings.eosRoot.c_str(),settings.eosConfigDir.c_str(),runNo)); effHist = dynamic_cast(effFile->Get(Form("eff_map_mprime_thprime_model_d2kpi_run%u",runNo))); } else if (settings.directory == "B2Dhh_KK"){ effFile = TFile::Open(Form("%s/%s/eff_map_mprime_thprime_model_d2kk_run%u.root",settings.eosRoot.c_str(),settings.eosConfigDir.c_str(),runNo)); effHist = dynamic_cast(effFile->Get(Form("eff_map_mprime_thprime_model_d2kk_run%u",runNo))); } else if (settings.directory == "B2Dhh_pipi"){ effFile = TFile::Open(Form("%s/%s/eff_map_mprime_thprime_model_d2pipi_run%u.root",settings.eosRoot.c_str(),settings.eosConfigDir.c_str(),runNo)); effHist = dynamic_cast(effFile->Get(Form("eff_map_mprime_thprime_model_d2pipi_run%u",runNo))); } else {std::cerr << "bad dir given!" << std::endl; return EXIT_FAILURE;} effModelB0->setEffHisto(effHist, kTRUE, kFALSE, 0.0, 0.0, kFALSE, kTRUE); effModelB0bar->setEffHisto(effHist, kTRUE, kFALSE, 0.0, 0.0, kFALSE, kTRUE); // background types // NB deliberately not using uniform initialisation here because of this issue: // https://json.nlohmann.me/home/faq/#brace-initialization-yields-arrays const nlohmann::json bkgndsJson = LauJsonTools::readJsonFile( bkgndsJsonFile, "", LauJsonTools::JsonType::Object ); if ( bkgndsJson.is_null() ) { std::cerr << "ERROR : unable to retrieve root JSON object from file \"" << bkgndsJsonFile << "\"" << std::endl; return EXIT_FAILURE; } std::map BkgndTypes; std::map bkgndDecayTimeTypes; for ( auto& bkgndName : settings.bkgndList ) { if ( ! 
bkgndsJson.contains( bkgndName ) ) { std::cerr << "ERROR : unable to retrieve \"" << bkgndName << "\" JSON object from file \"" << bkgndsJsonFile << "\"" << std::endl; return EXIT_FAILURE; } auto bkgndType { LauJsonTools::getValue( bkgndsJson.at(bkgndName), "type" ) }; auto bkgndDtFunc { LauJsonTools::getValue( bkgndsJson.at(bkgndName), "dt_func" ) }; BkgndTypes.insert( std::make_pair( bkgndName, bkgndType ) ); bkgndDecayTimeTypes.insert( std::make_pair( bkgndName, bkgndDtFunc ) ); } // setup flavour tagging const Bool_t useAveDeltaCalibVals { settings.useAveDeltaCalibVals }; const Bool_t useEtaPrime { kFALSE }; LauFlavTag* flavTag = new LauFlavTag(useAveDeltaCalibVals,useEtaPrime,BkgndTypes); if (settings.dType == LauTimeDepFitModel::CPEigenvalue::QFS) { flavTag->setDecayFlvVarName("decayFlv"); } if(not settings.dataFit) { flavTag->setTrueTagVarName("trueTag"); } TFile* etaFile = TFile::Open(Form("%s/%s/ETAhists.root",settings.eosRoot.c_str(),settings.eosConfigDir.c_str())); TH1* etaHist{nullptr}; TH1* etaHistB0{nullptr}; TH1* etaHistB0bar{nullptr}; if (settings.directory == "B2Dhh_Kpi"){ etaHistB0 = dynamic_cast(etaFile->Get(Form("signalMC_Kpi_Run%u_etaHist_dcyB",runNo))); etaHistB0bar = dynamic_cast(etaFile->Get(Form("signalMC_Kpi_Run%u_etaHist_dcyBbar",runNo))); etaHist = dynamic_cast(etaHistB0->Clone(Form("signalMC_Kpi_Run%u_etaHist",runNo))); etaHist->Add(etaHistB0bar); } else if (settings.directory == "B2Dhh_KK"){ etaHist = dynamic_cast(etaFile->Get(Form("signalMC_KK_Run%u_etaHist",runNo))); } else if (settings.directory == "B2Dhh_pipi"){ etaHist = dynamic_cast(etaFile->Get(Form("signalMC_pipi_Run%u_etaHist",runNo))); } else {std::cerr << "bad dir given!" << std::endl; return EXIT_FAILURE;} // Crude check as to whether we're doing perfect vs realistic mis-tag // - in the former case all entries should be in the first bin // If the tagging is perfect then don't interpolate the eta histogram // and also make it perfectly efficient, otherwise do interpolate and // make it 50% efficient Lau1DHistPdf* etaHistPdf = new Lau1DHistPdf( "eta", etaHist, 0.0, 0.5, kTRUE, kFALSE ); if(settings.addTaggers) { // NB deliberately not using uniform initialisation here because of this issue: // https://json.nlohmann.me/home/faq/#brace-initialization-yields-arrays const nlohmann::json taggingCalibJson = LauJsonTools::readJsonFile( taggingCalibJsonFile, "", LauJsonTools::JsonType::Array ); if ( taggingCalibJson.is_null() ) { std::cerr << "ERROR : unable to retrieve root JSON object from file \"" << taggingCalibJsonFile << "\"" << std::endl; return EXIT_FAILURE; } for ( const auto& taggerConfig : taggingCalibJson ) { if ( taggerConfig.at("name").get() == ("IFT_" + runStr) ) { flavTag->addTagger( taggerConfig, etaHistPdf ); } } if (settings.floatCalibPars) { flavTag->floatAllCalibPars(); } } // flavour tagging setup for backgrounds std::map< TString, std::pair > BkgndEtas; TH1* bkgndEtaHist1{nullptr}; Lau1DHistPdf* bkgndEtaHistPDF1{nullptr}; TH1* bkgndEtaHistMinus1{nullptr}; Lau1DHistPdf* bkgndEtaHistPDFMinus1{nullptr}; for(auto& [name, _] : BkgndTypes) { TString decayName = name; if ( name == "comb" ) { decayName = "sideband"; } else if ( name.Contains("Dst") ) { decayName = "signalMC"; } if (settings.directory == "B2Dhh_Kpi"){ bkgndEtaHist1 = dynamic_cast(etaFile->Get( Form("%s_Kpi_Run%u_etaHist_dcyB" ,decayName.Data(),runNo) )); bkgndEtaHistPDF1 = new Lau1DHistPdf("eta",bkgndEtaHist1 ,0.0,0.5,kTRUE,kFALSE); bkgndEtaHistMinus1 = dynamic_cast(etaFile->Get( 
Form("%s_Kpi_Run%u_etaHist_dcyBbar",decayName.Data(),runNo) )); bkgndEtaHistPDFMinus1 = new Lau1DHistPdf("eta",bkgndEtaHistMinus1,0.0,0.5,kTRUE,kFALSE); } else if (settings.directory == "B2Dhh_KK"){ bkgndEtaHist1 = dynamic_cast(etaFile->Get( Form("%s_KK_Run%u_etaHist" ,decayName.Data(),runNo) )); bkgndEtaHistPDF1 = new Lau1DHistPdf("eta",bkgndEtaHist1 ,0.0,0.5,kTRUE,kFALSE); } else if (settings.directory == "B2Dhh_pipi"){ bkgndEtaHist1 = dynamic_cast(etaFile->Get( Form("%s_pipi_Run%u_etaHist" ,decayName.Data(),runNo) )); bkgndEtaHistPDF1 = new Lau1DHistPdf("eta",bkgndEtaHist1 ,0.0,0.5,kTRUE,kFALSE); } else {std::cerr << "bad dir given!" << std::endl; return EXIT_FAILURE;} BkgndEtas.emplace( std::make_pair(name, std::make_pair(bkgndEtaHistPDF1,bkgndEtaHistPDFMinus1) ) ); } const Double_t tagEffVal{1.0}; std::pair tagEff {tagEffVal, useAveDeltaCalibVals ? 0.0 : tagEffVal}; for ( auto& [name,hists] : BkgndEtas ) { if ( name == "comb" ) { if ( ! bkgndsJson.contains("comb") || ! bkgndsJson.at("comb").contains("tag_asym") ) { std::cerr << "ERROR : unable to retrieve \"tag_asym\" JSON object from \"comb\" object of file \"" << bkgndsJsonFile << "\"" << std::endl; return EXIT_FAILURE; } const nlohmann::json& combTagAsymJson { bkgndsJson.at("comb").at("tag_asym") }; if ( settings.dType == LauTimeDepFitModel::CPEigenvalue::QFS ) { const BgTagDecayFlvAsym bgt { combTagAsymJson.at(runStr).at(settings.directory) }; const std::pair effsB0 { useAveDeltaCalibVals ? bgt.aveDeltaB0() : bgt.decayB0() }; const std::pair effsB0bar { useAveDeltaCalibVals ? bgt.aveDeltaB0bar() : bgt.decayB0bar() }; flavTag->setBkgndParams( name, Form("IFT_Run%u",runNo), hists.first, effsB0, hists.second, effsB0bar ); } else if (settings.dType == LauTimeDepFitModel::CPEigenvalue::CPEven){ const BgTagDecayFlvAsym bgt { combTagAsymJson.at(runStr).at(settings.directory) }; const std::pair effs { useAveDeltaCalibVals ? 
bgt.aveDeltaB0() : bgt.decayB0() }; flavTag->setBkgndParams( name, Form("IFT_Run%u",runNo), hists.first, effs); } } else { if ( settings.dType == LauTimeDepFitModel::CPEigenvalue::QFS ) { flavTag->setBkgndParams( name, Form("IFT_Run%u",runNo), hists.first, tagEff, hists.second, tagEff ); } else if (settings.dType == LauTimeDepFitModel::CPEigenvalue::CPEven){ flavTag->setBkgndParams( name, Form("IFT_Run%u",runNo), hists.first, tagEff ); } } } // signal dynamics LauIsobarDynamics* sigModelB0bar = new LauIsobarDynamics(daughtersB0bar, effModelB0bar); if (settings.directory == "B2Dhh_Kpi"){ sigModelB0bar->setIntFileName(Form("integ_Kpi_Run%u_B0bar.dat",runNo)); } else if (settings.directory == "B2Dhh_KK"){ sigModelB0bar->setIntFileName(Form("integ_KK_Run%u_B0bar.dat",runNo)); } else if (settings.directory == "B2Dhh_pipi"){ sigModelB0bar->setIntFileName(Form("integ_pipi_Run%u_B0bar.dat",runNo)); } sigModelB0bar->constructModelFromJson( dpModelJsonFile, "Bd2D0pipi_B0bar_Model" ); LauIsobarDynamics* sigModelB0 = new LauIsobarDynamics(daughtersB0, effModelB0); if (settings.directory == "B2Dhh_Kpi"){ sigModelB0->setIntFileName(Form("integ_Kpi_Run%u_B0.dat",runNo)); } else if (settings.directory == "B2Dhh_KK"){ sigModelB0->setIntFileName(Form("integ_KK_Run%u_B0.dat",runNo)); } else if (settings.directory == "B2Dhh_pipi"){ sigModelB0->setIntFileName(Form("integ_pipi_Run%u_B0.dat",runNo)); } sigModelB0->constructModelFromJson( dpModelJsonFile, "Bd2D0pipi_B0_Model" ); // fit model LauTimeDepFitModel* fitModel = new LauTimeDepFitModel(sigModelB0bar,sigModelB0,flavTag); fitModel->breakOnBadLikelihood(kFALSE); fitModel->sendFixedYieldsToFitter(kTRUE); std::vector> coeffset { LauAbsCoeffSet::readFromJson( dpModelJsonFile, "Bd2D0pipi_Coeffs" ) }; for ( auto& coeff : coeffset ) { fitModel->setAmpCoeffSet( std::move(coeff) ); } // background DP models LauBkgndDPModel* BkgndModel{nullptr}; LauBkgndDPModel* BkgndModelbar{nullptr}; TFile* BkgndHistFile1 = TFile::Open(Form("%s/%s/BG_SDPs.root",settings.eosRoot.c_str(),settings.eosConfigDir.c_str())); TFile* BkgndHistFile2 = TFile::Open(Form("%s/%s/sidebandSDP.root",settings.eosRoot.c_str(),settings.eosConfigDir.c_str())); TFile* BkgndHistFile3 = TFile::Open(Form("%s/%s/BG_SDPs_Dst.root",settings.eosRoot.c_str(),settings.eosConfigDir.c_str())); TH2* BkgndHistB0{nullptr}; TH2* BkgndHistB0bar{nullptr}; for(auto& [name, _] : BkgndTypes) { //Only have Kpi histos for now for misid if( name=="comb"){ if( settings.directory=="B2Dhh_Kpi"){ BkgndHistB0 = dynamic_cast(BkgndHistFile2-> Get(Form("sideband_Run%u_Kpi_SDP_decayFlv1",runNo))); BkgndHistB0bar = dynamic_cast(BkgndHistFile2->Get(Form("sideband_Run%u_Kpi_SDP_decayFlvMinus1",runNo))); } else if( settings.directory=="B2Dhh_KK"){ BkgndHistB0 = dynamic_cast(BkgndHistFile2->Get(Form("sideband_Run%u_KK_SDP",runNo))); } else if( settings.directory=="B2Dhh_pipi"){ BkgndHistB0 = dynamic_cast(BkgndHistFile2->Get(Form("sideband_Run%u_pipi_SDP",runNo))); } } else if ( name.Contains("Dst") ) { TString histNameB = Form("%s_Kpi_Run%u_B_SDP",name.Data(),runNo); TString histNameBbar = Form("%s_Kpi_Run%u_Bbar_SDP",name.Data(),runNo); BkgndHistB0 = dynamic_cast(BkgndHistFile3->Get(histNameB)); BkgndHistB0bar = dynamic_cast(BkgndHistFile3->Get(histNameBbar)); if ( !BkgndHistB0 || !BkgndHistB0bar ) { std::cerr << "ERROR - unable to retrieve histograms " << histNameB << " and " << histNameBbar << " from file " << BkgndHistFile3->GetName() << std::endl; return 1; } } else { TString mode = settings.directory; mode.ReplaceAll("B2Dhh_",""); 
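// Note added for clarity (not part of the original patch): ReplaceAll strips the "B2Dhh_"
// prefix, so e.g. settings.directory "B2Dhh_KK" gives mode "KK", and the histograms looked up
// below are "<name>_KK_Run<runNo>_SDP_decFlv1" and "<name>_KK_Run<runNo>_SDP_decFlvMinus1".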
TString histName = Form("%s_%s_Run%u_SDP_decFlv",name.Data(),mode.Data(),runNo); BkgndHistB0 = dynamic_cast(BkgndHistFile1->Get(histName+"1")); BkgndHistB0bar = dynamic_cast(BkgndHistFile1->Get(histName+"Minus1")); } BkgndModel = new LauBkgndDPModel(daughtersB0, vetoes.get()); BkgndHistB0->Smooth(); BkgndModel->setBkgndSpline(BkgndHistB0, kFALSE, kFALSE, kTRUE ); //For CP comb we don't split based on decay flavour since it's impossible if(name == "comb" and settings.directory!="B2Dhh_Kpi") {BkgndModelbar=nullptr;} else { BkgndModelbar = new LauBkgndDPModel(daughtersB0bar, vetoes.get()); BkgndHistB0bar->Smooth(); BkgndModelbar->setBkgndSpline(BkgndHistB0bar, kFALSE, kFALSE, kTRUE ); } fitModel->setBkgndDPModels( name, BkgndModel, BkgndModelbar ); } // decay type and mixing parameter const Bool_t fixPhiMix{ settings.fixPhiMix || settings.dType == LauTimeDepFitModel::CPEigenvalue::QFS }; const Bool_t useSinCos{ settings.useSinCos }; fitModel->setCPEigenvalue( settings.dType ); fitModel->setPhiMix( 2.0*LauConstants::beta, fixPhiMix, useSinCos ); if ( useSinCos ) { if ( settings.blindFit ) { //Blinding strings generated from: https://www.thewordfinder.com/random-sentence-generator/ const TString sinBlindString{settings.dataFit ? "Tom realized he could be making a big mistake." : "Don't Look a Gift Horse In The Mouth"}; const TString cosBlindString{settings.dataFit ? "He admitted defeat." : "Long In The Tooth"}; fitModel->blindPhiMix( sinBlindString, cosBlindString ); } std::cout << "Is sin(phiMix) blinded? " << std::boolalpha << fitModel->sinPhiMixBlinded() << std::endl; std::cout << "Is cos(phiMix) blinded? " << std::boolalpha << fitModel->cosPhiMixBlinded() << std::endl; } else { if ( settings.blindFit ) { const TString blindString{"A Cut Above"}; fitModel->blindPhiMix( blindString ); } std::cout << "Is phiMix blinded? 
" << std::boolalpha << fitModel->phiMixBlinded() << std::endl; } // production asymmetries fitModel->setAsymmetries( 0.0, kTRUE ); for(auto& [name, _] : BkgndTypes) { fitModel->setBkgndAsymmetries( name, 0.0, kTRUE ); } // decay time PDFs const Double_t minDt{0.2}; const Double_t maxDt{15.0}; const Double_t minDtErr{0.0}; const Double_t maxDtErr{0.15}; // physics models auto dtPhysicsModels { LauDecayTimePhysicsModel::readFromJson( dtModelJsonFile, "PhysicsModels" ) }; auto& dtPhysModel { dtPhysicsModels.at("signal") }; auto lifetimePar = dynamic_cast( dtPhysModel->findParameter("dt_sig_tau_Bd") ); if ( lifetimePar ) { lifetimePar->fixed( settings.fixLifetime ); } auto deltaMPar = dynamic_cast( dtPhysModel->findParameter("dt_sig_deltaM_Bd") ); if ( deltaMPar ) { deltaMPar->fixed( settings.fixDeltaM ); } // resolution models auto dtResolutionModels { LauDecayTimeResolution::readFromJson( dtModelJsonFile, Form("ResolutionModels_Run%d", runNo) ) }; auto& dtResoModel { dtResolutionModels.at("signal") }; // Decay time error histogram // (always set this so that it gets generated properly, whether we're using it in the PDF or not) TFile* dtErrFile = TFile::Open(Form("%s/%s/dte-hist.root",settings.eosRoot.c_str(),settings.eosConfigDir.c_str())); TH1* dtErrHist = dynamic_cast(dtErrFile->Get("dte_hist")); LauDecayTimePdf * dtPdf{nullptr}; if ( settings.timeResolution ) { if ( settings.perEventTimeErr ) { std::cerr << "Warning : the resolution model has not been constructed with per-event error scaling, but you have requested it" << std::endl; dtPdf = new LauDecayTimePdf( "decayTime", minDt, maxDt, "decayTimeErr", minDtErr, maxDtErr, std::move(dtPhysModel), std::move(dtResoModel), dtErrHist ); } else { dtPdf = new LauDecayTimePdf( "decayTime", minDt, maxDt, std::move(dtPhysModel), std::move(dtResoModel) ); } } else { dtPdf = new LauDecayTimePdf( "decayTime", minDt, maxDt, std::move(dtPhysModel) ); } // Decay time acceptance histogram TFile* dtaFile = TFile::Open(Form("%s/%s/DTAhists.root",settings.eosRoot.c_str(),settings.eosConfigDir.c_str())); TH1* dtaHist{nullptr}; if (settings.directory == "B2Dhh_Kpi"){ dtaHist = dynamic_cast(dtaFile->Get(Form("signalMC_Kpi_Run%u_DTAhist",runNo))); } else if (settings.directory == "B2Dhh_KK"){ dtaHist = dynamic_cast(dtaFile->Get(Form("signalMC_KK_Run%u_DTAhist",runNo))); } else if (settings.directory == "B2Dhh_pipi"){ dtaHist = dynamic_cast(dtaFile->Get(Form("signalMC_pipi_Run%u_DTAhist",runNo))); } else {std::cerr << "bad dir given!" 
<< std::endl; return EXIT_FAILURE;} const TString signalName { "signalMC" }; std::vector bkgndNames; std::vector bkgndNamesDst; bkgndNames.reserve( bkgndDecayTimeTypes.size() ); bkgndNamesDst.reserve( bkgndDecayTimeTypes.size() ); for ( auto& [name, type] : bkgndDecayTimeTypes ) { if ( type == LauDecayTime::FuncType::Hist ) { continue; } if ( name.Contains("Dst") ) { bkgndNamesDst.push_back( name ); } else { bkgndNames.push_back( name ); } } const TString suffix { "_DTAhist" }; auto [ qfsDTAModel, cpDTAModel, bkgndDTAModels ] = createSplineDTASet( dtaModelJsonFile, dModeStr, runStr, suffix, signalName, bkgndNames ); auto [ dtaDpiModel, pcBkgndDTAModels ] = createSplineDTASetPCBkgnds( dtaModelJsonFile, dModeStr, runStr, suffix, signalName, bkgndNamesDst ); auto qfsDTAModelPtr = qfsDTAModel.get(); switch(settings.timeEffModel) { case LauDecayTime::EfficiencyMethod::Spline: { fitModel->setASqMaxValue(0.01); if ( settings.dType == LauTimeDepFitModel::CPEigenvalue::QFS ) { fitModel->setASqMaxValue(0.009); } dynamic_cast(qfsDTAModel->getParameters().front())->maxValue(1e-1); //qfsDTAModel->fitToTH1(dtaHist); // Set which knots to float and which to fix (at least 1 knot must be fixed, not the first one) // Knots should only be floating if requested AND the B lifetime is fixed! if ( settings.fixSplineKnots or not settings.fixLifetime ) { qfsDTAModel->fixKnots(); } else { qfsDTAModel->floatKnots(); qfsDTAModel->fixKnot( 0, true ); qfsDTAModel->fixKnot( 3, true ); } if ( settings.dType == LauTimeDepFitModel::CPEigenvalue::QFS ) { dtPdf->setSplineEfficiency( std::move(qfsDTAModel) ); } else { dtPdf->setSplineEfficiency( std::move(cpDTAModel) ); } break; } case LauDecayTime::EfficiencyMethod::Binned: { fitModel->setASqMaxValue(0.06); auto dtaBinnedModel = std::make_unique( *dtaHist ); dtPdf->setBinnedEfficiency( std::move(dtaBinnedModel) ); break; } case LauDecayTime::EfficiencyMethod::Uniform: { fitModel->setASqMaxValue(4.45); break; } } fitModel->setSignalDtPdf( dtPdf ); const TString dtaHistSuffix { Form("_%s_%s_DTAhist",dModeStr.Data(),runStr.c_str()) }; for (auto& [bg, _] : BkgndTypes) { if ( bkgndDecayTimeTypes[bg] == LauDecayTime::FuncType::Hist ) { TString histname = bg + dtaHistSuffix; if(histname.Contains("comb")) { histname = "Sideband" + dtaHistSuffix; histname.ReplaceAll("DTAhist","DThist"); } TH1* dt_hist = dynamic_cast(dtaFile->Get(histname.Data())); TH1* dt_err_hist = nullptr; // TODO get these! if ( not dt_hist ) { std::cerr << "PROBLEM FINDING THE LIFETIME HIST:" << histname << std::endl; return EXIT_FAILURE; } if ( settings.perEventTimeErr and not dt_err_hist ) { std::cerr << "PROBLEM FINDING THE LIFETIME ERROR HIST:" << bg << std::endl; return EXIT_FAILURE; } LauDecayTimePdf* backgroundPdf {nullptr}; if ( settings.perEventTimeErr ) { backgroundPdf = new LauDecayTimePdf("decayTime", minDt, maxDt, "decayTimeErr", minDtErr, maxDtErr, dt_hist, dt_err_hist ); } else { backgroundPdf = new LauDecayTimePdf("decayTime", minDt, maxDt, dt_hist ); } fitModel->setBkgndDtPdf(bg,backgroundPdf); } else { try { dtPhysicsModels.at(bg); } catch ( std::out_of_range& ) { std::cerr << "ERROR - cannot locate physics model for " << bg << std::endl; return 1; } try { dtResolutionModels.at(bg); } catch ( std::out_of_range& ) { std::cerr << "ERROR - cannot locate resolution model for " << bg << std::endl; return 1; } try { ( bg.Contains("Dst") ) ? 
pcBkgndDTAModels.at(bg) : bkgndDTAModels.at(bg); } catch ( std::out_of_range& ) { std::cerr << "ERROR - cannot locate DTA model for " << bg << std::endl; return 1; } auto& bgPhysModel { dtPhysicsModels.at(bg) }; auto& bgResoModel { dtResolutionModels.at(bg) }; auto& bgDTAModel { ( bg.Contains("Dst") ) ? pcBkgndDTAModels.at(bg) : bkgndDTAModels.at(bg) }; TH1* dt_err_hist = nullptr; // TODO get these! if ( settings.perEventTimeErr and not dt_err_hist ) { std::cerr << "PROBLEM FINDING THE LIFETIME ERROR HIST:" << bg << std::endl; return EXIT_FAILURE; } LauDecayTimePdf* backgroundPdf {nullptr}; if ( settings.timeResolution ) { if ( settings.perEventTimeErr ) { std::cerr << "Warning : the resolution model has not been constructed with per-event error scaling, but you have requested it" << std::endl; backgroundPdf = new LauDecayTimePdf( "decayTime", minDt, maxDt, "decayTimeErr", minDtErr, maxDtErr, std::move(bgPhysModel), std::move(bgResoModel), dt_err_hist ); } else { backgroundPdf = new LauDecayTimePdf( "decayTime", minDt, maxDt, std::move(bgPhysModel), std::move(bgResoModel) ); } } else { backgroundPdf = new LauDecayTimePdf( "decayTime", minDt, maxDt, std::move(bgPhysModel) ); } // background decay time acceptance histogram /* TString histname = bg + dtaHistSuffix; auto bgDTA = dynamic_cast(dtaFile->Get(histname)); if ( not bgDTA and bg!="comb" ) { std::cerr << "Couldn't find DTA histogram for background: " << bg << std::endl; return EXIT_FAILURE; } //XXX if we decide to rebin, do it here bgDTA->Divide(dtaHist); for (Int_t bin = 1; bin <= bgDTA->GetNbinsX(); ++bin) { if( bgDTA->GetBinContent(bin)==0. ){bgDTA->SetBinContent(bin,1.);} } bgDTAModel->fitToTH1(bgDTA, LauOutputLevel::Quiet, true); */ backgroundPdf->setSplineEfficiency( std::move(bgDTAModel) ); fitModel->setBkgndDtPdf(bg,backgroundPdf); } } // Signal and bkgnd yields const Bool_t fixYields { settings.bkgndList.empty() or not settings.floatYields }; TString yieldName; LauParameter* nSigPar{nullptr}; std::map bkgndYields; // NB deliberately not using uniform initialisation here because of this issue: // https://json.nlohmann.me/home/faq/#brace-initialization-yields-arrays const nlohmann::json signalYieldsJson = LauJsonTools::readJsonFile( signalYieldsJsonFile, "", LauJsonTools::JsonType::Object ); if ( signalYieldsJson.is_null() ) { std::cerr << "ERROR : unable to retrieve root JSON object from file \"" << signalYieldsJsonFile << "\"" << std::endl; return EXIT_FAILURE; } const auto& nSigJson { signalYieldsJson.at(runStr).at(settings.directory) }; yieldName = Form("signalEvents_%s_Run%u", settings.directory.c_str(), runNo); if ( nSigJson.is_number() ) { const auto nSigEvents{ nSigJson.get() }; nSigPar = new LauParameter(yieldName, nSigEvents, -2.0*TMath::Abs(nSigEvents), 2.0*TMath::Abs(nSigEvents), fixYields); } else { const auto nSigEvents { LauJsonTools::getValue( nSigJson, "value" ) }; nSigPar = new LauParameter(yieldName, nSigEvents, -2.0*TMath::Abs(nSigEvents), 2.0*TMath::Abs(nSigEvents), fixYields); if ( ! 
fixYields && LauJsonTools::getOptionalValue( nSigJson, "constrained", LauJsonTools::JsonType::Boolean ).value_or(kFALSE) ) { const auto nSigEventsErr { LauJsonTools::getValue( nSigJson, "error" ) }; nSigPar->removeLimits(); nSigPar->error( nSigEventsErr ); - nSigPar->addGaussianConstraint( nSigEvents, nSigEventsErr ); + // Addition of Gaussian constraint commented + // out since now included in multi-dimensional + // constraint in SimFitCoordinator_Bd2Dpipi + //nSigPar->addGaussianConstraint( nSigEvents, nSigEventsErr ); } } if ( nSigPar->value() < 0.0 ) { if ( settings.command == Command::Generate ) { std::cerr << "WARNING : setting negative signal yield to 0 for generation" << std::endl; } nSigPar->genValue( 0.0 ); } fitModel->setNSigEvents(nSigPar); for ( auto& bkgndName : settings.bkgndList ) { yieldName = Form("%sEvents_%s_Run%u", bkgndName.c_str(), settings.directory.c_str(), runNo); const auto& yieldJson { bkgndsJson.at(bkgndName).at("yields").at(runStr).at(settings.directory) }; if ( yieldJson.is_number() ) { const auto nBkg { yieldJson.get() }; bkgndYields[bkgndName] = new LauParameter(yieldName, nBkg, -2.0*TMath::Abs(nBkg), 2.0*TMath::Abs(nBkg), fixYields); } else { const auto nBkg { LauJsonTools::getValue( yieldJson, "value" ) }; bkgndYields[bkgndName] = new LauParameter(yieldName, nBkg, -2.0*TMath::Abs(nBkg), 2.0*TMath::Abs(nBkg), fixYields); if ( ! fixYields && LauJsonTools::getOptionalValue( yieldJson, "constrained", LauJsonTools::JsonType::Boolean ).value_or(kFALSE) ) { const auto nBkgErr { LauJsonTools::getValue( yieldJson, "error" ) }; bkgndYields[bkgndName]->removeLimits(); bkgndYields[bkgndName]->error( nBkgErr ); - bkgndYields[bkgndName]->addGaussianConstraint( nBkg, nBkgErr ); + // Addition of Gaussian constraint commented + // out since now included in multi-dimensional + // constraint in SimFitCoordinator_Bd2Dpipi + //bkgndYields[bkgndName]->addGaussianConstraint( nBkg, nBkgErr ); } } LauParameter* nBkgndPar = bkgndYields[bkgndName]; if ( nBkgndPar->value() < 0.0 ) { if ( settings.command == Command::Generate ) { std::cerr << "WARNING : setting negative yield for background mode " << bkgndName << " to 0 for generation" << std::endl; } nBkgndPar->genValue( 0.0 ); } fitModel->setNBkgndEvents( nBkgndPar ); } if ( settings.command == Command::Generate or ( not fixYields and not settings.dataFit ) ) { // mB PDFs // PDFs for B+ and B- are set to be identical here but could be different in general const TString mBName { "mB" }; const Double_t mBMin { 5.200 }; const Double_t mBMax { 5.700 }; // Signal PDF is a double Crystal Ball function const std::map sigParVals { {"mB_sig_mean", 5.2802}, {"mB_sig_sigma", 0.01125}, {"mB_sig_alphaL", 1.226}, {"mB_sig_alphaR", -1.722}, {"mB_sig_orderL", 1.817}, {"mB_sig_orderR", 2.861}, {"mB_sig_frac", 0.480} }; fitModel->setSignalPdfs( makeDCBPDF( mBName, mBMin, mBMax, "sig", sigParVals ) ); // Combinatorial PDF is an exponential if ( BkgndTypes.find("comb") != BkgndTypes.end() ) { LauParameter* mB_comb_slope = new LauParameter("mB_comb_slope", -0.00419); LauAbsPdf* mB_comb_pdf = new LauExponentialPdf(mBName, {mB_comb_slope}, mBMin, mBMax); fitModel->setBkgndPdf("comb", mB_comb_pdf); } // Bd2DKpi PDF is double Crystal Ball function if ( BkgndTypes.find("Bd2DKpi") != BkgndTypes.end() ) { const std::map Bd2DKpiParVals { {"mB_Bd2DKpi_mean", 5.2401}, {"mB_Bd2DKpi_sigma", 0.01580}, {"mB_Bd2DKpi_alphaL", 0.560}, {"mB_Bd2DKpi_alphaR", -2.212}, {"mB_Bd2DKpi_orderL", 1.076}, {"mB_Bd2DKpi_orderR", 2.453}, {"mB_Bd2DKpi_frac", 0.544} }; 
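// Note added for clarity (not part of the original patch): makeDCBPDF, defined at the end of
// this file, builds the double Crystal Ball shape as two LauCrystalBallPdf components that
// share the mean and sigma parameters (one using alphaL/orderL, the other alphaR/orderR) and
// combines them with a LauSumPdf weighted by the "frac" parameter.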
fitModel->setBkgndPdf("Bd2DKpi", makeDCBPDF( mBName, mBMin, mBMax, "Bd2DKpi", Bd2DKpiParVals ) ); } // Bs2DKpi PDF is double Crystal Ball function if ( BkgndTypes.find("Bs2DKpi") != BkgndTypes.end() ) { const std::map Bs2DKpiParVals { {"mB_Bs2DKpi_mean", 5.3244}, {"mB_Bs2DKpi_sigma", 0.01827}, {"mB_Bs2DKpi_alphaL", 0.865}, {"mB_Bs2DKpi_alphaR", -0.001}, {"mB_Bs2DKpi_orderL", 10.000}, {"mB_Bs2DKpi_orderR", 10.000}, {"mB_Bs2DKpi_frac", 0.954} }; fitModel->setBkgndPdf("Bs2DKpi", makeDCBPDF( mBName, mBMin, mBMax, "Bs2DKpi", Bs2DKpiParVals ) ); } // Lb2Dppi PDF is double Crystal Ball function if ( BkgndTypes.find("Lb2Dppi") != BkgndTypes.end() ) { const std::map Lb2DppiParVals { {"mB_Lb2Dppi_mean", 5.4884}, {"mB_Lb2Dppi_sigma", 0.02561}, {"mB_Lb2Dppi_alphaL", 0.091}, {"mB_Lb2Dppi_alphaR", -18.053}, {"mB_Lb2Dppi_orderL", 2.906}, {"mB_Lb2Dppi_orderR", 6.087}, {"mB_Lb2Dppi_frac", 0.968} }; fitModel->setBkgndPdf("Lb2Dppi", makeDCBPDF( mBName, mBMin, mBMax, "Lb2Dppi", Lb2DppiParVals ) ); } } // set the number of experiments if (settings.command == Command::Generate) { fitModel->setNExpts(settings.nExptGen, settings.firstExptGen, kTRUE); } else { fitModel->setNExpts(settings.nExptFit, settings.firstExptFit, !settings.dataFit); } fitModel->useAsymmFitErrors(kFALSE); fitModel->useRandomInitFitPars(kFALSE); fitModel->writeLatexTable(kFALSE); const Bool_t haveBkgnds = ( fitModel->nBkgndClasses() > 0 ); fitModel->doPoissonSmearing(haveBkgnds); fitModel->doEMLFit(haveBkgnds); TString dTypeStr; switch (settings.dType) { case LauTimeDepFitModel::CPEigenvalue::CPEven : dTypeStr = "CPEven"; break; case LauTimeDepFitModel::CPEigenvalue::CPOdd : dTypeStr = "CPOdd"; break; case LauTimeDepFitModel::CPEigenvalue::QFS : dTypeStr = "QFS"; break; } TString dataFile(""); TString treeName("fitTree"); TString rootFileName(""); TString tableFileName(""); TString fitToyFileName(""); TString splotFileName(""); dataFile = "TEST-Dpipi"; dataFile += "_"+dTypeStr+"_"+settings.directory; switch(settings.timeEffModel) { case LauDecayTime::EfficiencyMethod::Spline: dataFile += "_Spline"; break; case LauDecayTime::EfficiencyMethod::Binned: dataFile += "_Binned"; break; case LauDecayTime::EfficiencyMethod::Uniform: dataFile += "_Uniform"; break; } if (settings.timeResolution) { if (settings.perEventTimeErr) { dataFile += "_DTRperevt"; } else { dataFile += "_DTRavg"; } } else { dataFile += "_DTRoff"; } dataFile += "_expts"; dataFile += settings.firstExptGen; dataFile += "-"; dataFile += settings.firstExptGen+settings.nExptGen-1; dataFile += Form("-Run%u",runNo); dataFile += ".root"; if (settings.dataFit) { if(runNo == 2) { dataFile = Form("%s/%s/Run2/B2Dhh-CollisionCombined-MagCombined-Stripping24r2-withDNN-withPIDcorr-withMVA-withPIDMVA-WithDpiMatching-withSwappedMassHypotheses-withKKvetoes-withSelection-LauraPrepped.root",settings.eosRoot.c_str(),settings.eosDataDir.c_str()); } else { dataFile = Form("%s/%s/Run1/B2Dhh-CollisionCombined-MagCombined-Stripping21r1p2-withDNN-withPIDcorr-withMVA-withPIDMVA-WithDpiMatching-withSwappedMassHypotheses-withKKvetoes-withSelection-LauraPrepped.root",settings.eosRoot.c_str(),settings.eosDataDir.c_str()); } treeName = Form("%s/B2DhhReco",settings.directory.data()); } if (settings.command == Command::Generate) { rootFileName = "dummy.root"; tableFileName = "genResults"; } else { rootFileName = "fit"; rootFileName += settings.iFit; rootFileName += "_Results_"; rootFileName += dTypeStr; rootFileName += "_"; rootFileName += settings.directory; rootFileName += "_expts"; rootFileName += 
settings.firstExptFit; rootFileName += "-"; rootFileName += settings.firstExptFit+settings.nExptFit-1; rootFileName += Form("_Run%u",runNo); rootFileName += ".root"; tableFileName = "fit"; tableFileName += settings.iFit; tableFileName += "_Results_"; tableFileName += dTypeStr; tableFileName += "_"; tableFileName += settings.directory; tableFileName += "_expts"; tableFileName += settings.firstExptFit; tableFileName += "-"; tableFileName += settings.firstExptFit+settings.nExptFit-1; tableFileName += Form("_Run%u",runNo); fitToyFileName = "fit"; fitToyFileName += settings.iFit; fitToyFileName += "_ToyMC_"; fitToyFileName += dTypeStr; fitToyFileName += "_"; fitToyFileName += settings.directory; fitToyFileName += "_expts"; fitToyFileName += settings.firstExptFit; fitToyFileName += "-"; fitToyFileName += settings.firstExptFit+settings.nExptFit-1; fitToyFileName += Form("_Run%u",runNo); fitToyFileName += ".root"; splotFileName = "fit"; splotFileName += settings.iFit; splotFileName += "_sPlot_"; splotFileName += dTypeStr; splotFileName += "_"; splotFileName += settings.directory; splotFileName += "_expts"; splotFileName += settings.firstExptFit; splotFileName += "-"; splotFileName += settings.firstExptFit+settings.nExptFit-1; splotFileName += Form("_Run%u",runNo); splotFileName += ".root"; } if( settings.genToy and not settings.blindFit ) { fitModel->compareFitData(50, fitToyFileName); } // Generate toy from the fitted parameters //fitModel->compareFitData(1, fitToyFileName); // Write out per-event likelihoods and sWeights //fitModel->writeSPlotData(splotFileName, "splot", kFALSE); // Execute the generation/fit switch (settings.command) { case Command::Generate : fitModel->run( "gen", dataFile, treeName, rootFileName, tableFileName ); break; case Command::Fit : fitModel->run( "fit", dataFile, treeName, rootFileName, tableFileName ); break; case Command::SimFit : fitModel->runTask( dataFile, treeName, rootFileName, tableFileName, "localhost", settings.port ); break; } // Write JSON files with the fitted values // signal yield nlohmann::json signalYieldJson = nlohmann::json::object(); signalYieldJson[ runStr ] = nlohmann::json::object(); signalYieldJson[ runStr ][ settings.directory ] = nlohmann::json::object(); signalYieldJson[ runStr ][ settings.directory ][ "value" ] = nSigPar->value(); signalYieldJson[ runStr ][ settings.directory ][ "error" ] = nSigPar->error(); signalYieldJson[ runStr ][ settings.directory ][ "constrained" ] = nSigPar->gaussConstraint(); LauJsonTools::writeJsonFile( signalYieldJson, "Bd2D0pipi_SignalYield_" + settings.directory + "_" + runStr + ".json" ); // background yields nlohmann::json backgroundYieldsJson = nlohmann::json::object(); for ( auto& [ bkgndName, yieldPar ] : bkgndYields ) { backgroundYieldsJson[ bkgndName ] = nlohmann::json::object(); backgroundYieldsJson[ bkgndName ][ "yields" ] = nlohmann::json::object(); backgroundYieldsJson[ bkgndName ][ "yields" ][ runStr ] = nlohmann::json::object(); backgroundYieldsJson[ bkgndName ][ "yields" ][ runStr ][ settings.directory ] = nlohmann::json::object(); backgroundYieldsJson[ bkgndName ][ "yields" ][ runStr ][ settings.directory ][ "value" ] = yieldPar->value(); backgroundYieldsJson[ bkgndName ][ "yields" ][ runStr ][ settings.directory ][ "error" ] = yieldPar->error(); backgroundYieldsJson[ bkgndName ][ "yields" ][ runStr ][ settings.directory ][ "constrained" ] = yieldPar->gaussConstraint(); } LauJsonTools::writeJsonFile( backgroundYieldsJson, "Bd2D0pipi_BackgroundYields_" + settings.directory + "_" + runStr + ".json" 
); if (settings.dType == LauTimeDepFitModel::CPEigenvalue::QFS) { // QFS DTA spline qfsDTAModelPtr->writeSplineToJson( "Bd2D0pipi_DecayTime_Acceptance_Fitted_" + runStr + ".json" ); // FT calibration parameters flavTag->writeTaggersToJson( "Bd2D0pipi_TaggingCalib_Fitted_" + runStr + ".json" ); } return EXIT_SUCCESS; } LauAbsPdf* makeDCBPDF( const TString& varName, const Double_t varMin, const Double_t varMax, const TString& componentName, const std::map paramVals ) { const TString meanName { Form( "%s_%s_mean", varName.Data(), componentName.Data() ) }; const TString sigmaName { Form( "%s_%s_sigma", varName.Data(), componentName.Data() ) }; const TString alphaLName { Form( "%s_%s_alphaL", varName.Data(), componentName.Data() ) }; const TString alphaRName { Form( "%s_%s_alphaR", varName.Data(), componentName.Data() ) }; const TString orderLName { Form( "%s_%s_orderL", varName.Data(), componentName.Data() ) }; const TString orderRName { Form( "%s_%s_orderR", varName.Data(), componentName.Data() ) }; const TString fracName { Form( "%s_%s_frac", varName.Data(), componentName.Data() ) }; LauParameter* mean = new LauParameter( meanName, paramVals.at( meanName ) ); LauParameter* sigma = new LauParameter( sigmaName, paramVals.at( sigmaName ) ); LauParameter* alphaL = new LauParameter( alphaLName, paramVals.at( alphaLName ) ); LauParameter* alphaR = new LauParameter( alphaRName, paramVals.at( alphaRName ) ); LauParameter* orderL = new LauParameter( orderLName, paramVals.at( orderLName ) ); LauParameter* orderR = new LauParameter( orderRName, paramVals.at( orderRName ) ); LauParameter* frac = new LauParameter( fracName, paramVals.at( fracName ) ); std::vector pars; pars.reserve(4); pars.clear(); pars.push_back(mean); pars.push_back(sigma); pars.push_back(alphaL); pars.push_back(orderL); LauAbsPdf* pdf_L = new LauCrystalBallPdf(varName, pars, varMin, varMax); pars.clear(); pars.push_back(mean); pars.push_back(sigma); pars.push_back(alphaR); pars.push_back(orderR); LauAbsPdf* pdf_R = new LauCrystalBallPdf(varName, pars, varMin, varMax); LauAbsPdf* pdf = new LauSumPdf(pdf_L, pdf_R, frac); return pdf; } diff --git a/examples/GenFitTimeDep_Bd2Dpipi_SignalOnly.cc b/examples/GenFitTimeDep_Bd2Dpipi_SignalOnly.cc new file mode 100644 index 0000000..c3995a4 --- /dev/null +++ b/examples/GenFitTimeDep_Bd2Dpipi_SignalOnly.cc @@ -0,0 +1,591 @@ + +#include "TimeDep_Dpipi_ProgOpts.hh" + +#include "Lau1DHistPdf.hh" +#include "Lau1DCubicSpline.hh" +#include "LauBinnedDecayTimeEfficiency.hh" +#include "LauBkgndDPModel.hh" +#include "LauCrystalBallPdf.hh" +#include "LauDaughters.hh" +#include "LauDecayTime.hh" +#include "LauDecayTimePdf.hh" +#include "LauDecayTimePhysicsModel.hh" +#include "LauDecayTimeResolution.hh" +#include "LauEffModel.hh" +#include "LauExponentialPdf.hh" +#include "LauFlavTag.hh" +#include "LauGounarisSakuraiRes.hh" +#include "LauIsobarDynamics.hh" +#include "LauLHCbNR.hh" +#include "LauMagPhaseCoeffSet.hh" +#include "LauRandom.hh" +#include "LauRealImagCoeffSet.hh" +#include "LauResonanceMaker.hh" +#include "LauSplineDecayTimeEfficiency.hh" +#include "LauSumPdf.hh" +#include "LauTimeDepFitModel.hh" +#include "LauVetoes.hh" + +#include "TFile.h" +#include "TH2.h" +#include "TRandom.h" +#include "TString.h" +#include "TSystem.h" +#include "TF1.h" +#include "TCanvas.h" +#include "TRegexp.h" + +#include "nlohmann/json.hpp" + +#include +#include +#include +#include +#include +#include + +// Helper functions for creating sets of DTA splines +using LauCubicSplineDecayTimeEfficiencyPtr = std::unique_ptr>; 
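// Note added for clarity (not part of the original patch): as in GenFitTimeDep_Bd2Dpipi.cc,
// the cubic-spline efficiency is used directly for the control (Kpi) mode, while the
// sixth-order spline efficiencies built in createSplineDTASet below are constructed from that
// control efficiency together with an additional spline read from the acceptance JSON file,
// presumably acting as a correction for the CP (KK, pipi) modes.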
+using LauSixthSplineDecayTimeEfficiencyPtr = std::unique_ptr>; + +auto createSplineDTASet( const TString& jsonFileName, const TString& dModeStr, const TString& runStr, const TString& suffix, const TString& signalName, const std::vector& bkgndNames ) +{ + std::cout << "INFO - creating QFS signal DTA model" << std::endl; + const TString controlSplineName { Form("%s_Kpi_%s%s",signalName.Data(),runStr.Data(),suffix.Data()) }; + const TString controlEffName { Form("dteff_QFS_%s",runStr.Data()) }; + auto controlSpline = Lau1DCubicSpline::readFromJson( jsonFileName, controlSplineName ); + LauCubicSplineDecayTimeEfficiencyPtr controlEff { std::make_unique>( controlEffName, std::move( controlSpline ) ) }; + + LauSixthSplineDecayTimeEfficiencyPtr signalEff; + if ( dModeStr != "Kpi" ) { + std::cout << "INFO - creating " << dModeStr << " signal DTA model" << std::endl; + const TString signalSplineName { Form("%s_%s_%s%s",signalName.Data(),dModeStr.Data(),runStr.Data(),suffix.Data()) }; + const TString signalEffName { Form("dteff_CP%s_%s",dModeStr.Data(),runStr.Data()) }; + auto signalSpline = Lau1DCubicSpline::readFromJson( jsonFileName, signalSplineName ); + signalEff = std::make_unique>( signalEffName, *controlEff, std::move( signalSpline ) ); + } + + std::map bkgndEffs; + for ( auto& name : bkgndNames ) { + std::cout << "INFO - creating " << dModeStr << " " << name << " background DTA model" << std::endl; + const TString bkgndSplineName { Form("%s_%s_%s%s",name.Data(),dModeStr.Data(),runStr.Data(),suffix.Data()) }; + const TString bkgndEffName { Form("%s_DTA_ratio_%s_%s",name.Data(),runStr.Data(),dModeStr.Data()) }; + auto bkgndSpline = Lau1DCubicSpline::readFromJson( jsonFileName, bkgndSplineName ); + auto bkgndEff = std::make_unique>( bkgndEffName, *controlEff, std::move( bkgndSpline ) ); + bkgndEffs.emplace( std::make_pair( name, std::move(bkgndEff) ) ); + } + + return std::make_tuple( std::move(controlEff), std::move(signalEff), std::move(bkgndEffs) ); +} + +int main(const int argc, const char ** argv) +{ + const TestDpipi_ProgramSettings settings{argc,argv}; + if ( ! settings.parsedOK ) { + return EXIT_FAILURE; + } + if ( settings.helpRequested ) { + return EXIT_SUCCESS; + } + if ( settings.dataFit and settings.command == Command::Generate ) { + std::cerr << "I can't generate data! Wait for Run 3" << std::endl; + return EXIT_FAILURE; + } + if ( settings.dataFit and not settings.blindFit and not settings.fixPhiMix ) { + std::cerr << "We don't have permission to do unblinded fits yet!" << std::endl; + return EXIT_FAILURE; + } + if ( settings.run == 0 or settings.run > 2 ) { + std::cerr << "The Run number must be either 1 or 2" << std::endl; + return EXIT_FAILURE; + } + + // check that all required data files are present + const std::string signalYieldsJsonFile { "Bd2D0pipi_SignalYields.json" }; + const std::string dpModelJsonFile = Form("Bd2D0pipi_DP_Model_Coeffs_Vetoes.json"); + const std::string dtModelJsonFile { "Bd2D0pipi_DecayTime.json" }; + const std::string dtaModelJsonFile { "Bd2D0pipi_DecayTime_Acceptance.json" }; + const std::string taggingCalibJsonFile { "Bd2D0pipi_TaggingCalib.json" }; + + const std::array jsonFiles { + dpModelJsonFile, + dtModelJsonFile, + dtaModelJsonFile, + signalYieldsJsonFile, + taggingCalibJsonFile + }; + + for ( auto& jsonFile : jsonFiles ) { + if ( std::filesystem::exists( jsonFile ) ) { + continue; + } + + std::cerr << "WARNING : couldn't find the json file \"" << jsonFile << "\" attempting to fetch from eos ..." 
<< std::endl; + if ( system( Form("xrdcp %s/%s/%s .", settings.eosRoot.c_str(), settings.eosConfigDir.c_str(), jsonFile.c_str()) ) != 0 ) { + std::cerr << "FATAL : couldn't get the json file from remote!" << std::endl; + return EXIT_FAILURE; + } + } + + + const UInt_t runNo = settings.run; + const std::string runStr = Form("Run%u", runNo); + + const TString dModeStr { TString{settings.directory}.ReplaceAll("B2Dhh_","") }; + + LauRandom::setSeed(settings.RNGseed); + + LauDaughters* daughtersB0bar = new LauDaughters("B0_bar", "pi+", "pi-", "D0", kTRUE); + LauDaughters* daughtersB0 = new LauDaughters("B0", "pi+", "pi-", "D0_bar",kTRUE); + + // efficiency + auto vetoes { LauVetoes::readFromJson( dpModelJsonFile, "Bd2D0pipi_Vetoes" ) }; + LauEffModel* effModelB0bar = new LauEffModel(daughtersB0bar, vetoes.get()); + LauEffModel* effModelB0 = new LauEffModel(daughtersB0, vetoes.get()); + + TFile* effFile(nullptr); + TH2* effHist(nullptr); + + if (settings.directory == "B2Dhh_Kpi"){ + effFile = TFile::Open(Form("%s/%s/eff_map_mprime_thprime_model_d2kpi_run%u.root",settings.eosRoot.c_str(),settings.eosConfigDir.c_str(),runNo)); + effHist = dynamic_cast(effFile->Get(Form("eff_map_mprime_thprime_model_d2kpi_run%u",runNo))); + } else if (settings.directory == "B2Dhh_KK"){ + effFile = TFile::Open(Form("%s/%s/eff_map_mprime_thprime_model_d2kk_run%u.root",settings.eosRoot.c_str(),settings.eosConfigDir.c_str(),runNo)); + effHist = dynamic_cast(effFile->Get(Form("eff_map_mprime_thprime_model_d2kk_run%u",runNo))); + } else if (settings.directory == "B2Dhh_pipi"){ + effFile = TFile::Open(Form("%s/%s/eff_map_mprime_thprime_model_d2pipi_run%u.root",settings.eosRoot.c_str(),settings.eosConfigDir.c_str(),runNo)); + effHist = dynamic_cast(effFile->Get(Form("eff_map_mprime_thprime_model_d2pipi_run%u",runNo))); + } else {std::cerr << "bad dir given!" << std::endl; return EXIT_FAILURE;} + + effModelB0->setEffHisto(effHist, kTRUE, kFALSE, 0.0, 0.0, kFALSE, kTRUE); + effModelB0bar->setEffHisto(effHist, kTRUE, kFALSE, 0.0, 0.0, kFALSE, kTRUE); + + // setup flavour tagging + const Bool_t useAveDeltaCalibVals { settings.useAveDeltaCalibVals }; + const Bool_t useEtaPrime { kFALSE }; + LauFlavTag* flavTag = new LauFlavTag(useAveDeltaCalibVals,useEtaPrime); + if (settings.dType == LauTimeDepFitModel::CPEigenvalue::QFS) { + flavTag->setDecayFlvVarName("decayFlv"); + } + //if(not settings.dataFit) + //{ + // flavTag->setTrueTagVarName("trueTag"); + //} + + TFile* etaFile = TFile::Open(Form("%s/%s/ETAhists.root",settings.eosRoot.c_str(),settings.eosConfigDir.c_str())); + TH1* etaHist{nullptr}; + TH1* etaHistB0{nullptr}; + TH1* etaHistB0bar{nullptr}; + + if (settings.directory == "B2Dhh_Kpi"){ + etaHistB0 = dynamic_cast(etaFile->Get(Form("signalMC_Kpi_Run%u_etaHist_dcyB",runNo))); + etaHistB0bar = dynamic_cast(etaFile->Get(Form("signalMC_Kpi_Run%u_etaHist_dcyBbar",runNo))); + etaHist = dynamic_cast(etaHistB0->Clone(Form("signalMC_Kpi_Run%u_etaHist",runNo))); + etaHist->Add(etaHistB0bar); + } else if (settings.directory == "B2Dhh_KK"){ + etaHist = dynamic_cast(etaFile->Get(Form("signalMC_KK_Run%u_etaHist",runNo))); + } else if (settings.directory == "B2Dhh_pipi"){ + etaHist = dynamic_cast(etaFile->Get(Form("signalMC_pipi_Run%u_etaHist",runNo))); + } else {std::cerr << "bad dir given!" 
<< std::endl; return EXIT_FAILURE;} + + // Crude check as to whether we're doing perfect vs realistic mis-tag + // - in the former case all entries should be in the first bin + // If the tagging is perfect then don't interpolate the eta histogram + // and also make it perfectly efficient, otherwise do interpolate and + // make it 50% efficient + Lau1DHistPdf* etaHistPdf = new Lau1DHistPdf( "eta", etaHist, 0.0, 0.5, kTRUE, kFALSE ); + + if(settings.addTaggers) { + // NB deliberately not using uniform initialisation here because of this issue: + // https://json.nlohmann.me/home/faq/#brace-initialization-yields-arrays + const nlohmann::json taggingCalibJson = LauJsonTools::readJsonFile( taggingCalibJsonFile, "", LauJsonTools::JsonType::Array ); + if ( taggingCalibJson.is_null() ) { + std::cerr << "ERROR : unable to retrieve root JSON object from file \"" << taggingCalibJsonFile << "\"" << std::endl; + return EXIT_FAILURE; + } + + for ( const auto& taggerConfig : taggingCalibJson ) { + if ( taggerConfig.at("name").get() == ("IFT_" + runStr) ) { + flavTag->addTagger( taggerConfig, etaHistPdf ); + } + } + + if (settings.floatCalibPars) { + flavTag->floatAllCalibPars(); + } + } + + // signal dynamics + LauIsobarDynamics* sigModelB0bar = new LauIsobarDynamics(daughtersB0bar, effModelB0bar); + if (settings.directory == "B2Dhh_Kpi"){ + sigModelB0bar->setIntFileName(Form("integ_Kpi_Run%u_B0bar.dat",runNo)); + } else if (settings.directory == "B2Dhh_KK"){ + sigModelB0bar->setIntFileName(Form("integ_KK_Run%u_B0bar.dat",runNo)); + } else if (settings.directory == "B2Dhh_pipi"){ + sigModelB0bar->setIntFileName(Form("integ_pipi_Run%u_B0bar.dat",runNo)); + } + sigModelB0bar->constructModelFromJson( dpModelJsonFile, "Bd2D0pipi_B0bar_Model" ); + + LauIsobarDynamics* sigModelB0 = new LauIsobarDynamics(daughtersB0, effModelB0); + if (settings.directory == "B2Dhh_Kpi"){ + sigModelB0->setIntFileName(Form("integ_Kpi_Run%u_B0.dat",runNo)); + } else if (settings.directory == "B2Dhh_KK"){ + sigModelB0->setIntFileName(Form("integ_KK_Run%u_B0.dat",runNo)); + } else if (settings.directory == "B2Dhh_pipi"){ + sigModelB0->setIntFileName(Form("integ_pipi_Run%u_B0.dat",runNo)); + } + sigModelB0->constructModelFromJson( dpModelJsonFile, "Bd2D0pipi_B0_Model" ); + + // fit model + LauTimeDepFitModel* fitModel = new LauTimeDepFitModel(sigModelB0bar,sigModelB0,flavTag); + fitModel->breakOnBadLikelihood(kFALSE); + fitModel->sendFixedYieldsToFitter(kTRUE); + + std::vector> coeffset { LauAbsCoeffSet::readFromJson( dpModelJsonFile, "Bd2D0pipi_Coeffs" ) }; + + for ( auto& coeff : coeffset ) { + fitModel->setAmpCoeffSet( std::move(coeff) ); + } + + // decay type and mixing parameter + const Bool_t fixPhiMix{ settings.fixPhiMix || settings.dType == LauTimeDepFitModel::CPEigenvalue::QFS }; + const Bool_t useSinCos{ settings.useSinCos }; + + fitModel->setCPEigenvalue( settings.dType ); + fitModel->setPhiMix( 2.0*LauConstants::beta, fixPhiMix, useSinCos ); + + if ( useSinCos ) { + if ( settings.blindFit ) { + //Blinding strings generated from: https://www.thewordfinder.com/random-sentence-generator/ + const TString sinBlindString{settings.dataFit ? "Tom realized he could be making a big mistake." : "Don't Look a Gift Horse In The Mouth"}; + const TString cosBlindString{settings.dataFit ? "He admitted defeat." : "Long In The Tooth"}; + fitModel->blindPhiMix( sinBlindString, cosBlindString ); + } + std::cout << "Is sin(phiMix) blinded? " << std::boolalpha << fitModel->sinPhiMixBlinded() << std::endl; + std::cout << "Is cos(phiMix) blinded? 
" << std::boolalpha << fitModel->cosPhiMixBlinded() << std::endl; + } else { + if ( settings.blindFit ) { + const TString blindString{"A Cut Above"}; + fitModel->blindPhiMix( blindString ); + } + std::cout << "Is phiMix blinded? " << std::boolalpha << fitModel->phiMixBlinded() << std::endl; + } + + // production asymmetries + fitModel->setAsymmetries( 0.0, kTRUE ); + + // decay time PDFs + const Double_t minDt{0.2}; + const Double_t maxDt{15.0}; + const Double_t minDtErr{0.0}; + const Double_t maxDtErr{0.15}; + + + // physics models + auto dtPhysicsModels { LauDecayTimePhysicsModel::readFromJson( dtModelJsonFile, "PhysicsModels" ) }; + + auto& dtPhysModel { dtPhysicsModels.at("signal") }; + auto lifetimePar = dynamic_cast( dtPhysModel->findParameter("dt_sig_tau_Bd") ); + if ( lifetimePar ) { + lifetimePar->fixed( settings.fixLifetime ); + } + auto deltaMPar = dynamic_cast( dtPhysModel->findParameter("dt_sig_deltaM_Bd") ); + if ( deltaMPar ) { + deltaMPar->fixed( settings.fixDeltaM ); + } + + // resolution models + auto dtResolutionModels { LauDecayTimeResolution::readFromJson( dtModelJsonFile, Form("ResolutionModels_Run%d", runNo) ) }; + auto& dtResoModel { dtResolutionModels.at("signal") }; + + // Decay time error histogram + // (always set this so that it gets generated properly, whether we're using it in the PDF or not) + TFile* dtErrFile = TFile::Open(Form("%s/%s/dte-hist.root",settings.eosRoot.c_str(),settings.eosConfigDir.c_str())); + TH1* dtErrHist = dynamic_cast(dtErrFile->Get("dte_hist")); + + LauDecayTimePdf * dtPdf{nullptr}; + if ( settings.timeResolution ) { + if ( settings.perEventTimeErr ) { + std::cerr << "Warning : the resolution model has not been constructed with per-event error scaling, but you have requested it" << std::endl; + dtPdf = new LauDecayTimePdf( "decayTime", minDt, maxDt, "decayTimeErr", minDtErr, maxDtErr, std::move(dtPhysModel), std::move(dtResoModel), dtErrHist ); + } else { + dtPdf = new LauDecayTimePdf( "decayTime", minDt, maxDt, std::move(dtPhysModel), std::move(dtResoModel) ); + } + } else { + dtPdf = new LauDecayTimePdf( "decayTime", minDt, maxDt, std::move(dtPhysModel) ); + } + + // Decay time acceptance histogram + TFile* dtaFile = TFile::Open(Form("%s/%s/DTAhists.root",settings.eosRoot.c_str(),settings.eosConfigDir.c_str())); + TH1* dtaHist{nullptr}; + + if (settings.directory == "B2Dhh_Kpi"){ + dtaHist = dynamic_cast(dtaFile->Get(Form("signalMC_Kpi_Run%u_DTAhist",runNo))); + } else if (settings.directory == "B2Dhh_KK"){ + dtaHist = dynamic_cast(dtaFile->Get(Form("signalMC_KK_Run%u_DTAhist",runNo))); + } else if (settings.directory == "B2Dhh_pipi"){ + dtaHist = dynamic_cast(dtaFile->Get(Form("signalMC_pipi_Run%u_DTAhist",runNo))); + } else {std::cerr << "bad dir given!" 
<< std::endl; return EXIT_FAILURE;} + + const TString signalName { "signalMC" }; + std::vector<TString> bkgndNames; + const TString suffix { "_DTAhist" }; + + auto [ qfsDTAModel, cpDTAModel, _ ] = createSplineDTASet( dtaModelJsonFile, dModeStr, runStr, suffix, signalName, bkgndNames ); + auto qfsDTAModelPtr = qfsDTAModel.get(); + + switch(settings.timeEffModel) + { + case LauDecayTime::EfficiencyMethod::Spline: + { + fitModel->setASqMaxValue(0.01); + if ( settings.dType == LauTimeDepFitModel::CPEigenvalue::QFS ) { + fitModel->setASqMaxValue(0.009); + } + + dynamic_cast<LauParameter*>(qfsDTAModel->getParameters().front())->maxValue(1e-1); + //qfsDTAModel->fitToTH1(dtaHist); + + // Set which knots to float and which to fix (at least 1 knot must be fixed, not the first one) + // Knots should only be floating if requested AND the B lifetime is fixed! + if ( settings.fixSplineKnots or not settings.fixLifetime ) { + qfsDTAModel->fixKnots(); + } else { + qfsDTAModel->floatKnots(); + qfsDTAModel->fixKnot( 0, true ); + qfsDTAModel->fixKnot( 3, true ); + } + + if ( settings.dType == LauTimeDepFitModel::CPEigenvalue::QFS ) { + dtPdf->setSplineEfficiency( std::move(qfsDTAModel) ); + } else { + dtPdf->setSplineEfficiency( std::move(cpDTAModel) ); + } + + break; + } + + case LauDecayTime::EfficiencyMethod::Binned: + { + fitModel->setASqMaxValue(0.06); + + auto dtaBinnedModel = std::make_unique<LauBinnedDecayTimeEfficiency>( *dtaHist ); + dtPdf->setBinnedEfficiency( std::move(dtaBinnedModel) ); + + break; + } + + case LauDecayTime::EfficiencyMethod::Uniform: + { + fitModel->setASqMaxValue(4.45); + break; + } + } + + fitModel->setSignalDtPdf( dtPdf ); + + // Signal and bkgnd yields + + const Bool_t fixYields {kTRUE};//{ settings.bkgndList.empty() or not settings.floatYields }; + + TString yieldName; + LauParameter* nSigPar{nullptr}; + + // NB deliberately not using uniform initialisation here because of this issue: + // https://json.nlohmann.me/home/faq/#brace-initialization-yields-arrays + const nlohmann::json signalYieldsJson = LauJsonTools::readJsonFile( signalYieldsJsonFile, "", LauJsonTools::JsonType::Object ); + if ( signalYieldsJson.is_null() ) { + std::cerr << "ERROR : unable to retrieve root JSON object from file \"" << signalYieldsJsonFile << "\"" << std::endl; + return EXIT_FAILURE; + } + + const auto& nSigJson { signalYieldsJson.at(runStr).at(settings.directory) }; + yieldName = Form("signalEvents_%s_Run%u", settings.directory.c_str(), runNo); + if ( nSigJson.is_number() ) { + const auto nSigEvents{ nSigJson.get<Double_t>() }; + nSigPar = new LauParameter(yieldName, nSigEvents, -2.0*TMath::Abs(nSigEvents), 2.0*TMath::Abs(nSigEvents), fixYields); + } else { + const auto nSigEvents { LauJsonTools::getValue<Double_t>( nSigJson, "value" ) }; + nSigPar = new LauParameter(yieldName, nSigEvents, -2.0*TMath::Abs(nSigEvents), 2.0*TMath::Abs(nSigEvents), fixYields); + + if ( ! 
fixYields && LauJsonTools::getOptionalValue<Bool_t>( nSigJson, "constrained", LauJsonTools::JsonType::Boolean ).value_or(kFALSE) ) { + const auto nSigEventsErr { LauJsonTools::getValue<Double_t>( nSigJson, "error" ) }; + nSigPar->removeLimits(); + nSigPar->error( nSigEventsErr ); + nSigPar->addGaussianConstraint( nSigEvents, nSigEventsErr ); + } + } + if ( nSigPar->value() < 0.0 ) { + if ( settings.command == Command::Generate ) { + std::cerr << "WARNING : setting negative signal yield to 0 for generation" << std::endl; + } + nSigPar->genValue( 0.0 ); + } + fitModel->setNSigEvents(nSigPar); + + // set the number of experiments + if (settings.command == Command::Generate) { + fitModel->setNExpts(settings.nExptGen, settings.firstExptGen, kTRUE); + } else { + fitModel->setNExpts(settings.nExptFit, settings.firstExptFit, !settings.dataFit); + } + + fitModel->useAsymmFitErrors(kFALSE); + fitModel->useRandomInitFitPars(kFALSE); + fitModel->writeLatexTable(kFALSE); + const Bool_t haveBkgnds = ( fitModel->nBkgndClasses() > 0 ); + fitModel->doPoissonSmearing(haveBkgnds); + fitModel->doEMLFit(haveBkgnds); + + TString dTypeStr; + switch (settings.dType) { + case LauTimeDepFitModel::CPEigenvalue::CPEven : + dTypeStr = "CPEven"; + break; + case LauTimeDepFitModel::CPEigenvalue::CPOdd : + dTypeStr = "CPOdd"; + break; + case LauTimeDepFitModel::CPEigenvalue::QFS : + dTypeStr = "QFS"; + break; + } + + TString dataFile(""); + TString treeName("fitTree"); + TString rootFileName(""); + TString tableFileName(""); + TString fitToyFileName(""); + TString splotFileName(""); + + dataFile = "TEST-Dpipi"; + + dataFile += "_"+dTypeStr+"_"+settings.directory; + + switch(settings.timeEffModel) + { + case LauDecayTime::EfficiencyMethod::Spline: + dataFile += "_Spline"; + break; + case LauDecayTime::EfficiencyMethod::Binned: + dataFile += "_Binned"; + break; + case LauDecayTime::EfficiencyMethod::Uniform: + dataFile += "_Uniform"; + break; + } + + if (settings.timeResolution) { + if (settings.perEventTimeErr) { + dataFile += "_DTRperevt"; + } else { + dataFile += "_DTRavg"; + } + } else { + dataFile += "_DTRoff"; + } + + dataFile += "_expts"; + dataFile += settings.firstExptGen; + dataFile += "-"; + dataFile += settings.firstExptGen+settings.nExptGen-1; + dataFile += Form("-Run%u",runNo); + + dataFile += ".root"; + + if (settings.dataFit) + { + if(runNo == 2) + { + dataFile = Form("%s/%s/Run2/B2Dhh-CollisionCombined-MagCombined-Stripping24r2-withDNN-withPIDcorr-withMVA-withPIDMVA-WithDpiMatching-withSwappedMassHypotheses-withKKvetoes-withSelection-LauraPrepped.root",settings.eosRoot.c_str(),settings.eosDataDir.c_str()); + } else { + dataFile = Form("%s/%s/Run1/B2Dhh-CollisionCombined-MagCombined-Stripping21r1p2-withDNN-withPIDcorr-withMVA-withPIDMVA-WithDpiMatching-withSwappedMassHypotheses-withKKvetoes-withSelection-LauraPrepped.root",settings.eosRoot.c_str(),settings.eosDataDir.c_str()); + } + //treeName = Form("%s/B2DhhReco",settings.directory.data()); + treeName = "fitTree"; + } + else if (settings.directory != "B2Dhh_Kpi") + { + TString dir = settings.directory; + dir = TString(dir("_.+$")); + dir.ReplaceAll("_",""); + dataFile = Form("Bd2pipiD0-D02%s-%s-BootstrapSample.root", dir.Data(), runStr.c_str()); + + treeName = "fitTree"; + } + + if (settings.command == Command::Generate) { + rootFileName = "dummy.root"; + tableFileName = "genResults"; + } else { + rootFileName = "fit"; rootFileName += settings.iFit; + rootFileName += "_Results_"; rootFileName += dTypeStr; + rootFileName += "_"; rootFileName += settings.directory; + 
rootFileName += "_expts"; rootFileName += settings.firstExptFit; rootFileName += "-"; rootFileName += settings.firstExptFit+settings.nExptFit-1; + rootFileName += Form("_Run%u",runNo); + rootFileName += ".root"; + + tableFileName = "fit"; tableFileName += settings.iFit; + tableFileName += "_Results_"; tableFileName += dTypeStr; + tableFileName += "_"; tableFileName += settings.directory; + tableFileName += "_expts"; tableFileName += settings.firstExptFit; tableFileName += "-"; tableFileName += settings.firstExptFit+settings.nExptFit-1; + tableFileName += Form("_Run%u",runNo); + + fitToyFileName = "fit"; fitToyFileName += settings.iFit; + fitToyFileName += "_ToyMC_"; fitToyFileName += dTypeStr; + fitToyFileName += "_"; fitToyFileName += settings.directory; + fitToyFileName += "_expts"; fitToyFileName += settings.firstExptFit; fitToyFileName += "-"; fitToyFileName += settings.firstExptFit+settings.nExptFit-1; + fitToyFileName += Form("_Run%u",runNo); + fitToyFileName += ".root"; + + splotFileName = "fit"; splotFileName += settings.iFit; + splotFileName += "_sPlot_"; splotFileName += dTypeStr; + splotFileName += "_"; splotFileName += settings.directory; + splotFileName += "_expts"; splotFileName += settings.firstExptFit; splotFileName += "-"; splotFileName += settings.firstExptFit+settings.nExptFit-1; + splotFileName += Form("_Run%u",runNo); + splotFileName += ".root"; + } + + if( settings.genToy and not settings.blindFit ) + { + fitModel->compareFitData(50, fitToyFileName); + } + + // Generate toy from the fitted parameters + //fitModel->compareFitData(1, fitToyFileName); + + // Write out per-event likelihoods and sWeights + //fitModel->writeSPlotData(splotFileName, "splot", kFALSE); + + // Execute the generation/fit + switch (settings.command) { + case Command::Generate : + fitModel->run( "gen", dataFile, treeName, rootFileName, tableFileName ); + break; + case Command::Fit : + fitModel->run( "fit", dataFile, treeName, rootFileName, tableFileName ); + break; + case Command::SimFit : + fitModel->runTask( dataFile, treeName, rootFileName, tableFileName, "localhost", settings.port ); + break; + } + + // Write JSON files with the fitted values + + // signal yield + nlohmann::json signalYieldJson = nlohmann::json::object(); + signalYieldJson[ runStr ] = nlohmann::json::object(); + signalYieldJson[ runStr ][ settings.directory ] = nlohmann::json::object(); + signalYieldJson[ runStr ][ settings.directory ][ "value" ] = nSigPar->value(); + signalYieldJson[ runStr ][ settings.directory ][ "error" ] = nSigPar->error(); + signalYieldJson[ runStr ][ settings.directory ][ "constrained" ] = nSigPar->gaussConstraint(); + LauJsonTools::writeJsonFile( signalYieldJson, "Bd2D0pipi_SignalYield_" + settings.directory + "_" + runStr + ".json" ); + + if (settings.dType == LauTimeDepFitModel::CPEigenvalue::QFS) { + // QFS DTA spline + qfsDTAModelPtr->writeSplineToJson( "Bd2D0pipi_DecayTime_Acceptance_Fitted_" + runStr + ".json" ); + // FT calibration parameters + flavTag->writeTaggersToJson( "Bd2D0pipi_TaggingCalib_Fitted_" + runStr + ".json" ); + } + + return EXIT_SUCCESS; +} diff --git a/examples/SimFitCoordinator_Bd2Dpipi.cc b/examples/SimFitCoordinator_Bd2Dpipi.cc new file mode 100644 index 0000000..9ced8c5 --- /dev/null +++ b/examples/SimFitCoordinator_Bd2Dpipi.cc @@ -0,0 +1,165 @@ + +/* +Copyright 2013 University of Warwick + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +/* +Laura++ package authors: +John Back +Paul Harrison +Thomas Latham +*/ + +#include <array> +#include <cstdlib> +#include <iostream> +#include <map> +#include <memory> +#include <string> +#include <vector> + +#include "nlohmann/json.hpp" + +#include "TFile.h" +#include "TMatrixD.h" +#include "TRandom.h" +#include "TString.h" +#include "TSystem.h" +#include "TVectorD.h" + +#include "LauJsonTools.hh" +#include "LauSimFitCoordinator.hh" + +void usage( std::ostream& out, const TString& progName ) +{ + out<<"Usage:\n"; + out<<progName<<" <iFit> <nExpt> [firstExpt = 0] [numTasks = 2] [port = 0] [refFitDir] \n"; +} + +int main(const int argc, const char ** argv) +{ + if ( argc < 3 ) { + usage( std::cerr, argv[0] ); + return EXIT_FAILURE; + } + + UInt_t iFit = atoi( argv[1] ); + UInt_t nExpt = atoi( argv[2] ); + UInt_t firstExpt = 0; + UInt_t nTasks = 2; + UInt_t port = 0; + TString refDirName = ""; + Bool_t useAsymmErrors = kFALSE; + Bool_t twoStageFit = kFALSE; + TString ntupleName = ""; + + if ( argc > 3 ) { + firstExpt = atoi( argv[3] ); + + if ( argc > 4 ) { + nTasks = atoi( argv[4] ); + + if ( argc > 5 ) { + port = atoi( argv[5] ); + + if ( argc > 6 ) { + refDirName = argv[6]; + ntupleName += refDirName; ntupleName += "/"; + } + } + } + } + + UInt_t lastExpt = firstExpt + nExpt - 1; + + ntupleName += "coordinator-ntuple_expts"; + ntupleName += firstExpt; + ntupleName += "-"; + ntupleName += lastExpt; + ntupleName += "_fit"; + ntupleName += iFit; + ntupleName += ".root"; + + LauSimFitCoordinator coordinator( nTasks, port ); + + const std::string signalYieldsJsonFile { "Bd2D0pipi_SignalYields.json" }; + const std::string bkgndsJsonFile { "Bd2D0pipi_Bkgnds.json" }; + + const nlohmann::json signalYieldsJson = LauJsonTools::readJsonFile( signalYieldsJsonFile, "", LauJsonTools::JsonType::Object ); + if ( signalYieldsJson.is_null() ) { + std::cerr << "ERROR : unable to retrieve root JSON object from file \"" << signalYieldsJsonFile << "\"" << std::endl; + return EXIT_FAILURE; + } + + const nlohmann::json bkgndsJson = LauJsonTools::readJsonFile( bkgndsJsonFile, "", LauJsonTools::JsonType::Object ); + if ( bkgndsJson.is_null() ) { + std::cerr << "ERROR : unable to retrieve root JSON object from file \"" << bkgndsJsonFile << "\"" << std::endl; + return EXIT_FAILURE; + } + + const std::array<std::string,2> runs {"Run1", "Run2"}; + const std::array<std::string,3> modes {"Kpi", "KK", "pipi"}; + const std::map<std::string,std::size_t> bkgndIndex { + { "comb" , 1 }, + { "Bd2DKpi" , 2 }, + { "Bs2DKpi" , 3 }, + { "Lb2Dppi" , 4 }, + { "Bu2Dstpi_Dpi0" , 5 }, + { "Bu2Dstpi" , 6 }, + { "Bd2Dstpi" , 7 } + }; + const std::size_t nComponents{bkgndIndex.size()+1}; + + for ( auto& run : runs ) { + for ( auto& mode : modes ) { + + const std::string dir { "B2Dhh_" + mode }; + + std::vector<TString> parNames; + parNames.resize( nComponents ); + TVectorD means( nComponents ); + + parNames[0] = Form( "signalEvents_%s_%s", dir.c_str(), run.c_str() ); + + const auto& nSigJson { signalYieldsJson.at(run).at(dir) }; + if ( nSigJson.is_number() ) { + means[0] = nSigJson.get<Double_t>(); + } else { + means[0] = LauJsonTools::getValue<Double_t>( nSigJson, "value" ); + } + + + for ( auto& [ bkgndName, bkgndJson ] : bkgndsJson.items() ) { + std::size_t iBkgnd{ bkgndIndex.at(bkgndName) }; + std::cout << "Processing background " << iBkgnd << " called " << bkgndName << 
std::endl; + + parNames[iBkgnd] = Form( "%sEvents_%s_%s", bkgndName.c_str(), dir.c_str(), run.c_str() ); + + const auto& yieldJson { bkgndJson.at("yields").at(run).at(dir) }; + if ( yieldJson.is_number() ) { + means[iBkgnd] = yieldJson.get<Double_t>(); + } else { + means[iBkgnd] = LauJsonTools::getValue<Double_t>( yieldJson, "value" ); + } + } + + std::unique_ptr<TFile> file { TFile::Open( Form( "root://eoslhcb.cern.ch//eos/lhcb/wg/b2oc/B2D0pipi/timedep_fit_histos/Overall_fitResult_%s_%s.root", run.c_str(), mode.c_str() ) ) }; + TMatrixD * covMat { static_cast<TMatrixD*>( file->Get("covariance_sigRegion") ) }; + + coordinator.addMultiDimConstraint( parNames, means, *covMat ); + + file->Close(); + } + } + + coordinator.runSimFit( ntupleName, nExpt, firstExpt, useAsymmErrors, twoStageFit ); + + return EXIT_SUCCESS; +} diff --git a/examples/runTimeDepTest.sh b/examples/runTimeDepTest.sh index 7b1ede5..9c3cdd9 100755 --- a/examples/runTimeDepTest.sh +++ b/examples/runTimeDepTest.sh @@ -1,236 +1,238 @@ #!/bin/bash #SBATCH --partition=epp #SBATCH --ntasks=1 #SBATCH --cpus-per-task=6 #SBATCH --mem-per-cpu=3997 #SBATCH --time=02:00:00 #Run with sbatch runTimeDepTest.sh # Copyright 2020 University of Warwick # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # Laura++ package authors: # John Back # Paul Harrison # Thomas Latham processes=() # trap ^C and call ctrl_c() trap ctrl_c INT function ctrl_c() { echo " ** Caught ^C event!" for process in ${processes[@]} do if ps -u $USER | grep -q "$process" then echo "Killing : $process" kill $process fi done exit 1 } # Check that we have a kerberos token, since nothing will work without one! klist -s > /dev/null 2>&1 if [ $? -ne 0 ] then echo "You don't have a valid Kerberos ticket!" 
exit 1 fi iFit=0 dta_model="spline" dtr=1 dtr_perevent=0 fixTau=1 fixDeltaM=1 floatYields=1 seed=140279 # $(pwd | xargs basename) dataFit=1 runNo=12 # Modify these if you want to run more experiments nExpt=1 firstExpt=0 logName_genQFS_run1=Logs/gen-QFS-run1-dtamodel_$dta_model-dtr_$dtr-dtrperevent_$dtr_perevent.log logName_genQFS_run2=Logs/gen-QFS-run2-dtamodel_$dta_model-dtr_$dtr-dtrperevent_$dtr_perevent.log logName_genCPE_KK_run1=Logs/gen-CPEvenKK-run1-dtamodel_$dta_model-dtr_$dtr-dtrperevent_$dtr_perevent.log logName_genCPE_KK_run2=Logs/gen-CPEvenKK-run2-dtamodel_$dta_model-dtr_$dtr-dtrperevent_$dtr_perevent.log logName_genCPE_pipi_run1=Logs/gen-CPEvenpipi-run1-dtamodel_$dta_model-dtr_$dtr-dtrperevent_$dtr_perevent.log logName_genCPE_pipi_run2=Logs/gen-CPEvenpipi-run2-dtamodel_$dta_model-dtr_$dtr-dtrperevent_$dtr_perevent.log logName_coord=Logs/coordinator-dtamodel_$dta_model-dtr_$dtr-dtrperevent_$dtr_perevent-$iFit.log logName_taskQFS_run1=Logs/task-QFS-run1-dtamodel_$dta_model-dtr_$dtr-dtrperevent_$dtr_perevent-$iFit.log logName_taskQFS_run2=Logs/task-QFS-run2-dtamodel_$dta_model-dtr_$dtr-dtrperevent_$dtr_perevent-$iFit.log logName_taskCPE_KK_run1=Logs/task-CPEvenKK-run1-dtamodel_$dta_model-dtr_$dtr-dtrperevent_$dtr_perevent-$iFit.log logName_taskCPE_KK_run2=Logs/task-CPEvenKK-run2-dtamodel_$dta_model-dtr_$dtr-dtrperevent_$dtr_perevent-$iFit.log logName_taskCPE_pipi_run1=Logs/task-CPEvenpipi-run1-dtamodel_$dta_model-dtr_$dtr-dtrperevent_$dtr_perevent-$iFit.log logName_taskCPE_pipi_run2=Logs/task-CPEvenpipi-run2-dtamodel_$dta_model-dtr_$dtr-dtrperevent_$dtr_perevent-$iFit.log mkdir -p Logs # make sure we have the most up-to-date json files eosRoot="root://eoslhcb.cern.ch//eos/lhcb/wg/b2oc/B2D0pipi" eosConfigDir="timedep_fit_histos" eosArea="$eosRoot/$eosConfigDir" for jsonfile in Bd2D0pipi_Bkgnds.json Bd2D0pipi_DP_Model_Coeffs_Vetoes.json Bd2D0pipi_DecayTime.json Bd2D0pipi_DecayTime_Acceptance.json Bd2D0pipi_SignalYields.json Bd2D0pipi_TaggingCalib.json do rm -f $jsonfile xrdcp $eosArea/$jsonfile . if [ $? -ne 0 ] then echo "Problem downloading json files from EOS, exiting" exit 1 fi done if [ $iFit == 0 -a $dataFit == 0 ] then #run generation of the modes in parallel (put the tasks in the bg) echo "Generating samples..." if [ $runNo -eq 1 -o $runNo -eq 12 ] then GenFitTimeDep_Bd2Dpipi gen --dtype QFS --eosConfigDir $eosConfigDir --fixTau $fixTau --seed $seed --dta-model $dta_model --dtr $dtr --dtr-perevent $dtr_perevent --firstExptGen $firstExpt --nExptGen $nExpt --run 1 --dir B2Dhh_Kpi > $logName_genQFS_run1 2>&1 & task=$! processes+=( $task ) GenFitTimeDep_Bd2Dpipi gen --dtype CPEven --eosConfigDir $eosConfigDir --fixTau $fixTau --seed $seed --dta-model $dta_model --dtr $dtr --dtr-perevent $dtr_perevent --firstExptGen $firstExpt --nExptGen $nExpt --run 1 --dir B2Dhh_KK > $logName_genCPE_KK_run1 2>&1 & task=$! processes+=( $task ) GenFitTimeDep_Bd2Dpipi gen --dtype CPEven --eosConfigDir $eosConfigDir --fixTau $fixTau --seed $seed --dta-model $dta_model --dtr $dtr --dtr-perevent $dtr_perevent --firstExptGen $firstExpt --nExptGen $nExpt --run 1 --dir B2Dhh_pipi > $logName_genCPE_pipi_run1 2>&1 & task=$! processes+=( $task ) fi if [ $runNo -eq 2 -o $runNo -eq 12 ] then GenFitTimeDep_Bd2Dpipi gen --dtype QFS --eosConfigDir $eosConfigDir --fixTau $fixTau --seed $seed --dta-model $dta_model --dtr $dtr --dtr-perevent $dtr_perevent --firstExptGen $firstExpt --nExptGen $nExpt --run 2 --dir B2Dhh_Kpi > $logName_genQFS_run2 2>&1 & task=$! 
processes+=( $task ) GenFitTimeDep_Bd2Dpipi gen --dtype CPEven --eosConfigDir $eosConfigDir --fixTau $fixTau --seed $seed --dta-model $dta_model --dtr $dtr --dtr-perevent $dtr_perevent --firstExptGen $firstExpt --nExptGen $nExpt --run 2 --dir B2Dhh_KK > $logName_genCPE_KK_run2 2>&1 & task=$! processes+=( $task ) GenFitTimeDep_Bd2Dpipi gen --dtype CPEven --eosConfigDir $eosConfigDir --fixTau $fixTau --seed $seed --dta-model $dta_model --dtr $dtr --dtr-perevent $dtr_perevent --firstExptGen $firstExpt --nExptGen $nExpt --run 2 --dir B2Dhh_pipi > $logName_genCPE_pipi_run2 2>&1 & task=$! processes+=( $task ) fi wait #wait for the generation to complete fi echo "Running fit $iFit" if [ $runNo -eq 12 ] then nTasks=6 else nTasks=3 fi -SimFitCoordinator $iFit $nExpt $firstExpt $nTasks > $logName_coord 2>&1 & +SimFitCoordinator_Bd2Dpipi $iFit $nExpt $firstExpt $nTasks > $logName_coord 2>&1 & task=$! echo "SimFitCoordinator process : $task" processes+=( $task ) echo "Initialised coordinator process for fit $iFit" sleep 5 port="" while [ "x$port" == "x" ] do sleep 2 port=`tail $logName_coord | grep "Waiting for connection" | awk '{print $NF}'` done echo "Coordinator is listening on port $port" echo "Initialising tasks..." if [ $runNo -eq 1 -o $runNo -eq 12 ] then GenFitTimeDep_Bd2Dpipi simfit --blindFit $dataFit --dataFit $dataFit --eosConfigDir $eosConfigDir --dtype QFS --seed $seed --port $port --iFit $iFit --fixTau $fixTau --dta-model $dta_model --dtr $dtr --dtr-perevent $dtr_perevent --firstExpt $firstExpt --nExpt $nExpt --firstExptGen $firstExpt --nExptGen $nExpt --run 1 --dir B2Dhh_Kpi --fixDm $fixDeltaM --floatYields $floatYields > $logName_taskQFS_run1 2>&1 & task=$! echo "QFS Run 1 process : $task" processes+=( $task ) sleep 5 GenFitTimeDep_Bd2Dpipi simfit --blindFit $dataFit --dataFit $dataFit --eosConfigDir $eosConfigDir --dtype CPEven --seed $seed --port $port --iFit $iFit --fixTau $fixTau --dta-model $dta_model --dtr $dtr --dtr-perevent $dtr_perevent --firstExpt $firstExpt --nExpt $nExpt --firstExptGen $firstExpt --nExptGen $nExpt --run 1 --dir B2Dhh_KK --fixDm $fixDeltaM --floatYields $floatYields > $logName_taskCPE_KK_run1 2>&1 & task=$! echo "CP KK Run 1 process : $task" processes+=( $task ) sleep 5 GenFitTimeDep_Bd2Dpipi simfit --blindFit $dataFit --dataFit $dataFit --eosConfigDir $eosConfigDir --dtype CPEven --seed $seed --port $port --iFit $iFit --fixTau $fixTau --dta-model $dta_model --dtr $dtr --dtr-perevent $dtr_perevent --firstExpt $firstExpt --nExpt $nExpt --firstExptGen $firstExpt --nExptGen $nExpt --run 1 --dir B2Dhh_pipi --fixDm $fixDeltaM --floatYields $floatYields > $logName_taskCPE_pipi_run1 2>&1 & task=$! echo "CP pipi Run 1 process : $task" processes+=( $task ) + sleep 5 fi if [ $runNo -eq 2 -o $runNo -eq 12 ] then GenFitTimeDep_Bd2Dpipi simfit --blindFit $dataFit --dataFit $dataFit --eosConfigDir $eosConfigDir --dtype QFS --seed $seed --port $port --iFit $iFit --fixTau $fixTau --dta-model $dta_model --dtr $dtr --dtr-perevent $dtr_perevent --firstExpt $firstExpt --nExpt $nExpt --firstExptGen $firstExpt --nExptGen $nExpt --run 2 --dir B2Dhh_Kpi --fixDm $fixDeltaM --floatYields $floatYields > $logName_taskQFS_run2 2>&1 & task=$! 
echo "QFS Run 2 process : $task" processes+=( $task ) sleep 5 GenFitTimeDep_Bd2Dpipi simfit --blindFit $dataFit --dataFit $dataFit --eosConfigDir $eosConfigDir --dtype CPEven --seed $seed --port $port --iFit $iFit --fixTau $fixTau --dta-model $dta_model --dtr $dtr --dtr-perevent $dtr_perevent --firstExpt $firstExpt --nExpt $nExpt --firstExptGen $firstExpt --nExptGen $nExpt --run 2 --dir B2Dhh_KK --fixDm $fixDeltaM --floatYields $floatYields > $logName_taskCPE_KK_run2 2>&1 & task=$! echo "CP KK Run 2 process : $task" processes+=( $task ) sleep 5 GenFitTimeDep_Bd2Dpipi simfit --blindFit $dataFit --dataFit $dataFit --eosConfigDir $eosConfigDir --dtype CPEven --seed $seed --port $port --iFit $iFit --fixTau $fixTau --dta-model $dta_model --dtr $dtr --dtr-perevent $dtr_perevent --firstExpt $firstExpt --nExpt $nExpt --firstExptGen $firstExpt --nExptGen $nExpt --run 2 --dir B2Dhh_pipi --fixDm $fixDeltaM --floatYields $floatYields > $logName_taskCPE_pipi_run2 2>&1 & task=$! echo "CP pipi Run 2 process : $task" processes+=( $task ) + sleep 5 fi wait #wait for the fits and coordinator to terminate echo "Fit completed" echo "Warnings/Errors from coordinator:" grep -e ERROR -e WARNING -e Error -e Warning $logName_coord echo "Warnings/Errors from CPeven KK tasks:" if [ $runNo -eq 1 -o $runNo -eq 12 ] then grep -e ERROR -e WARNING -e Error -e Warning $logName_taskCPE_KK_run1 fi if [ $runNo -eq 2 -o $runNo -eq 12 ] then grep -e ERROR -e WARNING -e Error -e Warning $logName_taskCPE_KK_run2 fi echo "Warnings/Errors from CPeven pipi tasks:" if [ $runNo -eq 1 -o $runNo -eq 12 ] then grep -e ERROR -e WARNING -e Error -e Warning $logName_taskCPE_pipi_run1 fi if [ $runNo -eq 2 -o $runNo -eq 12 ] then grep -e ERROR -e WARNING -e Error -e Warning $logName_taskCPE_pipi_run2 fi echo "Warnings/Errors from QFS tasks:" if [ $runNo -eq 1 -o $runNo -eq 12 ] then grep -e ERROR -e WARNING -e Error -e Warning $logName_taskQFS_run1 fi if [ $runNo -eq 2 -o $runNo -eq 12 ] then grep -e ERROR -e WARNING -e Error -e Warning $logName_taskQFS_run2 fi