From dbea66a84b7a7365299d6d824d3713f034984124 Mon Sep 17 00:00:00 2001 From: Aizat Daribayeva Date: Tue, 17 Mar 2026 16:30:23 +0100 Subject: [PATCH] Adding error msg for TGeo features and QA macro for reco --- .../TRK/base/include/TRKBase/GeometryTGeo.h | 14 +- .../ALICE3/TRK/macros/test/CheckTracksCA.C | 579 +++++++++++++----- 2 files changed, 451 insertions(+), 142 deletions(-) diff --git a/Detectors/Upgrades/ALICE3/TRK/base/include/TRKBase/GeometryTGeo.h b/Detectors/Upgrades/ALICE3/TRK/base/include/TRKBase/GeometryTGeo.h index 21d86378f59ec..e32a2546c6842 100644 --- a/Detectors/Upgrades/ALICE3/TRK/base/include/TRKBase/GeometryTGeo.h +++ b/Detectors/Upgrades/ALICE3/TRK/base/include/TRKBase/GeometryTGeo.h @@ -106,17 +106,23 @@ class GeometryTGeo : public o2::detectors::DetMatrixCache float getSensorRefAlphaMLOT(int chipId) const { - assert(getSubDetID(chipId) != 0 && "Called MLOT getter with VD chipId"); + if (getSubDetID(chipId) == 0) { + LOG(error) << "getSensorRefAlphaMLOT(): VD layers are not supported yet! chipID = " << chipId + << " please provide chipId for ML/OT! "; + return std::numeric_limits<float>::quiet_NaN(); + } const int local = chipId - getNumberOfActivePartsVD(); - assert(local >= 0 && local < (int)mCacheRefAlphaMLOT.size()); return mCacheRefAlphaMLOT[local]; } float getSensorXMLOT(int chipId) const { - assert(getSubDetID(chipId) != 0 && "Called MLOT getter with VD chipId"); + if (getSubDetID(chipId) == 0) { + LOG(error) << "getSensorXMLOT(): VD layers are not supported yet! chipID = " << chipId + << " please provide chipId for ML/OT! 
"; + return std::numeric_limits::quiet_NaN(); + } const int local = chipId - getNumberOfActivePartsVD(); - assert(local >= 0 && local < (int)mCacheRefXMLOT.size()); return mCacheRefXMLOT[local]; } diff --git a/Detectors/Upgrades/ALICE3/TRK/macros/test/CheckTracksCA.C b/Detectors/Upgrades/ALICE3/TRK/macros/test/CheckTracksCA.C index ae75616b7719c..7d9ee45b6d646 100644 --- a/Detectors/Upgrades/ALICE3/TRK/macros/test/CheckTracksCA.C +++ b/Detectors/Upgrades/ALICE3/TRK/macros/test/CheckTracksCA.C @@ -9,8 +9,8 @@ // granted to it by virtue of its status as an Intergovernmental Organization // or submit itself to any jurisdiction. -/// \file CheckTracksCA.C -/// \brief Quality assurance macro for TRK tracking +/// \author A. Daribayeva +/// Quality assurance test on reconstructed tracks, producing efficiency plots and performance table #if !defined(__CLING__) || defined(__ROOTCLING__) #include @@ -18,15 +18,24 @@ #include #include #include +#include +#include +#include #include #include -#include +#include +#include #include #include #include #include #include +#include +#include +#include +#include +#include #include "DataFormatsITS/TrackITS.h" #include "SimulationDataFormat/MCCompLabel.h" @@ -41,6 +50,18 @@ using namespace std; using namespace o2; +enum class RangeMode { + ContentOnly, + ContentOrError, + ReferenceContent +}; + +void setAutoXRange(TH1* h, + RangeMode mode = RangeMode::ContentOnly, + const TH1* hRef = nullptr, + double threshold = 0.0, + int marginBins = 1); + /// Structure to track particle hit information struct ParticleHitInfo { std::bitset<11> layerHits; ///< Which layers have hits (11 layers for TRK) @@ -73,25 +94,37 @@ struct ParticleHitInfo { } }; -void CheckTracksCA(std::string tracfile = "o2trac_trk.root", +bool hasConsecutiveLayers(const o2::its::TrackITS& recoTrack, int nClusters) +{ + std::array layers{}; + + for (int i = 0; i < 11; i++) { + layers[i] = recoTrack.hasHitOnLayer(i); + } + + return std::search_n(layers.begin(), layers.end(), 
nClusters, true) != layers.end(); +} + +void CheckTracksCA(std::string trackfile = "o2trac_trk.root", std::string kinefile = "o2sim_Kine.root", std::string hitsfile = "o2sim_HitsTRK.root", - std::string outputfile = "trk_qa_output.root") + std::string outFile = "RecoPerformanceTable.dat", + std::string outFile1 = "RecoTracksQA.root") { - gStyle->SetOptStat(0); - std::cout << "=== Starting TRK Track Quality Assurance ===" << std::endl; + std::cout << "=== Starting TRK Track Reconstruction Quality Assurance ===" << std::endl; std::cout << "Input files:" << std::endl; - std::cout << " Tracks: " << tracfile << std::endl; + std::cout << " Tracks: " << trackfile << std::endl; std::cout << " Kinematics: " << kinefile << std::endl; std::cout << " Hits: " << hitsfile << std::endl; - std::cout << " Output: " << outputfile << std::endl; - std::cout << std::endl; + std::cout << " Output file with performance table: " << outFile << std::endl; + std::cout << " Output root file with histograms: " << outFile1 << std::endl; + + gROOT->SetBatch(true); // MC kinematics reader o2::steer::MCKinematicsReader kineReader("o2sim", o2::steer::MCKinematicsReader::Mode::kMCKine); const int nEvents = kineReader.getNEvents(0); - std::cout << "Number of MC events: " << nEvents << std::endl; // Open hits file to count hits per particle per layer TFile* hitsFile = TFile::Open(hitsfile.c_str(), "READ"); @@ -99,6 +132,7 @@ void CheckTracksCA(std::string tracfile = "o2trac_trk.root", std::cerr << "ERROR: Cannot open hits file: " << hitsfile << std::endl; return; } + TTree* hitsTree = hitsFile->Get("o2sim"); if (!hitsTree) { std::cerr << "ERROR: Cannot find o2sim tree in hits file" << std::endl; @@ -106,25 +140,19 @@ void CheckTracksCA(std::string tracfile = "o2trac_trk.root", } // Open reconstructed tracks file - TFile* tracFile = TFile::Open(tracfile.c_str(), "READ"); + TFile* tracFile = TFile::Open(trackfile.c_str(), "READ"); if (!tracFile || tracFile->IsZombie()) { - std::cerr << "ERROR: Cannot 
open tracks file: " << tracfile << std::endl; + std::cerr << "ERROR: Cannot open tracks file: " << trackfile << std::endl; return; } + TTree* recTree = tracFile->Get("o2sim"); if (!recTree) { std::cerr << "ERROR: Cannot find o2sim tree in tracks file" << std::endl; return; } - // Reconstructed tracks and labels - std::vector* recTracks = nullptr; - std::vector* trkLabels = nullptr; - recTree->SetBranchAddress("TRKTrack", &recTracks); - recTree->SetBranchAddress("TRKTrackMCTruth", &trkLabels); - - std::cout << "Reading tracks from tree..." << std::endl; - + // ============== MC part =============================== // Analyze hits tree to count hits per particle per layer std::cout << "Analyzing hits from tree..." << std::endl; std::unordered_map particleHitMap; @@ -133,9 +161,6 @@ void CheckTracksCA(std::string tracfile = "o2trac_trk.root", o2::base::GeometryManager::loadGeometry(); auto* gman = o2::trk::GeometryTGeo::Instance(); - // Array to map detector to starting layer - constexpr std::array startLayer{0, 3}; - std::vector* trkHit = nullptr; hitsTree->SetBranchAddress("TRKHit", &trkHit); @@ -152,8 +177,7 @@ void CheckTracksCA(std::string tracfile = "o2trac_trk.root", } // Determine layer - int subDetID = gman->getSubDetID(hit.GetDetectorID()); - const int layer = startLayer[subDetID] + gman->getLayer(hit.GetDetectorID()); + const int layer = gman->getBarrelLayer(hit.GetDetectorID()); // Create label for this particle o2::MCCompLabel label(hit.GetTrackID(), static_cast(iEntry), 0); @@ -165,81 +189,132 @@ void CheckTracksCA(std::string tracfile = "o2trac_trk.root", std::cout << "Found " << particleHitMap.size() << " unique particles with hits" << std::endl; - // Store particle info and fill generated histograms - std::unordered_map particlePtMap; - - // Create histograms - constexpr int nLayers = 11; - constexpr int nb = 100; - double xbins[nb + 1], ptcutl = 0.05, ptcuth = 10.; - double a = std::log(ptcuth / ptcutl) / nb; - for (int i = 0; i <= nb; i++) - 
xbins[i] = ptcutl * std::exp(i * a); - - TH1D genParticlePtHist("genParticlePt", "Generated Particle p_{T} (All Layers); #it{p}_{T} (GeV/#it{c}); Counts", nb, xbins); - TH1D genParticlePt7LayersHist("genParticlePt7Layers", "Generated Particle p_{T} with hits in at least 7 consecutive layers; #it{p}_{T} (GeV/#it{c}); Counts", nb, xbins); - TH1D goodTracks("goodTracks", "Good Tracks; p_{T} (GeV/c); Counts", nb, xbins); - TH1D fakeTracks("fakeTracks", "Fake Tracks; p_{T} (GeV/c); Counts", nb, xbins); - - std::array goodTracksMatching, fakeTracksMatching; - for (int i = 0; i < 5; ++i) { - goodTracksMatching[i] = TH1D(Form("goodTracksMatching_%dLayers", i + 7), - Form("Good Tracks with %d layer hits; p_{T} (GeV/c); Counts", i + 7), - nb, xbins); - fakeTracksMatching[i] = TH1D(Form("fakeTracksMatching_%dLayers", i + 7), - Form("Fake Tracks with %d layer hits; p_{T} (GeV/c); Counts", i + 7), - nb, xbins); - } + //=========== need to set the min and max ranges for hists + std::vector pTDist; + std::unordered_map MCTrackMap; - TH1D numberOfClustersPerTrack("numberOfClustersPerTrack", - "Number of clusters per track; N_{clusters}; Counts", - 12, -0.5, 11.5); + // counters for general statisics + int counterPrimaries{0}, counterSecondaries{0}; - // First pass: identify particles with full hit coverage from kinematics - std::cout << "Analyzing MC particles..." 
<< std::endl; for (int iEvent = 0; iEvent < nEvents; ++iEvent) { const auto& mcTracks = kineReader.getTracks(iEvent); + for (size_t iTrack = 0; iTrack < mcTracks.size(); ++iTrack) { const auto& mcTrack = mcTracks[iTrack]; - if (!mcTrack.isPrimary()) { - continue; - } // Create label for this particle o2::MCCompLabel label(iTrack, iEvent, 0); - float pt = mcTrack.GetPt(); - // Store particle info - particlePtMap[label] = pt; - - // Check if this particle has hits auto hitIt = particleHitMap.find(label); if (hitIt != particleHitMap.end()) { - // Store pT in hit info - hitIt->second.pt = pt; - // Fill histogram for particles with hits in all 11 layers - if (hitIt->second.nHits == 11) { - genParticlePtHist.Fill(pt); + if (mcTrack.isPrimary()) { + counterPrimaries++; } - - // Fill histogram for particles with at least 7 consecutive layer hits - if (hitIt->second.hasConsecutiveLayers(7)) { - genParticlePt7LayersHist.Fill(pt); + if (mcTrack.isSecondary()) { + counterSecondaries++; } + + MCTrackMap.emplace(label, mcTrack); + pTDist.push_back(mcTrack.GetPt()); } } } - std::cout << "Generated particles with 11 hits: " << genParticlePtHist.GetEntries() << std::endl; - std::cout << "Generated particles with 7+ consecutive hits: " << genParticlePt7LayersHist.GetEntries() << std::endl; + int nBins = 100; + auto [minpT, maxpT] = std::minmax_element(pTDist.begin(), pTDist.end()); + + //=========== histograms ============= + // for exclusive studies + TH1F hPtDenExclusive("", "", nBins, *minpT, *maxpT); + + TH1F hPtNumExclusive("", "", nBins, *minpT, *maxpT); + + TH1F hPtEffExclusive("hPtEffExclusive", + "efficiency (exclusive, good, primaries) vs p_{T}; p_{T} [GeV/c]; Efficiency", + nBins, *minpT, *maxpT); + + // for inclusive studies + TH1F hPtDenInclusive("", "", nBins, *minpT, *maxpT); + + TH1F hPtNumInclusive("", "", nBins, *minpT, *maxpT); + + TH1F hPtEffInclusive("hPtEffInclusive", "", nBins, *minpT, *maxpT); + + // for inclusive studies, fake + TH1F hPtNumInclusiveFake("", 
"", nBins, *minpT, *maxpT); + TH1F hPtEffInclusiveFake("", "", nBins, *minpT, *maxpT); + + TH2D hPtResVsPt("", "", nBins, *minpT, *maxpT, 100, -0.5, 0.5); + + // for inclusive efficiencies + int counterAll{0}, prim_ge7{0}, sec_ge7{0}; + + // for exclusive studies when we have 7,8,9,10,11 hits + std::array mcExact{}; + + for (const auto& [label, mcTrack] : MCTrackMap) { + + const auto& hitInfo = particleHitMap.at(label); + int nHits = hitInfo.nHits; + + if (nHits < 7 || nHits > 11) { + continue; + } + + float pT = mcTrack.GetPt(); + + bool consecutive7 = hitInfo.hasConsecutiveLayers(7); + + if (mcTrack.isPrimary()) { + + // exclusive - all hits should be on subsequent layers + if (hitInfo.hasConsecutiveLayers(nHits)) { + hPtDenExclusive.Fill(pT); + ++mcExact[nHits]; + } + + // inclusive - it's enough to be on 7 consequtive layers + if (consecutive7) { + hPtDenInclusive.Fill(pT); + ++prim_ge7; + } + + } else if (mcTrack.isSecondary()) { + + if (consecutive7) { + ++sec_ge7; + } + } + } + + counterAll = prim_ge7 + sec_ge7; + + //============ reco tracks =============== + + // Reconstructed tracks and labels + std::vector* recTracks = nullptr; + std::vector* trkLabels = nullptr; + std::vector pTResVector; // good, primaries, inclusive + + recTree->SetBranchAddress("TRKTrack", &recTracks); + recTree->SetBranchAddress("TRKTrackMCTruth", &trkLabels); // Second pass: analyze reconstructed tracks std::cout << "Analyzing reconstructed tracks..." 
<< std::endl; int nROFs = recTree->GetEntries(); - int totalTracks = 0; - int goodTracksCount = 0; - int fakeTracksCount = 0; + int totalTracks{0}; + + // inclusive count + std::unordered_set foundAllGood, foundAllFake; + std::unordered_set foundPrimGood, foundPrimFake; + std::unordered_set foundSecGood, foundSecFake; + + // exclusive count + std::array, 12> foundExclusiveGood, foundExclusiveFake; + std::array, 12> foundWithLessClusters; + + int count7RecoGood{0}; for (int iROF = 0; iROF < nROFs; ++iROF) { recTree->GetEntry(iROF); @@ -258,88 +333,316 @@ void CheckTracksCA(std::string tracfile = "o2trac_trk.root", continue; } - int eventID = label.getEventID(); - int trackID = label.getTrackID(); int nClusters = track.getNumberOfClusters(); + if (nClusters < 7 || nClusters > 11) { + continue; + } + + auto key = o2::MCCompLabel(label.getTrackID(), label.getEventID(), 0); - // Get MC track info - if (eventID < 0 || eventID >= nEvents) { + auto hitIt = particleHitMap.find(key); + auto mcIt = MCTrackMap.find(key); + + if (hitIt == particleHitMap.end() || mcIt == MCTrackMap.end()) { continue; } - const auto& mcTracks = kineReader.getTracks(eventID); - if (trackID < 0 || trackID >= (int)mcTracks.size()) { + int nHits = hitIt->second.nHits; + if (nHits < 7 || nHits > 11) { continue; } - float pt = mcTracks[trackID].GetPt(); + bool mcHasN = hitIt->second.hasConsecutiveLayers(nHits); + bool recoHasN = hasConsecutiveLayers(track, nClusters); + + float mcPt = mcIt->second.GetPt(); + float recoPT = track.getPt(); - // Fill histograms - numberOfClustersPerTrack.Fill(nClusters); + // inclusive count + if (hitIt->second.hasConsecutiveLayers(7) && hasConsecutiveLayers(track, 7)) { - auto key = o2::MCCompLabel(trackID, eventID, 0); - if (particleHitMap.find(key) != particleHitMap.end() && particleHitMap[key].hasConsecutiveLayers(11)) { + // for good tracks + if (label.isCorrect()) { + foundAllGood.insert(key); + + if (mcIt->second.isPrimary()) { + foundPrimGood.insert(key); + 
hPtNumInclusive.Fill(mcPt); + + float ptRes = (recoPT - mcPt) / mcPt; + pTResVector.push_back(ptRes); + hPtResVsPt.Fill(mcPt, ptRes); + + } else if (mcIt->second.isSecondary()) { + foundSecGood.insert(key); + } + } + + // for fake tracks if (label.isFake()) { - fakeTracks.Fill(pt); - fakeTracksCount++; - if (nClusters >= 7 && nClusters <= 11) { - fakeTracksMatching[nClusters - 7].Fill(pt); + foundAllFake.insert(key); + + if (mcIt->second.isPrimary()) { + foundPrimFake.insert(key); + hPtNumInclusiveFake.Fill(mcPt); + } else if (mcIt->second.isSecondary()) { + foundSecFake.insert(key); } - } else { - goodTracks.Fill(pt); - goodTracksCount++; - if (nClusters >= 7 && nClusters <= 11) { - goodTracksMatching[nClusters - 7].Fill(pt); + } + } + + // exclusive count + if (nHits == nClusters && mcHasN && recoHasN) { + + if (mcIt->second.isPrimary()) { + + if (label.isCorrect()) { + + hPtNumExclusive.Fill(mcPt); + foundExclusiveGood[nHits].insert(key); + } + + if (label.isFake()) { + foundExclusiveFake[nHits].insert(key); } } } + + // counting cluster loss + if (mcIt->second.isPrimary() && mcHasN && recoHasN && + label.isCorrect() && + nClusters < nHits) { + + foundWithLessClusters[nHits].insert(key); + } + + } // end loop over reco tracks + } // end loop over RoFs + + // inclusive efficiencies for Good tracks + float effForAllGood = counterAll > 0 ? 100.f * foundAllGood.size() / counterAll : 0.f; + float effForPrimGood = prim_ge7 > 0 ? 100.f * foundPrimGood.size() / prim_ge7 : 0.f; + float effForSecGood = sec_ge7 > 0 ? 100.f * foundSecGood.size() / sec_ge7 : 0.f; + + // inclusive efficiencies for Fake tracks + float effForAllFake = counterAll > 0 ? 100.f * foundAllFake.size() / counterAll : 0.f; + float effForPrimFake = prim_ge7 > 0 ? 100.f * foundPrimFake.size() / prim_ge7 : 0.f; + float effForSecFake = sec_ge7 > 0 ? 
100.f * foundSecFake.size() / sec_ge7 : 0.f; + + // exclusive efficiencies for Good and Fake tracks + std::array<float, 12> effExactAllGood{}, effExactAllFake{}; + + for (int n = 7; n <= 11; ++n) { + effExactAllGood[n] = mcExact[n] > 0 ? 100.f * foundExclusiveGood[n].size() / mcExact[n] : 0.f; + effExactAllFake[n] = mcExact[n] > 0 ? 100.f * foundExclusiveFake[n].size() / mcExact[n] : 0.f; + } + + // cluster loss + std::array<float, 12> fracWithLessClusters{}; + for (int n = 7; n <= 11; ++n) { + fracWithLessClusters[n] = mcExact[n] > 0 ? 100.f * foundWithLessClusters[n].size() / mcExact[n] : 0.f; + } + + // pT vs inclusive & exclusive track efficiencies + hPtEffExclusive.Divide(&hPtNumExclusive, &hPtDenExclusive, 1.0, 1.0, "B"); + hPtEffInclusive.Divide(&hPtNumInclusive, &hPtDenInclusive, 1.0, 1.0, "B"); + hPtEffInclusiveFake.Divide(&hPtNumInclusiveFake, &hPtDenInclusive, 1.0, 1.0, "B"); + + // pT resolution for good inclusive tracks, primaries + auto [minPtRes, maxPtRes] = std::minmax_element(pTResVector.begin(), pTResVector.end()); + TH1F pTResolution("pTResolutionForInclusive", "p_{T} resolution; (p_{T}^{rec}-p_{T}^{MC})/p_{T}^{MC}; Counts", nBins, *minPtRes, *maxPtRes); + for (const auto& pTVal : pTResVector) { + pTResolution.Fill(pTVal); + } + pTResolution.Fit("gaus"); + + TObjArray fitSlices; + hPtResVsPt.FitSlicesY(nullptr, 0, -1, 0, "QNR", &fitSlices); + + TH1D* hSigmaVsPt = nullptr; + + if (fitSlices.GetEntries() > 2 && fitSlices.At(2)) { + hSigmaVsPt = dynamic_cast<TH1D*>(fitSlices.At(2)->Clone("hSigmaVsPt")); + if (hSigmaVsPt) { + hSigmaVsPt->SetTitle("#sigma(p_{T} resolution) vs p_{T}; p_{T}^{MC} [GeV/c]; #sigma"); + hSigmaVsPt->GetXaxis()->SetRangeUser(0.5, *maxpT); } } - // Create efficiency histograms - std::cout << "Computing efficiencies..." 
<< std::endl; + // Style + hPtEffInclusive.SetLineColor(kBlue + 1); + hPtEffInclusive.SetMarkerColor(kBlue + 1); + hPtEffInclusive.SetMarkerStyle(20); + hPtEffInclusive.SetMarkerSize(1.0); + hPtEffInclusive.SetLineWidth(2); + + hPtEffInclusiveFake.SetLineColor(kRed + 1); + hPtEffInclusiveFake.SetMarkerColor(kRed + 1); + hPtEffInclusiveFake.SetMarkerStyle(24); + hPtEffInclusiveFake.SetMarkerSize(1.0); + hPtEffInclusiveFake.SetLineWidth(2); + + // Titles and axis labels + hPtEffInclusive.SetTitle("Inclusive tracking performance vs p_{T}"); + hPtEffInclusive.GetXaxis()->SetTitle("p_{T} [GeV/c]"); + hPtEffInclusive.GetYaxis()->SetTitle("Rate"); + + // Canvas + TCanvas* cPtEff = new TCanvas("", "", 900, 700); + + setAutoXRange(&hPtEffInclusive, RangeMode::ReferenceContent, &hPtDenInclusive); + setAutoXRange(&hPtEffInclusiveFake, RangeMode::ReferenceContent, &hPtDenInclusive); + + hPtEffInclusive.Draw("E1"); + hPtEffInclusiveFake.Draw("E1 SAME"); + + // Legend + TLegend* leg = new TLegend(0.60, 0.15, 0.88, 0.35); + leg->SetBorderSize(0); + leg->SetFillStyle(0); + leg->AddEntry(&hPtEffInclusive, "Inclusive good efficiency", "lp"); + leg->AddEntry(&hPtEffInclusiveFake, "Inclusive fake rate", "lp"); + leg->Draw("E1 SAME"); + + setAutoXRange(&hPtEffExclusive, RangeMode::ContentOnly); + + // Writing to output Root file + std::cout << "Writing histograms to " << outFile1 << std::endl; + TFile outFileRoot(outFile1.c_str(), "RECREATE"); + if (hSigmaVsPt) { + hSigmaVsPt->Write(); + } + hPtEffExclusive.Write(); + hPtEffInclusive.Write(); + cPtEff->Write(); + pTResolution.Write(); + outFileRoot.Close(); + + // Building performance table + std::cout << "Building performance table ... " << std::endl; + std::ofstream outFileTxt(outFile.c_str()); + outFileTxt << std::fixed << std::setprecision(2); + + outFileTxt << "This is preliminary reconstruction performance table !!" 
<< std::endl; + outFileTxt << "\nGenerated " << particleHitMap.size() << " unique particles with hits" << std::endl; + outFileTxt << "Among them, N primaries: " << counterPrimaries << " and secondaries: " << counterSecondaries << std::endl; + outFileTxt << "Number of total reconstructed tracks: " << totalTracks << std::endl; + + outFileTxt << "\nReconstruction performance table\n\n"; + + outFileTxt << "| " + << std::left << std::setw(20) << "Track category" + << "| " << std::setw(14) << "Efficiency (%)" + << "| " << std::setw(14) << "Fake rate (%)" + << "| " << std::setw(12) << "MC counts" + << " |\n"; + + outFileTxt << std::string(70, '-') << "\n"; + + outFileTxt << "| " + << std::left << std::setw(20) << "All (prim+sec)" + << "| " << std::setw(14) << effForAllGood + << "| " << std::setw(14) << effForAllFake + << "| " << std::setw(12) << counterAll + << " |\n"; + + outFileTxt << "| " + << std::left << std::setw(20) << "Primaries" + << "| " << std::setw(14) << effForPrimGood + << "| " << std::setw(14) << effForPrimFake + << "| " << std::setw(12) << prim_ge7 + << " |\n"; + + outFileTxt << "| " + << std::left << std::setw(20) << "Secondaries" + << "| " << std::setw(14) << effForSecGood + << "| " << std::setw(14) << effForSecFake + << "| " << std::setw(12) << sec_ge7 + << " |\n"; + + outFileTxt << "\n\nExclusive efficiencies for primaries:\n\n"; + + outFileTxt << "| " + << std::left << std::setw(15) << "Track length" + << "| " << std::setw(14) << "Efficiency (%)" + << "| " << std::setw(14) << "Fake rate (%)" + << "| " << std::setw(14) << "Cluster loss (%)" + << "| " << std::setw(14) << "MC counts" + << " |\n"; + + outFileTxt << std::string(85, '-') << "\n"; + + for (int n = 11; n >= 7; --n) { + outFileTxt << "| " + << std::left << std::setw(15) << (std::to_string(n) + "-hit") + << "| " << std::setw(14) << effExactAllGood[n] + << "| " << std::setw(14) << effExactAllFake[n] + << "| " << std::setw(16) << fracWithLessClusters[n] + << "| " << std::setw(14) << mcExact[n] + 
<< " |\n"; + } + + std::cout << "Analysis complete!" << std::endl; - std::array efficiencyHistograms; - THStack* efficiencyStack = new THStack("efficiencyStack", - "Tracking Efficiency; #it{p}_{T} (GeV/#it{c}); Efficiency"); +} // end of macro - int colors[5] = {kRed, kBlue, kGreen + 2, kMagenta, kOrange}; - for (int i = 0; i < 5; ++i) { - int nClusters = i + 7; - efficiencyHistograms[i] = TH1D(Form("efficiency_%dClusters", nClusters), - Form("Efficiency for %d cluster tracks; #it{p}_{T} (GeV/#it{c}); Efficiency", nClusters), - nb, xbins); +void setAutoXRange(TH1* h, RangeMode mode, + const TH1* hRef, + double threshold, + int marginBins) +{ + if (!h) + return; - efficiencyHistograms[i].Divide(&goodTracksMatching[i], &genParticlePtHist, 1, 1, "B"); + const TH1* hScan = h; - efficiencyHistograms[i].SetLineColor(colors[i]); - efficiencyHistograms[i].SetFillColor(colors[i]); - efficiencyHistograms[i].SetLineWidth(2); - efficiencyHistograms[i].SetMarkerColor(colors[i]); - efficiencyHistograms[i].SetMarkerStyle(20 + i); - efficiencyStack->Add(&efficiencyHistograms[i]); + if (mode == RangeMode::ReferenceContent) { + if (!hRef) + return; + hScan = hRef; } - // Write output - std::cout << "Writing output to " << outputfile << std::endl; - TFile outFile(outputfile.c_str(), "RECREATE"); - genParticlePtHist.Write(); - goodTracks.Write(); - fakeTracks.Write(); - for (int i = 0; i < 5; ++i) { - goodTracksMatching[i].Write(); - fakeTracksMatching[i].Write(); - efficiencyHistograms[i].Write(); + const int nBins = hScan->GetNbinsX(); + int first = -1; + int last = -1; + + auto isUsefulBin = [&](int i) -> bool { + const double content = hScan->GetBinContent(i); + const double error = hScan->GetBinError(i); + + switch (mode) { + case RangeMode::ContentOnly: + return content > threshold; + + case RangeMode::ContentOrError: + return (content > threshold) || (error > 0.0); + + case RangeMode::ReferenceContent: + return content > threshold; + } + return false; + }; + + for (int i = 1; i 
<= nBins; ++i) { + if (isUsefulBin(i)) { + first = i; + break; + } } - efficiencyStack->Write(); - genParticlePt7LayersHist.Write(); - numberOfClustersPerTrack.Write(); - outFile.Close(); - - // Clean up - hitsFile->Close(); - tracFile->Close(); - delete efficiencyStack; - delete hitsFile; - delete tracFile; -} + + for (int i = nBins; i >= 1; --i) { + if (isUsefulBin(i)) { + last = i; + break; + } + } + + if (first == -1 || last == -1 || first > last) { + return; + } + + first = std::max(1, first - marginBins); + last = std::min(nBins, last + marginBins); + + h->GetXaxis()->SetRange(first, last); +} \ No newline at end of file