#include <iostream>
#include <fstream>
#include <string>
#include <vector>
#include <map>
#include <numeric>

#include "Jeep/JParser.hh"
#include "JDetector/JDetector.hh"
#include "JDetector/JDetectorToolkit.hh"
#include "JDetector/JModuleRouter.hh"
#include "JTools/JCombinatorics.hh"
#include "km3net-dataformat/online/JDAQ.hh"
#include "JSupport/JMeta.hh"

#include "TH1.h"
#include "TH2.h"
#include "TFile.h"
#include "TKey.h"
#include "TF1.h"
#include "TFitResult.h"

#include "JNanobeacon.hh"

using namespace std;
using namespace JPP;
using namespace KM3NETDAQ;

int main(int argc, char** argv) {

  string inputFiles;
  string detectorFile;
  string outFile;
  int    Neighbours;
  bool   overwriteDetector;

  try {
    JParser<> zap;

    zap['f'] = make_field(inputFiles);        // output of JMergeNB (the output of JCalibrateNB works as well if nothing needs to be merged)
    zap['a'] = make_field(detectorFile);
    zap['N'] = make_field(Neighbours) = 1;    // maximum distance (in floors) between reference and target DOM
    zap['A'] = make_field(overwriteDetector);
    zap['o'] = make_field(outFile);           // base name of the output files, !!WITHOUT EXTENSION!!

    zap(argc, argv);
  }
  catch(const exception& error) {
    ERROR(error.what() << endl);
    return 1;
  }

  JDetector detector;

  load(detectorFile, detector);

  JModuleRouter moduleRouter(detector);

  int num_of_floors = getNumberOfFloors(detector);

  JCombinatorics c(num_of_floors);
  c.sort(JNANOBEACON::comparepair);

  TFile in(inputFiles.c_str(), "read");

  string hist_str;
  string hist_name;

  string outFile_root = outFile + ".root";
  string outFile_csv  = outFile + ".csv";
  string outFile_det  = outFile + "_det.csv";

  TFile output(outFile_root.c_str(), "RECREATE");
  cout << " Writing histograms to: " << outFile_root << endl;
  output.cd();

  ofstream out_csv(outFile_csv.c_str());
  cout << " Writing fitting data to: " << outFile_csv << endl;
  out_csv << "String,Ref_floor,Tgt_floor,T0,Error" << endl;

  // Create a map of vectors, keyed by string number, to store the T0s so that they can be averaged per string
  int num_of_strings = getNumberOfStrings(detector);

  typedef map<int, vector<double> > map_type;

  vector<double> base_vector(num_of_floors, 0.0);
  map_type vmap;

  for (int j = 1; j <= num_of_strings; j++) {
    vmap.insert(make_pair(j, base_vector));
  }

  // Loop over the modules in the detector file to fill the map
  for (JDetector::iterator module = detector.begin(); module != detector.end(); ++module) {

    int string_no = moduleRouter.getModule(module->getID()).getString();
    int tgt_floor = moduleRouter.getModule(module->getID()).getFloor();

    vector<double> T0s;
    vector<double> Errors;

    for (int i = 1; i <= Neighbours; i++) {

      int ref_floor = tgt_floor - i;

      if (ref_floor < 1) { continue; }   // the reference floor cannot lie below the first floor

      hist_str = to_string(string_no);

      // Check whether the histogram corresponding to this string exists in the input file
      if (in.GetListOfKeys()->Contains(hist_str.c_str())) {

        TH2D* h2D;
        in.GetObject(hist_str.c_str(), h2D);

        // Floors run from 1 to 18 but the pairs are built from the numbers 0 to 17;
        // the pair index runs from 0 to npairs-1, whereas the histogram bins run from 1 to npairs.
        int bin = c.getIndex(tgt_floor - 1, ref_floor - 1) + 1;

        hist_name = "S" + hist_str + "F" + to_string(ref_floor) + "_S" + hist_str + "F" + to_string(tgt_floor);

        TH1D* h1D = h2D->ProjectionY(hist_name.c_str(), bin, bin);

        if (h1D->GetEntries() < 1) { continue; }   // no point in fitting empty histograms

        TFitResultPtr fitresult = JNANOBEACON::Fit(h1D);

        T0s.push_back(fitresult->Parameter(1));
        Errors.push_back(fitresult->ParError(1));

        // Write the fitting results to a csv file;
        // these are not the same numbers as the ones written to the detector file!!
        out_csv << string_no << "," << ref_floor << "," << tgt_floor << ","
                << fitresult->Parameter(1) << "," << fitresult->ParError(1) << endl;

        if (!outFile.empty()) {
          h1D->Write();   // write the fitted histograms to the root file
        }
      }
    }

    // If multiple neighbours are included, take the average of the T0s weighted by the fitting error
    double weighted_T0   = 0;
    double Weight;
    double Sum_of_Weight = 0;

    for (size_t i = 0, max = T0s.size(); i != max; ++i) {
      Weight         = 1 / Errors[i];
      weighted_T0   += T0s[i] * Weight;
      Sum_of_Weight += Weight;
    }

    if (Sum_of_Weight != 0) { weighted_T0 /= Sum_of_Weight; }

    // Add the fitting results to the map
    map_type::iterator map_iterator = vmap.find(string_no);
    map_iterator->second[tgt_floor - 1] = weighted_T0;
  }

  out_csv.close();

  // For each DU subtract the average T0 from the list of T0s
  for (int j = 1; j <= num_of_strings; j++) {

    map_type::iterator map_iterator = vmap.find(j);

    int    n       = map_iterator->second.size();
    double average = accumulate(map_iterator->second.begin(), map_iterator->second.end(), 0.0) / n;

    for (int k = 0; k < n; k++) {
      map_iterator->second[k] = map_iterator->second[k] - average;
    }
  }

  ofstream out_det(outFile_det.c_str());
  cout << " Writing det changes to: " << outFile_det << endl;
  out_det << "String,Floor,T0" << endl;

  // Write the T0s to the detector file and write the combined fitting results to an output file
  for (JDetector::iterator module = detector.begin(); module != detector.end(); ++module) {

    int string_no = moduleRouter.getModule(module->getID()).getString();
    int tgt_floor = moduleRouter.getModule(module->getID()).getFloor();

    map_type::iterator map_iterator = vmap.find(string_no);
    double weighted_T0 = map_iterator->second[tgt_floor - 1];

    if (overwriteDetector) {
      // All PMTs get the same T0 added because the T0 is applied to the DOM as a whole
      for (int pmt = 0; pmt != KM3NETDAQ::NUMBER_OF_PMTS; ++pmt) {
        module->getPMT(pmt).addT0(weighted_T0);
      }
    }

    out_det << string_no << "," << tgt_floor << "," << weighted_T0 << endl;
  }

  out_det.close();

  if (overwriteDetector) {
    cout << " Overwriting the detector file" << endl;

    detector.comment.add(JMeta(argc, argv));

    store(detectorFile, detector);
  }
}
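
/*
 * Example usage -- a sketch only: the executable name (JFitNB) and the file names below are
 * assumptions for illustration, not taken from this source.
 *
 *   JFitNB -f nanobeacon_histograms.root -a detector.detx -N 2 -A -o nanobeacon_fit
 *
 * This would read the merged nanobeacon histograms, fit the time-residual peak of every DOM pair
 * up to two floors apart, write nanobeacon_fit.root, nanobeacon_fit.csv and nanobeacon_fit_det.csv,
 * and (because of -A) add the resulting per-DOM T0s to the detector file.
 */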