#include "TROOT.h"
#include "TSystem.h"
#include "TFile.h"
#include "TTree.h"
#include "TCanvas.h"
#include "TH1F.h"
#include "TLegend.h"
#include "TMultiLayerPerceptron.h"
#include "TMLPAnalyzer.h"
#include <iostream>

using std::cout;
using std::endl;

void mlpNEPPSR(Int_t ntrain = 100)
{ // begin mlpNEPPSR()
   if (!gROOT->GetClass("TMultiLayerPerceptron")) {
      gSystem->Load("libMLP");
   }

   // Prepare inputs
   TFile sig_input("tc_pi0.root");          // Signal Monte Carlo
   TFile bg_input("wjj.root");              // Background Monte Carlo
   TFile challenge_input("mystery_1.root"); // Challenge Data
   TTree *signal     = (TTree *) sig_input.Get("neppsr");
   TTree *background = (TTree *) bg_input.Get("neppsr");
   TTree *challenge  = (TTree *) challenge_input.Get("neppsr");
   TTree *simu = new TTree("MonteCarlo", "Filtered Monte Carlo Events");

   // Bind the same local variables to the branches of all three input trees,
   // so GetEntry() fills them regardless of which tree is read.
   Float_t elpt, jet1pt, jet2pt, jjpt, met, hte, jjdphi, meteldphi;
   Int_t type;
   signal->SetBranchAddress("elpt", &elpt);
   signal->SetBranchAddress("jet1pt", &jet1pt);
   signal->SetBranchAddress("jet2pt", &jet2pt);
   signal->SetBranchAddress("jjpt", &jjpt);
   signal->SetBranchAddress("met", &met);
   signal->SetBranchAddress("hte", &hte);
   signal->SetBranchAddress("jjdphi", &jjdphi);
   signal->SetBranchAddress("meteldphi", &meteldphi);
   background->SetBranchAddress("elpt", &elpt);
   background->SetBranchAddress("jet1pt", &jet1pt);
   background->SetBranchAddress("jet2pt", &jet2pt);
   background->SetBranchAddress("jjpt", &jjpt);
   background->SetBranchAddress("met", &met);
   background->SetBranchAddress("hte", &hte);
   background->SetBranchAddress("jjdphi", &jjdphi);
   background->SetBranchAddress("meteldphi", &meteldphi);
   challenge->SetBranchAddress("elpt", &elpt);
   challenge->SetBranchAddress("jet1pt", &jet1pt);
   challenge->SetBranchAddress("jet2pt", &jet2pt);
   challenge->SetBranchAddress("jjpt", &jjpt);
   challenge->SetBranchAddress("met", &met);
   challenge->SetBranchAddress("hte", &hte);
   challenge->SetBranchAddress("jjdphi", &jjdphi);
   challenge->SetBranchAddress("meteldphi", &meteldphi);

   // The training tree carries the same variables plus the truth label "type"
   simu->Branch("elpt", &elpt, "elpt/F");
   simu->Branch("jet1pt", &jet1pt, "jet1pt/F");
   simu->Branch("jet2pt", &jet2pt, "jet2pt/F");
   simu->Branch("jjpt", &jjpt, "jjpt/F");
   simu->Branch("met", &met, "met/F");
   simu->Branch("hte", &hte, "hte/F");
   simu->Branch("jjdphi", &jjdphi, "jjdphi/F");
   simu->Branch("meteldphi", &meteldphi, "meteldphi/F");
   simu->Branch("type", &type, "type/I");

   // Merge signal (type = 1) and background (type = 0) into one tree
   type = 1;
   Int_t i;
   cout << signal->GetEntries() << " signal events" << endl;
   for (i = 0; i < signal->GetEntries(); i++) {
      signal->GetEntry(i);
      simu->Fill();
   }
   type = 0;
   cout << background->GetEntries() << " background events" << endl;
   for (i = 0; i < background->GetEntries(); i++) {
      background->GetEntry(i);
      simu->Fill();
   }

   // Build and train the NN: 8 normalised inputs ("@" prefix), two hidden
   // layers of 5 and 3 neurons, one output ("type"). Odd entries are used
   // for training, even entries for testing.
   TMultiLayerPerceptron *mlp = new TMultiLayerPerceptron(
      "@elpt,@jet1pt,@jet2pt,@jjpt,@met,@hte,@jjdphi,@meteldphi:5:3:type",
      simu, "Entry$%2", "(Entry$+1)%2");
   mlp->Train(ntrain, "text,graph,update=10");
   mlp->Export("test", "python");
   mlp->Export("test", "c++");

   // Use TMLPAnalyzer to see what it looks for
   TCanvas *mlpa_canvas = new TCanvas("mlpa_canvas", "Network analysis");
   mlpa_canvas->Divide(2, 2);
   TMLPAnalyzer ana(mlp);
   // Initialisation
   ana.GatherInformations();
   // output to the console
   ana.CheckNetwork();
   mlpa_canvas->cd(1);
   // shows how each variable influences the network
   ana.DrawDInputs();
   mlpa_canvas->cd(2);
   // shows the network structure
   mlp->Draw();
   mlpa_canvas->cd(3);
   // draws the resulting network output
   ana.DrawNetwork(0, "type==1", "type==0");
   mlpa_canvas->cd(4);
   // Use the NN to plot the results for each sample.
   // This gives approximately the same result as DrawNetwork: all entries
   // are used here, while DrawNetwork focuses on the test sample. Also the
   // x-axis range is set manually.
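   // In the loops below, mlp->Evaluate(0, params) returns the value of the
   // first (and here only) output neuron for one event; params must be filled
   // in the same order as the inputs in the network specification string.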
   TH1F *bg  = new TH1F("bgh",  "NN output", 50, -0.5, 1.5);
   TH1F *sig = new TH1F("sigh", "NN output", 50, -0.5, 1.5);
   bg->SetDirectory(0);
   sig->SetDirectory(0);
   Double_t params[8];
   for (i = 0; i < background->GetEntries(); i++) {
      background->GetEntry(i);
      params[0] = elpt;
      params[1] = jet1pt;
      params[2] = jet2pt;
      params[3] = jjpt;
      params[4] = met;
      params[5] = hte;
      params[6] = jjdphi;
      params[7] = meteldphi;
      bg->Fill(mlp->Evaluate(0, params));
   }
   for (i = 0; i < signal->GetEntries(); i++) {
      signal->GetEntry(i);
      params[0] = elpt;
      params[1] = jet1pt;
      params[2] = jet2pt;
      params[3] = jjpt;
      params[4] = met;
      params[5] = hte;
      params[6] = jjdphi;
      params[7] = meteldphi;
      sig->Fill(mlp->Evaluate(0, params));
   }
   bg->SetLineColor(kBlue);
   bg->SetFillStyle(3008);
   bg->SetFillColor(kBlue);
   sig->SetLineColor(kRed);
   sig->SetFillStyle(3003);
   sig->SetFillColor(kRed);
   bg->SetStats(0);
   sig->SetStats(0);
   bg->Draw();
   sig->Draw("same");
   TLegend *legend = new TLegend(.75, .80, .95, .95);
   legend->AddEntry(bg, "Background (Wjj)");
   legend->AddEntry(sig, "Signal (TC)");
   legend->Draw();
   mlpa_canvas->cd(0);

   // Run the challenge sample through the NN; the signal fraction can then
   // be extracted from the saved histograms
   TH1F *chal = new TH1F("chal", "NN output", 50, -0.5, 1.5);
   chal->SetDirectory(0);
   for (i = 0; i < challenge->GetEntries(); i++) {
      challenge->GetEntry(i);
      params[0] = elpt;
      params[1] = jet1pt;
      params[2] = jet2pt;
      params[3] = jjpt;
      params[4] = met;
      params[5] = hte;
      params[6] = jjdphi;
      params[7] = meteldphi;
      chal->Fill(mlp->Evaluate(0, params));
   }

   // Create output file for saving NN histograms
   TFile fout("mlp_outputhistos.root", "RECREATE");
   fout.cd();
   chal->Write();
   sig->Write();
   bg->Write();
   fout.Write();
   fout.Close();
} // end mlpNEPPSR()
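// Usage sketch (assuming this macro is saved as mlpNEPPSR.C and that
// tc_pi0.root, wjj.root and mystery_1.root sit in the working directory):
//
//   root -l
//   root [0] .x mlpNEPPSR.C(200)   // train for 200 epochs
//
// The Export() calls above should also leave behind standalone versions of
// the trained network (e.g. test.py for Python, test.h/test.cxx for C++)
// that can evaluate events without this macro.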