#include "TTreeFormula.h"
#include "TEventList.h"
#include "Riostream.h"

// TMLPAnalyzer::GetLayers(): the number of layers is one more than the
// number of ':' separators in the structure string.
return fStructure.CountChar(':')+1;

// TMLPAnalyzer::GetNeurons(): for the input layer, count the comma-separated
// entries before the first ':'.
TString input = TString(fStructure(0, fStructure.First(':')));
return input.CountChar(',')+1;

// For the output layer, count the entries after the last ':'.
TString output = TString(fStructure(fStructure.Last(':') + 1,
                         fStructure.Length() - fStructure.Last(':')));
return output.CountChar(',')+1;

// For a hidden layer, walk the section between the first and last ':'.
TString hidden = TString(fStructure(fStructure.First(':') + 1,
                         fStructure.Last(':') - fStructure.First(':') - 1));
Int_t end = hidden.Index(":", beg + 1);
num = atoi(TString(hidden(beg, end - beg)).Data());
end = hidden.Index(":", beg + 1);
if (layer == cnt) return num;
num = atoi(TString(hidden(beg, hidden.Length() - beg)).Data());
if (layer == cnt) return num;
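The structure string follows the TMultiLayerPerceptron layout convention, e.g. "x,y,z:10:5:type": comma-separated inputs, then the hidden-layer sizes, then the output(s), all separated by ':'. A minimal sketch of the counting logic above applied to such a string (the branch names are made up for illustration):

TString structure("x,y,z:10:5:type");
Int_t nLayers = structure.CountChar(':') + 1;   // 4: input, two hidden, output
TString in(structure(0, structure.First(':')));
Int_t nInputs = in.CountChar(',') + 1;          // 3 input neurons (x, y, z)
TString out(structure(structure.Last(':') + 1,
            structure.Length() - structure.Last(':')));
Int_t nOutputs = out.CountChar(',') + 1;        // 1 output neuron (type)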
// TMLPAnalyzer::GetNeuronFormula(): return the idx-th comma-separated entry
// of the input part of the structure string (the leading normalisation
// marker, if any, is stripped).
TString input = TString(fStructure(0, fStructure.First(':')));
Int_t end = input.Index(",", beg + 1);
brName = TString(input(beg, end - beg));
brName = brName(1, brName.Length()-1);
end = input.Index(",", beg + 1);
if (cnt == idx) return brName;
brName = TString(input(beg, input.Length() - beg));
brName = brName(1, brName.Length()-1);
// TMLPAnalyzer::GetInputNeuronTitle() and GetOutputNeuronTitle():
return neuron ? neuron->GetName() : "NO SUCH NEURON";
return neuron ? neuron->GetName() : "NO SUCH NEURON";
// TMLPAnalyzer::CheckNetwork(): print the structure and, for each input
// neuron, the mean and RMS of its impact on the network output.
std::cout << "Network with structure: " << fStructure.Data() << std::endl;
std::cout << "inputs with low values in the differences plot may not be needed" << std::endl;
char var[64], sel[64];
snprintf(var, 64, "diff>>tmp%d", i);
snprintf(sel, 64, "inNeuron==%d", i);
TH1F* tmp = (TH1F*)gDirectory->Get(Form("tmp%d", i));
   << " -> " << tmp->GetMean()
   << " +/- " << tmp->GetRMS() << std::endl;
// TMLPAnalyzer::GatherInformations(): measure, on the test sample, the effect
// on the network output of shifting each input by 10% of its RMS.
Double_t shift = 0.1;
Int_t nEvents = test->GetN();
Double_t* params = new Double_t[nn];
Double_t* rms = new Double_t[nn];
TTreeFormula** formulas = new TTreeFormula*[nn];
Int_t* index = new Int_t[nn];
// An input formula may carry a trailing instance index of the form {n};
// it is split off and used with EvalInstance() later.
TRegexp re("{[0-9]+}$");
Ssiz_t len = formula.Length();
Int_t i(0), j(0), k(0), l(0);
pos = re.Index(formula, &len);
if (pos == -1 || len < 3) {
   formulas[i] = new TTreeFormula(Form("NF%lu", (ULong_t)this), formula, data);
TString newformula(formula, pos);
TString val = formula(pos + 1, len - 2);
formulas[i] = new TTreeFormula(Form("NF%lu", (ULong_t)this), newformula, data);
formula = newformula;
index[i] = val.Atoi();
// Determine the RMS of each input over the dataset.
TH1D tmp("tmpb", "tmpb", 1, -FLT_MAX, FLT_MAX);
data->Draw(Form("%s>>tmpb", formula.Data()), "", "goff");
rms[i] = tmp.GetRMS();
// Book the internal MLP_iotree with one branch each for the input values,
// the network outputs and the true values.
Double_t *outVal = new Double_t[numOutNodes];
Double_t *trueVal = new Double_t[numOutNodes];
fIOTree = new TTree("MLP_iotree", "MLP_iotree");
   leaflist += Form("In%d/D:", i);
leaflist.Remove(leaflist.Length()-1);
fIOTree->Branch("In", params, leaflist);
for (i = 0; i < numOutNodes; i++)
   leaflist += Form("Out%d/D:", i);
leaflist.Remove(leaflist.Length()-1);
fIOTree->Branch("Out", outVal, leaflist);
for (i = 0; i < numOutNodes; i++)
   leaflist += Form("True%d/D:", i);
leaflist.Remove(leaflist.Length()-1);
fIOTree->Branch("True", trueVal, leaflist);
// Loop over the test events: evaluate the inputs, then shift each input by
// +/- shift*rms and accumulate the squared change of the outputs.
for (j = 0; j < nEvents; j++) {
   params[k] = formulas[k]->EvalInstance(index[k]);
   params[i] += shift*rms[i];
   params[i] -= 2*shift*rms[i];
   diff += (v1-v2)*(v1-v2);
   params[i] += shift*rms[i];
for (i = 0; i < GetNeurons(1); i++) delete formulas[i];
fIOTree->ResetBranchAddresses();
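The resulting tree holds, per test event, the leaves In.In0, In.In1, ..., Out.Out0, ... and True.True0, ...; the truth-deviation methods below simply Draw from it. As a sketch, assuming the tree is exposed through a GetIOTree() accessor (an assumption, check the class header):

TTree* io = ana.GetIOTree();         // assumed accessor for the internal MLP_iotree
io->Draw("Out.Out0-True.True0");     // residual of output node 0 over the test sample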
// TMLPAnalyzer::DrawDInput() and DrawDInputs(): histogram the stored output
// differences for one input neuron, or stack them for all inputs.
snprintf(sel, 64, "inNeuron==%d", i);
THStack* stack = new THStack("differences", "differences (impact of variables on ANN)");
TLegend* legend = new TLegend(0.75, 0.75, 0.95, 0.95);
char var[64], sel[64];
snprintf(var, 64, "diff>>tmp%d", i);
snprintf(sel, 64, "inNeuron==%d", i);
tmp = (TH1F*)gDirectory->Get(Form("tmp%d", i));
tmp->SetDirectory(0);
tmp->SetLineColor(i+1);
stack->Draw("nostack");
// TMLPAnalyzer::DrawNetwork(): superimpose the network output for events
// passing the signal selection and events passing the background selection.
TEventList* current = data->GetEventList();
data->SetEventList(test);
THStack* stack = new THStack("__NNout_TMLPA", Form("Neural net output (neuron %d)", neuron));
TH1F *bgh  = new TH1F("__bgh_TMLPA", "NN output", 50, -0.5, 1.5);
TH1F *sigh = new TH1F("__sigh_TMLPA", "NN output", 50, -0.5, 1.5);
bgh->SetDirectory(0);
sigh->SetDirectory(0);
TEventList* signal_list = new TEventList("__tmpSig_MLPA");
TEventList* bg_list     = new TEventList("__tmpBkg_MLPA");
data->Draw(">>__tmpSig_MLPA", signal, "goff");
data->Draw(">>__tmpBkg_MLPA", bg, "goff");
nEvents = bg_list->GetN();
for (j = 0; j < nEvents; j++) {
nEvents = signal_list->GetN();
for (j = 0; j < nEvents; j++) {
bgh->SetLineColor(kBlue);
bgh->SetFillStyle(3008);
bgh->SetFillColor(kBlue);
sigh->SetLineColor(kRed);
sigh->SetFillStyle(3003);
sigh->SetFillColor(kRed);
TLegend *legend = new TLegend(.75, .80, .95, .95);
legend->AddEntry(bgh, "Background");
legend->AddEntry(sigh, "Signal");
stack->Draw("nostack");
data->SetEventList(current);
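For example (the cut strings are assumptions about the user's truth branch):

ana.DrawNetwork(0, "type==1", "type==0");   // output of neuron 0, signal vs. background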
// TMLPAnalyzer::DrawTruthDeviation(): profile of (output - truth) versus the
// true value for one output node, drawn from the internal MLP_iotree.
TString pipehist = Form("MLP_truthdev_%d", outnode);
drawline.Form("Out.Out%d-True.True%d:True.True%d>>", outnode, outnode, outnode);
fIOTree->Draw(drawline + pipehist + "(20)", "", "goff prof");
TProfile* h = (TProfile*)gDirectory->Get(pipehist);
h->SetTitle(Form("#Delta(output - truth) vs. truth for %s",
h->GetXaxis()->SetTitle(title);
h->GetYaxis()->SetTitle(Form("#Delta(output - truth) for %s", title));
if (!strstr(option, "goff"))

// TMLPAnalyzer::DrawTruthDeviations(): stack the deviation profiles of all
// output nodes.
THStack *hs = new THStack("MLP_TruthDeviation",
                          "Deviation of MLP output from truth");
if (!option || !strstr(option, "goff"))
   leg = new TLegend(.4, .85, .95, .95, "#Delta(output - truth) vs. truth for:");
const char* xAxisTitle = 0;
h->SetLineColor(1 + outnode);
xAxisTitle = h->GetXaxis()->GetTitle();
hs->GetXaxis()->SetTitle(xAxisTitle);
hs->GetYaxis()->SetTitle("#Delta(output - truth)");
// TMLPAnalyzer::DrawTruthDeviationInOut(): profile of (output - truth) for one
// output node versus the value of one input node.
TString pipehist = Form("MLP_truthdev_i%d_o%d", innode, outnode);
drawline.Form("Out.Out%d-True.True%d:In.In%d>>", outnode, outnode, innode);
fIOTree->Draw(drawline + pipehist + "(50)", "", "goff prof");
TProfile* h = (TProfile*)gROOT->FindObject(pipehist);
h->SetTitle(Form("#Delta(output - truth) of %s vs. input %s",
            titleOutNeuron, titleInNeuron));
h->GetXaxis()->SetTitle(Form("%s", titleInNeuron));
h->GetYaxis()->SetTitle(Form("#Delta(output - truth) for %s",
if (!strstr(option, "goff"))

// TMLPAnalyzer::DrawTruthDeviationInsOut(): stack these profiles over all
// input nodes for a given output node.
sName.Form("MLP_TruthDeviationIO_%d", outnode);
THStack *hs = new THStack(sName,
                          Form("Deviation of MLP output %s from truth",
if (!option || !strstr(option, "goff"))
   leg = new TLegend(.4, .75, .95, .95,
                     Form("#Delta(output - truth) of %s vs. input for:",
for (innode = 0; innode < numInNodes; innode++) {
h->SetLineColor(1 + innode);
if (leg) leg->AddEntry(h, h->GetXaxis()->GetTitle());
hs->GetXaxis()->SetTitle("Input value");
hs->GetYaxis()->SetTitle(Form("#Delta(output - truth) for %s",
const char * GetOutputNeuronTitle(Int_t out)
Returns the name of any neuron from the output layer.
void CheckNetwork()
Gives some information about the network in the terminal.
void DrawDInput(Int_t i)
Draws the distribution (on the test sample) of the impact on the network output of a small variation ...
Double_t Evaluate(Int_t index, Double_t *params) const
Returns the Neural Net output for a given set of input parameters; the number of parameters must equal the number of input neurons...
void GatherInformations()
Collects information about what is useful in the network.
void DrawDInputs()
Draws the distribution (on the test sample) of the impact on the network output of a small variation ...
Int_t GetLayers()
Returns the number of layers.
Int_t GetNeurons(Int_t layer)
Returns the number of neurons in given layer.
void DrawNetwork(Int_t neuron, const char *signal, const char *bg)
Draws the distribution of the neural network output (using the ith neuron).
TProfile * DrawTruthDeviationInOut(Int_t innode, Int_t outnode=0, Option_t *option="")
Creates a profile of the difference of the MLP output outnode minus the true value of outnode vs the ...
TMultiLayerPerceptron * fNetwork
const char * GetInputNeuronTitle(Int_t in)
Returns the name of any neuron from the input layer.
virtual ~TMLPAnalyzer()
Destructor.
TEventList * fTest
EventList defining the events in the test dataset.
TString GetNeuronFormula(Int_t idx)
Returns the formula used as input for neuron (idx) in the first layer.
TProfile * DrawTruthDeviation(Int_t outnode=0, Option_t *option="")
Creates a profile of the difference of the MLP output minus the true value for a given output node out...
THStack * DrawTruthDeviations(Option_t *option="")
Creates TProfiles of the difference of the MLP output minus the true value vs the true value...
void GetEntry(Int_t) const
Load an entry into the network.
Double_t Sqrt(Double_t x)
Double_t Result(Int_t event, Int_t index=0) const
Computes the output for a given event.
TString GetStructure() const
THStack * DrawTruthDeviationInsOut(Int_t outnode=0, Option_t *option="")
Creates a profile of the difference of the MLP output outnode minus the true value of outnode vs the ...
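Putting it together, a typical session might look like the sketch below; the tree simu, its branches x, y, z, type and the training/test selections are placeholders for the user's data, while the TMLPAnalyzer calls are the ones listed above:

// build and train the network on a hypothetical tree 'simu'
TMultiLayerPerceptron mlp("x,y,z:10:5:type", simu, "Entry$%2", "(Entry$+1)%2");
mlp.Train(100, "text,update=10");

// analyze it
TMLPAnalyzer ana(&mlp);
ana.GatherInformations();                   // required before the Draw methods
ana.CheckNetwork();
ana.DrawDInputs();                          // relative importance of the inputs
ana.DrawNetwork(0, "type==1", "type==0");   // signal vs. background output
ana.DrawTruthDeviations();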