#include "TEventList.h"
#include "TTimeStamp.h"
#include "TMultiGraph.h"
#include "TDirectory.h"
#include "Riostream.h"
#include "TTreeFormula.h"
#include "TTreeFormulaManager.h"
#include "TObjString.h"

// TMultiLayerPerceptron::TMultiLayerPerceptron() -- default constructor (fragment)
if (!TClass::GetClass("TTreePlayer")) gSystem->Load("libTreePlayer");
// Constructor taking TEventList training/test sets (signature fragment)
TEventList * training, ..., const char* extF, const char* extD)
   if (!TClass::GetClass("TTreePlayer")) gSystem->Load("libTreePlayer");
// Same variant, with an event-weight expression (signature fragment)
const char * weight, TTree * data, TEventList * training, ..., const char* extF, const char* extD)
   if (!TClass::GetClass("TTreePlayer")) gSystem->Load("libTreePlayer");
// Constructor taking the training/test selections as string cuts (fragment)
const char * training, ..., const char* extF, const char* extD)
   if (!TClass::GetClass("TTreePlayer")) gSystem->Load("libTreePlayer");
   ...
   fTraining = new TEventList(Form("fTrainingList_%lu", (ULong_t)this));
   fTest     = new TEventList(Form("fTestList_%lu", (ULong_t)this));
   // An empty test cut defaults to the complement of the training cut.
   TString testcut = test;
   if (testcut == "") testcut = Form("!(%s)", training);
   ...
   data->Draw(Form(">>fTrainingList_%lu", (ULong_t)this), training, "goff");
   data->Draw(Form(">>fTestList_%lu", (ULong_t)this), (const char *)testcut, "goff");
   ...
   Warning("TMultiLayerPerceptron::TMultiLayerPerceptron",
           "Data not set. Cannot define datasets");
// Constructor with an event-weight expression and string cuts (fragment)
const char * weight, TTree * data, const char * training, ..., const char* extF, const char* extD)
   if (!TClass::GetClass("TTreePlayer")) gSystem->Load("libTreePlayer");
   ...
   fTraining = new TEventList(Form("fTrainingList_%lu", (ULong_t)this));
   fTest     = new TEventList(Form("fTestList_%lu", (ULong_t)this));
   TString testcut = test;
   if (testcut == "") testcut = Form("!(%s)", training);
   ...
   data->Draw(Form(">>fTrainingList_%lu", (ULong_t)this), training, "goff");
   data->Draw(Form(">>fTestList_%lu", (ULong_t)this), (const char *)testcut, "goff");
   ...
   Warning("TMultiLayerPerceptron::TMultiLayerPerceptron",
           "Data not set. Cannot define datasets");
// TMultiLayerPerceptron::SetData (fragment)
std::cerr << "Error: data already defined." << std::endl;
// TMultiLayerPerceptron::SetTrainingDataSet(const char *) (fragment)
fTraining = new TEventList(Form("fTrainingList_%lu", (ULong_t)this));
fData->Draw(Form(">>fTrainingList_%lu", (ULong_t)this), train, "goff");
...
Warning("TMultiLayerPerceptron::TMultiLayerPerceptron",
        "Data not set. Cannot define datasets");
// TMultiLayerPerceptron::SetTestDataSet(const char *) (fragment)
// Delete any previous test list, guarded by a name check.
if (fTest)
   if (strncmp(fTest->GetName(), Form("fTestList_%lu", (ULong_t)this), 10))
      delete fTest;
fTest = new TEventList(Form("fTestList_%lu", (ULong_t)this));
fData->Draw(Form(">>fTestList_%lu", (ULong_t)this), test, "goff");
...
Warning("TMultiLayerPerceptron::TMultiLayerPerceptron",
        "Data not set. Cannot define datasets");
// TMultiLayerPerceptron::GetEntry (fragment)
fData->GetEntry(entry);
Int_t nentries = fNetwork.GetEntriesFast();
for (Int_t i = 0; i < nentries; i++) {
// TMultiLayerPerceptron::Train (fragment)
TString opt = option;
Bool_t newCanvas  = true;
Bool_t minE_Train = false;
Bool_t minE_Test  = false;
if (opt.Contains("text")) ...
if (opt.Contains("graph")) ...
Int_t displayStepping = 1;
if (opt.Contains("update=")) {
   TRegexp reg("update=[0-9]*");
   TString out = opt(reg);
   displayStepping = atoi(out.Data() + 7);   // skip over "update="
}
if (opt.Contains("current")) ...
if (opt.Contains("minerrortrain")) ...
if (opt.Contains("minerrortest")) ...
TVirtualPad *canvas = 0;
TMultiGraph *residual_plot = 0;
TGraph *train_residual_plot = 0;
TGraph *test_residual_plot  = 0;
...
Error("Train", "Training/Test samples still not defined. Cannot train the neural network");
...
Info("Train", "Using %d train and %d test entries.", ...);
std::cout << "Training the Neural Network" << std::endl;
residual_plot = new TMultiGraph;
canvas = new TCanvas("NNtraining", "Neural Net training");
if (!canvas) canvas = new TCanvas("NNtraining", "Neural Net training");
train_residual_plot = new TGraph(nEpoch);
test_residual_plot  = new TGraph(nEpoch);
canvas->SetLeftMargin(0.14);
train_residual_plot->SetLineColor(4);
test_residual_plot->SetLineColor(2);
residual_plot->Add(train_residual_plot);
residual_plot->Add(test_residual_plot);
residual_plot->Draw("LA");
if (residual_plot->GetXaxis()) residual_plot->GetXaxis()->SetTitle("Epoch");
if (residual_plot->GetYaxis()) residual_plot->GetYaxis()->SetTitle("Error");
if (!opt.Contains("+")) ...
Double_t *buffer = new Double_t[els];
Double_t *dir    = new Double_t[els];
for (i = 0; i < els; i++) ...
TMatrixD bfgsh(matrix_size, matrix_size);
Double_t training_E = 1e10;
Double_t test_E     = 1e10;
// Main loop: stop at nEpoch or once the requested minimum error is reached.
for (Int_t iepoch = 0; (iepoch < nEpoch) && (!minE_Train || training_E > minE)
                       && (!minE_Test || test_E > minE); iepoch++) {
   ...
   for (i = 0; i < els; i++)
      onorm += dir[i] * dir[i];
   Int_t nentries = fNetwork.GetEntriesFast();
   for (i = 0; i < nentries; i++) {
      prod -= dir[idx++] * neuron->GetDEDw();
   ...
   for (i = 0; i < nentries; i++) {
      prod -= dir[idx++] * synapse->GetDEDw();
   ...
   for (i = 0; i < els; i++)
      onorm += dir[i] * dir[i];
   ...
   if (GetBFGSH(bfgsh, gamma, delta)) {
   ...
   Error("TMultiLayerPerceptron::Train()", "Line search fail");
   ...
   Error("TMultiLayerPerceptron::Train()", "Stop.");
   ...
   gSystem->ProcessEvents();
   if ((verbosity % 2) && ((!(iepoch % displayStepping)) || (iepoch == nEpoch - 1))) {
      std::cout << "Epoch: " << iepoch
                << " learn=" << training_E
                << " test=" << test_E ...
   train_residual_plot->SetPoint(iepoch, iepoch, training_E);
   test_residual_plot->SetPoint(iepoch, iepoch, test_E);
   // Pre-fill the remaining points so the plot axes are sensible from the start.
   Double_t trp = train_residual_plot->GetY()[iepoch];
   Double_t tep = test_residual_plot->GetY()[iepoch];
   for (i = 1; i < nEpoch; i++) {
      train_residual_plot->SetPoint(i, i, trp);
      test_residual_plot->SetPoint(i, i, tep);
   }
   if ((!(iepoch % displayStepping)) || (iepoch == nEpoch - 1)) {
      if (residual_plot->GetYaxis()) {
         residual_plot->GetYaxis()->UnZoom();
         residual_plot->GetYaxis()->SetTitleOffset(1.4);
         residual_plot->GetYaxis()->SetDecimals();
...
std::cout << "Training done." << std::endl;
TLegend *legend = new TLegend(.75, .80, .95, .95);
legend->AddEntry(residual_plot->GetListOfGraphs()->At(0), "Training sample", "L");
legend->AddEntry(residual_plot->GetListOfGraphs()->At(1), ...
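Putting the option parsing together, a typical call might look like this sketch (the option tokens are the ones parsed above):

   // "text": print errors each epoch; "graph": draw the residual plot;
   // "update=10": refresh the display every 10 epochs.
   mlp->Train(100, "text,graph,update=10");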
// TMultiLayerPerceptron::GetError / GetSumSquareError (fragments)
Int_t nEntries = fLastLayer.GetEntriesFast();
if (nEntries == 0)
   return 0.0;
...
Int_t nEvents = list->GetN();
for (i = 0; i < nEvents; i++) {
   error += GetError(list->GetEntry(i));
...
Int_t nEvents = (Int_t) fData->GetEntries();
for (i = 0; i < nEvents; i++) {
...
for (Int_t i = 0; i < fLastLayer.GetEntriesFast(); i++) {
...
return (error / 2.);
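Reading off the surviving lines, GetSumSquareError accumulates half the squared output-target residuals over the output neurons:

   $E = \frac{1}{2} \sum_i (o_i - t_i)^2$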
// TMultiLayerPerceptron::GetCrossEntropyBinary (fragment)
for (Int_t i = 0; i < fLastLayer.GetEntriesFast(); i++) {
   Double_t output = neuron->GetValue();
   ...
   if (target < DBL_EPSILON) {
   ...
   if ((1 - target) < DBL_EPSILON) {
   ...
   if (output == 0.0 || output == 1.0)
   ...
   error -= target * TMath::Log(output / target)
          + (1 - target) * TMath::Log((1 - output) / (1 - target));

// TMultiLayerPerceptron::GetCrossEntropy -- softmax variant (fragment)
for (Int_t i = 0; i < fLastLayer.GetEntriesFast(); i++) {
   Double_t output = neuron->GetValue();
   ...
   if (target > DBL_EPSILON) {
      error -= target * TMath::Log(output / target);
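The two TMath::Log accumulations above correspond to the usual cross-entropy expressions, for sigmoid (binary) outputs and for a softmax output respectively:

   $E = -\sum_i \left[ t_i \ln\frac{o_i}{t_i} + (1-t_i)\ln\frac{1-o_i}{1-t_i} \right]$
   $E = -\sum_i t_i \ln\frac{o_i}{t_i}$

The DBL_EPSILON guards handle the limits t = 0 and t = 1, where the corresponding term vanishes.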
// TMultiLayerPerceptron::ComputeDEDw (fragment)
// Accumulates dE/dw for every synapse and neuron, averaged over the events.
Int_t nentries = fSynapses.GetEntriesFast();
for (i = 0; i < nentries; i++) {
...
nentries = fNetwork.GetEntriesFast();
for (i = 0; i < nentries; i++) {
...
Double_t eventWeight = 1.;
// Event loop (training list present):
for (i = 0; i < nEvents; i++) {
   ...
   for (j = 0; j < nentries; j++) {
   ...
   nentries = fNetwork.GetEntriesFast();
   for (j = 0; j < nentries; j++) {
   ...
   for (j = 0; j < nentries; j++) {
   ...
   nentries = fNetwork.GetEntriesFast();
   for (j = 0; j < nentries; j++) {
...
// Event loop over the full tree:
Int_t nEvents = (Int_t) fData->GetEntries();
for (i = 0; i < nEvents; i++) {
   ...
   for (j = 0; j < nentries; j++) {
   ...
   nentries = fNetwork.GetEntriesFast();
   for (j = 0; j < nentries; j++) {
   ...
   for (j = 0; j < nentries; j++) {
   ...
   nentries = fNetwork.GetEntriesFast();
   for (j = 0; j < nentries; j++) {
...
// Final normalisation by the number of events:
Int_t nentries = fSynapses.GetEntriesFast();
for (j = 0; j < nentries; j++) {
...
nentries = fNetwork.GetEntriesFast();
for (j = 0; j < nentries; j++) {
// TMultiLayerPerceptron::AttachData (fragment)
Bool_t normalize = false;
fManager = new TTreeFormulaManager;
// Temporarily raise the TFormula limits while parsing the branch expressions.
Int_t maxop, maxpar, maxconst;
ROOT::v5::TFormula::GetMaxima(maxop, maxpar, maxconst);
ROOT::v5::TFormula::SetMaxima(10, 10, 10);
...
const TObjArray *inpL = input.Tokenize(", ");
R__ASSERT(nentries == inpL->GetLast() + 1);
for (j = 0; j < nentries; j++) {
   const TString brName = ((TObjString *)inpL->At(j))->GetString();
...
TString output = TString( ...
const TObjArray *outL = output.Tokenize(", ");
R__ASSERT(nentries == outL->GetLast() + 1);
for (j = 0; j < nentries; j++) {
   const TString brName = ((TObjString *)outL->At(j))->GetString();
...
ROOT::v5::TFormula::SetMaxima(maxop, maxpar, maxconst);   // restore the limits
// TMultiLayerPerceptron::ExpandStructure (fragment)
const TObjArray *inpL = input.Tokenize(", ");
Int_t nneurons = inpL->GetLast() + 1;
TString hiddenAndOutput = TString( ...
for (i = 0; i < nneurons; i++) {
   const TString name = ((TObjString *)inpL->At(i))->GetString();
   TTreeFormula f("sizeTestFormula", name, fData);
   if (f.GetMultiplicity() == 1 && f.GetNdata() > 1) {
      Warning("TMultiLayerPerceptron::ExpandStructure()",
              "Variable size arrays cannot be used to build implicitely an input layer. The index 0 will be assumed.");
   ...
   else if (f.GetNdata() > 1) {
      for (Int_t j = 0; j < f.GetNdata(); j++) {
         if (i || j) newInput += ",";
   ...
   if (i) newInput += ",";
...
fStructure = newInput + ":" + hiddenAndOutput;
// TMultiLayerPerceptron::BuildNetwork (fragment)
TString hidden = TString( ...
TString output = TString( ...
Int_t bll = atoi(TString(hidden(hidden.Last(':') + 1,
                 hidden.Length() - (hidden.Last(':') + 1))).Data());
if (input.Length() == 0) {
   Error("BuildNetwork()", "malformed structure. No input layer.");
...
if (output.Length() == 0) {
   Error("BuildNetwork()", "malformed structure. No output layer.");
// TMultiLayerPerceptron::BuildFirstLayer (fragment)
const TObjArray *inpL = input.Tokenize(", ");
const Int_t nneurons = inpL->GetLast() + 1;
for (i = 0; i < nneurons; i++) {
   const TString name = ((TObjString *)inpL->At(i))->GetString();
// TMultiLayerPerceptron::BuildHiddenLayers (fragment)
Int_t end = hidden.Index(":", beg + 1);
Int_t prevStart = 0;
Int_t prevStop = fNetwork.GetEntriesFast();
...
end = hidden.Index(":", beg + 1);
...
BuildOneHiddenLayer(hidden(beg, hidden.Length() - beg), layer,
                    prevStart, prevStop, true);
// TMultiLayerPerceptron::BuildOneHiddenLayer (fragment)
Int_t& prevStart, Int_t& prevStop, ...
if (!sNumNodes.IsAlnum() || sNumNodes.IsAlpha()) {
   Error("BuildOneHiddenLayer",
         "The specification '%s' for hidden layer %d must contain only numbers!",
         sNumNodes.Data(), layer - 1);
...
Int_t num = atoi(sNumNodes.Data());
for (Int_t i = 0; i < num; i++) {
   name.Form("HiddenL%d:N%d", layer, i);
   ...
   for (Int_t j = prevStart; j < prevStop; j++) {
...
Int_t nEntries = fNetwork.GetEntriesFast();
for (Int_t i = prevStop; i < nEntries; i++) {
   ...
   for (Int_t j = prevStop; j < nEntries; j++)
...
prevStart = prevStop;
prevStop = fNetwork.GetEntriesFast();
// TMultiLayerPerceptron::BuildLastLayer (fragment)
Int_t nneurons = output.CountChar(',') + 1;
...
Int_t prevStop = fNetwork.GetEntriesFast();
Int_t prevStart = prevStop - prev;
...
for (i = 0; i < nneurons; i++) {
   Ssiz_t nextpos = output.Index(",", pos);
   ...
   name = output(pos, nextpos - pos);
   else name = output(pos, output.Length());
   ...
   for (j = prevStart; j < prevStop; j++) {
...
Int_t nEntries = fNetwork.GetEntriesFast();
for (i = prevStop; i < nEntries; i++) {
   ...
   for (j = prevStop; j < nEntries; j++)
// TMultiLayerPerceptron::DrawResult (fragment)
TString opt = option;
...
Error("DrawResult()", "no such output.");
...
if (!opt.Contains("nocanv"))
   new TCanvas("NNresult", "Neural Net output");
TEventList *events = 0;
...
if (opt.Contains("train")) {
   setname = Form("train%d", index);
} else if (opt.Contains("test")) {
   setname = Form("test%d", index);
...
if ((!fData) || (!events)) {
   Error("DrawResult()", "no dataset.");
...
if (opt.Contains("comp")) {
   TString title = "Neural Net Output control. ";
   setname = "MLP_" + setname + "_comp";
   TH2D *hist = ((TH2D *) gDirectory->Get(setname.Data()));
   ...
   hist = new TH2D(setname.Data(), title.Data(), 50, -1, 1, 50, -1, 1);
   Int_t nEvents = events->GetN();
   for (i = 0; i < nEvents; i++) {
...
   TString title = "Neural Net Output. ";
   setname = "MLP_" + setname;
   TH1D *hist = ((TH1D *) gDirectory->Get(setname.Data()));
   ...
   hist = new TH1D(setname, title, 50, 1, -1);   // reversed limits: automatic range
   Int_t nEvents = events->GetN();
   for (i = 0; i < nEvents; i++)
      hist->Fill(Result(events->GetEntry(i), index));
   ...
   if (opt.Contains("train") && opt.Contains("test")) {
      ...
      hist = ((TH1D *) gDirectory->Get("MLP_test"));
      ...
      hist = new TH1D(setname, title, 50, 1, -1);
      ...
      nEvents = events->GetN();
      for (i = 0; i < nEvents; i++)
         hist->Fill(Result(events->GetEntry(i), index));
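A usage sketch for DrawResult, using the options parsed above ("train"/"test" select the dataset, "comp" draws the 2D comparison histogram, "nocanv" reuses the current canvas):

   mlp->DrawResult(0, "test");        // 1D output histogram for output neuron 0
   mlp->DrawResult(0, "train comp");  // 2D control plot on the training set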
// TMultiLayerPerceptron::DumpWeights (fragment)
TString filen = filename;
std::ostream *output;
...
Error("TMultiLayerPerceptron::DumpWeights()", "Invalid file name");
...
output = &std::cout;
...
output = new std::ofstream(filen.Data());
*output << "#input normalization" << std::endl;
for (j = 0; j < nentries; j++) {
   *output << neuron->GetNormalisation()[0] << " "
           << neuron->GetNormalisation()[1] << std::endl;
...
*output << "#output normalization" << std::endl;
for (j = 0; j < nentries; j++) {
   *output << neuron->GetNormalisation()[0] << " "
           << neuron->GetNormalisation()[1] << std::endl;
...
*output << "#neurons weights" << std::endl;
TObjArrayIter *it = (TObjArrayIter *) fNetwork.MakeIterator();
while ((neuron = (TNeuron *) it->Next()))
   *output << neuron->GetWeight() << std::endl;
...
it = (TObjArrayIter *) fSynapses.MakeIterator();
*output << "#synapses weights" << std::endl;
while ((synapse = (TSynapse *) it->Next()))
   *output << synapse->GetWeight() << std::endl;
...
((std::ofstream *) output)->close();
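The dump format written above (normalization blocks followed by neuron and synapse weights) is what LoadWeights parses back; a round-trip sketch:

   mlp->DumpWeights("weights.txt");   // the default "-" writes to stdout instead
   mlp->LoadWeights("weights.txt");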
// TMultiLayerPerceptron::LoadWeights (fragment)
TString filen = filename;
...
Error("TMultiLayerPerceptron::LoadWeights()", "Invalid file name");
...
char *buff = new char[100];
std::ifstream input(filen.Data());
input.getline(buff, 100);   // getline() skips the headers and end-of-line markers
TObjArrayIter *it = (TObjArrayIter *) fFirstLayer.MakeIterator();
while ((neuron = (TNeuron *) it->Next())) {
...
input.getline(buff, 100);
input.getline(buff, 100);
it = (TObjArrayIter *) fLastLayer.MakeIterator();
while ((neuron = (TNeuron *) it->Next())) {
...
input.getline(buff, 100);
input.getline(buff, 100);
it = (TObjArrayIter *) fNetwork.MakeIterator();
while ((neuron = (TNeuron *) it->Next())) {
...
input.getline(buff, 100);
input.getline(buff, 100);
it = (TObjArrayIter *) fSynapses.MakeIterator();
while ((synapse = (TSynapse *) it->Next())) {
// TMultiLayerPerceptron::Evaluate (fragment)
TObjArrayIter *it = (TObjArrayIter *) fNetwork.MakeIterator();
while ((neuron = (TNeuron *) it->Next()))
   ...   // invalidate the buffered values (SetNewEvent)
it = (TObjArrayIter *) fFirstLayer.MakeIterator();
while ((neuron = (TNeuron *) it->Next()))
   ...   // feed params[] into the input neurons (ForceExternalValue)
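Evaluate feeds an external parameter array directly into the first layer instead of reading the tree; a sketch, assuming a network with three input neurons:

   Double_t params[3] = {1.2, 0.7, -0.3};     // one value per input neuron
   Double_t out0 = mlp->Evaluate(0, params);  // value of output neuron 0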
// TMultiLayerPerceptron::Export -- C++ flavour (fragment)
TString lg = language;
...
Warning("TMultiLayerPerceptron::Export",
        "Request to export a network using an external function");
...
TString basefilename = filename;
Int_t slash = basefilename.Last('/') + 1;
if (slash) basefilename = TString(basefilename(slash, basefilename.Length() - slash));
...
TString classname = basefilename;
TString header = filename;
TString source = filename;
std::ofstream headerfile(header);
std::ofstream sourcefile(source);
headerfile << "#ifndef " << basefilename << "_h" << std::endl;
headerfile << "#define " << basefilename << "_h" << std::endl << std::endl;
headerfile << "class " << classname << " { " << std::endl;
headerfile << "public:" << std::endl;
headerfile << "   " << classname << "() {}" << std::endl;
headerfile << "   ~" << classname << "() {}" << std::endl;
sourcefile << "#include \"" << header << "\"" << std::endl;
sourcefile << "#include <cmath>" << std::endl << std::endl;
headerfile << "   double Value(int index";
sourcefile << "double " << classname << "::Value(int index";
for (i = 0; i < fFirstLayer.GetEntriesFast(); i++) {
   headerfile << ",double in" << i;
   sourcefile << ",double in" << i;
}
headerfile << ");" << std::endl;
sourcefile << ") {" << std::endl;
for (i = 0; i < fFirstLayer.GetEntriesFast(); i++)
   sourcefile << "   input" << i << " = (in" << i << " - " ...
sourcefile << "   switch(index) {" << std::endl;
TObjArrayIter *it = (TObjArrayIter *) fLastLayer.MakeIterator();
while ((neuron = (TNeuron *) it->Next()))
   sourcefile << "     case " << idx++ << ":" << std::endl
              << "         return neuron" << neuron << "();" << std::endl;
sourcefile << "     default:" << std::endl
           << "         return 0.;" << std::endl << "   }" ...
sourcefile << "}" << std::endl << std::endl;
headerfile << "   double Value(int index, double* input);" << std::endl;
sourcefile << "double " << classname << "::Value(int index, double* input) {" << std::endl;
for (i = 0; i < fFirstLayer.GetEntriesFast(); i++)
   sourcefile << "   input" << i << " = (input[" << i << "] - " ...
sourcefile << "   switch(index) {" << std::endl;
it = (TObjArrayIter *) fLastLayer.MakeIterator();
while ((neuron = (TNeuron *) it->Next()))
   sourcefile << "     case " << idx++ << ":" << std::endl
              << "         return neuron" << neuron << "();" << std::endl;
sourcefile << "     default:" << std::endl
           << "         return 0.;" << std::endl << "   }" ...
sourcefile << "}" << std::endl << std::endl;
headerfile << "private:" << std::endl;
for (i = 0; i < fFirstLayer.GetEntriesFast(); i++)
   headerfile << "   double input" << i << ";" << std::endl;
it = (TObjArrayIter *) fNetwork.MakeIterator();
while ((neuron = (TNeuron *) it->Next())) {
   if (!neuron->GetPre(0)) {
      // Input neuron: just returns the normalised input value.
      headerfile << "   double neuron" << neuron << "();" << std::endl;
      sourcefile << "double " << classname << "::neuron" << neuron
                 << "() {" << std::endl;
      sourcefile << "   return input" << idx++ << ";" << std::endl;
      sourcefile << "}" << std::endl << std::endl;
   ...
   // Hidden/output neuron: weighted sum of the incoming synapses...
   headerfile << "   double input" << neuron << "();" << std::endl;
   sourcefile << "double " << classname << "::input" << neuron
              << "() {" << std::endl;
   sourcefile << "   double input = " << neuron->GetWeight() << ";" << std::endl;
   ...
   while ((syn = neuron->GetPre(n++))) {
      sourcefile << "   input += synapse" << syn << "();" << std::endl;
   }
   sourcefile << "   return input;" << std::endl;
   sourcefile << "}" << std::endl << std::endl;
   // ...followed by the activation function:
   headerfile << "   double neuron" << neuron << "();" << std::endl;
   sourcefile << "double " << classname << "::neuron" << neuron << "() {" << std::endl;
   sourcefile << "   double input = input" << neuron << "();" << std::endl;
   ...
   sourcefile << "   return ((input < -709. ? 0. : (1/(1+exp(-input)))) * ";   // sigmoid
   ...
   sourcefile << "   return (input * ";                                        // linear
   ...
   sourcefile << "   return (tanh(input) * ";                                  // tanh
   ...
   sourcefile << "   return (exp(-input*input) * ";                            // gaussian
   ...
   sourcefile << "   return (exp(input) / (";                                  // softmax
   sourcefile << "exp(input" << side << "())";
   sourcefile << " + exp(input" << side << "())";
   sourcefile << ") * ";
   ...
   sourcefile << "   return (0.0 * ";                                          // off
   ...
   sourcefile << "}" << std::endl << std::endl;
}
it = (TObjArrayIter *) fSynapses.MakeIterator();
while ((synapse = (TSynapse *) it->Next())) {
   headerfile << "   double synapse" << synapse << "();" << std::endl;
   sourcefile << "double " << classname << "::synapse"
              << synapse << "() {" << std::endl;
   sourcefile << "   return (neuron" << synapse->GetPre()
              << "()*" << synapse->GetWeight() << ");" << std::endl;
   sourcefile << "}" << std::endl << std::endl;
}
headerfile << "};" << std::endl << std::endl;
headerfile << "#endif // " << basefilename << "_h" << std::endl << std::endl;
...
std::cout << header << " and " << source << " created." << std::endl;
// TMultiLayerPerceptron::Export -- FORTRAN flavour (fragment)
else if (lg == "FORTRAN") {
   TString implicit = "      implicit double precision (a-h,n-z)\n";
   std::ofstream sigmoid("sigmoid.f");
   sigmoid << "      double precision FUNCTION SIGMOID(X)" << std::endl
           << "      IF(X.GT.37.) THEN" << std::endl
           << "         SIGMOID = 1." << std::endl
           << "      ELSE IF(X.LT.-709.) THEN" << std::endl
           << "         SIGMOID = 0." << std::endl
           << "      ELSE" << std::endl
           << "         SIGMOID = 1./(1.+EXP(-X))" << std::endl
           << "      ENDIF" << std::endl
           << "      END" << std::endl;
   TString source = filename;
   std::ofstream sourcefile(source);
   ...
   sourcefile << "      double precision function " << filename
              << "(x, index)" << std::endl;
   sourcefile << implicit;
   sourcefile << "      double precision x("
              << fFirstLayer.GetEntriesFast() << ")" << std::endl << std::endl;
   ...
   sourcefile << "C --- Last Layer" << std::endl;
   TObjArrayIter *it = (TObjArrayIter *) fLastLayer.MakeIterator();
   TString ifelseif = "      if (index.eq.";
   while ((neuron = (TNeuron *) it->Next())) {
      sourcefile << ifelseif.Data() << idx++ << ") then" << std::endl
                 << "=neuron" << neuron << "(x);" << std::endl;
      ifelseif = "      else if (index.eq.";
   }
   sourcefile << "      else" << std::endl
              << "          " << filename << "=0.d0" << std::endl
              << "      endif" << std::endl;
   sourcefile << "      end" << std::endl;
   ...
   sourcefile << "C --- First and Hidden layers" << std::endl;
   it = (TObjArrayIter *) fNetwork.MakeIterator();
   while ((neuron = (TNeuron *) it->Next())) {
      sourcefile << "      double precision function neuron"
                 << neuron << "(x)" << std::endl
      ...
      sourcefile << "      double precision x("
                 << fFirstLayer.GetEntriesFast() << ")" << std::endl << std::endl;
      if (!neuron->GetPre(0)) {
         sourcefile << "      neuron" << neuron
                    << " = (x(" << idx+1 << ") - " ...
                    << "d0" << std::endl;
         ...
         sourcefile << "      neuron" << neuron
                    << " = " << neuron->GetWeight() << "d0" << std::endl;
         ...
         while ((syn = neuron->GetPre(n++)))
            sourcefile << "      neuron" << neuron
                       << " = neuron" << neuron
                       << " + synapse" << syn << "(x)" << std::endl;
         ...
         sourcefile << "      neuron" << neuron
                    << "= (sigmoid(neuron" << neuron << ")*";          // sigmoid
         ...
         sourcefile << "      neuron" << neuron
                    << "= (tanh(neuron" << neuron << ")*";             // tanh
         ...
         sourcefile << "      neuron" << neuron
                    << "= (exp(-neuron" << neuron << "*neuron" ...     // gaussian
         ...
         sourcefile << "      div = exp(neuron" << side << "())" << std::endl;
         sourcefile << "      div = div + exp(neuron" << side << "())" << std::endl;
         sourcefile << "      neuron" << neuron;
         sourcefile << "= (exp(neuron" << neuron << ") / div * ";      // softmax
         ...
         sourcefile << "      neuron " << neuron << "= 0.";            // off
      ...
      sourcefile << "      end" << std::endl;
   }
   ...
   sourcefile << "C --- Synapses" << std::endl;
   it = (TObjArrayIter *) fSynapses.MakeIterator();
   while ((synapse = (TSynapse *) it->Next())) {
      sourcefile << "      double precision function " << "synapse"
                 << synapse << "(x)\n" << implicit;
      sourcefile << "      double precision x("
                 << fFirstLayer.GetEntriesFast() << ")" << std::endl << std::endl;
      sourcefile << "      synapse" << synapse
                 << "=neuron" << synapse->GetPre()
                 << "(x)*" << synapse->GetWeight() << "d0" << std::endl;
      sourcefile << "      end" << std::endl << std::endl;
   }
   ...
   std::cout << source << " created." << std::endl;
// TMultiLayerPerceptron::Export -- Python flavour (fragment)
else if (lg == "PYTHON") {
   TString classname = filename;
   TString pyfile = filename;
   ...
   std::ofstream pythonfile(pyfile);
   pythonfile << "from math import exp" << std::endl << std::endl;
   pythonfile << "from math import tanh" << std::endl << std::endl;
   pythonfile << "class " << classname << ":" << std::endl;
   pythonfile << "\tdef value(self,index";
   for (i = 0; i < fFirstLayer.GetEntriesFast(); i++) {
      pythonfile << ",in" << i;
   }
   pythonfile << "):" << std::endl;
   for (i = 0; i < fFirstLayer.GetEntriesFast(); i++)
      pythonfile << "\t\tself.input" << i << " = (in" << i << " - " ...
   TObjArrayIter *it = (TObjArrayIter *) fLastLayer.MakeIterator();
   while ((neuron = (TNeuron *) it->Next()))
      pythonfile << "\t\tif index==" << idx++
                 << ": return self.neuron" << neuron << "();" << std::endl;
   pythonfile << "\t\treturn 0." << std::endl;
   it = (TObjArrayIter *) fNetwork.MakeIterator();
   while ((neuron = (TNeuron *) it->Next())) {
      pythonfile << "\tdef neuron" << neuron << "(self):" << std::endl;
      ...
      pythonfile << "\t\treturn self.input" << idx++ << std::endl;
      ...
      pythonfile << "\t\tinput = " << neuron->GetWeight() << std::endl;
      ...
      while ((syn = neuron->GetPre(n++)))
         pythonfile << "\t\tinput = input + self.synapse"
                    << syn << "()" << std::endl;
      ...
      pythonfile << "\t\tif input<-709. : return " << neuron->GetNormalisation()[1] << std::endl;
      pythonfile << "\t\treturn ((1/(1+exp(-input)))*";    // sigmoid
      ...
      pythonfile << "\t\treturn (input*";                  // linear
      ...
      pythonfile << "\t\treturn (tanh(input)*";            // tanh
      ...
      pythonfile << "\t\treturn (exp(-input*input)*";      // gaussian
      ...
      pythonfile << "\t\treturn (exp(input) / (";          // softmax
      pythonfile << "exp(self.neuron" << side << "())";
      pythonfile << " + exp(self.neuron" << side << "())";
      pythonfile << ") * ";
      ...
      pythonfile << "\t\treturn 0.";                       // off
   }
   ...
   it = (TObjArrayIter *) fSynapses.MakeIterator();
   while ((synapse = (TSynapse *) it->Next())) {
      pythonfile << "\tdef synapse" << synapse << "(self):" << std::endl;
      pythonfile << "\t\treturn (self.neuron" << synapse->GetPre()
                 << "()*" << synapse->GetWeight() << ")" << std::endl;
   }
   ...
   std::cout << pyfile << " created." << std::endl;
// TMultiLayerPerceptron::Shuffle -- random permutation of the index array (fragment)
for (Int_t i = 0; i < n; i++) {
   j = (Int_t) (rnd.Rndm() * a);
   ...
   index[j] = index[i];
   ...

// TMultiLayerPerceptron::MLP_Stochastic (fragment)
Int_t *index = new Int_t[nEvents];
...
for (i = 0; i < nEvents; i++)
...
for (i = 0; i < nEvents; i++) {
   ...
   for (j = 0; j < nentries; j++) {
   ...
   nentries = fNetwork.GetEntriesFast();
   for (j = 0; j < nentries; j++) {
   ...
   for (j = 0; j < nentries; j++) {
// TMultiLayerPerceptron::MLP_Batch (fragment)
TObjArrayIter *it = (TObjArrayIter *) fNetwork.MakeIterator();
while ((neuron = (TNeuron *) it->Next())) {
...
it = (TObjArrayIter *) fSynapses.MakeIterator();
while ((synapse = (TSynapse *) it->Next())) {
...

// TMultiLayerPerceptron::MLP_Line -- set weights to origin + dist * dir (fragment)
TObjArrayIter *it = (TObjArrayIter *) fNetwork.MakeIterator();
while ((neuron = (TNeuron *) it->Next())) {
   neuron->SetWeight(origin[idx] + (dir[idx] * dist));
...
it = (TObjArrayIter *) fSynapses.MakeIterator();
while ((synapse = (TSynapse *) it->Next())) {
   synapse->SetWeight(origin[idx] + (dir[idx] * dist));
...

// TMultiLayerPerceptron::SteepestDir -- dir = -gradient (fragment)
TObjArrayIter *it = (TObjArrayIter *) fNetwork.MakeIterator();
while ((neuron = (TNeuron *) it->Next()))
   dir[idx++] = -neuron->GetDEDw();
it = (TObjArrayIter *) fSynapses.MakeIterator();
while ((synapse = (TSynapse *) it->Next()))
   dir[idx++] = -synapse->GetDEDw();
// TMultiLayerPerceptron::LineSearch (fragment)
Double_t *origin = new Double_t[fNetwork.GetEntriesFast() + ...
nentries = fNetwork.GetEntriesFast();
for (j = 0; j < nentries; j++) {
...
for (j = 0; j < nentries; j++) {
...
// Bracket the minimum along the search direction.
Double_t alpha1 = 0.;
...
Double_t alpha3 = alpha2;
MLP_Line(origin, direction, alpha2);
...
Double_t err3 = err2;
Bool_t bingo = false;
...
for (icount = 0; icount < 100; icount++) {
   ...
   MLP_Line(origin, direction, alpha3);
...
for (icount = 0; icount < 100; icount++) {
   ...
   MLP_Line(origin, direction, alpha2);
...
// Tail of the parabolic-interpolation expression for fLastAlpha:
   (err3 - err1) / ((err3 - err2) / (alpha3 - alpha2)
   - (err2 - err1) / (alpha2 - alpha1)));
...
nentries = fNetwork.GetEntriesFast();
for (j = 0; j < nentries; j++) {
   buffer[idx] = neuron->GetWeight() - origin[idx];
...
for (j = 0; j < nentries; j++) {
   buffer[idx] = synapse->GetWeight() - origin[idx];
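The expression whose tail survives above is the standard parabolic-interpolation step: with three sampled points $(\alpha_1,e_1)$, $(\alpha_2,e_2)$, $(\alpha_3,e_3)$ bracketing the minimum, the new trial point is the minimum of the parabola through them,

   $\alpha = \frac{1}{2}\left[\alpha_1 + \alpha_3 - \dfrac{e_3-e_1}{\dfrac{e_3-e_2}{\alpha_3-\alpha_2} - \dfrac{e_2-e_1}{\alpha_2-\alpha_1}}\right]$

which matches the surviving operands term by term.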
// TMultiLayerPerceptron::ConjugateGradientsDir -- dir = -grad + beta * dir (fragment)
nentries = fNetwork.GetEntriesFast();
for (j = 0; j < nentries; j++) {
   dir[idx] = -neuron->GetDEDw() + beta * dir[idx];
...
for (j = 0; j < nentries; j++) {
   dir[idx] = -synapse->GetDEDw() + beta * dir[idx];
// TMultiLayerPerceptron::GetBFGSH -- BFGS update of the inverse-Hessian estimate (fragment)
if ((Double_t) gd[0][0] == 0.)
   ...
Double_t a = 1 / (Double_t) gd[0][0];
Double_t f = 1 + ((Double_t) gHg[0][0] * a);
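With $\gamma = g_{t+1}-g_t$ and $\delta = w_{t+1}-w_t$ (set up in SetGammaDelta below), the quantities computed above are $a = 1/(\gamma^{T}\delta)$ and $f = 1 + a\,\gamma^{T}H_t\gamma$, the scalars of the standard BFGS inverse-Hessian update:

   $H_{t+1} = H_t + \left(1 + \frac{\gamma^{T}H_t\gamma}{\gamma^{T}\delta}\right)\frac{\delta\delta^{T}}{\gamma^{T}\delta} - \frac{\delta\gamma^{T}H_t + H_t\gamma\delta^{T}}{\gamma^{T}\delta}$

The early return when $\gamma^{T}\delta = 0$ avoids the division.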
// TMultiLayerPerceptron::SetGammaDelta -- gamma = g(t+1)-g(t), delta = w(t+1)-w(t) (fragment)
nentries = fNetwork.GetEntriesFast();
for (j = 0; j < nentries; j++) {
   gamma[idx++][0] = -neuron->GetDEDw();
...
for (j = 0; j < nentries; j++) {
   gamma[idx++][0] = -synapse->GetDEDw();
...
for (Int_t i = 0; i < els; i++)
   delta[i].Assign(buffer[i]);
...
nentries = fNetwork.GetEntriesFast();
for (j = 0; j < nentries; j++) {
   gamma[idx++][0] += neuron->GetDEDw();
...
for (j = 0; j < nentries; j++) {
   gamma[idx++][0] += synapse->GetDEDw();
// TMultiLayerPerceptron::DerivDir -- scalar product gradient . direction (fragment)
Double_t output = 0;
...
nentries = fNetwork.GetEntriesFast();
for (j = 0; j < nentries; j++) {
   output += neuron->GetDEDw() * dir[idx++];
...
for (j = 0; j < nentries; j++) {
   output += synapse->GetDEDw() * dir[idx++];
// TMultiLayerPerceptron::BFGSDir -- dir = -(H . gradient) (fragment)
nentries = fNetwork.GetEntriesFast();
for (j = 0; j < nentries; j++) {
   dedw[idx++][0] = neuron->GetDEDw();
...
for (j = 0; j < nentries; j++) {
   dedw[idx++][0] = synapse->GetDEDw();
...
for (Int_t i = 0; i < els; i++)
   dir[i] = -direction[i][0];
// TMultiLayerPerceptron::Draw (fragment)
#define NeuronSize 2.5
Float_t xStep = 1./(nLayers+1.);
// First pass: draw the synapses between consecutive layers.
for (layer = 0; layer < nLayers-1; layer++) {
   Float_t nNeurons_this = 0;
   ...
   nNeurons_this = input.CountChar(',')+1;
   ...
   Int_t end = hidden.Index(":", beg + 1);
   ...
   Int_t num = atoi(TString(hidden(beg, end - beg)).Data());
   ...
   end = hidden.Index(":", beg + 1);
   ...
   if (layer == cnt) nNeurons_this = num;
   ...
   Int_t num = atoi(TString(hidden(beg, hidden.Length() - beg)).Data());
   ...
   if (layer == cnt) nNeurons_this = num;
   ...
   Float_t nNeurons_next = 0;
   if (layer == nLayers-2) {
      ...
      nNeurons_next = output.CountChar(',')+1;
   ...
   Int_t end = hidden.Index(":", beg + 1);
   ...
   Int_t num = atoi(TString(hidden(beg, end - beg)).Data());
   ...
   end = hidden.Index(":", beg + 1);
   ...
   if (layer+1 == cnt) nNeurons_next = num;
   ...
   Int_t num = atoi(TString(hidden(beg, hidden.Length() - beg)).Data());
   ...
   if (layer+1 == cnt) nNeurons_next = num;
   ...
   Float_t yStep_this = 1./(nNeurons_this+1.);
   Float_t yStep_next = 1./(nNeurons_next+1.);
   TObjArrayIter* it = (TObjArrayIter *) fSynapses.MakeIterator();
   Float_t maxWeight = 0;
   while ((theSynapse = (TSynapse *) it->Next()))
      maxWeight = maxWeight < theSynapse->GetWeight() ? theSynapse->GetWeight() : maxWeight;
   ...
   it = (TObjArrayIter *) fSynapses.MakeIterator();
   for (Int_t neuron1 = 0; neuron1 < nNeurons_this; neuron1++) {
      for (Int_t neuron2 = 0; neuron2 < nNeurons_next; neuron2++) {
         TLine* synapse = new TLine(xStep*(layer+1), yStep_this*(neuron1+1),
                                    xStep*(layer+2), yStep_next*(neuron2+1));
         ...
         theSynapse = (TSynapse *) it->Next();
         if (!theSynapse) continue;
         // Line width and style encode the weight relative to the maximum.
         synapse->SetLineWidth(Int_t((theSynapse->GetWeight()/maxWeight)*10.));
         synapse->SetLineStyle(1);
         if (((TMath::Abs(theSynapse->GetWeight())/maxWeight)*10.) < 0.5)  synapse->SetLineStyle(2);
         if (((TMath::Abs(theSynapse->GetWeight())/maxWeight)*10.) < 0.25) synapse->SetLineStyle(3);
...
// Second pass: draw the neurons as markers.
for (layer = 0; layer < nLayers; layer++) {
   Float_t nNeurons = 0;
   ...
   nNeurons = input.CountChar(',')+1;
   ...
   else if (layer == nLayers-1) {
      ...
      nNeurons = output.CountChar(',')+1;
   ...
   Int_t end = hidden.Index(":", beg + 1);
   ...
   Int_t num = atoi(TString(hidden(beg, end - beg)).Data());
   ...
   end = hidden.Index(":", beg + 1);
   ...
   if (layer == cnt) nNeurons = num;
   ...
   Int_t num = atoi(TString(hidden(beg, hidden.Length() - beg)).Data());
   ...
   if (layer == cnt) nNeurons = num;
   ...
   Float_t yStep = 1./(nNeurons+1.);
   for (Int_t neuron = 0; neuron < nNeurons; neuron++) {
      TMarker* m = new TMarker(xStep*(layer+1), yStep*(neuron+1), 20);
      m->SetMarkerColor(4);
...
// Input and output labels.
const TObjArray *inpL = input.Tokenize(" ,");
const Int_t nrItems = inpL->GetLast()+1;
Float_t yStep = 1./(nrItems+1);
for (Int_t item = 0; item < nrItems; item++) {
   const TString brName = ((TObjString *)inpL->At(item))->GetString();
   TText* label = new TText(0.5*xStep, yStep*(item+1), brName.Data());
...
Int_t numOutNodes = fLastLayer.GetEntriesFast();
yStep = 1./(numOutNodes+1);
for (Int_t outnode = 0; outnode < numOutNodes; outnode++) {
   ...
   if (neuron && neuron->GetName()) {
      TText* label = new TText(xStep*nLayers, ...
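A minimal call sketch: after training, Draw() renders the layers, the synapses (with line width and style encoding the relative weight, as above) and the input/output labels on the current pad:

   TCanvas c("net", "Network structure");
   mlp->Draw();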