#ifndef ROOT_TMultiLayerPerceptron
#define ROOT_TMultiLayerPerceptron
#include "TObjArray.h"
class TTreeFormulaManager;
const char* training = "Entry$%2==0",
const char* test = "",
const char* extF = "",
const char* extD = "");
const char* weight, TTree* data = 0,
const char* training = "Entry$%2==0",
const char* test = "",
const char* extF = "",
const char* extD = "");
const char* extF = "",
const char* extD = "");
const char* weight, TTree* data,
const char* extF = "",
const char* extD = "");
void Train(Int_t nEpoch, Option_t* option = "text", Double_t minE=0);
Double_t Result(Int_t event, Int_t index = 0) const;
Double_t GetError(Int_t event) const;
void DrawResult(Int_t index = 0, Option_t* option = "test") const;
Bool_t DumpWeights(Option_t* filename = "-") const;
Double_t Evaluate(Int_t index, Double_t* params) const;
void Export(Option_t* filename = "NNfunction", Option_t* language = "C++") const;
virtual void Draw(Option_t *option="");
Int_t& prevStart, Int_t& prevStop,
void Shuffle(Int_t*, Int_t) const;
void MLP_Line(Double_t*, Double_t*, Double_t);
Double_t DerivDir(Double_t *)
scalar product between gradient and direction = derivative along direction
Bool_t LineSearch(Double_t *, Double_t *)
Search along the line defined by direction.
void SteepestDir(Double_t *)
Sets the search direction to steepest descent.
void Randomize() const
Randomize the weights.
void BuildHiddenLayers(TString &)
Builds hidden layers.
void BFGSDir(TMatrixD &, Double_t *)
Computes the direction for the BFGS algorithm as the product between the Hessian estimate (bfgsh) and...
Double_t fEpsilon
Epsilon - used in stochastic minimisation - Default=0.
TTreeFormulaManager * fManager
TTreeFormulaManager for the weight and neurons.
void SetTestDataSet(TEventList *test)
Sets the Test dataset.
Double_t GetEpsilon() const
Double_t fCurrentTreeWeight
weight of the current tree in a chain
void SetEpsilon(Double_t eps)
Sets Epsilon - used in stochastic minimisation (look at the constructor for the complete description ...
void Export(Option_t *filename="NNfunction", Option_t *language="C++") const
Exports the NN as a function for any non-ROOT-dependent code. Supported languages are: only C++ ...
Double_t Evaluate(Int_t index, Double_t *params) const
Returns the Neural Net for a given set of input parameters #parameters must equal #input neurons...
void AttachData()
Connects the TTree to Neurons in input and output layers.
void SetEtaDecay(Double_t ed)
Sets EtaDecay - Eta *= EtaDecay at each epoch (look at the constructor for the complete description o...
void SetEta(Double_t eta)
Sets Eta - used in stochastic minimisation (look at the constructor for the complete description of l...
TMultiLayerPerceptron::ELearningMethod GetLearningMethod() const
void SetData(TTree *)
Set the data source.
void Shuffle(Int_t *, Int_t) const
Shuffle the Int_t index[n] in input.
Double_t GetSumSquareError() const
Error on the output for a given event.
TObjArray fNetwork
Collection of the neurons in the network
void ConjugateGradientsDir(Double_t *, Double_t)
Sets the search direction to conjugate gradient direction beta should be: ||g_{(t+1)}||^2 / ||g_{(t)}...
void DrawResult(Int_t index=0, Option_t *option="test") const
Draws the neural net output It produces an histogram with the output for the two datasets.
bool GetBFGSH(TMatrixD &, TMatrixD &, TMatrixD &)
Computes the hessian matrix using the BFGS update algorithm.
Double_t fEta
Eta - used in stochastic minimisation - Default=0.1.
you should not use this method at all Int_t Int_t Double_t Double_t Double_t Int_t Double_t Double_t Double_t tau
TMultiLayerPerceptron & operator=(const TMultiLayerPerceptron &)
virtual ~TMultiLayerPerceptron()
Destructor.
void SetReset(Int_t reset)
Sets number of epochs between two resets of the search direction to the steepest descent.
Bool_t DumpWeights(Option_t *filename="-") const
Dumps the weights to a text file.
Int_t fCurrentTree
index of the current tree in a chain
TNeuron::ENeuronType fType
void BuildOneHiddenLayer(const TString &sNumNodes, Int_t &layer, Int_t &prevStart, Int_t &prevStop, Bool_t lastLayer)
Builds a hidden layer, updates the number of layers.
void Train(Int_t nEpoch, Option_t *option="text", Double_t minE=0)
Train the network.
TNeuron::ENeuronType fOutType
virtual void Draw(Option_t *option="")
Draws the network structure.
Bool_t fTestOwner
internal flag whether one has to delete fTest or not
Double_t GetError(Int_t event) const
Error on the output for a given event.
Bool_t LoadWeights(Option_t *filename="")
Loads the weights from a text file conforming to the format defined by DumpWeights.
Int_t fReset
number of epochs between two resets of the search direction to the steepest descent - Default=50
void SetTrainingDataSet(TEventList *train)
Sets the Training dataset.
void SetGammaDelta(TMatrixD &, TMatrixD &, Double_t *)
Sets the gamma (g_{(t+1)}-g_{(t)}) and delta (w_{(t+1)}-w_{(t)}) vectors Gamma is computed here...
void ComputeDEDw() const
Compute the DEDw = sum on all training events of dedw for each weight normalized by the number of eve...
Double_t fEtaDecay
EtaDecay - Eta *= EtaDecay at each epoch - Default=1.
Double_t fDelta
Delta - used in stochastic minimisation - Default=0.
void SetDelta(Double_t delta)
Sets Delta - used in stochastic minimisation (look at the constructor for the complete description of...
Double_t fTau
Tau - used in line search - Default=3.
void MLP_Line(Double_t *, Double_t *, Double_t)
Sets the weights to a point along a line Weights are set to [origin + (dist * dir)].
TEventList * fTest
EventList defining the events in the test dataset.
void MLP_Batch(Double_t *)
One step for the batch (stochastic) method.
ELearningMethod fLearningMethod
The Learning Method.
void MLP_Stochastic(Double_t *)
One step for the stochastic method buffer should contain the previous dw vector and will be updated...
Double_t GetEtaDecay() const
Bool_t fTrainingOwner
internal flag whether one has to delete fTraining or not
Double_t GetCrossEntropyBinary() const
Cross entropy error for sigmoid output neurons, for a given event.
void ExpandStructure()
Expand the structure of the first layer.
TMultiLayerPerceptron()
Default constructor.
void BuildFirstLayer(TString &)
Instantiates the neurons in input. Inputs are normalised and the type is set to kOff (simple forward o...
void SetEventWeight(const char *)
Set the event weight.
void GetEntry(Int_t) const
Load an entry into the network.
Double_t fLastAlpha
internal parameter used in line search
Double_t Result(Int_t event, Int_t index=0) const
Computes the output for a given event.
Double_t GetCrossEntropy() const
Cross entropy error for a softmax output neuron, for a given event.
void BuildLastLayer(TString &, Int_t)
Builds the output layer. Neurons are linear combinations of input, by default.
void SetLearningMethod(TMultiLayerPerceptron::ELearningMethod method)
Sets the learning method.
void SetTau(Double_t tau)
Sets Tau - used in line search (look at the constructor for the complete description of learning meth...
void BuildNetwork()
Instantiates the network from the description.
TNeuron::ENeuronType GetType() const
Double_t GetDelta() const
TString GetStructure() const
TTreeFormula * fEventWeight
formula representing the event weight