
Machine Learning

Classes

class  CompleteOOBInfo
class  CorrelationVisitor
class  OnlineLearnVisitor
class  OOB_Error
class  OOB_PerTreeError
class  ProblemSpec< LabelType >
 problem specification class for the random forest. More...
class  RandomForest< LabelType, PreprocessorTag >
class  RandomForestOptions
 Options object for the random forest. More...
class  RandomForestProgressVisitor
class  Sampler< Random >
 Create random samples from a sequence of indices. More...
class  SamplerOptions
 Options object for the Sampler class. More...
class  StopVisiting
class  VariableImportanceVisitor

Namespaces

namespace  vigra::rf::visitors

Enumerations

enum  Problem_t
 

problem types


Functions

template<class RF , class PR , class SM , class ST >
void after_tree_ip_impl (RF &rf, PR &pr, SM &sm, ST &st, int index)
template<class A , class B , class C , class D , class E , class F , class G , class H , class I , class J >
detail::VisitorNode< A, detail::VisitorNode< B, detail::VisitorNode< C, detail::VisitorNode< D, detail::VisitorNode< E, detail::VisitorNode< F, detail::VisitorNode< G, detail::VisitorNode< H, detail::VisitorNode< I, detail::VisitorNode< J > > > > > > > > > > create_visitor (A &a, B &b, C &c, D &d, E &e, F &f, G &g, H &h, I &i, J &j)
template<class A , class B , class C , class D , class E , class F , class G , class H , class I >
detail::VisitorNode< A, detail::VisitorNode< B, detail::VisitorNode< C, detail::VisitorNode< D, detail::VisitorNode< E, detail::VisitorNode< F, detail::VisitorNode< G, detail::VisitorNode< H, detail::VisitorNode< I > > > > > > > > > create_visitor (A &a, B &b, C &c, D &d, E &e, F &f, G &g, H &h, I &i)
template<class A , class B , class C , class D , class E , class F , class G , class H >
detail::VisitorNode< A, detail::VisitorNode< B, detail::VisitorNode< C, detail::VisitorNode< D, detail::VisitorNode< E, detail::VisitorNode< F, detail::VisitorNode< G, detail::VisitorNode< H > > > > > > > > create_visitor (A &a, B &b, C &c, D &d, E &e, F &f, G &g, H &h)
template<class A , class B , class C , class D , class E , class F , class G >
detail::VisitorNode< A, detail::VisitorNode< B, detail::VisitorNode< C, detail::VisitorNode< D, detail::VisitorNode< E, detail::VisitorNode< F, detail::VisitorNode< G > > > > > > > create_visitor (A &a, B &b, C &c, D &d, E &e, F &f, G &g)
template<class A , class B , class C , class D , class E , class F >
detail::VisitorNode< A, detail::VisitorNode< B, detail::VisitorNode< C, detail::VisitorNode< D, detail::VisitorNode< E, detail::VisitorNode< F > > > > > > create_visitor (A &a, B &b, C &c, D &d, E &e, F &f)
template<class A , class B , class C , class D , class E >
detail::VisitorNode< A, detail::VisitorNode< B, detail::VisitorNode< C, detail::VisitorNode< D, detail::VisitorNode< E > > > > > create_visitor (A &a, B &b, C &c, D &d, E &e)
template<class A , class B , class C , class D >
detail::VisitorNode< A, detail::VisitorNode< B, detail::VisitorNode< C, detail::VisitorNode< D > > > > create_visitor (A &a, B &b, C &c, D &d)
template<class A , class B , class C >
detail::VisitorNode< A, detail::VisitorNode< B, detail::VisitorNode< C > > > create_visitor (A &a, B &b, C &c)
template<class A , class B >
detail::VisitorNode< A, detail::VisitorNode< B > > create_visitor (A &a, B &b)
template<class A >
detail::VisitorNode< A > create_visitor (A &a)
template<class U , class C1 , class U2 , class C2 , class Split_t , class Stop_t , class Visitor_t , class Random_t >
void reLearnTree (MultiArrayView< 2, U, C1 > const &features, MultiArrayView< 2, U2, C2 > const &response, int treeId, Visitor_t visitor_, Split_t split_, Stop_t stop_, Random_t &random)
void reset_tree (int tree_id)
double return_val ()
double return_val ()
void sample ()
 VariableImportanceVisitor (int rep_cnt=10)
template<class Tree , class Split , class Region , class Feature_t , class Label_t >
void visit_after_split (Tree &tree, Split &split, Region &parent, Region &leftChild, Region &rightChild, Feature_t &features, Label_t &labels)
template<class Tree , class Split , class Region , class Feature_t , class Label_t >
void visit_after_split (Tree &tree, Split &split, Region &parent, Region &leftChild, Region &rightChild, Feature_t &features, Label_t &labels)
template<class Tree , class Split , class Region , class Feature_t , class Label_t >
void visit_after_split (Tree &tree, Split &split, Region &parent, Region &leftChild, Region &rightChild, Feature_t &features, Label_t &labels)
template<class Tree , class Split , class Region , class Feature_t , class Label_t >
void visit_after_split (Tree &tree, Split &split, Region &parent, Region &leftChild, Region &rightChild, Feature_t &features, Label_t &labels)
template<class RF , class PR , class SM , class ST >
void visit_after_tree (RF &rf, PR &pr, SM &sm, ST &st, int index)
template<class RF , class PR , class SM , class ST >
void visit_after_tree (RF &rf, PR &pr, SM &sm, ST &st, int index)
template<class RF , class PR , class SM , class ST >
void visit_after_tree (RF &rf, PR &pr, SM &sm, ST &st, int index)
template<class RF , class PR , class SM , class ST >
void visit_after_tree (RF &rf, PR &pr, SM &sm, ST &st, int index)
template<class RF , class PR , class SM , class ST >
void visit_after_tree (RF &rf, PR &pr, SM &sm, ST &st, int index)
template<class RF , class PR , class SM , class ST >
void visit_after_tree (RF &rf, PR &pr, SM &sm, ST &st, int index)
template<class RF , class PR , class SM , class ST >
void visit_after_tree (RF &rf, PR &pr, SM &sm, ST &st, int index)
template<class RF , class PR >
void visit_at_beginning (RF const &rf, PR const &pr)
template<class RF , class PR >
void visit_at_beginning (RF &rf, const PR &pr)
template<class RF , class PR >
void visit_at_beginning (RF const &rf, PR const &pr)
template<class RF , class PR >
void visit_at_end (RF const &rf, PR const &pr)
template<class RF , class PR >
void visit_at_end (RF const &rf, PR const &pr)
template<class RF , class PR >
void visit_at_end (RF &rf, PR &pr)
template<class RF , class PR >
void visit_at_end (RF &rf, PR &pr)
template<class RF , class PR >
void visit_at_end (RF &rf, PR &pr)
template<class RF , class PR >
void visit_at_end (RF &rf, PR &pr)
template<class RF , class PR >
void visit_at_end (RF const &rf, PR const &pr)
template<class TR , class IntT , class TopT , class Feat >
void visit_external_node (TR &tr, IntT index, TopT node_t, Feat &features)
template<class TR , class IntT , class TopT , class Feat >
void visit_internal_node (TR &tr, IntT index, TopT node_t, Feat &features)
template<class TR , class IntT , class TopT , class Feat >
void visit_internal_node (TR &tr, IntT index, TopT node_t, Feat &features)

Variables

MultiArray< 2, double > breiman_per_tree
MultiArray< 2, double > corr_noise
MultiArray< 2, double > distance
MultiArray< 2, double > noise
ArrayVector< int > numChoices
double oob_breiman
double oob_breiman
double oob_mean
MultiArray< 2, double > oob_per_tree
double oob_per_tree2
double oob_std
MultiArray< 4, double > oobroc_per_tree
MultiArray< 2, double > similarity

Learning

The following functions differ in the degree of customization allowed



template<class U , class C1 , class U2 , class C2 , class Split_t , class Stop_t , class Visitor_t , class Random_t >
void learn (MultiArrayView< 2, U, C1 > const &features, MultiArrayView< 2, U2, C2 > const &response, Visitor_t visitor, Split_t split, Stop_t stop, Random_t const &random)
 learn on data with custom config and random number generator

prediction



template<class U , class C >
LabelType predictLabel (MultiArrayView< 2, U, C >const &features)
template<class U , class C >
LabelType predictLabel (MultiArrayView< 2, U, C > const &features, ArrayVectorView< double > prior) const
 predict a label with features and class priors
template<class U , class C , class Stop >
LabelType predictLabel (MultiArrayView< 2, U, C >const &features, Stop &stop) const
 predict a label given a feature.
template<class U , class C1 , class T , class C2 >
void predictLabels (MultiArrayView< 2, U, C1 >const &features, MultiArrayView< 2, T, C2 > &labels) const
 predict multiple labels with given features
template<class U , class C1 , class T , class C2 , class Stop >
void predictLabels (MultiArrayView< 2, U, C1 >const &features, MultiArrayView< 2, T, C2 > &labels, Stop &stop) const
template<class U , class C1 , class T , class C2 , class Stop >
void predictProbabilities (MultiArrayView< 2, U, C1 >const &features, MultiArrayView< 2, T, C2 > &prob, Stop &stop) const
 predict the class probabilities for multiple labels
template<class U , class C1 , class T , class C2 >
void predictProbabilities (MultiArrayView< 2, U, C1 >const &features, MultiArrayView< 2, T, C2 > &prob) const
 predict the class probabilities for multiple labels
template<class T1 , class T2 , class C >
void predictProbabilities (OnlinePredictionSet< T1 > &predictionSet, MultiArrayView< 2, T2, C > &prob)
template<class U , class C1 , class T , class C2 >
void predictRaw (MultiArrayView< 2, U, C1 >const &features, MultiArrayView< 2, T, C2 > &prob) const


Detailed Description

This module provides classification algorithms that map features to labels or label probabilities. Look at the RandomForest class first for an overview of most of the functionality provided, as well as use cases.
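A minimal usage sketch (hedged: shapes, option values and variable names are illustrative; the simple learn(features, labels) overload and the MultiArrayShape typedef are assumed from the RandomForest and MultiArray interfaces):

    #include <vigra/multi_array.hxx>
    #include <vigra/random_forest.hxx>

    using namespace vigra;

    void random_forest_example()
    {
        int num_samples = 100, num_features = 5;

        // N x M feature matrix and N x 1 label matrix (to be filled with data)
        MultiArray<2, double> features(MultiArrayShape<2>::type(num_samples, num_features));
        MultiArray<2, int>    labels(MultiArrayShape<2>::type(num_samples, 1));

        // train a forest of 255 trees with default split and stop criteria
        RandomForest<int> rf(RandomForestOptions().tree_count(255));
        rf.learn(features, labels);

        // predict labels for new data with the same number of features
        MultiArray<2, double> test(MultiArrayShape<2>::type(10, num_features));
        MultiArray<2, int>    predicted(MultiArrayShape<2>::type(10, 1));
        rf.predictLabels(test, predicted);
    }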


Function Documentation

void visit_after_split ( Tree &  tree,
Split &  split,
Region &  parent,
Region &  leftChild,
Region &  rightChild,
Feature_t &  features,
Label_t &  labels 
) [inherited]

do something after the Split has decided how to process the Region (Stack entry)

Parameters:
tree reference to the tree that is currently being learned
split reference to the split object
parent current stack entry which was used to decide the split
leftChild left stack entry that will be pushed
rightChild right stack entry that will be pushed.
features features matrix
labels label matrix
See also:
RF_Traits::StackEntry_t

Reimplemented in OnlineLearnVisitor, VariableImportanceVisitor, and CorrelationVisitor.
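
As a hedged illustration of what such a reimplementation can look like, the hypothetical visitor below counts splits; it assumes that custom visitors derive from vigra::rf::visitors::VisitorBase and override only the hooks they need, using the signature shown above:

    #include <vigra/random_forest.hxx>

    // Hypothetical visitor: counts how many splits are performed during learning.
    // (Assumption: custom visitors derive from rf::visitors::VisitorBase and
    // reimplement only the hooks they care about.)
    class SplitCountVisitor : public vigra::rf::visitors::VisitorBase
    {
      public:
        int split_count_;

        SplitCountVisitor() : split_count_(0) {}

        template<class Tree, class Split, class Region, class Feature_t, class Label_t>
        void visit_after_split(Tree &, Split &, Region &, Region &, Region &,
                               Feature_t &, Label_t &)
        {
            ++split_count_;   // one more region has been split
        }
    };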

void visit_after_tree ( RF &  rf,
PR &  pr,
SM &  sm,
ST &  st,
int  index 
) [inherited]

do something after each tree has been learned

Parameters:
rf reference to the random forest object that called this visitor
pr reference to the preprocessor that processed the input
sm reference to the sampler object
st reference to the first stack entry
index index of current tree

Reimplemented in ClusterImportanceVisitor, OnlineLearnVisitor, OOB_PerTreeError, OOB_Error, CompleteOOBInfo, VariableImportanceVisitor, and RandomForestProgressVisitor.

void visit_at_end ( RF const &  rf,
PR const &  pr 
) [inherited]

do something after all trees have been learned

Parameters:
rf reference to the random forest object that called this visitor
pr reference to the preprocessor that processed the input

Reimplemented in RandomForestProgressVisitor, and CorrelationVisitor.

void visit_at_beginning ( RF const &  rf,
PR const &  pr 
) [inherited]

do something before learning starts

Parameters:
rf reference to the random forest object that called this visitor
pr reference to the Processor class used.

Reimplemented in ClusterImportanceVisitor, and RandomForestProgressVisitor.

void visit_external_node ( TR &  tr,
IntT  index,
TopT  node_t,
Feat &  features 
) [inherited]

do something while traversing the tree after it has been learned (external nodes)

Parameters:
tr reference to the tree object that called this visitor
index index in the topology_ array we currently are at
node_t type of the node (one of the e_... node tags)
features feature matrix
See also:
NodeTags

You can create the node by switching on node_tag and using the corresponding Node object, or, if you do not care about the node type, use the NodeBase class.

void visit_internal_node ( TR &  tr,
IntT  index,
TopT  node_t,
Feat &  features 
) [inherited]

do something when visiting an internal node after it has been learned

See also:
visit_external_node

Reimplemented in OnlineLearnVisitor.

double return_val (  )  [inherited]

Return a double value. The value of the first visitor encountered that has a return value is returned by the RandomForest::learn() method, or -1.0 if no visitor with a return value exists. This functionality exists mainly so that the OOB visitor can return the OOB error rate, as in the old version of the random forest.

Reimplemented in StopVisiting.

double return_val (  )  [inherited]

Return a double value. The value of the first visitor encountered that has a return value is returned by the RandomForest::learn() method, or -1.0 if no visitor with a return value exists. This functionality exists mainly so that the OOB visitor can return the OOB error rate, as in the old version of the random forest.

Reimplemented from VisitorBase.

detail::VisitorNode<A> vigra::rf::visitors::create_visitor ( A &  a  ) 

factory method to be used with RandomForest::learn()
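
For example (a sketch: OOB_Error and VariableImportanceVisitor are the visitor classes listed above, and the learn() signature with visitor, split, stop and random arguments documented below is assumed):

    #include <vigra/multi_array.hxx>
    #include <vigra/random_forest.hxx>

    using namespace vigra;
    using namespace vigra::rf::visitors;

    double learn_with_visitors(MultiArrayView<2, double> const & features,
                               MultiArrayView<2, int>    const & labels)
    {
        OOB_Error                 oob;       // out-of-bag error estimate
        VariableImportanceVisitor var_imp;   // permutation-based variable importance

        RandomForest<int> rf(RandomForestOptions().tree_count(100));

        // chain both visitors into a single argument for learn();
        // rf_default() keeps the default split, stop and random settings
        rf.learn(features, labels,
                 create_visitor(oob, var_imp),
                 rf_default(), rf_default(), rf_default());

        return oob.oob_breiman;   // ensemble OOB error (see Variables below)
    }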

detail::VisitorNode<A, detail::VisitorNode<B> > vigra::rf::visitors::create_visitor ( A &  a,
B &  b 
)

factory method to be used with RandomForest::learn()

detail::VisitorNode<A, detail::VisitorNode<B, detail::VisitorNode<C> > > vigra::rf::visitors::create_visitor ( A &  a,
B &  b,
C &  c 
)

factory method to be used with RandomForest::learn()

detail::VisitorNode<A, detail::VisitorNode<B, detail::VisitorNode<C, detail::VisitorNode<D> > > > vigra::rf::visitors::create_visitor ( A &  a,
B &  b,
C &  c,
D &  d 
)

factory method to be used with RandomForest::learn()

detail::VisitorNode<A, detail::VisitorNode<B, detail::VisitorNode<C, detail::VisitorNode<D, detail::VisitorNode<E> > > > > vigra::rf::visitors::create_visitor ( A &  a,
B &  b,
C &  c,
D &  d,
E &  e 
)

factory method to be used with RandomForest::learn()

detail::VisitorNode<A, detail::VisitorNode<B, detail::VisitorNode<C, detail::VisitorNode<D, detail::VisitorNode<E, detail::VisitorNode<F> > > > > > vigra::rf::visitors::create_visitor ( A &  a,
B &  b,
C &  c,
D &  d,
E &  e,
F &  f 
)

factory method to be used with RandomForest::learn()

detail::VisitorNode<A, detail::VisitorNode<B, detail::VisitorNode<C, detail::VisitorNode<D, detail::VisitorNode<E, detail::VisitorNode<F, detail::VisitorNode<G> > > > > > > vigra::rf::visitors::create_visitor ( A &  a,
B &  b,
C &  c,
D &  d,
E &  e,
F &  f,
G &  g 
)

factory method to be used with RandomForest::learn()

detail::VisitorNode<A, detail::VisitorNode<B, detail::VisitorNode<C, detail::VisitorNode<D, detail::VisitorNode<E, detail::VisitorNode<F, detail::VisitorNode<G, detail::VisitorNode<H> > > > > > > > vigra::rf::visitors::create_visitor ( A &  a,
B &  b,
C &  c,
D &  d,
E &  e,
F &  f,
G &  g,
H &  h 
)

factory method to be used with RandomForest::learn()

detail::VisitorNode<A, detail::VisitorNode<B, detail::VisitorNode<C, detail::VisitorNode<D, detail::VisitorNode<E, detail::VisitorNode<F, detail::VisitorNode<G, detail::VisitorNode<H, detail::VisitorNode<I> > > > > > > > > vigra::rf::visitors::create_visitor ( A &  a,
B &  b,
C &  c,
D &  d,
E &  e,
F &  f,
G &  g,
H &  h,
I &  i 
)

factory method to be used with RandomForest::learn()

detail::VisitorNode<A, detail::VisitorNode<B, detail::VisitorNode<C, detail::VisitorNode<D, detail::VisitorNode<E, detail::VisitorNode<F, detail::VisitorNode<G, detail::VisitorNode<H, detail::VisitorNode<I, detail::VisitorNode<J> > > > > > > > > > vigra::rf::visitors::create_visitor ( A &  a,
B &  b,
C &  c,
D &  d,
E &  e,
F &  f,
G &  g,
H &  h,
I &  i,
J &  j 
)

factory method to be used with RandomForest::learn()

void visit_at_beginning ( RF &  rf,
const PR &  pr 
) [inherited]

Initialize, set the number of trees

void reset_tree ( int  tree_id  )  [inherited]

Reset a tree

void visit_after_tree ( RF &  rf,
PR &  pr,
SM &  sm,
ST &  st,
int  index 
) [inherited]

simply increase the tree count

Reimplemented from VisitorBase.

void visit_after_split ( Tree &  tree,
Split &  split,
Region &  parent,
Region &  leftChild,
Region &  rightChild,
Feature_t &  features,
Label_t &  labels 
) [inherited]

do something after the Split has decided how to process the Region (Stack entry)

Parameters:
tree reference to the tree that is currently being learned
split reference to the split object
parent current stack entry which was used to decide the split
leftChild left stack entry that will be pushed
rightChild right stack entry that will be pushed.
features features matrix
labels label matrix
See also:
RF_Traits::StackEntry_t

Reimplemented from VisitorBase.

void visit_internal_node ( TR &  tr,
IntT  index,
TopT  node_t,
Feat &  features 
) [inherited]

do something when visiting an internal node during getToLeaf

Remembers the node as the last node id (used to find the parent of the last external node); also adjusts class counts and borders.

Reimplemented from VisitorBase.

void visit_after_tree ( RF &  rf,
PR &  pr,
SM &  sm,
ST &  st,
int  index 
) [inherited]

does the basic calculation per tree

Reimplemented from VisitorBase.

void visit_at_end ( RF &  rf,
PR &  pr 
) [inherited]

Does the normalisation

void visit_after_tree ( RF &  rf,
PR &  pr,
SM &  sm,
ST &  st,
int  index 
) [inherited]

do something after each tree has been learned

Parameters:
rf reference to the random forest object that called this visitor
pr reference to the preprocessor that processed the input
sm reference to the sampler object
st reference to the first stack entry
index index of current tree

Reimplemented from VisitorBase.

void visit_at_end ( RF &  rf,
PR &  pr 
) [inherited]

Normalise variable importance after the number of trees is known.

void visit_after_tree ( RF &  rf,
PR &  pr,
SM &  sm,
ST &  st,
int  index 
) [inherited]

do something after each tree has been learned

Parameters:
rf reference to the random forest object that called this visitor
pr reference to the preprocessor that processed the input
sm reference to the sampler object
st reference to the first stack entry
index index of current tree

Reimplemented from VisitorBase.

void visit_at_end ( RF &  rf,
PR &  pr 
) [inherited]

Normalise variable importance after the number of trees is known.

VariableImportanceVisitor ( int  rep_cnt = 10  )  [inherited]

Constructor

Parameters:
rep_cnt (default: 10) how often the permutation should take place. Set to 1 to make the calculation faster (but possibly less stable).
void visit_after_split ( Tree &  tree,
Split &  split,
Region &  parent,
Region &  leftChild,
Region &  rightChild,
Feature_t &  features,
Label_t &  labels 
) [inherited]

calculates impurity-decrease based variable importance after every split.

Reimplemented from VisitorBase.

void after_tree_ip_impl ( RF &  rf,
PR &  pr,
SM &  sm,
ST &  st,
int  index 
) [inherited]

compute permutation-based variable importance. (Only an array of size oob_sample_count x 1 is created, as opposed to oob_sample_count x feature_count in the other method.)

See also:
FieldProxy
void visit_after_tree ( RF &  rf,
PR &  pr,
SM &  sm,
ST &  st,
int  index 
) [inherited]

calculate permutation-based importance after every tree has been learned. The default behaviour is that this happens out of place. If you have very big data sets and want to avoid copying of data, set the in_place_ flag to true.

Reimplemented from VisitorBase.

void visit_at_end ( RF &  rf,
PR &  pr 
) [inherited]

Normalise variable importance after the number of trees is known.

void visit_after_tree ( RF &  rf,
PR &  pr,
SM &  sm,
ST &  st,
int  index 
) [inherited]

do something after each tree has been learned

Parameters:
rf reference to the random forest object that called this visitor
pr reference to the preprocessor that processed the input
sm reference to the sampler object
st reference to the first stack entry
index index of current tree

Reimplemented from VisitorBase.

void visit_at_end ( RF const &  rf,
PR const &  pr 
) [inherited]

do something after all trees have been learned

Parameters:
rf reference to the random forest object that called this visitor
pr reference to the preprocessor that processed the input

Reimplemented from VisitorBase.

void visit_at_beginning ( RF const &  rf,
PR const &  pr 
) [inherited]

do something before learning starts

Parameters:
rf reference to the random forest object that called this visitor
pr reference to the Processor class used.

Reimplemented from VisitorBase.

void visit_at_end ( RF const &  rf,
PR const &  pr 
) [inherited]

do something after all trees have been learned

Parameters:
rf reference to the random forest object that called this visitor
pr reference to the preprocessor that processed the input

Reimplemented from VisitorBase.

void visit_after_split ( Tree &  tree,
Split &  split,
Region &  parent,
Region &  leftChild,
Region &  rightChild,
Feature_t &  features,
Label_t &  labels 
) [inherited]

do something after the Split has decided how to process the Region (Stack entry)

Parameters:
tree reference to the tree that is currently being learned
split reference to the split object
parent current stack entry which was used to decide the split
leftChild left stack entry that will be pushed
rightChild right stack entry that will be pushed.
features features matrix
labels label matrix
See also:
RF_Traits::StackEntry_t

Reimplemented from VisitorBase.

void reLearnTree ( MultiArrayView< 2, U, C1 > const &  features,
MultiArrayView< 2, U2, C2 > const &  response,
int  treeId,
Visitor_t  visitor_,
Split_t  split_,
Stop_t  stop_,
Random_t &  random 
) [inherited]

Todo:
replace this crappy class. It uses function pointers and is making the code slower, according to me. Comment from Nathan: This is copied from Rahul, so me = Rahul.

void learn ( MultiArrayView< 2, U, C1 > const &  features,
MultiArrayView< 2, U2, C2 > const &  response,
Visitor_t  visitor,
Split_t  split,
Stop_t  stop,
Random_t const &  random 
) [inherited]

learn on data with custom config and random number generator

Parameters:
features an N x M matrix containing N samples with M features
response an N x D matrix containing the corresponding response. Current split functors assume D to be 1 and ignore any additional columns. This is not enforced, to allow future support for uncertain labels, label-independent strata etc. The Preprocessor specified during construction should be able to handle the features and the labels. See also: SplitFunctor, Preprocessing
visitor visitor that is applied after each split, after each tree, and at the end. Use rf_default() for the default value (no visitors). See also: rf::visitors
split split functor used to calculate each split. Use rf_default() for the default value (GiniSplit). See also: rf::split
stop stopping predicate used to decide when splitting should stop. Use rf_default() for the default value (EarlyStoppStd).
random random number generator to be used. Use rf_default() for the default value (RandomMT19337).
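A hedged sketch of a fully customized call with a fixed random seed (assumptions: RandomMT19937 lives in <vigra/random.hxx> and accepts a seed in its constructor; rf_default() stands in for the remaining arguments as described above):

    #include <vigra/multi_array.hxx>
    #include <vigra/random.hxx>
    #include <vigra/random_forest.hxx>

    using namespace vigra;

    void learn_reproducibly(MultiArrayView<2, double> const & features,
                            MultiArrayView<2, int>    const & labels)
    {
        RandomForest<int> rf(RandomForestOptions().tree_count(64));

        RandomMT19937 random(42);     // fixed seed => reproducible training (assumed ctor)

        rf.learn(features, labels,
                 rf_default(),        // no visitors
                 rf_default(),        // default split functor (GiniSplit)
                 rf_default(),        // default stopping predicate (EarlyStoppStd)
                 random);             // custom random number generator
    }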
LabelType predictLabel ( MultiArrayView< 2, U, C >const &  features,
Stop &  stop 
) const [inherited]

predict a label given a feature.

Parameters:
features: a 1 x featureCount matrix containing the data point to be predicted (this only works in a classification setting)
stop: early stopping criterion
Returns:
double value representing the class. You can use the predictLabels() function together with the rf.external_parameter().class_type_ attribute to get back the same type used during learning.
LabelType predictLabel ( MultiArrayView< 2, U, C > const &  features,
ArrayVectorView< double >  prior 
) const [inherited]

predict a label with features and class priors

Parameters:
features: same as above.
prior: iterator to the prior weighting of the classes
Returns:
same as above.
void sample (  )  [inherited]

Create a new sample.

void predictLabels ( MultiArrayView< 2, U, C1 >const &  features,
MultiArrayView< 2, T, C2 > &  labels 
) const [inherited]

predict multiple labels with given features

Parameters:
features: an n x featureCount matrix containing the data points to be predicted (this only works in a classification setting)
labels: an n x 1 matrix, passed by reference, to store the output.
void predictProbabilities ( MultiArrayView< 2, U, C1 >const &  features,
MultiArrayView< 2, T, C2 > &  prob 
) const [inherited]

predict the class probabilities for multiple labels

Parameters:
features same as above
prob an n x class_count_ matrix, passed by reference, to store the class probabilities
void predictProbabilities ( MultiArrayView< 2, U, C1 >const &  features,
MultiArrayView< 2, T, C2 > &  prob,
Stop &  stop 
) const [inherited]

predict the class probabilities for multiple labels

Parameters:
features same as above
prob an n x class_count_ matrix, passed by reference, to store the class probabilities
stop early stopping criterion
See also:
EarlyStopping
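
A short sketch of the probability prediction call (assumptions: rf is an already trained forest, and class_count() returns the number of classes seen during learning):

    #include <vigra/multi_array.hxx>
    #include <vigra/random_forest.hxx>

    using namespace vigra;

    void predict_probabilities(RandomForest<int> const & rf,
                               MultiArrayView<2, double> const & test_features)
    {
        // one row per test sample, one column per class
        MultiArray<2, double> prob(
            MultiArrayShape<2>::type(test_features.shape()[0], rf.class_count()));

        rf.predictProbabilities(test_features, prob);   // prob(i, c) = P(class c | sample i)
    }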

Variable Documentation

double oob_breiman [inherited]

Ensemble oob error rate

MultiArray<2, double> oob_per_tree [inherited]

OOB Error rate of each individual tree

double oob_mean [inherited]

Mean of oob_per_tree

double oob_std [inherited]

Standard deviation of oob_per_tree

double oob_breiman [inherited]

Ensemble OOB error

See also:
OOB_Error
double oob_per_tree2 [inherited]

Per Tree OOB error calculated as in OOB_PerTreeError (Ulli's version)

MultiArray<2, double> breiman_per_tree [inherited]

Column containing the development of the Ensemble error rate with increasing number of trees

MultiArray<4, double> oobroc_per_tree [inherited]

4-dimensional array containing the development of the confusion matrices with the number of trees - can be used to estimate ROC curves etc.

oobroc_per_tree(ii, jj, kk, ll) is the entry of the confusion matrix after ll trees with true label = ii and predicted label = jj.

Explanation of the third index:

Two-class case: kk = 0 ... (treeCount-1); the threshold on the probability of class 0 is kk/(treeCount-1). More than two classes: kk = 0; the threshold on the probability is set by the argMax of the probability array.
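
For instance, one confusion matrix can be pulled out of the 4-D array roughly like this (hedged sketch: the use of bindOuter(), which fixes the outermost index of a MultiArrayView, and the loop bounds are assumptions about the MultiArray interface):

    #include <iostream>
    #include <vigra/multi_array.hxx>

    // print the confusion matrix after all trees for threshold index kk = 0
    void print_final_confusion_matrix(vigra::MultiArray<4, double> & oobroc_per_tree)
    {
        int tree_count = oobroc_per_tree.shape()[3];
        int kk = 0;                                        // threshold index (see above)

        vigra::MultiArrayView<2, double> cm =
            oobroc_per_tree.bindOuter(tree_count - 1)      // fix ll: after all trees
                           .bindOuter(kk);                 // fix the threshold index

        for (int ii = 0; ii < cm.shape()[0]; ++ii)         // true label
            for (int jj = 0; jj < cm.shape()[1]; ++jj)     // predicted label
                std::cout << "true " << ii << " -> predicted " << jj
                          << ": " << cm(ii, jj) << "\n";
    }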

MultiArray<2, double> noise [inherited]

additional noise features.

MultiArray<2, double> corr_noise [inherited]

how well a noise column can describe a partition created on variable ii.

MultiArray<2, double> similarity [inherited]

Similarity Matrix

(numberOfFeatures + 1) x (numberOfFeatures + 1) matrix gini_missc

  • rows normalized by the number of times the column was chosen
  • mean of corr_noise subtracted
  • symmetrised.
MultiArray<2, double> distance [inherited]

Distance matrix: 1 - similarity

ArrayVector<int> numChoices [inherited]

How often variable ii was chosen
