X-Git-Url: https://www.fleuret.org/cgi-bin/gitweb/gitweb.cgi?p=folded-ctf.git;a=blobdiff_plain;f=decision_tree.h;h=5adcb0f3e25e33e569b41d248694d1a517e91635;hp=59dba57c7b3e4a28c4d8c3540f60312547d32ff7;hb=aed34255065b18c445d096f51bd2091833810a81;hpb=d922ad61d35e9a6996730bec24b16f8bf7bc426c diff --git a/decision_tree.h b/decision_tree.h index 59dba57..5adcb0f 100644 --- a/decision_tree.h +++ b/decision_tree.h @@ -12,10 +12,22 @@ // You should have received a copy of the GNU General Public License // // along with this program. If not, see <http://www.gnu.org/licenses/>. // // // -// Written by Francois Fleuret, (C) IDIAP // +// Written by Francois Fleuret // +// (C) Idiap Research Institute // +// // // Contact <francois.fleuret@idiap.ch> for comments & bug reports // /////////////////////////////////////////////////////////////////////////// +/* + + An implementation of the classifier with a decision tree. Each node + simply thresholds one of the component, and is chosen for maximum + loss reduction locally during training. The leaves are labelled with + the classifier response, which is chosen again for maximum loss + reduction. + + */ + #ifndef DECISION_TREE_H #define DECISION_TREE_H @@ -26,14 +38,14 @@ class DecisionTree : public Classifier { + static const int min_nb_samples_for_split = 5; + int _feature_index; scalar_t _threshold; scalar_t _weight; DecisionTree *_subtree_lesser, *_subtree_greater; - static const int min_nb_samples_for_split = 5; - void pick_best_split(SampleSet *sample_set, scalar_t *loss_derivatives);