Commit ece3483c authored by ehebrard

neurips

parent 7c0791b4
......@@ -85,13 +85,13 @@ int main(int argc, char *argv[]) {
WeightedDataset<int> input;
////// READING
try {
read_binary(input, opt);
} catch (const std::exception &e) {
// try {
// read_binary(input, opt);
// } catch (const std::exception &e) {
if (opt.verbosity >= DTOptions::NORMAL)
cout << "c binarizing...\n";
read_non_binary(input, opt);
}
// }
if (opt.sample < 1)
input.sample(opt.sample);
......
......@@ -65,6 +65,9 @@ int run_algorithm(DTOptions &opt) {
cout << "d examples=" << A.numExample() << " features=" << A.numFeature()
<< endl;
// A.perfectTree();
// A.minimize_error();
////// SOLVING
if (opt.mindepth) {
if (opt.minsize)
......@@ -72,10 +75,10 @@ int run_algorithm(DTOptions &opt) {
else
A.minimize_error_depth();
} else {
if (opt.minsize)
A.set_size_objective();
if (opt.minsize)
A.set_size_objective();
A.minimize_error();
}
}
Tree<E_t> sol = A.getSolution();
......
......@@ -5,7 +5,7 @@
#include <iostream>
#include <random>
#include <vector>
#include <cmath>
// #include <cmath>
#include "CmdLine.hpp"
#include "Partition.hpp"
......@@ -30,31 +30,31 @@ namespace blossom {
/// this needs to be templated with "T" and should be 0 for integral types
/// (e.g., static_cast<T>(1.e-9) should work)
// #define FLOAT_PRECISION std::numeric_limits<T>::epsilon()
#define FLOAT_PRECISION static_cast<T>(1.e-6)
template <typename T>
typename std::enable_if<std::is_integral<T>::value, bool>::type
equal(const T &a, const T &b) {
return a == b;
}
template <typename T>
typename std::enable_if<std::is_integral<T>::value, bool>::type
lt(const T &a, const T &b) {
return a < b;
}
template <typename T>
typename std::enable_if<std::is_floating_point<T>::value, bool>::type
equal(const T &a, const T &b) {
return std::fabs(a - b) < FLOAT_PRECISION;
}
template <typename T>
typename std::enable_if<std::is_floating_point<T>::value, bool>::type
lt(const T &a, const T &b) {
return a + FLOAT_PRECISION < b;
}
// #define FLOAT_PRECISION static_cast<T>(1.e-6)
//
// template <typename T>
// typename std::enable_if<std::is_integral<T>::value, bool>::type
// equal(const T &a, const T &b) {
// return a == b;
// }
//
// template <typename T>
// typename std::enable_if<std::is_integral<T>::value, bool>::type
// lt(const T &a, const T &b) {
// return a < b;
// }
//
// template <typename T>
// typename std::enable_if<std::is_floating_point<T>::value, bool>::type
// equal(const T &a, const T &b) {
// return std::fabs(a - b) < FLOAT_PRECISION;
// }
//
// template <typename T>
// typename std::enable_if<std::is_floating_point<T>::value, bool>::type
// lt(const T &a, const T &b) {
// return a + FLOAT_PRECISION < b;
// }
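(Not part of this commit: a minimal sketch of the type-dependent tolerance that the comment above asks for, assuming a helper named float_precision and a 1.e-6 floating-point default; because the tolerance collapses to zero for integral types, one pair of comparators covers both cases.)

#include <type_traits>

// Illustrative only: tolerance is 0 for integral T, 1.e-6 otherwise.
template <typename T>
constexpr T float_precision() {
  return std::is_integral<T>::value ? static_cast<T>(0) : static_cast<T>(1.e-6);
}

// With a zero tolerance, these comparisons are exact for integers.
template <typename T> bool approx_equal(const T &a, const T &b) {
  return (a >= b ? a - b : b - a) <= float_precision<T>();
}

template <typename T> bool approx_lt(const T &a, const T &b) {
  return a + float_precision<T>() < b;
}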
template <typename E_t> class CardinalityError;
......@@ -100,6 +100,8 @@ public:
E_t getUbError() const;
void perfectTree();
size_t getUbDepth() const;
size_t getUbSize() const;
......
......@@ -47,6 +47,9 @@ public:
template <typename boolean_function>
void split(Part &l1, Part &l2, boolean_function condition);
void addFalse(vector<int>::iterator elt_ptr);
void addTrue(vector<int>::iterator elt_ptr);
/*!@name Miscellaneous*/
//@{
std::ostream &display(std::ostream &os) const;
......@@ -82,6 +85,8 @@ public:
template <typename boolean_function>
void branch(const int node, const int x, const int y,
boolean_function condition);
void branch(const int node, const int x, const int y);
/*!@name Miscellaneous*/
//@{
......
......@@ -263,7 +263,7 @@ public:
// cout << ClassicEncoding<T>::value_set.size() << " " << (num_examples) ;
if (ClassicEncoding<T>::value_set.size() < sqrt(num_examples)) {
if (ClassicEncoding<T>::value_set.size() < sqrt(num_examples) or ClassicEncoding<T>::value_set.size() < 10) {
// cout << " full\n";
full_encoding();
......
......@@ -154,7 +154,9 @@ inline void WeightedDataset<E_t>::addExample(rIter beg_row, rIter end_row,
int f{0};
for (auto x{beg_row}; x != end_row; ++x) {
assert(*x == 0 or *x == 1);
// assert(*x == 0 or *x == 1);
if (*x != 0 and *x != 1)
throw 0;
if (x - beg_row != column) {
if (*x)
data[y].back().set(f);
......
......@@ -9,6 +9,7 @@
#include <iomanip>
#include <iostream>
#include <math.h>
#include <cmath>
// #define DEBUG_MODE
......@@ -29,6 +30,32 @@ floating_point fixedwidthfloat(const floating_point f, const int width) {
return (static_cast<floating_point>(i) / m);
}
#define FLOAT_PRECISION static_cast<T>(1.e-6)
template <typename T>
typename std::enable_if<std::is_integral<T>::value, bool>::type
equal(const T &a, const T &b) {
return a == b;
}
template <typename T>
typename std::enable_if<std::is_integral<T>::value, bool>::type
lt(const T &a, const T &b) {
return a < b;
}
template <typename T>
typename std::enable_if<std::is_floating_point<T>::value, bool>::type
equal(const T &a, const T &b) {
return std::fabs(a - b) < FLOAT_PRECISION;
}
template <typename T>
typename std::enable_if<std::is_floating_point<T>::value, bool>::type
lt(const T &a, const T &b) {
return a + FLOAT_PRECISION < b;
}
}
#endif // _BLOSSOM_UTILS_HPP
......@@ -334,6 +334,7 @@ void BacktrackingAlgorithm<ErrorPolicy, E_t>::print_new_best() {
<< search_size << " mem=" << setw(3) << wood.size()
<< " time=" << setprecision(max(4, static_cast<int>(log10(t))))
<< fixedwidthfloat(t, 3) << right << endl;
}
template <template <typename> class ErrorPolicy, typename E_t>
......@@ -601,7 +602,8 @@ void BacktrackingAlgorithm<ErrorPolicy, E_t>::sort_features(const int node) {
for (auto f{feature[node]}; f != end_feature[node]; ++f)
f_error[*f] = get_feature_error(node, *f);
sort(feature[node], end_feature[node],
[&](const int a, const int b) { return f_error[a] < f_error[b]; });
// [&](const int a, const int b) { return f_error[a] < f_error[b]; });
[&](const int a, const int b) { return (f_error[a] < f_error[b] or (f_error[a] == f_error[b] and a < b)); });
break;
case DTOptions::ENTROPY:
for (auto f{feature[node]}; f != end_feature[node]; ++f)
......@@ -830,12 +832,12 @@ bool BacktrackingAlgorithm<ErrorPolicy, E_t>::update_upperbound(const int node)
max_error[node] = err;
max_size[node] = sz;
#ifdef PRINTTRACE
if (PRINTTRACE)
cout << "new best for node " << node << ": feat=" << *feature[node]
<< ", error=" << max_error[node] << ", size=" << max_size[node]
<< endl;
#endif
// #ifdef PRINTTRACE
// if (PRINTTRACE)
// cout << "new best for node " << node << ": feat=" << *feature[node]
// << ", error=" << max_error[node] << ", size=" << max_size[node]
// << endl;
// #endif
if (node > 0) {
assert(parent[node] >= 0);
......@@ -863,6 +865,7 @@ bool BacktrackingAlgorithm<ErrorPolicy, E_t>::backtrack() {
#ifdef PRINTTRACE
if (PRINTTRACE) {
cout << setw(3) << decision.size();
for (auto i{0}; i < decision.size(); ++i)
cout << " ";
cout << "backtrack to " << backtrack_node << endl;
......@@ -874,13 +877,13 @@ bool BacktrackingAlgorithm<ErrorPolicy, E_t>::backtrack() {
(equal<E_t>(tree_error[backtrack_node],max_error[backtrack_node]) and
tree_size[backtrack_node] > max_size[backtrack_node])) {
#ifdef PRINTTRACE
if (PRINTTRACE) // and not updt)
cout << "new best for node " << backtrack_node
<< ": feat=" << *feature[backtrack_node]
<< ", error=" << max_error[backtrack_node]
<< ", size=" << max_size[backtrack_node] << endl;
#endif
// #ifdef PRINTTRACE
// if (PRINTTRACE) // and not updt)
// cout << "new best for node " << backtrack_node
// << ": feat=" << *feature[backtrack_node]
// << ", error=" << max_error[backtrack_node]
// << ", size=" << max_size[backtrack_node] << endl;
// #endif
tree_error[backtrack_node] = max_error[backtrack_node];
tree_size[backtrack_node] = max_size[backtrack_node];
......@@ -888,12 +891,12 @@ bool BacktrackingAlgorithm<ErrorPolicy, E_t>::backtrack() {
} else {
#ifdef PRINTTRACE
if (PRINTTRACE)
cout << "no improvement for node " << backtrack_node
<< ": feat=" << *feature[backtrack_node] << "("
<< ") -> free the best subtrees" << endl;
#endif
// #ifdef PRINTTRACE
// if (PRINTTRACE)
// cout << "no improvement for node " << backtrack_node
// << ": feat=" << *feature[backtrack_node] << "("
// << ") -> free the best subtrees" << endl;
// #endif
for (auto i{0}; i < 2; ++i)
if (child[i][backtrack_node] >= 0 and
......@@ -914,12 +917,13 @@ bool BacktrackingAlgorithm<ErrorPolicy, E_t>::backtrack() {
(not max_entropy(backtrack_node, *feature[backtrack_node])));
dead_end = (
// (depth[backtrack_node] == ub_depth - 1 and
// (node_error(backtrack_node) >= ub_error)) or
// current_error >= ub_error or
(lt<E_t>(0,ub_error) and equal<E_t>(max_error[backtrack_node], 0)) or
(lt<E_t>(0, ub_error) and equal<E_t>(max_error[backtrack_node], 0)) or
no_feature(backtrack_node) or
max_entropy(backtrack_node, *feature[backtrack_node])
or ( options.bounding and fail(backtrack_node) )
);
max_entropy(backtrack_node, *feature[backtrack_node]) or
(options.bounding and fail(backtrack_node)));
// backtrack again
if (dead_end) {
......@@ -1013,12 +1017,8 @@ void BacktrackingAlgorithm<ErrorPolicy, E_t>::branch(const int node, const int f
cout << setw(3) << decision.size();
for (auto i{0}; i < decision.size(); ++i)
cout << " ";
cout << "branch on " << node << " with " << f << " children: " << c[0]
// << " (" << P[0][c[0]].count() << "/" << P[1][c[0]].count() << ") and
// "
// << c[1] << "(" << P[0][c[1]].count() << "/" << P[1][c[1]].count()
// << ")"
<< endl;
cout << "branch on " << node << " with " << f << " ("
<< (end_feature[node] - feature[node]) << ")";
}
#endif
......@@ -1065,6 +1065,36 @@ void BacktrackingAlgorithm<ErrorPolicy, E_t>::branch(const int node, const int f
++current_size;
}
}
#ifdef PRINTTRACE
if (PRINTTRACE) {
// cout << setw(3) << decision.size()-1;
// for (auto i{0}; i < decision.size()-1; ++i)
// cout << " ";
// cout << "branch on " << node << " (" << P[node].count() << "/"
// << (usize(node) - P[node].count()) << ") with " << f
// cout << " children: " << child[0][node] << " (" << P[0][c[0]].count() << "/" << P[1][c[0]].count() << ") and " << c[1] << " ("
// << P[0][c[1]].count() << "/" << P[1][c[1]].count()
// << ")" << endl;
cout << " children: " << c[0] << " (" << P[0][c[0]].count() << "/" << P[1][c[0]].count() << ") and " << c[1] << " ("
<< P[0][c[1]].count() << "/" << P[1][c[1]].count()
<< ")" << endl;
// cout << " children: " << c[0] << " ("
// << (child[0][node] >= 0 ? error_policy.get_total(0, child[0][node]) :
// (child[0][node] == -1 ? error_policy.get_total(0, node) : 0))
// << "/"
// << (child[0][node] >= 0 ? error_policy.get_total(1, child[0][node]) :
// (child[0][node] == -2 ? error_policy.get_total(1, node) : 0))
// << ") and "
// << c[1] << " ("
// << (child[1][node] >= 0 ? error_policy.get_total(0, child[1][node]) :
// (child[1][node] == -1 ? error_policy.get_total(0, node) : 0))
// << "/"
// << (child[1][node] >= 0 ? error_policy.get_total(1, child[1][node]) :
// (child[1][node] == -2 ? error_policy.get_total(1, node) : 0))
// << ")" << endl;
}
#endif
update_upperbound(node);
}
......@@ -1172,16 +1202,16 @@ void BacktrackingAlgorithm<ErrorPolicy, E_t>::expend() {
break;
}
if (max_entropy(selected_node, *feature[selected_node])) {
cout << selected_node << " " << *feature[selected_node] << " "
<< get_feature_frequency(0, selected_node, *feature[selected_node])
<< " / " << error_policy.get_total(0, selected_node) << " || "
<< get_feature_frequency(1, selected_node, *feature[selected_node])
<< " / " << error_policy.get_total(1, selected_node) << endl;
}
assert(not max_entropy(selected_node, *feature[selected_node]));
// if (max_entropy(selected_node, *feature[selected_node])) {
//
// cout << selected_node << " " << *feature[selected_node] << " "
// << get_feature_frequency(0, selected_node, *feature[selected_node])
// << " / " << error_policy.get_total(0, selected_node) << " || "
// << get_feature_frequency(1, selected_node, *feature[selected_node])
// << " / " << error_policy.get_total(1, selected_node) << endl;
// }
//
// assert(not max_entropy(selected_node, *feature[selected_node]));
}
if (options.width > 1)
......@@ -1190,6 +1220,11 @@ void BacktrackingAlgorithm<ErrorPolicy, E_t>::expend() {
assert(feature[selected_node] >= ranked_feature[selected_node].begin() and
feature[selected_node] < end_feature[selected_node]);
// cout << "(" << (end_feature[selected_node] - feature[selected_node]) << ")";
// for(auto f{feature[selected_node]}; f!=end_feature[selected_node]; ++f)
// cout << setw(3) << *f << " [" << get_feature_error(selected_node, *f) << "]";
// cout << endl;
branch(selected_node, *(feature[selected_node]));
......@@ -1203,7 +1238,8 @@ void BacktrackingAlgorithm<ErrorPolicy, E_t>::initialise_search() {
num_level_zero_feature = num_feature;
setReverse();
setReverse();
start_time = cpu_time();
// search_size = 0;
......@@ -1374,6 +1410,11 @@ void BacktrackingAlgorithm<ErrorPolicy, E_t>::minimize_error() {
print_new_best();
}
template <template <typename> class ErrorPolicy, typename E_t>
void BacktrackingAlgorithm<ErrorPolicy, E_t>::perfectTree() {
ub_error = min_positive<E_t>();
}
template <template<typename> class ErrorPolicy, typename E_t>
void BacktrackingAlgorithm<ErrorPolicy, E_t>::minimize_error_depth() {
......@@ -1520,10 +1561,10 @@ void BacktrackingAlgorithm<ErrorPolicy, E_t>::clearExamples() {
template <template <typename> class ErrorPolicy, typename E_t>
bool BacktrackingAlgorithm<ErrorPolicy, E_t>::fail(const int b) const {
#ifdef PRINTTRACE
if (PRINTTRACE)
cout << "bound from " << b << endl;
#endif
// #ifdef PRINTTRACE
// if (PRINTTRACE)
// cout << "bound from " << b << endl;
// #endif
E_t lbe{0};
auto lbs{0};
......@@ -1544,15 +1585,15 @@ bool BacktrackingAlgorithm<ErrorPolicy, E_t>::fail(const int b) const {
lbs += min_size[child[i][p]];
++lbs;
}
#ifdef PRINTTRACE
if (PRINTTRACE)
cout << "parent " << p << " (ub=" << ube << "/" << ubs << ", lb=" << lbe
<< "/" << lbs << ") ["
<< (child[0][p] >= 0 ? min_error[child[0][p]] : 0) << "/"
<< (child[1][p] >= 0 ? min_error[child[1][p]] : 0) << " | "
<< (child[0][p] >= 0 ? min_size[child[0][p]] : 1) << "/"
<< (child[1][p] >= 0 ? min_size[child[1][p]] : 1) << "]\n";
#endif
// #ifdef PRINTTRACE
// if (PRINTTRACE)
// cout << "parent " << p << " (ub=" << ube << "/" << ubs << ", lb=" << lbe
// << "/" << lbs << ") ["
// << (child[0][p] >= 0 ? min_error[child[0][p]] : 0) << "/"
// << (child[1][p] >= 0 ? min_error[child[1][p]] : 0) << " | "
// << (child[0][p] >= 0 ? min_size[child[0][p]] : 1) << "/"
// << (child[1][p] >= 0 ? min_size[child[1][p]] : 1) << "]\n";
// #endif
if (lt<E_t>(ube, lbe) or
(equal<E_t>(lbe, ube) and (lbs >= ubs or not size_matters))) {
......
......@@ -31,6 +31,11 @@ size_t TreePartition::addNode() {
void TreePartition::remNode() { part.pop_back(); }
void TreePartition::branch(const int node, const int x, const int y) {
part[x].begin_idx = part[x].end_idx = part[node].begin_idx;
part[y].begin_idx = part[y].end_idx = part[node].end_idx;
}
Part &TreePartition::operator[](const int i) { return part[i]; }
const Part &TreePartition::operator[](const int i) const { return part[i]; }
......@@ -93,6 +98,14 @@ vector<int>::const_iterator Part::end() const {
return element.begin() + end_idx;
}
void Part::addTrue(vector<int>::iterator elt_ptr) {
std::swap(*elt_ptr, element[end_idx++]);
}
void Part::addFalse(vector<int>::iterator elt_ptr) {
std::swap(*elt_ptr, element[--begin_idx]);
}
std::ostream& Part::display(std::ostream& os) const {
assert(begin() <= end());
......
......@@ -15,6 +15,12 @@ void SparseSet::reserve(const size_t n) {
list_.push_back(list_.size());
}
}
void SparseSet::resize(const size_t n) {
reserve(n);
fill();
}
//
// void SparseSet::save(size_t &stamp1, size_t &stamp2) { stamp1 = size_; stamp2
// = start_; }
......
......@@ -272,4 +272,16 @@ keywords = {expert systems, induction, classification, knowledge acquisition, in
pages={173--186},
year={1972},
publisher={SIAM}
}
\ No newline at end of file
}
@inproceedings{NEURIPS2019_ac52c626,
author = {Hu, Xiyang and Rudin, Cynthia and Seltzer, Margo},
booktitle = {Advances in Neural Information Processing Systems},
editor = {H. Wallach and H. Larochelle and A. Beygelzimer and F. d\textquotesingle Alch\'{e}-Buc and E. Fox and R. Garnett},
pages = {},
publisher = {Curran Associates, Inc.},
title = {Optimal Sparse Decision Trees},
url = {https://proceedings.neurips.cc/paper/2019/file/ac52c626afc10d4075708ac4c778ddfc-Paper.pdf},
volume = {32},
year = {2019}
}
......@@ -11,24 +11,24 @@
\texttt{lymph} & 0 & 0.00$^*$ & 0 & 0.02$^*$ & 0 & 0.00$^*$ & 0 & 1.2$^*$ & 30 & 576 & 0 & 0.00\\
\texttt{iris-bin} & 1 & 0.00$^*$ & 1 & 0.00$^*$ & 1 & 0.01$^*$ & 1 & 21$^*$ & - & - & 1 & 0.00\\
\texttt{monk2-bin} & 0 & 0.00$^*$ & 0 & 0.01$^*$ & 0 & 0.00$^*$ & 0 & 0.89$^*$ & - & - & 0 & 0.00\\
\texttt{wine3} & 16 & 272 & 28 & 1555 & - & - & 19 & $\mathsmaller{\geq}1$h & - & - & 19 & 0.01\\
\texttt{wine1} & 22 & 545 & 33 & 1439 & - & - & 27 & $\mathsmaller{\geq}1$h & - & - & 25 & 0.01\\
\texttt{wine2} & 24 & 399 & 43 & 625 & - & - & 29 & $\mathsmaller{\geq}1$h & - & - & 29 & 0.02\\
\texttt{wine3} & 16 & 272 & 28 & 1555 & - & - & 19 & $\mathsmaller{\geq}1$h & - & - & 19 & 0.01\\
\texttt{wine-bin} & 0 & 0.00$^*$ & 0 & 0.01$^*$ & 0 & 0.00$^*$ & 0 & 0.86$^*$ & - & - & 0 & 0.01\\
\texttt{audiology} & 0 & 0.00$^*$ & 0 & 0.02$^*$ & 0 & 0.00$^*$ & 0 & 1.4$^*$ & 25 & 17 & 0 & 0.00\\
\texttt{heart-cleveland} & 0 & 0.00$^*$ & 0 & 0.04$^*$ & 0 & 0.08$^*$ & 0 & 1.2$^*$ & 127 & 7.6 & 0 & 0.00\\
\texttt{primary-tumor} & 15 & 0.00$^*$ & 15 & 2088 & - & - & 82 & $\mathsmaller{\geq}1$h & 31 & 3329 & 20 & 0.00\\
\texttt{ionosphere} & 0 & 0.00$^*$ & 0 & 2.1$^*$ & 0 & 110$^*$ & 0 & 8.1$^*$ & 225 & 0.00 & 0 & 0.01\\
\texttt{Ionosphere-bin} & 0 & 0.00$^*$ & 0 & 0.06$^*$ & 0 & 0.09$^*$ & 0 & 23$^*$ & - & - & 1 & 0.00\\
\texttt{ionosphere} & 0 & 0.00$^*$ & 0 & 2.1$^*$ & 0 & 110$^*$ & 0 & 8.1$^*$ & 225 & 0.00 & 0 & 0.01\\
\texttt{vote} & 0 & 0.00$^*$ & 0 & 0.01$^*$ & 0 & 0.00$^*$ & 0 & 2.3$^*$ & 132 & 9.9 & 0 & 0.00\\
\texttt{forest-fires} & 113 & 942 & 174 & 3052 & - & - & 247 & $\mathsmaller{\geq}1$h & - & - & 146 & 0.02\\
\texttt{balance-scale-bin} & 0 & 19$^*$ & 0 & 3.2$^*$ & 0 & 1.5$^*$ & 0 & 16$^*$ & - & - & 6 & 0.00\\
\texttt{soybean} & 2 & 0.00$^*$ & 7 & 1766 & - & - & 92 & $\mathsmaller{\geq}1$h & 84 & 11 & 2 & 0.00\\
\texttt{australian-credit} & 0 & 0.04$^*$ & 0 & 69$^*$ & - & - & 0 & 464$^*$ & 357 & 0.00 & 12 & 0.01\\
\texttt{breast-cancer} & 0 & 0.00$^*$ & 0 & 0.02$^*$ & 0 & 0.00$^*$ & 0 & 2.4$^*$ & 239 & 0.00 & 0 & 0.00\\
\texttt{breast-wisconsin} & 0 & 0.00$^*$ & 0 & 0.08$^*$ & 0 & 3.4$^*$ & 0 & 7.8$^*$ & 444 & 0.00 & 0 & 0.00\\
\texttt{diabetes} & 0 & 0.67$^*$ & 0 & 2157$^*$ & - & - & 0 & 463$^*$ & 500 & 0.00 & 35 & 0.01\\
\texttt{breast-cancer} & 0 & 0.00$^*$ & 0 & 0.02$^*$ & 0 & 0.00$^*$ & 0 & 2.4$^*$ & 239 & 0.00 & 0 & 0.00\\
\texttt{IndiansDiabetes-bin} & 8 & 4.7$^*$ & 43 & 433 & - & - & 268 & $\mathsmaller{\geq}1$h & - & - & 63 & 0.00\\
\texttt{diabetes} & 0 & 0.67$^*$ & 0 & 2157$^*$ & - & - & 0 & 463$^*$ & 500 & 0.00 & 35 & 0.01\\
\texttt{anneal} & 34 & 23$^*$ & 121 & 1210 & - & - & 187 & $\mathsmaller{\geq}1$h & 625 & 0.00 & 59 & 0.00\\
\texttt{vehicle} & 0 & 0.00$^*$ & 0 & 0.34$^*$ & 0 & 0.37$^*$ & 0 & 4.2$^*$ & - & - & 0 & 0.01\\
\texttt{titanic} & 35 & 3059 & 215 & 2804 & - & - & 342 & $\mathsmaller{\geq}1$h & - & - & 78 & 0.01\\
......@@ -40,13 +40,13 @@
\texttt{banknote-bin} & 2 & 0.00$^*$ & 2 & 463$^*$ & 2 & 738$^*$ & 610 & $\mathsmaller{\geq}1$h & - & - & 2 & 0.00\\
\texttt{yeast} & 28 & 1008 & 237 & 3555 & - & - & 463 & $\mathsmaller{\geq}1$h & 463 & 0.00 & 185 & 0.01\\
\texttt{winequality-red-bin} & 2 & 0.00$^*$ & 5 & 1076 & - & - & 10 & $\mathsmaller{\geq}1$h & - & - & 2 & 0.00\\
\texttt{car} & 0 & 0.26$^*$ & 0 & 0.03$^*$ & 0 & 0.03$^*$ & 0 & 3.3$^*$ & 518 & 0.00 & 11 & 0.00\\
\texttt{car\_evaluation-bin} & 80 & 0.00$^*$ & 80 & 31$^*$ & 80 & 9.2$^*$ & 80 & $\mathsmaller{\geq}1$h & - & - & 80 & 0.00\\
\texttt{car} & 0 & 0.26$^*$ & 0 & 0.03$^*$ & 0 & 0.03$^*$ & 0 & 3.3$^*$ & 518 & 0.00 & 11 & 0.00\\
\texttt{segment} & 0 & 0.00$^*$ & 0 & 0.36$^*$ & 0 & 0.08$^*$ & 0 & 1.9$^*$ & - & - & 0 & 0.01\\
\texttt{seismic\_bumps-bin} & 38 & 2591 & 128 & 143 & - & - & 170 & $\mathsmaller{\geq}1$h & - & - & 101 & 0.01\\
\texttt{splice-1} & 5 & 1160 & 1319 & 189 & - & - & 1535 & $\mathsmaller{\geq}1$h & - & - & 12 & 0.05\\
\texttt{kr-vs-kp} & 0 & 1897$^*$ & 295 & 1333 & - & - & 784 & $\mathsmaller{\geq}1$h & - & - & 12 & 0.01\\
\texttt{chess-bin} & 0 & 0.00$^*$ & 0 & 0.06$^*$ & 0 & 0.01$^*$ & 0 & 0.66$^*$ & - & - & 0 & 0.00\\
\texttt{kr-vs-kp} & 0 & 1897$^*$ & 295 & 1333 & - & - & 784 & $\mathsmaller{\geq}1$h & - & - & 12 & 0.01\\
\texttt{hypothyroid} & 17 & 0.96$^*$ & 115 & 3040 & - & - & 277 & $\mathsmaller{\geq}1$h & - & - & 31 & 0.01\\
\texttt{Statlog\_satellite-bin} & 3 & 219 & 116 & 2837 & - & - & 1072 & $\mathsmaller{\geq}1$h & - & - & 15 & 0.13\\
\texttt{bank\_conv-bin} & 169 & 2794 & 371 & 2954 & - & - & 521 & $\mathsmaller{\geq}1$h & - & - & 207 & 0.10\\
......@@ -57,23 +57,23 @@
\texttt{surgical-deepnet} & 965 & 2865 & 2638 & 3583 & - & - & 3690 & $\mathsmaller{\geq}1$h & - & - & 1089 & 14\\
\texttt{HTRU\_2-bin} & 219 & 550 & 399 & 2695 & 669 & $\mathsmaller{\geq}1$h & 1639 & $\mathsmaller{\geq}1$h & - & - & 293 & 0.08\\
\texttt{magic04-bin} & 1635 & 2746 & 3456 & 2402 & 3839 & $\mathsmaller{\geq}1$h & 6688 & $\mathsmaller{\geq}1$h & - & - & 2145 & 0.13\\
\texttt{letter} & 0 & 79$^*$ & 636 & 3519 & 725 & $\mathsmaller{\geq}1$h & 813 & $\mathsmaller{\geq}1$h & - & - & 21 & 0.31\\
\texttt{letter\_recognition-bin} & 11 & 712 & 649 & 2356 & 736 & $\mathsmaller{\geq}1$h & 32 & $\mathsmaller{\geq}1$h & - & - & 28 & 0.45\\
\texttt{letter} & 0 & 79$^*$ & 636 & 3519 & 725 & $\mathsmaller{\geq}1$h & 813 & $\mathsmaller{\geq}1$h & - & - & 21 & 0.31\\
\texttt{taiwan\_binarised} & 4217 & 1001 & 5908 & 2274 & - & - & 6636 & $\mathsmaller{\geq}1$h & - & - & 4710 & 0.54\\
\texttt{default\_credit-bin} & 4547 & 2019 & 5838 & 1485 & 5412 & $\mathsmaller{\geq}1$h & 6636 & $\mathsmaller{\geq}1$h & - & - & 4762 & 1.3\\
\texttt{adult\_discretized} & 3841 & 2632 & 5549 & 2236 & - & - & 7511 & $\mathsmaller{\geq}1$h & - & - & 4148 & 0.12\\
\texttt{Statlog\_shuttle-bin} & 0 & 0.02$^*$ & 0 & 49$^*$ & 0 & 99$^*$ & 0 & 16$^*$ & - & - & 0 & 3.6\\
\texttt{bank} & 3242 & 800 & 5287 & 0.00 & 4826 & $\mathsmaller{\geq}1$h & 5289 & $\mathsmaller{\geq}1$h & - & - & 3327 & 102\\
\texttt{mnist\_3} & 1079 & 376 & 4352 & 105 & 5171 & $\mathsmaller{\geq}1$h & 6131 & $\mathsmaller{\geq}1$h & - & - & 1169 & 6.7\\
\texttt{mnist\_0} & 383 & 413 & 2556 & 579 & 3314 & $\mathsmaller{\geq}1$h & 5923 & $\mathsmaller{\geq}1$h & - & - & 477 & 8.5\\
\texttt{mnist\_6} & 965 & 1742 & 2684 & 512 & 2699 & $\mathsmaller{\geq}1$h & 5918 & $\mathsmaller{\geq}1$h & - & - & 1211 & 7.4\\
\texttt{mnist\_4} & 801 & 398 & 4709 & 1076 & 5580 & $\mathsmaller{\geq}1$h & 5842 & $\mathsmaller{\geq}1$h & - & - & 1010 & 10\\
\texttt{mnist\_1} & 331 & 360 & 3450 & 3550 & 4544 & $\mathsmaller{\geq}1$h & 6742 & $\mathsmaller{\geq}1$h & - & - & 439 & 7.8\\
\texttt{mnist\_7} & 1082 & 1277 & 3483 & 265 & - & - & 6265 & $\mathsmaller{\geq}1$h & - & - & 1263 & 11\\
\texttt{mnist\_5} & 1973 & 491 & 3539 & 87 & 4379 & $\mathsmaller{\geq}1$h & 5421 & $\mathsmaller{\geq}1$h & - & - & 2266 & 6.9\\
\texttt{mnist\_1} & 331 & 360 & 3450 & 3550 & 4544 & $\mathsmaller{\geq}1$h & 6742 & $\mathsmaller{\geq}1$h & - & - & 439 & 7.8\\
\texttt{mnist\_2} & 1522 & 2520 & 3927 & 1659 & - & - & 5958 & $\mathsmaller{\geq}1$h & - & - & 1959 & 8.7\\
\texttt{mnist\_3} & 1079 & 376 & 4352 & 105 & 5171 & $\mathsmaller{\geq}1$h & 6131 & $\mathsmaller{\geq}1$h & - & - & 1169 & 6.7\\
\texttt{mnist\_9} & 1594 & 2124 & 4590 & 306 & - & - & 5949 & $\mathsmaller{\geq}1$h & - & - & 1722 & 7.1\\
\texttt{mnist\_8} & 696 & 2141 & 3583 & 207 & - & - & 5851 & $\mathsmaller{\geq}1$h & - & - & 916 & 7.9\\
\texttt{mnist\_7} & 1082 & 1277 & 3483 & 265 & - & - & 6265 & $\mathsmaller{\geq}1$h & - & - & 1263 & 11\\