Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

[WIP] GH-6769: multinomial dt yuliia #16310

Open
wants to merge 32 commits into
base: master
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from 13 commits
Commits
Show all changes
32 commits
Select commit Hold shift + click to select a range
46bf5cf
GH-6769: correct and prepare for multinomial
syzonyuliia-h2o Oct 12, 2023
44ae3c2
GH-6769: adapt binning for the multinomial classification
syzonyuliia-h2o Oct 22, 2023
476f0d0
GH-6769: adapt splitting for the multinomial classification
syzonyuliia-h2o Oct 22, 2023
9680a8d
GH-6769: enable DT creation for the multinomial classification
syzonyuliia-h2o Oct 22, 2023
ad3035a
GH-6769: detect and fix bugs and improvements
syzonyuliia-h2o Oct 26, 2023
631866a
GH-6769: fix tests
syzonyuliia-h2o Oct 26, 2023
b85c558
GH-6769: correct and prepare for multinomial
syzonyuliia-h2o Oct 12, 2023
c6ab1e7
GH-6769: adapt binning for the multinomial classification
syzonyuliia-h2o Oct 22, 2023
c689119
GH-6769: adapt splitting for the multinomial classification
syzonyuliia-h2o Oct 22, 2023
9c0f9b2
GH-6769: enable DT creation for the multinomial classification
syzonyuliia-h2o Oct 22, 2023
c954c1b
GH-6769: detect and fix bugs and improvements
syzonyuliia-h2o Oct 26, 2023
840b8f6
GH-6769: fix tests
syzonyuliia-h2o Oct 26, 2023
8e087c8
Merge remote-tracking branch 'origin/GH-6769_multinomial_DT_yuliia' i…
syzonyuliia-h2o Jun 19, 2024
def4e00
GH-6769: clean code
syzonyuliia-h2o Jul 22, 2024
24abdbe
GH-6769: enable multiclass probabilities in prediction
syzonyuliia-h2o Jul 22, 2024
9b1b472
GH-6769: refactor for multiclass entropy
syzonyuliia-h2o Jul 22, 2024
3cfcfa7
GH-6769: fix multiclass specifics
syzonyuliia-h2o Jul 22, 2024
5d38297
GH-6769: clean comments
syzonyuliia-h2o Jul 22, 2024
d7bb06a
GH-6769: remove restriction on binary response
syzonyuliia-h2o Jul 22, 2024
0ef110b
GH-6769: add distribution parameter
syzonyuliia-h2o Jul 22, 2024
2bcf611
GH-6769: fix categorical splitting bug
syzonyuliia-h2o Jul 22, 2024
455234e
GH-6769: update binomial test
syzonyuliia-h2o Jul 22, 2024
067369c
GH-6769: adapt tests for multinomial features
syzonyuliia-h2o Jul 22, 2024
789717d
GH-6769: add multinomial java tests
syzonyuliia-h2o Jul 22, 2024
19d8cda
GH-6769: add multinomial python test
syzonyuliia-h2o Jul 24, 2024
1a4db7c
GH-6769: add generated changes
syzonyuliia-h2o Jul 24, 2024
2875434
Fix python tests
valenad1 Jul 24, 2024
8e041f1
GH-6769: add R test multinomial
syzonyuliia-h2o Jul 24, 2024
1cb6282
GH-6769: add asserts to python test
syzonyuliia-h2o Jul 24, 2024
6663953
fix R test
valenad1 Jul 24, 2024
aa7eb38
add assert to python
valenad1 Jul 24, 2024
b7d2aae
add assert to R
valenad1 Jul 24, 2024
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
22 changes: 9 additions & 13 deletions h2o-algos/src/main/java/hex/tree/dt/DT.java
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,6 @@
import hex.tree.dt.binning.Histogram;
syzonyuliia marked this conversation as resolved.
Show resolved Hide resolved
import hex.tree.dt.mrtasks.GetClassCountsMRTask;
import hex.tree.dt.mrtasks.ScoreDTTask;
import org.apache.commons.math3.util.Precision;
import org.apache.log4j.Logger;
import water.DKV;
import water.exceptions.H2OModelBuilderIllegalArgumentException;
Expand All @@ -19,7 +18,7 @@
import java.util.stream.Collectors;
import java.util.stream.IntStream;

import static hex.tree.dt.binning.SplitStatistics.entropyBinarySplit;
import static hex.tree.dt.binning.SplitStatistics.entropyBinarySplitMultinomial;

/**
* Decision Tree
Expand Down Expand Up @@ -108,8 +107,8 @@ private AbstractSplittingRule findBestSplit(Histogram histogram) {

private AbstractSplittingRule findBestSplitForFeature(Histogram histogram, int featureIndex) {
return (_train.vec(featureIndex).isNumeric()
? histogram.calculateSplitStatisticsForNumericFeature(featureIndex)
: histogram.calculateSplitStatisticsForCategoricalFeature(featureIndex))
? histogram.calculateSplitStatisticsForNumericFeature(featureIndex, _nclass)
: histogram.calculateSplitStatisticsForCategoricalFeature(featureIndex, _nclass))
.stream()
// todo - consider setting min count of samples in bin instead of filtering splits
.filter(binStatistics -> ((binStatistics._leftCount >= _min_rows)
Expand All @@ -128,6 +127,7 @@ private AbstractSplittingRule findBestSplitForFeature(Histogram histogram, int f


private static double calculateCriterionOfSplit(SplitStatistics binStatistics) {
// if(binStatistics.() == 2) // todo - fix bin statistics first, they are binomial-only now
Copy link
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Please finish those TODOs

return binStatistics.binaryEntropy();
syzonyuliia marked this conversation as resolved.
Show resolved Hide resolved
}

Expand All @@ -139,7 +139,7 @@ private static double calculateCriterionOfSplit(SplitStatistics binStatistics) {
*/
private int selectDecisionValue(int[] countsByClass) {
if (_nclass == 1) {
return countsByClass[0];
return 0;
}
int currentMaxClass = 0;
int currentMax = countsByClass[currentMaxClass];
Expand Down Expand Up @@ -205,11 +205,7 @@ public void buildNextNode(Queue<DataFeaturesLimits> limitsQueue, int nodeIndex)
// compute node depth
syzonyuliia marked this conversation as resolved.
Show resolved Hide resolved
int nodeDepth = (int) Math.floor(MathUtils.log2(nodeIndex + 1));
// stop building from this node, the node will be a leaf
if ((nodeDepth >= _parms._max_depth)
|| (countsByClass[0] <= _min_rows)
|| (countsByClass[1] <= _min_rows)
// || zeroRatio > 0.999 || zeroRatio < 0.001
) {
if ((nodeDepth >= _parms._max_depth) || Arrays.stream(countsByClass).anyMatch(c -> c <= _min_rows)) {
syzonyuliia marked this conversation as resolved.
Show resolved Hide resolved
// add imaginary left and right children to imitate valid tree structure
// left child
limitsQueue.add(null);
Expand All @@ -219,10 +215,10 @@ public void buildNextNode(Queue<DataFeaturesLimits> limitsQueue, int nodeIndex)
return;
}

Histogram histogram = new Histogram(_train, actualLimits, BinningStrategy.EQUAL_WIDTH/*, minNumSamplesInBin - todo consider*/);
Histogram histogram = new Histogram(_train, actualLimits, BinningStrategy.EQUAL_WIDTH, _nclass/*, minNumSamplesInBin - todo consider*/);

AbstractSplittingRule bestSplittingRule = findBestSplit(histogram);
double criterionForTheParentNode = entropyBinarySplit(1.0 * countsByClass[0] / (countsByClass[0] + countsByClass[1]));
double criterionForTheParentNode = entropyBinarySplitMultinomial(countsByClass, Arrays.stream(countsByClass).sum());
// if no split could be found, make a leaf from current node
// if the information gain is low, make a leaf from current node
if (bestSplittingRule == null
syzonyuliia marked this conversation as resolved.
Show resolved Hide resolved
Expand Down Expand Up @@ -365,7 +361,7 @@ public BuilderVisibility builderVisibility() {
public ModelCategory[] can_build() {
return new ModelCategory[]{
ModelCategory.Binomial,
// ModelCategory.Multinomial,
ModelCategory.Multinomial,
// ModelCategory.Ordinal,
// ModelCategory.Regression
};
Expand Down
6 changes: 3 additions & 3 deletions h2o-algos/src/main/java/hex/tree/dt/binning/AbstractBin.java
Original file line number Diff line number Diff line change
Expand Up @@ -5,11 +5,11 @@
 * Single bin holding limits (min excluded), total count of samples and per-class sample counts.
*/
public abstract class AbstractBin {
public int _count0;
public int[] _classesDistribution;
public int _count;

public int getCount0() {
return _count0;
public int getClassCount(int i) {
return _classesDistribution[i];
}

public abstract AbstractBin clone();
Expand Down
46 changes: 26 additions & 20 deletions h2o-algos/src/main/java/hex/tree/dt/binning/BinningStrategy.java
Original file line number Diff line number Diff line change
Expand Up @@ -9,13 +9,12 @@
import java.math.BigDecimal;
import java.math.RoundingMode;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

import static hex.tree.dt.mrtasks.CountBinsSamplesCountsMRTask.COUNT;
import static hex.tree.dt.mrtasks.CountBinsSamplesCountsMRTask.COUNT_0;
import java.util.stream.DoubleStream;

/**
* Strategy for binning. Creates bins for single feature.
 * Strategy for binning. Creates bins for a single feature.
*/
public enum BinningStrategy {

Expand All @@ -39,13 +38,13 @@ public enum BinningStrategy {
return roundToNDecimalPoints(number, DECIMALS_TO_CONSIDER);
}

private List<AbstractBin> createEmptyBinsFromBinningValues(List<Double> binningValues, double realMin, double realMax) {
private List<AbstractBin> createEmptyBinsFromBinningValues(List<Double> binningValues, double realMin, double realMax, int nclass) {
List<AbstractBin> emptyBins = new ArrayList<>();
// create bins between nearest binning values, don't create bin starting with the last value (on index size - 1)
for (int i = 0; i < binningValues.size() - 1; i++) {
emptyBins.add(
new NumericBin(roundToNDecimalPoints(binningValues.get(i)),
roundToNDecimalPoints(binningValues.get(i + 1))));
roundToNDecimalPoints(binningValues.get(i + 1)), nclass));
}
// set the first min to some lower value (relative to step) so the actual value equal to min is not lost
((NumericBin) emptyBins.get(0)).setMin(realMin - MIN_REL_COEFF * (binningValues.get(1) - binningValues.get(0)));
Expand All @@ -55,7 +54,7 @@ private List<AbstractBin> createEmptyBinsFromBinningValues(List<Double> binningV
}

@Override
List<AbstractBin> createFeatureBins(Frame originData, DataFeaturesLimits featuresLimits, int feature) {
List<AbstractBin> createFeatureBins(Frame originData, DataFeaturesLimits featuresLimits, int feature, int nclass) {
if (originData.vec(feature).isNumeric()) {
NumericFeatureLimits featureLimits = (NumericFeatureLimits) featuresLimits.getFeatureLimits(feature);
double step = (featureLimits._max - featureLimits._min) / NUM_BINS;
Expand All @@ -69,7 +68,7 @@ List<AbstractBin> createFeatureBins(Frame originData, DataFeaturesLimits feature
binningValues.add(value);
}
List<AbstractBin> emptyBins = createEmptyBinsFromBinningValues(
binningValues, featureLimits._min, featureLimits._max);
binningValues, featureLimits._min, featureLimits._max, nclass);

return calculateNumericBinSamplesCount(originData, emptyBins, featuresLimits.toDoubles(), feature);
} else {
Expand All @@ -78,7 +77,7 @@ List<AbstractBin> createFeatureBins(Frame originData, DataFeaturesLimits feature
for (int category = 0; category < featureLimits._mask.length; category++) {
// if the category is present in feature values, add new bin for this category
if (featureLimits._mask[category]) {
emptyBins.add(new CategoricalBin(category));
emptyBins.add(new CategoricalBin(category, nclass));
}
}

Expand All @@ -95,7 +94,7 @@ List<AbstractBin> createFeatureBins(Frame originData, DataFeaturesLimits feature
*/
EQUAL_HEIGHT {
@Override
List<AbstractBin> createFeatureBins(Frame originData, DataFeaturesLimits featuresLimits, int feature) {
List<AbstractBin> createFeatureBins(Frame originData, DataFeaturesLimits featuresLimits, int feature, int nclass) {
return null;
}

Expand All @@ -106,7 +105,7 @@ List<AbstractBin> createFeatureBins(Frame originData, DataFeaturesLimits feature
*/
CUSTOM_BINS {
@Override
List<AbstractBin> createFeatureBins(Frame originData, DataFeaturesLimits featuresLimits, int feature) {
List<AbstractBin> createFeatureBins(Frame originData, DataFeaturesLimits featuresLimits, int feature, int nclass) {
return null;
}
};
Expand All @@ -121,7 +120,7 @@ List<AbstractBin> createFeatureBins(Frame originData, DataFeaturesLimits feature
* @param feature selected feature index
* @return list of created bins
*/
abstract List<AbstractBin> createFeatureBins(Frame originData, DataFeaturesLimits featuresLimits, int feature);
abstract List<AbstractBin> createFeatureBins(Frame originData, DataFeaturesLimits featuresLimits, int feature, int nclass);

/**
* Calculates samples count for given bins for categorical feature.
Expand All @@ -136,11 +135,14 @@ private static List<AbstractBin> calculateCategoricalBinSamplesCount(Frame data,
double[][] featuresLimits, int feature) {
// run MR task to compute accumulated statistic for bins - one task for one feature, calculates all bins at once
double[][] binsArray = bins.stream().map(AbstractBin::toDoubles).toArray(double[][]::new);
CountBinsSamplesCountsMRTask task = new CountBinsSamplesCountsMRTask(feature, featuresLimits, binsArray);
int countsOffset = CountBinsSamplesCountsMRTask.CAT_COUNT_OFFSET;
CountBinsSamplesCountsMRTask task = new CountBinsSamplesCountsMRTask(feature, featuresLimits, binsArray, countsOffset);
task.doAll(data);
for(int i = 0; i < binsArray.length; i ++) {
bins.get(i)._count = (int) task._bins[i][COUNT];
bins.get(i)._count0 = (int) task._bins[i][COUNT_0];
for (int i = 0; i < binsArray.length; i++) {
bins.get(i)._count = (int) task._bins[i][countsOffset];
bins.get(i)._classesDistribution =
DoubleStream.of(Arrays.copyOfRange(task._bins[i], countsOffset + 1, task._bins[i].length))
.mapToInt(c -> (int) c).toArray();
}
return bins;
}
Expand All @@ -158,11 +160,15 @@ private static List<AbstractBin> calculateNumericBinSamplesCount(Frame data, Lis
double[][] featuresLimits, int feature) {
// run MR task to compute accumulated statistic for bins - one task for one feature, calculates all bins at once
double[][] binsArray = bins.stream().map(AbstractBin::toDoubles).toArray(double[][]::new);
CountBinsSamplesCountsMRTask task = new CountBinsSamplesCountsMRTask(feature, featuresLimits, binsArray);
int countsOffset = CountBinsSamplesCountsMRTask.NUM_COUNT_OFFSET;
CountBinsSamplesCountsMRTask task = new CountBinsSamplesCountsMRTask(feature, featuresLimits, binsArray, countsOffset);
task.doAll(data);
for(int i = 0; i < binsArray.length; i ++) {
bins.get(i)._count = (int) task._bins[i][COUNT];
bins.get(i)._count0 = (int) task._bins[i][COUNT_0];

for (int i = 0; i < binsArray.length; i++) {
bins.get(i)._count = (int) task._bins[i][countsOffset];
bins.get(i)._classesDistribution =
DoubleStream.of(Arrays.copyOfRange(task._bins[i], countsOffset + 1, task._bins[i].length))
.mapToInt(c -> (int) c).toArray();
}
return bins;
}
Expand Down
18 changes: 12 additions & 6 deletions h2o-algos/src/main/java/hex/tree/dt/binning/CategoricalBin.java
Original file line number Diff line number Diff line change
@@ -1,33 +1,39 @@
package hex.tree.dt.binning;

import org.apache.commons.lang.ArrayUtils;

import java.util.Arrays;

/**
* For categorical features values are already binned to categories - each bin corresponds to one value (category)
*/
public class CategoricalBin extends AbstractBin {
public int _category;

public CategoricalBin(int category, int count, int count0) {
public CategoricalBin(int category, int[] classesDistribution, int count) {
_category = category;
_classesDistribution = classesDistribution;
_count = count;
_count0 = count0;
}

public CategoricalBin(int category) {
public CategoricalBin(int category, int nclass) {
_category = category;
_classesDistribution = new int[nclass];
_count = 0;
_count0 = 0;
}

public int getCategory() {
return _category;
}

public CategoricalBin clone() {
return new CategoricalBin(_category, _count, _count0);
return new CategoricalBin(_category, _classesDistribution, _count);
}

public double[] toDoubles() {
return new double[]{_category, _count, _count0};
// category|count|class0|class1|...
return ArrayUtils.addAll(new double[]{_category, _count},
Arrays.stream(_classesDistribution).asDoubleStream().toArray());
}

}
Loading
Loading