diff options
author | Andrej Shadura <andrewsh@debian.org> | 2019-03-09 22:30:40 +0000 |
---|---|---|
committer | Andrej Shadura <andrewsh@debian.org> | 2019-03-09 22:30:40 +0000 |
commit | 337087b668d3a54f3afee3a9adb597a32e9f7e94 (patch) | |
tree | d860094269622472f8079d497ac7af02dbb4e038 /src/de/lmu/ifi/dbs/elki/algorithm/clustering/correlation/CASH.java | |
parent | 14a486343aef55f97f54082d6b542dedebf6f3ba (diff) |
Import Upstream version 0.6.5~20141030
Diffstat (limited to 'src/de/lmu/ifi/dbs/elki/algorithm/clustering/correlation/CASH.java')
-rw-r--r-- | src/de/lmu/ifi/dbs/elki/algorithm/clustering/correlation/CASH.java | 196 |
1 file changed, 105 insertions(+), 91 deletions(-)
diff --git a/src/de/lmu/ifi/dbs/elki/algorithm/clustering/correlation/CASH.java b/src/de/lmu/ifi/dbs/elki/algorithm/clustering/correlation/CASH.java index 8e5fa627..adef67ff 100644 --- a/src/de/lmu/ifi/dbs/elki/algorithm/clustering/correlation/CASH.java +++ b/src/de/lmu/ifi/dbs/elki/algorithm/clustering/correlation/CASH.java @@ -4,7 +4,7 @@ package de.lmu.ifi.dbs.elki.algorithm.clustering.correlation; This file is part of ELKI: Environment for Developing KDD-Applications Supported by Index-Structures - Copyright (C) 2013 + Copyright (C) 2014 Ludwig-Maximilians-Universität München Lehr- und Forschungseinheit für Datenbanksysteme ELKI Development Team @@ -57,8 +57,7 @@ import de.lmu.ifi.dbs.elki.database.query.distance.DistanceQuery; import de.lmu.ifi.dbs.elki.database.relation.MaterializedRelation; import de.lmu.ifi.dbs.elki.database.relation.Relation; import de.lmu.ifi.dbs.elki.datasource.filter.normalization.NonNumericFeaturesException; -import de.lmu.ifi.dbs.elki.distance.distancefunction.WeightedDistanceFunction; -import de.lmu.ifi.dbs.elki.distance.distancevalue.DoubleDistance; +import de.lmu.ifi.dbs.elki.distance.distancefunction.MatrixWeightedDistanceFunction; import de.lmu.ifi.dbs.elki.logging.Logging; import de.lmu.ifi.dbs.elki.logging.progress.FiniteProgress; import de.lmu.ifi.dbs.elki.math.linearalgebra.LinearEquationSystem; @@ -82,19 +81,20 @@ import de.lmu.ifi.dbs.elki.utilities.optionhandling.parameters.Flag; import de.lmu.ifi.dbs.elki.utilities.optionhandling.parameters.IntParameter; /** - * Provides the CASH algorithm, an subspace clustering algorithm based on the - * Hough transform. + * The CASH algorithm is a subspace clustering algorithm based on the Hough + * transform. * + * Reference: * <p> - * Reference: E. Achtert, C. Böhm, J. David, P. Kröger, A. Zimek: Robust - * clustering in arbitrarily oriented subspaces. <br> + * E. Achtert, C. Böhm, J. David, P. Kröger, A. Zimek:<br /> + * Robust clustering in arbitrarily oriented subspaces. 
<br> * In Proc. 8th SIAM Int. Conf. on Data Mining (SDM'08), Atlanta, GA, 2008 * </p> * * @author Elke Achtert * * @apiviz.has CASHInterval - * @apiviz.uses ParameterizationFunction + * @apiviz.has ParameterizationFunction * @apiviz.has LinearEquationModel * * @param <V> Vector type @@ -102,8 +102,11 @@ import de.lmu.ifi.dbs.elki.utilities.optionhandling.parameters.IntParameter; // todo elke hierarchy (later) @Title("CASH: Robust clustering in arbitrarily oriented subspaces") @Description("Subspace clustering algorithm based on the Hough transform.") -@Reference(authors = "E. Achtert, C. Böhm, J. David, P. Kröger, A. Zimek", title = "Robust clustering in arbitraily oriented subspaces", booktitle = "Proc. 8th SIAM Int. Conf. on Data Mining (SDM'08), Atlanta, GA, 2008", url = "http://www.siam.org/proceedings/datamining/2008/dm08_69_AchtertBoehmDavidKroegerZimek.pdf") -public class CASH<V extends NumberVector<?>> extends AbstractAlgorithm<Clustering<Model>> implements ClusteringAlgorithm<Clustering<Model>> { +@Reference(authors = "E. Achtert, C. Böhm, J. David, P. Kröger, A. Zimek", // +title = "Robust clustering in arbitraily oriented subspaces",// +booktitle = "Proc. 8th SIAM Int. Conf. on Data Mining (SDM'08), Atlanta, GA, 2008",// +url = "http://www.siam.org/proceedings/datamining/2008/dm08_69_AchtertBoehmDavidKroegerZimek.pdf") +public class CASH<V extends NumberVector> extends AbstractAlgorithm<Clustering<Model>> implements ClusteringAlgorithm<Clustering<Model>> { /** * The logger for this class. 
*/ @@ -224,7 +227,7 @@ public class CASH<V extends NumberVector<?>> extends AbstractAlgorithm<Clusterin */ public Clustering<Model> run(Database database, Relation<V> vrel) { this.fulldatabase = preprocess(database, vrel); - if (LOG.isVerbose()) { + if(LOG.isVerbose()) { StringBuilder msg = new StringBuilder(); msg.append("DB size: ").append(fulldatabase.size()); msg.append("\nmin Dim: ").append(minDim); @@ -236,17 +239,16 @@ public class CASH<V extends NumberVector<?>> extends AbstractAlgorithm<Clusterin FiniteProgress progress = LOG.isVerbose() ? new FiniteProgress("CASH Clustering", fulldatabase.size(), LOG) : null; Clustering<Model> result = doRun(fulldatabase, progress); - if (progress != null) { - progress.ensureCompleted(LOG); - } + LOG.ensureCompleted(progress); - if (LOG.isVerbose()) { + if(LOG.isVerbose()) { StringBuilder msg = new StringBuilder(); - for (Cluster<Model> c : result.getAllClusters()) { - if (c.getModel() instanceof LinearEquationModel) { + for(Cluster<Model> c : result.getAllClusters()) { + if(c.getModel() instanceof LinearEquationModel) { LinearEquationModel s = (LinearEquationModel) c.getModel(); msg.append("\n Cluster: Dim: " + s.getLes().subspacedim() + " size: " + c.size()); - } else { + } + else { msg.append("\n Cluster: " + c.getModel().getClass().getName() + " size: " + c.size()); } } @@ -268,7 +270,7 @@ public class CASH<V extends NumberVector<?>> extends AbstractAlgorithm<Clusterin MaterializedRelation<ParameterizationFunction> prep = new MaterializedRelation<>(db, type, ids); // Project - for (DBIDIter iter = ids.iter(); iter.valid(); iter.advance()) { + for(DBIDIter iter = ids.iter(); iter.valid(); iter.advance()) { ParameterizationFunction f = new ParameterizationFunction(vrel.get(iter)); prep.set(iter, f); } @@ -294,51 +296,54 @@ public class CASH<V extends NumberVector<?>> extends AbstractAlgorithm<Clusterin ModifiableDBIDs noiseIDs = DBIDUtil.newHashSet(relation.getDBIDs()); initHeap(heap, relation, dim, noiseIDs); - if 
(LOG.isDebugging()) { + if(LOG.isDebugging()) { StringBuilder msg = new StringBuilder(); msg.append("XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX"); msg.append("\nXXXX dim ").append(dim); msg.append("\nXXXX database.size ").append(relation.size()); msg.append("\nXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX"); LOG.debugFine(msg.toString()); - } else if (LOG.isVerbose()) { + } + else if(LOG.isVerbose()) { StringBuilder msg = new StringBuilder(); msg.append("XXXX dim ").append(dim).append(" database.size ").append(relation.size()); LOG.verbose(msg.toString()); } // get the ''best'' d-dimensional intervals at max level - while (!heap.isEmpty()) { + while(!heap.isEmpty()) { CASHInterval interval = determineNextIntervalAtMaxLevel(heap); - if (LOG.isDebugging()) { + if(LOG.isDebugging()) { LOG.debugFine("next interval in dim " + dim + ": " + interval); - } else if (LOG.isVerbose()) { + } + else if(LOG.isVerbose()) { LOG.verbose("next interval in dim " + dim + ": " + interval); } // only noise left - if (interval == null) { + if(interval == null) { break; } // do a dim-1 dimensional run ModifiableDBIDs clusterIDs = DBIDUtil.newHashSet(); - if (dim > minDim + 1) { + if(dim > minDim + 1) { ModifiableDBIDs ids; Matrix basis_dim_minus_1; - if (adjust) { + if(adjust) { ids = DBIDUtil.newHashSet(); basis_dim_minus_1 = runDerivator(relation, dim, interval, ids); - } else { + } + else { ids = interval.getIDs(); basis_dim_minus_1 = determineBasis(SpatialUtil.centroid(interval)); } - if (ids.size() != 0) { + if(ids.size() != 0) { MaterializedRelation<ParameterizationFunction> db = buildDB(dim, basis_dim_minus_1, ids, relation); // add result of dim-1 to this result Clustering<Model> res_dim_minus_1 = doRun(db, progress); - for (Cluster<Model> cluster : res_dim_minus_1.getAllClusters()) { + for(Cluster<Model> cluster : res_dim_minus_1.getAllClusters()) { res.addToplevelCluster(cluster); 
noiseIDs.removeDBIDs(cluster.getIDs()); clusterIDs.addDBIDs(cluster.getIDs()); @@ -358,30 +363,31 @@ public class CASH<V extends NumberVector<?>> extends AbstractAlgorithm<Clusterin // Rebuild heap ArrayList<IntegerPriorityObject<CASHInterval>> heapVector = new ArrayList<>(heap.size()); - for (ObjectHeap.UnsortedIter<IntegerPriorityObject<CASHInterval>> iter = heap.unsortedIter(); iter.valid(); iter.advance()) { + for(ObjectHeap.UnsortedIter<IntegerPriorityObject<CASHInterval>> iter = heap.unsortedIter(); iter.valid(); iter.advance()) { heapVector.add(iter.get()); } heap.clear(); - for (IntegerPriorityObject<CASHInterval> pair : heapVector) { + for(IntegerPriorityObject<CASHInterval> pair : heapVector) { CASHInterval currentInterval = pair.getObject(); currentInterval.removeIDs(clusterIDs); - if (currentInterval.getIDs().size() >= minPts) { + if(currentInterval.getIDs().size() >= minPts) { heap.add(new IntegerPriorityObject<>(currentInterval.priority(), currentInterval)); } } - if (progress != null) { + if(progress != null) { progress.setProcessed(processedIDs.size(), LOG); } } // put noise to clusters - if (!noiseIDs.isEmpty()) { - if (dim == noiseDim) { + if(!noiseIDs.isEmpty()) { + if(dim == noiseDim) { Cluster<Model> c = new Cluster<Model>(noiseIDs, true, ClusterModel.CLUSTER); res.addToplevelCluster(c); processedIDs.addDBIDs(noiseIDs); - } else if (noiseIDs.size() >= minPts) { + } + else if(noiseIDs.size() >= minPts) { LinearEquationSystem les = runDerivator(fulldatabase, dim - 1, noiseIDs); Cluster<Model> c = new Cluster<Model>(noiseIDs, true, new LinearEquationModel(les)); res.addToplevelCluster(c); @@ -389,22 +395,23 @@ public class CASH<V extends NumberVector<?>> extends AbstractAlgorithm<Clusterin } } - if (LOG.isDebugging()) { + if(LOG.isDebugging()) { StringBuilder msg = new StringBuilder(); msg.append("noise fuer dim ").append(dim).append(": ").append(noiseIDs.size()); - for (Cluster<Model> c : res.getAllClusters()) { - if (c.getModel() instanceof 
LinearEquationModel) { + for(Cluster<Model> c : res.getAllClusters()) { + if(c.getModel() instanceof LinearEquationModel) { LinearEquationModel s = (LinearEquationModel) c.getModel(); msg.append("\n Cluster: Dim: " + s.getLes().subspacedim() + " size: " + c.size()); - } else { + } + else { msg.append("\n Cluster: " + c.getModel().getClass().getName() + " size: " + c.size()); } } LOG.debugFine(msg.toString()); } - if (progress != null) { + if(progress != null) { progress.setProcessed(processedIDs.size(), LOG); } return res; @@ -442,14 +449,15 @@ public class CASH<V extends NumberVector<?>> extends AbstractAlgorithm<Clusterin double[] d_mins = new double[numDIntervals]; double[] d_maxs = new double[numDIntervals]; - if (LOG.isDebugging()) { + if(LOG.isDebugging()) { StringBuilder msg = new StringBuilder(); msg.append("d_min ").append(d_min); msg.append("\nd_max ").append(d_max); msg.append("\nnumDIntervals ").append(numDIntervals); msg.append("\ndIntervalSize ").append(dIntervalSize); LOG.debugFine(msg.toString()); - } else if (LOG.isVerbose()) { + } + else if(LOG.isVerbose()) { StringBuilder msg = new StringBuilder(); msg.append("d_min ").append(d_min); msg.append("\nd_max ").append(d_max); @@ -463,28 +471,30 @@ public class CASH<V extends NumberVector<?>> extends AbstractAlgorithm<Clusterin double[] alphaMax = new double[dim - 1]; Arrays.fill(alphaMax, Math.PI); - for (int i = 0; i < numDIntervals; i++) { - if (i == 0) { + for(int i = 0; i < numDIntervals; i++) { + if(i == 0) { d_mins[i] = d_min; - } else { + } + else { d_mins[i] = d_maxs[i - 1]; } - if (i < numDIntervals - 1) { + if(i < numDIntervals - 1) { d_maxs[i] = d_mins[i] + dIntervalSize; - } else { + } + else { d_maxs[i] = d_max - d_mins[i]; } HyperBoundingBox alphaInterval = new HyperBoundingBox(alphaMin, alphaMax); ModifiableDBIDs intervalIDs = split.determineIDs(ids, alphaInterval, d_mins[i], d_maxs[i]); - if (intervalIDs != null && intervalIDs.size() >= minPts) { + if(intervalIDs != null && 
intervalIDs.size() >= minPts) { CASHInterval rootInterval = new CASHInterval(alphaMin, alphaMax, split, intervalIDs, -1, 0, d_mins[i], d_maxs[i]); heap.add(new IntegerPriorityObject<>(rootInterval.priority(), rootInterval)); } } - if (LOG.isDebuggingFiner()) { + if(LOG.isDebuggingFiner()) { StringBuilder msg = new StringBuilder(); msg.append("heap.size ").append(heap.size()); LOG.debugFiner(msg.toString()); @@ -509,12 +519,12 @@ public class CASH<V extends NumberVector<?>> extends AbstractAlgorithm<Clusterin proxy.addRelation(prep); // Project - for (DBIDIter iter = ids.iter(); iter.valid(); iter.advance()) { + for(DBIDIter iter = ids.iter(); iter.valid(); iter.advance()) { ParameterizationFunction f = project(basis, relation.get(iter)); prep.set(iter, f); } - if (LOG.isDebugging()) { + if(LOG.isDebugging()) { LOG.debugFine("db fuer dim " + (dim - 1) + ": " + ids.size()); } @@ -546,7 +556,7 @@ public class CASH<V extends NumberVector<?>> extends AbstractAlgorithm<Clusterin */ private Matrix determineBasis(double[] alpha) { double[] nn = new double[alpha.length + 1]; - for (int i = 0; i < nn.length; i++) { + for(int i = 0; i < nn.length; i++) { double alpha_i = i == alpha.length ? 
0 : alpha[i]; nn[i] = sinusProduct(0, i, alpha) * StrictMath.cos(alpha_i); } @@ -566,7 +576,7 @@ public class CASH<V extends NumberVector<?>> extends AbstractAlgorithm<Clusterin */ private double sinusProduct(int start, int end, double[] alpha) { double result = 1; - for (int j = start; j < end; j++) { + for(int j = start; j < end; j++) { result *= Math.sin(alpha[j]); } return result; @@ -582,8 +592,8 @@ public class CASH<V extends NumberVector<?>> extends AbstractAlgorithm<Clusterin private CASHInterval determineNextIntervalAtMaxLevel(ObjectHeap<IntegerPriorityObject<CASHInterval>> heap) { CASHInterval next = doDetermineNextIntervalAtMaxLevel(heap); // noise path was chosen - while (next == null) { - if (heap.isEmpty()) { + while(next == null) { + if(heap.isEmpty()) { return null; } next = doDetermineNextIntervalAtMaxLevel(heap); @@ -602,45 +612,48 @@ public class CASH<V extends NumberVector<?>> extends AbstractAlgorithm<Clusterin private CASHInterval doDetermineNextIntervalAtMaxLevel(ObjectHeap<IntegerPriorityObject<CASHInterval>> heap) { CASHInterval interval = heap.poll().getObject(); int dim = interval.getDimensionality(); - while (true) { + while(true) { // max level is reached - if (interval.getLevel() >= maxLevel && interval.getMaxSplitDimension() == (dim - 1)) { + if(interval.getLevel() >= maxLevel && interval.getMaxSplitDimension() == (dim - 1)) { return interval; } - if (heap.size() % 10000 == 0 && LOG.isVerbose()) { + if(heap.size() % 10000 == 0 && LOG.isVerbose()) { LOG.verbose("heap size " + heap.size()); } - if (heap.size() >= 40000) { + if(heap.size() >= 40000) { LOG.warning("Heap size > 40.000!!!"); heap.clear(); return null; } - if (LOG.isDebuggingFiner()) { + if(LOG.isDebuggingFiner()) { LOG.debugFiner("split " + interval.toString() + " " + interval.getLevel() + "-" + interval.getMaxSplitDimension()); } interval.split(); // noise - if (!interval.hasChildren()) { + if(!interval.hasChildren()) { return null; } CASHInterval bestInterval; - if 
(interval.getLeftChild() != null && interval.getRightChild() != null) { + if(interval.getLeftChild() != null && interval.getRightChild() != null) { int comp = interval.getLeftChild().compareTo(interval.getRightChild()); - if (comp < 0) { + if(comp < 0) { bestInterval = interval.getRightChild(); heap.add(new IntegerPriorityObject<>(interval.getLeftChild().priority(), interval.getLeftChild())); - } else { + } + else { bestInterval = interval.getLeftChild(); heap.add(new IntegerPriorityObject<>(interval.getRightChild().priority(), interval.getRightChild())); } - } else if (interval.getLeftChild() == null) { + } + else if(interval.getLeftChild() == null) { bestInterval = interval.getRightChild(); - } else { + } + else { bestInterval = interval.getLeftChild(); } @@ -665,7 +678,7 @@ public class CASH<V extends NumberVector<?>> extends AbstractAlgorithm<Clusterin double d_min = Double.POSITIVE_INFINITY; double d_max = Double.NEGATIVE_INFINITY; - for (DBIDIter iditer = relation.iterDBIDs(); iditer.valid(); iditer.advance()) { + for(DBIDIter iditer = relation.iterDBIDs(); iditer.valid(); iditer.advance()) { ParameterizationFunction f = relation.get(iditer); HyperBoundingBox minMax = f.determineAlphaMinMax(box); double f_min = f.function(SpatialUtil.getMin(minMax)); @@ -694,25 +707,25 @@ public class CASH<V extends NumberVector<?>> extends AbstractAlgorithm<Clusterin // set the parameters ListParameterization parameters = new ListParameterization(); - parameters.addParameter(PCAFilteredRunner.PCA_EIGENPAIR_FILTER, FirstNEigenPairFilter.class.getName()); + parameters.addParameter(PCAFilteredRunner.Parameterizer.PCA_EIGENPAIR_FILTER, FirstNEigenPairFilter.class.getName()); parameters.addParameter(FirstNEigenPairFilter.EIGENPAIR_FILTER_N, Integer.toString(dim - 1)); - DependencyDerivator<DoubleVector, DoubleDistance> derivator = null; - Class<DependencyDerivator<DoubleVector, DoubleDistance>> cls = ClassGenericsUtil.uglyCastIntoSubclass(DependencyDerivator.class); + 
DependencyDerivator<DoubleVector> derivator = null; + Class<DependencyDerivator<DoubleVector>> cls = ClassGenericsUtil.uglyCastIntoSubclass(DependencyDerivator.class); derivator = parameters.tryInstantiate(cls); CorrelationAnalysisSolution<DoubleVector> model = derivator.run(derivatorDB); Matrix weightMatrix = model.getSimilarityMatrix(); DoubleVector centroid = new DoubleVector(model.getCentroid()); - DistanceQuery<DoubleVector, DoubleDistance> df = QueryUtil.getDistanceQuery(derivatorDB, new WeightedDistanceFunction(weightMatrix)); - DoubleDistance eps = new DoubleDistance(0.25); + DistanceQuery<DoubleVector> df = QueryUtil.getDistanceQuery(derivatorDB, new MatrixWeightedDistanceFunction(weightMatrix)); + double eps = .25; ids.addDBIDs(interval.getIDs()); // Search for nearby vectors in original database - for (DBIDIter iditer = relation.iterDBIDs(); iditer.valid(); iditer.advance()) { - DoubleVector v = new DoubleVector(relation.get(iditer).getColumnVector().getArrayRef()); - DoubleDistance d = df.distance(v, centroid); - if (d.compareTo(eps) < 0) { + for(DBIDIter iditer = relation.iterDBIDs(); iditer.valid(); iditer.advance()) { + DoubleVector v = new DoubleVector(relation.get(iditer).getColumnVector()); + double d = df.distance(v, centroid); + if(d <= eps) { ids.add(iditer); } } @@ -739,12 +752,12 @@ public class CASH<V extends NumberVector<?>> extends AbstractAlgorithm<Clusterin proxy.addRelation(prep); // Project - for (DBIDIter iter = ids.iter(); iter.valid(); iter.advance()) { + for(DBIDIter iter = ids.iter(); iter.valid(); iter.advance()) { DoubleVector v = new DoubleVector(relation.get(iter).getColumnVector().getArrayRef()); prep.set(iter, v); } - if (LOG.isDebugging()) { + if(LOG.isDebugging()) { LOG.debugFine("db fuer derivator : " + prep.size()); } @@ -767,16 +780,17 @@ public class CASH<V extends NumberVector<?>> extends AbstractAlgorithm<Clusterin Database derivatorDB = buildDerivatorDB(relation, ids); ListParameterization parameters = new 
ListParameterization(); - parameters.addParameter(PCAFilteredRunner.PCA_EIGENPAIR_FILTER, FirstNEigenPairFilter.class.getName()); + parameters.addParameter(PCAFilteredRunner.Parameterizer.PCA_EIGENPAIR_FILTER, FirstNEigenPairFilter.class.getName()); parameters.addParameter(FirstNEigenPairFilter.EIGENPAIR_FILTER_N, Integer.toString(dimensionality)); - DependencyDerivator<DoubleVector, DoubleDistance> derivator = null; - Class<DependencyDerivator<DoubleVector, DoubleDistance>> cls = ClassGenericsUtil.uglyCastIntoSubclass(DependencyDerivator.class); + DependencyDerivator<DoubleVector> derivator = null; + Class<DependencyDerivator<DoubleVector>> cls = ClassGenericsUtil.uglyCastIntoSubclass(DependencyDerivator.class); derivator = parameters.tryInstantiate(cls); CorrelationAnalysisSolution<DoubleVector> model = derivator.run(derivatorDB); LinearEquationSystem les = model.getNormalizedLinearEquationSystem(null); return les; - } catch (NonNumericFeaturesException e) { + } + catch(NonNumericFeaturesException e) { throw new IllegalStateException("Error during normalization" + e); } } @@ -798,7 +812,7 @@ public class CASH<V extends NumberVector<?>> extends AbstractAlgorithm<Clusterin proxy.addRelation(prep); // Project - for (DBIDIter iter = ids.iter(); iter.valid(); iter.advance()) { + for(DBIDIter iter = ids.iter(); iter.valid(); iter.advance()) { DoubleVector v = new DoubleVector(relation.get(iter).getColumnVector().getArrayRef()); prep.set(iter, v); } @@ -839,32 +853,32 @@ public class CASH<V extends NumberVector<?>> extends AbstractAlgorithm<Clusterin super.makeOptions(config); IntParameter minptsP = new IntParameter(MINPTS_ID); minptsP.addConstraint(CommonConstraints.GREATER_EQUAL_ONE_INT); - if (config.grab(minptsP)) { + if(config.grab(minptsP)) { minpts = minptsP.getValue(); } IntParameter maxlevelP = new IntParameter(MAXLEVEL_ID); maxlevelP.addConstraint(CommonConstraints.GREATER_EQUAL_ONE_INT); - if (config.grab(maxlevelP)) { + if(config.grab(maxlevelP)) { maxlevel 
= maxlevelP.getValue(); } IntParameter mindimP = new IntParameter(MINDIM_ID, 1); mindimP.addConstraint(CommonConstraints.GREATER_EQUAL_ONE_INT); - if (config.grab(mindimP)) { + if(config.grab(mindimP)) { mindim = mindimP.getValue(); } DoubleParameter jitterP = new DoubleParameter(JITTER_ID); jitterP.addConstraint(CommonConstraints.GREATER_THAN_ZERO_DOUBLE); - if (config.grab(jitterP)) { + if(config.grab(jitterP)) { jitter = jitterP.getValue(); } Flag adjustF = new Flag(ADJUST_ID); - if (config.grab(adjustF)) { + if(config.grab(adjustF)) { adjust = adjustF.getValue(); } } @Override - protected CASH<NumberVector<?>> makeInstance() { + protected CASH<NumberVector> makeInstance() { return new CASH<>(minpts, maxlevel, mindim, jitter, adjust); } } |