/*
    AUTOMATICALLY GENERATED BY jTemp FROM
    /Users/jsh2/Work/openimaj/target/checkout/machine-learning/clustering/src/main/jtemp/org/openimaj/ml/clustering/assignment/soft/#T#KNNAssigner.jtemp
*/
/**
 * Copyright (c) 2011, The University of Southampton and the individual contributors.
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without modification,
 * are permitted provided that the following conditions are met:
 *
 *   * Redistributions of source code must retain the above copyright notice,
 *     this list of conditions and the following disclaimer.
 *
 *   * Redistributions in binary form must reproduce the above copyright notice,
 *     this list of conditions and the following disclaimer in the documentation
 *     and/or other materials provided with the distribution.
 *
 *   * Neither the name of the University of Southampton nor the names of its
 *     contributors may be used to endorse or promote products derived from this
 *     software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
 * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
 * ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
 * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
 * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */
package org.openimaj.ml.clustering.assignment.soft;

import org.openimaj.feature.ByteFVComparison;
import org.openimaj.knn.ByteNearestNeighbours;
import org.openimaj.knn.ByteNearestNeighboursExact;
import org.openimaj.knn.ByteNearestNeighboursProvider;
import org.openimaj.knn.approximate.ByteNearestNeighboursKDTree;
import org.openimaj.ml.clustering.assignment.SoftAssigner;
import org.openimaj.ml.clustering.CentroidsProvider;
import org.openimaj.util.pair.IndependentPair;

/**
 * A {@link SoftAssigner} that picks a fixed number of nearest neighbours.
 * The weights returned are in fact the distances to the selected centroids.
 *
 * @author Jonathon Hare (jsh2@ecs.soton.ac.uk)
 *
 */
public class ByteKNNAssigner implements SoftAssigner<byte[], float[]> {
    /** The backing nearest-neighbour implementation over the centroids. */
    protected ByteNearestNeighbours nn;

    /** The number of nearest neighbours to select for each input. */
    protected int numNeighbours;
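    /*
     * Usage sketch (not part of the generated source; the data values and
     * variable names below are purely illustrative). Given a set of byte-valued
     * centroids, an assigner can be built directly from the data and used to
     * find the nearest centroids for each input vector:
     *
     *     byte[][] centroids = {
     *         { 0, 0, 0, 0 },
     *         { 10, 10, 10, 10 },
     *         { 20, 20, 20, 20 }
     *     };
     *
     *     // exact mode, selecting the 2 nearest centroids per input vector
     *     ByteKNNAssigner assigner = new ByteKNNAssigner(centroids, true, 2);
     *
     *     byte[][] queries = { { 1, 0, 1, 0 }, { 21, 19, 20, 20 } };
     *     int[][] nearest = assigner.assign(queries); // nearest[i] holds the 2 closest centroid indices for queries[i]
     */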
    /**
     * Construct the assigner using the given cluster data. The assigner
     * is backed by either a {@link ByteNearestNeighboursExact} or
     * {@link ByteNearestNeighboursKDTree}, depending on whether the exact
     * parameter is true or false. If the parameter is true, then the
     * resultant {@link ByteNearestNeighboursExact} will use Euclidean
     * distance.
     *
     * @param provider the cluster data provider
     * @param exact if true, then use exact mode; false implies approximate mode.
     * @param numNeighbours the number of nearest neighbours to select.
     */
    public ByteKNNAssigner(CentroidsProvider<byte[]> provider, boolean exact, int numNeighbours) {
        this.numNeighbours = numNeighbours;

        if (exact) {
            nn = new ByteNearestNeighboursExact(provider.getCentroids());
        } else {
            if (provider instanceof ByteNearestNeighboursProvider) {
                ByteNearestNeighbours internal = ((ByteNearestNeighboursProvider) provider).getNearestNeighbours();

                // Reuse the provider's existing KD-tree index if it has one,
                // rather than building a new one from the centroids.
                if (internal != null && internal instanceof ByteNearestNeighboursKDTree) {
                    nn = (ByteNearestNeighboursKDTree) internal;
                    return;
                }
            }

            nn = new ByteNearestNeighboursKDTree(provider.getCentroids(),
                    ByteNearestNeighboursKDTree.DEFAULT_NTREES, ByteNearestNeighboursKDTree.DEFAULT_NCHECKS);
        }
    }

    /**
     * Construct the assigner using the given cluster data. The assigner
     * is backed by either a {@link ByteNearestNeighboursExact} or
     * {@link ByteNearestNeighboursKDTree}, depending on whether the exact
     * parameter is true or false. If the parameter is true, then the
     * resultant {@link ByteNearestNeighboursExact} will use Euclidean
     * distance.
     *
     * @param data the cluster data
     * @param exact if true, then use exact mode; false implies approximate mode.
     * @param numNeighbours the number of nearest neighbours to select.
     */
    public ByteKNNAssigner(byte[][] data, boolean exact, int numNeighbours) {
        this.numNeighbours = numNeighbours;

        if (exact) {
            nn = new ByteNearestNeighboursExact(data);
        } else {
            nn = new ByteNearestNeighboursKDTree(data,
                    ByteNearestNeighboursKDTree.DEFAULT_NTREES, ByteNearestNeighboursKDTree.DEFAULT_NCHECKS);
        }
    }

    /**
     * Construct the assigner using the given cluster data and
     * distance function. The assigner will operate in exact mode,
     * using a {@link ByteNearestNeighboursExact}.
     *
     * @param provider the cluster data provider
     * @param comparison the distance function
     * @param numNeighbours the number of nearest neighbours to select.
     */
    public ByteKNNAssigner(CentroidsProvider<byte[]> provider, ByteFVComparison comparison, int numNeighbours) {
        this.numNeighbours = numNeighbours;

        nn = new ByteNearestNeighboursExact(provider.getCentroids(), comparison);
    }
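    /*
     * Sketch of the comparison-based construction (illustrative only; the
     * specific ByteFVComparison constant used here is an assumption about the
     * available measures). Supplying a ByteFVComparison forces exact mode with
     * that distance function instead of the default Euclidean distance:
     *
     *     ByteKNNAssigner cityBlockAssigner =
     *             new ByteKNNAssigner(centroids, ByteFVComparison.CITY_BLOCK, 3);
     */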
    /**
     * Construct the assigner using the given cluster data and
     * distance function. The assigner will operate in exact mode,
     * using a {@link ByteNearestNeighboursExact}.
     *
     * @param data the cluster data
     * @param comparison the distance function
     * @param numNeighbours the number of nearest neighbours to select.
     */
    public ByteKNNAssigner(byte[][] data, ByteFVComparison comparison, int numNeighbours) {
        this.numNeighbours = numNeighbours;

        nn = new ByteNearestNeighboursExact(data, comparison);
    }

    @Override
    public int[][] assign(byte[][] data) {
        int[][] indices = new int[data.length][numNeighbours];
        float[][] distances = new float[data.length][numNeighbours];

        nn.searchKNN(data, numNeighbours, indices, distances);

        // Only the neighbour indices are returned; the distances are discarded.
        return indices;
    }

    @Override
    public int[] assign(byte[] data) {
        return assign(new byte[][] { data })[0];
    }

    @Override
    public void assignWeighted(byte[][] data, int[][] assignments, float[][] weights) {
        nn.searchKNN(data, numNeighbours, assignments, weights);
    }

    @Override
    public IndependentPair<int[], float[]> assignWeighted(byte[] data) {
        // There is only a single query vector, so only one row of results is needed.
        int[][] indices = new int[1][numNeighbours];
        float[][] distances = new float[1][numNeighbours];

        nn.searchKNN(new byte[][] { data }, numNeighbours, indices, distances);

        return new IndependentPair<int[], float[]>(indices[0], distances[0]);
    }

    @Override
    public int numDimensions() {
        return nn.numDimensions();
    }

    @Override
    public int size() {
        return nn.size();
    }
}
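/*
 * Sketch of weighted (soft) assignment for a single vector (illustrative only;
 * "assigner" and "queries" refer to the hypothetical variables in the sketches
 * above, and the IndependentPair accessors firstObject()/secondObject() are
 * assumed):
 *
 *     IndependentPair<int[], float[]> result = assigner.assignWeighted(queries[0]);
 *     int[] ids = result.firstObject();      // indices of the selected centroids
 *     float[] dists = result.secondObject(); // the corresponding distances, returned as the "weights"
 */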