001/** 002 * Copyright (c) 2011, The University of Southampton and the individual contributors. 003 * All rights reserved. 004 * 005 * Redistribution and use in source and binary forms, with or without modification, 006 * are permitted provided that the following conditions are met: 007 * 008 * * Redistributions of source code must retain the above copyright notice, 009 * this list of conditions and the following disclaimer. 010 * 011 * * Redistributions in binary form must reproduce the above copyright notice, 012 * this list of conditions and the following disclaimer in the documentation 013 * and/or other materials provided with the distribution. 014 * 015 * * Neither the name of the University of Southampton nor the names of its 016 * contributors may be used to endorse or promote products derived from this 017 * software without specific prior written permission. 018 * 019 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND 020 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED 021 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE 022 * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR 023 * ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES 024 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; 025 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON 026 * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 027 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS 028 * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 029 */ 030package org.openimaj.image.feature.local.affine; 031 032import java.util.ArrayList; 033import java.util.HashMap; 034import java.util.List; 035import java.util.Map; 036 037import org.openimaj.citation.annotation.Reference; 038import org.openimaj.citation.annotation.ReferenceType; 039import org.openimaj.image.FImage; 040import org.openimaj.image.Image; 041import org.openimaj.image.processing.transform.AffineParams; 042import org.openimaj.image.processing.transform.AffineSimulation; 043import org.openimaj.image.processor.SinglebandImageProcessor; 044import org.openimaj.math.geometry.point.ScaleSpacePoint; 045 046/** 047 * Base class for local feature detectors/extractors that use affine simulations 048 * in order to increase detections and improve performance with respect to 049 * affine change. 050 * 051 * @author Jonathon Hare (jsh2@ecs.soton.ac.uk) 052 * 053 * @param <Q> 054 * Type of interest point list 055 * @param <T> 056 * Type of interest point 057 * @param <I> 058 * Concrete subclass of {@link Image} 059 * @param <P> 060 * Pixel type 061 * 062 */ 063@Reference( 064 type = ReferenceType.Article, 065 author = { "Morel, Jean-Michel", "Yu, Guoshen" }, 066 title = "{ASIFT: A New Framework for Fully Affine Invariant Image Comparison}", 067 year = "2009", 068 journal = "SIAM J. Img. Sci.", 069 publisher = "Society for Industrial and Applied Mathematics") 070public abstract class AffineSimulationExtractor<Q extends List<T>, T extends ScaleSpacePoint, I extends Image<P, I> & SinglebandImageProcessor.Processable<Float, FImage, I>, P> 071{ 072 protected static final float PI = 3.141592654f; 073 protected float BorderFact = 6 * (float) Math.sqrt(2); 074 075 /** 076 * The list of all detected interest points 077 */ 078 public Q allInterestPoints; 079 080 /** 081 * The detected interest points, grouped by simulation 082 */ 083 public Map<AffineParams, Q> mappedInterestPoints; 084 085 /** 086 * The list of simulation parameters in the order the simulations were 087 * performed 088 */ 089 public List<AffineParams> simulationOrder; 090 091 /** 092 * Detect and describe the local features in the given (transformed) image. 093 * The returned features should be in the coordinate system of the given 094 * image; they will be automatically filtered and transformed back to the 095 * original coordinate system. 096 * 097 * @param image 098 * the image in which to detect features 099 * @return the local interest features 100 */ 101 protected abstract Q detectFeatures(I image); 102 103 /** 104 * Construct a new list capable of holding the extracted features. 105 * 106 * @return a new list 107 */ 108 protected abstract Q newList(); 109 110 /** 111 * Get the list of all the detected features 112 * 113 * @return the list of all detected features 114 */ 115 public Q getFeatures() { 116 return allInterestPoints; 117 } 118 119 /** 120 * Detect features in the given image, computing the simulations based on 121 * the given number of tilts. 122 * 123 * @param image 124 * the image 125 * @param num_of_tilts 126 * the number of tilt simulations 127 * @throws IllegalArgumentException 128 * if the number of tilts < 1 129 */ 130 public void detectFeatures(I image, int num_of_tilts) { 131 if (num_of_tilts < 1) { 132 throw new IllegalArgumentException("Number of tilts num_tilt should be equal or larger than 1."); 133 } 134 135 // setup the storage 136 allInterestPoints = newList(); 137 mappedInterestPoints = new HashMap<AffineParams, Q>(); 138 simulationOrder = new ArrayList<AffineParams>(); 139 140 final int num_rot_t2 = 10; // num rotations at final tilt 141 final float t_min = 1; 142 final float t_k = (float) Math.sqrt(2); 143 144 for (int tt = 1; tt <= num_of_tilts; tt++) { 145 final float t = t_min * (float) Math.pow(t_k, tt - 1); 146 AffineParams addedParams = null; 147 if (t == 1) { 148 addedParams = new AffineParams(0, t); 149 // lowe_sift_yu_nodouble_v1(image_tmp1,keypoints,display,verb); 150 final Q keypoints = detectFeatures(image.clone()); 151 152 /* Store the number of keypoints */ 153 mappedInterestPoints.put(addedParams, keypoints); 154 155 /* Store the keypoints */ 156 allInterestPoints.addAll(keypoints); 157 simulationOrder.add(addedParams); 158 } else { 159 int num_rots = Math.round(num_rot_t2 * t / 2); 160 161 if (num_rots % 2 == 1) { 162 num_rots = num_rots + 1; 163 } 164 num_rots = num_rots / 2; 165 166 final float delta_theta = PI / num_rots; 167 168 for (int rr = 1; rr <= num_rots; rr++) { 169 final float theta = delta_theta * (rr - 1); 170 171 final I image_tmp1 = AffineSimulation.transformImage(image, theta, t); 172 final Q keypoints = detectFeatures(image_tmp1); 173 174 filterEdgesTransformed(keypoints, theta, image, 1.0f / t); 175 176 AffineSimulation.transformToOriginal(keypoints, image, theta, t); 177 178 addedParams = new AffineParams(theta, t); 179 mappedInterestPoints.put(addedParams, keypoints); 180 allInterestPoints.addAll(keypoints); 181 simulationOrder.add(addedParams); 182 } 183 } 184 } 185 } 186 187 /** 188 * Detect features from a single simulation. 189 * 190 * @param image 191 * the image 192 * @param params 193 * the simulation parameters 194 * @return the detected features 195 */ 196 public Q detectFeatures(I image, AffineParams params) { 197 return detectFeatures(image, params.theta, params.tilt); 198 } 199 200 /** 201 * Detect features from a single simulation. 202 * 203 * @param image 204 * the image 205 * @param theta 206 * the rotation 207 * @param tilt 208 * the amount of tilt 209 * 210 * @return the detected features 211 */ 212 public Q detectFeatures(I image, float theta, float tilt) { 213 final I image_tmp1 = AffineSimulation.transformImage(image, theta, tilt); 214 215 final Q keypoints = detectFeatures(image_tmp1); 216 217 filterEdgesTransformed(keypoints, theta, image, 1.0f / tilt); 218 AffineSimulation.transformToOriginal(keypoints, image, theta, tilt); 219 220 return keypoints; 221 } 222 223 protected void filterEdgesTransformed(Q keypoints, float theta, I image, float t2) { 224 float x1, y1, x2, y2, x3, y3, x4, y4; 225 final List<T> keys_to_remove = new ArrayList<T>(); 226 227 /* Store the keypoints */ 228 final int imageWidth = image.getWidth(); 229 final int imageHeight = image.getHeight(); 230 for (int cc = 0; cc < keypoints.size(); cc++) { 231 /* 232 * check if the keypoint is located on the boundary of the 233 * parallelogram 234 */ 235 /* coordinates of the keypoint */ 236 final float x0 = keypoints.get(cc).getX(); 237 final float y0 = keypoints.get(cc).getY(); 238 final float scale1 = keypoints.get(cc).getScale(); 239 240 final float sin_theta = (float) Math.sin(theta); 241 final float cos_theta1 = (float) Math.cos(theta); 242 243 /* the coordinates of the 4 corners of the parallelogram */ 244 if (theta <= PI / 2.0) { 245 /* theta1 = theta * PI / 180; */ 246 x1 = imageHeight * sin_theta; 247 y1 = 0; 248 y2 = imageWidth * sin_theta; 249 x3 = imageWidth * cos_theta1; 250 x4 = 0; 251 y4 = imageHeight * cos_theta1; 252 x2 = x1 + x3; 253 y3 = y2 + y4; 254 255 /* 256 * Note that the vertical direction goes from top to bottom!!! 257 * The calculation above assumes that the vertical direction 258 * goes from the bottom to top. Thus the vertical coordinates 259 * need to be reversed!!! 260 */ 261 y1 = y3 - y1; 262 y2 = y3 - y2; 263 y4 = y3 - y4; 264 y3 = 0; 265 } else { 266 /* theta1 = theta * PI / 180; */ 267 y1 = -imageHeight * cos_theta1; 268 x2 = imageHeight * sin_theta; 269 x3 = 0; 270 y3 = imageWidth * sin_theta; 271 x4 = -imageWidth * cos_theta1; 272 y4 = 0; 273 x1 = x2 + x4; 274 y2 = y1 + y3; 275 276 /* 277 * Note that the vertical direction goes from top to bottom!!! 278 * The calculation above assumes that the vertical direction 279 * goes from the bottom to top. Thus the vertical coordinates 280 * need to be reversed!!! 281 */ 282 y1 = y2 - y1; 283 y3 = y2 - y3; 284 y4 = y2 - y4; 285 y2 = 0; 286 } 287 288 y1 = y1 * t2; 289 y2 = y2 * t2; 290 y3 = y3 * t2; 291 y4 = y4 * t2; 292 293 /* 294 * the distances from the keypoint to the 4 sides of the 295 * parallelogram 296 */ 297 final float d1 = (float) (Math.abs((x2 - x1) * (y1 - y0) - (x1 - x0) * (y2 - y1)) / Math.sqrt((x2 - x1) 298 * (x2 - x1) + (y2 - y1) * (y2 - y1))); 299 final float d2 = (float) (Math.abs((x3 - x2) * (y2 - y0) - (x2 - x0) * (y3 - y2)) / Math.sqrt((x3 - x2) 300 * (x3 - x2) + (y3 - y2) * (y3 - y2))); 301 final float d3 = (float) (Math.abs((x4 - x3) * (y3 - y0) - (x3 - x0) * (y4 - y3)) / Math.sqrt((x4 - x3) 302 * (x4 - x3) + (y4 - y3) * (y4 - y3))); 303 final float d4 = (float) (Math.abs((x1 - x4) * (y4 - y0) - (x4 - x0) * (y1 - y4)) / Math.sqrt((x1 - x4) 304 * (x1 - x4) + (y1 - y4) * (y1 - y4))); 305 306 final float BorderTh = BorderFact * scale1; 307 if ((d1 < BorderTh) || (d2 < BorderTh) || (d3 < BorderTh) || (d4 < BorderTh)) { 308 keys_to_remove.add(keypoints.get(cc)); 309 } 310 } 311 keypoints.removeAll(keys_to_remove); 312 } 313 314 /** 315 * get the detected interest points, grouped by simulation 316 * 317 * @return The detected interest points, grouped by simulation 318 */ 319 public Map<AffineParams, Q> getKeypointsMap() { 320 return mappedInterestPoints; 321 } 322}