/**
 * Copyright (c) 2011, The University of Southampton and the individual contributors.
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without modification,
 * are permitted provided that the following conditions are met:
 *
 *   * Redistributions of source code must retain the above copyright notice,
 *     this list of conditions and the following disclaimer.
 *
 *   * Redistributions in binary form must reproduce the above copyright notice,
 *     this list of conditions and the following disclaimer in the documentation
 *     and/or other materials provided with the distribution.
 *
 *   * Neither the name of the University of Southampton nor the names of its
 *     contributors may be used to endorse or promote products derived from this
 *     software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
 * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
 * ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
 * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
 * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */
package org.openimaj.ml.linear.learner.perceptron;

import org.openimaj.math.matrix.MatlibMatrixUtils;
import org.openimaj.ml.linear.kernel.VectorKernel;
import org.openimaj.util.pair.IndependentPair;

import ch.akuhn.matrix.DenseMatrix;
import ch.akuhn.matrix.Matrix;
import ch.akuhn.matrix.Vector;

/**
 * A Projectron: a kernel perceptron that bounds the number of support vectors
 * by projecting each new example onto the span of the current supports. If the
 * projection error is at most {@code eta}, the existing support weights are
 * adjusted instead of a new support being added.
 *
 * @author Sina Samangooei (ss@ecs.soton.ac.uk)
 */
public class Projectron extends DoubleArrayKernelPerceptron {

	private static final double DEFAULT_ETA = 0.01;
	private Matrix Kinv;
	private double eta;

	/**
	 * @param kernel
	 *            the kernel function applied to pairs of examples
	 * @param eta
	 *            the maximum projection error allowed before a new support is added
	 */
	public Projectron(VectorKernel kernel, double eta) {
		super(kernel);
		this.eta = eta;
		Kinv = DenseMatrix.dense(0, 0);
	}

	/**
	 * Construct with the default projection-error threshold.
	 *
	 * @param kernel
	 *            the kernel function applied to pairs of examples
	 */
	public Projectron(VectorKernel kernel) {
		this(kernel, DEFAULT_ETA);
	}

	@Override
	public void update(double[] xt, PerceptronClass yt, PerceptronClass yt_prime) {
		double kii = this.kernel.apply(IndependentPair.pair(xt, xt));

		// First calculate the optimal weighting vector d
		Vector kt = calculatekt(xt);
		Vector d_optimal = Kinv.mult(kt);
		double delta = Math.max(kii - d_optimal.dot(kt), 0);
		// this.bias += yt.v();
		if (delta <= eta) {
			// the example lies (approximately) in the span of the current
			// supports, so adjust their weights rather than adding a support
			updateWeights(yt, d_optimal);
		} else {
			// add a new support and update the inverse kernel matrix
			super.update(xt, yt, yt_prime);
			updateKinv(d_optimal, delta);
		}
	}

	private void updateWeights(PerceptronClass y, Vector d_optimal) {
		for (int i = 0; i < d_optimal.size(); i++) {
			this.weights.set(i, this.weights.get(i) + y.v() * d_optimal.get(i));
		}
	}

	@Override
	public double getBias() {
		return 0;
	}

	private void updateKinv(Vector d_optimal, double delta) {
		Matrix newKinv = null;
		if (this.supports.size() > 1) {
			// rank-one update: newKinv = [Kinv 0; 0 0] + (1/delta) * [d; -1][d; -1]^T
			Vector expandD = Vector.dense(d_optimal.size() + 1);
			Matrix expandDMat = DenseMatrix.dense(expandD.size(), 1);
			MatlibMatrixUtils.setSubVector(expandDMat.column(0), 0, d_optimal);
			expandDMat.column(0).put(d_optimal.size(), -1);
			newKinv = new DenseMatrix(Kinv.rowCount() + 1, Kinv.columnCount() + 1);
			MatlibMatrixUtils.setSubMatrix(newKinv, 0, 0, Kinv);

			Matrix expandDMult = newKinv.newInstance();
			MatlibMatrixUtils.dotProductTranspose(expandDMat, expandDMat, expandDMult);

			MatlibMatrixUtils.scaleInplace(expandDMult, 1 / delta);
			MatlibMatrixUtils.plusInplace(newKinv, expandDMult);
		} else {
			// first support: the inverse kernel matrix is simply 1 / k(x, x)
			double[] only = this.supports.get(0);
			newKinv = DenseMatrix.dense(1, 1);
			newKinv.put(0, 0, 1 / this.kernel.apply(IndependentPair.pair(only, only)));
		}
		this.Kinv = newKinv;
	}

	private Vector calculatekt(double[] xt) {
		Vector ret = Vector.dense(this.supports.size());
		for (int i = 0; i < this.supports.size(); i++) {
			ret.put(i, this.kernel.apply(IndependentPair.pair(xt, this.supports.get(i))));
		}
		return ret;
	}
}
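
// Usage sketch (illustrative only; not part of the original source). It shows how a
// Projectron might be constructed and fed a labelled example through the update(...)
// method defined above. The LinearVectorKernel name and the PerceptronClass constants
// below are assumptions about the surrounding OpenIMAJ API and may differ in practice.
//
//   VectorKernel kernel = new LinearVectorKernel();    // assumed kernel implementation
//   Projectron learner = new Projectron(kernel, 0.01); // eta = 0.01
//   double[] x = { 1.0, 2.0 };
//   // yt is the true label; yt_prime is presumably the predicted label for this example
//   learner.update(x, PerceptronClass.TRUE, PerceptronClass.FALSE);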