/**
 * Copyright (c) 2011, The University of Southampton and the individual contributors.
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without modification,
 * are permitted provided that the following conditions are met:
 *
 *   * Redistributions of source code must retain the above copyright notice,
 *     this list of conditions and the following disclaimer.
 *
 *   * Redistributions in binary form must reproduce the above copyright notice,
 *     this list of conditions and the following disclaimer in the documentation
 *     and/or other materials provided with the distribution.
 *
 *   * Neither the name of the University of Southampton nor the names of its
 *     contributors may be used to endorse or promote products derived from this
 *     software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
 * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
 * ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
 * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
 * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */
package org.openimaj.workinprogress.optimisation;

import java.util.Arrays;
import java.util.Random;

import org.openimaj.data.DataSource;
import org.openimaj.data.DoubleArrayBackedDataSource;
import org.openimaj.workinprogress.optimisation.params.Parameters;
import org.openimaj.workinprogress.optimisation.params.VectorParameters;

/**
 * Simple mini-batch stochastic gradient descent over a {@link DataSource}.
 */
public class SGD<MODEL, DATATYPE, PTYPE extends Parameters<PTYPE>> {
	public int maxEpochs = 100;
	public int batchSize = 1;
	public LearningRate<PTYPE> learningRate;
	public MODEL model;
	public DifferentiableObjectiveFunction<MODEL, DATATYPE, PTYPE> fcn;

	public void train(DataSource<DATATYPE> data) {
		final DATATYPE[] batch = data.createTemporaryArray(batchSize);

		for (int e = 0; e < maxEpochs; e++) {
			for (int i = 0; i < data.size(); i += batchSize) {
				final int currentBatchStop = Math.min(data.size(), i + batchSize);
				final int currentBatchSize = currentBatchStop - i;
				data.getData(i, currentBatchStop, batch);

				// Accumulate the gradient over the current mini-batch
				final PTYPE grads = fcn.derivative(model, batch[0]);
				for (int j = 1; j < currentBatchSize; j++) {
					grads.addInplace(fcn.derivative(model, batch[j]));
				}

				// Scale by the learning rate and take a descent step
				grads.multiplyInplace(learningRate.getRate(e, i, grads));
				fcn.updateModel(model, grads);
			}
		}
	}

	/**
	 * Evaluate the objective for a single datum under the given model.
	 */
	public double value(MODEL model, DATATYPE data) {
		return fcn.value(model, data);
	}

	public static void main(String[] args) {
		// Synthetic data drawn from the line y = 0.3x + 20 with a little Gaussian noise
		final double[][] data = new double[1000][2];
		final Random rng = new Random();
		for (int i = 0; i < data.length; i++) {
			final double x = rng.nextDouble();
			data[i][0] = x;
			data[i][1] = 0.3 * x + 20 + (rng.nextGaussian() * 0.01);
		}
		final DoubleArrayBackedDataSource ds = new DoubleArrayBackedDataSource(data);

		final double[] model = { 0, 0 };
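
		// Least-squares objective for fitting the line y = m*x + c, with model = {m, c}:
		// value() returns the squared residual for a single sample {x, y}, derivative()
		// returns its gradient with respect to (m, c), and updateModel() applies the
		// (already learning-rate-scaled) step computed by SGD.train().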
		final DifferentiableObjectiveFunction<double[], double[], VectorParameters> fcn = new DifferentiableObjectiveFunction<double[], double[], VectorParameters>()
		{
			@Override
			public double value(double[] model, double[] data) {
				final double diff = data[1] - (model[0] * data[0] + model[1]);
				return diff * diff;
			}

			@Override
			public VectorParameters derivative(double[] model, double[] data) {
				final double[] der = {
						2 * data[0] * (-data[1] + model[0] * data[0] + model[1]),
						2 * (-data[1] + model[0] * data[0] + model[1])
				};

				return new VectorParameters(der);
			}

			@Override
			public void updateModel(double[] model, VectorParameters weights) {
				model[0] -= weights.vector[0];
				model[1] -= weights.vector[1];
			}
		};

		final SGD<double[], double[], VectorParameters> sgd = new SGD<double[], double[], VectorParameters>();
		sgd.model = model;
		sgd.fcn = fcn;
		sgd.learningRate = new StaticLearningRate<VectorParameters>(0.01);
		sgd.batchSize = 1;
		sgd.maxEpochs = 10;

		sgd.train(ds);

		System.out.println(Arrays.toString(model));
	}
}