/**
 * Copyright (c) 2011, The University of Southampton and the individual contributors.
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without modification,
 * are permitted provided that the following conditions are met:
 *
 *   *  Redistributions of source code must retain the above copyright notice,
 *      this list of conditions and the following disclaimer.
 *
 *   *  Redistributions in binary form must reproduce the above copyright notice,
 *      this list of conditions and the following disclaimer in the documentation
 *      and/or other materials provided with the distribution.
 *
 *   *  Neither the name of the University of Southampton nor the names of its
 *      contributors may be used to endorse or promote products derived from this
 *      software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
 * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
 * ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
 * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
 * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */
package org.openimaj.math.model;

import java.util.List;

import org.openimaj.util.pair.IndependentPair;

/**
 * Model of the mapping between pairs of integers, learned by least-squares
 * regression.
 * <p>
 * The model fits the parameters m and c of the line y = mx + c to the observed
 * data.
 * 
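 * A minimal usage sketch follows; the sample points are made up for
 * illustration, and the <code>IndependentPair.pair</code> convenience factory
 * is assumed to be available:
 * 
 * <pre>
 * {@code
 * List<IndependentPair<Integer, Integer>> data = new ArrayList<IndependentPair<Integer, Integer>>();
 * data.add(IndependentPair.pair(0, 1));
 * data.add(IndependentPair.pair(1, 3));
 * data.add(IndependentPair.pair(2, 5));
 * 
 * LeastSquaresLinearModel model = new LeastSquaresLinearModel();
 * model.estimate(data); // fits y = 2x + 1
 * int y = model.predict(3); // 7
 * }
 * </pre>
 * 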
 * @author Jonathon Hare (jsh2@ecs.soton.ac.uk)
 * 
 */
public class LeastSquaresLinearModel implements EstimatableModel<Integer, Integer> {
        private double c;
        private double m;
        private int nEstimates = 10;

        /**
         * Construct the model with the default minimum of two samples required for
         * estimation.
         */
        public LeastSquaresLinearModel() {
                this.nEstimates = 2;
        }

        /**
         * Construct the model
         * 
         * @param nEstimates
         *            minimum number of samples required for estimating the model
         *            when fitting (values below 2 are clamped to 2)
         */
        public LeastSquaresLinearModel(int nEstimates) {
                // A line needs at least two points, so enforce a minimum of 2.
                if (nEstimates < 2)
                        this.nEstimates = 2;
                else
                        this.nEstimates = nEstimates;
        }

        /**
         * Calculate the m and c of the line of best fit for the given data, using
         * standard vertical least-squares linear regression as outlined at
         * http://mathworld.wolfram.com/LeastSquaresFitting.html
         * 
         * @param data
         *            Observed data
         * @return true if a line was fitted; false if the data is degenerate (all
         *         x values identical)
         */
        @Override
        public boolean estimate(List<? extends IndependentPair<Integer, Integer>> data) {
                double sumXi = 0;
                double sumYi = 0;
                double sumXiXi = 0;
                double sumXiYi = 0;
                int n = 0;

                for (final IndependentPair<Integer, Integer> pair : data) {
                        final int xi = pair.firstObject();
                        final int yi = pair.secondObject();

                        sumXi += xi;
                        sumYi += yi;
                        sumXiXi += (xi * xi);
                        sumXiYi += xi * yi;

                        n++;
                }

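                // Closed-form solution of the least-squares normal equations (see the
                // MathWorld page referenced above):
                //   m = (n*sum(x*y) - sum(x)*sum(y)) / (n*sum(x^2) - sum(x)^2)
                //   c = (sum(y)*sum(x^2) - sum(x)*sum(x*y)) / (n*sum(x^2) - sum(x)^2)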
                final double denominator = n * sumXiXi - sumXi * sumXi;
                if (denominator == 0)
                        return false; // degenerate data (fewer than two distinct x values)

                c = (sumYi * sumXiXi - sumXi * sumXiYi) / denominator;
                m = (n * sumXiYi - sumXi * sumYi) / denominator;

                return true;
        }

        @Override
        public Integer predict(Integer data) {
                // Apply y = m*x + c and round to the nearest integer
                return (int) Math.round((m * data) + c);
        }

        @Override
        public int numItemsToEstimate() {
                return nEstimates;
        }

        @Override
        public LeastSquaresLinearModel clone() {
                final LeastSquaresLinearModel model = new LeastSquaresLinearModel(nEstimates);
                model.c = c;
                model.m = m;
                return model;
        }

        @Override
        public String toString() {
                return "Least Squares Fit: (m,c) = (" + m + "," + c + ")";
        }

        /**
         * Get the gradient (m in y=mx+c)
         * 
         * @return the gradient
         */
        public double getM() {
                return m;
        }

        /**
         * Get the offset (c in y=mx+c)
         * 
         * @return the offset
         */
        public double getC() {
                return c;
        }
}