/**
 * Copyright (c) 2011, The University of Southampton and the individual contributors.
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without modification,
 * are permitted provided that the following conditions are met:
 *
 *   *  Redistributions of source code must retain the above copyright notice,
 *      this list of conditions and the following disclaimer.
 *
 *   *  Redistributions in binary form must reproduce the above copyright notice,
 *      this list of conditions and the following disclaimer in the documentation
 *      and/or other materials provided with the distribution.
 *
 *   *  Neither the name of the University of Southampton nor the names of its
 *      contributors may be used to endorse or promote products derived from this
 *      software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
 * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
 * ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
 * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
 * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */
package org.openimaj.ml.linear.learner.loss;

import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;

import org.openimaj.math.matrix.CFMatrixUtils;

import gov.sandia.cognition.math.matrix.Matrix;
import gov.sandia.cognition.math.matrix.Vector;
import gov.sandia.cognition.math.matrix.mtj.SparseMatrixFactoryMTJ;
public class MatSquareLossFunction extends LossFunction {
	private static final Logger logger = LogManager.getLogger(MatSquareLossFunction.class);

	private SparseMatrixFactoryMTJ spf;

	public MatSquareLossFunction() {
		spf = SparseMatrixFactoryMTJ.INSTANCE;
	}
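
	/**
	 * Compute the gradient of the diagonal squared loss at W: column t of
	 * the returned matrix is row t of X scaled by the diagonal residual
	 * (X . W + bias - Y)_(t,t). A {@link RuntimeException} is thrown if any
	 * intermediate matrix contains an infinite value.
	 */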
	@Override
	public Matrix gradient(Matrix W) {
		final Matrix ret = W.clone();
		// Guard against numerical blow-up in the inputs
		if (CFMatrixUtils.containsInfinity(X)) {
			throw new RuntimeException("X contains an infinite value");
		}
		if (CFMatrixUtils.containsInfinity(W)) {
			throw new RuntimeException("W contains an infinite value");
		}
		final Matrix resid = CFMatrixUtils.fastdot(X, W);
		if (CFMatrixUtils.containsInfinity(resid)) {
			throw new RuntimeException("X . W contains an infinite value");
		}
		if (this.bias != null) {
			resid.plusEquals(this.bias);
		}
		// resid = X . W + bias - Y
		CFMatrixUtils.fastminusEquals(resid, Y);
		if (CFMatrixUtils.containsInfinity(resid)) {
			throw new RuntimeException("The residual contains an infinite value");
		}
		// Column t of the gradient is row t of X scaled by the diagonal residual
		for (int t = 0; t < resid.getNumColumns(); t++) {
			final Vector xcol = this.X.getRow(t).scale(resid.getElement(t, t)).clone();
			CFMatrixUtils.fastsetcol(ret, t, xcol);
		}
		return ret;
	}
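
	/**
	 * Evaluate the loss at W as the sum of squared diagonal entries of the
	 * residual X . W + bias - Y. When W is null, X itself is taken as the
	 * prediction.
	 */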
	@Override
	public double eval(Matrix W) {
		Matrix resid;
		if (W == null) {
			// No weight matrix given; treat the data matrix itself as the prediction
			resid = X.clone();
		} else {
			resid = CFMatrixUtils.fastdot(X, W);
		}
		final Matrix vnobias = resid.clone();
		if (this.bias != null) {
			resid.plusEquals(this.bias);
		}
		final Matrix v = resid.clone();
		resid.minusEquals(Y);
		double retval = 0;

		// Accumulate the squared residual along the diagonal, logging each term
		for (int t = 0; t < resid.getNumColumns(); t++) {
			final double loss = resid.getElement(t, t);
			retval += loss * loss;
			logger.debug(
					String.format(
							"yr=%d,y=%3.2f,v=%3.2f,v(no bias)=%2.5f,error=%2.5f,serror=%2.5f",
							t, Y.getElement(t, t), v.getElement(t, t), vnobias.getElement(t, t), loss, loss * loss));
		}
		return retval;
	}

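	/**
	 * The acceptance test for a backtracking line search in a
	 * proximal-gradient style optimiser. The proximal point is accepted when
	 * eval(prox) is at most eval(W) plus the inner product of grad with
	 * (prox - W) plus 0.5 * eta * ||prox - W||_F, i.e. the sufficient-decrease
	 * condition as implemented below.
	 */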
	@Override
	public boolean test_backtrack(Matrix W, Matrix grad, Matrix prox, double eta) {
		final Matrix tmp = prox.minus(W);
		final double evalW = eval(W);
		final double evalProx = eval(prox);
		final Matrix fastdotGradTmp = CFMatrixUtils.fastdot(grad.transpose(), tmp);
		final double normGradProx = CFMatrixUtils.sum(fastdotGradTmp);
		final double normTmp = 0.5 * eta * tmp.normFrobenius();
		return (evalProx <= evalW + normGradProx + normTmp);
	}

	@Override
	public boolean isMatrixLoss() {
		return true;
	}
}