/**
 * Copyright (c) 2011, The University of Southampton and the individual contributors.
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without modification,
 * are permitted provided that the following conditions are met:
 *
 *   *  Redistributions of source code must retain the above copyright notice,
 *      this list of conditions and the following disclaimer.
 *
 *   *  Redistributions in binary form must reproduce the above copyright notice,
 *      this list of conditions and the following disclaimer in the documentation
 *      and/or other materials provided with the distribution.
 *
 *   *  Neither the name of the University of Southampton nor the names of its
 *      contributors may be used to endorse or promote products derived from this
 *      software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
 * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
 * ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
 * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
 * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */
package org.openimaj.ml.linear.learner.loss;

import gov.sandia.cognition.math.matrix.Matrix;

import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;

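/**
 * A squared loss for a linear model <code>X * W + bias</code> against a target
 * matrix <code>Y</code> in which missing values are encoded as
 * {@link Double#NaN}; missing entries are ignored by both
 * {@link #eval(Matrix)} and {@link #gradient(Matrix)}.
 * <p>
 * A minimal usage sketch (assuming the <code>setX</code>, <code>setY</code> and
 * <code>setBias</code> setters inherited from {@link LossFunction}; the
 * matrices named here are hypothetical):
 *
 * <pre>
 * final SquareMissingLossFunction loss = new SquareMissingLossFunction();
 * loss.setX(X); // n x d data matrix
 * loss.setY(Y); // n x 1 targets, with NaN marking missing values
 * final double value = loss.eval(W);
 * final Matrix grad = loss.gradient(W);
 * </pre>
 */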
public class SquareMissingLossFunction extends LossFunction {
        private static final Logger logger = LogManager.getLogger(SquareMissingLossFunction.class);

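        /**
         * Compute the gradient of the squared loss with respect to <code>W</code>:
         * <code>X^T (X * W + bias - Y)</code> (the constant factor of 2 is dropped),
         * where the residual of any row whose target value is missing (NaN) is first
         * set to zero. Note that only the first column of <code>Y</code> is inspected
         * for missing values.
         *
         * @param W
         *            the current model parameters
         * @return the gradient with respect to <code>W</code>
         */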
        @Override
        public Matrix gradient(Matrix W) {
                // residual of the linear model: X * W + bias - Y
                final Matrix resid = X.times(W).minus(Y);
                if (this.bias != null)
                        resid.plusEquals(this.bias);
                // zero the residual of any row whose target is missing (NaN) so that
                // it contributes nothing to the gradient; only the first column of Y
                // is checked, i.e. a single-column Y is assumed here
                for (int r = 0; r < Y.getNumRows(); r++) {
                        final double yc = Y.getElement(r, 0);
                        if (Double.isNaN(yc)) {
                                resid.setElement(r, 0, 0);
                        }
                }
                return X.transpose().times(resid);
        }

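        /**
         * Evaluate the sum of squared residuals between <code>X * W + bias</code> and
         * <code>Y</code>, skipping any entries of <code>Y</code> that are missing
         * (NaN). If <code>W</code> is <code>null</code>, <code>X</code> itself (plus
         * the bias, if set) is compared against <code>Y</code>.
         *
         * @param W
         *            the model parameters, or <code>null</code> to compare
         *            <code>X</code> directly against <code>Y</code>
         * @return the sum of squared residuals over the non-missing entries
         */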
        @Override
        public double eval(Matrix W) {
                // predictions: X * W, or X itself when no parameters are supplied
                Matrix v;
                if (W == null) {
                        v = this.X;
                }
                else {
                        v = X.times(W);
                }
                final Matrix vWithoutBias = v.clone();
                if (this.bias != null) {
                        // plus() rather than plusEquals() so that this.X is not
                        // modified as a side effect when W is null
                        v = v.plus(this.bias);
                }
                // accumulate squared residuals over the non-missing entries of Y
                double sum = 0;
                for (int r = 0; r < Y.getNumRows(); r++) {
                        for (int c = 0; c < Y.getNumColumns(); c++) {
                                final double yr = Y.getElement(r, c);
                                if (!Double.isNaN(yr)) {
                                        final double val = v.getElement(r, c);
                                        final double valNoBias = vWithoutBias.getElement(r, c);
                                        final double delta = yr - val;
                                        logger.debug(
                                                        String.format(
                                                                        "r=%d,y=%3.2f,v=%3.2f,v(no bias)=%2.5f,delta=%2.5f",
                                                                        r, yr, val, valNoBias, delta
                                                                        )
                                                        );
                                        sum += delta * delta;
                                }
                        }
                }
                return sum;
        }

        @Override
        public boolean isMatrixLoss() {
                return false;
        }

}