/**
 * Copyright (c) 2011, The University of Southampton and the individual contributors.
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without modification,
 * are permitted provided that the following conditions are met:
 *
 *   * Redistributions of source code must retain the above copyright notice,
 *     this list of conditions and the following disclaimer.
 *
 *   * Redistributions in binary form must reproduce the above copyright notice,
 *     this list of conditions and the following disclaimer in the documentation
 *     and/or other materials provided with the distribution.
 *
 *   * Neither the name of the University of Southampton nor the names of its
 *     contributors may be used to endorse or promote products derived from this
 *     software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
 * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
 * ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
 * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
 * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
029 */ 030package org.openimaj.ml.linear.experiments.sinabill; 031 032import gov.sandia.cognition.math.matrix.Matrix; 033 034import java.io.File; 035import java.io.IOException; 036import java.util.ArrayList; 037import java.util.HashMap; 038import java.util.List; 039import java.util.Map; 040 041import org.openimaj.io.FileUtils; 042import org.openimaj.io.IOUtils; 043import org.openimaj.math.matrix.CFMatrixUtils; 044import org.openimaj.ml.linear.data.BillMatlabFileDataGenerator; 045import org.openimaj.ml.linear.data.BillMatlabFileDataGenerator.Mode; 046import org.openimaj.ml.linear.evaluation.BilinearEvaluator; 047import org.openimaj.ml.linear.evaluation.RootMeanSumLossEvaluator; 048import org.openimaj.ml.linear.learner.BilinearLearnerParameters; 049import org.openimaj.ml.linear.learner.BilinearSparseOnlineLearner; 050import org.openimaj.ml.linear.learner.init.HardCodedInitStrat; 051import org.openimaj.ml.linear.learner.init.SingleValueInitStrat; 052import org.openimaj.ml.linear.learner.init.SparseZerosInitStrategy; 053import org.openimaj.util.pair.Pair; 054 055public class StreamAustrianDampeningExperiments extends BilinearExperiment { 056 057 // private static final String BATCH_EXPERIMENT = 058 // "batchStreamLossExperiments/batch_1366231606223/experiment.log"; 059 private static final String BATCH_EXPERIMENT = "batchStreamLossExperiments/batch_1366820115090/experiment.log"; 060 061 @Override 062 public String getExperimentName() { 063 return "streamingDampeningExperiments"; 064 } 065 066 @Override 067 public void performExperiment() throws Exception { 068 069 final Map<Integer, Double> batchLosses = loadBatchLoss(); 070 final BilinearLearnerParameters params = new BilinearLearnerParameters(); 071 params.put(BilinearLearnerParameters.ETA0_U, 0.01); 072 params.put(BilinearLearnerParameters.ETA0_W, 0.01); 073 params.put(BilinearLearnerParameters.LAMBDA, 0.001); 074 params.put(BilinearLearnerParameters.LAMBDA_W, 0.006); 075 
params.put(BilinearLearnerParameters.BICONVEX_TOL, 0.01); 076 params.put(BilinearLearnerParameters.BICONVEX_MAXITER, 10); 077 params.put(BilinearLearnerParameters.BIAS, true); 078 params.put(BilinearLearnerParameters.ETA0_BIAS, 0.5); 079 params.put(BilinearLearnerParameters.WINITSTRAT, new SingleValueInitStrat(0.1)); 080 params.put(BilinearLearnerParameters.UINITSTRAT, new SparseZerosInitStrategy()); 081 final HardCodedInitStrat biasInitStrat = new HardCodedInitStrat(); 082 params.put(BilinearLearnerParameters.BIASINITSTRAT, biasInitStrat); 083 final BillMatlabFileDataGenerator bmfdg = new BillMatlabFileDataGenerator( 084 new File(MATLAB_DATA()), 085 98, 086 true 087 ); 088 prepareExperimentLog(params); 089 double dampening = 0.02d; 090 final double dampeningIncr = 0.1d; 091 final double dampeningMax = 0.021d; 092 final int maxItems = 15; 093 logger.debug( 094 String.format( 095 "Beggining dampening experiments: min=%2.5f,max=%2.5f,incr=%2.5f", 096 dampening, 097 dampeningMax, 098 dampeningIncr 099 100 )); 101 while (dampening < dampeningMax) { 102 params.put(BilinearLearnerParameters.DAMPENING, dampening); 103 logger.debug("Dampening is now: " + dampening); 104 final BilinearSparseOnlineLearner learner = new BilinearSparseOnlineLearner(params); 105 dampening += dampeningIncr; 106 int item = 0; 107 final BilinearEvaluator eval = new RootMeanSumLossEvaluator(); 108 eval.setLearner(learner); 109 bmfdg.setFold(-1, Mode.ALL); // go through all items in day order 110 boolean first = true; 111 while (true) { 112 final Pair<Matrix> next = bmfdg.generate(); 113 if (next == null) 114 break; 115 if (first) { 116 first = false; 117 biasInitStrat.setMatrix(next.secondObject()); 118 } 119 final List<Pair<Matrix>> asList = new ArrayList<Pair<Matrix>>(); 120 asList.add(next); 121 if (learner.getW() != null) { 122 if (!batchLosses.containsKey(item)) { 123 logger.debug(String.format("...No batch result found for: %d, done", item)); 124 break; 125 } 126 logger.debug("...Calculating 
regret for item" + item); 127 final double loss = eval.evaluate(asList); 128 logger.debug(String.format("... loss: %f", loss)); 129 final double batchloss = batchLosses.get(item); 130 logger.debug(String.format("... batch loss: %f", batchloss)); 131 logger.debug(String.format("... regret: %f", (loss - batchloss))); 132 } 133 if (item >= maxItems) 134 break; 135 learner.process(next.firstObject(), next.secondObject()); 136 final Matrix w = learner.getW(); 137 final Matrix u = learner.getU(); 138 logger.debug("W row sparcity: " + CFMatrixUtils.rowSparsity(w)); 139 logger.debug(String.format("W range: %2.5f -> %2.5f", CFMatrixUtils.min(w), CFMatrixUtils.max(w))); 140 logger.debug("U row sparcity: " + CFMatrixUtils.rowSparsity(u)); 141 logger.debug(String.format("U range: %2.5f -> %2.5f", CFMatrixUtils.min(u), CFMatrixUtils.max(u))); 142 143 logger.debug(String.format("... loss (post addition): %f", eval.evaluate(asList))); 144 logger.debug(String.format("Saving learner, Fold %d, Item %d", -1, item)); 145 final File learnerOut = new File(FOLD_ROOT(-1), String.format("learner_%d", item)); 146 IOUtils.writeBinary(learnerOut, learner); 147 148 item++; 149 } 150 151 } 152 } 153 154 private Map<Integer, Double> loadBatchLoss() throws IOException { 155 final String[] batchExperimentLines = FileUtils.readlines(new File( 156 DATA_ROOT(), 157 BATCH_EXPERIMENT 158 )); 159 int seenItems = 0; 160 final Map<Integer, Double> ret = new HashMap<Integer, Double>(); 161 for (final String line : batchExperimentLines) { 162 163 if (line.contains("New Item Seen: ")) { 164 seenItems = Integer.parseInt(line.split(":")[1].trim()); 165 } 166 167 if (line.contains("Loss:")) { 168 ret.put(seenItems, Double.parseDouble(line.split(":")[1].trim())); 169 } 170 } 171 return ret; 172 } 173 174 public static void main(String[] args) throws Exception { 175 final BilinearExperiment exp = new StreamAustrianDampeningExperiments(); 176 exp.performExperiment(); 177 } 178 179}