/**
 * Copyright (c) 2011, The University of Southampton and the individual contributors.
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without modification,
 * are permitted provided that the following conditions are met:
 *
 *   *  Redistributions of source code must retain the above copyright notice,
 *      this list of conditions and the following disclaimer.
 *
 *   *  Redistributions in binary form must reproduce the above copyright notice,
 *      this list of conditions and the following disclaimer in the documentation
 *      and/or other materials provided with the distribution.
 *
 *   *  Neither the name of the University of Southampton nor the names of its
 *      contributors may be used to endorse or promote products derived from this
 *      software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
 * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
 * ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
 * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
 * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */
package org.openimaj.ml.annotation.bayes;

import gov.sandia.cognition.learning.algorithm.IncrementalLearner;
import gov.sandia.cognition.learning.algorithm.bayes.VectorNaiveBayesCategorizer;
import gov.sandia.cognition.learning.algorithm.bayes.VectorNaiveBayesCategorizer.OnlineLearner;
import gov.sandia.cognition.learning.data.DefaultInputOutputPair;
import gov.sandia.cognition.learning.data.DefaultWeightedValueDiscriminant;
import gov.sandia.cognition.math.LogMath;
import gov.sandia.cognition.math.matrix.Vector;
import gov.sandia.cognition.math.matrix.VectorFactory;
import gov.sandia.cognition.statistics.distribution.UnivariateGaussian;
import gov.sandia.cognition.util.AbstractCloneableSerializable;

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Set;

import org.openimaj.feature.FeatureExtractor;
import org.openimaj.feature.FeatureVector;
import org.openimaj.feature.IdentityFeatureExtractor;
import org.openimaj.ml.annotation.Annotated;
import org.openimaj.ml.annotation.IncrementalAnnotator;
import org.openimaj.ml.annotation.ScoredAnnotation;

/**
 * Annotator based on a naive Bayes classifier. Internally, a
 * {@link VectorNaiveBayesCategorizer} performs the actual classification,
 * with each feature dimension modelled by an incrementally estimated
 * univariate Gaussian.
 * 
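 * A minimal usage sketch (the {@code DoubleFV} features, {@code String}
 * labels and {@code AnnotatedObject} wrapper below are illustrative
 * assumptions, not requirements of this class):
 * 
 * <pre>
 * {@code
 * NaiveBayesAnnotator<DoubleFV, String> ann =
 * 		NaiveBayesAnnotator.<DoubleFV, String> create(NaiveBayesAnnotator.Mode.MAXIMUM_LIKELIHOOD);
 * 
 * // incrementally train from labelled feature vectors
 * ann.train(AnnotatedObject.create(new DoubleFV(new double[] { 1, 2, 3 }), "cat"));
 * ann.train(AnnotatedObject.create(new DoubleFV(new double[] { 9, 8, 7 }), "dog"));
 * 
 * // predict a label for an unseen feature
 * List<ScoredAnnotation<String>> result = ann.annotate(new DoubleFV(new double[] { 1, 2, 2 }));
 * }
 * </pre>
 * 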
 * @author Jonathon Hare (jsh2@ecs.soton.ac.uk)
 * 
 * @param <OBJECT>
 *            Type of object being annotated
 * @param <ANNOTATION>
 *            Type of annotation
 */
public class NaiveBayesAnnotator<OBJECT, ANNOTATION>
		extends IncrementalAnnotator<OBJECT, ANNOTATION>
{
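	/**
	 * A univariate Gaussian PDF that also carries the incremental sufficient
	 * statistics from which its mean and variance are estimated, so that it
	 * can be updated online.
	 */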
	private static class PDF extends UnivariateGaussian.PDF {
		private static final long serialVersionUID = 1L;

		private SufficientStatistic target;
	}

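	/**
	 * An {@link IncrementalLearner} that updates a {@link PDF} one sample (or
	 * batch of samples) at a time, using a
	 * {@link UnivariateGaussian.IncrementalEstimator} to maintain the
	 * sufficient statistics.
	 */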
	private static class PDFLearner extends AbstractCloneableSerializable
			implements IncrementalLearner<Double, PDF>
	{
		private static final long serialVersionUID = 1L;

		final UnivariateGaussian.IncrementalEstimator distrLearner = new UnivariateGaussian.IncrementalEstimator();

		@Override
		public PDF createInitialLearnedObject() {
			final PDF pdf = new PDF();
			pdf.target = distrLearner.createInitialLearnedObject();
			return pdf;
		}

		@Override
		public void update(PDF pdf, Double data) {
			// update the sufficient statistics with the new sample, then
			// mirror the new estimates into the PDF's parameters
			distrLearner.update(pdf.target, data);

			pdf.setMean(pdf.target.getMean());
			pdf.setVariance(pdf.target.getVariance());
		}

		@Override
		public void update(PDF pdf, Iterable<? extends Double> data) {
			// as above, but consuming a batch of samples
			distrLearner.update(pdf.target, data);

			pdf.setMean(pdf.target.getMean());
			pdf.setVariance(pdf.target.getVariance());
		}
	}

	/**
	 * Modes of operation for prediction using the {@link NaiveBayesAnnotator}.
	 * 
	 * @author Jonathon Hare (jsh2@ecs.soton.ac.uk)
	 * 
	 */
	public static enum Mode {
		/**
		 * The posterior probability of every class is computed, and the
		 * returned list contains every annotation together with its
		 * probability, in decreasing order of probability.
		 */
		ALL {
			@Override
			protected <ANNOTATION> List<ScoredAnnotation<ANNOTATION>>
					getAnnotations(VectorNaiveBayesCategorizer<ANNOTATION, PDF> categorizer, Vector vec)
			{
				final List<ScoredAnnotation<ANNOTATION>> results = new ArrayList<ScoredAnnotation<ANNOTATION>>();

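				// Accumulate the per-category log scores in log-space
				// (log-sum-exp via LogMath.add) so the normalising constant
				// can be computed without numerical underflow.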
				double logDenominator = Double.NEGATIVE_INFINITY;
				for (final ANNOTATION category : categorizer.getCategories()) {
					final double logPosterior = categorizer.computeLogPosterior(vec, category);

					logDenominator = LogMath.add(logDenominator, logPosterior);
					results.add(new ScoredAnnotation<ANNOTATION>(category, (float) logPosterior));
				}

				// convert each log score into a normalised probability
				for (final ScoredAnnotation<ANNOTATION> scored : results)
					scored.confidence = (float) Math.exp(scored.confidence - logDenominator);

				Collections.sort(results, Collections.reverseOrder());

				return results;
			}
		},
		/**
		 * Only the single most likely annotation will be returned.
		 */
		MAXIMUM_LIKELIHOOD {
			@Override
			protected <ANNOTATION> List<ScoredAnnotation<ANNOTATION>>
					getAnnotations(VectorNaiveBayesCategorizer<ANNOTATION, PDF> categorizer, Vector vec)
			{
				final List<ScoredAnnotation<ANNOTATION>> results = new ArrayList<ScoredAnnotation<ANNOTATION>>();

				final DefaultWeightedValueDiscriminant<ANNOTATION> r = categorizer.evaluateWithDiscriminant(vec);

				results.add(new ScoredAnnotation<ANNOTATION>(r.getValue(), (float) Math.exp(r.getWeight())));

				return results;
			}
		};

		protected abstract <ANNOTATION> List<ScoredAnnotation<ANNOTATION>>
				getAnnotations(VectorNaiveBayesCategorizer<ANNOTATION, PDF> categorizer, Vector vec);
	}

	private VectorNaiveBayesCategorizer<ANNOTATION, PDF> categorizer;
	private OnlineLearner<ANNOTATION, PDF> learner;
	private final Mode mode;
	private FeatureExtractor<? extends FeatureVector, OBJECT> extractor;

	/**
	 * Construct a {@link NaiveBayesAnnotator} with the given feature extractor
	 * and mode of operation.
	 * 
	 * @param extractor
	 *            the feature extractor
	 * @param mode
	 *            the mode of operation during prediction
	 */
	public NaiveBayesAnnotator(FeatureExtractor<? extends FeatureVector, OBJECT> extractor, Mode mode) {
		this.extractor = extractor;
		this.mode = mode;
		reset();
	}

	/**
	 * Convenience method to construct a {@link NaiveBayesAnnotator} in the
	 * case where the raw objects are themselves the features, and thus an
	 * {@link IdentityFeatureExtractor} can be used. This method is equivalent
	 * to calling
	 * {@code new NaiveBayesAnnotator<OBJECT, ANNOTATION>(new IdentityFeatureExtractor<OBJECT>(), mode)}.
	 * 
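	 * For example (assuming, purely for illustration, {@code DoubleFV}
	 * features and {@code String} annotations):
	 * 
	 * <pre>
	 * {@code
	 * NaiveBayesAnnotator<DoubleFV, String> annotator =
	 * 		NaiveBayesAnnotator.<DoubleFV, String> create(NaiveBayesAnnotator.Mode.ALL);
	 * }
	 * </pre>
	 * 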
	 * @param mode
	 *            the mode of operation during prediction
	 * @return the new {@link NaiveBayesAnnotator}
	 */
	public static <OBJECT extends FeatureVector, ANNOTATION> NaiveBayesAnnotator<OBJECT, ANNOTATION> create(Mode mode) {
		return new NaiveBayesAnnotator<OBJECT, ANNOTATION>(new IdentityFeatureExtractor<OBJECT>(), mode);
	}

	@Override
	public void train(Annotated<OBJECT, ANNOTATION> annotated) {
		final FeatureVector feature = extractor.extractFeature(annotated.getObject());
		final Vector vec = VectorFactory.getDefault().copyArray(feature.asDoubleVector());

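		// the same feature vector provides one training example for each of
		// the object's annotations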
		for (final ANNOTATION ann : annotated.getAnnotations()) {
			learner.update(categorizer, new DefaultInputOutputPair<Vector, ANNOTATION>(vec, ann));
		}
	}

	@Override
	public void reset() {
		learner = new VectorNaiveBayesCategorizer.OnlineLearner<ANNOTATION, PDF>();
		learner.setDistributionLearner(new PDFLearner());
		categorizer = learner.createInitialLearnedObject();
	}

	@Override
	public Set<ANNOTATION> getAnnotations() {
		return categorizer.getCategories();
	}

	@Override
	public List<ScoredAnnotation<ANNOTATION>> annotate(OBJECT object) {
		final FeatureVector feature = extractor.extractFeature(object);
		final Vector vec = VectorFactory.getDefault().copyArray(feature.asDoubleVector());

		return mode.getAnnotations(categorizer, vec);
	}
}