/**
 * Copyright (c) 2011, The University of Southampton and the individual contributors.
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without modification,
 * are permitted provided that the following conditions are met:
 *
 *   *  Redistributions of source code must retain the above copyright notice,
 *      this list of conditions and the following disclaimer.
 *
 *   *  Redistributions in binary form must reproduce the above copyright notice,
 *      this list of conditions and the following disclaimer in the documentation
 *      and/or other materials provided with the distribution.
 *
 *   *  Neither the name of the University of Southampton nor the names of its
 *      contributors may be used to endorse or promote products derived from this
 *      software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
 * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
 * ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
 * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
 * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */
package org.openimaj.hadoop.tools.twitter.token.outputmode.stats;

import java.util.HashMap;
import java.util.Map.Entry;

import org.apache.hadoop.fs.Path;
import org.openimaj.hadoop.mapreduce.MultiStagedJob;
import org.openimaj.hadoop.tools.HadoopToolsUtil;
import org.openimaj.hadoop.tools.twitter.HadoopTwitterTokenToolOptions;
import org.openimaj.hadoop.tools.twitter.token.mode.TwitterTokenMode;
import org.openimaj.hadoop.tools.twitter.token.mode.dfidf.CountWordsAcrossTimeperiod;
import org.openimaj.hadoop.tools.twitter.token.outputmode.TwitterTokenOutputMode;
import org.openimaj.hadoop.tools.twitter.token.outputmode.sparsecsv.WordIndex;
import org.openimaj.util.pair.IndependentPair;
public class StatsOutputMode extends TwitterTokenOutputMode {

	private MultiStagedJob stages;

	@Override
	public void write(HadoopTwitterTokenToolOptions opts, TwitterTokenMode completedMode) throws Exception {
		this.stages = new MultiStagedJob(
				HadoopToolsUtil.getInputPaths(completedMode.finalOutput(opts), CountWordsAcrossTimeperiod.WORDCOUNT_DIR),
				HadoopToolsUtil.getOutputPath(outputPath),
				opts.getArgs()
		);
		// Single staged job: write the word index (one word per line) from the word counts
		new WordIndex().stage(stages);
		final Path wordIndex = stages.runAll();

		// Read back the word/count lines and accumulate per-word statistics
		HashMap<String, IndependentPair<Long, Long>> wordCountLines = WordIndex.readWordCountLines(wordIndex.toString(), "");
		StatsWordMatch matches = new StatsWordMatch();
		for (Entry<String, IndependentPair<Long, Long>> entry : wordCountLines.entrySet()) {
			String word = entry.getKey();
			IndependentPair<Long, Long> countLine = entry.getValue();
			Long count = countLine.firstObject();
			matches.updateStats(word, count);
		}

		// Report the accumulated word statistics
		System.out.println(matches);
	}

}