/**
 * Copyright (c) 2011, The University of Southampton and the individual contributors.
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without modification,
 * are permitted provided that the following conditions are met:
 *
 *   *  Redistributions of source code must retain the above copyright notice,
 *      this list of conditions and the following disclaimer.
 *
 *   *  Redistributions in binary form must reproduce the above copyright notice,
 *      this list of conditions and the following disclaimer in the documentation
 *      and/or other materials provided with the distribution.
 *
 *   *  Neither the name of the University of Southampton nor the names of its
 *      contributors may be used to endorse or promote products derived from this
 *      software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
 * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
 * ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
 * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
 * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */
package org.openimaj.hadoop.tools.twitter.token.outputmode.correlation;

import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.Map;

import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.joda.time.DateTime;
import org.kohsuke.args4j.Option;
import org.openimaj.hadoop.mapreduce.MultiStagedJob;
import org.openimaj.hadoop.tools.HadoopToolsUtil;
import org.openimaj.hadoop.tools.twitter.HadoopTwitterTokenToolOptions;
import org.openimaj.hadoop.tools.twitter.token.mode.TwitterTokenMode;
import org.openimaj.hadoop.tools.twitter.token.mode.dfidf.CountTweetsInTimeperiod;
import org.openimaj.hadoop.tools.twitter.token.mode.dfidf.CountWordsAcrossTimeperiod;
import org.openimaj.hadoop.tools.twitter.token.outputmode.TwitterTokenOutputMode;
import org.openimaj.hadoop.tools.twitter.token.outputmode.sparsecsv.TimeIndex;
import org.openimaj.io.IOUtils;
import org.openimaj.twitter.finance.YahooFinanceData;
import org.openimaj.util.pair.IndependentPair;

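/**
 * An output mode that correlates the time series of word occurrences with
 * stock-price data fetched from Yahoo Finance (currently hard-coded to the
 * AAPL ticker) over the time period spanned by the counted tweets. The
 * finance data is written beneath the output directory, and two map-reduce
 * stages are then run: one correlating each word's time series against the
 * prices, and one sorting the resulting correlations, filtered by the
 * maximum p-value option where set.
 */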
public class CorrelationOutputMode extends TwitterTokenOutputMode {

	@Option(name = "--max-p-value", aliases = "-maxp", required = false, usage = "The maximum p-value of correlations to retain")
	double maxp = -1;

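	/**
	 * Write the correlation output: determine the time period covered by the
	 * counted tweets, fetch the matching finance data, persist it to
	 * {@code <output>/financedata}, and queue the correlation and sort stages.
	 */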
	@Override
	public void write(HadoopTwitterTokenToolOptions opts, TwitterTokenMode completedMode) throws Exception {
		// Work out the time period covered by the counted tweets
		final Path[] paths = HadoopToolsUtil.getInputPaths(completedMode.finalOutput(opts),
				CountTweetsInTimeperiod.TIMECOUNT_DIR);
		final IndependentPair<Long, Long> startend = readBeginningEndTime(paths, opts);
		// Get the Yahoo finance data (hard-coded to AAPL) for the same time period
		final YahooFinanceData finance = new YahooFinanceData("AAPL", new DateTime(startend.firstObject()), new DateTime(
				startend.secondObject()));
		final Map<String, double[]> timeperiodFinance = finance.results(); // (also forces the remote data to be fetched)
		// Write the finance data beneath the output directory so the correlation stage can read it
		final String financeOut = outputPath + "/financedata";
		final Path p = HadoopToolsUtil.getOutputPath(financeOut);
		final FileSystem fs = HadoopToolsUtil.getFileSystem(p);
		final FSDataOutputStream os = fs.create(p);
		IOUtils.writeASCII(os, finance);
		os.close();
		// Correlate each word's time series with the finance data, then sort the results
		final MultiStagedJob stages = new MultiStagedJob(
				HadoopToolsUtil.getInputPaths(completedMode.finalOutput(opts), CountWordsAcrossTimeperiod.WORDCOUNT_DIR),
				HadoopToolsUtil.getOutputPath(outputPath),
				opts.getArgs()
				);
		stages.queueStage(new CorrelateWordTimeSeries(financeOut, startend));
		stages.queueStage(new CorrelateWordSort(maxp));
		stages.runAll();
	}

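	/**
	 * Runs the {@link TimeIndex} stage over the given counted-tweet paths and
	 * returns the first and last time-period keys of the resulting index,
	 * i.e. the beginning and end of the data's time span.
	 */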
	private IndependentPair<Long, Long> readBeginningEndTime(Path[] paths, HadoopTwitterTokenToolOptions opts)
			throws Exception
	{
		final MultiStagedJob stages = new MultiStagedJob(
				paths,
				HadoopToolsUtil.getOutputPath(outputPath),
				opts.getArgs()
				);
		stages.queueStage(new TimeIndex().stage());
		stages.runAll();
		// The index keys are time periods in insertion order; take the first and last
		final LinkedHashMap<Long, IndependentPair<Long, Long>> tindex = TimeIndex.readTimeCountLines(outputPath);
		final Iterator<Long> ks = tindex.keySet().iterator();
		final long first = ks.next();
		long last = first;
		while (ks.hasNext()) {
			last = ks.next();
		}
		return IndependentPair.pair(first, last);
	}
}