/**
 * Copyright (c) 2011, The University of Southampton and the individual contributors.
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without modification,
 * are permitted provided that the following conditions are met:
 *
 *   * 	Redistributions of source code must retain the above copyright notice,
 * 	this list of conditions and the following disclaimer.
 *
 *   *	Redistributions in binary form must reproduce the above copyright notice,
 * 	this list of conditions and the following disclaimer in the documentation
 * 	and/or other materials provided with the distribution.
 *
 *   *	Neither the name of the University of Southampton nor the names of its
 * 	contributors may be used to endorse or promote products derived from this
 * 	software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
 * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
 * ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
 * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
 * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
029 */ 030package org.openimaj.hadoop.tools.twitter.token.mode.pointwisemi; 031 032import java.io.BufferedReader; 033import java.io.File; 034import java.io.IOException; 035import java.io.InputStreamReader; 036 037import org.apache.hadoop.fs.FSDataInputStream; 038import org.apache.hadoop.fs.FileSystem; 039import org.apache.hadoop.fs.Path; 040import org.kohsuke.args4j.Option; 041import org.openimaj.hadoop.mapreduce.MultiStagedJob; 042import org.openimaj.hadoop.tools.HadoopToolsUtil; 043import org.openimaj.hadoop.tools.twitter.HadoopTwitterTokenToolOptions; 044import org.openimaj.hadoop.tools.twitter.token.mode.TwitterTokenMode; 045import org.openimaj.hadoop.tools.twitter.token.mode.pointwisemi.count.PairMutualInformation; 046import org.openimaj.hadoop.tools.twitter.token.mode.pointwisemi.sort.PMIPairSort; 047 048/** 049 * Perform DFIDF and output such that each timeslot is a instance and each word a feature 050 * @author Sina Samangooei (ss@ecs.soton.ac.uk) 051 * 052 */ 053public class PairwiseMutualInformationMode implements TwitterTokenMode { 054 055 private MultiStagedJob stages; 056 private String[] fstage; 057 @Option(name="--time-delta", aliases="-t", required=false, usage="The length of a time window in minutes (defaults to -1, i.e. 
not used, one time period)", metaVar="STRING") 058 private long timeDelta = -1; 059 @Option(name="--min-p-value", aliases="-minp", required=false, usage="The minimum PMI value") 060 double minp = 0; 061 @Option(name="--min-pair-count", aliases="-minpc", required=false, usage="The minimum number of times a pair must occur") 062 int minPairCount = 0; 063 064 @Override 065 public void perform(final HadoopTwitterTokenToolOptions opts) throws Exception { 066 Path outpath = HadoopToolsUtil.getOutputPath(opts); 067 this.stages = new MultiStagedJob(HadoopToolsUtil.getInputPaths(opts),outpath,opts.getArgs()); 068 stages.queueStage(new PairMutualInformation(opts.getNonHadoopArgs(),timeDelta)); 069 stages.queueStage(new PMIPairSort(minp, minPairCount, outpath)); 070 stages.runAll(); 071 } 072 073 @Override 074 public String[] finalOutput(HadoopTwitterTokenToolOptions opts) throws Exception { 075 return this.fstage; 076 } 077 078 public static BufferedReader sortedPMIReader(File outputLocation) throws IOException { 079 Path path = HadoopToolsUtil.getInputPaths(outputLocation.getAbsolutePath() + Path.SEPARATOR + PMIPairSort.PMI_NAME)[0]; 080 FileSystem fs = HadoopToolsUtil.getFileSystem(path); 081 FSDataInputStream is = fs.open(path); 082 return new BufferedReader(new InputStreamReader(is,"UTF-8")); 083 } 084 085}