/**
 * Copyright (c) 2011, The University of Southampton and the individual contributors.
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without modification,
 * are permitted provided that the following conditions are met:
 *
 *   *  Redistributions of source code must retain the above copyright notice,
 *      this list of conditions and the following disclaimer.
 *
 *   *  Redistributions in binary form must reproduce the above copyright notice,
 *      this list of conditions and the following disclaimer in the documentation
 *      and/or other materials provided with the distribution.
 *
 *   *  Neither the name of the University of Southampton nor the names of its
 *      contributors may be used to endorse or promote products derived from this
 *      software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
 * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
 * ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
 * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
 * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */
package org.openimaj.hadoop.tools.twitter.token.outputmode.timeseries;

import java.io.ByteArrayOutputStream;
import java.io.DataInput;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;
import org.openimaj.hadoop.tools.twitter.utils.WordDFIDF;
import org.openimaj.io.IOUtils;
import org.openimaj.io.wrappers.ReadableListBinary;
/**
 * Given a list of configured words, emits the DF-IDF time series entries for
 * only those words.
 *
 * @author Sina Samangooei (ss@ecs.soton.ac.uk)
 */
public class SpecificWordSelectionMapper extends Mapper<Text, BytesWritable, Text, BytesWritable> {

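	/**
	 * The words whose time series should be emitted; loaded lazily from the
	 * job configuration in {@link #setup}.
	 */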
	private static List<String> wordlist;

	@Override
	protected void setup(Mapper<Text, BytesWritable, Text, BytesWritable>.Context context) throws IOException, InterruptedException {
		load(context);
	}

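	/**
	 * Lazily reads the comma-separated list of words to emit from the job
	 * configuration under {@link SpecificWordStageProvider#WORD_TIME_SERIES}.
	 */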
	private static void load(Mapper<Text, BytesWritable, Text, BytesWritable>.Context context) {
		if (wordlist == null) {
			wordlist = Arrays.asList(context.getConfiguration().getStrings(SpecificWordStageProvider.WORD_TIME_SERIES));
		}
	}

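	/**
	 * If the word held in the key is one of the configured words, deserialises
	 * the list of {@link WordDFIDF} entries in the value and writes each entry
	 * back out individually, keyed by the word.
	 */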
	@Override
	protected void map(final Text key, BytesWritable value, final Mapper<Text, BytesWritable, Text, BytesWritable>.Context context) throws IOException, InterruptedException {
		if (wordlist.contains(key.toString())) {
			IOUtils.deserialize(value.getBytes(), new ReadableListBinary<Object>(new ArrayList<Object>()) {
				@Override
				protected Object readValue(DataInput in) throws IOException {
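					// Read a single time-period entry and immediately re-serialise it,
					// so each period reaches the reducer as its own record.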
					WordDFIDF idf = new WordDFIDF();
					idf.readBinary(in);
					try {
						ByteArrayOutputStream baos = new ByteArrayOutputStream();
						IOUtils.writeBinary(baos, idf);
						context.write(key, new BytesWritable(baos.toByteArray()));
					} catch (InterruptedException e) {
						throw new IOException(e);
					}
					return NullWritable.get();
				}
			});
		}
	}
}