/**
 * Copyright (c) 2011, The University of Southampton and the individual contributors.
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without modification,
 * are permitted provided that the following conditions are met:
 *
 *   * Redistributions of source code must retain the above copyright notice,
 *     this list of conditions and the following disclaimer.
 *
 *   * Redistributions in binary form must reproduce the above copyright notice,
 *     this list of conditions and the following disclaimer in the documentation
 *     and/or other materials provided with the distribution.
 *
 *   * Neither the name of the University of Southampton nor the names of its
 *     contributors may be used to endorse or promote products derived from this
 *     software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
 * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
 * ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
 * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
 * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */
package org.openimaj.hadoop.sequencefile;

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.io.SequenceFile.CompressionType;
import org.apache.hadoop.io.SequenceFile.Metadata;
import org.apache.hadoop.io.compress.CompressionCodec;
import org.apache.hadoop.io.compress.DefaultCodec;
import org.apache.hadoop.mapreduce.RecordWriter;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.hadoop.mapreduce.lib.output.SequenceFileOutputFormat;
import org.apache.hadoop.util.ReflectionUtils;

/**
 * Output format for an extended {@link SequenceFile} that includes
 * additional metadata in the file header. The metadata is provided
 * through a {@link MetadataConfiguration} object which is constructed
 * using information stored in the Hadoop {@link Configuration}.
 *
 * @author Jonathon Hare (jsh2@ecs.soton.ac.uk)
 *
 * @param <K> Key type of the {@link SequenceFile}
 * @param <V> Value type of the {@link SequenceFile}
 */
public class MetadataSequenceFileOutputFormat<K, V> extends SequenceFileOutputFormat<K, V> {
	@Override
	public RecordWriter<K, V> getRecordWriter(TaskAttemptContext context) throws IOException, InterruptedException {
		Configuration conf = context.getConfiguration();

		CompressionCodec codec = null;
		CompressionType compressionType = CompressionType.NONE;
		if (getCompressOutput(context)) {
			// find the kind of compression to do
			compressionType = getOutputCompressionType(context);

			// find the right codec
			Class<?> codecClass = getOutputCompressorClass(context, DefaultCodec.class);
			codec = (CompressionCodec) ReflectionUtils.newInstance(codecClass, conf);
		}

		// get the path of the temporary output file
		Path file = getDefaultWorkFile(context, "");
		FileSystem fs = file.getFileSystem(conf);

		// recover the metadata stored in the job configuration; it is passed
		// to the writer so it ends up in the header of the new SequenceFile
		Metadata md = MetadataConfiguration.getMetadata(conf);

		final SequenceFile.Writer out =
				SequenceFile.createWriter(fs, conf, file,
						context.getOutputKeyClass(),
						context.getOutputValueClass(),
						compressionType,
						codec,
						context, md);

		return new RecordWriter<K, V>() {
			@Override
			public void write(K key, V value) throws IOException {
				out.append(key, value);
			}

			@Override
			public void close(TaskAttemptContext context) throws IOException {
				out.close();
			}
		};
	}
}
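
// Illustrative sketch, not part of the original source: a minimal driver
// showing how this output format might be wired into a MapReduce job. It
// assumes a MetadataConfiguration.setMetadata(Map<String, String>,
// Configuration) helper exists as the counterpart of the
// getMetadata(Configuration) call used above; verify this against the real
// MetadataConfiguration class before relying on it. The output path and
// key/value types are placeholders, and fully-qualified names are used so
// the import list above is left untouched.
class MetadataSequenceFileOutputFormatExample {
	public static void main(String[] args) throws Exception {
		Configuration conf = new Configuration();

		// store string metadata in the configuration; getRecordWriter()
		// above recovers it with MetadataConfiguration.getMetadata(conf)
		// and embeds it in the SequenceFile header
		java.util.Map<String, String> metadata = new java.util.HashMap<String, String>();
		metadata.put("source", "example-job");
		MetadataConfiguration.setMetadata(metadata, conf); // assumed helper

		org.apache.hadoop.mapreduce.Job job =
				new org.apache.hadoop.mapreduce.Job(conf, "metadata-sequencefile-example");
		job.setJarByClass(MetadataSequenceFileOutputFormatExample.class);
		job.setOutputFormatClass(MetadataSequenceFileOutputFormat.class);
		job.setOutputKeyClass(org.apache.hadoop.io.Text.class);
		job.setOutputValueClass(org.apache.hadoop.io.BytesWritable.class);
		org.apache.hadoop.mapreduce.lib.output.FileOutputFormat.setOutputPath(
				job, new Path("/tmp/metadata-seqfile-example"));

		System.exit(job.waitForCompletion(true) ? 0 : 1);
	}
}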