
Commit 486ef2b

write part files directly into the target directory on S3 file systems
1 parent 428c378

File tree

1 file changed: 118 additions, 0 deletions

@@ -0,0 +1,118 @@
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.bc.calvalus.processing.beam;

import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.io.SequenceFile.CompressionType;
import org.apache.hadoop.io.compress.CompressionCodec;
import org.apache.hadoop.io.compress.DefaultCodec;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.JobContext;
import org.apache.hadoop.mapreduce.OutputFormat;
import org.apache.hadoop.mapreduce.RecordWriter;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.util.ReflectionUtils;

import java.io.IOException;

/**
 * An {@link OutputFormat} that writes {@link SequenceFile}s. Part files are written
 * directly into the target directory rather than a temporary work directory
 * (see {@link S3aCompatibleOutputFormat}), so the output is suited to S3 file systems,
 * where directory renames are expensive.
 */
@InterfaceAudience.Public
@InterfaceStability.Stable
public class S3aCompatibleSequenceFileOutputFormat<K, V> extends S3aCompatibleOutputFormat<K, V> {

    protected SequenceFile.Writer getSequenceWriter(TaskAttemptContext context,
                                                    Class<?> keyClass, Class<?> valueClass)
            throws IOException {
        Configuration conf = context.getConfiguration();

        CompressionCodec codec = null;
        CompressionType compressionType = CompressionType.NONE;
        if (getCompressOutput(context)) {
            // find the kind of compression to do
            compressionType = getOutputCompressionType(context);
            // find the right codec
            Class<?> codecClass = getOutputCompressorClass(context, DefaultCodec.class);
            codec = (CompressionCodec) ReflectionUtils.newInstance(codecClass, conf);
        }
        // get the path of the output file; the S3a-compatible base class resolves it
        // directly inside the target directory rather than a temporary work directory
        Path file = getDefaultWorkFile(context, "");
        FileSystem fs = file.getFileSystem(conf);
        return SequenceFile.createWriter(fs, conf, file,
                                         keyClass,
                                         valueClass,
                                         compressionType,
                                         codec,
                                         context);
    }

    public RecordWriter<K, V> getRecordWriter(TaskAttemptContext context)
            throws IOException, InterruptedException {
        final SequenceFile.Writer out = getSequenceWriter(context,
                context.getOutputKeyClass(), context.getOutputValueClass());

        return new RecordWriter<K, V>() {

            public void write(K key, V value) throws IOException {
                out.append(key, value);
            }

            public void close(TaskAttemptContext context) throws IOException {
                out.close();
            }
        };
    }

    /**
     * Get the {@link CompressionType} for the output {@link SequenceFile}.
     * @param job the {@link JobContext}
     * @return the {@link CompressionType} for the output {@link SequenceFile},
     *         defaulting to {@link CompressionType#RECORD}
     */
    public static CompressionType getOutputCompressionType(JobContext job) {
        String val = job.getConfiguration().get(FileOutputFormat.COMPRESS_TYPE,
                                                CompressionType.RECORD.toString());
        return CompressionType.valueOf(val);
    }

    /**
     * Set the {@link CompressionType} for the output {@link SequenceFile}.
     * @param job the {@link Job} to modify
     * @param style the {@link CompressionType} for the output {@link SequenceFile}
     */
    public static void setOutputCompressionType(Job job, CompressionType style) {
        setCompressOutput(job, true);
        job.getConfiguration().set(FileOutputFormat.COMPRESS_TYPE, style.toString());
    }
}
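
For orientation, below is a minimal usage sketch, not part of the commit, showing how a Hadoop job could be pointed at this output format. It assumes that S3aCompatibleOutputFormat (not shown in this diff) is a FileOutputFormat subclass, as the calls to getDefaultWorkFile, getCompressOutput and setCompressOutput above suggest; the helper class S3aOutputConfigSketch, the Text/BytesWritable key and value types and the s3a target path are illustrative only.

// Sketch only: wires a job to the new output format under the assumptions stated above.
package com.bc.calvalus.processing.beam;

import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.io.SequenceFile.CompressionType;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

public class S3aOutputConfigSketch {

    /** Configures a job so SequenceFile part files are written straight into the target directory. */
    public static void configureOutput(Job job, String targetDir) {
        job.setOutputFormatClass(S3aCompatibleSequenceFileOutputFormat.class);
        job.setOutputKeyClass(Text.class);              // illustrative key type
        job.setOutputValueClass(BytesWritable.class);   // illustrative value type

        // Record-level compression via the static helper defined in the commit.
        S3aCompatibleSequenceFileOutputFormat.setOutputCompressionType(job, CompressionType.RECORD);

        // Hypothetical target directory, e.g. "s3a://my-bucket/calvalus/output".
        FileOutputFormat.setOutputPath(job, new Path(targetDir));
    }
}

Because the part files land directly in the target directory, a job configured this way does not depend on a commit-by-rename step when writing to S3 file systems, which is the point of this commit.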
