1 | |
|
2 | |
|
3 | |
|
4 | |
|
5 | |
|
6 | |
|
7 | |
|
8 | |
|
9 | |
|
10 | |
|
11 | |
|
12 | |
|
13 | |
|
14 | |
|
15 | |
|
16 | |
|
17 | |
|
18 | |
|
19 | |
package org.apache.giraph.io.internal; |
20 | |
|
21 | |
import org.apache.giraph.io.EdgeInputFormat;
import org.apache.giraph.io.EdgeReader;
import org.apache.giraph.job.HadoopUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.io.WritableComparable;
import org.apache.hadoop.mapreduce.InputSplit;
import org.apache.hadoop.mapreduce.JobContext;
import org.apache.hadoop.mapreduce.TaskAttemptContext;

import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.util.List;
import java.util.Objects;
35 | |
|
36 | |
|
37 | |
|
38 | |
|
39 | |
|
40 | |
|
41 | |
|
42 | |
|
43 | |
|
44 | |
|
45 | |
|
46 | |
|
47 | |
public class WrappedEdgeInputFormat<I extends WritableComparable, |
48 | |
E extends Writable> extends EdgeInputFormat<I, E> { |
49 | |
|
50 | |
private EdgeInputFormat<I, E> originalInputFormat; |
51 | |
|
52 | |
|
53 | |
|
54 | |
|
55 | |
|
56 | |
|
57 | |
public WrappedEdgeInputFormat( |
58 | 0 | EdgeInputFormat<I, E> edgeInputFormat) { |
59 | 0 | originalInputFormat = edgeInputFormat; |
60 | 0 | } |
61 | |
|
62 | |
@Override |
63 | |
public void checkInputSpecs(Configuration conf) { |
64 | 0 | originalInputFormat.checkInputSpecs(getConf()); |
65 | 0 | } |
66 | |
|
67 | |
@Override |
68 | |
public List<InputSplit> getSplits(JobContext context, |
69 | |
int minSplitCountHint) throws IOException, InterruptedException { |
70 | 0 | return originalInputFormat.getSplits( |
71 | 0 | HadoopUtils.makeJobContext(getConf(), context), |
72 | |
minSplitCountHint); |
73 | |
} |
74 | |
|
75 | |
@Override |
76 | |
public EdgeReader<I, E> createEdgeReader(InputSplit split, |
77 | |
TaskAttemptContext context) throws IOException { |
78 | 0 | EdgeReader<I, E> edgeReader = |
79 | 0 | originalInputFormat.createEdgeReader(split, |
80 | 0 | HadoopUtils.makeTaskAttemptContext(getConf(), context)); |
81 | 0 | return new WrappedEdgeReader<I, E>(edgeReader, getConf()); |
82 | |
} |
83 | |
|
84 | |
@Override |
85 | |
public void writeInputSplit(InputSplit inputSplit, |
86 | |
DataOutput dataOutput) throws IOException { |
87 | 0 | originalInputFormat.writeInputSplit(inputSplit, dataOutput); |
88 | 0 | } |
89 | |
|
90 | |
@Override |
91 | |
public InputSplit readInputSplit( |
92 | |
DataInput dataInput) throws IOException, ClassNotFoundException { |
93 | 0 | return originalInputFormat.readInputSplit(dataInput); |
94 | |
} |
95 | |
} |