/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hcatalog.mapreduce;

import org.apache.giraph.io.hcatalog.GiraphHCatInputFormat;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
import org.apache.hadoop.hive.metastore.api.MetaException;
import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
import org.apache.hadoop.hive.metastore.api.Partition;
import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.metadata.Table;
import org.apache.hadoop.mapreduce.InputSplit;
import org.apache.hadoop.mapreduce.RecordReader;
import org.apache.hcatalog.common.ErrorType;
import org.apache.hcatalog.common.HCatException;
import org.apache.hcatalog.common.HCatUtil;
import org.apache.hcatalog.data.schema.HCatSchema;
import org.apache.thrift.TException;

import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;

/**
 * Helper methods for setting up HCatalog-backed input. Declared in the
 * org.apache.hcatalog.mapreduce package so that HCatalog classes with
 * package-level visibility (such as {@link InternalUtil}) can be used.
 */
public class HCatUtils {
  /**
   * Utility class, do not instantiate.
   */
  private HCatUtils() { }

  /**
   * Populate the given {@link InputJobInfo} with table and partition
   * metadata queried from the Hive metastore.
   *
   * @param conf Configuration
   * @param inputJobInfo Input job info to populate
   * @return The populated input job info
   * @throws IOException If the metastore cannot be reached or the table
   *                     metadata cannot be read
   */
  public static InputJobInfo getInputJobInfo(
      Configuration conf, InputJobInfo inputJobInfo)
    throws IOException {
    HiveMetaStoreClient client = null;
    HiveConf hiveConf;
    try {
      if (conf != null) {
        hiveConf = HCatUtil.getHiveConf(conf);
      } else {
        hiveConf = new HiveConf(GiraphHCatInputFormat.class);
      }
      client = HCatUtil.getHiveClient(hiveConf);
      Table table = HCatUtil.getTable(client, inputJobInfo.getDatabaseName(),
          inputJobInfo.getTableName());

      List<PartInfo> partInfoList = new ArrayList<PartInfo>();

      inputJobInfo.setTableInfo(HCatTableInfo.valueOf(table.getTTable()));
      if (table.getPartitionKeys().size() != 0) {
        // Partitioned table: list the partitions that match the filter
        List<Partition> parts = client.listPartitionsByFilter(
            inputJobInfo.getDatabaseName(),
            inputJobInfo.getTableName(),
            inputJobInfo.getFilter(),
            (short) -1);

        if (parts != null) {
          // Default to 100,000 partitions if hcat.metastore.maxpartitions
          // is not defined
          int maxPart = hiveConf.getInt("hcat.metastore.maxpartitions", 100000);
          if (parts.size() > maxPart) {
            throw new HCatException(ErrorType.ERROR_EXCEED_MAXPART,
                "total number of partitions is " + parts.size());
          }

          // Populate partition info
          for (Partition ptn : parts) {
            HCatSchema schema = HCatUtil.extractSchema(
                new org.apache.hadoop.hive.ql.metadata.Partition(table, ptn));
            PartInfo partInfo = extractPartInfo(schema, ptn.getSd(),
                ptn.getParameters(), conf, inputJobInfo);
            partInfo.setPartitionValues(InternalUtil.createPtnKeyValueMap(table,
                ptn));
            partInfoList.add(partInfo);
          }
        }
      } else {
        // Non-partitioned table
        HCatSchema schema = HCatUtil.extractSchema(table);
        PartInfo partInfo = extractPartInfo(schema, table.getTTable().getSd(),
            table.getParameters(), conf, inputJobInfo);
        partInfo.setPartitionValues(new HashMap<String, String>());
        partInfoList.add(partInfo);
      }
      inputJobInfo.setPartitions(partInfoList);
    } catch (MetaException e) {
      throw new IOException("Got MetaException", e);
    } catch (NoSuchObjectException e) {
      throw new IOException("Got NoSuchObjectException", e);
    } catch (TException e) {
      throw new IOException("Got TException", e);
    } catch (HiveException e) {
      throw new IOException("Got HiveException", e);
    } finally {
      HCatUtil.closeHiveClientQuietly(client);
    }
    return inputJobInfo;
  }
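
  /*
   * A rough usage sketch (not part of this class): an input format would
   * typically build an InputJobInfo for the target table and then call
   * getInputJobInfo() to have the table schema and per-partition PartInfo
   * list filled in before computing splits. The InputJobInfo.create(...)
   * factory signature shown below is an assumption about the HCatalog
   * version on the classpath; adjust to the API actually available.
   *
   *   Configuration conf = job.getConfiguration();
   *   InputJobInfo info = InputJobInfo.create("default", "my_table", null);
   *   info = HCatUtils.getInputJobInfo(conf, info);
   *   // info.getPartitions() now holds one PartInfo per matching partition,
   *   // ready to be serialized into the job configuration.
   */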

  /**
   * Extract partition info from a storage descriptor.
   *
   * @param schema Table schema
   * @param sd Storage descriptor
   * @param parameters Table or partition parameters
   * @param conf Configuration
   * @param inputJobInfo Input job info
   * @return Partition info
   * @throws IOException If the storage handler cannot be instantiated
   */
  private static PartInfo extractPartInfo(
      HCatSchema schema, StorageDescriptor sd, Map<String, String> parameters,
      Configuration conf, InputJobInfo inputJobInfo) throws IOException {
    StorerInfo storerInfo = InternalUtil.extractStorerInfo(sd, parameters);

    Properties hcatProperties = new Properties();
    HCatStorageHandler storageHandler = HCatUtil.getStorageHandler(conf,
        storerInfo);

    // Copy the properties from the storage handler to the job properties
    Map<String, String> jobProperties =
        HCatUtil.getInputJobProperties(storageHandler, inputJobInfo);

    for (Map.Entry<String, String> param : parameters.entrySet()) {
      hcatProperties.put(param.getKey(), param.getValue());
    }

    return new PartInfo(schema, storageHandler, sd.getLocation(),
        hcatProperties, jobProperties, inputJobInfo.getTableInfo());
  }

  /**
   * Create a new {@link HCatRecordReader}.
   *
   * @param storageHandler Storage handler for the partition being read
   * @param valuesNotInDataCols Partition-key values not stored in the data
   *                            columns
   * @return Record reader
   */
  public static RecordReader newHCatReader(
      HCatStorageHandler storageHandler,
      Map<String, String> valuesNotInDataCols) {
    return new HCatRecordReader(storageHandler, valuesNotInDataCols);
  }
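
  /*
   * Rough usage sketch (illustrative, not part of this class): inside an
   * input format's createRecordReader(), the split is narrowed to an
   * HCatSplit and the reader is built from the partition's storage handler
   * plus the partition-key values that are not stored in the data columns.
   * The accessor names on HCatSplit and PartInfo below are assumptions
   * about the HCatalog API in use:
   *
   *   HCatSplit hcatSplit = HCatUtils.castToHCatSplit(split);
   *   PartInfo partInfo = hcatSplit.getPartitionInfo();
   *   RecordReader reader = HCatUtils.newHCatReader(
   *       partInfo.getStorageHandler(), partInfo.getPartitionValues());
   *   reader.initialize(hcatSplit, taskAttemptContext);
   */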

  /**
   * Cast an {@link InputSplit} to {@link HCatSplit}.
   *
   * @param split Input split
   * @return The split as an {@link HCatSplit}
   * @throws IOException If the split is not an {@link HCatSplit}
   */
  public static HCatSplit castToHCatSplit(InputSplit split)
    throws IOException {
    return InternalUtil.castToHCatSplit(split);
  }
}