@@ -21,6 +21,7 @@
 import java.io.File;
 import java.io.IOException;
 import java.util.ArrayList;
+import java.util.Arrays;
 import java.util.Collections;
 import java.util.HashMap;
 import java.util.List;
@@ -167,27 +168,36 @@ public String getCreateTableStmt() throws IOException {
     }
 
     boolean first = true;
-    String partitionKey = options.getHivePartitionKey();
-    for (String col : colNames) {
-      if (col.equals(partitionKey)) {
-        throw new IllegalArgumentException("Partition key " + col + " cannot "
-            + "be a column to import.");
-      }
+    final String partitionKey = options.getHivePartitionKey();
+    if (Arrays.asList(colNames).contains(partitionKey)) {
+      throw new IllegalArgumentException(
+          "Partition key " + partitionKey + " cannot be a column to import.");
+    }
+    for (final String col : colNames) {
 
       if (!first) {
         sb.append(", ");
       }
 
       first = false;
 
-      String hiveColType;
-      if (options.getFileLayout() == SqoopOptions.FileLayout.TextFile) {
+      final String hiveColType;
+      switch (options.getFileLayout()) {
+      case TextFile:
+      case AvroDataFile:
        Integer colType = columnTypes.get(col);
        hiveColType = getHiveColumnTypeForTextTable(userMapping, col, colType);
-      } else if (options.getFileLayout() == SqoopOptions.FileLayout.ParquetFile) {
-        hiveColType = HiveTypes.toHiveType(columnNameToAvroFieldSchema.get(col), options);
-      } else {
-        throw new RuntimeException("File format is not supported for Hive tables.");
+        break;
+      case ParquetFile:
+        hiveColType =
+            HiveTypes.toHiveType(columnNameToAvroFieldSchema.get(col), options);
+        break;
+      case BinaryFile:
+      case SequenceFile:
+      default:
+        throw new RuntimeException(
+            "File format is not supported for Hive tables: "
+                + options.getFileLayout());
       }
 
       sb.append('`').append(col).append("` ").append(hiveColType);
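// Aside (not part of the patch): a minimal, self-contained sketch of the
// dispatch pattern the hunk above introduces. TextFile and AvroDataFile share
// the JDBC-type-based column mapping, ParquetFile maps from the Avro field
// schema, and every other layout fails fast. The enum and return values are
// illustrative stand-ins, not Sqoop's real API.
enum FileLayoutSketch { TextFile, AvroDataFile, SequenceFile, BinaryFile, ParquetFile }

final class HiveTypeDispatchSketch {
  static String hiveColumnTypeSource(FileLayoutSketch layout) {
    switch (layout) {
    case TextFile:
    case AvroDataFile:   // falls through: Avro reuses the TextFile mapping
      return "derived from the JDBC column type";
    case ParquetFile:
      return "derived from the Avro field schema";
    default:             // SequenceFile, BinaryFile, or any future layout
      throw new RuntimeException(
          "File format is not supported for Hive tables: " + layout);
    }
  }

  public static void main(String[] args) {
    // Prints: derived from the JDBC column type
    System.out.println(hiveColumnTypeSource(FileLayoutSketch.AvroDataFile));
  }
}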
@@ -208,9 +218,14 @@ public String getCreateTableStmt() throws IOException {
         .append(" STRING) ");
     }
 
-    if (SqoopOptions.FileLayout.ParquetFile.equals(options.getFileLayout())) {
+    switch (options.getFileLayout()) {
+    case ParquetFile:
       sb.append("STORED AS PARQUET");
-    } else {
+      break;
+    case AvroDataFile:
+      sb.append("STORED AS AVRO");
+      break;
+    case TextFile: {
       sb.append("ROW FORMAT DELIMITED FIELDS TERMINATED BY '");
       sb.append(getHiveOctalCharCode((int) options.getOutputFieldDelim()));
       sb.append("' LINES TERMINATED BY '");
@@ -226,14 +241,21 @@ public String getCreateTableStmt() throws IOException {
         sb.append("' STORED AS TEXTFILE");
       }
     }
+      break;
+    default:
+      throw new RuntimeException(
+          "File format is not supported for Hive tables: "
+              + options.getFileLayout());
+    }
 
     if (isHiveExternalTableSet) {
       // add location
       sb.append(" LOCATION '" + options.getHiveExternalTableDir() + "'");
     }
 
-    LOG.debug("Create statement: " + sb.toString());
-    return sb.toString();
+    final String tableCreateStatement = sb.toString();
+    LOG.debug("Create statement: " + tableCreateStatement);
+    return tableCreateStatement;
   }
 
   private Map<String, Schema> getColumnNameToAvroTypeMapping() {
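// Aside (not part of the patch): the shape of the DDL getCreateTableStmt()
// now emits for an Avro import. The table and column names below are
// hypothetical; only the trailing STORED AS AVRO clause is the behavior this
// change adds.
final class AvroDdlShapeSketch {
  public static void main(String[] args) {
    StringBuilder sb = new StringBuilder();
    sb.append("CREATE TABLE `employees` (`id` INT, `name` STRING) ");
    sb.append("STORED AS AVRO");  // new AvroDataFile branch in the switch
    // Prints: CREATE TABLE `employees` (`id` INT, `name` STRING) STORED AS AVRO
    System.out.println(sb.toString());
  }
}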