Skip to content

Commit 299e666

Browse files
authored
[#3129] fix(spark-connector): fix create table failed for using hive (#3169)
What changes were proposed in this pull request? Transform the `hive` provider to the `text` format. Why are the changes needed? Fix: #3129. Does this PR introduce any user-facing change? No. How was this patch tested? Added a UT and an IT.
1 parent 503c144 commit 299e666

File tree

3 files changed

+31
-0
lines changed

3 files changed

+31
-0
lines changed

integration-test/src/test/java/com/datastrato/gravitino/integration/test/spark/hive/SparkHiveCatalogIT.java

+24
Original file line numberDiff line numberDiff line change
@@ -258,6 +258,30 @@ void testHiveFormatWithLocationTable() {
258258
});
259259
}
260260

261+
@Test
262+
void testHiveFormatWithUsingHive() {
263+
String tableName = "test_hive_format_using_hive_table";
264+
dropTableIfExists(tableName);
265+
String createTableSql = getCreateSimpleTableString(tableName);
266+
createTableSql += "USING HIVE";
267+
sql(createTableSql);
268+
SparkTableInfo tableInfo = getTableInfo(tableName);
269+
270+
SparkTableInfoChecker checker =
271+
SparkTableInfoChecker.create()
272+
.withName(tableName)
273+
.withTableProperties(
274+
ImmutableMap.of(
275+
HivePropertiesConstants.SPARK_HIVE_INPUT_FORMAT,
276+
HivePropertiesConstants.TEXT_INPUT_FORMAT_CLASS,
277+
HivePropertiesConstants.SPARK_HIVE_OUTPUT_FORMAT,
278+
HivePropertiesConstants.IGNORE_KEY_OUTPUT_FORMAT_CLASS,
279+
HivePropertiesConstants.SPARK_HIVE_SERDE_LIB,
280+
HivePropertiesConstants.LAZY_SIMPLE_SERDE_CLASS));
281+
checker.check(tableInfo);
282+
checkTableReadWrite(tableInfo);
283+
}
284+
261285
@Test
262286
void testHiveFormatWithUsing() {
263287
String tableName = "test_hive_format_using_table";

spark-connector/spark-connector/src/main/java/com/datastrato/gravitino/spark/connector/hive/HivePropertiesConverter.java

+1
Original file line numberDiff line numberDiff line change
@@ -27,6 +27,7 @@ public class HivePropertiesConverter implements PropertiesConverter {
2727
"orc", HivePropertiesConstants.GRAVITINO_HIVE_FORMAT_ORC,
2828
"parquet", HivePropertiesConstants.GRAVITINO_HIVE_FORMAT_PARQUET,
2929
"textfile", HivePropertiesConstants.GRAVITINO_HIVE_FORMAT_TEXTFILE,
30+
"hive", HivePropertiesConstants.GRAVITINO_HIVE_FORMAT_TEXTFILE,
3031
"json", HivePropertiesConstants.GRAVITINO_HIVE_FORMAT_JSON,
3132
"csv", HivePropertiesConstants.GRAVITINO_HIVE_FORMAT_CSV,
3233
"avro", HivePropertiesConstants.GRAVITINO_HIVE_FORMAT_AVRO);

spark-connector/spark-connector/src/test/java/com/datastrato/gravitino/spark/connector/hive/TestHivePropertiesConverter.java

+6
Original file line numberDiff line numberDiff line change
@@ -38,6 +38,12 @@ void testTableFormat() {
3838
ImmutableMap.of(TableCatalog.PROP_PROVIDER, "PARQUET"));
3939
Assertions.assertEquals(
4040
hiveProperties.get(HivePropertiesConstants.GRAVITINO_HIVE_FORMAT), "PARQUET");
41+
hiveProperties =
42+
hivePropertiesConverter.toGravitinoTableProperties(
43+
ImmutableMap.of(TableCatalog.PROP_PROVIDER, "HIVE"));
44+
Assertions.assertEquals(
45+
hiveProperties.get(HivePropertiesConstants.GRAVITINO_HIVE_FORMAT),
46+
HivePropertiesConstants.GRAVITINO_HIVE_FORMAT_TEXTFILE);
4147
Assertions.assertThrowsExactly(
4248
NotSupportedException.class,
4349
() ->

0 commit comments

Comments (0)