Skip to content

Commit 78c0114

Browse files
authored
[#6623] fix(spark): Load JDBC driver explicitly to fix "No suitable driver found for mysql:xx" error (#6621)
### What changes were proposed in this pull request? If a user sets `spark.sql.hive.metastore.jars` to `path` in the Spark configuration and the Hive metastore URI is not set explicitly, Spark will use an isolated client class loader to load JDBC drivers, which prevents Iceberg from loading the corresponding driver class. ### Why are the changes needed? Fix: #6623 ### Does this PR introduce _any_ user-facing change? no ### How was this patch tested? tested locally
1 parent aa014fc commit 78c0114

File tree

1 file changed

+12
-0
lines changed

1 file changed

+12
-0
lines changed

spark-connector/spark-common/src/main/java/org/apache/gravitino/spark/connector/iceberg/GravitinoIcebergCatalog.java

+12
Original file line numberDiff line numberDiff line change
@@ -22,6 +22,8 @@
2222
import java.lang.reflect.InvocationTargetException;
2323
import java.lang.reflect.Method;
2424
import java.util.Map;
25+
import org.apache.commons.lang3.StringUtils;
26+
import org.apache.gravitino.catalog.lakehouse.iceberg.IcebergConstants;
2527
import org.apache.gravitino.catalog.lakehouse.iceberg.IcebergPropertiesUtils;
2628
import org.apache.gravitino.rel.Table;
2729
import org.apache.gravitino.spark.connector.PropertiesConverter;
@@ -58,6 +60,16 @@ public class GravitinoIcebergCatalog extends BaseCatalog
5860
@Override
5961
protected TableCatalog createAndInitSparkCatalog(
6062
String name, CaseInsensitiveStringMap options, Map<String, String> properties) {
63+
String jdbcDriver = properties.get(IcebergConstants.GRAVITINO_JDBC_DRIVER);
64+
if (StringUtils.isNotBlank(jdbcDriver)) {
65+
// If `spark.sql.hive.metastore.jars` is set, Spark will use an isolated client class loader
66+
// to load JDBC drivers, which makes Iceberg unable to find the corresponding JDBC driver.
67+
try {
68+
Class.forName(jdbcDriver);
69+
} catch (Exception e) {
70+
throw new RuntimeException(e);
71+
}
72+
}
6173
String catalogBackendName = IcebergPropertiesUtils.getCatalogBackendName(properties);
6274
Map<String, String> all =
6375
getPropertiesConverter().toSparkCatalogProperties(options, properties);

0 commit comments

Comments
 (0)