From cba0c606acad0ef30d6dae763585379a3d522a15 Mon Sep 17 00:00:00 2001 From: FANNG Date: Tue, 11 Mar 2025 19:24:04 +0800 Subject: [PATCH] [#6623] fix(spark): Load JDBC driver explicitly to fix `No suitable driver found for mysql:xx` error (#6621) ### What changes were proposed in this pull request? If the user sets `spark.sql.hive.metastore.jars` to `path` in the Spark configuration and the Hive metastore URI is not set explicitly, Spark will use an isolated client class loader to load JDBC drivers, which prevents Iceberg from loading the corresponding JDBC driver. ### Why are the changes needed? Fix: #6623 ### Does this PR introduce _any_ user-facing change? no ### How was this patch tested? test locally --- .../connector/iceberg/GravitinoIcebergCatalog.java | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/spark-connector/spark-common/src/main/java/org/apache/gravitino/spark/connector/iceberg/GravitinoIcebergCatalog.java b/spark-connector/spark-common/src/main/java/org/apache/gravitino/spark/connector/iceberg/GravitinoIcebergCatalog.java index ccadc69e49a..e6d59c853b3 100644 --- a/spark-connector/spark-common/src/main/java/org/apache/gravitino/spark/connector/iceberg/GravitinoIcebergCatalog.java +++ b/spark-connector/spark-common/src/main/java/org/apache/gravitino/spark/connector/iceberg/GravitinoIcebergCatalog.java @@ -22,6 +22,8 @@ import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; import java.util.Map; +import org.apache.commons.lang3.StringUtils; +import org.apache.gravitino.catalog.lakehouse.iceberg.IcebergConstants; import org.apache.gravitino.catalog.lakehouse.iceberg.IcebergPropertiesUtils; import org.apache.gravitino.rel.Table; import org.apache.gravitino.spark.connector.PropertiesConverter; @@ -58,6 +60,16 @@ public class GravitinoIcebergCatalog extends BaseCatalog @Override protected TableCatalog createAndInitSparkCatalog( String name, CaseInsensitiveStringMap options, Map properties) { + String jdbcDriver = 
properties.get(IcebergConstants.GRAVITINO_JDBC_DRIVER); + if (StringUtils.isNotBlank(jdbcDriver)) { + // If `spark.sql.hive.metastore.jars` is set, Spark will use an isolated client class loader + // to load JDBC drivers, which prevents Iceberg from finding the corresponding JDBC driver. + try { + Class.forName(jdbcDriver); + } catch (Exception e) { + throw new RuntimeException(e); + } + } String catalogBackendName = IcebergPropertiesUtils.getCatalogBackendName(properties); Map all = getPropertiesConverter().toSparkCatalogProperties(options, properties);