diff --git a/fe/be-java-extensions/jdbc-scanner/pom.xml b/fe/be-java-extensions/jdbc-scanner/pom.xml
index 0f3eace7dd..a37b5e0f62 100644
--- a/fe/be-java-extensions/jdbc-scanner/pom.xml
+++ b/fe/be-java-extensions/jdbc-scanner/pom.xml
@@ -46,8 +46,8 @@ under the License.
provided
-            <groupId>com.alibaba</groupId>
-            <artifactId>druid</artifactId>
+            <groupId>com.zaxxer</groupId>
+            <artifactId>HikariCP</artifactId>
provided
diff --git a/fe/be-java-extensions/jdbc-scanner/src/main/java/org/apache/doris/jdbc/BaseJdbcExecutor.java b/fe/be-java-extensions/jdbc-scanner/src/main/java/org/apache/doris/jdbc/BaseJdbcExecutor.java
index 40ef980a17..6ff9f6ec69 100644
--- a/fe/be-java-extensions/jdbc-scanner/src/main/java/org/apache/doris/jdbc/BaseJdbcExecutor.java
+++ b/fe/be-java-extensions/jdbc-scanner/src/main/java/org/apache/doris/jdbc/BaseJdbcExecutor.java
@@ -28,8 +28,8 @@ import org.apache.doris.thrift.TJdbcExecutorCtorParams;
import org.apache.doris.thrift.TJdbcOperation;
import org.apache.doris.thrift.TOdbcTableType;
-import com.alibaba.druid.pool.DruidDataSource;
import com.google.common.base.Preconditions;
+import com.zaxxer.hikari.HikariDataSource;
import org.apache.log4j.Logger;
import org.apache.thrift.TDeserializer;
import org.apache.thrift.TException;
@@ -55,8 +55,8 @@ import java.util.function.Function;
public abstract class BaseJdbcExecutor implements JdbcExecutor {
private static final Logger LOG = Logger.getLogger(BaseJdbcExecutor.class);
private static final TBinaryProtocol.Factory PROTOCOL_FACTORY = new TBinaryProtocol.Factory();
- private DruidDataSource druidDataSource = null;
- private final byte[] druidDataSourceLock = new byte[0];
+ private HikariDataSource hikariDataSource = null;
+ private final byte[] hikariDataSourceLock = new byte[0];
private TOdbcTableType tableType;
private JdbcDataSourceConfig config;
private Connection conn = null;
@@ -119,10 +119,10 @@ public abstract class BaseJdbcExecutor implements JdbcExecutor {
closeResources(resultSet, stmt, conn);
}
} finally {
- if (config.getConnectionPoolMinSize() == 0 && druidDataSource != null) {
- druidDataSource.close();
+ if (config.getConnectionPoolMinSize() == 0 && hikariDataSource != null) {
+ hikariDataSource.close();
JdbcDataSource.getDataSource().getSourcesMap().remove(config.createCacheKey());
- druidDataSource = null;
+ hikariDataSource = null;
}
}
}
@@ -151,10 +151,10 @@ public abstract class BaseJdbcExecutor implements JdbcExecutor {
}
public void cleanDataSource() {
- if (druidDataSource != null) {
- druidDataSource.close();
+ if (hikariDataSource != null) {
+ hikariDataSource.close();
JdbcDataSource.getDataSource().getSourcesMap().remove(config.createCacheKey());
- druidDataSource = null;
+ hikariDataSource = null;
}
}
@@ -303,40 +303,34 @@ public abstract class BaseJdbcExecutor implements JdbcExecutor {
}
private void init(JdbcDataSourceConfig config, String sql) throws UdfRuntimeException {
- String druidDataSourceKey = config.createCacheKey();
+ ClassLoader oldClassLoader = Thread.currentThread().getContextClassLoader();
+ String hikariDataSourceKey = config.createCacheKey();
try {
ClassLoader parent = getClass().getClassLoader();
ClassLoader classLoader = UdfUtils.getClassLoader(config.getJdbcDriverUrl(), parent);
- druidDataSource = JdbcDataSource.getDataSource().getSource(druidDataSourceKey);
- if (druidDataSource == null) {
- synchronized (druidDataSourceLock) {
- druidDataSource = JdbcDataSource.getDataSource().getSource(druidDataSourceKey);
- if (druidDataSource == null) {
+ hikariDataSource = JdbcDataSource.getDataSource().getSource(hikariDataSourceKey);
+ if (hikariDataSource == null) {
+ synchronized (hikariDataSourceLock) {
+ hikariDataSource = JdbcDataSource.getDataSource().getSource(hikariDataSourceKey);
+ if (hikariDataSource == null) {
long start = System.currentTimeMillis();
- DruidDataSource ds = new DruidDataSource();
- ds.setDriverClassLoader(classLoader);
+ Thread.currentThread().setContextClassLoader(classLoader);
+ HikariDataSource ds = new HikariDataSource();
ds.setDriverClassName(config.getJdbcDriverClass());
- ds.setUrl(config.getJdbcUrl());
+ ds.setJdbcUrl(config.getJdbcUrl());
ds.setUsername(config.getJdbcUser());
ds.setPassword(config.getJdbcPassword());
- ds.setMinIdle(config.getConnectionPoolMinSize()); // default 1
- ds.setInitialSize(config.getConnectionPoolMinSize()); // default 1
- ds.setMaxActive(config.getConnectionPoolMaxSize()); // default 10
- ds.setMaxWait(config.getConnectionPoolMaxWaitTime()); // default 5000
- ds.setTestWhileIdle(true);
- ds.setTestOnBorrow(false);
+ ds.setMinimumIdle(config.getConnectionPoolMinSize()); // default 1
+ ds.setMaximumPoolSize(config.getConnectionPoolMaxSize()); // default 10
+ ds.setConnectionTimeout(config.getConnectionPoolMaxWaitTime()); // default 5000
+ ds.setMaxLifetime(config.getConnectionPoolMaxLifeTime()); // default 30 min
+ ds.setIdleTimeout(config.getConnectionPoolMaxLifeTime() / 2L); // default 15 min
setValidationQuery(ds);
- // default 3 min
- ds.setTimeBetweenEvictionRunsMillis(config.getConnectionPoolMaxLifeTime() / 10L);
- // default 15 min
- ds.setMinEvictableIdleTimeMillis(config.getConnectionPoolMaxLifeTime() / 2L);
- // default 30 min
- ds.setMaxEvictableIdleTimeMillis(config.getConnectionPoolMaxLifeTime());
- ds.setKeepAlive(config.isConnectionPoolKeepAlive());
- // default 6 min
- ds.setKeepAliveBetweenTimeMillis(config.getConnectionPoolMaxLifeTime() / 5L);
- druidDataSource = ds;
- JdbcDataSource.getDataSource().putSource(druidDataSourceKey, ds);
+ if (config.isConnectionPoolKeepAlive()) {
+ ds.setKeepaliveTime(config.getConnectionPoolMaxLifeTime() / 5L); // default 6 min
+ }
+ hikariDataSource = ds;
+ JdbcDataSource.getDataSource().putSource(hikariDataSourceKey, ds);
LOG.info("JdbcClient set"
+ " ConnectionPoolMinSize = " + config.getConnectionPoolMinSize()
+ ", ConnectionPoolMaxSize = " + config.getConnectionPoolMaxSize()
@@ -350,7 +344,7 @@ public abstract class BaseJdbcExecutor implements JdbcExecutor {
}
long start = System.currentTimeMillis();
- conn = druidDataSource.getConnection();
+ conn = hikariDataSource.getConnection();
LOG.info("get connection [" + (config.getJdbcUrl() + config.getJdbcUser()) + "] cost: " + (
System.currentTimeMillis() - start)
+ " ms");
@@ -365,11 +359,13 @@ public abstract class BaseJdbcExecutor implements JdbcExecutor {
throw new UdfRuntimeException("FileNotFoundException failed: ", e);
} catch (Exception e) {
throw new UdfRuntimeException("Initialize datasource failed: ", e);
+ } finally {
+ Thread.currentThread().setContextClassLoader(oldClassLoader);
}
}
- protected void setValidationQuery(DruidDataSource ds) {
- ds.setValidationQuery("SELECT 1");
+ protected void setValidationQuery(HikariDataSource ds) {
+ ds.setConnectionTestQuery("SELECT 1");
}
protected void initializeStatement(Connection conn, JdbcDataSourceConfig config, String sql) throws SQLException {
diff --git a/fe/be-java-extensions/jdbc-scanner/src/main/java/org/apache/doris/jdbc/DB2JdbcExecutor.java b/fe/be-java-extensions/jdbc-scanner/src/main/java/org/apache/doris/jdbc/DB2JdbcExecutor.java
index 2c20757ef8..a95970af23 100644
--- a/fe/be-java-extensions/jdbc-scanner/src/main/java/org/apache/doris/jdbc/DB2JdbcExecutor.java
+++ b/fe/be-java-extensions/jdbc-scanner/src/main/java/org/apache/doris/jdbc/DB2JdbcExecutor.java
@@ -22,7 +22,7 @@ import org.apache.doris.common.jni.vec.ColumnType.Type;
import org.apache.doris.common.jni.vec.ColumnValueConverter;
import org.apache.doris.common.jni.vec.VectorTable;
-import com.alibaba.druid.pool.DruidDataSource;
+import com.zaxxer.hikari.HikariDataSource;
import java.math.BigDecimal;
import java.sql.Date;
@@ -37,8 +37,8 @@ public class DB2JdbcExecutor extends BaseJdbcExecutor {
}
@Override
- protected void setValidationQuery(DruidDataSource ds) {
- ds.setValidationQuery("select 1 from sysibm.sysdummy1");
+ protected void setValidationQuery(HikariDataSource ds) {
+ ds.setConnectionTestQuery("select 1 from sysibm.sysdummy1");
}
@Override
diff --git a/fe/be-java-extensions/jdbc-scanner/src/main/java/org/apache/doris/jdbc/DefaultJdbcExecutor.java b/fe/be-java-extensions/jdbc-scanner/src/main/java/org/apache/doris/jdbc/DefaultJdbcExecutor.java
index aaa13a0f2d..9faaadad15 100644
--- a/fe/be-java-extensions/jdbc-scanner/src/main/java/org/apache/doris/jdbc/DefaultJdbcExecutor.java
+++ b/fe/be-java-extensions/jdbc-scanner/src/main/java/org/apache/doris/jdbc/DefaultJdbcExecutor.java
@@ -28,7 +28,6 @@ import org.apache.doris.thrift.TJdbcExecutorCtorParams;
import org.apache.doris.thrift.TJdbcOperation;
import org.apache.doris.thrift.TOdbcTableType;
-import com.alibaba.druid.pool.DruidDataSource;
import com.clickhouse.data.value.UnsignedByte;
import com.clickhouse.data.value.UnsignedInteger;
import com.clickhouse.data.value.UnsignedLong;
@@ -36,6 +35,7 @@ import com.clickhouse.data.value.UnsignedShort;
import com.google.common.base.Preconditions;
import com.google.common.util.concurrent.MoreExecutors;
import com.vesoft.nebula.client.graph.data.ValueWrapper;
+import com.zaxxer.hikari.HikariDataSource;
import org.apache.log4j.Logger;
import org.apache.thrift.TDeserializer;
import org.apache.thrift.TException;
@@ -92,8 +92,8 @@ public class DefaultJdbcExecutor {
private int batchSizeNum = 0;
private int curBlockRows = 0;
private static final byte[] emptyBytes = new byte[0];
- private DruidDataSource druidDataSource = null;
- private final byte[] druidDataSourceLock = new byte[0];
+ private HikariDataSource hikariDataSource = null;
+ private final byte[] hikariDataSourceLock = new byte[0];
private TOdbcTableType tableType;
private JdbcDataSourceConfig config;
@@ -147,10 +147,10 @@ public class DefaultJdbcExecutor {
closeResources(resultSet, stmt, conn);
}
} finally {
- if (config.getConnectionPoolMinSize() == 0 && druidDataSource != null) {
- druidDataSource.close();
+ if (config.getConnectionPoolMinSize() == 0 && hikariDataSource != null) {
+ hikariDataSource.close();
JdbcDataSource.getDataSource().getSourcesMap().remove(config.createCacheKey());
- druidDataSource = null;
+ hikariDataSource = null;
}
}
}
@@ -185,10 +185,10 @@ public class DefaultJdbcExecutor {
}
public void cleanDataSource() {
- if (druidDataSource != null) {
- druidDataSource.close();
+ if (hikariDataSource != null) {
+ hikariDataSource.close();
JdbcDataSource.getDataSource().getSourcesMap().remove(config.createCacheKey());
- druidDataSource = null;
+ hikariDataSource = null;
}
}
@@ -340,7 +340,8 @@ public class DefaultJdbcExecutor {
}
private void init(JdbcDataSourceConfig config, String sql) throws UdfRuntimeException {
- String druidDataSourceKey = config.createCacheKey();
+ ClassLoader oldClassLoader = Thread.currentThread().getContextClassLoader();
+ String hikariDataSourceKey = config.createCacheKey();
try {
if (isNebula()) {
batchSizeNum = config.getBatchSize();
@@ -351,36 +352,29 @@ public class DefaultJdbcExecutor {
} else {
ClassLoader parent = getClass().getClassLoader();
ClassLoader classLoader = UdfUtils.getClassLoader(config.getJdbcDriverUrl(), parent);
- druidDataSource = JdbcDataSource.getDataSource().getSource(druidDataSourceKey);
- if (druidDataSource == null) {
- synchronized (druidDataSourceLock) {
- druidDataSource = JdbcDataSource.getDataSource().getSource(druidDataSourceKey);
- if (druidDataSource == null) {
+ hikariDataSource = JdbcDataSource.getDataSource().getSource(hikariDataSourceKey);
+ if (hikariDataSource == null) {
+ synchronized (hikariDataSourceLock) {
+ hikariDataSource = JdbcDataSource.getDataSource().getSource(hikariDataSourceKey);
+ if (hikariDataSource == null) {
long start = System.currentTimeMillis();
- DruidDataSource ds = new DruidDataSource();
- ds.setDriverClassLoader(classLoader);
+ Thread.currentThread().setContextClassLoader(classLoader);
+ HikariDataSource ds = new HikariDataSource();
ds.setDriverClassName(config.getJdbcDriverClass());
- ds.setUrl(config.getJdbcUrl());
+ ds.setJdbcUrl(config.getJdbcUrl());
ds.setUsername(config.getJdbcUser());
ds.setPassword(config.getJdbcPassword());
- ds.setMinIdle(config.getConnectionPoolMinSize()); // default 1
- ds.setInitialSize(config.getConnectionPoolMinSize()); // default 1
- ds.setMaxActive(config.getConnectionPoolMaxSize()); // default 10
- ds.setMaxWait(config.getConnectionPoolMaxWaitTime()); // default 5000
- ds.setTestWhileIdle(true);
- ds.setTestOnBorrow(false);
+ ds.setMinimumIdle(config.getConnectionPoolMinSize()); // default 1
+ ds.setMaximumPoolSize(config.getConnectionPoolMaxSize()); // default 10
+ ds.setConnectionTimeout(config.getConnectionPoolMaxWaitTime()); // default 5000
+ ds.setMaxLifetime(config.getConnectionPoolMaxLifeTime()); // default 30 min
+ ds.setIdleTimeout(config.getConnectionPoolMaxLifeTime() / 2L); // default 15 min
setValidationQuery(ds, config.getTableType());
- // default 3 min
- ds.setTimeBetweenEvictionRunsMillis(config.getConnectionPoolMaxLifeTime() / 10L);
- // default 15 min
- ds.setMinEvictableIdleTimeMillis(config.getConnectionPoolMaxLifeTime() / 2L);
- // default 30 min
- ds.setMaxEvictableIdleTimeMillis(config.getConnectionPoolMaxLifeTime());
- ds.setKeepAlive(config.isConnectionPoolKeepAlive());
- // default 6 min
- ds.setKeepAliveBetweenTimeMillis(config.getConnectionPoolMaxLifeTime() / 5L);
- druidDataSource = ds;
- JdbcDataSource.getDataSource().putSource(druidDataSourceKey, ds);
+ if (config.isConnectionPoolKeepAlive()) {
+ ds.setKeepaliveTime(config.getConnectionPoolMaxLifeTime() / 5L); // default 6 min
+ }
+ hikariDataSource = ds;
+ JdbcDataSource.getDataSource().putSource(hikariDataSourceKey, ds);
LOG.info("JdbcClient set"
+ " ConnectionPoolMinSize = " + config.getConnectionPoolMinSize()
+ ", ConnectionPoolMaxSize = " + config.getConnectionPoolMaxSize()
@@ -394,7 +388,7 @@ public class DefaultJdbcExecutor {
}
long start = System.currentTimeMillis();
- conn = druidDataSource.getConnection();
+ conn = hikariDataSource.getConnection();
LOG.info("get connection [" + (config.getJdbcUrl() + config.getJdbcUser()) + "] cost: " + (
System.currentTimeMillis() - start)
+ " ms");
@@ -421,16 +415,18 @@ public class DefaultJdbcExecutor {
throw new UdfRuntimeException("FileNotFoundException failed: ", e);
} catch (Exception e) {
throw new UdfRuntimeException("Initialize datasource failed: ", e);
+ } finally {
+ Thread.currentThread().setContextClassLoader(oldClassLoader);
}
}
- private void setValidationQuery(DruidDataSource ds, TOdbcTableType tableType) {
+ private void setValidationQuery(HikariDataSource ds, TOdbcTableType tableType) {
if (tableType == TOdbcTableType.ORACLE || tableType == TOdbcTableType.OCEANBASE_ORACLE) {
- ds.setValidationQuery("SELECT 1 FROM dual");
+ ds.setConnectionTestQuery("SELECT 1 FROM dual");
} else if (tableType == TOdbcTableType.SAP_HANA) {
- ds.setValidationQuery("SELECT 1 FROM DUMMY");
+ ds.setConnectionTestQuery("SELECT 1 FROM DUMMY");
} else {
- ds.setValidationQuery("SELECT 1");
+ ds.setConnectionTestQuery("SELECT 1");
}
}
diff --git a/fe/be-java-extensions/jdbc-scanner/src/main/java/org/apache/doris/jdbc/JdbcDataSource.java b/fe/be-java-extensions/jdbc-scanner/src/main/java/org/apache/doris/jdbc/JdbcDataSource.java
index 3c8ac38cf7..447566596a 100644
--- a/fe/be-java-extensions/jdbc-scanner/src/main/java/org/apache/doris/jdbc/JdbcDataSource.java
+++ b/fe/be-java-extensions/jdbc-scanner/src/main/java/org/apache/doris/jdbc/JdbcDataSource.java
@@ -17,7 +17,7 @@
package org.apache.doris.jdbc;
-import com.alibaba.druid.pool.DruidDataSource;
+import com.zaxxer.hikari.HikariDataSource;
import org.apache.log4j.Logger;
import java.util.Map;
@@ -30,7 +30,7 @@ import java.util.concurrent.TimeUnit;
public class JdbcDataSource {
private static final Logger LOG = Logger.getLogger(JdbcDataSource.class);
private static final JdbcDataSource jdbcDataSource = new JdbcDataSource();
-    private final Map<String, DruidDataSource> sourcesMap = new ConcurrentHashMap<>();
+    private final Map<String, HikariDataSource> sourcesMap = new ConcurrentHashMap<>();
     private final Map<String, Long> lastAccessTimeMap = new ConcurrentHashMap<>();
private final ScheduledExecutorService executor = Executors.newSingleThreadScheduledExecutor();
private long cleanupInterval = 8 * 60 * 60 * 1000; // 8 hours
@@ -44,17 +44,17 @@ public class JdbcDataSource {
return jdbcDataSource;
}
- public DruidDataSource getSource(String cacheKey) {
+ public HikariDataSource getSource(String cacheKey) {
lastAccessTimeMap.put(cacheKey, System.currentTimeMillis());
return sourcesMap.get(cacheKey);
}
- public void putSource(String cacheKey, DruidDataSource ds) {
+ public void putSource(String cacheKey, HikariDataSource ds) {
sourcesMap.put(cacheKey, ds);
lastAccessTimeMap.put(cacheKey, System.currentTimeMillis());
}
-    public Map<String, DruidDataSource> getSourcesMap() {
+    public Map<String, HikariDataSource> getSourcesMap() {
return sourcesMap;
}
@@ -72,7 +72,7 @@ public class JdbcDataSource {
long now = System.currentTimeMillis();
lastAccessTimeMap.forEach((key, lastAccessTime) -> {
if (now - lastAccessTime > cleanupInterval) {
- DruidDataSource ds = sourcesMap.remove(key);
+ HikariDataSource ds = sourcesMap.remove(key);
if (ds != null) {
ds.close();
}
diff --git a/fe/be-java-extensions/jdbc-scanner/src/main/java/org/apache/doris/jdbc/OracleJdbcExecutor.java b/fe/be-java-extensions/jdbc-scanner/src/main/java/org/apache/doris/jdbc/OracleJdbcExecutor.java
index 9ea902efb1..0c29ac440e 100644
--- a/fe/be-java-extensions/jdbc-scanner/src/main/java/org/apache/doris/jdbc/OracleJdbcExecutor.java
+++ b/fe/be-java-extensions/jdbc-scanner/src/main/java/org/apache/doris/jdbc/OracleJdbcExecutor.java
@@ -22,7 +22,7 @@ import org.apache.doris.common.jni.vec.ColumnType.Type;
import org.apache.doris.common.jni.vec.ColumnValueConverter;
import org.apache.doris.common.jni.vec.VectorTable;
-import com.alibaba.druid.pool.DruidDataSource;
+import com.zaxxer.hikari.HikariDataSource;
import org.apache.log4j.Logger;
import java.math.BigDecimal;
@@ -40,8 +40,8 @@ public class OracleJdbcExecutor extends BaseJdbcExecutor {
}
@Override
- protected void setValidationQuery(DruidDataSource ds) {
- ds.setValidationQuery("SELECT 1 FROM dual");
+ protected void setValidationQuery(HikariDataSource ds) {
+ ds.setConnectionTestQuery("SELECT 1 FROM dual");
}
@Override
diff --git a/fe/be-java-extensions/preload-extensions/pom.xml b/fe/be-java-extensions/preload-extensions/pom.xml
index 3ef662af79..3627b912d3 100644
--- a/fe/be-java-extensions/preload-extensions/pom.xml
+++ b/fe/be-java-extensions/preload-extensions/pom.xml
@@ -204,8 +204,8 @@ under the License.
ojdbc8
-            <groupId>com.alibaba</groupId>
-            <artifactId>druid</artifactId>
+            <groupId>com.zaxxer</groupId>
+            <artifactId>HikariCP</artifactId>
com.clickhouse
diff --git a/fe/fe-core/pom.xml b/fe/fe-core/pom.xml
index 6fd4846d46..3479a23ad2 100644
--- a/fe/fe-core/pom.xml
+++ b/fe/fe-core/pom.xml
@@ -647,10 +647,9 @@ under the License.
${antlr4.version}
-
-            <groupId>com.alibaba</groupId>
-            <artifactId>druid</artifactId>
+            <groupId>com.zaxxer</groupId>
+            <artifactId>HikariCP</artifactId>
diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/jdbc/client/JdbcClient.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/jdbc/client/JdbcClient.java
index 7e093f1008..05346a8db9 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/datasource/jdbc/client/JdbcClient.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/jdbc/client/JdbcClient.java
@@ -25,9 +25,9 @@ import org.apache.doris.common.DdlException;
import org.apache.doris.common.util.Util;
import org.apache.doris.datasource.jdbc.JdbcIdentifierMapping;
-import com.alibaba.druid.pool.DruidDataSource;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Lists;
+import com.zaxxer.hikari.HikariDataSource;
import lombok.Data;
import lombok.Getter;
import org.apache.logging.log4j.LogManager;
@@ -57,8 +57,8 @@ public abstract class JdbcClient {
private String catalogName;
protected String dbType;
protected String jdbcUser;
- protected URLClassLoader classLoader = null;
- protected DruidDataSource dataSource = null;
+ protected ClassLoader classLoader = null;
+ protected HikariDataSource dataSource = null;
protected boolean isOnlySpecifiedDatabase;
protected boolean isLowerCaseMetaNames;
protected String metaNamesMapping;
@@ -105,55 +105,53 @@ public abstract class JdbcClient {
Optional.ofNullable(jdbcClientConfig.getExcludeDatabaseMap()).orElse(Collections.emptyMap());
String jdbcUrl = jdbcClientConfig.getJdbcUrl();
this.dbType = parseDbType(jdbcUrl);
+ initializeClassLoader(jdbcClientConfig);
initializeDataSource(jdbcClientConfig);
this.jdbcLowerCaseMetaMatching = new JdbcIdentifierMapping(isLowerCaseMetaNames, metaNamesMapping, this);
}
- // Initialize DruidDataSource
+ // Initialize DataSource
private void initializeDataSource(JdbcClientConfig config) {
ClassLoader oldClassLoader = Thread.currentThread().getContextClassLoader();
try {
- // TODO(ftw): The problem here is that the jar package is handled by FE
- // and URLClassLoader may load the jar package directly into memory
- URL[] urls = {new URL(JdbcResource.getFullDriverUrl(config.getDriverUrl()))};
- // set parent ClassLoader to null, we can achieve class loading isolation.
- ClassLoader parent = getClass().getClassLoader();
- ClassLoader classLoader = URLClassLoader.newInstance(urls, parent);
- if (LOG.isDebugEnabled()) {
- LOG.debug("parent ClassLoader: {}, old ClassLoader: {}, class Loader: {}.",
- parent, oldClassLoader, classLoader);
- }
- Thread.currentThread().setContextClassLoader(classLoader);
- dataSource = new DruidDataSource();
- dataSource.setDriverClassLoader(classLoader);
+ Thread.currentThread().setContextClassLoader(this.classLoader);
+ dataSource = new HikariDataSource();
dataSource.setDriverClassName(config.getDriverClass());
- dataSource.setUrl(config.getJdbcUrl());
+ dataSource.setJdbcUrl(config.getJdbcUrl());
dataSource.setUsername(config.getUser());
dataSource.setPassword(config.getPassword());
- dataSource.setMinIdle(config.getConnectionPoolMinSize()); // default 1
- dataSource.setInitialSize(config.getConnectionPoolMinSize()); // default 1
- dataSource.setMaxActive(config.getConnectionPoolMaxSize()); // default 10
+ dataSource.setMinimumIdle(config.getConnectionPoolMinSize()); // default 1
+ dataSource.setMaximumPoolSize(config.getConnectionPoolMaxSize()); // default 10
// set connection timeout to 5s.
// The default is 30s, which is too long.
// Because when querying information_schema db, BE will call thrift rpc(default timeout is 30s)
// to FE to get schema info, and may create connection here, if we set it too long and the url is invalid,
// it may cause the thrift rpc timeout.
- dataSource.setMaxWait(config.getConnectionPoolMaxWaitTime()); // default 5000
- dataSource.setTimeBetweenEvictionRunsMillis(config.getConnectionPoolMaxLifeTime() / 10L); // default 3 min
- dataSource.setMinEvictableIdleTimeMillis(config.getConnectionPoolMaxLifeTime() / 2L); // default 15 min
- dataSource.setMaxEvictableIdleTimeMillis(config.getConnectionPoolMaxLifeTime()); // default 30 min
+ dataSource.setConnectionTimeout(config.getConnectionPoolMaxWaitTime()); // default 5000
+ dataSource.setMaxLifetime(config.getConnectionPoolMaxLifeTime()); // default 30 min
+ dataSource.setIdleTimeout(config.getConnectionPoolMaxLifeTime() / 2L); // default 15 min
LOG.info("JdbcClient set"
+ " ConnectionPoolMinSize = " + config.getConnectionPoolMinSize()
+ ", ConnectionPoolMaxSize = " + config.getConnectionPoolMaxSize()
+ ", ConnectionPoolMaxWaitTime = " + config.getConnectionPoolMaxWaitTime()
+ ", ConnectionPoolMaxLifeTime = " + config.getConnectionPoolMaxLifeTime());
- } catch (MalformedURLException e) {
- throw new JdbcClientException("MalformedURLException to load class about " + config.getDriverUrl(), e);
+ } catch (Exception e) {
+            throw new JdbcClientException("Failed to initialize HikariDataSource: " + e.getMessage(), e);
} finally {
Thread.currentThread().setContextClassLoader(oldClassLoader);
}
}
+ private void initializeClassLoader(JdbcClientConfig config) {
+ try {
+ URL[] urls = {new URL(JdbcResource.getFullDriverUrl(config.getDriverUrl()))};
+ ClassLoader parent = getClass().getClassLoader();
+ this.classLoader = URLClassLoader.newInstance(urls, parent);
+ } catch (MalformedURLException e) {
+            throw new JdbcClientException("Failed to load JDBC driver from: " + config.getDriverUrl(), e);
+ }
+ }
+
public static String parseDbType(String jdbcUrl) {
try {
return JdbcResource.parseDbType(jdbcUrl);
@@ -167,13 +165,17 @@ public abstract class JdbcClient {
}
public Connection getConnection() throws JdbcClientException {
+ ClassLoader oldClassLoader = Thread.currentThread().getContextClassLoader();
Connection conn;
try {
+ Thread.currentThread().setContextClassLoader(this.classLoader);
conn = dataSource.getConnection();
} catch (Exception e) {
String errorMessage = String.format("Can not connect to jdbc due to error: %s, Catalog name: %s",
e.getMessage(), this.getCatalogName());
throw new JdbcClientException(errorMessage, e);
+ } finally {
+ Thread.currentThread().setContextClassLoader(oldClassLoader);
}
return conn;
}
diff --git a/fe/pom.xml b/fe/pom.xml
index 54ba8a28c7..fc6d6e4d8f 100644
--- a/fe/pom.xml
+++ b/fe/pom.xml
@@ -228,7 +228,7 @@ under the License.
${fe.dir}/../
1.2-SNAPSHOT
UTF-8
- 2.0.0
+ 2.0.1
1.8
1.8
@@ -258,7 +258,7 @@ under the License.
2.7
1.1.1
5.8.2
-        <druid.version>1.2.5</druid.version>
+        <hikaricp.version>4.0.3</hikaricp.version>
0.4.6
0.16.0
8.5.86
@@ -1538,9 +1538,9 @@ under the License.
-                <groupId>com.alibaba</groupId>
-                <artifactId>druid</artifactId>
-                <version>${druid.version}</version>
+                <groupId>com.zaxxer</groupId>
+                <artifactId>HikariCP</artifactId>
+                <version>${hikaricp.version}</version>
com.clickhouse