[Enhancement](jdbc catalog) Change JDBC connection pool from Druid to HikariCP (#34045) (#34310)

This commit is contained in:
zy-kkk
2024-04-29 20:22:48 +08:00
committed by GitHub
parent 7cb00a8e54
commit 3495ed58e0
10 changed files with 122 additions and 129 deletions

View File

@ -25,9 +25,9 @@ import org.apache.doris.common.DdlException;
import org.apache.doris.common.util.Util;
import org.apache.doris.datasource.jdbc.JdbcIdentifierMapping;
import com.alibaba.druid.pool.DruidDataSource;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Lists;
import com.zaxxer.hikari.HikariDataSource;
import lombok.Data;
import lombok.Getter;
import org.apache.logging.log4j.LogManager;
@ -57,8 +57,8 @@ public abstract class JdbcClient {
private String catalogName;
protected String dbType;
protected String jdbcUser;
protected URLClassLoader classLoader = null;
protected DruidDataSource dataSource = null;
protected ClassLoader classLoader = null;
protected HikariDataSource dataSource = null;
protected boolean isOnlySpecifiedDatabase;
protected boolean isLowerCaseMetaNames;
protected String metaNamesMapping;
@ -105,55 +105,53 @@ public abstract class JdbcClient {
Optional.ofNullable(jdbcClientConfig.getExcludeDatabaseMap()).orElse(Collections.emptyMap());
String jdbcUrl = jdbcClientConfig.getJdbcUrl();
this.dbType = parseDbType(jdbcUrl);
initializeClassLoader(jdbcClientConfig);
initializeDataSource(jdbcClientConfig);
this.jdbcLowerCaseMetaMatching = new JdbcIdentifierMapping(isLowerCaseMetaNames, metaNamesMapping, this);
}
// Initialize the HikariCP DataSource that backs all JDBC connections of this catalog.
// NOTE(review): this hunk contained diff residue (the removed Druid lines interleaved
// with the added Hikari lines); only the post-change Hikari version is kept here.
private void initializeDataSource(JdbcClientConfig config) {
    ClassLoader oldClassLoader = Thread.currentThread().getContextClassLoader();
    try {
        // Use the isolated driver ClassLoader (built in initializeClassLoader) as the
        // thread context ClassLoader while Hikari resolves the driver class.
        Thread.currentThread().setContextClassLoader(this.classLoader);
        dataSource = new HikariDataSource();
        dataSource.setDriverClassName(config.getDriverClass());
        dataSource.setJdbcUrl(config.getJdbcUrl());
        dataSource.setUsername(config.getUser());
        dataSource.setPassword(config.getPassword());
        dataSource.setMinimumIdle(config.getConnectionPoolMinSize()); // default 1
        dataSource.setMaximumPoolSize(config.getConnectionPoolMaxSize()); // default 10
        // set connection timeout to 5s.
        // The default is 30s, which is too long.
        // Because when querying information_schema db, BE will call thrift rpc(default timeout is 30s)
        // to FE to get schema info, and may create connection here, if we set it too long and the url is invalid,
        // it may cause the thrift rpc timeout.
        dataSource.setConnectionTimeout(config.getConnectionPoolMaxWaitTime()); // default 5000
        dataSource.setMaxLifetime(config.getConnectionPoolMaxLifeTime()); // default 30 min
        dataSource.setIdleTimeout(config.getConnectionPoolMaxLifeTime() / 2L); // default 15 min
        LOG.info("JdbcClient set"
                + " ConnectionPoolMinSize = " + config.getConnectionPoolMinSize()
                + ", ConnectionPoolMaxSize = " + config.getConnectionPoolMaxSize()
                + ", ConnectionPoolMaxWaitTime = " + config.getConnectionPoolMaxWaitTime()
                + ", ConnectionPoolMaxLifeTime = " + config.getConnectionPoolMaxLifeTime());
    } catch (Exception e) {
        // Preserve the cause instead of flattening it to a message string.
        throw new JdbcClientException(e.getMessage(), e);
    } finally {
        // Always restore the caller's context ClassLoader.
        Thread.currentThread().setContextClassLoader(oldClassLoader);
    }
}
// Build a dedicated URLClassLoader for the catalog's JDBC driver jar, rooted at
// this class's own ClassLoader, and store it in this.classLoader for later use by
// initializeDataSource()/getConnection().
private void initializeClassLoader(JdbcClientConfig config) {
    try {
        URL[] urls = {new URL(JdbcResource.getFullDriverUrl(config.getDriverUrl()))};
        ClassLoader parent = getClass().getClassLoader();
        this.classLoader = URLClassLoader.newInstance(urls, parent);
    } catch (MalformedURLException e) {
        // Include the offending driver URL so the failure can be diagnosed,
        // matching the error style used elsewhere in this class.
        throw new RuntimeException("Error loading JDBC driver from: " + config.getDriverUrl(), e);
    }
}
public static String parseDbType(String jdbcUrl) {
try {
return JdbcResource.parseDbType(jdbcUrl);
@ -167,13 +165,17 @@ public abstract class JdbcClient {
}
/**
 * Borrows a connection from the pool, installing the driver's ClassLoader as the
 * thread context ClassLoader for the duration of the call and restoring the
 * previous one afterwards.
 *
 * @return a pooled JDBC {@link Connection}
 * @throws JdbcClientException if a connection cannot be obtained
 */
public Connection getConnection() throws JdbcClientException {
    ClassLoader previousLoader = Thread.currentThread().getContextClassLoader();
    try {
        Thread.currentThread().setContextClassLoader(this.classLoader);
        return dataSource.getConnection();
    } catch (Exception e) {
        throw new JdbcClientException(
                String.format("Can not connect to jdbc due to error: %s, Catalog name: %s",
                        e.getMessage(), this.getCatalogName()), e);
    } finally {
        Thread.currentThread().setContextClassLoader(previousLoader);
    }
}