[test](jdbc) add jdbc and hive regression test (#13143)

1. Modify default behavior of `build.sh`
    The `BUILD_JAVA_UDF` option now defaults to ON, so a JVM is required for both compilation and runtime.

2. Add docker-compose for MySQL 5.7, PostgreSQL 14 and Hive 2
   See `docker/thirdparties/docker-compose`.

3. Add some regression test cases for jdbc query on MySQL, PG and Hive Catalog
   The test switch defaults to `false`; if set to `true`, you must first start the Docker containers for MySQL/PG/Hive.

4. Support `if not exists` and `if exists` for create/drop resource and create/drop encryptkey
This commit is contained in:
Mingyu Chen
2022-10-21 15:29:27 +08:00
committed by GitHub
parent ccc04210d6
commit 847b80ebfa
51 changed files with 26302 additions and 71 deletions

View File

@@ -1831,14 +1831,14 @@ create_stmt ::=
RESULT = new AlterTableStmt(tableName, Lists.newArrayList(new CreateIndexClause(tableName, new IndexDef(indexName, ifNotExists, cols, indexType, comment), false)));
:}
/* resource */
| KW_CREATE opt_external:isExternal KW_RESOURCE ident_or_text:resourceName opt_properties:properties
| KW_CREATE opt_external:isExternal KW_RESOURCE opt_if_not_exists:ifNotExists ident_or_text:resourceName opt_properties:properties
{:
RESULT = new CreateResourceStmt(isExternal, resourceName, properties);
RESULT = new CreateResourceStmt(isExternal, ifNotExists, resourceName, properties);
:}
/* encryptkey */
| KW_CREATE KW_ENCRYPTKEY encryptkey_name:keyName KW_AS STRING_LITERAL:keyString
| KW_CREATE KW_ENCRYPTKEY opt_if_not_exists:ifNotExists encryptkey_name:keyName KW_AS STRING_LITERAL:keyString
{:
RESULT = new CreateEncryptKeyStmt(keyName, keyString);
RESULT = new CreateEncryptKeyStmt(ifNotExists, keyName, keyString);
:}
/* sync job */
| KW_CREATE KW_SYNC ident:db DOT ident_or_text:jobName LPAREN channel_desc_list:channelDescList RPAREN binlog_desc:binlog opt_properties:properties
@@ -2714,13 +2714,13 @@ drop_stmt ::=
{:
RESULT = new DropMaterializedViewStmt(ifExists, mvName, tableName);
:}
| KW_DROP KW_RESOURCE ident_or_text:resourceName
| KW_DROP KW_RESOURCE opt_if_exists:ifExists ident_or_text:resourceName
{:
RESULT = new DropResourceStmt(resourceName);
RESULT = new DropResourceStmt(ifExists, resourceName);
:}
| KW_DROP KW_ENCRYPTKEY encryptkey_name:keyName
| KW_DROP KW_ENCRYPTKEY opt_if_exists:ifExists encryptkey_name:keyName
{:
RESULT = new DropEncryptKeyStmt(keyName);
RESULT = new DropEncryptKeyStmt(ifExists, keyName);
:}
| KW_DROP KW_SQL_BLOCK_RULE opt_if_exists:ifExists ident_list:ruleNames
{:

View File

@@ -41,15 +41,21 @@ import com.google.common.base.Strings;
* CREATE ENCRYPTKEY test.key1 AS "beijing";
*/
public class CreateEncryptKeyStmt extends DdlStmt {
private final boolean ifNotExists;
private final EncryptKeyName encryptKeyName;
private final String keyString;
private EncryptKey encryptKey;
public CreateEncryptKeyStmt(EncryptKeyName encryptKeyName, String keyString) {
public CreateEncryptKeyStmt(boolean ifNotExists, EncryptKeyName encryptKeyName, String keyString) {
this.ifNotExists = ifNotExists;
this.encryptKeyName = encryptKeyName;
this.keyString = keyString;
}
public boolean isIfNotExists() {
return ifNotExists;
}
public EncryptKeyName getEncryptKeyName() {
return encryptKeyName;
}

View File

@@ -36,17 +36,24 @@ public class CreateResourceStmt extends DdlStmt {
private static final String TYPE = "type";
private final boolean isExternal;
private final boolean ifNotExists;
private final String resourceName;
private final Map<String, String> properties;
private ResourceType resourceType;
public CreateResourceStmt(boolean isExternal, String resourceName, Map<String, String> properties) {
public CreateResourceStmt(boolean isExternal, boolean ifNotExists, String resourceName,
Map<String, String> properties) {
this.isExternal = isExternal;
this.ifNotExists = ifNotExists;
this.resourceName = resourceName;
this.properties = properties;
this.resourceType = ResourceType.UNKNOWN;
}
public boolean isIfNotExists() {
return ifNotExists;
}
public String getResourceName() {
return resourceName;
}

View File

@@ -19,7 +19,6 @@ package org.apache.doris.analysis;
import org.apache.doris.catalog.EncryptKeySearchDesc;
import org.apache.doris.catalog.Env;
import org.apache.doris.common.AnalysisException;
import org.apache.doris.common.ErrorCode;
import org.apache.doris.common.ErrorReport;
import org.apache.doris.common.UserException;
@@ -27,13 +26,19 @@ import org.apache.doris.mysql.privilege.PrivPredicate;
import org.apache.doris.qe.ConnectContext;
public class DropEncryptKeyStmt extends DdlStmt {
private final boolean ifExists;
private final EncryptKeyName encryptKeyName;
private EncryptKeySearchDesc encryptKeySearchDesc;
public DropEncryptKeyStmt(EncryptKeyName encryptKeyName) {
public DropEncryptKeyStmt(boolean ifExists, EncryptKeyName encryptKeyName) {
this.ifExists = ifExists;
this.encryptKeyName = encryptKeyName;
}
public boolean isIfExists() {
return ifExists;
}
public EncryptKeyName getEncryptKeyName() {
return encryptKeyName;
}
@@ -43,7 +48,7 @@ public class DropEncryptKeyStmt extends DdlStmt {
}
@Override
public void analyze(Analyzer analyzer) throws AnalysisException, UserException {
public void analyze(Analyzer analyzer) throws UserException {
super.analyze(analyzer);
// check operation privilege

View File

@@ -27,12 +27,18 @@ import org.apache.doris.qe.ConnectContext;
// DROP RESOURCE resource_name
public class DropResourceStmt extends DdlStmt {
private boolean ifExists;
private String resourceName;
public DropResourceStmt(String resourceName) {
public DropResourceStmt(boolean ifExists, String resourceName) {
this.ifExists = ifExists;
this.resourceName = resourceName;
}
public boolean isIfExists() {
return ifExists;
}
public String getResourceName() {
return resourceName;
}

View File

@@ -943,7 +943,7 @@ public class RestoreJob extends AbstractJob {
} else {
try {
// restore resource
resourceMgr.createResource(remoteOdbcResource);
resourceMgr.createResource(remoteOdbcResource, false);
} catch (DdlException e) {
status = new Status(ErrCode.COMMON_ERROR, e.getMessage());
return;

View File

@@ -774,25 +774,30 @@ public class Database extends MetaObject implements Writable, DatabaseIf<Table>
return ClusterNamespace.getNameFromFullName(fullQualifiedName).equalsIgnoreCase(InfoSchemaDb.DATABASE_NAME);
}
public synchronized void addEncryptKey(EncryptKey encryptKey) throws UserException {
addEncryptKeyImpl(encryptKey, false);
Env.getCurrentEnv().getEditLog().logAddEncryptKey(encryptKey);
public synchronized void addEncryptKey(EncryptKey encryptKey, boolean ifNotExists) throws UserException {
if (addEncryptKeyImpl(encryptKey, false, ifNotExists)) {
Env.getCurrentEnv().getEditLog().logAddEncryptKey(encryptKey);
}
}
public synchronized void replayAddEncryptKey(EncryptKey encryptKey) {
try {
addEncryptKeyImpl(encryptKey, true);
addEncryptKeyImpl(encryptKey, true, true);
} catch (UserException e) {
Preconditions.checkArgument(false);
}
}
private void addEncryptKeyImpl(EncryptKey encryptKey, boolean isReplay) throws UserException {
private boolean addEncryptKeyImpl(EncryptKey encryptKey, boolean isReplay, boolean ifNotExists)
throws UserException {
String keyName = encryptKey.getEncryptKeyName().getKeyName();
EncryptKey existKey = dbEncryptKey.getName2EncryptKey().get(keyName);
if (!isReplay) {
if (existKey != null) {
if (existKey.isIdentical(encryptKey)) {
if (ifNotExists) {
return false;
}
throw new UserException("encryptKey ["
+ existKey.getEncryptKeyName().toString() + "] already exists");
}
@@ -800,25 +805,32 @@ public class Database extends MetaObject implements Writable, DatabaseIf<Table>
}
dbEncryptKey.getName2EncryptKey().put(keyName, encryptKey);
return true;
}
public synchronized void dropEncryptKey(EncryptKeySearchDesc encryptKeySearchDesc) throws UserException {
dropEncryptKeyImpl(encryptKeySearchDesc);
Env.getCurrentEnv().getEditLog().logDropEncryptKey(encryptKeySearchDesc);
public synchronized void dropEncryptKey(EncryptKeySearchDesc encryptKeySearchDesc, boolean ifExists)
throws UserException {
if (dropEncryptKeyImpl(encryptKeySearchDesc, ifExists)) {
Env.getCurrentEnv().getEditLog().logDropEncryptKey(encryptKeySearchDesc);
}
}
public synchronized void replayDropEncryptKey(EncryptKeySearchDesc encryptKeySearchDesc) {
try {
dropEncryptKeyImpl(encryptKeySearchDesc);
dropEncryptKeyImpl(encryptKeySearchDesc, true);
} catch (UserException e) {
Preconditions.checkArgument(false);
}
}
private void dropEncryptKeyImpl(EncryptKeySearchDesc encryptKeySearchDesc) throws UserException {
private boolean dropEncryptKeyImpl(EncryptKeySearchDesc encryptKeySearchDesc, boolean ifExists)
throws UserException {
String keyName = encryptKeySearchDesc.getKeyEncryptKeyName().getKeyName();
EncryptKey existKey = dbEncryptKey.getName2EncryptKey().get(keyName);
if (existKey == null) {
if (ifExists) {
return false;
}
throw new UserException("Unknown encryptKey, encryptKey=" + encryptKeySearchDesc.toString());
}
boolean isFound = false;
@@ -826,9 +838,13 @@ public class Database extends MetaObject implements Writable, DatabaseIf<Table>
isFound = true;
}
if (!isFound) {
if (ifExists) {
return false;
}
throw new UserException("Unknown encryptKey, encryptKey=" + encryptKeySearchDesc.toString());
}
dbEncryptKey.getName2EncryptKey().remove(keyName);
return true;
}
public synchronized List<EncryptKey> getEncryptKeys() {

View File

@@ -33,7 +33,7 @@ public class EncryptKeyHelper {
public static void createEncryptKey(CreateEncryptKeyStmt stmt) throws UserException {
EncryptKeyName name = stmt.getEncryptKeyName();
Database db = Env.getCurrentInternalCatalog().getDbOrDdlException(name.getDb());
db.addEncryptKey(stmt.getEncryptKey());
db.addEncryptKey(stmt.getEncryptKey(), stmt.isIfNotExists());
}
public static void replayCreateEncryptKey(EncryptKey encryptKey) throws MetaNotFoundException {
@@ -45,7 +45,7 @@ public class EncryptKeyHelper {
public static void dropEncryptKey(DropEncryptKeyStmt stmt) throws UserException {
EncryptKeyName name = stmt.getEncryptKeyName();
Database db = Env.getCurrentInternalCatalog().getDbOrDdlException(name.getDb());
db.dropEncryptKey(stmt.getEncryptKeysSearchDesc());
db.dropEncryptKey(stmt.getEncryptKeysSearchDesc(), stmt.isIfExists());
}
public static void replayDropEncryptKey(EncryptKeySearchDesc encryptKeySearchDesc) throws MetaNotFoundException {

View File

@@ -248,6 +248,9 @@ public class JdbcTable extends Table {
if (Strings.isNullOrEmpty(jdbcTypeName)) {
throw new DdlException("property " + TABLE_TYPE + " must be set");
}
if (!TABLE_TYPE_MAP.containsKey(jdbcTypeName.toLowerCase())) {
throw new DdlException("Unknown jdbc table type: " + jdbcTypeName);
}
Resource resource = Env.getCurrentEnv().getResourceMgr().getResource(resourceName);
if (resource == null) {

View File

@@ -73,7 +73,6 @@ public abstract class Resource implements Writable {
public static Resource fromStmt(CreateResourceStmt stmt) throws DdlException {
Resource resource = getResourceInstance(stmt.getResourceType(), stmt.getResourceName());
resource.setProperties(stmt.getProperties());
return resource;
}

View File

@@ -77,17 +77,23 @@ public class ResourceMgr implements Writable {
throw new DdlException("Only support SPARK, ODBC_CATALOG ,JDBC, and REMOTE_STORAGE resource.");
}
Resource resource = Resource.fromStmt(stmt);
createResource(resource);
// log add
Env.getCurrentEnv().getEditLog().logCreateResource(resource);
LOG.info("Create resource success. Resource: {}", resource);
if (createResource(resource, stmt.isIfNotExists())) {
Env.getCurrentEnv().getEditLog().logCreateResource(resource);
LOG.info("Create resource success. Resource: {}", resource);
}
}
public void createResource(Resource resource) throws DdlException {
// Return true if the resource is truly added,
// otherwise, return false or throw exception.
public boolean createResource(Resource resource, boolean ifNotExists) throws DdlException {
String resourceName = resource.getName();
if (nameToResource.putIfAbsent(resourceName, resource) != null) {
if (ifNotExists) {
return false;
}
throw new DdlException("Resource(" + resourceName + ") already exist");
}
return true;
}
public void replayCreateResource(Resource resource) {
@@ -97,6 +103,9 @@ public class ResourceMgr implements Writable {
public void dropResource(DropResourceStmt stmt) throws DdlException {
String resourceName = stmt.getResourceName();
if (!nameToResource.containsKey(resourceName)) {
if (stmt.isIfExists()) {
return;
}
throw new DdlException("Resource(" + resourceName + ") does not exist");
}

View File

@@ -1690,7 +1690,7 @@ public class Config extends ConfigBase {
* Temp config for multi catalog feature.
* Should be removed when this feature is ready.
*/
@ConfField(mutable = false, masterOnly = true)
@ConfField(mutable = true, masterOnly = true)
public static boolean enable_multi_catalog = false;
@ConfField(mutable = true, masterOnly = false)

View File

@@ -63,7 +63,7 @@ public class CreateResourceStmtTest {
Map<String, String> properties = Maps.newHashMap();
properties.put("type", "spark");
CreateResourceStmt stmt = new CreateResourceStmt(true, resourceName1, properties);
CreateResourceStmt stmt = new CreateResourceStmt(true, false, resourceName1, properties);
stmt.analyze(analyzer);
Assert.assertEquals(resourceName1, stmt.getResourceName());
Assert.assertEquals(Resource.ResourceType.SPARK, stmt.getResourceType());
@@ -71,7 +71,7 @@ public class CreateResourceStmtTest {
properties = Maps.newHashMap();
properties.put("type", "odbc_catalog");
stmt = new CreateResourceStmt(true, resourceName2, properties);
stmt = new CreateResourceStmt(true, false, resourceName2, properties);
stmt.analyze(analyzer);
Assert.assertEquals(resourceName2, stmt.getResourceName());
Assert.assertEquals(Resource.ResourceType.ODBC_CATALOG, stmt.getResourceType());
@@ -79,7 +79,7 @@ public class CreateResourceStmtTest {
properties = Maps.newHashMap();
properties.put("type", "s3");
stmt = new CreateResourceStmt(true, resourceName3, properties);
stmt = new CreateResourceStmt(true, false, resourceName3, properties);
stmt.analyze(analyzer);
Assert.assertEquals(resourceName3, stmt.getResourceName());
Assert.assertEquals(ResourceType.S3, stmt.getResourceType());
@@ -100,7 +100,7 @@ public class CreateResourceStmtTest {
Map<String, String> properties = Maps.newHashMap();
properties.put("type", "hadoop");
CreateResourceStmt stmt = new CreateResourceStmt(true, resourceName1, properties);
CreateResourceStmt stmt = new CreateResourceStmt(true, false, resourceName1, properties);
stmt.analyze(analyzer);
}
}

View File

@@ -85,7 +85,7 @@ public class OdbcCatalogResourceTest {
};
// host: 127.0.0.1, port: 7777, without driver and odbc_type
CreateResourceStmt stmt = new CreateResourceStmt(true, name, properties);
CreateResourceStmt stmt = new CreateResourceStmt(true, false, name, properties);
stmt.analyze(analyzer);
OdbcCatalogResource resource = (OdbcCatalogResource) Resource.fromStmt(stmt);
Assert.assertEquals(name, resource.getName());
@@ -98,7 +98,7 @@ public class OdbcCatalogResourceTest {
// with driver and odbc_type
properties.put("driver", "mysql");
properties.put("odbc_type", "mysql");
stmt = new CreateResourceStmt(true, name, properties);
stmt = new CreateResourceStmt(true, false, name, properties);
stmt.analyze(analyzer);
resource = (OdbcCatalogResource) Resource.fromStmt(stmt);
Assert.assertEquals("mysql", resource.getProperty("driver"));

View File

@@ -118,7 +118,7 @@ public class ResourceMgrTest {
// spark resource
// add
ResourceMgr mgr = new ResourceMgr();
CreateResourceStmt stmt = new CreateResourceStmt(true, sparkResName, sparkProperties);
CreateResourceStmt stmt = new CreateResourceStmt(true, false, sparkResName, sparkProperties);
stmt.analyze(analyzer);
Assert.assertEquals(0, mgr.getResourceNum());
mgr.createResource(stmt);
@@ -138,12 +138,12 @@
Assert.assertEquals(workingDir, ((SparkResource) mgr.getResource(sparkResName)).getWorkingDir());
// drop
DropResourceStmt dropStmt = new DropResourceStmt(sparkResName);
DropResourceStmt dropStmt = new DropResourceStmt(false, sparkResName);
mgr.dropResource(dropStmt);
Assert.assertEquals(0, mgr.getResourceNum());
// s3 resource
stmt = new CreateResourceStmt(true, s3ResName, s3Properties);
stmt = new CreateResourceStmt(true, false, s3ResName, s3Properties);
stmt.analyze(analyzer);
Assert.assertEquals(0, mgr.getResourceNum());
mgr.createResource(stmt);
@@ -159,7 +159,7 @@
// Assert.assertEquals(s3Region, ((S3Resource) mgr.getResource(s3ResName)).getProperty("s3_region"));
// drop
dropStmt = new DropResourceStmt(s3ResName);
dropStmt = new DropResourceStmt(false, s3ResName);
mgr.dropResource(dropStmt);
Assert.assertEquals(0, mgr.getResourceNum());
@@ -183,7 +183,7 @@
// add
ResourceMgr mgr = new ResourceMgr();
CreateResourceStmt stmt = new CreateResourceStmt(true, sparkResName, sparkProperties);
CreateResourceStmt stmt = new CreateResourceStmt(true, false, sparkResName, sparkProperties);
stmt.analyze(analyzer);
Assert.assertEquals(0, mgr.getResourceNum());
mgr.createResource(stmt);
@@ -198,7 +198,7 @@
// drop
ResourceMgr mgr = new ResourceMgr();
Assert.assertEquals(0, mgr.getResourceNum());
DropResourceStmt stmt = new DropResourceStmt(sparkResName);
DropResourceStmt stmt = new DropResourceStmt(false, sparkResName);
mgr.dropResource(stmt);
}
}

View File

@@ -97,7 +97,7 @@ public class S3ResourceTest {
};
// resource with default settings
CreateResourceStmt stmt = new CreateResourceStmt(true, name, s3Properties);
CreateResourceStmt stmt = new CreateResourceStmt(true, false, name, s3Properties);
stmt.analyze(analyzer);
S3Resource s3Resource = (S3Resource) Resource.fromStmt(stmt);
Assert.assertEquals(name, s3Resource.getName());
@@ -115,7 +115,7 @@
s3Properties.put("s3_max_connections", "100");
s3Properties.put("s3_request_timeout_ms", "2000");
s3Properties.put("s3_connection_timeout_ms", "2000");
stmt = new CreateResourceStmt(true, name, s3Properties);
stmt = new CreateResourceStmt(true, false, name, s3Properties);
stmt.analyze(analyzer);
s3Resource = (S3Resource) Resource.fromStmt(stmt);
@@ -142,7 +142,7 @@
}
};
s3Properties.remove("s3_root_path");
CreateResourceStmt stmt = new CreateResourceStmt(true, name, s3Properties);
CreateResourceStmt stmt = new CreateResourceStmt(true, false, name, s3Properties);
stmt.analyze(analyzer);
Resource.fromStmt(stmt);
}

View File

@@ -80,7 +80,7 @@ public class SparkResourceTest {
};
// master: spark, deploy_mode: cluster
CreateResourceStmt stmt = new CreateResourceStmt(true, name, properties);
CreateResourceStmt stmt = new CreateResourceStmt(true, false, name, properties);
stmt.analyze(analyzer);
SparkResource resource = (SparkResource) Resource.fromStmt(stmt);
Assert.assertEquals(name, resource.getName());
@@ -94,7 +94,7 @@
// master: spark, deploy_mode: client
properties.put("spark.submit.deployMode", "client");
stmt = new CreateResourceStmt(true, name, properties);
stmt = new CreateResourceStmt(true, false, name, properties);
stmt.analyze(analyzer);
resource = (SparkResource) Resource.fromStmt(stmt);
Assert.assertEquals("client", resource.getDeployMode().name().toLowerCase());
@@ -107,7 +107,7 @@
properties.put("spark.driver.memory", "1g");
properties.put("spark.hadoop.yarn.resourcemanager.address", "127.0.0.1:9999");
properties.put("spark.hadoop.fs.defaultFS", "hdfs://127.0.0.1:10000");
stmt = new CreateResourceStmt(true, name, properties);
stmt = new CreateResourceStmt(true, false, name, properties);
stmt.analyze(analyzer);
resource = (SparkResource) Resource.fromStmt(stmt);
Assert.assertTrue(resource.isYarnMaster());
@@ -140,7 +140,7 @@
properties.put("spark.driver.memory", "1g");
properties.put("spark.hadoop.yarn.resourcemanager.address", "127.0.0.1:9999");
properties.put("spark.hadoop.fs.defaultFS", "hdfs://127.0.0.1:10000");
CreateResourceStmt stmt = new CreateResourceStmt(true, name, properties);
CreateResourceStmt stmt = new CreateResourceStmt(true, false, name, properties);
stmt.analyze(analyzer);
SparkResource resource = (SparkResource) Resource.fromStmt(stmt);
SparkResource copiedResource = resource.getCopiedResource();
@@ -172,7 +172,7 @@
}
};
CreateResourceStmt stmt = new CreateResourceStmt(true, name, properties);
CreateResourceStmt stmt = new CreateResourceStmt(true, false, name, properties);
stmt.analyze(analyzer);
Resource.fromStmt(stmt);
}