[feature](insert) support default values when creating Hive tables (#33666)

Issue Number: #31442

Hive 3 supports creating tables with column default values.
When the catalog is configured for Hive 3, Doris can write each column's default value to the table as a metastore default constraint at creation time, and read the constraints back when loading the table schema.
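For orientation, a minimal end-to-end sketch of the flow this patch enables, issued over Doris' MySQL protocol with Connector/J; the host, ports, catalog, database, and table names are illustrative assumptions, and 'parquet' stands in for any supported file_format:

    import java.sql.Connection;
    import java.sql.DriverManager;
    import java.sql.Statement;

    public class HiveDefaultValueExample {
        public static void main(String[] args) throws Exception {
            try (Connection conn = DriverManager.getConnection(
                    "jdbc:mysql://127.0.0.1:9030/", "root", "");
                 Statement stmt = conn.createStatement()) {
                // The version gate added in this patch requires 'hive.version' = '3.0';
                // the metastore URI below is a placeholder.
                stmt.execute("CREATE CATALOG IF NOT EXISTS hive3_catalog PROPERTIES ("
                        + "'type'='hms',"
                        + "'hive.metastore.uris'='thrift://127.0.0.1:9083',"
                        + "'hive.version'='3.0')");
                stmt.execute("SWITCH hive3_catalog");
                stmt.execute("CREATE DATABASE IF NOT EXISTS test_hive_default_val");
                stmt.execute("USE test_hive_default_val");
                // Column defaults are persisted as metastore default constraints.
                stmt.execute("CREATE TABLE t_dv ("
                        + "`col1` BOOLEAN DEFAULT 'false' COMMENT 'col1',"
                        + "`col2` TINYINT DEFAULT '127' COMMENT 'col2'"
                        + ") ENGINE=hive PROPERTIES('file_format'='parquet')");
            }
        }
    }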
Authored by slothever on 2024-04-19 11:28:10 +08:00; committed by morningman.
parent 734520a77b
commit 561afde0c4
10 changed files with 280 additions and 125 deletions


@@ -188,6 +188,12 @@ public class Column implements Writable, GsonPostProcessable {
                 false, null);
     }
 
+    public Column(String name, Type type, boolean isKey, AggregateType aggregateType, boolean isAllowNull,
+            String defaultValue, String comment, boolean visible, int colUniqueId) {
+        this(name, type, isKey, aggregateType, isAllowNull, -1, defaultValue, comment, visible, null, colUniqueId, null,
+                false, null);
+    }
+
     public Column(String name, Type type, boolean isKey, AggregateType aggregateType, boolean isAllowNull,
             String defaultValue, String comment, boolean visible, DefaultValueExprDef defaultValueExprDef,
             int colUniqueId, String realDefaultValue) {

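For context, a hedged sketch of a call site for the new overload; Type.STRING and the literal values are illustrative assumptions, not code from the patch:

    import org.apache.doris.catalog.Column;
    import org.apache.doris.catalog.Type;

    public class ColumnOverloadSketch {
        public static void main(String[] args) {
            // A nullable STRING column with the literal default 'default';
            // -1 means "no unique column id", matching the Hive schema path below.
            Column col = new Column("col11", Type.STRING, /* isKey */ false,
                    /* aggregateType */ null, /* isAllowNull */ true,
                    /* defaultValue */ "default", /* comment */ "col11",
                    /* visible */ true, /* colUniqueId */ -1);
            System.out.println(col.getDefaultValue()); // "default"
        }
    }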

@@ -63,6 +63,8 @@ public interface HMSCachedClient {
     List<FieldSchema> getSchema(String dbName, String tblName);
 
+    Map<String, String> getDefaultColumnValues(String dbName, String tblName);
+
     List<ColumnStatisticsObj> getTableColumnStatistics(String dbName, String tblName,
             List<String> columns);


@@ -584,13 +584,17 @@ public class HMSExternalTable extends ExternalTable implements MTMVRelatedTableI
     }
 
     private List<Column> getHiveSchema() {
+        HMSCachedClient client = ((HMSExternalCatalog) catalog).getClient();
         List<Column> columns;
-        List<FieldSchema> schema = ((HMSExternalCatalog) catalog).getClient().getSchema(dbName, name);
+        List<FieldSchema> schema = client.getSchema(dbName, name);
+        Map<String, String> colDefaultValues = client.getDefaultColumnValues(dbName, name);
         List<Column> tmpSchema = Lists.newArrayListWithCapacity(schema.size());
         for (FieldSchema field : schema) {
-            tmpSchema.add(new Column(field.getName().toLowerCase(Locale.ROOT),
+            String fieldName = field.getName().toLowerCase(Locale.ROOT);
+            String defaultValue = colDefaultValues.getOrDefault(fieldName, null);
+            tmpSchema.add(new Column(fieldName,
                     HiveMetaStoreClientHelper.hiveTypeToDorisType(field.getType()), true, null,
-                    true, field.getComment(), true, -1));
+                    true, defaultValue, field.getComment(), true, -1));
         }
         columns = tmpSchema;
         return columns;


@@ -71,7 +71,7 @@ public class HiveVersionUtil {
                 return DEFAULT_HIVE_VERSION;
             }
         } else if (major >= 3) {
-            return HiveVersion.V2_3;
+            return HiveVersion.V3_0;
         } else {
             LOG.warn("invalid hive version: " + version);
             return DEFAULT_HIVE_VERSION;


@@ -49,6 +49,7 @@ import org.apache.logging.log4j.Logger;
 import java.sql.Connection;
 import java.sql.PreparedStatement;
 import java.sql.ResultSet;
+import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.function.Function;
@@ -245,6 +246,15 @@ public class PostgreSQLJdbcHMSCachedClient extends JdbcHMSCachedClient {
         }
     }
 
+    @Override
+    public Map<String, String> getDefaultColumnValues(String dbName, String tblName) {
+        if (LOG.isDebugEnabled()) {
+            LOG.debug("Default column values are not supported by PostgreSQLJdbcHMSCachedClient;"
+                    + " returning an empty map so callers fall back to null defaults.");
+        }
+        return new HashMap<>();
+    }
+
     @Override
     public Table getTable(String dbName, String tblName) {
         String sql = "SELECT \"TBL_ID\", \"TBL_NAME\", \"DBS\".\"NAME\", \"OWNER\", \"CREATE_TIME\","


@@ -18,6 +18,7 @@
 package org.apache.doris.datasource.hive;
 
 import org.apache.doris.analysis.TableName;
+import org.apache.doris.catalog.Column;
 import org.apache.doris.common.Config;
 import org.apache.doris.datasource.DatabaseMetadata;
 import org.apache.doris.datasource.TableMetadata;
@@ -46,6 +47,7 @@ import org.apache.hadoop.hive.metastore.api.ColumnStatisticsObj;
 import org.apache.hadoop.hive.metastore.api.CurrentNotificationEventId;
 import org.apache.hadoop.hive.metastore.api.DataOperationType;
 import org.apache.hadoop.hive.metastore.api.Database;
+import org.apache.hadoop.hive.metastore.api.DefaultConstraintsRequest;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.metastore.api.LockComponent;
 import org.apache.hadoop.hive.metastore.api.LockResponse;
@@ -53,17 +55,22 @@ import org.apache.hadoop.hive.metastore.api.LockState;
 import org.apache.hadoop.hive.metastore.api.MetaException;
 import org.apache.hadoop.hive.metastore.api.NotificationEventResponse;
 import org.apache.hadoop.hive.metastore.api.Partition;
+import org.apache.hadoop.hive.metastore.api.SQLDefaultConstraint;
 import org.apache.hadoop.hive.metastore.api.Table;
 import org.apache.hadoop.hive.metastore.api.TableValidWriteIds;
 import org.apache.hadoop.hive.metastore.txn.TxnUtils;
 import org.apache.logging.log4j.LogManager;
 import org.apache.logging.log4j.Logger;
+import shade.doris.hive.org.apache.thrift.TApplicationException;
 
 import java.security.PrivilegedExceptionAction;
+import java.util.ArrayList;
 import java.util.BitSet;
 import java.util.Collections;
+import java.util.HashMap;
 import java.util.LinkedList;
 import java.util.List;
+import java.util.Locale;
 import java.util.Map;
 import java.util.Optional;
 import java.util.Queue;
@@ -153,8 +160,28 @@ public class ThriftHMSCachedClient implements HMSCachedClient {
         try {
             // String location,
             if (tbl instanceof HiveTableMetadata) {
+                Table hiveTable = HiveUtil.toHiveTable((HiveTableMetadata) tbl);
+                List<Column> tableColumns = ((HiveTableMetadata) tbl).getColumns();
+                List<SQLDefaultConstraint> dvs = new ArrayList<>(tableColumns.size());
+                for (Column tableColumn : tableColumns) {
+                    if (tableColumn.hasDefaultValue()) {
+                        SQLDefaultConstraint dv = new SQLDefaultConstraint();
+                        dv.setTable_db(tbl.getDbName());
+                        dv.setTable_name(tbl.getTableName());
+                        dv.setColumn_name(tableColumn.getName());
+                        dv.setDefault_value(tableColumn.getDefaultValue());
+                        dv.setDc_name(tableColumn.getName() + "_dv_constraint");
+                        dvs.add(dv);
+                    }
+                }
                 ugiDoAs(() -> {
-                    client.client.createTable(HiveUtil.toHiveTable((HiveTableMetadata) tbl));
+                    if (!dvs.isEmpty()) {
+                        // primaryKeys, foreignKeys, uniqueConstraints, notNullConstraints, defaultConstraints, checkConstraints
+                        client.client.createTableWithConstraints(hiveTable, null,
+                                null, null, null, dvs, null);
+                        return null;
+                    }
+                    client.client.createTable(hiveTable);
                     return null;
                 });
             }
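Distilled from the hunk above, a hedged, self-contained sketch of how a Hive 3 default constraint is assembled; the database, table, and column names are made up for illustration:

    import org.apache.hadoop.hive.metastore.api.SQLDefaultConstraint;

    import java.util.ArrayList;
    import java.util.List;

    public class DefaultConstraintSketch {
        public static void main(String[] args) {
            List<SQLDefaultConstraint> dvs = new ArrayList<>();
            SQLDefaultConstraint dv = new SQLDefaultConstraint();
            dv.setTable_db("test_db");            // database the table lives in
            dv.setTable_name("t_dv");             // table being created
            dv.setColumn_name("col1");            // column carrying the default
            dv.setDefault_value("false");         // default literal, stored as a string
            dv.setDc_name("col1_dv_constraint");  // naming convention from the patch
            dvs.add(dv);
            // Passed as the defaultConstraints argument of createTableWithConstraints().
            System.out.println(dvs);
        }
    }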
@@ -293,6 +320,37 @@ public class ThriftHMSCachedClient implements HMSCachedClient {
         }
     }
 
+    public Map<String, String> getDefaultColumnValues(String dbName, String tblName) {
+        Map<String, String> res = new HashMap<>();
+        try (ThriftHMSClient client = getClient()) {
+            try {
+                DefaultConstraintsRequest req = new DefaultConstraintsRequest();
+                req.setDb_name(dbName);
+                req.setTbl_name(tblName);
+                List<SQLDefaultConstraint> dvcs = ugiDoAs(() -> {
+                    try {
+                        return client.client.getDefaultConstraints(req);
+                    } catch (TApplicationException e) {
+                        if (e.getMessage().contains("Invalid method name: 'get_default_constraints'")) {
+                            // getDefaultConstraints is only available on Hive 3; older metastores mean "no defaults"
+                            return ImmutableList.of();
+                        }
+                        throw e;
+                    }
+                });
+                for (SQLDefaultConstraint dvc : dvcs) {
+                    res.put(dvc.getColumn_name().toLowerCase(Locale.ROOT), dvc.getDefault_value());
+                }
+                return res;
+            } catch (Exception e) {
+                client.setThrowable(e);
+                throw e;
+            }
+        } catch (Exception e) {
+            throw new HMSClientException("failed to get default constraints of table %s in db %s from hms client", e, tblName, dbName);
+        }
+    }
+
     @Override
     public Table getTable(String dbName, String tblName) {
         try (ThriftHMSClient client = getClient()) {

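The map built above feeds getHiveSchema() in the earlier hunk. A hedged toy demo of its contract, with lower-cased column names as keys and null for columns without a constraint:

    import java.util.HashMap;
    import java.util.Locale;
    import java.util.Map;

    public class DefaultValueLookupDemo {
        public static void main(String[] args) {
            Map<String, String> defaults = new HashMap<>();
            // Simulates one SQLDefaultConstraint fetched from the metastore.
            defaults.put("col1".toLowerCase(Locale.ROOT), "false");
            System.out.println(defaults.getOrDefault("col1", null)); // false
            System.out.println(defaults.getOrDefault("col2", null)); // null -> no default
        }
    }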

@@ -1089,7 +1089,10 @@ public class HiveMetaStoreClient implements IMetaStoreClient, AutoCloseable {
             List<SQLCheckConstraint> checkConstraints)
             throws AlreadyExistsException, InvalidObjectException,
             MetaException, NoSuchObjectException, TException {
+        if (hiveVersion != HiveVersion.V3_0) {
+            throw new UnsupportedOperationException("Table with default values is not supported "
+                    + "if the hive version is less than 3.0. Set 'hive.version' to 3.0 in the catalog properties.");
+        }
         if (!tbl.isSetCatName()) {
             String defaultCat = getDefaultCatalog(conf);
             tbl.setCatName(defaultCat);


@@ -162,6 +162,11 @@ public class TestHMSCachedClient implements HMSCachedClient {
         return null;
     }
 
+    @Override
+    public Map<String, String> getDefaultColumnValues(String dbName, String tblName) {
+        return new HashMap<>();
+    }
+
     @Override
     public List<ColumnStatisticsObj> getTableColumnStatistics(String dbName, String tblName, List<String> columns) {
         return null;


@@ -396,22 +396,43 @@ suite("test_hive_ctas", "p0,external,hive,external_docker,external_docker_hive")
         sql """ switch `${catalog_name}` """
         sql """ CREATE DATABASE IF NOT EXISTS `test_ctas_all_type` """;
         sql """ use test_ctas_all_type """;
+        // TODO: work on hive3
+        // sql """
+        //     CREATE TABLE IF NOT EXISTS all_types_ctas_${file_format}_with_dv(
+        //         `col1` BOOLEAN DEFAULT 'false' COMMENT 'col1',
+        //         `col2` TINYINT DEFAULT '127' COMMENT 'col2',
+        //         `col3` SMALLINT DEFAULT '32767' COMMENT 'col3',
+        //         `col4` INT DEFAULT '2147483647' COMMENT 'col4',
+        //         `col5` BIGINT DEFAULT '9223372036854775807' COMMENT 'col5',
+        //         `col6` CHAR(10) DEFAULT 'default' COMMENT 'col6',
+        //         `col7` FLOAT DEFAULT '1' COMMENT 'col7',
+        //         `col8` DOUBLE DEFAULT '3.141592653' COMMENT 'col8',
+        //         `col9` DECIMAL(9,4) DEFAULT '99999.9999' COMMENT 'col9',
+        //         `col10` VARCHAR(11) DEFAULT 'default' COMMENT 'col10',
+        //         `col11` STRING DEFAULT 'default' COMMENT 'col11',
+        //         `col12` DATE DEFAULT '2023-05-29' COMMENT 'col12',
+        //         `col13` DATETIME DEFAULT current_timestamp COMMENT 'col13'
+        //     ) ENGINE=hive
+        //     PROPERTIES (
+        //         'file_format'='${file_format}'
+        //     )
+        // """
         sql """
             CREATE TABLE IF NOT EXISTS all_types_ctas_${file_format}(
-                `col1` BOOLEAN DEFAULT 'false' COMMENT 'col1',
-                `col2` TINYINT DEFAULT '127' COMMENT 'col2',
-                `col3` SMALLINT DEFAULT '32767' COMMENT 'col3',
-                `col4` INT DEFAULT '2147483647' COMMENT 'col4',
-                `col5` BIGINT DEFAULT '9223372036854775807' COMMENT 'col5',
-                `col6` CHAR(10) DEFAULT 'default' COMMENT 'col6',
-                `col7` FLOAT DEFAULT '1' COMMENT 'col7',
-                `col8` DOUBLE DEFAULT '3.141592653' COMMENT 'col8',
-                `col9` DECIMAL(9,4) DEFAULT '99999.9999' COMMENT 'col9',
-                `col10` VARCHAR(11) DEFAULT 'default' COMMENT 'col10',
-                `col11` STRING DEFAULT 'default' COMMENT 'col11',
-                `col12` DATE DEFAULT '2023-05-29' COMMENT 'col12',
-                `col13` DATETIME DEFAULT current_timestamp COMMENT 'col13'
+                `col1` BOOLEAN COMMENT 'col1',
+                `col2` TINYINT COMMENT 'col2',
+                `col3` SMALLINT COMMENT 'col3',
+                `col4` INT COMMENT 'col4',
+                `col5` BIGINT COMMENT 'col5',
+                `col6` CHAR(10) COMMENT 'col6',
+                `col7` FLOAT COMMENT 'col7',
+                `col8` DOUBLE COMMENT 'col8',
+                `col9` DECIMAL(9,4) COMMENT 'col9',
+                `col10` VARCHAR(11) COMMENT 'col10',
+                `col11` STRING COMMENT 'col11',
+                `col12` DATE COMMENT 'col12',
+                `col13` DATETIME COMMENT 'col13'
             ) ENGINE=hive
             PROPERTIES (
                 'file_format'='${file_format}'

@@ -91,113 +91,27 @@ suite("test_hive_ddl", "p0,external,hive,external_docker,external_docker_hive")
         }
     }
 
-    def test_loc_tbl = { String file_format, String externalEnvIp, String hdfs_port, String catalog_name ->
-        logger.info("Test create/drop table with location...")
+    def test_tbl_default_val = { String file_format, String externalEnvIp, String hms_port,
+                                 String hdfs_port, String catalog_name ->
+        // creating tables with default values requires hive3; with the default
+        // 'hive.version' (2.3) the DDL below is expected to fail
         sql """switch ${catalog_name}"""
-        def loc = "${externalEnvIp}:${hdfs_port}/tmp/hive/test_hive_loc_db"
-        sql """ create database if not exists `test_hive_loc`
-                properties('location'='hdfs://${loc}')
+        sql """ create database if not exists `test_hive_default_val`
            """
-        sql """use `test_hive_loc`"""
-        // case1. the table default location is inherited from db
-        sql """DROP TABLE IF EXISTS `loc_tbl_${file_format}_default`"""
-        sql """
-            CREATE TABLE loc_tbl_${file_format}_default (
-                `col` STRING COMMENT 'col'
-            ) ENGINE=hive
-            PROPERTIES (
-                'file_format'='${file_format}'
-            )
-        """
-        def create_tbl_res = sql """ show create table loc_tbl_${file_format}_default """
-        logger.info("${create_tbl_res}")
-        assertTrue(create_tbl_res.toString().containsIgnoreCase("${loc}/loc_tbl_${file_format}_default"))
-        sql """ INSERT INTO loc_tbl_${file_format}_default values(1) """
-        def tvfRes = sql """ SELECT * FROM hdfs(
-                                'uri'='hdfs://${loc}/loc_tbl_${file_format}_default/*',
-                                'format' = '${file_format}',
-                                'fs.defaultFS' = 'hdfs://${externalEnvIp}:${hdfs_port}'
-                            )
-                        """
-        logger.info("${tvfRes}")
-        assertTrue(!tvfRes.isEmpty())
-        sql """DROP TABLE `loc_tbl_${file_format}_default`"""
-        def tvfDropRes = sql """ SELECT * FROM hdfs(
-                                'uri'='hdfs://${loc}/loc_tbl_${file_format}_default/*',
-                                'format' = '${file_format}',
-                                'fs.defaultFS' = 'hdfs://${externalEnvIp}:${hdfs_port}'
-                            )
-                        """
-        logger.info("${tvfDropRes}")
-        assertTrue(tvfDropRes.isEmpty())
-        // case2. use a custom location to create table
-        def tbl_loc = "hdfs://${loc}/custom_loc"
-        sql """ DROP TABLE IF EXISTS loc_tbl_${file_format}_custom"""
-        sql """
-            CREATE TABLE loc_tbl_${file_format}_custom (
-                `col` STRING COMMENT 'col'
-            ) ENGINE=hive
-            PROPERTIES (
-                'file_format'='${file_format}',
-                'location'='${tbl_loc}'
-            )
-        """
-        def create_tbl_res2 = sql """ show create table loc_tbl_${file_format}_custom """
-        logger.info("${create_tbl_res2}")
-        assertTrue(create_tbl_res2.toString().containsIgnoreCase("${tbl_loc}"))
-        sql """ INSERT INTO loc_tbl_${file_format}_custom values(1) """
-        def tvfRes2 = sql """ SELECT * FROM hdfs(
-                                'uri'='${tbl_loc}/*',
-                                'format' = '${file_format}',
-                                'fs.defaultFS' = 'hdfs://${externalEnvIp}:${hdfs_port}'
-                            )
-                        """
-        logger.info("${tvfRes2}")
-        assertTrue(!tvfRes2.isEmpty())
-        sql """DROP TABLE `loc_tbl_${file_format}_custom`"""
-        def tvfDropRes2 = sql """ SELECT * FROM hdfs(
-                                'uri'='${tbl_loc}/*',
-                                'format' = '${file_format}',
-                                'fs.defaultFS' = 'hdfs://${externalEnvIp}:${hdfs_port}'
-                            )
-                        """
-        logger.info("${tvfDropRes2}")
-        assertTrue(tvfDropRes2.isEmpty())
-        // case3. check default
-        sql """
-            CREATE TABLE all_default_values_${file_format}(
-                `col1` BOOLEAN DEFAULT 'false' COMMENT 'col1',
-                `col2` TINYINT DEFAULT '127' COMMENT 'col2',
-                `col3` SMALLINT DEFAULT '32767' COMMENT 'col3',
-                `col4` INT DEFAULT '2147483647' COMMENT 'col4',
-                `col5` BIGINT DEFAULT '9223372036854775807' COMMENT 'col5',
-                `col6` CHAR(10) DEFAULT 'default' COMMENT 'col6',
-                `col7` FLOAT DEFAULT '1' COMMENT 'col7',
-                `col8` DOUBLE DEFAULT '3.141592653' COMMENT 'col8',
-                `col9` DECIMAL(9,4) DEFAULT '99999.9999' COMMENT 'col9',
-                `col10` VARCHAR(11) DEFAULT 'default' COMMENT 'col10',
-                `col11` STRING DEFAULT 'default' COMMENT 'col11',
-                `col12` DATE DEFAULT '2023-05-29' COMMENT 'col12',
-                `col13` DATETIME DEFAULT current_timestamp COMMENT 'col13'
-            ) ENGINE=hive
-            PROPERTIES (
-                'file_format'='${file_format}'
-            )
-        """
-        // need support default insert:
-        // sql """ INSERT INTO all_default_values_${file_format}
-        //         VALUES(null, null, null, null, null, null, null, null, null, null, null, null, null)
-        //     """
-        // sql """ INSERT INTO all_default_values_${file_format} (col1, col3, col5, col7, col12)
-        //         VALUES(null, null, null, null)
-        //     """
-        // order_qt_default_val01 """ SELECT * FROM all_default_values_${file_format} """
-        sql """DROP TABLE `all_default_values_${file_format}`"""
+        sql """use `test_hive_default_val`"""
+        test {
+            sql """
+                CREATE TABLE all_default_values_${file_format}_hive2(
+                    `col1` BOOLEAN DEFAULT 'false' COMMENT 'col1',
+                    `col2` TINYINT DEFAULT '127' COMMENT 'col2'
+                ) ENGINE=hive
+                PROPERTIES (
+                    'file_format'='${file_format}'
+                )
+            """
+            exception "java.sql.SQLException: errCode = 2, detailMessage = errCode = 2, detailMessage = failed to create database from hms client. reason: java.lang.UnsupportedOperationException: Table with default values is not supported if the hive version is less than 3.0. Set 'hive.version' to 3.0 in the catalog properties."
+        }
+        sql """DROP DATABASE `test_hive_default_val`"""
     test {
         sql """
@@ -283,7 +197,138 @@ suite("test_hive_ddl", "p0,external,hive,external_docker,external_docker_hive")
             exception "errCode = 2, detailMessage = errCode = 2, detailMessage = date literal [2020-09-20 02:60] is invalid: Text '2020-09-20 02:60' could not be parsed: Invalid value for MinuteOfHour (valid values 0 - 59): 60"
         }
-        // case4. check some exceptions
+        // test 'hive.version' = '3.0'
+        // sql """drop catalog if exists ${catalog_name}_hive3"""
+        // sql """create catalog if not exists ${catalog_name}_hive3 properties (
+        //     'type'='hms',
+        //     'hive.metastore.uris' = 'thrift://${externalEnvIp}:${hms_port}',
+        //     'fs.defaultFS' = 'hdfs://${externalEnvIp}:${hdfs_port}',
+        //     'hive.version' = '3.0'
+        // );"""
+        // sql """ switch ${catalog_name}_hive3 """
+        // sql """ create database if not exists `test_hive_default_val_hive3`
+        //     """
+        // sql """use `test_hive_default_val_hive3`"""
+        // // test create hive3 table when use 'hive.version' = '3.0'
+        // sql """
+        //     CREATE TABLE all_default_values_${file_format}(
+        //         `col1` BOOLEAN DEFAULT 'false' COMMENT 'col1',
+        //         `col2` TINYINT DEFAULT '127' COMMENT 'col2',
+        //         `col3` SMALLINT DEFAULT '32767' COMMENT 'col3',
+        //         `col4` INT DEFAULT '2147483647' COMMENT 'col4',
+        //         `col5` BIGINT DEFAULT '9223372036854775807' COMMENT 'col5',
+        //         `col6` CHAR(10) DEFAULT 'default' COMMENT 'col6',
+        //         `col7` FLOAT DEFAULT '1' COMMENT 'col7',
+        //         `col8` DOUBLE DEFAULT '3.141592653' COMMENT 'col8',
+        //         `col9` DECIMAL(9,4) DEFAULT '99999.9999' COMMENT 'col9',
+        //         `col10` VARCHAR(11) DEFAULT 'default' COMMENT 'col10',
+        //         `col11` STRING DEFAULT 'default' COMMENT 'col11',
+        //         `col12` DATE DEFAULT '2023-05-29' COMMENT 'col12',
+        //         `col13` DATETIME DEFAULT current_timestamp COMMENT 'col13'
+        //     ) ENGINE=hive
+        //     PROPERTIES (
+        //         'file_format'='${file_format}'
+        //     )
+        // """
+        // // TODO: work on hive3
+        // sql """ INSERT INTO all_default_values_${file_format}
+        //         VALUES(null, null, null, null, null, null, null, null, null, null, null, null, null)
+        //     """
+        // sql """ INSERT INTO all_default_values_${file_format} (col1, col3, col5, col7, col12)
+        //         VALUES(false, null, 3.4, null, null)
+        //     """
+        // sql """ INSERT INTO all_default_values_${file_format} (col2, col4, col6, col9, col11)
+        //         VALUES(-128, null, 'A', null, '2024-07-30')
+        //     """
+        // order_qt_default_val01 """ SELECT * FROM all_default_values_${file_format} """
+        // test {
+        //     sql """ INSERT INTO all_default_values_${file_format} (col2, col4, col6, col9, col11)
+        //             VALUES("123", "abcd", 'Ab', null, '2024-07-30')
+        //     """
+        //     exception "errCode = 2, detailMessage = errCode = 2, detailMessage = Invalid number format: abcd"
+        // }
+        //
+        // sql """DROP TABLE `all_default_values_${file_format}`"""
+    }
+
+    def test_loc_tbl = { String file_format, String externalEnvIp, String hdfs_port, String catalog_name ->
+        logger.info("Test create/drop table with location...")
+        sql """switch ${catalog_name}"""
+        def loc = "${externalEnvIp}:${hdfs_port}/tmp/hive/test_hive_loc_db"
+        sql """ create database if not exists `test_hive_loc`
+                properties('location'='hdfs://${loc}')
+           """
+        sql """use `test_hive_loc`"""
+        // case1. the table default location is inherited from db
+        sql """DROP TABLE IF EXISTS `loc_tbl_${file_format}_default`"""
+        sql """
+            CREATE TABLE loc_tbl_${file_format}_default (
+                `col` STRING COMMENT 'col'
+            ) ENGINE=hive
+            PROPERTIES (
+                'file_format'='${file_format}'
+            )
+        """
+        def create_tbl_res = sql """ show create table loc_tbl_${file_format}_default """
+        logger.info("${create_tbl_res}")
+        assertTrue(create_tbl_res.toString().containsIgnoreCase("${loc}/loc_tbl_${file_format}_default"))
+        sql """ INSERT INTO loc_tbl_${file_format}_default values(1) """
+        def tvfRes = sql """ SELECT * FROM hdfs(
+                                'uri'='hdfs://${loc}/loc_tbl_${file_format}_default/*',
+                                'format' = '${file_format}',
+                                'fs.defaultFS' = 'hdfs://${externalEnvIp}:${hdfs_port}'
+                            )
+                        """
+        logger.info("${tvfRes}")
+        assertTrue(!tvfRes.isEmpty())
+        sql """DROP TABLE `loc_tbl_${file_format}_default`"""
+        def tvfDropRes = sql """ SELECT * FROM hdfs(
+                                'uri'='hdfs://${loc}/loc_tbl_${file_format}_default/*',
+                                'format' = '${file_format}',
+                                'fs.defaultFS' = 'hdfs://${externalEnvIp}:${hdfs_port}'
+                            )
+                        """
+        logger.info("${tvfDropRes}")
+        assertTrue(tvfDropRes.isEmpty())
+        // case2. use a custom location to create table
+        def tbl_loc = "hdfs://${loc}/custom_loc"
+        sql """ DROP TABLE IF EXISTS loc_tbl_${file_format}_custom"""
+        sql """
+            CREATE TABLE loc_tbl_${file_format}_custom (
+                `col` STRING COMMENT 'col'
+            ) ENGINE=hive
+            PROPERTIES (
+                'file_format'='${file_format}',
+                'location'='${tbl_loc}'
+            )
+        """
+        def create_tbl_res2 = sql """ show create table loc_tbl_${file_format}_custom """
+        logger.info("${create_tbl_res2}")
+        assertTrue(create_tbl_res2.toString().containsIgnoreCase("${tbl_loc}"))
+        sql """ INSERT INTO loc_tbl_${file_format}_custom values(1) """
+        def tvfRes2 = sql """ SELECT * FROM hdfs(
+                                'uri'='${tbl_loc}/*',
+                                'format' = '${file_format}',
+                                'fs.defaultFS' = 'hdfs://${externalEnvIp}:${hdfs_port}'
+                            )
+                        """
+        logger.info("${tvfRes2}")
+        assertTrue(!tvfRes2.isEmpty())
+        sql """DROP TABLE `loc_tbl_${file_format}_custom`"""
+        def tvfDropRes2 = sql """ SELECT * FROM hdfs(
+                                'uri'='${tbl_loc}/*',
+                                'format' = '${file_format}',
+                                'fs.defaultFS' = 'hdfs://${externalEnvIp}:${hdfs_port}'
+                            )
+                        """
+        logger.info("${tvfDropRes2}")
+        assertTrue(tvfDropRes2.isEmpty())
+        // case3. check some exceptions
         def comment_check = sql """ CREATE TABLE ex_tbl_${file_format}(
             `col1` INT COMMENT 'col1',
             `col2` STRING COMMENT 'col2',
@@ -659,6 +704,7 @@ suite("test_hive_ddl", "p0,external,hive,external_docker,external_docker_hive")
     for (String file_format in file_formats) {
         logger.info("Process file format " + file_format)
         test_loc_tbl(file_format, externalEnvIp, hdfs_port, catalog_name)
+        test_tbl_default_val(file_format, externalEnvIp, hms_port, hdfs_port, catalog_name)
         test_db_tbl(file_format, externalEnvIp, hdfs_port, catalog_name)
         for (String compression in compressions) {