[refactor](catalog) move some package related to catalog (#31022)

Move some packages and files related to multi-catalog.
No logic is changed.
This commit is contained in:
Mingyu Chen
2024-02-17 22:25:38 +08:00
committed by yiguolei
parent fc53c7210b
commit cc727cfc32
52 changed files with 197 additions and 680 deletions

View File

@ -40,7 +40,7 @@ import org.apache.doris.common.util.ParseUtil;
import org.apache.doris.common.util.PrintableMap;
import org.apache.doris.common.util.PropertyAnalyzer;
import org.apache.doris.common.util.Util;
import org.apache.doris.external.elasticsearch.EsUtil;
import org.apache.doris.datasource.es.EsUtil;
import org.apache.doris.mysql.privilege.PrivPredicate;
import org.apache.doris.qe.ConnectContext;

View File

@ -21,7 +21,6 @@
package org.apache.doris.analysis;
import org.apache.doris.catalog.Env;
import org.apache.doris.catalog.HudiUtils;
import org.apache.doris.catalog.TableIf;
import org.apache.doris.catalog.Type;
import org.apache.doris.catalog.external.HMSExternalTable;
@ -33,6 +32,7 @@ import org.apache.doris.common.UserException;
import org.apache.doris.common.io.Text;
import org.apache.doris.common.io.Writable;
import org.apache.doris.common.util.TimeUtils;
import org.apache.doris.datasource.hudi.HudiUtils;
import org.apache.doris.rewrite.ExprRewriter;
import org.apache.doris.rewrite.ExprRewriter.ClauseType;

View File

@ -133,13 +133,13 @@ import org.apache.doris.datasource.EsExternalCatalog;
import org.apache.doris.datasource.ExternalMetaCacheMgr;
import org.apache.doris.datasource.ExternalMetaIdMgr;
import org.apache.doris.datasource.InternalCatalog;
import org.apache.doris.datasource.es.EsRepository;
import org.apache.doris.datasource.hive.HiveTransactionMgr;
import org.apache.doris.datasource.hive.event.MetastoreEventsProcessor;
import org.apache.doris.deploy.DeployManager;
import org.apache.doris.deploy.impl.AmbariDeployManager;
import org.apache.doris.deploy.impl.K8sDeployManager;
import org.apache.doris.deploy.impl.LocalFileDeployManager;
import org.apache.doris.external.elasticsearch.EsRepository;
import org.apache.doris.ha.BDBHA;
import org.apache.doris.ha.FrontendNodeType;
import org.apache.doris.ha.HAProtocol;

View File

@ -19,7 +19,7 @@ package org.apache.doris.catalog;
import org.apache.doris.common.DdlException;
import org.apache.doris.common.proc.BaseProcResult;
import org.apache.doris.external.elasticsearch.EsUtil;
import org.apache.doris.datasource.es.EsUtil;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;

View File

@ -20,10 +20,10 @@ package org.apache.doris.catalog;
import org.apache.doris.common.DdlException;
import org.apache.doris.common.UserException;
import org.apache.doris.common.io.Text;
import org.apache.doris.external.elasticsearch.EsMetaStateTracker;
import org.apache.doris.external.elasticsearch.EsRestClient;
import org.apache.doris.external.elasticsearch.EsTablePartitions;
import org.apache.doris.external.elasticsearch.EsUtil;
import org.apache.doris.datasource.es.EsMetaStateTracker;
import org.apache.doris.datasource.es.EsRestClient;
import org.apache.doris.datasource.es.EsTablePartitions;
import org.apache.doris.datasource.es.EsUtil;
import org.apache.doris.thrift.TEsTable;
import org.apache.doris.thrift.TTableDescriptor;
import org.apache.doris.thrift.TTableType;

View File

@ -1,107 +0,0 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package org.apache.doris.catalog;
import com.google.common.collect.Maps;
import java.util.Iterator;
import java.util.Map;
/**
* Iceberg property contains information to connect a remote iceberg db or table.
*/
public class IcebergProperty {
public static final String ICEBERG_DATABASE = "iceberg.database";
public static final String ICEBERG_TABLE = "iceberg.table";
public static final String ICEBERG_HIVE_METASTORE_URIS = "iceberg.hive.metastore.uris";
public static final String ICEBERG_CATALOG_TYPE = "iceberg.catalog.type";
public static final String ICEBERG_HDFS_PREFIX = "dfs";
private boolean exist;
private String database;
private String table;
private String hiveMetastoreUris;
private String catalogType;
private Map<String, String> dfsProperties = Maps.newHashMap();
private void initDfsProperties(Map<String, String> properties) {
Iterator<Map.Entry<String, String>> iterator = properties.entrySet().iterator();
while (iterator.hasNext()) {
Map.Entry<String, String> entry = iterator.next();
if (entry.getKey().startsWith(ICEBERG_HDFS_PREFIX)) {
dfsProperties.put(entry.getKey(), entry.getValue());
}
}
}
public IcebergProperty(Map<String, String> properties) {
if (properties != null && !properties.isEmpty()) {
this.exist = true;
this.database = properties.get(ICEBERG_DATABASE);
this.table = properties.get(ICEBERG_TABLE);
this.hiveMetastoreUris = properties.get(ICEBERG_HIVE_METASTORE_URIS);
this.catalogType = properties.get(ICEBERG_CATALOG_TYPE);
initDfsProperties(properties);
} else {
this.exist = false;
}
}
// Create a new Iceberg property from other property
public IcebergProperty(IcebergProperty otherProperty) {
this.exist = otherProperty.exist;
this.database = otherProperty.database;
this.table = otherProperty.table;
this.hiveMetastoreUris = otherProperty.hiveMetastoreUris;
this.catalogType = otherProperty.catalogType;
this.dfsProperties = otherProperty.dfsProperties;
}
public boolean isExist() {
return exist;
}
public String getDatabase() {
return database;
}
public String getTable() {
return table;
}
public String getHiveMetastoreUris() {
return hiveMetastoreUris;
}
public String getCatalogType() {
return catalogType;
}
public String getProperties() {
return "";
}
public void setTable(String table) {
this.table = table;
}
public Map<String, String> getDfsProperties() {
return dfsProperties;
}
}

View File

@ -20,8 +20,8 @@ package org.apache.doris.catalog.external;
import org.apache.doris.catalog.Column;
import org.apache.doris.catalog.EsTable;
import org.apache.doris.datasource.EsExternalCatalog;
import org.apache.doris.external.elasticsearch.EsRestClient;
import org.apache.doris.external.elasticsearch.EsUtil;
import org.apache.doris.datasource.es.EsRestClient;
import org.apache.doris.datasource.es.EsUtil;
import org.apache.doris.thrift.TEsTable;
import org.apache.doris.thrift.TTableDescriptor;
import org.apache.doris.thrift.TTableType;

View File

@ -20,7 +20,6 @@ package org.apache.doris.catalog.external;
import org.apache.doris.catalog.Column;
import org.apache.doris.catalog.Env;
import org.apache.doris.catalog.HiveMetaStoreClientHelper;
import org.apache.doris.catalog.HudiUtils;
import org.apache.doris.catalog.ListPartitionItem;
import org.apache.doris.catalog.PartitionItem;
import org.apache.doris.catalog.PartitionType;
@ -32,7 +31,8 @@ import org.apache.doris.datasource.HMSExternalCatalog;
import org.apache.doris.datasource.hive.HMSCachedClient;
import org.apache.doris.datasource.hive.HiveMetaStoreCache;
import org.apache.doris.datasource.hive.HivePartition;
import org.apache.doris.external.iceberg.util.IcebergUtils;
import org.apache.doris.datasource.hudi.HudiUtils;
import org.apache.doris.datasource.iceberg.IcebergUtils;
import org.apache.doris.mtmv.MTMVMaxTimestampSnapshot;
import org.apache.doris.mtmv.MTMVRelatedTableIf;
import org.apache.doris.mtmv.MTMVSnapshotIf;

View File

@ -20,7 +20,7 @@ package org.apache.doris.catalog.external;
import org.apache.doris.catalog.Column;
import org.apache.doris.catalog.HiveMetaStoreClientHelper;
import org.apache.doris.datasource.iceberg.IcebergExternalCatalog;
import org.apache.doris.external.iceberg.util.IcebergUtils;
import org.apache.doris.datasource.iceberg.IcebergUtils;
import org.apache.doris.statistics.AnalysisInfo;
import org.apache.doris.statistics.BaseAnalysisTask;
import org.apache.doris.statistics.ColumnStatistic;

View File

@ -24,7 +24,7 @@ import org.apache.doris.catalog.PartitionType;
import org.apache.doris.catalog.RangePartitionInfo;
import org.apache.doris.catalog.TableIf.TableType;
import org.apache.doris.common.AnalysisException;
import org.apache.doris.external.elasticsearch.EsShardPartitions;
import org.apache.doris.datasource.es.EsShardPartitions;
import com.google.common.base.Joiner;
import com.google.common.base.Preconditions;

View File

@ -21,8 +21,8 @@ import org.apache.doris.catalog.Database;
import org.apache.doris.catalog.EsTable;
import org.apache.doris.common.AnalysisException;
import org.apache.doris.common.util.ListComparator;
import org.apache.doris.external.elasticsearch.EsShardPartitions;
import org.apache.doris.external.elasticsearch.EsShardRouting;
import org.apache.doris.datasource.es.EsShardPartitions;
import org.apache.doris.datasource.es.EsShardRouting;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableList;

View File

@ -20,8 +20,8 @@ package org.apache.doris.datasource;
import org.apache.doris.catalog.EsResource;
import org.apache.doris.catalog.external.EsExternalDatabase;
import org.apache.doris.common.DdlException;
import org.apache.doris.external.elasticsearch.DorisEsException;
import org.apache.doris.external.elasticsearch.EsRestClient;
import org.apache.doris.datasource.es.DorisEsException;
import org.apache.doris.datasource.es.EsRestClient;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Lists;

View File

@ -132,10 +132,10 @@ import org.apache.doris.common.util.QueryableReentrantLock;
import org.apache.doris.common.util.SqlParserUtils;
import org.apache.doris.common.util.TimeUtils;
import org.apache.doris.common.util.Util;
import org.apache.doris.datasource.es.EsRepository;
import org.apache.doris.datasource.hive.HMSCachedClient;
import org.apache.doris.datasource.hive.HMSCachedClientFactory;
import org.apache.doris.datasource.property.constants.HMSProperties;
import org.apache.doris.external.elasticsearch.EsRepository;
import org.apache.doris.nereids.trees.plans.commands.info.DropMTMVInfo;
import org.apache.doris.nereids.trees.plans.commands.info.TableNameInfo;
import org.apache.doris.persist.AlterDatabasePropertyInfo;

View File

@ -15,7 +15,7 @@
// specific language governing permissions and limitations
// under the License.
package org.apache.doris.external.elasticsearch;
package org.apache.doris.datasource.es;
public class DorisEsException extends RuntimeException {

View File

@ -15,7 +15,7 @@
// specific language governing permissions and limitations
// under the License.
package org.apache.doris.external.elasticsearch;
package org.apache.doris.datasource.es;
/**

View File

@ -15,7 +15,7 @@
// specific language governing permissions and limitations
// under the License.
package org.apache.doris.external.elasticsearch;
package org.apache.doris.datasource.es;
import org.apache.doris.catalog.EsTable;
import org.apache.doris.common.UserException;

View File

@ -15,7 +15,7 @@
// specific language governing permissions and limitations
// under the License.
package org.apache.doris.external.elasticsearch;
package org.apache.doris.datasource.es;
import org.apache.doris.thrift.TNetworkAddress;

View File

@ -15,7 +15,7 @@
// specific language governing permissions and limitations
// under the License.
package org.apache.doris.external.elasticsearch;
package org.apache.doris.datasource.es;
import org.apache.doris.catalog.Database;

View File

@ -15,7 +15,7 @@
// specific language governing permissions and limitations
// under the License.
package org.apache.doris.external.elasticsearch;
package org.apache.doris.datasource.es;
import org.apache.doris.common.util.JsonUtil;

View File

@ -15,7 +15,7 @@
// specific language governing permissions and limitations
// under the License.
package org.apache.doris.external.elasticsearch;
package org.apache.doris.datasource.es;
import org.apache.doris.analysis.SinglePartitionDesc;
import org.apache.doris.catalog.PartitionKey;

View File

@ -15,7 +15,7 @@
// specific language governing permissions and limitations
// under the License.
package org.apache.doris.external.elasticsearch;
package org.apache.doris.datasource.es;
import org.apache.doris.thrift.TNetworkAddress;

View File

@ -15,7 +15,7 @@
// specific language governing permissions and limitations
// under the License.
package org.apache.doris.external.elasticsearch;
package org.apache.doris.datasource.es;
import org.apache.doris.catalog.Column;
import org.apache.doris.catalog.EsTable;

View File

@ -15,7 +15,7 @@
// specific language governing permissions and limitations
// under the License.
package org.apache.doris.external.elasticsearch;
package org.apache.doris.datasource.es;
import lombok.AllArgsConstructor;
import lombok.Data;

View File

@ -15,7 +15,7 @@
// specific language governing permissions and limitations
// under the License.
package org.apache.doris.external.elasticsearch;
package org.apache.doris.datasource.es;
import org.apache.doris.analysis.DistributionDesc;
import org.apache.doris.analysis.PartitionDesc;

View File

@ -15,7 +15,7 @@
// specific language governing permissions and limitations
// under the License.
package org.apache.doris.external.elasticsearch;
package org.apache.doris.datasource.es;
import org.apache.doris.catalog.Column;
import org.apache.doris.catalog.EsTable;

View File

@ -15,7 +15,7 @@
// specific language governing permissions and limitations
// under the License.
package org.apache.doris.external.elasticsearch;
package org.apache.doris.datasource.es;
import java.util.HashMap;
import java.util.Map;

View File

@ -15,7 +15,7 @@
// specific language governing permissions and limitations
// under the License.
package org.apache.doris.external.elasticsearch;
package org.apache.doris.datasource.es;
import org.apache.doris.analysis.BinaryPredicate;
import org.apache.doris.analysis.BoolLiteral;
@ -477,7 +477,7 @@ public final class QueryBuilders {
* @param out used to generate JSON elements
* @throws IOException if IO error occurred
*/
abstract void toJson(JsonGenerator out) throws IOException;
public abstract void toJson(JsonGenerator out) throws IOException;
/**
* Convert query to JSON format and catch error.
@ -509,7 +509,7 @@ public final class QueryBuilders {
}
@Override
void toJson(JsonGenerator out) throws IOException {
public void toJson(JsonGenerator out) throws IOException {
JsonNode jsonNode = mapper.readTree(value);
out.writeStartObject();
Iterator<Entry<String, JsonNode>> values = jsonNode.fields();
@ -541,26 +541,26 @@ public final class QueryBuilders {
return this;
}
BoolQueryBuilder filter(QueryBuilder queryBuilder) {
public BoolQueryBuilder filter(QueryBuilder queryBuilder) {
Objects.requireNonNull(queryBuilder);
filterClauses.add(queryBuilder);
return this;
}
BoolQueryBuilder mustNot(QueryBuilder queryBuilder) {
public BoolQueryBuilder mustNot(QueryBuilder queryBuilder) {
Objects.requireNonNull(queryBuilder);
mustNotClauses.add(queryBuilder);
return this;
}
BoolQueryBuilder should(QueryBuilder queryBuilder) {
public BoolQueryBuilder should(QueryBuilder queryBuilder) {
Objects.requireNonNull(queryBuilder);
shouldClauses.add(queryBuilder);
return this;
}
@Override
protected void toJson(JsonGenerator out) throws IOException {
public void toJson(JsonGenerator out) throws IOException {
out.writeStartObject();
out.writeFieldName("bool");
out.writeStartObject();
@ -603,7 +603,7 @@ public final class QueryBuilders {
}
@Override
void toJson(final JsonGenerator out) throws IOException {
public void toJson(final JsonGenerator out) throws IOException {
out.writeStartObject();
out.writeFieldName("term");
out.writeStartObject();
@ -627,7 +627,7 @@ public final class QueryBuilders {
}
@Override
void toJson(final JsonGenerator out) throws IOException {
public void toJson(final JsonGenerator out) throws IOException {
out.writeStartObject();
out.writeFieldName("terms");
out.writeStartObject();
@ -645,7 +645,7 @@ public final class QueryBuilders {
/**
* A Query that matches documents within an range of terms
*/
static class RangeQueryBuilder extends QueryBuilder {
public static class RangeQueryBuilder extends QueryBuilder {
private final String field;
@ -672,29 +672,29 @@ public final class QueryBuilders {
return this;
}
RangeQueryBuilder lt(Object value) {
public RangeQueryBuilder lt(Object value) {
return to(value, false);
}
RangeQueryBuilder lte(Object value) {
public RangeQueryBuilder lte(Object value) {
return to(value, true);
}
RangeQueryBuilder gt(Object value) {
public RangeQueryBuilder gt(Object value) {
return from(value, false);
}
RangeQueryBuilder gte(Object value) {
public RangeQueryBuilder gte(Object value) {
return from(value, true);
}
RangeQueryBuilder format(String format) {
public RangeQueryBuilder format(String format) {
this.format = format;
return this;
}
@Override
void toJson(final JsonGenerator out) throws IOException {
public void toJson(final JsonGenerator out) throws IOException {
if (lt == null && gt == null) {
throw new IllegalStateException("Either lower or upper bound should be provided");
}
@ -745,7 +745,7 @@ public final class QueryBuilders {
}
@Override
void toJson(JsonGenerator out) throws IOException {
public void toJson(JsonGenerator out) throws IOException {
out.writeStartObject();
out.writeFieldName("wildcard");
out.writeStartObject();
@ -768,7 +768,7 @@ public final class QueryBuilders {
}
@Override
void toJson(JsonGenerator out) throws IOException {
public void toJson(JsonGenerator out) throws IOException {
out.writeStartObject();
out.writeFieldName("exists");
out.writeStartObject();
@ -788,7 +788,7 @@ public final class QueryBuilders {
}
@Override
void toJson(final JsonGenerator out) throws IOException {
public void toJson(final JsonGenerator out) throws IOException {
out.writeStartObject();
out.writeFieldName("match_all");
out.writeStartObject();

View File

@ -15,7 +15,7 @@
// specific language governing permissions and limitations
// under the License.
package org.apache.doris.external.elasticsearch;
package org.apache.doris.datasource.es;
import org.apache.doris.catalog.Column;
import org.apache.doris.catalog.EsTable;

View File

@ -15,7 +15,7 @@
// specific language governing permissions and limitations
// under the License.
package org.apache.doris.external.elasticsearch;
package org.apache.doris.datasource.es;
/**

View File

@ -39,7 +39,6 @@ import org.apache.doris.datasource.CacheException;
import org.apache.doris.datasource.HMSExternalCatalog;
import org.apache.doris.datasource.hive.AcidInfo.DeleteDeltaInfo;
import org.apache.doris.datasource.property.PropertyConverter;
import org.apache.doris.external.hive.util.HiveUtil;
import org.apache.doris.fs.FileSystemCache;
import org.apache.doris.fs.RemoteFiles;
import org.apache.doris.fs.remote.RemoteFile;

View File

@ -0,0 +1,103 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package org.apache.doris.datasource.hive;
import org.apache.doris.catalog.external.HMSExternalTable;
import org.apache.doris.common.UserException;
import org.apache.doris.fs.remote.BrokerFileSystem;
import org.apache.doris.fs.remote.RemoteFileSystem;
import com.google.common.base.Preconditions;
import org.apache.hadoop.hive.ql.io.SymlinkTextInputFormat;
import org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat;
import org.apache.hadoop.mapred.InputFormat;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.TextInputFormat;
import org.apache.hadoop.util.ReflectionUtils;
import java.io.UnsupportedEncodingException;
import java.net.URLDecoder;
import java.nio.charset.StandardCharsets;
/**
* Hive util for create or query hive table.
*/
/**
 * Hive util for create or query hive table.
 */
public final class HiveUtil {

    private HiveUtil() {
        // static utility class, no instances
    }

    /**
     * get input format class from inputFormatName.
     *
     * @param jobConf jobConf used when getInputFormatClass
     * @param inputFormatName inputFormat class name
     * @param symlinkTarget use target inputFormat class when inputFormat is SymlinkTextInputFormat
     * @return a class of inputFormat.
     * @throws UserException when class not found.
     */
    public static InputFormat<?, ?> getInputFormat(JobConf jobConf,
            String inputFormatName, boolean symlinkTarget) throws UserException {
        try {
            Class<? extends InputFormat<?, ?>> clazz = getInputFormatClass(jobConf, inputFormatName);
            if (symlinkTarget && clazz == SymlinkTextInputFormat.class) {
                // symlink targets are always TextInputFormat
                clazz = TextInputFormat.class;
            }
            return ReflectionUtils.newInstance(clazz, jobConf);
        } catch (ClassNotFoundException | RuntimeException e) {
            throw new UserException("Unable to create input format " + inputFormatName, e);
        }
    }

    /** Resolves the inputFormat class by name, mapping CDH Parquet aliases to the standard class. */
    @SuppressWarnings({"unchecked", "RedundantCast"})
    private static Class<? extends InputFormat<?, ?>> getInputFormatClass(JobConf conf, String inputFormatName)
            throws ClassNotFoundException {
        // CDH uses different names for Parquet
        boolean cdhParquetAlias = "parquet.hive.DeprecatedParquetInputFormat".equals(inputFormatName)
                || "parquet.hive.MapredParquetInputFormat".equals(inputFormatName);
        if (cdhParquetAlias) {
            return MapredParquetInputFormat.class;
        }
        Class<?> loaded = conf.getClassByName(inputFormatName);
        return (Class<? extends InputFormat<?, ?>>) loaded.asSubclass(InputFormat.class);
    }

    /** Returns whether files of the given input format at this location can be split. */
    public static boolean isSplittable(RemoteFileSystem remoteFileSystem, String inputFormat,
            String location, JobConf jobConf) throws UserException {
        if (!(remoteFileSystem instanceof BrokerFileSystem)) {
            // All supported hive input format are splittable
            return HMSExternalTable.SUPPORTED_HIVE_FILE_FORMATS.contains(inputFormat);
        }
        return ((BrokerFileSystem) remoteFileSystem).isSplittable(location, inputFormat);
    }

    /** Extracts and URL-decodes the value part of a "key=value" partition spec. */
    public static String getHivePartitionValue(String part) {
        String[] pieces = part.split("=");
        Preconditions.checkState(pieces.length == 2, String.format("Malformed partition name %s", part));
        try {
            // hive partition value maybe contains special characters like '=' and '/'
            return URLDecoder.decode(pieces[1], StandardCharsets.UTF_8.name());
        } catch (UnsupportedEncodingException e) {
            // It should not be here
            throw new RuntimeException(e);
        }
    }
}

View File

@ -15,7 +15,14 @@
// specific language governing permissions and limitations
// under the License.
package org.apache.doris.catalog;
package org.apache.doris.datasource.hudi;
import org.apache.doris.catalog.ArrayType;
import org.apache.doris.catalog.MapType;
import org.apache.doris.catalog.ScalarType;
import org.apache.doris.catalog.StructField;
import org.apache.doris.catalog.StructType;
import org.apache.doris.catalog.Type;
import com.google.common.base.Preconditions;
import org.apache.avro.LogicalType;

View File

@ -15,7 +15,7 @@
// specific language governing permissions and limitations
// under the License.
package org.apache.doris.external.iceberg.util;
package org.apache.doris.datasource.iceberg;
import org.apache.doris.analysis.BinaryPredicate;

View File

@ -15,7 +15,7 @@
// specific language governing permissions and limitations
// under the License.
package org.apache.doris.common.util;
package org.apache.doris.datasource.kafka;
import org.apache.doris.catalog.Env;
import org.apache.doris.common.LoadException;

View File

@ -1,208 +0,0 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package org.apache.doris.external.hive.util;
import org.apache.doris.catalog.ArrayType;
import org.apache.doris.catalog.Column;
import org.apache.doris.catalog.ScalarType;
import org.apache.doris.catalog.Type;
import org.apache.doris.catalog.external.HMSExternalTable;
import org.apache.doris.common.AnalysisException;
import org.apache.doris.common.UserException;
import org.apache.doris.fs.remote.BrokerFileSystem;
import org.apache.doris.fs.remote.RemoteFileSystem;
import com.google.common.base.Preconditions;
import com.google.common.collect.Lists;
import org.apache.hadoop.hive.metastore.api.FieldSchema;
import org.apache.hadoop.hive.ql.io.SymlinkTextInputFormat;
import org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.typeinfo.ListTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
import org.apache.hadoop.mapred.InputFormat;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.TextInputFormat;
import org.apache.hadoop.util.ReflectionUtils;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import java.io.UnsupportedEncodingException;
import java.net.URLDecoder;
import java.nio.charset.StandardCharsets;
import java.util.List;
/**
* Hive util for create or query hive table.
*/
/**
 * Hive util for create or query hive table.
 */
public final class HiveUtil {
    private static final Logger LOG = LogManager.getLogger(HiveUtil.class);

    // Utility class; not instantiable.
    private HiveUtil() {
    }

    /**
     * get input format class from inputFormatName.
     *
     * @param jobConf jobConf used when getInputFormatClass
     * @param inputFormatName inputFormat class name
     * @param symlinkTarget use target inputFormat class when inputFormat is SymlinkTextInputFormat
     * @return a class of inputFormat.
     * @throws UserException when class not found.
     */
    public static InputFormat<?, ?> getInputFormat(JobConf jobConf,
            String inputFormatName, boolean symlinkTarget) throws UserException {
        try {
            Class<? extends InputFormat<?, ?>> inputFormatClass = getInputFormatClass(jobConf, inputFormatName);
            if (symlinkTarget && (inputFormatClass == SymlinkTextInputFormat.class)) {
                // symlink targets are always TextInputFormat
                inputFormatClass = TextInputFormat.class;
            }
            // Instantiate the format via Hadoop reflection so it is configured with jobConf.
            return ReflectionUtils.newInstance(inputFormatClass, jobConf);
        } catch (ClassNotFoundException | RuntimeException e) {
            throw new UserException("Unable to create input format " + inputFormatName, e);
        }
    }

    // Resolves the class object for an input format name; maps CDH-specific
    // Parquet class names onto the standard MapredParquetInputFormat.
    @SuppressWarnings({"unchecked", "RedundantCast"})
    private static Class<? extends InputFormat<?, ?>> getInputFormatClass(JobConf conf, String inputFormatName)
            throws ClassNotFoundException {
        // CDH uses different names for Parquet
        if ("parquet.hive.DeprecatedParquetInputFormat".equals(inputFormatName)
                || "parquet.hive.MapredParquetInputFormat".equals(inputFormatName)) {
            return MapredParquetInputFormat.class;
        }
        Class<?> clazz = conf.getClassByName(inputFormatName);
        return (Class<? extends InputFormat<?, ?>>) clazz.asSubclass(InputFormat.class);
    }

    /**
     * transform hiveSchema to Doris schema.
     *
     * @param hiveSchema hive schema
     * @return doris schema
     * @throws AnalysisException when transform failed.
     */
    public static List<Column> transformHiveSchema(List<FieldSchema> hiveSchema) throws AnalysisException {
        List<Column> newSchema = Lists.newArrayList();
        for (FieldSchema hiveColumn : hiveSchema) {
            try {
                newSchema.add(HiveUtil.transformHiveField(hiveColumn));
            } catch (UnsupportedOperationException e) {
                // NOTE(review): despite the "ignore column" wording, the exception is
                // rethrown, so one unsupported column aborts the whole schema transform.
                LOG.warn("Unsupported data type in Doris, ignore column[{}], with error: {}",
                        hiveColumn.getName(), e.getMessage());
                throw e;
            }
        }
        return newSchema;
    }

    /**
     * tranform hive field to doris column.
     *
     * @param field hive field to be transformed
     * @return doris column
     */
    public static Column transformHiveField(FieldSchema field) {
        TypeInfo hiveTypeInfo = TypeInfoUtils.getTypeInfoFromTypeString(field.getType());
        // NOTE(review): method name contains a typo ("ToiveDoris"); it is used
        // consistently within this file, so it is left as-is here.
        Type type = convertHiveTypeToiveDoris(hiveTypeInfo);
        // Column is created as non-key, nullable, with no default value.
        return new Column(field.getName(), type, false, null, true, null, field.getComment());
    }

    // Maps a Hive TypeInfo to the corresponding Doris Type. Primitive types map
    // one-to-one; lists of primitives map to ArrayType; MAP/STRUCT/UNION and
    // nested lists are rejected with UnsupportedOperationException.
    private static Type convertHiveTypeToiveDoris(TypeInfo hiveTypeInfo) {
        switch (hiveTypeInfo.getCategory()) {
            case PRIMITIVE: {
                PrimitiveTypeInfo primitiveTypeInfo = (PrimitiveTypeInfo) hiveTypeInfo;
                switch (primitiveTypeInfo.getPrimitiveCategory()) {
                    case VOID:
                        return Type.NULL;
                    case BOOLEAN:
                        return Type.BOOLEAN;
                    case BYTE:
                        return Type.TINYINT;
                    case SHORT:
                        return Type.SMALLINT;
                    case INT:
                        return Type.INT;
                    case LONG:
                        return Type.BIGINT;
                    case FLOAT:
                        return Type.FLOAT;
                    case DOUBLE:
                        return Type.DOUBLE;
                    case STRING:
                        return Type.STRING;
                    case CHAR:
                        return Type.CHAR;
                    case VARCHAR:
                        return Type.VARCHAR;
                    case DATE:
                        // Date/datetime use the session/cluster default date type.
                        return ScalarType.getDefaultDateType(Type.DATE);
                    case TIMESTAMP:
                        return ScalarType.getDefaultDateType(Type.DATETIME);
                    case DECIMAL:
                        // Hive decimals always map to DECIMALV2 here, regardless of precision/scale.
                        return Type.DECIMALV2;
                    default:
                        throw new UnsupportedOperationException("Unsupported type: "
                                + primitiveTypeInfo.getPrimitiveCategory());
                }
            }
            case LIST:
                TypeInfo elementTypeInfo = ((ListTypeInfo) hiveTypeInfo)
                        .getListElementTypeInfo();
                Type newType = null;
                // Only lists of primitive elements are supported; nested complex
                // element types are rejected.
                if (elementTypeInfo.getCategory() == ObjectInspector.Category.PRIMITIVE) {
                    newType = convertHiveTypeToiveDoris(elementTypeInfo);
                } else {
                    throw new UnsupportedOperationException("Unsupported type: " + hiveTypeInfo.toString());
                }
                return new ArrayType(newType);
            case MAP:
            case STRUCT:
            case UNION:
            default:
                throw new UnsupportedOperationException("Unsupported type: " + hiveTypeInfo.toString());
        }
    }

    // Returns whether files of the given input format at this location can be
    // split; broker file systems answer for themselves, otherwise all supported
    // hive formats are treated as splittable.
    public static boolean isSplittable(RemoteFileSystem remoteFileSystem, String inputFormat,
            String location, JobConf jobConf) throws UserException {
        if (remoteFileSystem instanceof BrokerFileSystem) {
            return ((BrokerFileSystem) remoteFileSystem).isSplittable(location, inputFormat);
        }
        // All supported hive input format are splittable
        return HMSExternalTable.SUPPORTED_HIVE_FILE_FORMATS.contains(inputFormat);
    }

    // Extracts and URL-decodes the value from a "key=value" partition spec segment.
    public static String getHivePartitionValue(String part) {
        String[] kv = part.split("=");
        Preconditions.checkState(kv.length == 2, String.format("Malformed partition name %s", part));
        try {
            // hive partition value maybe contains special characters like '=' and '/'
            return URLDecoder.decode(kv[1], StandardCharsets.UTF_8.name());
        } catch (UnsupportedEncodingException e) {
            // It should not be here
            throw new RuntimeException(e);
        }
    }
}

View File

@ -1,116 +0,0 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package org.apache.doris.external.iceberg.util;
import org.apache.doris.catalog.ArrayType;
import org.apache.doris.catalog.MapType;
import org.apache.doris.catalog.PrimitiveType;
import org.apache.doris.catalog.ScalarType;
import org.apache.doris.catalog.StructField;
import org.apache.doris.catalog.StructType;
import org.apache.iceberg.types.Type;
import org.apache.iceberg.types.Types;
import java.util.List;
/**
* Convert Doris type to Iceberg type
*/
/**
 * Convert Doris type to Iceberg type
 *
 * <p>Only atomic (scalar) types are convertible; struct/array/map inputs raise
 * {@link UnsupportedOperationException}.
 */
public class DorisTypeToType extends DorisTypeVisitor<Type> {
    private final StructType root;
    private int nextId = 0;

    public DorisTypeToType() {
        this.root = null;
    }

    public DorisTypeToType(StructType root) {
        this.root = root;
        // the root struct's fields use the first ids
        this.nextId = root.getFields().size();
    }

    /** Returns the next unused field id and advances the counter. */
    private int getNextId() {
        return nextId++;
    }

    @Override
    public Type struct(StructType struct, List<Type> types) {
        throw new UnsupportedOperationException(
                "Not a supported type: " + struct.toSql(0));
    }

    @Override
    public Type field(StructField field, Type typeResult) {
        return typeResult;
    }

    @Override
    public Type array(ArrayType array, Type elementType) {
        throw new UnsupportedOperationException(
                "Not a supported type: " + array.toSql(0));
    }

    @Override
    public Type map(MapType map, Type keyType, Type valueType) {
        throw new UnsupportedOperationException(
                "Not a supported type: " + map.toSql(0));
    }

    @Override
    public Type atomic(org.apache.doris.catalog.Type atomic) {
        PrimitiveType primitive = atomic.getPrimitiveType();
        // Decimals first: decimal v3 covers several enum constants, so it is
        // simpler to guard on the predicate than to enumerate switch cases.
        if (primitive == PrimitiveType.DECIMALV2 || primitive.isDecimalV3Type()) {
            ScalarType scalar = (ScalarType) atomic;
            return Types.DecimalType.of(scalar.getScalarPrecision(), scalar.getScalarScale());
        }
        switch (primitive) {
            case BOOLEAN:
                return Types.BooleanType.get();
            case TINYINT:
            case SMALLINT:
            case INT:
                return Types.IntegerType.get();
            case BIGINT:
            case LARGEINT:
                return Types.LongType.get();
            case FLOAT:
                return Types.FloatType.get();
            case DOUBLE:
                return Types.DoubleType.get();
            case CHAR:
            case VARCHAR:
                return Types.StringType.get();
            case DATE:
            case DATEV2:
                return Types.DateType.get();
            case TIME:
            case TIMEV2:
                return Types.TimeType.get();
            case DATETIME:
            case DATETIMEV2:
                return Types.TimestampType.withZone();
            default:
                // unsupported type: PrimitiveType.HLL BITMAP BINARY
                throw new UnsupportedOperationException(
                        "Not a supported type: " + atomic.getPrimitiveType());
        }
    }
}

View File

@ -1,79 +0,0 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package org.apache.doris.external.iceberg.util;
import org.apache.doris.catalog.ArrayType;
import org.apache.doris.catalog.MapType;
import org.apache.doris.catalog.StructField;
import org.apache.doris.catalog.StructType;
import org.apache.doris.catalog.Type;
import com.google.common.collect.Lists;
import java.util.List;
/**
* Utils to visit doris and iceberg type
* @param <T>
*/
/**
 * Utils to visit doris and iceberg type
 *
 * <p>Dispatches on the concrete Doris type class (struct / map / array / atomic).
 * Default callbacks return {@code null}; subclasses override the ones they need.
 *
 * @param <T> result type produced by the visitor callbacks
 */
public class DorisTypeVisitor<T> {
    public static <T> T visit(Type type, DorisTypeVisitor<T> visitor) {
        if (type instanceof StructType) {
            StructType struct = (StructType) type;
            List<StructField> structFields = struct.getFields();
            List<T> fieldResults = Lists.newArrayListWithExpectedSize(structFields.size());
            for (StructField structField : structFields) {
                // Visit the field's type first, then hand both to the callback.
                T childResult = visit(structField.getType(), visitor);
                fieldResults.add(visitor.field(structField, childResult));
            }
            return visitor.struct(struct, fieldResults);
        }
        if (type instanceof MapType) {
            MapType map = (MapType) type;
            // Key is visited before value, matching declaration order.
            T keyResult = visit(map.getKeyType(), visitor);
            T valueResult = visit(map.getValueType(), visitor);
            return visitor.map(map, keyResult, valueResult);
        }
        if (type instanceof ArrayType) {
            ArrayType array = (ArrayType) type;
            return visitor.array(array, visit(array.getItemType(), visitor));
        }
        // Anything that is not a container is treated as an atomic type.
        return visitor.atomic(type);
    }

    public T struct(StructType struct, List<T> fieldResults) {
        return null;
    }

    public T field(StructField field, T typeResult) {
        return null;
    }

    public T array(ArrayType array, T elementResult) {
        return null;
    }

    public T map(MapType map, T keyResult, T valueResult) {
        return null;
    }

    public T atomic(Type atomic) {
        return null;
    }
}

View File

@ -1,98 +0,0 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package org.apache.doris.external.iceberg.util;
import org.apache.doris.catalog.ScalarType;
import org.apache.doris.catalog.Type;
import org.apache.iceberg.Schema;
import org.apache.iceberg.types.TypeUtil;
import org.apache.iceberg.types.Types;
import java.util.List;
/**
* Convert Iceberg types to Doris type
*/
/**
 * Convert Iceberg types to Doris type
 *
 * <p>Only primitive Iceberg types are convertible; struct/list/map inputs raise
 * {@link UnsupportedOperationException}.
 */
public class TypeToDorisType extends TypeUtil.SchemaVisitor<Type> {
    public TypeToDorisType() {
    }

    /** Builds the standard "cannot convert" exception for an unconvertible type. */
    private static UnsupportedOperationException unsupported(Object type) {
        return new UnsupportedOperationException(
                String.format("Cannot convert Iceberg type[%s] to Doris type.", type));
    }

    @Override
    public Type schema(Schema schema, Type structType) {
        return structType;
    }

    @Override
    public Type struct(Types.StructType struct, List<Type> fieldResults) {
        throw unsupported(struct);
    }

    @Override
    public Type field(Types.NestedField field, Type fieldResult) {
        return fieldResult;
    }

    @Override
    public Type list(Types.ListType list, Type elementResult) {
        throw unsupported(list);
    }

    @Override
    public Type map(Types.MapType map, Type keyResult, Type valueResult) {
        throw unsupported(map);
    }

    @Override
    public Type primitive(org.apache.iceberg.types.Type.PrimitiveType primitive) {
        switch (primitive.typeId()) {
            case BOOLEAN:
                return Type.BOOLEAN;
            case INTEGER:
                return Type.INT;
            case LONG:
                return Type.BIGINT;
            case FLOAT:
                return Type.FLOAT;
            case DOUBLE:
                return Type.DOUBLE;
            case STRING:
                return Type.STRING;
            // use varchar
            case UUID:
                return Type.VARCHAR;
            case DECIMAL: {
                Types.DecimalType decimalType = (Types.DecimalType) primitive;
                return ScalarType.createDecimalType(decimalType.precision(), decimalType.scale());
            }
            case DATE:
                return ScalarType.getDefaultDateType(Type.DATE);
            case TIMESTAMP:
                return ScalarType.getDefaultDateType(Type.DATETIME);
            // unsupported primitive type
            case TIME:
            case FIXED:
            case BINARY:
            default:
                throw unsupported(primitive);
        }
    }
}

View File

@ -31,12 +31,12 @@ import org.apache.doris.common.Pair;
import org.apache.doris.common.UserException;
import org.apache.doris.common.io.Text;
import org.apache.doris.common.util.DebugUtil;
import org.apache.doris.common.util.KafkaUtil;
import org.apache.doris.common.util.LogBuilder;
import org.apache.doris.common.util.LogKey;
import org.apache.doris.common.util.SmallFileMgr;
import org.apache.doris.common.util.SmallFileMgr.SmallFile;
import org.apache.doris.common.util.TimeUtils;
import org.apache.doris.datasource.kafka.KafkaUtil;
import org.apache.doris.load.routineload.kafka.KafkaConfiguration;
import org.apache.doris.load.routineload.kafka.KafkaDataSourceProperties;
import org.apache.doris.persist.AlterRoutineLoadJobOperationLog;

View File

@ -51,7 +51,7 @@ import org.apache.doris.common.util.ParseUtil;
import org.apache.doris.common.util.PropertyAnalyzer;
import org.apache.doris.common.util.Util;
import org.apache.doris.datasource.InternalCatalog;
import org.apache.doris.external.elasticsearch.EsUtil;
import org.apache.doris.datasource.es.EsUtil;
import org.apache.doris.mysql.privilege.PrivPredicate;
import org.apache.doris.nereids.analyzer.UnboundFunction;
import org.apache.doris.nereids.analyzer.UnboundSlot;

View File

@ -30,13 +30,13 @@ import org.apache.doris.catalog.RangePartitionInfo;
import org.apache.doris.catalog.external.EsExternalTable;
import org.apache.doris.common.AnalysisException;
import org.apache.doris.common.UserException;
import org.apache.doris.external.elasticsearch.EsShardPartitions;
import org.apache.doris.external.elasticsearch.EsShardRouting;
import org.apache.doris.external.elasticsearch.EsTablePartitions;
import org.apache.doris.external.elasticsearch.QueryBuilders;
import org.apache.doris.external.elasticsearch.QueryBuilders.BoolQueryBuilder;
import org.apache.doris.external.elasticsearch.QueryBuilders.BuilderOptions;
import org.apache.doris.external.elasticsearch.QueryBuilders.QueryBuilder;
import org.apache.doris.datasource.es.EsShardPartitions;
import org.apache.doris.datasource.es.EsShardRouting;
import org.apache.doris.datasource.es.EsTablePartitions;
import org.apache.doris.datasource.es.QueryBuilders;
import org.apache.doris.datasource.es.QueryBuilders.BoolQueryBuilder;
import org.apache.doris.datasource.es.QueryBuilders.BuilderOptions;
import org.apache.doris.datasource.es.QueryBuilders.QueryBuilder;
import org.apache.doris.planner.external.ExternalScanNode;
import org.apache.doris.planner.external.FederationBackendPolicy;
import org.apache.doris.statistics.StatisticalType;

View File

@ -20,7 +20,6 @@ package org.apache.doris.planner.external.hudi;
import org.apache.doris.analysis.TupleDescriptor;
import org.apache.doris.catalog.Env;
import org.apache.doris.catalog.HiveMetaStoreClientHelper;
import org.apache.doris.catalog.HudiUtils;
import org.apache.doris.catalog.PartitionItem;
import org.apache.doris.catalog.Type;
import org.apache.doris.catalog.external.ExternalTable;
@ -28,6 +27,7 @@ import org.apache.doris.common.AnalysisException;
import org.apache.doris.common.UserException;
import org.apache.doris.common.util.LocationPath;
import org.apache.doris.datasource.hive.HivePartition;
import org.apache.doris.datasource.hudi.HudiUtils;
import org.apache.doris.planner.ListPartitionPrunerV2;
import org.apache.doris.planner.PlanNodeId;
import org.apache.doris.planner.external.FileSplit;

View File

@ -33,7 +33,7 @@ import org.apache.doris.common.UserException;
import org.apache.doris.common.util.LocationPath;
import org.apache.doris.common.util.TimeUtils;
import org.apache.doris.datasource.iceberg.IcebergExternalCatalog;
import org.apache.doris.external.iceberg.util.IcebergUtils;
import org.apache.doris.datasource.iceberg.IcebergUtils;
import org.apache.doris.planner.PlanNodeId;
import org.apache.doris.planner.external.FileQueryScanNode;
import org.apache.doris.planner.external.TableFormatType;

View File

@ -22,7 +22,7 @@ import org.apache.doris.catalog.external.ExternalTable;
import org.apache.doris.catalog.external.HMSExternalTable;
import org.apache.doris.common.AnalysisException;
import org.apache.doris.datasource.hive.HiveMetaStoreCache;
import org.apache.doris.external.hive.util.HiveUtil;
import org.apache.doris.datasource.hive.HiveUtil;
import org.apache.doris.statistics.util.StatisticsUtil;
import com.google.common.collect.Sets;

View File

@ -17,6 +17,8 @@
package org.apache.doris.external.elasticsearch;
import org.apache.doris.datasource.es.EsNodeInfo;
import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.junit.Assert;

View File

@ -20,6 +20,8 @@ package org.apache.doris.external.elasticsearch;
import org.apache.doris.catalog.CatalogTestUtil;
import org.apache.doris.catalog.Env;
import org.apache.doris.catalog.EsTable;
import org.apache.doris.datasource.es.EsShardPartitions;
import org.apache.doris.datasource.es.EsTablePartitions;
import org.junit.Assert;
import org.junit.Test;

View File

@ -26,6 +26,7 @@ import org.apache.doris.catalog.FakeEditLog;
import org.apache.doris.catalog.FakeEnv;
import org.apache.doris.common.DdlException;
import org.apache.doris.common.FeMetaVersion;
import org.apache.doris.datasource.es.EsUtil;
import org.apache.doris.meta.MetaContext;
import org.junit.BeforeClass;

View File

@ -21,6 +21,11 @@ import org.apache.doris.catalog.Column;
import org.apache.doris.catalog.EsTable;
import org.apache.doris.catalog.PrimitiveType;
import org.apache.doris.common.ExceptionChecker;
import org.apache.doris.datasource.es.DorisEsException;
import org.apache.doris.datasource.es.EsRestClient;
import org.apache.doris.datasource.es.EsUtil;
import org.apache.doris.datasource.es.MappingPhase;
import org.apache.doris.datasource.es.SearchContext;
import com.fasterxml.jackson.databind.node.ObjectNode;
import mockit.Expectations;

View File

@ -21,6 +21,11 @@ import org.apache.doris.catalog.Column;
import org.apache.doris.catalog.EsTable;
import org.apache.doris.catalog.PrimitiveType;
import org.apache.doris.common.ExceptionChecker;
import org.apache.doris.datasource.es.EsNodeInfo;
import org.apache.doris.datasource.es.EsRestClient;
import org.apache.doris.datasource.es.EsShardPartitions;
import org.apache.doris.datasource.es.PartitionPhase;
import org.apache.doris.datasource.es.SearchContext;
import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.databind.ObjectMapper;

View File

@ -31,7 +31,8 @@ import org.apache.doris.analysis.LikePredicate;
import org.apache.doris.analysis.SlotRef;
import org.apache.doris.analysis.StringLiteral;
import org.apache.doris.catalog.Type;
import org.apache.doris.external.elasticsearch.QueryBuilders.BuilderOptions;
import org.apache.doris.datasource.es.QueryBuilders;
import org.apache.doris.datasource.es.QueryBuilders.BuilderOptions;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.databind.ObjectMapper;

View File

@ -35,7 +35,7 @@ import org.apache.doris.common.MetaNotFoundException;
import org.apache.doris.common.Pair;
import org.apache.doris.common.UserException;
import org.apache.doris.common.jmockit.Deencapsulation;
import org.apache.doris.common.util.KafkaUtil;
import org.apache.doris.datasource.kafka.KafkaUtil;
import org.apache.doris.load.RoutineLoadDesc;
import org.apache.doris.load.loadv2.LoadTask;
import org.apache.doris.load.routineload.kafka.KafkaConfiguration;

View File

@ -26,8 +26,8 @@ import org.apache.doris.catalog.Table;
import org.apache.doris.common.InternalErrorCode;
import org.apache.doris.common.UserException;
import org.apache.doris.common.jmockit.Deencapsulation;
import org.apache.doris.common.util.KafkaUtil;
import org.apache.doris.datasource.InternalCatalog;
import org.apache.doris.datasource.kafka.KafkaUtil;
import org.apache.doris.persist.EditLog;
import org.apache.doris.thrift.TKafkaRLTaskProgress;
import org.apache.doris.transaction.TransactionException;