[Refactor](Tvf) delete some unused code of tvf and add doc for queries tvf (#26460)

1. Delete some unused code of TVF (table-valued functions).
2. Add documentation for the `queries` TVF: #25051
This commit is contained in:
Tiewei Fang
2023-11-09 09:06:09 +08:00
committed by GitHub
parent 66e591f7f2
commit 7df60a4980
8 changed files with 168 additions and 50 deletions

View File

@ -40,11 +40,6 @@ public class ExportFailMsg implements Writable {
@SerializedName("msg")
private String msg;
public ExportFailMsg() {
this.cancelType = CancelType.UNKNOWN;
this.msg = "";
}
public ExportFailMsg(CancelType cancelType, String msg) {
this.cancelType = cancelType;
this.msg = msg;
@ -54,18 +49,10 @@ public class ExportFailMsg implements Writable {
return cancelType;
}
public void setCancelType(CancelType cancelType) {
this.cancelType = cancelType;
}
public String getMsg() {
return msg;
}
public void setMsg(String msg) {
this.msg = msg;
}
@Override
public String toString() {
return "ExportFailMsg [cancelType=" + cancelType + ", msg=" + msg + "]";

View File

@ -227,7 +227,6 @@ public class ExportJob implements Writable {
public void generateOutfileStatement() throws UserException {
exportTable.readLock();
try {
// generateQueryStmtOld
generateQueryStmt();
} finally {
exportTable.readUnlock();

View File

@ -33,7 +33,6 @@ import org.apache.doris.catalog.StructType;
import org.apache.doris.catalog.Table;
import org.apache.doris.catalog.Type;
import org.apache.doris.common.AnalysisException;
import org.apache.doris.common.FeConstants;
import org.apache.doris.common.Pair;
import org.apache.doris.common.UserException;
import org.apache.doris.common.util.BrokerUtil;
@ -311,13 +310,13 @@ public abstract class ExternalFileTableValuedFunction extends TableValuedFunctio
if (!csvSchema.isEmpty()) {
return csvSchema;
}
if (FeConstants.runningUnitTest) {
Object mockedUtObj = FeConstants.unitTestConstant;
if (mockedUtObj instanceof List) {
return ((List<Column>) mockedUtObj);
}
return new ArrayList<>();
}
// if (FeConstants.runningUnitTest) {
// Object mockedUtObj = FeConstants.unitTestConstant;
// if (mockedUtObj instanceof List) {
// return ((List<Column>) mockedUtObj);
// }
// return new ArrayList<>();
// }
if (this.columns != null) {
return columns;
}

View File

@ -60,7 +60,6 @@ public class HdfsTableValuedFunction extends ExternalFileTableValuedFunction {
// 3. analyze other properties
for (String key : otherProps.keySet()) {
if (HdfsResource.HADOOP_FS_NAME.equalsIgnoreCase(key)) {
// because HADOOP_FS_NAME contains upper and lower case
locationProperties.put(HdfsResource.HADOOP_FS_NAME, otherProps.get(key));
} else {
locationProperties.put(key, otherProps.get(key));

View File

@ -20,15 +20,12 @@ package org.apache.doris.tablefunction;
import org.apache.doris.catalog.Env;
import org.apache.doris.common.AnalysisException;
import org.apache.doris.common.ClientPool;
import org.apache.doris.common.MetaNotFoundException;
import org.apache.doris.common.Pair;
import org.apache.doris.common.UserException;
import org.apache.doris.common.proc.FrontendsProcNode;
import org.apache.doris.common.util.NetUtils;
import org.apache.doris.common.util.TimeUtils;
import org.apache.doris.datasource.CatalogIf;
import org.apache.doris.datasource.HMSExternalCatalog;
import org.apache.doris.datasource.property.constants.HMSProperties;
import org.apache.doris.planner.external.iceberg.IcebergMetadataCache;
import org.apache.doris.qe.ConnectContext;
import org.apache.doris.qe.QueryDetail;
@ -55,10 +52,7 @@ import com.google.common.base.Stopwatch;
import com.google.common.base.Strings;
import com.google.common.collect.Lists;
import com.google.gson.Gson;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.HdfsConfiguration;
import org.apache.iceberg.Snapshot;
import org.apache.iceberg.catalog.TableIdentifier;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.apache.thrift.TException;
@ -67,13 +61,10 @@ import org.jetbrains.annotations.NotNull;
import java.time.Instant;
import java.time.LocalDateTime;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeUnit;
public class MetadataGenerator {
private static final Logger LOG = LogManager.getLogger(MetadataGenerator.class);
@ -480,22 +471,6 @@ public class MetadataGenerator {
result.setDataBatch(filterColumnsRows);
}
private static org.apache.iceberg.Table getIcebergTable(HMSExternalCatalog catalog, String db, String tbl)
throws MetaNotFoundException {
org.apache.iceberg.hive.HiveCatalog hiveCatalog = new org.apache.iceberg.hive.HiveCatalog();
Configuration conf = new HdfsConfiguration();
Map<String, String> properties = catalog.getCatalogProperty().getHadoopProperties();
for (Map.Entry<String, String> entry : properties.entrySet()) {
conf.set(entry.getKey(), entry.getValue());
}
hiveCatalog.setConf(conf);
Map<String, String> catalogProperties = new HashMap<>();
catalogProperties.put(HMSProperties.HIVE_METASTORE_URIS, catalog.getHiveMetastoreUris());
catalogProperties.put("uri", catalog.getHiveMetastoreUris());
hiveCatalog.initialize("hive", catalogProperties);
return hiveCatalog.loadTable(TableIdentifier.of(db, tbl));
}
private static long convertToDateTimeV2(
int year, int month, int day, int hour, int minute, int second, int microsecond) {
return (long) microsecond | (long) second << 20 | (long) minute << 26 | (long) hour << 32