[refactor](statistic) fetch statistic data with catalog and database id (#33928) (#34202)

bp #33928
This commit is contained in:
Mingyu Chen
2024-04-27 09:38:41 +08:00
committed by GitHub
parent c998e2f714
commit 4b9772062b
18 changed files with 152 additions and 96 deletions

View File

@ -319,7 +319,7 @@ public abstract class Table extends MetaObject implements Writable, TableIf {
name = newName;
}
void setQualifiedDbName(String qualifiedDbName) {
public void setQualifiedDbName(String qualifiedDbName) {
this.qualifiedDbName = qualifiedDbName;
}

View File

@ -260,7 +260,7 @@ public enum ErrorCode {
ERR_VIEW_NO_EXPLAIN(1345, new byte[]{'H', 'Y', '0', '0', '0'}, "EXPLAIN/SHOW can not be issued; lacking "
+ "privileges for underlying table"),
ERR_FRM_UNKNOWN_TYPE(1346, new byte[]{'H', 'Y', '0', '0', '0'}, "File '%s' has unknown type '%s' in its header"),
ERR_WRONG_OBJECT(1347, new byte[]{'H', 'Y', '0', '0', '0'}, "'%s.%s' is not %s"),
ERR_WRONG_OBJECT(1347, new byte[]{'H', 'Y', '0', '0', '0'}, "'%s.%s' is not %s. %s."),
ERR_NONUPDATEABLE_COLUMN(1348, new byte[]{'H', 'Y', '0', '0', '0'}, "Column '%s' is not updatable"),
ERR_VIEW_SELECT_DERIVED(1349, new byte[]{'H', 'Y', '0', '0', '0'}, "View's SELECT contains a subquery in the FROM"
+ " clause"),

View File

@ -865,18 +865,22 @@ public class InternalCatalog implements CatalogIf<Database> {
// Check if a view
if (stmt.isView()) {
if (!(table instanceof View)) {
ErrorReport.reportDdlException(ErrorCode.ERR_WRONG_OBJECT, dbName, tableName, "VIEW");
ErrorReport.reportDdlException(ErrorCode.ERR_WRONG_OBJECT, dbName, tableName, "VIEW",
genDropHint(table));
}
} else {
if (table instanceof View) {
ErrorReport.reportDdlException(ErrorCode.ERR_WRONG_OBJECT, dbName, tableName, "TABLE");
ErrorReport.reportDdlException(ErrorCode.ERR_WRONG_OBJECT, dbName, tableName, "TABLE",
genDropHint(table));
}
}
if (!stmt.isMaterializedView() && table instanceof MTMV) {
ErrorReport.reportDdlException(ErrorCode.ERR_WRONG_OBJECT, dbName, tableName, "TABLE");
ErrorReport.reportDdlException(ErrorCode.ERR_WRONG_OBJECT, dbName, tableName, "TABLE",
genDropHint(table));
} else if (stmt.isMaterializedView() && !(table instanceof MTMV)) {
ErrorReport.reportDdlException(ErrorCode.ERR_WRONG_OBJECT, dbName, tableName, "MTMV");
ErrorReport.reportDdlException(ErrorCode.ERR_WRONG_OBJECT, dbName, tableName, "MTMV",
genDropHint(table));
}
if (!stmt.isForceDrop()) {
@ -939,6 +943,18 @@ public class InternalCatalog implements CatalogIf<Database> {
tableName, dbName, stmt.isForceDrop(), costTimes);
}
/**
 * Builds a hint telling the user which DROP statement matches the actual object
 * type, e.g. "Use 'DROP VIEW v1'", for ERR_WRONG_OBJECT error messages.
 *
 * @param table the object the user tried to drop with the wrong statement
 * @return a human-readable hint naming the correct DROP command
 */
private static String genDropHint(TableIf table) {
    String type = "";
    if (table instanceof View) {
        type = "VIEW";
    } else if (table instanceof MTMV) {
        // NOTE(review): MTMV must be tested before OlapTable — MTMV appears to be an
        // OlapTable subtype (confirm hierarchy); in the reversed order the MTMV branch
        // is unreachable and materialized views would be reported as plain "TABLE".
        type = "MATERIALIZED VIEW";
    } else if (table instanceof OlapTable) {
        type = "TABLE";
    }
    // Close the quote opened before DROP so the hint reads as a complete statement.
    return "Use 'DROP " + type + " " + table.getName() + "'";
}
public boolean unprotectDropTable(Database db, Table table, boolean isForceDrop, boolean isReplay,
long recycleTime) {
if (table.getType() == TableType.ELASTICSEARCH) {

View File

@ -247,7 +247,8 @@ public class MinidumpUtils {
}
private static Histogram getColumnHistogram(TableIf table, String colName) {
return Env.getCurrentEnv().getStatisticsCache().getHistogram(table.getId(), colName);
return Env.getCurrentEnv().getStatisticsCache().getHistogram(
table.getDatabase().getCatalog().getId(), table.getDatabase().getId(), table.getId(), colName);
}
/**

View File

@ -1046,7 +1046,7 @@ public class ShowExecutor {
} else {
if (showStmt.isView()) {
ErrorReport.reportAnalysisException(ErrorCode.ERR_WRONG_OBJECT, showStmt.getDb(),
showStmt.getTable(), "VIEW");
showStmt.getTable(), "VIEW", "Use 'SHOW CREATE TABLE '" + table.getName());
}
rows.add(Lists.newArrayList(table.getName(), createTableStmt.get(0)));
resultSet = table.getType() != TableType.MATERIALIZED_VIEW
@ -2509,8 +2509,9 @@ public class ShowExecutor {
}
private void getStatsForAllColumns(List<Pair<Pair<String, String>, ColumnStatistic>> columnStatistics,
TableIf tableIf) throws AnalysisException {
List<ResultRow> resultRows = StatisticsRepository.queryColumnStatisticsForTable(tableIf.getId());
TableIf tableIf) {
List<ResultRow> resultRows = StatisticsRepository.queryColumnStatisticsForTable(
tableIf.getDatabase().getCatalog().getId(), tableIf.getDatabase().getId(), tableIf.getId());
// row[4] is index id, row[5] is column name.
for (ResultRow row : resultRows) {
String indexName = tableIf.getName();
@ -2555,7 +2556,9 @@ public class ShowExecutor {
columnStatistics.add(Pair.of(Pair.of(indexName, colName), columnStatistic));
} else if (partitionNames == null) {
ColumnStatistic columnStatistic =
StatisticsRepository.queryColumnStatisticsByName(tableIf.getId(), indexId, colName);
StatisticsRepository.queryColumnStatisticsByName(
tableIf.getDatabase().getCatalog().getId(),
tableIf.getDatabase().getId(), tableIf.getId(), indexId, colName);
columnStatistics.add(Pair.of(Pair.of(indexName, colName), columnStatistic));
} else {
String finalIndexName = indexName;

View File

@ -3296,9 +3296,10 @@ public class FrontendServiceImpl implements FrontendService.Iface {
ColStatsData data = GsonUtils.GSON.fromJson(request.colStatsData, ColStatsData.class);
ColumnStatistic c = data.toColumnStatistic();
if (c == ColumnStatistic.UNKNOWN) {
Env.getCurrentEnv().getStatisticsCache().invalidate(k.tableId, k.idxId, k.colName);
Env.getCurrentEnv().getStatisticsCache().invalidate(k.catalogId, k.dbId, k.tableId, k.idxId, k.colName);
} else {
Env.getCurrentEnv().getStatisticsCache().updateColStatsCache(k.tableId, k.idxId, k.colName, c);
Env.getCurrentEnv().getStatisticsCache().updateColStatsCache(
k.catalogId, k.dbId, k.tableId, k.idxId, k.colName, c);
}
// Return Ok anyway
return new TStatus(TStatusCode.OK);

View File

@ -618,7 +618,7 @@ public class AnalysisManager implements Writable {
invalidateLocalStats(catalogId, dbId, tblId, cols, tableStats);
// Drop stats ddl is master only operation.
invalidateRemoteStats(catalogId, dbId, tblId, cols, dropStatsStmt.isAllColumns());
StatisticsRepository.dropStatistics(tblId, cols);
StatisticsRepository.dropStatisticsByColNames(catalogId, dbId, tblId, cols);
}
public void dropStats(TableIf table) throws DdlException {
@ -633,7 +633,7 @@ public class AnalysisManager implements Writable {
invalidateLocalStats(catalogId, dbId, tableId, cols, tableStats);
// Drop stats ddl is master only operation.
invalidateRemoteStats(catalogId, dbId, tableId, cols, true);
StatisticsRepository.dropStatistics(table.getId(), cols);
StatisticsRepository.dropStatisticsByColNames(catalogId, dbId, table.getId(), cols);
}
public void invalidateLocalStats(long catalogId, long dbId, long tableId,
@ -666,7 +666,7 @@ public class AnalysisManager implements Writable {
}
}
tableStats.removeColumn(indexName, column);
statisticsCache.invalidate(tableId, indexId, column);
statisticsCache.invalidate(catalogId, dbId, tableId, indexId, column);
}
}
tableStats.updatedTime = 0;

View File

@ -71,7 +71,8 @@ public class ColumnStatisticsCacheLoader extends BasicAsyncCacheLoader<Statistic
private Optional<ColumnStatistic> loadFromStatsTable(StatisticsCacheKey key) {
List<ResultRow> columnResults = null;
try {
columnResults = StatisticsRepository.loadColStats(key.tableId, key.idxId, key.colName);
columnResults = StatisticsRepository.loadColStats(
key.catalogId, key.dbId, key.tableId, key.idxId, key.colName);
} catch (InternalQueryExecutionException e) {
LOG.info("Failed to load stats for table {} column {}. Reason:{}",
key.tableId, key.colName, e.getMessage());

View File

@ -75,7 +75,8 @@ public class HistogramTask extends BaseAnalysisTask {
StringSubstitutor stringSubstitutor = new StringSubstitutor(params);
StatisticsUtil.execUpdate(stringSubstitutor.replace(ANALYZE_HISTOGRAM_SQL_TEMPLATE_TABLE));
Env.getCurrentEnv().getStatisticsCache().refreshHistogramSync(tbl.getId(), -1, col.getName());
Env.getCurrentEnv().getStatisticsCache().refreshHistogramSync(
tbl.getDatabase().getCatalog().getId(), tbl.getDatabase().getId(), tbl.getId(), -1, col.getName());
}
@Override

View File

@ -91,16 +91,16 @@ public class StatisticsCache {
return ColumnStatistic.UNKNOWN;
}
public Histogram getHistogram(long tblId, String colName) {
return getHistogram(tblId, -1, colName).orElse(null);
/**
 * Returns the cached histogram for the given column, or null when absent.
 * Delegates to the index-aware overload with idxId = -1 (base table, no index).
 */
public Histogram getHistogram(long ctlId, long dbId, long tblId, String colName) {
return getHistogram(ctlId, dbId, tblId, -1, colName).orElse(null);
}
public Optional<Histogram> getHistogram(long tblId, long idxId, String colName) {
private Optional<Histogram> getHistogram(long ctlId, long dbId, long tblId, long idxId, String colName) {
ConnectContext ctx = ConnectContext.get();
if (ctx != null && ctx.getSessionVariable().internalSession) {
return Optional.empty();
}
StatisticsCacheKey k = new StatisticsCacheKey(tblId, idxId, colName);
StatisticsCacheKey k = new StatisticsCacheKey(ctlId, dbId, tblId, idxId, colName);
try {
CompletableFuture<Optional<Histogram>> f = histogramCache.get(k);
if (f.isDone()) {
@ -112,24 +112,22 @@ public class StatisticsCache {
return Optional.empty();
}
public void invalidate(long tblId, long idxId, String colName) {
columnStatisticsCache.synchronous().invalidate(new StatisticsCacheKey(tblId, idxId, colName));
/**
 * Evicts the column statistics cache entry identified by
 * (catalog id, db id, table id, index id, column name).
 */
public void invalidate(long ctlId, long dbId, long tblId, long idxId, String colName) {
columnStatisticsCache.synchronous().invalidate(new StatisticsCacheKey(ctlId, dbId, tblId, idxId, colName));
}
public void updateColStatsCache(long tblId, long idxId, String colName, ColumnStatistic statistic) {
columnStatisticsCache.synchronous().put(new StatisticsCacheKey(tblId, idxId, colName), Optional.of(statistic));
public void updateColStatsCache(long ctlId, long dbId, long tblId, long idxId, String colName,
ColumnStatistic statistic) {
columnStatisticsCache.synchronous()
.put(new StatisticsCacheKey(ctlId, dbId, tblId, idxId, colName), Optional.of(statistic));
}
public void refreshColStatsSync(long tblId, long idxId, String colName) {
columnStatisticsCache.synchronous().refresh(new StatisticsCacheKey(-1, -1, tblId, idxId, colName));
public void refreshColStatsSync(long ctlId, long dbId, long tblId, long idxId, String colName) {
columnStatisticsCache.synchronous().refresh(new StatisticsCacheKey(ctlId, dbId, tblId, idxId, colName));
}
public void refreshColStatsSync(long catalogId, long dbId, long tblId, long idxId, String colName) {
columnStatisticsCache.synchronous().refresh(new StatisticsCacheKey(catalogId, dbId, tblId, idxId, colName));
}
public void refreshHistogramSync(long tblId, long idxId, String colName) {
histogramCache.synchronous().refresh(new StatisticsCacheKey(tblId, idxId, colName));
public void refreshHistogramSync(long ctlId, long dbId, long tblId, long idxId, String colName) {
histogramCache.synchronous().refresh(new StatisticsCacheKey(ctlId, dbId, tblId, idxId, colName));
}
public void preHeat() {
@ -168,11 +166,9 @@ public class StatisticsCache {
for (ResultRow r : recentStatsUpdatedCols) {
try {
StatsId statsId = new StatsId(r);
long tblId = statsId.tblId;
long idxId = statsId.idxId;
String colId = statsId.colId;
final StatisticsCacheKey k =
new StatisticsCacheKey(tblId, idxId, colId);
new StatisticsCacheKey(statsId.catalogId, statsId.dbId, statsId.tblId, statsId.idxId,
statsId.colId);
ColumnStatistic c = ColumnStatistic.fromResultRow(r);
if (c.count > 0 && c.ndv == 0 && c.count != c.numNulls) {
c = ColumnStatistic.UNKNOWN;
@ -189,10 +185,11 @@ public class StatisticsCache {
*/
public void syncColStats(ColStatsData data) {
StatsId statsId = data.statsId;
final StatisticsCacheKey k = new StatisticsCacheKey(statsId.tblId, statsId.idxId, statsId.colId);
final StatisticsCacheKey k = new StatisticsCacheKey(statsId.catalogId, statsId.dbId, statsId.tblId,
statsId.idxId, statsId.colId);
ColumnStatistic columnStatistic = data.toColumnStatistic();
if (columnStatistic == ColumnStatistic.UNKNOWN) {
invalidate(k.tableId, k.idxId, k.colName);
invalidate(k.catalogId, k.dbId, k.tableId, k.idxId, k.colName);
} else {
putCache(k, columnStatistic);
}

View File

@ -41,10 +41,6 @@ public class StatisticsCacheKey {
private static final String DELIMITER = "-";
public StatisticsCacheKey(long tableId, long idxId, String colName) {
this(-1, -1, tableId, idxId, colName);
}
public StatisticsCacheKey(long catalogId, long dbId, long tableId, long idxId, String colName) {
this.catalogId = catalogId;
this.dbId = dbId;
@ -55,7 +51,7 @@ public class StatisticsCacheKey {
@Override
public int hashCode() {
return Objects.hash(tableId, idxId, colName);
return Objects.hash(catalogId, dbId, tableId, idxId, colName);
}
@Override
@ -67,13 +63,16 @@ public class StatisticsCacheKey {
return false;
}
StatisticsCacheKey k = (StatisticsCacheKey) obj;
return this.tableId == k.tableId && this.idxId == k.idxId && this.colName.equals(k.colName);
return this.catalogId == k.catalogId && this.dbId == k.dbId && this.tableId == k.tableId
&& this.idxId == k.idxId && this.colName.equals(k.colName);
}
@Override
public String toString() {
StringJoiner sj = new StringJoiner(DELIMITER);
sj.add("ColumnStats");
sj.add(String.valueOf(catalogId));
sj.add(String.valueOf(dbId));
sj.add(String.valueOf(tableId));
sj.add(String.valueOf(idxId));
sj.add(colName);

View File

@ -63,22 +63,22 @@ public class StatisticsRepository {
private static final String FETCH_COLUMN_STATISTIC_TEMPLATE = "SELECT * FROM "
+ FULL_QUALIFIED_COLUMN_STATISTICS_NAME
+ " WHERE `id` = '${id}'";
+ " WHERE `id` = '${id}' AND `catalog_id` = '${catalogId}' AND `db_id` = '${dbId}'";
private static final String FETCH_PARTITIONS_STATISTIC_TEMPLATE = "SELECT * FROM "
+ FULL_QUALIFIED_COLUMN_STATISTICS_NAME
+ " WHERE `id` IN (${idList})";
+ " WHERE `id` IN (${idList}) AND `catalog_id` = '${catalogId}' AND `db_id` = '${dbId}'";
private static final String FETCH_COLUMN_HISTOGRAM_TEMPLATE = "SELECT * FROM "
+ FULL_QUALIFIED_COLUMN_HISTOGRAM_NAME
+ " WHERE `id` = '${id}'";
+ " WHERE `id` = '${id}' AND `catalog_id` = '${catalogId}' AND `db_id` = '${dbId}'";
private static final String INSERT_INTO_COLUMN_STATISTICS = "INSERT INTO "
private static final String INSERT_INTO_COLUMN_STATISTICS_FOR_ALTER = "INSERT INTO "
+ FULL_QUALIFIED_COLUMN_STATISTICS_NAME + " VALUES('${id}', ${catalogId}, ${dbId}, ${tblId}, '${idxId}',"
+ "'${colId}', ${partId}, ${count}, ${ndv}, ${nullCount}, ${min}, ${max}, ${dataSize}, NOW())";
private static final String DROP_TABLE_STATISTICS_TEMPLATE = "DELETE FROM " + FeConstants.INTERNAL_DB_NAME
+ "." + "${tblName}" + " WHERE ${condition}";
private static final String DELETE_TABLE_STATISTICS_TEMPLATE = "DELETE FROM " + FeConstants.INTERNAL_DB_NAME
+ "." + "${tblName}" + " WHERE ${condition} AND `catalog_id` = '${catalogId}' AND `db_id` = '${dbId}'";
private static final String FETCH_RECENT_STATS_UPDATED_COL =
"SELECT * FROM "
@ -95,21 +95,22 @@ public class StatisticsRepository {
private static final String FETCH_STATS_PART_ID = "SELECT * FROM "
+ FeConstants.INTERNAL_DB_NAME + "." + StatisticConstants.STATISTIC_TBL_NAME
+ " WHERE tbl_id = ${tblId}"
+ " WHERE tbl_id = ${tblId} AND `catalog_id` = '${catalogId}' AND `db_id` = '${dbId}'"
+ " AND part_id IS NOT NULL";
private static final String QUERY_PARTITION_STATISTICS = "SELECT * FROM " + FeConstants.INTERNAL_DB_NAME
+ "." + StatisticConstants.STATISTIC_TBL_NAME + " WHERE "
+ " ${inPredicate}"
+ " ${inPredicate} AND `catalog_id` = '${catalogId}' AND `db_id` = '${dbId}'"
+ " AND part_id IS NOT NULL";
private static final String FETCH_TABLE_STATISTICS = "SELECT * FROM "
+ FeConstants.INTERNAL_DB_NAME + "." + StatisticConstants.STATISTIC_TBL_NAME
+ " WHERE tbl_id = ${tblId}"
+ " WHERE tbl_id = ${tblId} AND `catalog_id` = '${catalogId}' AND `db_id` = '${dbId}'"
+ " AND part_id IS NULL";
public static ColumnStatistic queryColumnStatisticsByName(long tableId, long indexId, String colName) {
ResultRow resultRow = queryColumnStatisticById(tableId, indexId, colName);
public static ColumnStatistic queryColumnStatisticsByName(
long ctlId, long dbId, long tableId, long indexId, String colName) {
ResultRow resultRow = queryColumnStatisticById(ctlId, dbId, tableId, indexId, colName);
if (resultRow == null) {
return ColumnStatistic.UNKNOWN;
}
@ -127,31 +128,35 @@ public class StatisticsRepository {
}
partitionIds.add(partition.getId());
}
return queryPartitionStatistics(dbObjects.table.getId(),
return queryPartitionStatistics(dbObjects.catalog.getId(), dbObjects.db.getId(), dbObjects.table.getId(),
colName, partitionIds).stream().map(ColumnStatistic::fromResultRow).collect(
Collectors.toList());
}
public static List<ResultRow> queryColumnStatisticsForTable(long tableId)
throws AnalysisException {
/**
 * Fetches all column statistics rows of a table, scoped by catalog id and
 * database id in addition to the table id.
 *
 * @return the matching rows, or an empty list when the query yields nothing
 */
public static List<ResultRow> queryColumnStatisticsForTable(long ctlId, long dbId, long tableId) {
    Map<String, String> queryParams = new HashMap<>();
    queryParams.put("tblId", String.valueOf(tableId));
    generateCtlDbIdParams(ctlId, dbId, queryParams);
    List<ResultRow> fetched = StatisticsUtil.executeQuery(FETCH_TABLE_STATISTICS, queryParams);
    if (fetched == null) {
        return Collections.emptyList();
    }
    return fetched;
}
public static ResultRow queryColumnStatisticById(long tblId, long indexId, String colName) {
return queryColumnStatisticById(tblId, indexId, colName, false);
/**
 * Fetches the single column-statistics row for the given column.
 * Convenience overload of the histogram-aware variant with isHistogram = false.
 */
private static ResultRow queryColumnStatisticById(
long ctlId, long dbId, long tblId, long indexId, String colName) {
return queryColumnStatisticById(ctlId, dbId, tblId, indexId, colName, false);
}
public static ResultRow queryColumnHistogramById(long tblId, long indexId, String colName) {
return queryColumnStatisticById(tblId, indexId, colName, true);
/**
 * Fetches the single histogram row for the given column.
 * Convenience overload of the histogram-aware variant with isHistogram = true.
 */
private static ResultRow queryColumnHistogramById(
long ctlId, long dbId, long tblId, long indexId, String colName) {
return queryColumnStatisticById(ctlId, dbId, tblId, indexId, colName, true);
}
private static ResultRow queryColumnStatisticById(long tblId, long indexId, String colName, boolean isHistogram) {
private static ResultRow queryColumnStatisticById(long ctlId, long dbId, long tblId, long indexId, String colName,
boolean isHistogram) {
Map<String, String> map = new HashMap<>();
String id = constructId(tblId, indexId, colName);
map.put("id", StatisticsUtil.escapeSQL(id));
generateCtlDbIdParams(ctlId, dbId, map);
List<ResultRow> rows = isHistogram ? StatisticsUtil.executeQuery(FETCH_COLUMN_HISTOGRAM_TEMPLATE, map) :
StatisticsUtil.executeQuery(FETCH_COLUMN_STATISTIC_TEMPLATE, map);
int size = rows.size();
@ -161,19 +166,22 @@ public class StatisticsRepository {
return size == 0 ? null : rows.get(0);
}
public static List<ResultRow> queryPartitionStatistics(long tblId, String colName, Set<Long> partIds) {
/**
 * Loads per-partition statistics rows for one column of a table, scoped by
 * catalog id and database id.
 *
 * @param partIds ids of the partitions whose statistics should be fetched
 * @return the matching rows, or an empty list when the query yields nothing
 */
private static List<ResultRow> queryPartitionStatistics(
        long ctlId, long dbId, long tblId, String colName, Set<Long> partIds) {
    // Quote each composite partition-stats id and join them into the SQL IN list.
    StringJoiner idList = new StringJoiner(",");
    partIds.forEach(partId -> idList.add("'" + constructId(tblId, -1, colName, partId) + "'"));
    Map<String, String> substitutions = new HashMap<>();
    substitutions.put("idList", idList.toString());
    generateCtlDbIdParams(ctlId, dbId, substitutions);
    List<ResultRow> fetched = StatisticsUtil.executeQuery(FETCH_PARTITIONS_STATISTIC_TEMPLATE, substitutions);
    if (fetched == null) {
        return Collections.emptyList();
    }
    return fetched;
}
public static Histogram queryColumnHistogramByName(long tableId, long indexId, String colName) {
ResultRow resultRow = queryColumnHistogramById(tableId, indexId, colName);
private static Histogram queryColumnHistogramByName(
long ctlId, long dbId, long tableId, long indexId, String colName) {
ResultRow resultRow = queryColumnHistogramById(ctlId, dbId, tableId, indexId, colName);
if (resultRow == null) {
return Histogram.UNKNOWN;
}
@ -188,19 +196,22 @@ public class StatisticsRepository {
return stringJoiner.toString();
}
public static void dropStatistics(Set<String> partIds) throws DdlException {
dropStatisticsByPartId(partIds, StatisticConstants.STATISTIC_TBL_NAME);
/**
 * Deletes the statistics rows of the given partitions from the statistics table,
 * scoped by catalog id, database id and table id.
 *
 * @throws DdlException if the underlying DELETE statement fails
 */
public static void dropStatisticsByPartIds(long ctlId, long dbId, long tblId, Set<String> partIds)
throws DdlException {
dropStatisticsByPartId(ctlId, dbId, tblId, partIds, StatisticConstants.STATISTIC_TBL_NAME);
}
public static void dropStatistics(long tblId, Set<String> colNames) throws DdlException {
/**
 * Deletes statistics and histogram rows for the named columns of a table,
 * scoped by catalog id and database id. A null column set is a no-op.
 *
 * @throws DdlException if an underlying DELETE statement fails
 */
public static void dropStatisticsByColNames(
        long ctlId, long dbId, long tblId, Set<String> colNames) throws DdlException {
    if (colNames == null) {
        return;
    }
    dropStatisticsByColName(ctlId, dbId, tblId, colNames, StatisticConstants.STATISTIC_TBL_NAME);
    dropStatisticsByColName(ctlId, dbId, tblId, colNames, StatisticConstants.HISTOGRAM_TBL_NAME);
}
public static void dropStatisticsByColName(long tblId, Set<String> colNames, String statsTblName)
private static void dropStatisticsByColName(
long ctlId, long dbId, long tblId, Set<String> colNames, String statsTblName)
throws DdlException {
Map<String, String> params = new HashMap<>();
params.put("tblName", statsTblName);
@ -214,38 +225,43 @@ public class StatisticsRepository {
inPredicate.append(",");
columnCount++;
if (columnCount == Config.max_allowed_in_element_num_of_delete) {
executeDropSql(inPredicate, tblId, params);
executeDropSql(inPredicate, ctlId, dbId, tblId, params);
columnCount = 0;
inPredicate.setLength(0);
}
}
if (inPredicate.length() > 0) {
executeDropSql(inPredicate, tblId, params);
executeDropSql(inPredicate, ctlId, dbId, tblId, params);
}
}
public static void executeDropSql(StringBuilder inPredicate, long tblId, Map<String, String> params)
private static void executeDropSql(
StringBuilder inPredicate, long ctlId, long dbId, long tblId, Map<String, String> params)
throws DdlException {
if (inPredicate.length() > 0) {
inPredicate.delete(inPredicate.length() - 1, inPredicate.length());
}
String predicate = String.format("tbl_id = '%s' AND %s IN (%s)", tblId, "col_id", inPredicate);
params.put("condition", predicate);
generateCtlDbIdParams(ctlId, dbId, params);
try {
StatisticsUtil.execUpdate(new StringSubstitutor(params).replace(DROP_TABLE_STATISTICS_TEMPLATE));
StatisticsUtil.execUpdate(new StringSubstitutor(params).replace(DELETE_TABLE_STATISTICS_TEMPLATE));
} catch (Exception e) {
throw new DdlException(e.getMessage(), e);
}
}
public static void dropStatisticsByPartId(Set<String> partIds, String statsTblName) throws DdlException {
private static void dropStatisticsByPartId(
long ctlId, long dbId, long tblId, Set<String> partIds, String statsTblName) throws DdlException {
Map<String, String> params = new HashMap<>();
String right = StatisticsUtil.joinElementsToString(partIds, ",");
String inPredicate = String.format(" part_id IN (%s)", right);
params.put("tblName", statsTblName);
params.put("condition", inPredicate);
generateCtlDbIdParams(ctlId, dbId, params);
params.put("tblId", String.valueOf(tblId));
try {
StatisticsUtil.execUpdate(new StringSubstitutor(params).replace(DROP_TABLE_STATISTICS_TEMPLATE));
StatisticsUtil.execUpdate(new StringSubstitutor(params).replace(DELETE_TABLE_STATISTICS_TEMPLATE));
} catch (Exception e) {
throw new DdlException(e.getMessage(), e);
}
@ -313,7 +329,7 @@ public class StatisticsRepository {
if (partitionIds.isEmpty()) {
// update table granularity statistics
params.put("partId", "NULL");
StatisticsUtil.execUpdate(INSERT_INTO_COLUMN_STATISTICS, params);
StatisticsUtil.execUpdate(INSERT_INTO_COLUMN_STATISTICS_FOR_ALTER, params);
ColStatsData data = new ColStatsData(constructId(objects.table.getId(), indexId, colName),
objects.catalog.getId(), objects.db.getId(), objects.table.getId(), indexId, colName,
null, columnStatistic);
@ -331,7 +347,7 @@ public class StatisticsRepository {
for (Long partitionId : partitionIds) {
HashMap<String, String> partParams = Maps.newHashMap(params);
partParams.put("partId", String.valueOf(partitionId));
StatisticsUtil.execUpdate(INSERT_INTO_COLUMN_STATISTICS, partParams);
StatisticsUtil.execUpdate(INSERT_INTO_COLUMN_STATISTICS_FOR_ALTER, partParams);
// TODO cache partition granular statistics
// Env.getCurrentEnv().getStatisticsCache()
// .updateColStatsCache(partitionId, -1, colName, builder.build());
@ -350,10 +366,11 @@ public class StatisticsRepository {
return StatisticsUtil.execStatisticQuery(new StringSubstitutor(params).replace(FETCH_STATS_FULL_NAME));
}
public static Map<String, Set<String>> fetchColAndPartsForStats(long tblId) {
public static Map<String, Set<String>> fetchColAndPartsForStats(long ctlId, long dbId, long tblId) {
Map<String, String> params = Maps.newHashMap();
params.put("tblId", String.valueOf(tblId));
StringSubstitutor stringSubstitutor = new StringSubstitutor(params);
generateCtlDbIdParams(ctlId, dbId, params);
String partSql = stringSubstitutor.replace(FETCH_STATS_PART_ID);
List<ResultRow> resultRows = StatisticsUtil.execStatisticQuery(partSql);
@ -376,11 +393,11 @@ public class StatisticsRepository {
return columnToPartitions;
}
public static List<ResultRow> loadColStats(long tableId, long idxId, String colName) {
/**
 * Loads the column statistics rows matching one (table, index, column) id,
 * additionally filtered by catalog id and database id.
 */
public static List<ResultRow> loadColStats(long ctlId, long dbId, long tableId, long idxId, String colName) {
    // Build substitution parameters: the escaped composite stats id plus catalog/db scoping.
    Map<String, String> substitutions = new HashMap<>();
    substitutions.put("id", StatisticsUtil.escapeSQL(constructId(tableId, idxId, colName)));
    generateCtlDbIdParams(ctlId, dbId, substitutions);
    String sql = new StringSubstitutor(substitutions).replace(FETCH_COLUMN_STATISTIC_TEMPLATE);
    return StatisticsUtil.execStatisticQuery(sql);
}
@ -388,12 +405,23 @@ public class StatisticsRepository {
/**
 * Loads partition-level statistics rows for the given cache keys.
 * All keys are expected to share the same catalog id and db id; the values of
 * the last key iterated are used for the catalog/db filter.
 *
 * @param keys cache keys identifying (catalog, db, table, index, column)
 * @return the matching rows from the statistics table
 */
public static List<ResultRow> loadPartStats(Collection<StatisticsCacheKey> keys) {
    // Guard: an empty key set would otherwise build an invalid "in ()" predicate
    // and filter on the placeholder catalog/db ids (-1).
    if (keys.isEmpty()) {
        return Collections.emptyList();
    }
    String inPredicate = "CONCAT(tbl_id, '-', idx_id, '-', col_id) in (%s)";
    StringJoiner sj = new StringJoiner(",");
    long ctlId = -1;
    long dbId = -1;
    // ATTN: ctlId and dbId should be same in all keys
    for (StatisticsCacheKey statisticsCacheKey : keys) {
        sj.add("'" + statisticsCacheKey.toString() + "'");
        ctlId = statisticsCacheKey.catalogId;
        dbId = statisticsCacheKey.dbId;
    }
    Map<String, String> params = new HashMap<>();
    params.put("inPredicate", String.format(inPredicate, sj.toString()));
    generateCtlDbIdParams(ctlId, dbId, params);
    return StatisticsUtil.execStatisticQuery(new StringSubstitutor(params)
            .replace(QUERY_PARTITION_STATISTICS));
}
/**
 * Puts the catalog id and database id into a SQL substitution parameter map
 * under the keys "catalogId" and "dbId" referenced by the statement templates.
 *
 * @param ctlId  catalog id (renamed from the typo "ctdId")
 * @param dbId   database id
 * @param params substitution map to populate; mutated in place
 */
private static void generateCtlDbIdParams(long ctlId, long dbId, Map<String, String> params) {
    params.put("catalogId", String.valueOf(ctlId));
    params.put("dbId", String.valueOf(dbId));
}
}