[improvement](stats) Catch exception properly #22503

Catch exceptions where they occur and log them, instead of throwing them straight to the caller, so that a single failure does not unexpectedly interrupt the upper-level logic: one bad table or partition no longer aborts the whole auto-analyze pass.
AKIRA authored on 2023-08-03 15:16:55 +08:00 · committed by GitHub
parent 3961b8df76 · commit 27f6e4649e
2 changed files with 44 additions and 28 deletions
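The change repeats one pattern in several places: a helper that used to declare throws now catches the failure, logs it, and signals the caller with a sentinel value (or simply skips the item), so the caller's loop keeps going. A minimal, self-contained sketch of that pattern, with hypothetical names rather than the Doris classes:

import java.util.ArrayList;
import java.util.List;
import java.util.logging.Level;
import java.util.logging.Logger;

class BatchSketch {
    private static final Logger LOG = Logger.getLogger(BatchSketch.class.getName());

    // Before: "Item build(String name) throws Exception" pushed the failure up to the
    // caller, so one bad item could abort the whole batch. After: the failure is
    // logged here and null is returned as a "skip this one" sentinel.
    Item build(String name) {
        try {
            return Item.parse(name); // may throw for malformed input
        } catch (Exception e) {
            LOG.log(Level.WARNING, "Failed to build item: " + name, e);
            return null;
        }
    }

    List<Item> buildAll(List<String> names) {
        List<Item> built = new ArrayList<>();
        for (String name : names) {
            Item item = build(name);
            if (item == null) {
                continue; // skip the failed item, keep processing the rest
            }
            built.add(item);
        }
        return built;
    }

    static class Item {
        final String name;
        private Item(String name) {
            this.name = name;
        }
        static Item parse(String name) {
            if (name == null || name.isEmpty()) {
                throw new IllegalArgumentException("empty item name");
            }
            return new Item(name);
        }
    }
}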

AnalysisManager.java

@@ -174,8 +174,7 @@ public class AnalysisManager extends Daemon implements Writable {
         }
     }
-    public List<AnalysisInfo> buildAnalysisInfosForDB(DatabaseIf<TableIf> db, AnalyzeProperties analyzeProperties)
-            throws DdlException {
+    public List<AnalysisInfo> buildAnalysisInfosForDB(DatabaseIf<TableIf> db, AnalyzeProperties analyzeProperties) {
         List<TableIf> tbls = db.getTables();
         List<AnalysisInfo> analysisInfos = new ArrayList<>();
         db.readLock();
@@ -195,12 +194,18 @@ public class AnalysisManager extends Daemon implements Writable {
                 try {
                     analyzeTblStmt.check();
                 } catch (AnalysisException analysisException) {
-                    throw new DdlException(analysisException.getMessage(), analysisException);
+                    LOG.warn("Failed to build analyze job: {}",
+                            analysisException.getMessage(), analysisException);
                 }
                 analyzeStmts.add(analyzeTblStmt);
             }
             for (AnalyzeTblStmt analyzeTblStmt : analyzeStmts) {
-                analysisInfos.add(buildAndAssignJob(analyzeTblStmt));
+                try {
+                    analysisInfos.add(buildAndAssignJob(analyzeTblStmt));
+                } catch (DdlException e) {
+                    LOG.warn("Failed to build analyze job: {}",
+                            e.getMessage(), e);
+                }
             }
         } finally {
             db.readUnlock();
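Note that the new per-statement and per-job catches sit inside the existing db.readLock() / try / finally block, so the lock is still released on every path and a failing table costs only its own job. The combination, as a self-contained sketch with hypothetical names:

import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.locks.ReadWriteLock;
import java.util.concurrent.locks.ReentrantReadWriteLock;
import java.util.logging.Level;
import java.util.logging.Logger;

class LockedBatchSketch {
    private static final Logger LOG = Logger.getLogger(LockedBatchSketch.class.getName());
    private final ReadWriteLock lock = new ReentrantReadWriteLock();

    List<String> buildJobs(List<String> tables) {
        List<String> jobs = new ArrayList<>();
        lock.readLock().lock();
        try {
            for (String table : tables) {
                try {
                    jobs.add(buildJob(table));       // only this table is lost on failure
                } catch (Exception e) {
                    LOG.log(Level.WARNING, "Failed to build analyze job for " + table, e);
                }
            }
        } finally {
            lock.readLock().unlock();                // released even if something unexpected escapes
        }
        return jobs;
    }

    private String buildJob(String table) {
        if (table.isEmpty()) {
            throw new IllegalArgumentException("unknown table");
        }
        return "job-for-" + table;
    }
}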

StatisticsAutoAnalyzer.java

@@ -73,8 +73,8 @@ public class StatisticsAutoAnalyzer extends MasterDaemon {
             return;
         }
-        analyzePeriodically();
         if (!Config.enable_full_auto_analyze) {
+            analyzePeriodically();
             analyzeAutomatically();
         } else {
             analyzeAll();
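The periodic pass is no longer run unconditionally before the branch: with full auto-analyze enabled it appears to be handled inside analyzeAll() instead (see the next hunk), otherwise it still runs together with the triggered pass. Reduced to control flow, with the daemon plumbing stubbed out as a sketch:

class DispatchSketch {
    // Before: the periodic pass ran on every tick, regardless of the flag.
    // After: it runs here only when full auto-analyze is off; the full pass
    // schedules the periodic work itself.
    void runOnce(boolean fullAutoAnalyze) {
        if (!fullAutoAnalyze) {
            analyzePeriodically();     // re-analyze objects on a fixed schedule
            analyzeAutomatically();    // re-analyze tables whose data changed enough
        } else {
            analyzeAll();              // walk every catalog, database and table
        }
    }

    private void analyzePeriodically() { }

    private void analyzeAutomatically() { }

    private void analyzeAll() { }
}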
@@ -85,26 +85,28 @@ public class StatisticsAutoAnalyzer extends MasterDaemon {
     private void analyzeAll() {
         Set<CatalogIf> catalogs = Env.getCurrentEnv().getCatalogMgr().getCopyOfCatalog();
         for (CatalogIf ctl : catalogs) {
-            try {
             Collection<DatabaseIf> dbs = ctl.getAllDbs();
             for (DatabaseIf<TableIf> databaseIf : dbs) {
                 if (StatisticConstants.STATISTICS_DB_BLACK_LIST.contains(databaseIf.getFullName())) {
                     continue;
                 }
                 AnalysisManager analysisManager = Env.getCurrentEnv().getAnalysisManager();
                 List<AnalysisInfo> analysisInfos = analysisManager.buildAnalysisInfosForDB(databaseIf,
                         AnalyzeProperties.DEFAULT_PROP);
                 for (AnalysisInfo analysisInfo : analysisInfos) {
                     analysisInfo = getReAnalyzeRequiredPart(analysisInfo);
                     if (analysisInfo == null) {
                         continue;
                     }
+                    try {
                         analysisManager.createSystemAnalysisJob(analysisInfo, analysisTaskExecutor);
+                    } catch (Exception e) {
+                        LOG.warn("Failed to create analysis job", e);
+                    }
                 }
             }
-            } catch (Throwable t) {
-                LOG.warn("Failed to analyze all statistics.", t);
-            }
             }
+        analyzePeriodically();
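With the job-level catch in place, a failure while building or submitting one job is logged and only that job is dropped; under the old catalog-level catch (Throwable), the same failure abandoned the remainder of that catalog's walk. A runnable sketch of the new loop shape, with hypothetical names:

import java.util.List;
import java.util.Map;
import java.util.logging.Level;
import java.util.logging.Logger;

class AnalyzeAllSketch {
    private static final Logger LOG = Logger.getLogger(AnalyzeAllSketch.class.getName());

    // catalogs: catalog name -> list of table names (stand-in for the real metadata walk)
    void analyzeAll(Map<String, List<String>> catalogs) {
        for (Map.Entry<String, List<String>> catalog : catalogs.entrySet()) {
            for (String table : catalog.getValue()) {
                try {
                    submitJob(catalog.getKey(), table);   // a failure affects this table only
                } catch (Exception e) {
                    LOG.log(Level.WARNING, "Failed to create analysis job for " + table, e);
                }
            }
        }
    }

    private void submitJob(String catalog, String table) {
        if (table.isEmpty()) {
            throw new IllegalStateException("cannot build job for empty table name");
        }
        // ... build the job info and hand it to the task executor ...
    }
}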
@@ -160,16 +162,20 @@ public class StatisticsAutoAnalyzer extends MasterDaemon {
      * @return new job info after check
-     * @throws Throwable failed to check
      */
-    private AnalysisInfo getReAnalyzeRequiredPart(AnalysisInfo jobInfo) throws Throwable {
+    private AnalysisInfo getReAnalyzeRequiredPart(AnalysisInfo jobInfo) {
         long lastExecTimeInMs = jobInfo.lastExecTimeInMs;
         TableIf table = StatisticsUtil
                 .findTable(jobInfo.catalogName, jobInfo.dbName, jobInfo.tblName);
-        TableStatistic tblStats = StatisticsRepository.fetchTableLevelStats(table.getId());
+        TableStatistic tblStats = null;
+        try {
+            tblStats = StatisticsRepository.fetchTableLevelStats(table.getId());
+        } catch (Throwable t) {
+            LOG.warn("Failed to fetch table stats", t);
+            return null;
+        }
         if (tblStats == TableStatistic.UNKNOWN) {
             LOG.warn("Failed to automatically analyze statistics, "
                     + "no corresponding table statistics for job: {}", jobInfo.toString());
-            throw new DdlException("No corresponding table statistics for automatic job.");
+            return jobInfo;
         }
         if (!needReanalyzeTable(table, tblStats)) {
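The rewritten check distinguishes two non-throwing outcomes: a failed stats lookup yields null (the caller skips that job), while UNKNOWN stats now return the job unchanged (analyze the whole table) instead of raising a DdlException. A sketch of that shape, with hypothetical types rather than the Doris ones:

import java.util.logging.Level;
import java.util.logging.Logger;

class ReAnalyzeCheckSketch {
    private static final Logger LOG = Logger.getLogger(ReAnalyzeCheckSketch.class.getName());

    // Returns null when the job should be skipped, the unchanged job when stats are
    // unknown, and (in the real code) a narrowed job when only parts need re-analysis.
    Job reAnalyzeRequiredPart(Job job, StatsStore store) {
        Stats stats;
        try {
            stats = store.fetchTableStats(job.tableId);
        } catch (Exception e) {
            LOG.log(Level.WARNING, "Failed to fetch table stats for table " + job.tableId, e);
            return null;                     // caller treats null as "skip this job"
        }
        if (stats == null) {
            return job;                      // no stats recorded yet: analyze the whole table
        }
        // ... otherwise compare stats with the table and trim the job accordingly ...
        return job;
    }

    static class Job {
        final long tableId;
        Job(long tableId) {
            this.tableId = tableId;
        }
    }

    interface StatsStore {
        Stats fetchTableStats(long tableId) throws Exception;
    }

    static class Stats {
    }
}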
@@ -200,7 +206,7 @@ public class StatisticsAutoAnalyzer extends MasterDaemon {
     }
     private void checkAnalyzedPartitions(TableIf table, Set<String> statsPartitions,
-            Set<String> needRunPartitions, long lastExecTimeInMs) throws DdlException {
+            Set<String> needRunPartitions, long lastExecTimeInMs) {
         for (String statsPartition : statsPartitions) {
             Partition partition = table.getPartition(statsPartition);
             if (partition == null) {
@@ -209,12 +215,17 @@ public class StatisticsAutoAnalyzer extends MasterDaemon {
                 needRunPartitions.add(statsPartition);
                 continue;
             }
-            TableStatistic partitionStats = StatisticsRepository
+            TableStatistic partitionStats = null;
+            try {
+                partitionStats = StatisticsRepository
                         .fetchTableLevelOfPartStats(partition.getId());
-            if (partitionStats == TableStatistic.UNKNOWN) {
+            } catch (DdlException e) {
+                LOG.warn("Failed to fetch part stats", e);
                 continue;
             }
-            if (needReanalyzePartition(lastExecTimeInMs, partition, partitionStats)) {
+            if (needReanalyzePartition(lastExecTimeInMs, partition, partitionStats)
+                    || partitionStats == TableStatistic.UNKNOWN) {
                 needRunPartitions.add(partition.getName());
             }
         }
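Same idea one level down: a failed per-partition stats lookup is logged and only that partition is skipped, while a partition whose stats are UNKNOWN is now queued for analysis instead of being skipped. A self-contained sketch (hypothetical types, not the Doris repository API):

import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import java.util.logging.Level;
import java.util.logging.Logger;

class PartitionCheckSketch {
    private static final Logger LOG = Logger.getLogger(PartitionCheckSketch.class.getName());

    // stats: partition name -> row count last seen by the analyzer; a missing entry
    // plays the role of "UNKNOWN" statistics here.
    Set<String> partitionsToAnalyze(Set<String> partitions, Map<String, Long> stats) {
        Set<String> needRun = new HashSet<>();
        for (String partition : partitions) {
            Long lastRowCount;
            try {
                lastRowCount = fetchPartitionStats(stats, partition);
            } catch (Exception e) {
                LOG.log(Level.WARNING, "Failed to fetch stats for partition " + partition, e);
                continue;                          // skip just this partition
            }
            if (lastRowCount == null || isStale(partition, lastRowCount)) {
                needRun.add(partition);            // unknown stats count as "needs analysis"
            }
        }
        return needRun;
    }

    private Long fetchPartitionStats(Map<String, Long> stats, String partition) throws Exception {
        if (partition.isEmpty()) {
            throw new Exception("malformed partition name");
        }
        return stats.get(partition);               // null when nothing has been recorded yet
    }

    private boolean isStale(String partition, long lastRowCount) {
        return false;                              // placeholder for the real freshness check
    }
}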