From b7b78ae7079a61eadab0e78d9e9c9792dd0af1b3 Mon Sep 17 00:00:00 2001
From: morrySnow <101034200+morrySnow@users.noreply.github.com>
Date: Fri, 17 Jun 2022 21:02:45 +0800
Subject: [PATCH] [style](fe)the last step of fe CheckStyle (#10134)
1. fix all checkstyle warning
2. change all checkstyle rules to error
3. remove some java doc rules
a. RequireEmptyLineBeforeBlockTagGroup
b. JavadocStyle
c. JavadocParagraph
4. suppress some rules for old codes
a. all java doc rules only affect on Nereids
b. DeclarationOrder only affect on Nereids
c. OverloadMethodsDeclarationOrder only affect on Nereids
d. VariableDeclarationUsageDistance only affect on Nereids
e. suppress OneTopLevelClass on org/apache/doris/load/loadv2/dpp/ColumnParser.java
f. suppress OneTopLevelClass on org/apache/doris/load/loadv2/dpp/SparkRDDAggregator.java
g. suppress LineLength on org/apache/doris/catalog/FunctionSet.java
h. suppress LineLength on org/apache/doris/common/ErrorCode.java
---
.../developer-guide/java-format-code.md | 7 +
.../developer-guide/java-format-code.md | 7 +
fe/check/checkstyle/checkstyle.xml | 119 ++-----
fe/check/checkstyle/suppressions.xml | 29 ++
.../doris/common/io/DataOutputBuffer.java | 2 +-
.../org/apache/doris/common/io/IOUtils.java | 9 +-
.../apache/doris/common/io/OutputBuffer.java | 2 +-
.../java/org/apache/doris/common/io/Text.java | 2 +-
.../doris/common/property/PropertySchema.java | 2 +-
.../main/java/org/apache/doris/PaloFe.java | 6 +-
.../java/org/apache/doris/alter/Alter.java | 33 +-
.../org/apache/doris/alter/AlterHandler.java | 11 +-
.../org/apache/doris/alter/AlterJobV2.java | 5 +-
.../apache/doris/alter/AlterOperations.java | 7 +-
.../doris/alter/MaterializedViewHandler.java | 45 ++-
.../org/apache/doris/alter/RollupJobV2.java | 30 +-
.../doris/alter/SchemaChangeHandler.java | 110 +++---
.../apache/doris/alter/SchemaChangeJobV2.java | 27 +-
.../org/apache/doris/alter/SystemHandler.java | 3 +-
.../doris/analysis/AbstractBackupStmt.java | 9 +-
.../AdminCancelRebalanceDiskStmt.java | 2 +-
.../doris/analysis/AdminCleanTrashStmt.java | 2 +-
.../doris/analysis/AdminCompactTableStmt.java | 3 +-
.../analysis/AdminRebalanceDiskStmt.java | 6 +-
.../apache/doris/analysis/AggregateInfo.java | 6 +-
.../doris/analysis/AggregateInfoBase.java | 4 +-
.../doris/analysis/AggregateParamsList.java | 2 +-
.../doris/analysis/AlterColumnStatsStmt.java | 4 +-
.../analysis/AlterDatabaseQuotaStmt.java | 3 +-
.../doris/analysis/AlterDatabaseRename.java | 7 +-
.../doris/analysis/AlterRoutineLoadStmt.java | 15 +-
.../doris/analysis/AlterSqlBlockRuleStmt.java | 15 +-
.../doris/analysis/AlterTableStatsStmt.java | 4 +-
.../apache/doris/analysis/AlterTableStmt.java | 6 +-
.../apache/doris/analysis/AlterViewStmt.java | 7 +-
.../apache/doris/analysis/AnalyticExpr.java | 15 +-
.../apache/doris/analysis/AnalyticInfo.java | 4 +-
.../apache/doris/analysis/AnalyticWindow.java | 6 +
.../org/apache/doris/analysis/Analyzer.java | 14 +-
.../apache/doris/analysis/ArithmeticExpr.java | 4 +
.../org/apache/doris/analysis/BackupStmt.java | 4 +-
.../doris/analysis/BinaryPredicate.java | 2 +-
.../org/apache/doris/analysis/BrokerDesc.java | 7 +-
.../analysis/BuiltinAggregateFunction.java | 1 +
.../org/apache/doris/analysis/CaseExpr.java | 10 +-
.../org/apache/doris/analysis/CastExpr.java | 3 +-
.../doris/analysis/ChannelDescription.java | 3 +-
.../org/apache/doris/analysis/ColumnDef.java | 8 +-
.../doris/analysis/CompoundPredicate.java | 2 +-
.../doris/analysis/CreateDataSyncJobStmt.java | 6 +-
.../apache/doris/analysis/CreateDbStmt.java | 3 +-
.../doris/analysis/CreateFunctionStmt.java | 10 +-
.../analysis/CreateMaterializedViewStmt.java | 9 +-
.../doris/analysis/CreateRoutineLoadStmt.java | 3 +-
.../analysis/CreateSqlBlockRuleStmt.java | 15 +-
.../doris/analysis/CreateTableLikeStmt.java | 3 +-
.../doris/analysis/CreateTableStmt.java | 6 +-
.../apache/doris/analysis/CreateUserStmt.java | 3 +-
.../doris/analysis/DataDescription.java | 11 +-
.../apache/doris/analysis/DateLiteral.java | 27 +-
.../apache/doris/analysis/DecimalLiteral.java | 3 +-
.../org/apache/doris/analysis/DeleteStmt.java | 6 +-
.../apache/doris/analysis/DescribeStmt.java | 4 +-
.../doris/analysis/DescriptorTable.java | 2 +-
.../org/apache/doris/analysis/DropDbStmt.java | 3 +-
.../org/apache/doris/analysis/ExportStmt.java | 2 +-
.../java/org/apache/doris/analysis/Expr.java | 31 +-
.../org/apache/doris/analysis/ExprId.java | 2 +-
.../doris/analysis/ExprSubstitutionMap.java | 2 +-
.../doris/analysis/ExpressionFunctions.java | 6 +-
.../doris/analysis/FunctionCallExpr.java | 24 +-
.../apache/doris/analysis/FunctionParams.java | 2 +-
.../org/apache/doris/analysis/GrantStmt.java | 12 +-
.../apache/doris/analysis/GroupByClause.java | 4 +-
.../apache/doris/analysis/InlineViewRef.java | 3 +-
.../org/apache/doris/analysis/InsertStmt.java | 28 +-
.../doris/analysis/IsNullPredicate.java | 1 +
.../doris/analysis/LargeIntLiteral.java | 2 +-
.../org/apache/doris/analysis/LoadStmt.java | 5 +-
.../org/apache/doris/analysis/LockTable.java | 1 +
.../analysis/MVColumnBitmapUnionPattern.java | 3 +-
.../analysis/ModifyTablePropertiesClause.java | 3 +-
.../apache/doris/analysis/OpcodeRegistry.java | 314 ------------------
.../apache/doris/analysis/OrderByElement.java | 5 +
.../apache/doris/analysis/OutFileClause.java | 41 ++-
.../org/apache/doris/analysis/Predicate.java | 5 +-
.../org/apache/doris/analysis/QueryStmt.java | 11 +-
.../apache/doris/analysis/RecoverDbStmt.java | 9 +-
.../doris/analysis/RecoverPartitionStmt.java | 7 +-
.../doris/analysis/RecoverTableStmt.java | 10 +-
.../apache/doris/analysis/RefreshDbStmt.java | 6 +-
.../analysis/ReplacePartitionClause.java | 9 +-
.../doris/analysis/ResourcePattern.java | 1 +
.../apache/doris/analysis/RestoreStmt.java | 8 +-
.../org/apache/doris/analysis/RevokeStmt.java | 3 +-
.../RoutineLoadDataSourceProperties.java | 51 +--
.../doris/analysis/SchemaTableType.java | 5 +-
.../apache/doris/analysis/SelectListItem.java | 3 +-
.../org/apache/doris/analysis/SelectStmt.java | 23 +-
.../doris/analysis/SetOperationStmt.java | 6 +-
.../org/apache/doris/analysis/SetVar.java | 3 +-
.../apache/doris/analysis/ShowAlterStmt.java | 3 +-
.../apache/doris/analysis/ShowBackupStmt.java | 6 +-
.../doris/analysis/ShowClusterStmt.java | 4 +-
.../apache/doris/analysis/ShowExportStmt.java | 3 +-
.../apache/doris/analysis/ShowGrantsStmt.java | 1 +
.../apache/doris/analysis/ShowIndexStmt.java | 4 +-
.../doris/analysis/ShowLoadProfileStmt.java | 2 +-
.../apache/doris/analysis/ShowPolicyStmt.java | 2 -
.../doris/analysis/ShowQueryProfileStmt.java | 2 +-
.../doris/analysis/ShowRestoreStmt.java | 6 +-
.../apache/doris/analysis/ShowRolesStmt.java | 1 +
.../analysis/ShowRoutineLoadTaskStmt.java | 3 +-
.../doris/analysis/ShowStreamLoadStmt.java | 3 +-
.../apache/doris/analysis/ShowViewStmt.java | 3 +-
.../org/apache/doris/analysis/SortInfo.java | 2 +-
.../apache/doris/analysis/StmtRewriter.java | 5 +-
.../apache/doris/analysis/StorageBackend.java | 3 +-
.../apache/doris/analysis/StringLiteral.java | 3 +-
.../org/apache/doris/analysis/Subquery.java | 2 +-
.../apache/doris/analysis/TablePattern.java | 1 +
.../doris/analysis/TransactionBeginStmt.java | 3 +
.../org/apache/doris/analysis/TypeDef.java | 1 -
.../org/apache/doris/analysis/UseStmt.java | 3 +-
.../apache/doris/analysis/UserIdentity.java | 3 +-
.../org/apache/doris/analysis/ValueList.java | 1 +
.../org/apache/doris/analysis/WithClause.java | 3 +-
.../apache/doris/backup/BackupHandler.java | 12 +-
.../org/apache/doris/backup/BackupJob.java | 17 +-
.../apache/doris/backup/BrokerStorage.java | 10 +-
.../org/apache/doris/backup/Repository.java | 6 +-
.../org/apache/doris/backup/RestoreJob.java | 46 ++-
.../org/apache/doris/backup/S3Storage.java | 10 +-
.../doris/blockrule/SqlBlockRuleMgr.java | 2 +-
.../doris/catalog/AggregateFunction.java | 102 +++---
.../apache/doris/catalog/AggregateType.java | 1 +
.../org/apache/doris/catalog/AuthType.java | 2 +-
.../org/apache/doris/catalog/BrokerTable.java | 4 +-
.../org/apache/doris/catalog/Catalog.java | 131 +++++---
.../doris/catalog/ColocateGroupSchema.java | 3 +-
.../java/org/apache/doris/catalog/Column.java | 7 +-
.../org/apache/doris/catalog/ColumnStats.java | 5 +-
.../org/apache/doris/catalog/Database.java | 28 +-
.../catalog/DynamicPartitionProperty.java | 6 +-
.../org/apache/doris/catalog/EsTable.java | 16 +-
.../org/apache/doris/catalog/Function.java | 7 +-
.../org/apache/doris/catalog/FunctionSet.java | 1 +
.../doris/catalog/HashDistributionInfo.java | 1 +
.../catalog/HiveMetaStoreClientHelper.java | 24 +-
.../org/apache/doris/catalog/HiveTable.java | 26 +-
.../doris/catalog/ListPartitionInfo.java | 10 +-
.../org/apache/doris/catalog/MapType.java | 2 +
.../apache/doris/catalog/MetadataViewer.java | 18 +-
.../org/apache/doris/catalog/MysqlTable.java | 3 +-
.../org/apache/doris/catalog/OdbcTable.java | 11 +-
.../org/apache/doris/catalog/OlapTable.java | 19 +-
.../apache/doris/catalog/PartitionInfo.java | 7 +-
.../apache/doris/catalog/PrimitiveType.java | 8 +-
.../doris/catalog/RandomDistributionInfo.java | 1 +
.../doris/catalog/RangePartitionInfo.java | 8 +-
.../org/apache/doris/catalog/Replica.java | 19 +-
.../org/apache/doris/catalog/Resource.java | 1 +
.../apache/doris/catalog/ResourceGroup.java | 1 +
.../apache/doris/catalog/ScalarFunction.java | 3 +-
.../org/apache/doris/catalog/ScalarType.java | 6 +-
.../org/apache/doris/catalog/SchemaTable.java | 1 +
.../apache/doris/catalog/SparkResource.java | 10 +-
.../org/apache/doris/catalog/StructType.java | 1 +
.../org/apache/doris/catalog/TableIf.java | 6 +-
.../apache/doris/catalog/TableProperty.java | 6 +-
.../java/org/apache/doris/catalog/Tablet.java | 18 +-
.../doris/catalog/TabletInvertedIndex.java | 52 ++-
.../apache/doris/catalog/TempPartitions.java | 1 +
.../java/org/apache/doris/catalog/Type.java | 4 +
.../catalog/external/ExternalDatabase.java | 2 +-
.../doris/clone/BackendLoadStatistic.java | 6 +-
.../apache/doris/clone/BeLoadRebalancer.java | 8 +-
.../doris/clone/ClusterLoadStatistic.java | 62 ++--
.../ColocateTableCheckerAndBalancer.java | 47 ++-
.../apache/doris/clone/DiskRebalancer.java | 3 +-
.../clone/DynamicPartitionScheduler.java | 69 ++--
.../org/apache/doris/clone/MovesCacheMap.java | 12 +-
.../doris/clone/PartitionRebalancer.java | 45 ++-
.../org/apache/doris/clone/Rebalancer.java | 5 +-
.../org/apache/doris/clone/TabletChecker.java | 20 +-
.../apache/doris/clone/TabletSchedCtx.java | 21 +-
.../apache/doris/clone/TabletScheduler.java | 48 ++-
.../TwoDimensionalGreedyRebalanceAlgo.java | 61 ++--
.../java/org/apache/doris/common/CIDR.java | 1 +
.../org/apache/doris/common/CheckedMath.java | 2 +-
.../java/org/apache/doris/common/Config.java | 28 +-
.../org/apache/doris/common/ConfigBase.java | 9 +-
.../org/apache/doris/common/DdlException.java | 1 +
.../org/apache/doris/common/ErrorReport.java | 3 +-
.../org/apache/doris/common/GenericPool.java | 5 +-
.../main/java/org/apache/doris/common/Id.java | 2 +-
.../org/apache/doris/common/IdGenerator.java | 2 +
.../org/apache/doris/common/Log4jConfig.java | 6 +-
.../java/org/apache/doris/common/Pair.java | 2 +-
.../doris/common/ThreadPoolManager.java | 37 ++-
.../org/apache/doris/common/ThriftServer.java | 6 +-
.../apache/doris/common/UserException.java | 1 +
.../doris/common/parquet/BrokerInputFile.java | 12 +-
.../proc/BackendLoadStatisticProcNode.java | 8 +-
.../proc/ClusterLoadStatisticProcDir.java | 7 +-
.../ColocationGroupBackendSeqsProcNode.java | 3 +-
.../common/proc/CurrentQueryInfoProvider.java | 6 +-
.../common/proc/EsPartitionsProcDir.java | 6 +-
.../apache/doris/common/proc/JobsProcDir.java | 9 +-
.../apache/doris/common/proc/JvmProcDir.java | 40 ++-
.../doris/common/proc/PartitionsProcDir.java | 9 +-
.../apache/doris/common/proc/ProcResult.java | 1 +
.../doris/common/proc/StatisticProcNode.java | 3 +-
.../common/proc/TabletHealthProcDir.java | 19 +-
.../common/profile/ProfileTreeBuilder.java | 9 +-
.../doris/common/profile/ProfileTreeNode.java | 1 +
.../common/publish/ClusterStatePublisher.java | 3 +-
.../apache/doris/common/util/BrokerUtil.java | 33 +-
.../common/util/DynamicPartitionUtil.java | 84 +++--
.../apache/doris/common/util/KafkaUtil.java | 4 +-
.../apache/doris/common/util/ListUtil.java | 14 +-
.../doris/common/util/MetaLockUtils.java | 3 +-
.../doris/common/util/PrintableMap.java | 1 +
.../doris/common/util/ProfileManager.java | 6 +-
.../doris/common/util/PropertyAnalyzer.java | 20 +-
.../apache/doris/common/util/RangeUtils.java | 6 +-
.../doris/common/util/ReflectionUtils.java | 2 +-
.../doris/common/util/RuntimeProfile.java | 3 +-
.../doris/common/util/SmallFileMgr.java | 8 +-
.../doris/common/util/SqlBlockUtil.java | 19 +-
.../org/apache/doris/common/util/URI.java | 1 +
.../org/apache/doris/common/util/Util.java | 6 +-
.../consistency/CheckConsistencyJob.java | 3 +-
.../doris/consistency/ConsistencyChecker.java | 12 +-
.../doris/datasource/InternalDataSource.java | 9 +-
.../apache/doris/deploy/DeployManager.java | 3 +-
.../external/elasticsearch/EsNodeInfo.java | 5 +-
.../external/elasticsearch/EsRepository.java | 6 +-
.../elasticsearch/EsShardPartitions.java | 3 +-
.../doris/external/elasticsearch/EsUtil.java | 3 +-
.../external/elasticsearch/MappingPhase.java | 3 +-
.../external/iceberg/IcebergCatalogMgr.java | 3 +-
.../IcebergTableCreationRecordMgr.java | 17 +-
.../doris/httpv2/config/WebConfigurer.java | 1 +
.../httpv2/controller/BaseController.java | 3 +-
.../controller/HardwareInfoController.java | 75 +++--
.../httpv2/entity/ResponseEntityBuilder.java | 3 +-
.../httpv2/interceptor/AuthInterceptor.java | 6 +-
.../interceptor/ServletTraceIterceptor.java | 1 +
.../httpv2/rest/CheckDecommissionAction.java | 3 +-
.../doris/httpv2/rest/GetDdlStmtAction.java | 3 +-
.../apache/doris/httpv2/rest/LoadAction.java | 3 +-
.../doris/httpv2/rest/RestBaseController.java | 3 +-
.../httpv2/rest/TableQueryPlanAction.java | 27 +-
.../doris/httpv2/rest/TableSchemaAction.java | 3 +-
.../doris/httpv2/rest/UploadAction.java | 18 +-
.../doris/httpv2/rest/manager/NodeAction.java | 6 +-
.../doris/httpv2/restv2/ImportAction.java | 3 +-
.../doris/httpv2/util/LoadSubmitter.java | 6 +-
.../doris/httpv2/util/StatementSubmitter.java | 6 +-
.../apache/doris/httpv2/util/TmpFileMgr.java | 6 +-
.../doris/journal/bdbje/BDBDebugger.java | 3 +-
.../doris/journal/bdbje/BDBEnvironment.java | 10 +-
.../org/apache/doris/ldap/LdapClient.java | 7 +-
.../apache/doris/ldap/LdapPrivsChecker.java | 16 +-
.../apache/doris/load/BrokerFileGroup.java | 6 +-
.../doris/load/BrokerFileGroupAggInfo.java | 6 +-
.../org/apache/doris/load/DeleteHandler.java | 77 +++--
.../java/org/apache/doris/load/DeleteJob.java | 6 +-
.../java/org/apache/doris/load/EtlStatus.java | 1 +
.../java/org/apache/doris/load/FailMsg.java | 1 +
.../main/java/org/apache/doris/load/Load.java | 43 ++-
.../org/apache/doris/load/LoadChecker.java | 58 ++--
.../java/org/apache/doris/load/LoadJob.java | 3 +-
.../apache/doris/load/PartitionLoadInfo.java | 1 +
.../java/org/apache/doris/load/Source.java | 1 +
.../apache/doris/load/StreamLoadRecord.java | 4 +-
.../doris/load/StreamLoadRecordMgr.java | 9 +-
.../doris/load/loadv2/BrokerLoadJob.java | 19 +-
.../load/loadv2/BrokerLoadPendingTask.java | 3 +-
.../apache/doris/load/loadv2/BulkLoadJob.java | 9 +-
.../apache/doris/load/loadv2/ConfigFile.java | 5 +-
.../doris/load/loadv2/InsertLoadJob.java | 4 +-
.../org/apache/doris/load/loadv2/LoadJob.java | 6 +-
.../doris/load/loadv2/LoadLoadingTask.java | 7 +-
.../doris/load/loadv2/SparkEtlJobHandler.java | 9 +-
.../load/loadv2/SparkLauncherMonitor.java | 7 +-
.../doris/load/loadv2/SparkLoadJob.java | 41 ++-
.../load/loadv2/SparkLoadPendingTask.java | 15 +-
.../doris/load/loadv2/SparkRepository.java | 11 +-
.../load/routineload/KafkaRoutineLoadJob.java | 16 +-
.../doris/load/routineload/KafkaTaskInfo.java | 7 +-
.../load/routineload/RoutineLoadJob.java | 69 ++--
.../load/routineload/RoutineLoadManager.java | 12 +-
.../routineload/RoutineLoadScheduler.java | 9 +-
.../load/routineload/RoutineLoadTaskInfo.java | 4 +-
.../routineload/RoutineLoadTaskScheduler.java | 16 +-
.../apache/doris/load/sync/SyncChannel.java | 3 +-
.../doris/load/sync/SyncChannelHandle.java | 2 +-
.../org/apache/doris/load/sync/SyncJob.java | 3 +-
.../load/sync/canal/CanalSyncChannel.java | 16 +-
.../doris/load/sync/canal/CanalSyncJob.java | 14 +-
.../doris/load/sync/canal/CanalUtils.java | 9 +-
.../load/sync/canal/SyncCanalClient.java | 3 +-
.../load/sync/position/EntryPosition.java | 4 +-
.../doris/load/update/UpdateManager.java | 6 +-
.../doris/load/update/UpdateStmtExecutor.java | 6 +-
.../org/apache/doris/master/Checkpoint.java | 11 +-
.../org/apache/doris/master/MasterImpl.java | 34 +-
.../PartitionInMemoryInfoCollector.java | 3 +-
.../apache/doris/master/ReportHandler.java | 26 +-
.../apache/doris/metric/CounterMetric.java | 2 +-
.../java/org/apache/doris/metric/Metric.java | 2 +-
.../org/apache/doris/metric/MetricRepo.java | 128 ++++---
.../doris/metric/PrometheusMetricVisitor.java | 75 +++--
.../doris/metric/SimpleCoreMetricVisitor.java | 10 +-
.../apache/doris/metric/SystemMetrics.java | 3 +-
.../doris/monitor/jvm/JvmPauseMonitor.java | 4 +-
.../apache/doris/monitor/jvm/JvmStats.java | 2 +-
.../apache/doris/monitor/unit/TimeValue.java | 3 +-
.../org/apache/doris/mysql/MysqlPacket.java | 2 +-
.../org/apache/doris/mysql/MysqlPassword.java | 4 +-
.../org/apache/doris/mysql/MysqlProto.java | 7 +-
.../doris/mysql/nio/AcceptListener.java | 8 +-
.../apache/doris/mysql/nio/NMysqlChannel.java | 2 +-
.../apache/doris/mysql/nio/NMysqlServer.java | 9 +-
.../doris/mysql/privilege/PaloAuth.java | 39 ++-
.../doris/mysql/privilege/PaloRole.java | 8 +-
.../mysql/privilege/ResourcePrivEntry.java | 18 +-
.../doris/mysql/privilege/TablePrivEntry.java | 3 +-
.../doris/mysql/privilege/UserProperty.java | 3 +-
.../mysql/privilege/UserPropertyMgr.java | 3 +-
.../physical/PhysicalBroadcastHashJoin.java | 1 -
.../doris/persist/ColocatePersistInfo.java | 3 +-
.../apache/doris/persist/CreateTableInfo.java | 1 +
.../doris/persist/DropPartitionInfo.java | 3 +-
.../org/apache/doris/persist/EditLog.java | 22 +-
.../persist/ModifyCommentOperationLog.java | 3 +-
.../doris/persist/PartitionPersistInfo.java | 4 +-
.../doris/persist/ReplicaPersistInfo.java | 11 +-
.../apache/doris/persist/gson/GsonUtils.java | 8 +-
.../apache/doris/persist/meta/MetaFooter.java | 3 +-
.../apache/doris/persist/meta/MetaHeader.java | 3 +-
.../doris/persist/meta/MetaPersistMethod.java | 6 +-
.../apache/doris/persist/meta/MetaWriter.java | 3 +-
.../doris/planner/AnalyticEvalNode.java | 4 +-
.../apache/doris/planner/AnalyticPlanner.java | 6 +-
.../apache/doris/planner/BrokerScanNode.java | 6 +-
.../apache/doris/planner/CrossJoinNode.java | 4 +-
.../planner/DistributedPlanColocateRule.java | 3 +-
.../doris/planner/DistributedPlanner.java | 17 +-
.../org/apache/doris/planner/EsScanNode.java | 9 +-
.../doris/planner/HashDistributionPruner.java | 3 +-
.../apache/doris/planner/HashJoinNode.java | 10 +-
.../apache/doris/planner/HiveScanNode.java | 3 +-
.../doris/planner/JoinCostEvaluation.java | 2 +-
.../apache/doris/planner/LoadScanNode.java | 17 +-
.../apache/doris/planner/OlapScanNode.java | 17 +-
.../apache/doris/planner/OlapTableSink.java | 13 +-
.../doris/planner/PartitionColumnFilter.java | 3 +-
.../apache/doris/planner/PartitionPruner.java | 2 +-
.../apache/doris/planner/PlanFragment.java | 2 +-
.../org/apache/doris/planner/PlanNode.java | 12 +-
.../org/apache/doris/planner/Planner.java | 3 +-
.../apache/doris/planner/PlannerContext.java | 4 +-
.../apache/doris/planner/ProjectPlanner.java | 4 +-
.../apache/doris/planner/RollupSelector.java | 5 +-
.../apache/doris/planner/RuntimeFilter.java | 6 +-
.../doris/planner/RuntimeFilterGenerator.java | 7 +-
.../org/apache/doris/planner/ScanNode.java | 6 +-
.../org/apache/doris/planner/SelectNode.java | 3 +-
.../doris/planner/SetOperationNode.java | 15 +-
.../doris/planner/SingleNodePlanner.java | 3 +-
.../org/apache/doris/planner/SortNode.java | 6 +-
.../doris/planner/StreamLoadPlanner.java | 3 +-
.../doris/planner/StreamLoadScanNode.java | 3 +-
.../doris/plugin/DynamicPluginLoader.java | 9 +-
.../org/apache/doris/plugin/PluginMgr.java | 7 +-
.../org/apache/doris/plugin/PluginZip.java | 5 +-
.../org/apache/doris/policy/PolicyMgr.java | 1 -
.../org/apache/doris/qe/AuditLogBuilder.java | 4 +-
.../org/apache/doris/qe/ConnectContext.java | 3 +-
.../org/apache/doris/qe/ConnectProcessor.java | 4 +-
.../org/apache/doris/qe/ConnectScheduler.java | 15 +-
.../java/org/apache/doris/qe/Coordinator.java | 305 ++++++++++-------
.../org/apache/doris/qe/GlobalVariable.java | 6 +-
.../org/apache/doris/qe/HelpObjectIface.java | 1 +
.../doris/qe/InsertStreamTxnExecutor.java | 6 +-
.../apache/doris/qe/MasterTxnExecutor.java | 1 +
.../org/apache/doris/qe/MultiLoadMgr.java | 10 +-
.../java/org/apache/doris/qe/QueryDetail.java | 2 +-
.../org/apache/doris/qe/QueryDetailQueue.java | 2 +-
.../apache/doris/qe/QueryStateException.java | 1 +
.../org/apache/doris/qe/ResultReceiver.java | 3 +-
.../doris/qe/RuntimeFilterTypeHelper.java | 13 +-
.../org/apache/doris/qe/SessionVariable.java | 4 +-
.../org/apache/doris/qe/ShowExecutor.java | 68 ++--
.../org/apache/doris/qe/SimpleScheduler.java | 3 +-
.../org/apache/doris/qe/SqlModeHelper.java | 12 +-
.../org/apache/doris/qe/StmtExecutor.java | 47 ++-
.../java/org/apache/doris/qe/VariableMgr.java | 6 +-
.../apache/doris/qe/cache/CacheAnalyzer.java | 14 +-
.../apache/doris/qe/cache/CacheBeProxy.java | 6 +-
.../doris/qe/cache/CacheCoordinator.java | 3 +-
.../doris/qe/cache/RowBatchBuilder.java | 3 +-
.../org/apache/doris/qe/cache/SqlCache.java | 3 +-
.../apache/doris/rewrite/ExprRewriter.java | 5 +-
.../rewrite/ExtractCommonFactorsRule.java | 15 +-
.../org/apache/doris/rewrite/FEFunctions.java | 6 +-
.../doris/rewrite/FoldConstantsRule.java | 15 +-
.../doris/rewrite/InferFiltersRule.java | 29 +-
.../rewrite/RewriteBinaryPredicatesRule.java | 7 +-
.../doris/rewrite/RewriteDateLiteralRule.java | 8 +-
.../rewrite/RewriteFromUnixTimeRule.java | 24 +-
.../apache/doris/rpc/AttachmentRequest.java | 5 +
.../doris/service/FrontendServiceImpl.java | 22 +-
.../statistics/StatisticsJobManager.java | 3 +-
.../statistics/StatisticsJobScheduler.java | 18 +-
.../doris/statistics/StatisticsManager.java | 2 +-
.../statistics/StatisticsTaskScheduler.java | 5 +-
.../apache/doris/statistics/StatsType.java | 2 +
.../java/org/apache/doris/system/Backend.java | 3 +-
.../doris/system/BackendHbResponse.java | 3 +-
.../doris/system/BeSelectionPolicy.java | 1 +
.../org/apache/doris/system/Diagnoser.java | 18 +-
.../doris/system/FrontendHbResponse.java | 3 +-
.../org/apache/doris/system/HeartbeatMgr.java | 14 +-
.../doris/system/SystemInfoService.java | 3 +-
.../java/org/apache/doris/task/AgentTask.java | 2 +-
.../apache/doris/task/AgentTaskExecutor.java | 3 +-
.../java/org/apache/doris/task/CloneTask.java | 2 +
.../doris/task/HadoopLoadPendingTask.java | 9 +-
.../org/apache/doris/task/LoadEtlTask.java | 8 +-
.../org/apache/doris/task/LoadTaskInfo.java | 85 +++--
.../apache/doris/task/MasterTaskExecutor.java | 12 +-
.../org/apache/doris/task/StreamLoadTask.java | 1 +
.../doris/task/UpdateTabletMetaInfoTask.java | 3 +-
.../AbstractTxnStateChangeCallback.java | 3 +-
.../transaction/DatabaseTransactionMgr.java | 188 ++++++-----
.../transaction/GlobalTransactionMgr.java | 62 ++--
.../transaction/PublishVersionDaemon.java | 15 +-
.../transaction/TransactionIdGenerator.java | 1 +
.../doris/transaction/TransactionState.java | 6 +-
.../transaction/TxnStateChangeCallback.java | 3 +-
.../org/apache/doris/alter/AlterTest.java | 12 +-
.../analysis/CreateDataSyncJobStmtTest.java | 1 +
.../doris/analysis/GroupByClauseTest.java | 1 +
.../doris/analysis/InsertArrayStmtTest.java | 1 -
.../doris/analysis/SetOperationStmtTest.java | 1 +
.../TableNameComparedLowercaseTest.java | 6 +-
.../TableNameStoredLowercaseTest.java | 6 +-
.../apache/doris/backup/BackupJobTest.java | 2 +
.../apache/doris/backup/RestoreJobTest.java | 2 +
.../doris/blockrule/SqlBlockRuleMgrTest.java | 2 +-
.../apache/doris/catalog/ColumnTypeTest.java | 1 +
.../apache/doris/catalog/HiveTableTest.java | 1 -
.../apache/doris/catalog/OlapTableTest.java | 4 +-
.../org/apache/doris/catalog/TableTest.java | 14 +-
.../org/apache/doris/catalog/TabletTest.java | 6 +-
.../doris/catalog/TempPartitionTest.java | 166 ++++++---
.../doris/catalog/TruncateTableTest.java | 15 +-
.../doris/clone/RebalancerTestUtil.java | 6 +-
.../clone/TabletRepairAndBalanceTest.java | 20 +-
.../doris/clone/TabletReplicaTooSlowTest.java | 3 +-
...TwoDimensionalGreedyRebalanceAlgoTest.java | 9 +-
.../apache/doris/common/GenericPoolTest.java | 1 +
.../doris/common/ThreadPoolManagerTest.java | 6 +-
.../org/apache/doris/common/util/URITest.java | 1 +
.../apache/doris/http/DorisHttpTestCase.java | 6 +-
.../doris/http/TableQueryPlanActionTest.java | 13 +-
.../doris/ldap/LdapPrivsCheckerTest.java | 8 +-
.../apache/doris/load/DeleteHandlerTest.java | 1 +
.../apache/doris/load/TabletLoadInfoTest.java | 1 +
.../load/loadv2/SparkRepositoryTest.java | 12 +-
.../load/sync/canal/CanalSyncDataTest.java | 5 +
.../load/sync/canal/CanalSyncJobTest.java | 4 +
.../BatchModifyPartitionsInfoTest.java | 12 +-
.../doris/planner/ColocatePlanTest.java | 4 +-
.../planner/RuntimeFilterGeneratorTest.java | 32 +-
.../doris/planner/StreamLoadScanNodeTest.java | 13 +-
.../doris/planner/TableFunctionPlanTest.java | 25 +-
.../apache/doris/qe/ConnectProcessorTest.java | 5 +
.../org/apache/doris/qe/CoordinatorTest.java | 6 +-
.../org/apache/doris/qe/MultiLoadMgrTest.java | 1 +
.../org/apache/doris/qe/ShowExecutorTest.java | 1 +
.../apache/doris/qe/ShowResultSetTest.java | 1 +
.../org/apache/doris/qe/StmtExecutorTest.java | 18 +-
.../doris/resource/TagSerializationTest.java | 12 +-
.../apache/doris/rewrite/FEFunctionsTest.java | 1 +
.../doris/rewrite/InferFiltersRuleTest.java | 1 +
.../apache/doris/service/ExecuteEnvTest.java | 26 +-
.../doris/system/SystemInfoServiceTest.java | 9 +-
.../org/apache/doris/task/AgentTaskTest.java | 3 +-
.../doris/task/SerialExecutorServiceTest.java | 1 +
.../doris/utframe/MockedBackendFactory.java | 2 +-
.../apache/doris/utframe/UtFrameUtils.java | 11 +-
.../org/apache/doris/udf/BitmapAndUDF.java | 3 +-
.../org/apache/doris/udf/BitmapCountUDF.java | 3 +-
.../org/apache/doris/udf/BitmapOrUDF.java | 3 +-
.../org/apache/doris/udf/BitmapUnionUDAF.java | 3 +-
.../org/apache/doris/udf/BitmapXorUDF.java | 3 +-
.../org/apache/doris/udf/JMXJsonUtil.java | 2 +-
.../java/org/apache/doris/udf/JniUtil.java | 3 +-
.../org/apache/doris/udf/UdfExecutor.java | 10 +-
fe/pom.xml | 32 +-
.../loadv2/dpp/DorisRangePartitioner.java | 1 +
.../doris/load/loadv2/dpp/DppColumns.java | 8 -
.../doris/load/loadv2/dpp/DppUtils.java | 37 ++-
.../load/loadv2/dpp/GlobalDictBuilder.java | 39 ++-
.../dpp/MinimumCoverageRollupTreeBuilder.java | 4 +-
.../doris/load/loadv2/dpp/SparkDpp.java | 213 ++++++------
.../load/loadv2/dpp/SparkRDDAggregator.java | 15 +-
.../doris/load/loadv2/etl/EtlJobConfig.java | 9 +-
.../doris/load/loadv2/etl/SparkEtlJob.java | 18 +-
514 files changed, 3877 insertions(+), 2757 deletions(-)
delete mode 100644 fe/fe-core/src/main/java/org/apache/doris/analysis/OpcodeRegistry.java
diff --git a/docs/en/developer/developer-guide/java-format-code.md b/docs/en/developer/developer-guide/java-format-code.md
index d87a97f5ca..bad37cc7f1 100644
--- a/docs/en/developer/developer-guide/java-format-code.md
+++ b/docs/en/developer/developer-guide/java-format-code.md
@@ -42,6 +42,13 @@ standard java package
* Do not use `import *`
* Do not use `import static`
+## Check when compile
+
Now, when compiling with `maven`, `CheckStyle` checks are done by default. This will slightly slow down compilation. If you want to skip checkstyle, please use the following command to compile
+```
+mvn clean install -DskipTests -Dcheckstyle.skip
+```
+
## Checkstyle Plugin
Now we have `formatter-check` in `CI` to check the code format.
diff --git a/docs/zh-CN/developer/developer-guide/java-format-code.md b/docs/zh-CN/developer/developer-guide/java-format-code.md
index 6896107476..6fad1867ec 100644
--- a/docs/zh-CN/developer/developer-guide/java-format-code.md
+++ b/docs/zh-CN/developer/developer-guide/java-format-code.md
@@ -42,6 +42,13 @@ standard java package
* 禁止使用 `import *`
* 禁止使用 `import static`
+## 编译时检查
+
+现在,在使用`maven`进行编译时,会默认进行`CheckStyle`检查。此检查会略微降低编译速度。如果想跳过此检查,请使用如下命令进行编译
+```
+mvn clean install -DskipTests -Dcheckstyle.skip
+```
+
## Checkstyle 插件
现在的 `CI` 之中会有 `formatter-check` 进行代码格式化检测。
diff --git a/fe/check/checkstyle/checkstyle.xml b/fe/check/checkstyle/checkstyle.xml
index 2b005d16c2..b37927ff1d 100644
--- a/fe/check/checkstyle/checkstyle.xml
+++ b/fe/check/checkstyle/checkstyle.xml
@@ -24,7 +24,7 @@ under the License.
-
+
@@ -43,7 +43,6 @@ under the License.
-
@@ -52,24 +51,20 @@ under the License.
-
-
-
-
@@ -83,33 +78,26 @@ under the License.
-
-
-
-
-
+
-
-
-
-
-
@@ -134,7 +120,6 @@ under the License.
value="CLASS_DEF, METHOD_DEF, CTOR_DEF, LITERAL_FOR, LITERAL_WHILE, STATIC_INIT,
INSTANCE_INIT, ANNOTATION_DEF, ENUM_DEF, INTERFACE_DEF, RECORD_DEF,
COMPACT_CTOR_DEF"/>
-
@@ -148,61 +133,36 @@ under the License.
-
-
-
+
-
-
-
-
-
-
-
-
-
-
-
-
-
+
+
+
+
-
-
-
-
-
-
+
+
+
-
-
-
-
-
-
+
+
-
-
-
-
-
-
-
-
-
-
+
+
+
@@ -220,25 +180,20 @@ under the License.
-
-
-
+
-
+
+ value="CLASS_DEF, INTERFACE_DEF, ENUM_DEF, RECORD_DEF, ANNOTATION_DEF"/>
-
@@ -252,11 +207,9 @@ under the License.
-
-
@@ -265,14 +218,9 @@ under the License.
-
-
-
-
-
-
-
+
+
@@ -285,90 +233,75 @@ under the License.
value="CLASS_DEF, INTERFACE_DEF, ENUM_DEF, ANNOTATION_DEF, ANNOTATION_FIELD_DEF,
PARAMETER_DEF, VARIABLE_DEF, METHOD_DEF, PATTERN_VARIABLE_DEF, RECORD_DEF,
RECORD_COMPONENT_DEF"/>
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
@@ -392,35 +325,28 @@ under the License.
value="GenericWhitespace ''{0}'' should followed by whitespace."/>
-
-
-
-
-
-
-
+
-
-
@@ -462,7 +387,6 @@ under the License.
-
@@ -479,7 +403,6 @@ under the License.
LITERAL_TRY, LITERAL_WHILE, LOR, LT, MINUS, MINUS_ASSIGN, MOD, MOD_ASSIGN,
NOT_EQUAL, PLUS, PLUS_ASSIGN, QUESTION, RCURLY, SL, SLIST, SL_ASSIGN, SR,
SR_ASSIGN, STAR, STAR_ASSIGN, LITERAL_ASSERT, TYPE_EXTENSION_AND"/>
-
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/fe/fe-common/src/main/java/org/apache/doris/common/io/DataOutputBuffer.java b/fe/fe-common/src/main/java/org/apache/doris/common/io/DataOutputBuffer.java
index f0337217d8..6aea88c41c 100644
--- a/fe/fe-common/src/main/java/org/apache/doris/common/io/DataOutputBuffer.java
+++ b/fe/fe-common/src/main/java/org/apache/doris/common/io/DataOutputBuffer.java
@@ -70,7 +70,7 @@ public class DataOutputBuffer extends DataOutputStream {
public void write(DataInput in, int len) throws IOException {
int newcount = count + len;
if (newcount > buf.length) {
- byte newbuf[] = new byte[Math.max(buf.length << 1, newcount)];
+ byte[] newbuf = new byte[Math.max(buf.length << 1, newcount)];
System.arraycopy(buf, 0, newbuf, 0, count);
buf = newbuf;
}
diff --git a/fe/fe-common/src/main/java/org/apache/doris/common/io/IOUtils.java b/fe/fe-common/src/main/java/org/apache/doris/common/io/IOUtils.java
index ffd2330bd3..60596f44d1 100644
--- a/fe/fe-common/src/main/java/org/apache/doris/common/io/IOUtils.java
+++ b/fe/fe-common/src/main/java/org/apache/doris/common/io/IOUtils.java
@@ -34,7 +34,7 @@ import java.net.Socket;
public class IOUtils {
public static long copyBytes(InputStream in, OutputStream out,
int buffSize, long len) throws IOException {
- byte buf[] = new byte[buffSize];
+ byte[] buf = new byte[buffSize];
int totalRead = 0;
int toRead = 0;
int bytesRead = 0;
@@ -76,7 +76,7 @@ public class IOUtils {
int buffSize, int speed, boolean close) throws IOException {
PrintStream ps = out instanceof PrintStream ? (PrintStream) out : null;
- byte buf[] = new byte[buffSize];
+ byte[] buf = new byte[buffSize];
long bytesReadTotal = 0;
long startTime = 0;
long sleepTime = 0;
@@ -133,7 +133,7 @@ public class IOUtils {
int buffSize, boolean close) throws IOException {
PrintStream ps = out instanceof PrintStream ? (PrintStream) out : null;
- byte buf[] = new byte[buffSize];
+ byte[] buf = new byte[buffSize];
long totalBytes = 0;
try {
int bytesRead = in.read(buf);
@@ -169,7 +169,7 @@ public class IOUtils {
* if it could not read requested number of bytes for any reason
* (including EOF)
*/
- public static void readFully(InputStream in, byte buf[], int off, int len)
+ public static void readFully(InputStream in, byte[] buf, int off, int len)
throws IOException {
int toRead = len;
int tmpOff = off;
@@ -263,6 +263,7 @@ public class IOUtils {
Text.writeString(output, value);
}
}
+
public static String readOptionStringOrNull(DataInput input) throws IOException {
if (input.readBoolean()) {
return Text.readString(input);
diff --git a/fe/fe-common/src/main/java/org/apache/doris/common/io/OutputBuffer.java b/fe/fe-common/src/main/java/org/apache/doris/common/io/OutputBuffer.java
index 8f95d7401b..f68a2f179e 100644
--- a/fe/fe-common/src/main/java/org/apache/doris/common/io/OutputBuffer.java
+++ b/fe/fe-common/src/main/java/org/apache/doris/common/io/OutputBuffer.java
@@ -66,7 +66,7 @@ public class OutputBuffer extends FilterOutputStream {
public void write(InputStream in, int len) throws IOException {
int newcount = count + len;
if (newcount > buf.length) {
- byte newbuf[] = new byte[Math.max(buf.length << 1, newcount)];
+ byte[] newbuf = new byte[Math.max(buf.length << 1, newcount)];
System.arraycopy(buf, 0, newbuf, 0, count);
buf = newbuf;
}
diff --git a/fe/fe-common/src/main/java/org/apache/doris/common/io/Text.java b/fe/fe-common/src/main/java/org/apache/doris/common/io/Text.java
index 7331195d32..1710cb34e2 100644
--- a/fe/fe-common/src/main/java/org/apache/doris/common/io/Text.java
+++ b/fe/fe-common/src/main/java/org/apache/doris/common/io/Text.java
@@ -606,7 +606,7 @@ public class Text implements Writable {
return ch;
}
- static final int offsetsFromUTF8[] = { 0x00000000, 0x00003080, 0x000E2080,
+ static final int[] offsetsFromUTF8 = { 0x00000000, 0x00003080, 0x000E2080,
0x03C82080, 0xFA082080, 0x82082080 };
/**
diff --git a/fe/fe-common/src/main/java/org/apache/doris/common/property/PropertySchema.java b/fe/fe-common/src/main/java/org/apache/doris/common/property/PropertySchema.java
index b68d2072bf..e58c62f80e 100644
--- a/fe/fe-common/src/main/java/org/apache/doris/common/property/PropertySchema.java
+++ b/fe/fe-common/src/main/java/org/apache/doris/common/property/PropertySchema.java
@@ -332,7 +332,7 @@ public abstract class PropertySchema {
}
}
- private static abstract class ComparableProperty extends PropertySchema {
+ private abstract static class ComparableProperty extends PropertySchema {
protected ComparableProperty(String name) {
super(name);
}
diff --git a/fe/fe-core/src/main/java/org/apache/doris/PaloFe.java b/fe/fe-core/src/main/java/org/apache/doris/PaloFe.java
index 0d1ec31434..8004ee15ab 100755
--- a/fe/fe-core/src/main/java/org/apache/doris/PaloFe.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/PaloFe.java
@@ -132,7 +132,8 @@ public class PaloFe {
// 1. HttpServer for HTTP Server
// 2. FeServer for Thrift Server
// 3. QeService for MySQL Server
- QeService qeService = new QeService(Config.query_port, Config.mysql_service_nio_enabled, ExecuteEnv.getInstance().getScheduler());
+ QeService qeService = new QeService(Config.query_port, Config.mysql_service_nio_enabled,
+ ExecuteEnv.getInstance().getScheduler());
FeServer feServer = new FeServer(Config.rpc_port);
feServer.start();
@@ -324,7 +325,8 @@ public class PaloFe {
} else if (cmdLineOpts.runImageTool()) {
File imageFile = new File(cmdLineOpts.getImagePath());
if (!imageFile.exists()) {
- System.out.println("image does not exist: " + imageFile.getAbsolutePath() + " . Please put an absolute path instead");
+ System.out.println("image does not exist: " + imageFile.getAbsolutePath()
+ + " . Please put an absolute path instead");
System.exit(-1);
} else {
System.out.println("Start to load image: ");
diff --git a/fe/fe-core/src/main/java/org/apache/doris/alter/Alter.java b/fe/fe-core/src/main/java/org/apache/doris/alter/Alter.java
index 2154ab311c..a11537eb33 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/alter/Alter.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/alter/Alter.java
@@ -171,7 +171,8 @@ public class Alter {
}
Catalog.getCurrentCatalog().dropPartition(db, olapTable, ((DropPartitionClause) alterClause));
} else if (alterClause instanceof ReplacePartitionClause) {
- Catalog.getCurrentCatalog().replaceTempPartition(db, olapTable, (ReplacePartitionClause) alterClause);
+ Catalog.getCurrentCatalog().replaceTempPartition(
+ db, olapTable, (ReplacePartitionClause) alterClause);
} else if (alterClause instanceof ModifyPartitionClause) {
ModifyPartitionClause clause = ((ModifyPartitionClause) alterClause);
// expand the partition names if it is 'Modify Partition(*)'
@@ -206,7 +207,8 @@ public class Alter {
} else if (currentAlterOps.contains(AlterOpType.MODIFY_DISTRIBUTION)) {
Preconditions.checkState(alterClauses.size() == 1);
AlterClause alterClause = alterClauses.get(0);
- Catalog.getCurrentCatalog().modifyDefaultDistributionBucketNum(db, olapTable, (ModifyDistributionClause) alterClause);
+ Catalog.getCurrentCatalog().modifyDefaultDistributionBucketNum(
+ db, olapTable, (ModifyDistributionClause) alterClause);
} else if (currentAlterOps.contains(AlterOpType.MODIFY_COLUMN_COMMENT)) {
processModifyColumnComment(db, olapTable, alterClauses);
} else if (currentAlterOps.contains(AlterOpType.MODIFY_TABLE_COMMENT)) {
@@ -227,7 +229,8 @@ public class Alter {
ModifyTableCommentClause clause = (ModifyTableCommentClause) alterClause;
tbl.setComment(clause.getComment());
// log
- ModifyCommentOperationLog op = ModifyCommentOperationLog.forTable(db.getId(), tbl.getId(), clause.getComment());
+ ModifyCommentOperationLog op = ModifyCommentOperationLog
+ .forTable(db.getId(), tbl.getId(), clause.getComment());
Catalog.getCurrentCatalog().getEditLog().logModifyComment(op);
} finally {
tbl.writeUnlock();
@@ -338,7 +341,8 @@ public class Alter {
}
}
- private void processModifyEngineInternal(Database db, Table externalTable, Map prop, boolean isReplay) {
+ private void processModifyEngineInternal(Database db, Table externalTable,
+ Map prop, boolean isReplay) {
MysqlTable mysqlTable = (MysqlTable) externalTable;
Map newProp = Maps.newHashMap(prop);
newProp.put(OdbcTable.ODBC_HOST, mysqlTable.getHost());
@@ -393,7 +397,8 @@ public class Alter {
processAlterExternalTable(stmt, table, db);
return;
default:
- throw new DdlException("Do not support alter " + table.getType().toString() + " table[" + tableName + "]");
+ throw new DdlException("Do not support alter "
+ + table.getType().toString() + " table[" + tableName + "]");
}
// the following ops should done outside table lock. because it contain synchronized create operation
@@ -402,7 +407,8 @@ public class Alter {
AlterClause alterClause = alterClauses.get(0);
if (alterClause instanceof AddPartitionClause) {
if (!((AddPartitionClause) alterClause).isTempPartition()) {
- DynamicPartitionUtil.checkAlterAllowed((OlapTable) db.getTableOrMetaException(tableName, TableType.OLAP));
+ DynamicPartitionUtil.checkAlterAllowed(
+ (OlapTable) db.getTableOrMetaException(tableName, TableType.OLAP));
}
Catalog.getCurrentCatalog().addPartition(db, tableName, (AddPartitionClause) alterClause);
} else if (alterClause instanceof ModifyPartitionClause) {
@@ -432,7 +438,8 @@ public class Alter {
}
// entry of processing replace table
- private void processReplaceTable(Database db, OlapTable origTable, List alterClauses) throws UserException {
+ private void processReplaceTable(Database db, OlapTable origTable, List alterClauses)
+ throws UserException {
ReplaceTableClause clause = (ReplaceTableClause) alterClauses.get(0);
String newTblName = clause.getTblName();
boolean swapTable = clause.isSwapTable();
@@ -452,7 +459,8 @@ public class Alter {
}
replaceTableInternal(db, origTable, olapNewTbl, swapTable, false);
// write edit log
- ReplaceTableOperationLog log = new ReplaceTableOperationLog(db.getId(), origTable.getId(), olapNewTbl.getId(), swapTable);
+ ReplaceTableOperationLog log = new ReplaceTableOperationLog(db.getId(),
+ origTable.getId(), olapNewTbl.getId(), swapTable);
Catalog.getCurrentCatalog().getEditLog().logReplaceTable(log);
LOG.info("finish replacing table {} with table {}, is swap: {}", oldTblName, newTblName, swapTable);
} finally {
@@ -533,7 +541,8 @@ public class Alter {
modifyViewDef(db, view, stmt.getInlineViewDef(), ctx.getSessionVariable().getSqlMode(), stmt.getColumns());
}
- private void modifyViewDef(Database db, View view, String inlineViewDef, long sqlMode, List newFullSchema) throws DdlException {
+ private void modifyViewDef(Database db, View view, String inlineViewDef, long sqlMode,
+ List newFullSchema) throws DdlException {
db.writeLockOrDdlException();
try {
view.writeLockOrDdlException();
@@ -549,7 +558,8 @@ public class Alter {
db.dropTable(viewName);
db.createTable(view);
- AlterViewInfo alterViewInfo = new AlterViewInfo(db.getId(), view.getId(), inlineViewDef, newFullSchema, sqlMode);
+ AlterViewInfo alterViewInfo = new AlterViewInfo(db.getId(), view.getId(),
+ inlineViewDef, newFullSchema, sqlMode);
Catalog.getCurrentCatalog().getEditLog().logModifyViewDef(alterViewInfo);
LOG.info("modify view[{}] definition to {}", viewName, inlineViewDef);
} finally {
@@ -680,7 +690,8 @@ public class Alter {
DateLiteral dateLiteral = new DateLiteral(dataProperty.getCooldownTimeMs(),
TimeUtils.getTimeZone(), Type.DATETIME);
newProperties.put(PropertyAnalyzer.PROPERTIES_STORAGE_COOLDOWN_TIME, dateLiteral.getStringValue());
- newProperties.put(PropertyAnalyzer.PROPERTIES_REMOTE_STORAGE_RESOURCE, dataProperty.getRemoteStorageResourceName());
+ newProperties.put(PropertyAnalyzer.PROPERTIES_REMOTE_STORAGE_RESOURCE,
+ dataProperty.getRemoteStorageResourceName());
DateLiteral dateLiteral1 = new DateLiteral(dataProperty.getRemoteCooldownTimeMs(),
TimeUtils.getTimeZone(), Type.DATETIME);
newProperties.put(PropertyAnalyzer.PROPERTIES_REMOTE_STORAGE_COOLDOWN_TIME, dateLiteral1.getStringValue());
diff --git a/fe/fe-core/src/main/java/org/apache/doris/alter/AlterHandler.java b/fe/fe-core/src/main/java/org/apache/doris/alter/AlterHandler.java
index 5205d418b3..347aa36747 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/alter/AlterHandler.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/alter/AlterHandler.java
@@ -116,7 +116,8 @@ public abstract class AlterHandler extends MasterDaemon {
AlterJobV2 alterJobV2 = iterator.next().getValue();
if (alterJobV2.isExpire()) {
iterator.remove();
- RemoveAlterJobV2OperationLog log = new RemoveAlterJobV2OperationLog(alterJobV2.getJobId(), alterJobV2.getType());
+ RemoveAlterJobV2OperationLog log = new RemoveAlterJobV2OperationLog(
+ alterJobV2.getJobId(), alterJobV2.getType());
Catalog.getCurrentCatalog().getEditLog().logRemoveExpiredAlterJobV2(log);
LOG.info("remove expired {} job {}. finish at {}", alterJobV2.getType(),
alterJobV2.getJobId(), TimeUtils.longToTimeString(alterJobV2.getFinishedTimeMs()));
@@ -169,7 +170,7 @@ public abstract class AlterHandler extends MasterDaemon {
* entry function. handle alter ops for external table
*/
public void processExternalTable(List alterClauses, Database db, Table externalTable)
- throws UserException {};
+ throws UserException {}
/*
* cancel alter ops
@@ -183,11 +184,13 @@ public abstract class AlterHandler extends MasterDaemon {
* We assume that the specified version is X.
* Case 1:
* After alter table process starts, there is no new load job being submitted. So the new replica
- * should be with version (0-1). So we just modify the replica's version to partition's visible version, which is X.
+ * should be with version (0-1). So we just modify the replica's version to
+ * partition's visible version, which is X.
* Case 2:
* After alter table process starts, there are some load job being processed.
* Case 2.1:
- * None of them succeed on this replica. so the version is still 1. We should modify the replica's version to X.
+     * None of them succeed on this replica. So the version is still 1.
+ * We should modify the replica's version to X.
* Case 2.2
* There are new load jobs after alter task, and at least one of them is succeed on this replica.
* So the replica's version should be larger than X. So we don't need to modify the replica version
diff --git a/fe/fe-core/src/main/java/org/apache/doris/alter/AlterJobV2.java b/fe/fe-core/src/main/java/org/apache/doris/alter/AlterJobV2.java
index a69ad6266a..89dce3e169 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/alter/AlterJobV2.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/alter/AlterJobV2.java
@@ -43,9 +43,12 @@ import java.util.List;
public abstract class AlterJobV2 implements Writable {
private static final Logger LOG = LogManager.getLogger(AlterJobV2.class);
+
public enum JobState {
PENDING, // Job is created
+ // CHECKSTYLE OFF
WAITING_TXN, // New replicas are created and Shadow catalog object is visible for incoming txns, waiting for previous txns to be finished
+ // CHECKSTYLE ON
RUNNING, // alter tasks are sent to BE, and waiting for them finished.
FINISHED, // job is done
CANCELLED; // job is cancelled(failed or be cancelled by user)
@@ -175,7 +178,7 @@ public abstract class AlterJobV2 implements Writable {
}
}
- public synchronized final boolean cancel(String errMsg) {
+ public final synchronized boolean cancel(String errMsg) {
return cancelImpl(errMsg);
}
diff --git a/fe/fe-core/src/main/java/org/apache/doris/alter/AlterOperations.java b/fe/fe-core/src/main/java/org/apache/doris/alter/AlterOperations.java
index 5d5c346cf3..c60594326b 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/alter/AlterOperations.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/alter/AlterOperations.java
@@ -59,8 +59,10 @@ public class AlterOperations {
}
public boolean hasPartitionOp() {
- return currentOps.contains(AlterOpType.ADD_PARTITION) || currentOps.contains(AlterOpType.DROP_PARTITION)
- || currentOps.contains(AlterOpType.REPLACE_PARTITION) || currentOps.contains(AlterOpType.MODIFY_PARTITION);
+ return currentOps.contains(AlterOpType.ADD_PARTITION)
+ || currentOps.contains(AlterOpType.DROP_PARTITION)
+ || currentOps.contains(AlterOpType.REPLACE_PARTITION)
+ || currentOps.contains(AlterOpType.MODIFY_PARTITION);
}
// MODIFY_TABLE_PROPERTY is also processed by SchemaChangeHandler
@@ -103,6 +105,7 @@ public class AlterOperations {
currentOps.add(opType);
}
+
public boolean hasEnableFeatureOP() {
return currentOps.contains(AlterOpType.ENABLE_FEATURE);
}
diff --git a/fe/fe-core/src/main/java/org/apache/doris/alter/MaterializedViewHandler.java b/fe/fe-core/src/main/java/org/apache/doris/alter/MaterializedViewHandler.java
index 66465e0aac..64319c4cfb 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/alter/MaterializedViewHandler.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/alter/MaterializedViewHandler.java
@@ -92,7 +92,8 @@ public class MaterializedViewHandler extends AlterHandler {
}
// for batch submit rollup job, tableId -> jobId
- // keep table's not final state job size. The job size determine's table's state, = 0 means table is normal, otherwise is rollup
+    // keep table's not final state job size. The job size determines table's state, = 0 means table is normal,
+ // otherwise is rollup
private Map> tableNotFinalStateJobMap = new ConcurrentHashMap<>();
// keep table's running job,used for concurrency limit
// table id -> set of running job ids
@@ -197,8 +198,9 @@ public class MaterializedViewHandler extends AlterHandler {
List mvColumns = checkAndPrepareMaterializedView(addMVClause, olapTable);
// Step2: create mv job
- RollupJobV2 rollupJobV2 = createMaterializedViewJob(mvIndexName, baseIndexName, mvColumns, addMVClause
- .getProperties(), olapTable, db, baseIndexId, addMVClause.getMVKeysType(), addMVClause.getOrigStmt());
+ RollupJobV2 rollupJobV2 = createMaterializedViewJob(mvIndexName, baseIndexName, mvColumns,
+ addMVClause.getProperties(), olapTable, db, baseIndexId,
+ addMVClause.getMVKeysType(), addMVClause.getOrigStmt());
addAlterJobV2(rollupJobV2);
@@ -223,7 +225,8 @@ public class MaterializedViewHandler extends AlterHandler {
* @throws DdlException
* @throws AnalysisException
*/
- public void processBatchAddRollup(List alterClauses, Database db, OlapTable olapTable) throws DdlException, AnalysisException {
+ public void processBatchAddRollup(List alterClauses, Database db, OlapTable olapTable)
+ throws DdlException, AnalysisException {
Map rollupNameJobMap = new LinkedHashMap<>();
// save job id for log
Set logJobIdSet = new HashSet<>();
@@ -265,11 +268,12 @@ public class MaterializedViewHandler extends AlterHandler {
long baseIndexId = checkAndGetBaseIndex(baseIndexName, olapTable);
// step 2.2 check rollup schema
- List rollupSchema = checkAndPrepareMaterializedView(addRollupClause, olapTable, baseIndexId, changeStorageFormat);
+ List rollupSchema = checkAndPrepareMaterializedView(
+ addRollupClause, olapTable, baseIndexId, changeStorageFormat);
// step 3 create rollup job
- RollupJobV2 alterJobV2 = createMaterializedViewJob(rollupIndexName, baseIndexName, rollupSchema, addRollupClause.getProperties(),
- olapTable, db, baseIndexId, olapTable.getKeysType(), null);
+ RollupJobV2 alterJobV2 = createMaterializedViewJob(rollupIndexName, baseIndexName, rollupSchema,
+ addRollupClause.getProperties(), olapTable, db, baseIndexId, olapTable.getKeysType(), null);
rollupNameJobMap.put(addRollupClause.getRollupName(), alterJobV2);
logJobIdSet.add(alterJobV2.getJobId());
@@ -319,10 +323,9 @@ public class MaterializedViewHandler extends AlterHandler {
* @throws AnalysisException
*/
private RollupJobV2 createMaterializedViewJob(String mvName, String baseIndexName,
- List mvColumns, Map properties,
- OlapTable olapTable, Database db, long baseIndexId, KeysType mvKeysType,
- OriginStatement origStmt)
- throws DdlException, AnalysisException {
+ List mvColumns, Map properties,
+ OlapTable olapTable, Database db, long baseIndexId, KeysType mvKeysType,
+ OriginStatement origStmt) throws DdlException, AnalysisException {
if (mvKeysType == null) {
// assign rollup index's key type, same as base index's
mvKeysType = olapTable.getKeysType();
@@ -384,11 +387,13 @@ public class MaterializedViewHandler extends AlterHandler {
if (baseReplica.getState() == Replica.ReplicaState.CLONE
|| baseReplica.getState() == Replica.ReplicaState.DECOMMISSION
|| baseReplica.getLastFailedVersion() > 0) {
- LOG.info("base replica {} of tablet {} state is {}, and last failed version is {}, skip creating rollup replica",
- baseReplica.getId(), baseTabletId, baseReplica.getState(), baseReplica.getLastFailedVersion());
+ LOG.info("base replica {} of tablet {} state is {}, and last failed version is {},"
+ + " skip creating rollup replica", baseReplica.getId(), baseTabletId,
+ baseReplica.getState(), baseReplica.getLastFailedVersion());
continue;
}
- Preconditions.checkState(baseReplica.getState() == Replica.ReplicaState.NORMAL, baseReplica.getState());
+ Preconditions.checkState(baseReplica.getState() == Replica.ReplicaState.NORMAL,
+ baseReplica.getState());
// replica's init state is ALTER, so that tablet report process will ignore its report
Replica mvReplica = new Replica(mvReplicaId, backendId, Replica.ReplicaState.ALTER,
Partition.PARTITION_INIT_VERSION, mvSchemaHash);
@@ -441,7 +446,8 @@ public class MaterializedViewHandler extends AlterHandler {
int numOfKeys = 0;
if (olapTable.getKeysType().isAggregationFamily()) {
if (addMVClause.getMVKeysType() != KeysType.AGG_KEYS) {
- throw new DdlException("The materialized view of aggregation or unique table must has grouping columns");
+ throw new DdlException("The materialized view of aggregation"
+                        + " or unique table must have grouping columns");
}
for (MVColumnItem mvColumnItem : mvColumnItemList) {
String mvColumnName = mvColumnItem.getName();
@@ -735,7 +741,8 @@ public class MaterializedViewHandler extends AlterHandler {
long dbId = db.getId();
long tableId = olapTable.getId();
editLog.logBatchDropRollup(new BatchDropInfo(dbId, tableId, indexIdSet));
- LOG.info("finished drop rollup index[{}] in table[{}]", String.join("", rollupNameSet), olapTable.getName());
+ LOG.info("finished drop rollup index[{}] in table[{}]",
+ String.join("", rollupNameSet), olapTable.getName());
} finally {
olapTable.writeUnlock();
}
@@ -998,7 +1005,8 @@ public class MaterializedViewHandler extends AlterHandler {
continue;
}
if (ctx != null) {
- if (!Catalog.getCurrentCatalog().getAuth().checkTblPriv(ctx, db.getFullName(), alterJob.getTableName(), PrivPredicate.ALTER)) {
+ if (!Catalog.getCurrentCatalog().getAuth().checkTblPriv(ctx, db.getFullName(),
+ alterJob.getTableName(), PrivPredicate.ALTER)) {
continue;
}
}
@@ -1041,7 +1049,8 @@ public class MaterializedViewHandler extends AlterHandler {
}
olapTable.writeLock();
try {
- if (olapTable.getState() != OlapTableState.ROLLUP && olapTable.getState() != OlapTableState.WAITING_STABLE) {
+ if (olapTable.getState() != OlapTableState.ROLLUP
+ && olapTable.getState() != OlapTableState.WAITING_STABLE) {
throw new DdlException("Table[" + tableName + "] is not under ROLLUP. "
+ "Use 'ALTER TABLE DROP ROLLUP' if you want to.");
}
diff --git a/fe/fe-core/src/main/java/org/apache/doris/alter/RollupJobV2.java b/fe/fe-core/src/main/java/org/apache/doris/alter/RollupJobV2.java
index a05863d9ea..ba707233a5 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/alter/RollupJobV2.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/alter/RollupJobV2.java
@@ -157,7 +157,8 @@ public class RollupJobV2 extends AlterJobV2 implements GsonPostProcessable {
}
public void addTabletIdMap(long partitionId, long rollupTabletId, long baseTabletId) {
- Map tabletIdMap = partitionIdToBaseRollupTabletIdMap.computeIfAbsent(partitionId, k -> Maps.newHashMap());
+ Map tabletIdMap = partitionIdToBaseRollupTabletIdMap
+ .computeIfAbsent(partitionId, k -> Maps.newHashMap());
tabletIdMap.put(rollupTabletId, baseTabletId);
}
@@ -181,7 +182,8 @@ public class RollupJobV2 extends AlterJobV2 implements GsonPostProcessable {
Preconditions.checkState(jobState == JobState.PENDING, jobState);
LOG.info("begin to send create rollup replica tasks. job: {}", jobId);
- Database db = Catalog.getCurrentCatalog().getDbOrException(dbId, s -> new AlterCancelException("Database " + s + " does not exist"));
+ Database db = Catalog.getCurrentCatalog().getDbOrException(dbId,
+ s -> new AlterCancelException("Database " + s + " does not exist"));
if (!checkTableStable(db)) {
return;
}
@@ -291,7 +293,8 @@ public class RollupJobV2 extends AlterJobV2 implements GsonPostProcessable {
tbl.writeUnlock();
}
- this.watershedTxnId = Catalog.getCurrentGlobalTransactionMgr().getTransactionIDGenerator().getNextTransactionId();
+ this.watershedTxnId = Catalog.getCurrentGlobalTransactionMgr()
+ .getTransactionIDGenerator().getNextTransactionId();
this.jobState = JobState.WAITING_TXN;
// write edit log
@@ -333,7 +336,8 @@ public class RollupJobV2 extends AlterJobV2 implements GsonPostProcessable {
}
LOG.info("previous transactions are all finished, begin to send rollup tasks. job: {}", jobId);
- Database db = Catalog.getCurrentCatalog().getDbOrException(dbId, s -> new AlterCancelException("Databasee " + s + " does not exist"));
+ Database db = Catalog.getCurrentCatalog().getDbOrException(dbId,
+                s -> new AlterCancelException("Database " + s + " does not exist"));
OlapTable tbl;
try {
@@ -412,7 +416,8 @@ public class RollupJobV2 extends AlterJobV2 implements GsonPostProcessable {
// must check if db or table still exist first.
// or if table is dropped, the tasks will never be finished,
// and the job will be in RUNNING state forever.
- Database db = Catalog.getCurrentCatalog().getDbOrException(dbId, s -> new AlterCancelException("Databasee " + s + " does not exist"));
+ Database db = Catalog.getCurrentCatalog().getDbOrException(dbId,
+                s -> new AlterCancelException("Database " + s + " does not exist"));
OlapTable tbl;
try {
@@ -447,7 +452,8 @@ public class RollupJobV2 extends AlterJobV2 implements GsonPostProcessable {
}
long visiableVersion = partition.getVisibleVersion();
- short expectReplicationNum = tbl.getPartitionInfo().getReplicaAllocation(partitionId).getTotalReplicaNum();
+ short expectReplicationNum = tbl.getPartitionInfo().getReplicaAllocation(
+ partitionId).getTotalReplicaNum();
MaterializedIndex rollupIndex = entry.getValue();
@@ -464,7 +470,8 @@ public class RollupJobV2 extends AlterJobV2 implements GsonPostProcessable {
if (healthyReplicaNum < expectReplicationNum / 2 + 1) {
LOG.warn("rollup tablet {} has few healthy replicas: {}, rollup job: {}",
rollupTablet.getId(), replicas, jobId);
- throw new AlterCancelException("rollup tablet " + rollupTablet.getId() + " has few healthy replicas");
+ throw new AlterCancelException("rollup tablet " + rollupTablet.getId()
+ + " has few healthy replicas");
}
} // end for tablets
} // end for partitions
@@ -544,7 +551,8 @@ public class RollupJobV2 extends AlterJobV2 implements GsonPostProcessable {
// Check whether transactions of the given database which txnId is less than 'watershedTxnId' are finished.
protected boolean isPreviousLoadFinished() throws AnalysisException {
- return Catalog.getCurrentGlobalTransactionMgr().isPreviousTransactionsFinished(watershedTxnId, dbId, Lists.newArrayList(tableId));
+ return Catalog.getCurrentGlobalTransactionMgr().isPreviousTransactionsFinished(
+ watershedTxnId, dbId, Lists.newArrayList(tableId));
}
/**
@@ -745,7 +753,8 @@ public class RollupJobV2 extends AlterJobV2 implements GsonPostProcessable {
return;
}
// parse the define stmt to schema
- SqlParser parser = new SqlParser(new SqlScanner(new StringReader(origStmt.originStmt), SqlModeHelper.MODE_DEFAULT));
+ SqlParser parser = new SqlParser(new SqlScanner(
+ new StringReader(origStmt.originStmt), SqlModeHelper.MODE_DEFAULT));
ConnectContext connectContext = new ConnectContext();
Database db;
try {
@@ -755,7 +764,8 @@ public class RollupJobV2 extends AlterJobV2 implements GsonPostProcessable {
}
String clusterName = db.getClusterName();
// It's almost impossible that db's cluster name is null, just in case
- // because before user want to create database, he must first enter a cluster which means that cluster is set to current ConnectContext
+        // because before a user can create a database, they must first enter a cluster,
+ // which means that cluster is set to current ConnectContext
// then when createDBStmt is executed, cluster name is set to Database
if (clusterName == null || clusterName.length() == 0) {
clusterName = SystemInfoService.DEFAULT_CLUSTER;
diff --git a/fe/fe-core/src/main/java/org/apache/doris/alter/SchemaChangeHandler.java b/fe/fe-core/src/main/java/org/apache/doris/alter/SchemaChangeHandler.java
index 1ffa5b8607..6329834d59 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/alter/SchemaChangeHandler.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/alter/SchemaChangeHandler.java
@@ -111,7 +111,8 @@ public class SchemaChangeHandler extends AlterHandler {
public static final int CYCLE_COUNT_TO_CHECK_EXPIRE_SCHEMA_CHANGE_JOB = 20;
- public final ThreadPoolExecutor schemaChangeThreadPool = ThreadPoolManager.newDaemonCacheThreadPool(MAX_ACTIVE_SCHEMA_CHANGE_JOB_V2_SIZE, "schema-change-pool", true);
+ public final ThreadPoolExecutor schemaChangeThreadPool = ThreadPoolManager.newDaemonCacheThreadPool(
+ MAX_ACTIVE_SCHEMA_CHANGE_JOB_V2_SIZE, "schema-change-pool", true);
public final Map activeSchemaChangeJobsV2 = Maps.newConcurrentMap();
@@ -144,7 +145,8 @@ public class SchemaChangeHandler extends AlterHandler {
indexSchemaMap, newColNameSet);
}
- private void processAddColumn(AddColumnClause alterClause, Table externalTable, List newSchema) throws DdlException {
+ private void processAddColumn(AddColumnClause alterClause,
+ Table externalTable, List newSchema) throws DdlException {
Column column = alterClause.getColumn();
ColumnPosition columnPos = alterClause.getColPos();
Set newColNameSet = Sets.newHashSet(column.getName());
@@ -152,7 +154,8 @@ public class SchemaChangeHandler extends AlterHandler {
addColumnInternal(column, columnPos, newSchema, newColNameSet);
}
- private void processAddColumns(AddColumnsClause alterClause, Table externalTable, List newSchema) throws DdlException {
+ private void processAddColumns(AddColumnsClause alterClause,
+ Table externalTable, List newSchema) throws DdlException {
List columns = alterClause.getColumns();
Set newColNameSet = Sets.newHashSet();
for (Column column : alterClause.getColumns()) {
@@ -190,7 +193,8 @@ public class SchemaChangeHandler extends AlterHandler {
}
}
- private void processDropColumn(DropColumnClause alterClause, Table externalTable, List newSchema) throws DdlException {
+ private void processDropColumn(DropColumnClause alterClause,
+ Table externalTable, List newSchema) throws DdlException {
String dropColName = alterClause.getColName();
// find column in base index and remove it
@@ -216,7 +220,7 @@ public class SchemaChangeHandler extends AlterHandler {
}
private void processDropColumn(DropColumnClause alterClause, OlapTable olapTable,
- Map> indexSchemaMap, List indexes) throws DdlException {
+ Map> indexSchemaMap, List indexes) throws DdlException {
String dropColName = alterClause.getColName();
String targetIndexName = alterClause.getRollupName();
checkIndexExists(olapTable, targetIndexName);
@@ -261,7 +265,8 @@ public class SchemaChangeHandler extends AlterHandler {
}
}
if (isKey && hasReplaceColumn) {
- throw new DdlException("Can not drop key column when table has value column with REPLACE aggregation method");
+ throw new DdlException(
+ "Can not drop key column when table has value column with REPLACE aggregation method");
}
} else {
// drop column in rollup and base index
@@ -279,7 +284,8 @@ public class SchemaChangeHandler extends AlterHandler {
}
}
if (isKey && hasReplaceColumn) {
- throw new DdlException("Can not drop key column when rollup has value column with REPLACE aggregation metho");
+ throw new DdlException(
+ "Can not drop key column when rollup has value column with REPLACE aggregation method");
}
}
}
@@ -352,7 +358,8 @@ public class SchemaChangeHandler extends AlterHandler {
}
// User can modify column type and column position
- private void processModifyColumn(ModifyColumnClause alterClause, Table externalTable, List newSchema) throws DdlException {
+ private void processModifyColumn(ModifyColumnClause alterClause,
+ Table externalTable, List newSchema) throws DdlException {
Column modColumn = alterClause.getColumn();
ColumnPosition columnPos = alterClause.getColPos();
@@ -430,14 +437,16 @@ public class SchemaChangeHandler extends AlterHandler {
}
} else if (KeysType.UNIQUE_KEYS == olapTable.getKeysType()) {
if (null != modColumn.getAggregationType()) {
- throw new DdlException("Can not assign aggregation method on column in Unique data model table: " + modColumn.getName());
+ throw new DdlException("Can not assign aggregation method"
+ + " on column in Unique data model table: " + modColumn.getName());
}
if (!modColumn.isKey()) {
modColumn.setAggregationType(AggregateType.REPLACE, true);
}
} else {
if (null != modColumn.getAggregationType()) {
- throw new DdlException("Can not assign aggregation method on column in Duplicate data model table: " + modColumn.getName());
+ throw new DdlException("Can not assign aggregation method"
+ + " on column in Duplicate data model table: " + modColumn.getName());
}
if (!modColumn.isKey()) {
modColumn.setAggregationType(AggregateType.NONE, true);
@@ -604,7 +613,8 @@ public class SchemaChangeHandler extends AlterHandler {
}
}
- private void processReorderColumn(ReorderColumnsClause alterClause, Table externalTable, List newSchema) throws DdlException {
+ private void processReorderColumn(ReorderColumnsClause alterClause,
+ Table externalTable, List newSchema) throws DdlException {
List orderedColNames = alterClause.getColumnsByPos();
newSchema.clear();
@@ -763,18 +773,21 @@ public class SchemaChangeHandler extends AlterHandler {
newColumn.setIsKey(true);
} else if (newColumn.getAggregationType() == AggregateType.SUM
&& newColumn.getDefaultValue() != null && !newColumn.getDefaultValue().equals("0")) {
- throw new DdlException("The default value of '" + newColName + "' with SUM aggregation function must be zero");
+ throw new DdlException("The default value of '"
+ + newColName + "' with SUM aggregation function must be zero");
}
} else if (KeysType.UNIQUE_KEYS == olapTable.getKeysType()) {
if (newColumn.getAggregationType() != null) {
- throw new DdlException("Can not assign aggregation method on column in Unique data model table: " + newColName);
+ throw new DdlException("Can not assign aggregation method"
+ + " on column in Unique data model table: " + newColName);
}
if (!newColumn.isKey()) {
newColumn.setAggregationType(AggregateType.REPLACE, true);
}
} else {
if (newColumn.getAggregationType() != null) {
- throw new DdlException("Can not assign aggregation method on column in Duplicate data model table: " + newColName);
+ throw new DdlException("Can not assign aggregation method"
+ + " on column in Duplicate data model table: " + newColName);
}
if (!newColumn.isKey()) {
if (targetIndexId != -1L
@@ -790,7 +803,8 @@ public class SchemaChangeHandler extends AlterHandler {
throw new DdlException("HLL type column can only be in Aggregation data model table: " + newColName);
}
- if (newColumn.getAggregationType() == AggregateType.BITMAP_UNION && KeysType.AGG_KEYS != olapTable.getKeysType()) {
+ if (newColumn.getAggregationType() == AggregateType.BITMAP_UNION
+ && KeysType.AGG_KEYS != olapTable.getKeysType()) {
throw new DdlException("BITMAP_UNION must be used in AGG_KEYS");
}
@@ -1095,7 +1109,8 @@ public class SchemaChangeHandler extends AlterHandler {
// create job
Catalog catalog = Catalog.getCurrentCatalog();
long jobId = catalog.getNextId();
- SchemaChangeJobV2 schemaChangeJob = new SchemaChangeJobV2(jobId, dbId, olapTable.getId(), olapTable.getName(), timeoutSecond * 1000);
+ SchemaChangeJobV2 schemaChangeJob = new SchemaChangeJobV2(jobId, dbId,
+ olapTable.getId(), olapTable.getName(), timeoutSecond * 1000);
schemaChangeJob.setBloomFilterInfo(hasBfChange, bfColumns, bfFpp);
schemaChangeJob.setAlterIndexInfo(hasIndexChange, indexes);
@@ -1288,7 +1303,8 @@ public class SchemaChangeHandler extends AlterHandler {
/*
* Create schema change job
- * 1. For each index which has been changed, create a SHADOW index, and save the mapping of origin index to SHADOW index.
+ * 1. For each index which has been changed, create a SHADOW index,
+ * and save the mapping of origin index to SHADOW index.
* 2. Create all tablets and replicas of all SHADOW index, add them to tablet inverted index.
* 3. Change table's state as SCHEMA_CHANGE
*/
@@ -1316,7 +1332,8 @@ public class SchemaChangeHandler extends AlterHandler {
// index state is SHADOW
MaterializedIndex shadowIndex = new MaterializedIndex(shadowIndexId, IndexState.SHADOW);
MaterializedIndex originIndex = partition.getIndex(originIndexId);
- TabletMeta shadowTabletMeta = new TabletMeta(dbId, tableId, partitionId, shadowIndexId, newSchemaHash, medium);
+ TabletMeta shadowTabletMeta = new TabletMeta(dbId, tableId, partitionId,
+ shadowIndexId, newSchemaHash, medium);
ReplicaAllocation replicaAlloc = olapTable.getPartitionInfo().getReplicaAllocation(partitionId);
Short totalReplicaNum = replicaAlloc.getTotalReplicaNum();
for (Tablet originTablet : originIndex.getTablets()) {
@@ -1338,11 +1355,14 @@ public class SchemaChangeHandler extends AlterHandler {
if (originReplica.getState() == Replica.ReplicaState.CLONE
|| originReplica.getState() == Replica.ReplicaState.DECOMMISSION
|| originReplica.getLastFailedVersion() > 0) {
- LOG.info("origin replica {} of tablet {} state is {}, and last failed version is {}, skip creating shadow replica",
- originReplica.getId(), originReplica, originReplica.getState(), originReplica.getLastFailedVersion());
+ LOG.info("origin replica {} of tablet {} state is {},"
+ + " and last failed version is {}, skip creating shadow replica",
+ originReplica.getId(), originReplica, originReplica.getState(),
+ originReplica.getLastFailedVersion());
continue;
}
- Preconditions.checkState(originReplica.getState() == ReplicaState.NORMAL, originReplica.getState());
+ Preconditions.checkState(originReplica.getState() == ReplicaState.NORMAL,
+ originReplica.getState());
// replica's init state is ALTER, so that tablet report process will ignore its report
Replica shadowReplica = new Replica(shadowReplicaId, backendId, ReplicaState.ALTER,
Partition.PARTITION_INIT_VERSION, newSchemaHash);
@@ -1353,8 +1373,8 @@ public class SchemaChangeHandler extends AlterHandler {
if (healthyReplicaNum < totalReplicaNum / 2 + 1) {
/*
* TODO(cmy): This is a bad design.
- * Because in the schema change job, we will only send tasks to the shadow replicas that have been created,
- * without checking whether the quorum of replica number are satisfied.
+ * Because in the schema change job, we will only send tasks to the shadow replicas
+ * that have been created, without checking whether the quorum of replica number are satisfied.
* This will cause the job to fail until we find that the quorum of replica number
* is not satisfied until the entire job is done.
* So here we check the replica number strictly and do not allow to submit the job
@@ -1370,7 +1390,8 @@ public class SchemaChangeHandler extends AlterHandler {
schemaChangeJob.addPartitionShadowIndex(partitionId, shadowIndexId, shadowIndex);
} // end for partition
- schemaChangeJob.addIndexSchema(shadowIndexId, originIndexId, newIndexName, newSchemaVersion, newSchemaHash, newShortKeyColumnCount, entry.getValue());
+ schemaChangeJob.addIndexSchema(shadowIndexId, originIndexId, newIndexName,
+ newSchemaVersion, newSchemaHash, newShortKeyColumnCount, entry.getValue());
} // end for index
// set table state
@@ -1428,14 +1449,16 @@ public class SchemaChangeHandler extends AlterHandler {
return schemaChangeJobInfos;
}
- private void getAlterJobV2Infos(Database db, List alterJobsV2, List> schemaChangeJobInfos) {
+ private void getAlterJobV2Infos(Database db, List alterJobsV2,
+ List> schemaChangeJobInfos) {
ConnectContext ctx = ConnectContext.get();
for (AlterJobV2 alterJob : alterJobsV2) {
if (alterJob.getDbId() != db.getId()) {
continue;
}
if (ctx != null) {
- if (!Catalog.getCurrentCatalog().getAuth().checkTblPriv(ctx, db.getFullName(), alterJob.getTableName(), PrivPredicate.ALTER)) {
+ if (!Catalog.getCurrentCatalog().getAuth().checkTblPriv(
+ ctx, db.getFullName(), alterJob.getTableName(), PrivPredicate.ALTER)) {
continue;
}
}
@@ -1487,9 +1510,11 @@ public class SchemaChangeHandler extends AlterHandler {
} else if (DynamicPartitionUtil.checkDynamicPartitionPropertiesExist(properties)) {
if (!olapTable.dynamicPartitionExists()) {
try {
- DynamicPartitionUtil.checkInputDynamicPartitionProperties(properties, olapTable.getPartitionInfo());
+ DynamicPartitionUtil.checkInputDynamicPartitionProperties(
+ properties, olapTable.getPartitionInfo());
} catch (DdlException e) {
- // This table is not a dynamic partition table and didn't supply all dynamic partition properties
+ // This table is not a dynamic partition table
+ // and didn't supply all dynamic partition properties
throw new DdlException("Table " + db.getFullName() + "."
+ olapTable.getName() + " is not a dynamic partition table."
+ " Use command `HELP ALTER TABLE` "
@@ -1498,8 +1523,10 @@ public class SchemaChangeHandler extends AlterHandler {
}
Catalog.getCurrentCatalog().modifyTableDynamicPartition(db, olapTable, properties);
return;
- } else if (properties.containsKey("default." + PropertyAnalyzer.PROPERTIES_REPLICATION_ALLOCATION)) {
- Preconditions.checkNotNull(properties.get("default." + PropertyAnalyzer.PROPERTIES_REPLICATION_ALLOCATION));
+ } else if (properties.containsKey(
+ "default." + PropertyAnalyzer.PROPERTIES_REPLICATION_ALLOCATION)) {
+ Preconditions.checkNotNull(properties.get("default."
+ + PropertyAnalyzer.PROPERTIES_REPLICATION_ALLOCATION));
Catalog.getCurrentCatalog().modifyTableDefaultReplicaAllocation(db, olapTable, properties);
return;
} else if (properties.containsKey(PropertyAnalyzer.PROPERTIES_REPLICATION_ALLOCATION)) {
@@ -1615,7 +1642,8 @@ public class SchemaChangeHandler extends AlterHandler {
/**
* Update all partitions' in-memory property of table
*/
- public void updateTableInMemoryMeta(Database db, String tableName, Map properties) throws UserException {
+ public void updateTableInMemoryMeta(Database db, String tableName, Map properties)
+ throws UserException {
List partitions = Lists.newArrayList();
OlapTable olapTable = (OlapTable) db.getTableOrMetaException(tableName, Table.TableType.OLAP);
olapTable.readLock();
@@ -1646,9 +1674,8 @@ public class SchemaChangeHandler extends AlterHandler {
* Update some specified partitions' in-memory property of table
*/
public void updatePartitionsInMemoryMeta(Database db,
- String tableName,
- List partitionNames,
- Map properties) throws DdlException, MetaNotFoundException {
+ String tableName, List partitionNames, Map properties)
+ throws DdlException, MetaNotFoundException {
OlapTable olapTable = (OlapTable) db.getTableOrMetaException(tableName, Table.TableType.OLAP);
boolean isInMemory = Boolean.parseBoolean(properties.get(PropertyAnalyzer.PROPERTIES_INMEMORY));
if (isInMemory == olapTable.isInMemory()) {
@@ -1735,7 +1762,8 @@ public class SchemaChangeHandler extends AlterHandler {
} else {
List>>> unfinishedMarks = countDownLatch.getLeftMarks();
// only show at most 3 results
- List>>> subList = unfinishedMarks.subList(0, Math.min(unfinishedMarks.size(), 3));
+ List>>> subList
+ = unfinishedMarks.subList(0, Math.min(unfinishedMarks.size(), 3));
if (!subList.isEmpty()) {
errMsg += " Unfinished mark: " + Joiner.on(", ").join(subList);
}
@@ -1770,9 +1798,11 @@ public class SchemaChangeHandler extends AlterHandler {
// find from new alter jobs first
List schemaChangeJobV2List = getUnfinishedAlterJobV2ByTableId(olapTable.getId());
// current schemaChangeJob job doesn't support batch operation,so just need to get one job
- schemaChangeJobV2 = schemaChangeJobV2List.size() == 0 ? null : Iterables.getOnlyElement(schemaChangeJobV2List);
+ schemaChangeJobV2 = schemaChangeJobV2List.size() == 0
+ ? null : Iterables.getOnlyElement(schemaChangeJobV2List);
if (schemaChangeJobV2 == null) {
- throw new DdlException("Table[" + tableName + "] is under schema change state but could not find related job");
+ throw new DdlException("Table[" + tableName + "] is under schema change state"
+ + " but could not find related job");
}
} finally {
olapTable.writeUnlock();
@@ -1804,7 +1834,8 @@ public class SchemaChangeHandler extends AlterHandler {
for (Index existedIdx : existedIndexes) {
if (existedIdx.getIndexName().equalsIgnoreCase(indexDef.getIndexName())) {
if (indexDef.isSetIfNotExists()) {
- LOG.info("create index[{}] which already exists on table[{}]", indexDef.getIndexName(), olapTable.getName());
+ LOG.info("create index[{}] which already exists on table[{}]",
+ indexDef.getIndexName(), olapTable.getName());
return true;
}
throw new DdlException("index `" + indexDef.getIndexName() + "` already exist.");
@@ -1834,7 +1865,8 @@ public class SchemaChangeHandler extends AlterHandler {
* Returns true if the index does not exist, there is no need to create the job to drop the index.
* Otherwise return false, there is need to create a job to drop the index.
*/
- private boolean processDropIndex(DropIndexClause alterClause, OlapTable olapTable, List indexes) throws DdlException {
+ private boolean processDropIndex(DropIndexClause alterClause, OlapTable olapTable,
+ List indexes) throws DdlException {
String indexName = alterClause.getIndexName();
List existedIndexes = olapTable.getIndexes();
Index found = null;
diff --git a/fe/fe-core/src/main/java/org/apache/doris/alter/SchemaChangeJobV2.java b/fe/fe-core/src/main/java/org/apache/doris/alter/SchemaChangeJobV2.java
index a95bdede1d..cf1dc977a9 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/alter/SchemaChangeJobV2.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/alter/SchemaChangeJobV2.java
@@ -198,7 +198,8 @@ public class SchemaChangeJobV2 extends AlterJobV2 {
protected void runPendingJob() throws AlterCancelException {
Preconditions.checkState(jobState == JobState.PENDING, jobState);
LOG.info("begin to send create replica tasks. job: {}", jobId);
- Database db = Catalog.getCurrentCatalog().getDbOrException(dbId, s -> new AlterCancelException("Database " + s + " does not exist"));
+ Database db = Catalog.getCurrentCatalog()
+ .getDbOrException(dbId, s -> new AlterCancelException("Database " + s + " does not exist"));
if (!checkTableStable(db)) {
return;
@@ -261,7 +262,8 @@ public class SchemaChangeJobV2 extends AlterJobV2 {
tbl.isInMemory(),
tbl.getPartitionInfo().getTabletType(partitionId),
tbl.getCompressionType());
- createReplicaTask.setBaseTablet(partitionIndexTabletMap.get(partitionId, shadowIdxId).get(shadowTabletId), originSchemaHash);
+ createReplicaTask.setBaseTablet(partitionIndexTabletMap.get(partitionId, shadowIdxId)
+ .get(shadowTabletId), originSchemaHash);
if (this.storageFormat != null) {
createReplicaTask.setStorageFormat(this.storageFormat);
}
@@ -317,12 +319,14 @@ public class SchemaChangeJobV2 extends AlterJobV2 {
tbl.writeUnlock();
}
- this.watershedTxnId = Catalog.getCurrentGlobalTransactionMgr().getTransactionIDGenerator().getNextTransactionId();
+ this.watershedTxnId = Catalog.getCurrentGlobalTransactionMgr()
+ .getTransactionIDGenerator().getNextTransactionId();
this.jobState = JobState.WAITING_TXN;
// write edit log
Catalog.getCurrentCatalog().getEditLog().logAlterJob(this);
- LOG.info("transfer schema change job {} state to {}, watershed txn id: {}", jobId, this.jobState, watershedTxnId);
+ LOG.info("transfer schema change job {} state to {}, watershed txn id: {}",
+ jobId, this.jobState, watershedTxnId);
}
private void addShadowIndexToCatalog(OlapTable tbl) {
@@ -369,7 +373,8 @@ public class SchemaChangeJobV2 extends AlterJobV2 {
}
LOG.info("previous transactions are all finished, begin to send schema change tasks. job: {}", jobId);
- Database db = Catalog.getCurrentCatalog().getDbOrException(dbId, s -> new AlterCancelException("Database " + s + " does not exist"));
+ Database db = Catalog.getCurrentCatalog()
+ .getDbOrException(dbId, s -> new AlterCancelException("Database " + s + " does not exist"));
OlapTable tbl;
try {
@@ -473,7 +478,8 @@ public class SchemaChangeJobV2 extends AlterJobV2 {
// must check if db or table still exist first.
// or if table is dropped, the tasks will never be finished,
// and the job will be in RUNNING state forever.
- Database db = Catalog.getCurrentCatalog().getDbOrException(dbId, s -> new AlterCancelException("Database " + s + " does not exist"));
+ Database db = Catalog.getCurrentCatalog()
+ .getDbOrException(dbId, s -> new AlterCancelException("Database " + s + " does not exist"));
OlapTable tbl;
try {
@@ -487,7 +493,8 @@ public class SchemaChangeJobV2 extends AlterJobV2 {
List tasks = schemaChangeBatchTask.getUnfinishedTasks(2000);
for (AgentTask task : tasks) {
if (task.getFailedTimes() >= 3) {
- throw new AlterCancelException("schema change task failed after try three times: " + task.getErrorMsg());
+ throw new AlterCancelException("schema change task failed after try three times: "
+ + task.getErrorMsg());
}
}
return;
@@ -507,7 +514,8 @@ public class SchemaChangeJobV2 extends AlterJobV2 {
Preconditions.checkNotNull(partition, partitionId);
long visiableVersion = partition.getVisibleVersion();
- short expectReplicationNum = tbl.getPartitionInfo().getReplicaAllocation(partition.getId()).getTotalReplicaNum();
+ short expectReplicationNum = tbl.getPartitionInfo()
+ .getReplicaAllocation(partition.getId()).getTotalReplicaNum();
Map shadowIndexMap = partitionIndexMap.row(partitionId);
for (Map.Entry entry : shadowIndexMap.entrySet()) {
@@ -680,7 +688,8 @@ public class SchemaChangeJobV2 extends AlterJobV2 {
// Check whether transactions of the given database which txnId is less than 'watershedTxnId' are finished.
protected boolean isPreviousLoadFinished() throws AnalysisException {
- return Catalog.getCurrentGlobalTransactionMgr().isPreviousTransactionsFinished(watershedTxnId, dbId, Lists.newArrayList(tableId));
+ return Catalog.getCurrentGlobalTransactionMgr().isPreviousTransactionsFinished(
+ watershedTxnId, dbId, Lists.newArrayList(tableId));
}
/**
diff --git a/fe/fe-core/src/main/java/org/apache/doris/alter/SystemHandler.java b/fe/fe-core/src/main/java/org/apache/doris/alter/SystemHandler.java
index 44cd210c78..b0aa25a6fe 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/alter/SystemHandler.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/alter/SystemHandler.java
@@ -117,7 +117,8 @@ public class SystemHandler extends AlterHandler {
AddBackendClause addBackendClause = (AddBackendClause) alterClause;
final String destClusterName = addBackendClause.getDestCluster();
- if ((!Strings.isNullOrEmpty(destClusterName) || addBackendClause.isFree()) && Config.disable_cluster_feature) {
+ if ((!Strings.isNullOrEmpty(destClusterName) || addBackendClause.isFree())
+ && Config.disable_cluster_feature) {
ErrorReport.reportAnalysisException(ErrorCode.ERR_INVALID_OPERATION, "ADD BACKEND TO CLUSTER");
}
diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/AbstractBackupStmt.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/AbstractBackupStmt.java
index 2bf8a48fd5..8a602921a4 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/analysis/AbstractBackupStmt.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/AbstractBackupStmt.java
@@ -36,8 +36,8 @@ import java.util.Map;
public class AbstractBackupStmt extends DdlStmt {
private static final Logger LOG = LogManager.getLogger(AbstractBackupStmt.class);
- private final static String PROP_TIMEOUT = "timeout";
- private final static long MIN_TIMEOUT_MS = 600 * 1000L; // 10 min
+ private static final String PROP_TIMEOUT = "timeout";
+ private static final long MIN_TIMEOUT_MS = 600 * 1000L; // 10 min
protected LabelName labelName;
protected String repoName;
@@ -46,8 +46,9 @@ public class AbstractBackupStmt extends DdlStmt {
protected long timeoutMs;
- public AbstractBackupStmt(LabelName labelName, String repoName, AbstractBackupTableRefClause abstractBackupTableRefClause,
- Map properties) {
+ public AbstractBackupStmt(LabelName labelName, String repoName,
+ AbstractBackupTableRefClause abstractBackupTableRefClause,
+ Map properties) {
this.labelName = labelName;
this.repoName = repoName;
this.abstractBackupTableRefClause = abstractBackupTableRefClause;
diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/AdminCancelRebalanceDiskStmt.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/AdminCancelRebalanceDiskStmt.java
index f9006f4c33..818032ccfd 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/analysis/AdminCancelRebalanceDiskStmt.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/AdminCancelRebalanceDiskStmt.java
@@ -39,7 +39,7 @@ public class AdminCancelRebalanceDiskStmt extends DdlStmt {
ImmutableMap backendsInfo = Catalog.getCurrentSystemInfo().getIdToBackend();
Map backendsID = new HashMap();
for (Backend backend : backendsInfo.values()) {
- backendsID.put(String.valueOf(backend.getHost()) + ":" + String.valueOf(backend.getHeartbeatPort()), backend.getId());
+ backendsID.put(backend.getHost() + ":" + backend.getHeartbeatPort(), backend.getId());
}
if (backends == null) {
for (Backend backend : backendsInfo.values()) {
diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/AdminCleanTrashStmt.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/AdminCleanTrashStmt.java
index 6a1e716102..b0a0b8caa4 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/analysis/AdminCleanTrashStmt.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/AdminCleanTrashStmt.java
@@ -39,7 +39,7 @@ public class AdminCleanTrashStmt extends DdlStmt {
ImmutableMap backendsInfo = Catalog.getCurrentSystemInfo().getIdToBackend();
Map backendsID = new HashMap();
for (Backend backend : backendsInfo.values()) {
- backendsID.put(String.valueOf(backend.getHost()) + ":" + String.valueOf(backend.getHeartbeatPort()), backend.getId());
+ backendsID.put(backend.getHost() + ":" + backend.getHeartbeatPort(), backend.getId());
}
if (backends == null) {
for (Backend backend : backendsInfo.values()) {
diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/AdminCompactTableStmt.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/AdminCompactTableStmt.java
index 1cd448a0fd..d65ad0acff 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/analysis/AdminCompactTableStmt.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/AdminCompactTableStmt.java
@@ -83,7 +83,8 @@ public class AdminCompactTableStmt extends DdlStmt {
// analyze where clause if not null
if (where == null) {
- throw new AnalysisException("Compaction type must be specified in Where clause like: type = 'BASE/CUMULATIVE'");
+ throw new AnalysisException("Compaction type must be specified in"
+ + " Where clause like: type = 'BASE/CUMULATIVE'");
}
if (!analyzeWhere()) {
diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/AdminRebalanceDiskStmt.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/AdminRebalanceDiskStmt.java
index cbda427a66..f99c0126bb 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/analysis/AdminRebalanceDiskStmt.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/AdminRebalanceDiskStmt.java
@@ -40,12 +40,10 @@ public class AdminRebalanceDiskStmt extends DdlStmt {
ImmutableMap backendsInfo = Catalog.getCurrentSystemInfo().getIdToBackend();
Map backendsID = new HashMap();
for (Backend backend : backendsInfo.values()) {
- backendsID.put(String.valueOf(backend.getHost()) + ":" + String.valueOf(backend.getHeartbeatPort()), backend.getId());
+ backendsID.put(backend.getHost() + ":" + backend.getHeartbeatPort(), backend.getId());
}
if (backends == null) {
- for (Backend backend : backendsInfo.values()) {
- this.backends.add(backend);
- }
+ this.backends.addAll(backendsInfo.values());
} else {
for (String backend : backends) {
if (backendsID.get(backend) != null) {
diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/AggregateInfo.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/AggregateInfo.java
index 298837d11e..28084d44ba 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/analysis/AggregateInfo.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/AggregateInfo.java
@@ -70,7 +70,7 @@ import java.util.List;
* TODO: Add query tests for aggregation with intermediate tuples with num_nodes=1.
*/
public final class AggregateInfo extends AggregateInfoBase {
- private final static Logger LOG = LogManager.getLogger(AggregateInfo.class);
+ private static final Logger LOG = LogManager.getLogger(AggregateInfo.class);
public enum AggPhase {
FIRST,
@@ -81,7 +81,7 @@ public final class AggregateInfo extends AggregateInfoBase {
public boolean isMerge() {
return this == FIRST_MERGE || this == SECOND_MERGE;
}
- };
+ }
// created by createMergeAggInfo()
private AggregateInfo mergeAggInfo;
@@ -173,7 +173,7 @@ public final class AggregateInfo extends AggregateInfoBase {
* If an aggTupleDesc is created, also registers eq predicates between the
* grouping exprs and their respective slots with 'analyzer'.
*/
- static public AggregateInfo create(
+ public static AggregateInfo create(
ArrayList groupingExprs, ArrayList aggExprs,
TupleDescriptor tupleDesc, Analyzer analyzer)
throws AnalysisException {
diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/AggregateInfoBase.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/AggregateInfoBase.java
index b44dc45589..f0298afff6 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/analysis/AggregateInfoBase.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/AggregateInfoBase.java
@@ -38,7 +38,7 @@ import java.util.List;
* tuple descriptors as well as their smaps for evaluating aggregate functions.
*/
public abstract class AggregateInfoBase {
- private final static Logger LOG =
+ private static final Logger LOG =
LoggerFactory.getLogger(AggregateInfoBase.class);
// For aggregations: All unique grouping expressions from a select block.
@@ -248,7 +248,7 @@ public abstract class AggregateInfoBase {
if (intermediateType != null) {
return true;
}
- if (noGrouping && ((AggregateFunction) aggExpr.fn).getNullableMode().equals(Function.NullableMode.DEPEND_ON_ARGUMENT)) {
+ if (noGrouping && aggExpr.fn.getNullableMode().equals(Function.NullableMode.DEPEND_ON_ARGUMENT)) {
return true;
}
}
diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/AggregateParamsList.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/AggregateParamsList.java
index 6afb62abff..12ff07d99a 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/analysis/AggregateParamsList.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/AggregateParamsList.java
@@ -43,7 +43,7 @@ class AggregateParamsList {
isDistinct = false;
}
- static public AggregateParamsList createStarParam() {
+ public static AggregateParamsList createStarParam() {
return new AggregateParamsList();
}
diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/AlterColumnStatsStmt.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/AlterColumnStatsStmt.java
index b5627c2565..8a21d18fcd 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/analysis/AlterColumnStatsStmt.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/AlterColumnStatsStmt.java
@@ -67,8 +67,8 @@ public class AlterColumnStatsStmt extends DdlStmt {
throw new AnalysisException(optional.get() + " is invalid statistic");
}
// check auth
- if (!Catalog.getCurrentCatalog().getAuth().checkTblPriv(ConnectContext.get(), tableName.getDb(), tableName.getTbl(),
- PrivPredicate.ALTER)) {
+ if (!Catalog.getCurrentCatalog().getAuth().checkTblPriv(
+ ConnectContext.get(), tableName.getDb(), tableName.getTbl(), PrivPredicate.ALTER)) {
ErrorReport.reportAnalysisException(ErrorCode.ERR_TABLEACCESS_DENIED_ERROR, "ALTER COLUMN STATS",
ConnectContext.get().getQualifiedUser(),
ConnectContext.get().getRemoteIP(),
diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/AlterDatabaseQuotaStmt.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/AlterDatabaseQuotaStmt.java
index daacdadb91..d9ce04fdab 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/analysis/AlterDatabaseQuotaStmt.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/AlterDatabaseQuotaStmt.java
@@ -63,7 +63,8 @@ public class AlterDatabaseQuotaStmt extends DdlStmt {
super.analyze(analyzer);
if (!Catalog.getCurrentCatalog().getAuth().checkGlobalPriv(ConnectContext.get(), PrivPredicate.ADMIN)) {
- ErrorReport.reportAnalysisException(ErrorCode.ERR_DBACCESS_DENIED_ERROR, analyzer.getQualifiedUser(), dbName);
+ ErrorReport.reportAnalysisException(ErrorCode.ERR_DBACCESS_DENIED_ERROR,
+ analyzer.getQualifiedUser(), dbName);
}
if (Strings.isNullOrEmpty(dbName)) {
diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/AlterDatabaseRename.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/AlterDatabaseRename.java
index b7606a3e1a..ff9ed52e8f 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/analysis/AlterDatabaseRename.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/AlterDatabaseRename.java
@@ -57,10 +57,9 @@ public class AlterDatabaseRename extends DdlStmt {
}
if (!Catalog.getCurrentCatalog().getAuth().checkDbPriv(ConnectContext.get(), dbName,
- PrivPredicate.of(PrivBitSet.of(PaloPrivilege.ADMIN_PRIV,
- PaloPrivilege.ALTER_PRIV),
- Operator.OR))) {
- ErrorReport.reportAnalysisException(ErrorCode.ERR_DBACCESS_DENIED_ERROR, analyzer.getQualifiedUser(), dbName);
+ PrivPredicate.of(PrivBitSet.of(PaloPrivilege.ADMIN_PRIV, PaloPrivilege.ALTER_PRIV), Operator.OR))) {
+ ErrorReport.reportAnalysisException(ErrorCode.ERR_DBACCESS_DENIED_ERROR,
+ analyzer.getQualifiedUser(), dbName);
}
if (Strings.isNullOrEmpty(newDbName)) {
diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/AlterRoutineLoadStmt.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/AlterRoutineLoadStmt.java
index 0bd3f6f438..4656d81f2d 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/analysis/AlterRoutineLoadStmt.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/AlterRoutineLoadStmt.java
@@ -174,32 +174,35 @@ public class AlterRoutineLoadStmt extends DdlStmt {
}
if (jobProperties.containsKey(CreateRoutineLoadStmt.JSONPATHS)) {
- analyzedJobProperties.put(CreateRoutineLoadStmt.JSONPATHS, jobProperties.get(CreateRoutineLoadStmt.JSONPATHS));
+ analyzedJobProperties.put(CreateRoutineLoadStmt.JSONPATHS,
+ jobProperties.get(CreateRoutineLoadStmt.JSONPATHS));
}
if (jobProperties.containsKey(CreateRoutineLoadStmt.JSONROOT)) {
- analyzedJobProperties.put(CreateRoutineLoadStmt.JSONROOT, jobProperties.get(CreateRoutineLoadStmt.JSONROOT));
+ analyzedJobProperties.put(CreateRoutineLoadStmt.JSONROOT,
+ jobProperties.get(CreateRoutineLoadStmt.JSONROOT));
}
if (jobProperties.containsKey(CreateRoutineLoadStmt.STRIP_OUTER_ARRAY)) {
- boolean stripOuterArray = Boolean.valueOf(jobProperties.get(CreateRoutineLoadStmt.STRIP_OUTER_ARRAY));
+ boolean stripOuterArray = Boolean.parseBoolean(jobProperties.get(CreateRoutineLoadStmt.STRIP_OUTER_ARRAY));
analyzedJobProperties.put(CreateRoutineLoadStmt.STRIP_OUTER_ARRAY, String.valueOf(stripOuterArray));
}
if (jobProperties.containsKey(CreateRoutineLoadStmt.NUM_AS_STRING)) {
- boolean numAsString = Boolean.valueOf(jobProperties.get(CreateRoutineLoadStmt.NUM_AS_STRING));
+ boolean numAsString = Boolean.parseBoolean(jobProperties.get(CreateRoutineLoadStmt.NUM_AS_STRING));
analyzedJobProperties.put(CreateRoutineLoadStmt.NUM_AS_STRING, String.valueOf(numAsString));
}
if (jobProperties.containsKey(CreateRoutineLoadStmt.FUZZY_PARSE)) {
- boolean fuzzyParse = Boolean.valueOf(jobProperties.get(CreateRoutineLoadStmt.FUZZY_PARSE));
+ boolean fuzzyParse = Boolean.parseBoolean(jobProperties.get(CreateRoutineLoadStmt.FUZZY_PARSE));
analyzedJobProperties.put(CreateRoutineLoadStmt.FUZZY_PARSE, String.valueOf(fuzzyParse));
}
}
private void checkDataSourceProperties() throws UserException {
if (!FeConstants.runningUnitTest) {
- RoutineLoadJob job = Catalog.getCurrentCatalog().getRoutineLoadManager().checkPrivAndGetJob(getDbName(), getLabel());
+ RoutineLoadJob job = Catalog.getCurrentCatalog().getRoutineLoadManager()
+ .checkPrivAndGetJob(getDbName(), getLabel());
dataSourceProperties.setTimezone(job.getTimezone());
} else {
dataSourceProperties.setTimezone(TimeUtils.DEFAULT_TIME_ZONE);
diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/AlterSqlBlockRuleStmt.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/AlterSqlBlockRuleStmt.java
index cc2a13c228..a51e20d98a 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/analysis/AlterSqlBlockRuleStmt.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/AlterSqlBlockRuleStmt.java
@@ -73,16 +73,21 @@ public class AlterSqlBlockRuleStmt extends DdlStmt {
private void setProperties(Map properties) throws AnalysisException {
this.sql = properties.getOrDefault(CreateSqlBlockRuleStmt.SQL_PROPERTY, CreateSqlBlockRuleStmt.STRING_NOT_SET);
- this.sqlHash = properties.getOrDefault(CreateSqlBlockRuleStmt.SQL_HASH_PROPERTY, CreateSqlBlockRuleStmt.STRING_NOT_SET);
+ this.sqlHash = properties.getOrDefault(CreateSqlBlockRuleStmt.SQL_HASH_PROPERTY,
+ CreateSqlBlockRuleStmt.STRING_NOT_SET);
String partitionNumString = properties.get(CreateSqlBlockRuleStmt.SCANNED_PARTITION_NUM);
String tabletNumString = properties.get(CreateSqlBlockRuleStmt.SCANNED_TABLET_NUM);
String cardinalityString = properties.get(CreateSqlBlockRuleStmt.SCANNED_CARDINALITY);
SqlBlockUtil.checkSqlAndSqlHashSetBoth(sql, sqlHash);
- SqlBlockUtil.checkSqlAndLimitationsSetBoth(sql, sqlHash, partitionNumString, tabletNumString, cardinalityString);
- this.partitionNum = Util.getLongPropertyOrDefault(partitionNumString, LONG_NOT_SET, null, CreateSqlBlockRuleStmt.SCANNED_PARTITION_NUM + " should be a long");
- this.tabletNum = Util.getLongPropertyOrDefault(tabletNumString, LONG_NOT_SET, null, CreateSqlBlockRuleStmt.SCANNED_TABLET_NUM + " should be a long");
- this.cardinality = Util.getLongPropertyOrDefault(cardinalityString, LONG_NOT_SET, null, CreateSqlBlockRuleStmt.SCANNED_CARDINALITY + " should be a long");
+ SqlBlockUtil.checkSqlAndLimitationsSetBoth(sql, sqlHash,
+ partitionNumString, tabletNumString, cardinalityString);
+ this.partitionNum = Util.getLongPropertyOrDefault(partitionNumString, LONG_NOT_SET, null,
+ CreateSqlBlockRuleStmt.SCANNED_PARTITION_NUM + " should be a long");
+ this.tabletNum = Util.getLongPropertyOrDefault(tabletNumString, LONG_NOT_SET, null,
+ CreateSqlBlockRuleStmt.SCANNED_TABLET_NUM + " should be a long");
+ this.cardinality = Util.getLongPropertyOrDefault(cardinalityString, LONG_NOT_SET, null,
+ CreateSqlBlockRuleStmt.SCANNED_CARDINALITY + " should be a long");
// allow null, represents no modification
String globalStr = properties.get(CreateSqlBlockRuleStmt.GLOBAL_PROPERTY);
this.global = StringUtils.isNotEmpty(globalStr) ? Boolean.parseBoolean(globalStr) : null;
diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/AlterTableStatsStmt.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/AlterTableStatsStmt.java
index 42661b7bdd..b25f7c2897 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/analysis/AlterTableStatsStmt.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/AlterTableStatsStmt.java
@@ -61,8 +61,8 @@ public class AlterTableStatsStmt extends DdlStmt {
throw new AnalysisException(optional.get() + " is invalid statistic");
}
// check auth
- if (!Catalog.getCurrentCatalog().getAuth().checkTblPriv(ConnectContext.get(), tableName.getDb(), tableName.getTbl(),
- PrivPredicate.ALTER)) {
+ if (!Catalog.getCurrentCatalog().getAuth().checkTblPriv(
+ ConnectContext.get(), tableName.getDb(), tableName.getTbl(), PrivPredicate.ALTER)) {
ErrorReport.reportAnalysisException(ErrorCode.ERR_TABLEACCESS_DENIED_ERROR, "ALTER TABLE STATS",
ConnectContext.get().getQualifiedUser(),
ConnectContext.get().getRemoteIP(),
diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/AlterTableStmt.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/AlterTableStmt.java
index 1085197b12..1734dcc0c7 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/analysis/AlterTableStmt.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/AlterTableStmt.java
@@ -87,10 +87,12 @@ public class AlterTableStmt extends DdlStmt {
if (alterFeature == null || alterFeature == EnableFeatureClause.Features.UNKNOWN) {
throw new AnalysisException("unknown feature for alter clause");
}
- if (table.getKeysType() != KeysType.UNIQUE_KEYS && alterFeature == EnableFeatureClause.Features.BATCH_DELETE) {
+ if (table.getKeysType() != KeysType.UNIQUE_KEYS
+ && alterFeature == EnableFeatureClause.Features.BATCH_DELETE) {
throw new AnalysisException("Batch delete only supported in unique tables.");
}
- if (table.getKeysType() != KeysType.UNIQUE_KEYS && alterFeature == EnableFeatureClause.Features.SEQUENCE_LOAD) {
+ if (table.getKeysType() != KeysType.UNIQUE_KEYS
+ && alterFeature == EnableFeatureClause.Features.SEQUENCE_LOAD) {
throw new AnalysisException("Sequence load only supported in unique tables.");
}
// analyse sequence column
diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/AlterViewStmt.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/AlterViewStmt.java
index 124b7cc850..93a55b9908 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/analysis/AlterViewStmt.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/AlterViewStmt.java
@@ -51,11 +51,12 @@ public class AlterViewStmt extends BaseViewStmt {
Table table = analyzer.getTableOrAnalysisException(tableName);
if (!(table instanceof View)) {
- throw new AnalysisException(String.format("ALTER VIEW not allowed on a table:%s.%s", getDbName(), getTable()));
+ throw new AnalysisException(String.format("ALTER VIEW not allowed on a table:%s.%s",
+ getDbName(), getTable()));
}
- if (!Catalog.getCurrentCatalog().getAuth().checkTblPriv(ConnectContext.get(), tableName.getDb(), tableName.getTbl(),
- PrivPredicate.ALTER)) {
+ if (!Catalog.getCurrentCatalog().getAuth().checkTblPriv(
+ ConnectContext.get(), tableName.getDb(), tableName.getTbl(), PrivPredicate.ALTER)) {
ErrorReport.reportAnalysisException(ErrorCode.ERR_TABLEACCESS_DENIED_ERROR, "ALTER VIEW",
ConnectContext.get().getQualifiedUser(),
ConnectContext.get().getRemoteIP(),
diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/AnalyticExpr.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/AnalyticExpr.java
index b0147dff55..a241da5db3 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/analysis/AnalyticExpr.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/AnalyticExpr.java
@@ -62,7 +62,7 @@ import java.util.Objects;
* and need to be substituted as such; example: COUNT(COUNT(..)) OVER (..)
*/
public class AnalyticExpr extends Expr {
- private final static Logger LOG = LoggerFactory.getLogger(AnalyticExpr.class);
+ private static final Logger LOG = LoggerFactory.getLogger(AnalyticExpr.class);
private static String NTILE = "NTILE";
private FunctionCallExpr fnCall;
@@ -134,12 +134,15 @@ public class AnalyticExpr extends Expr {
public FunctionCallExpr getFnCall() {
return fnCall;
}
+
public List getPartitionExprs() {
return partitionExprs;
}
+
public List getOrderByElements() {
return orderByElements;
}
+
public AnalyticWindow getWindow() {
return window;
}
@@ -210,11 +213,10 @@ public class AnalyticExpr extends Expr {
|| fn.functionName().equalsIgnoreCase(MAX) || fn.functionName().equalsIgnoreCase(COUNT)) {
return true;
}
-
return false;
}
- static private boolean isOffsetFn(Function fn) {
+ private static boolean isOffsetFn(Function fn) {
if (!isAnalyticFn(fn)) {
return false;
}
@@ -222,7 +224,7 @@ public class AnalyticExpr extends Expr {
return fn.functionName().equalsIgnoreCase(LEAD) || fn.functionName().equalsIgnoreCase(LAG);
}
- static private boolean isMinMax(Function fn) {
+ private static boolean isMinMax(Function fn) {
if (!isAnalyticFn(fn)) {
return false;
}
@@ -230,7 +232,7 @@ public class AnalyticExpr extends Expr {
return fn.functionName().equalsIgnoreCase(MIN) || fn.functionName().equalsIgnoreCase(MAX);
}
- static private boolean isRankingFn(Function fn) {
+ private static boolean isRankingFn(Function fn) {
if (!isAnalyticFn(fn)) {
return false;
}
@@ -241,7 +243,7 @@ public class AnalyticExpr extends Expr {
|| fn.functionName().equalsIgnoreCase(NTILE);
}
- static private boolean isHllAggFn(Function fn) {
+ private static boolean isHllAggFn(Function fn) {
if (!isAnalyticFn(fn)) {
return false;
}
@@ -376,6 +378,7 @@ public class AnalyticExpr extends Expr {
+ orderByElements.get(0).getExpr().toSql());
}
}
+
/**
* check the value out of range in lag/lead() function
*/
diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/AnalyticInfo.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/AnalyticInfo.java
index 8b88f4c4fc..aa993e34bf 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/analysis/AnalyticInfo.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/AnalyticInfo.java
@@ -36,7 +36,7 @@ import java.util.List;
* the corresponding analytic result tuple and its substitution map.
*/
public final class AnalyticInfo extends AggregateInfoBase {
- private final static Logger LOG = LoggerFactory.getLogger(AnalyticInfo.class);
+ private static final Logger LOG = LoggerFactory.getLogger(AnalyticInfo.class);
// All unique analytic exprs of a select block. Used to populate
// super.aggregateExprs_ based on AnalyticExpr.getFnCall() for each analytic expr
@@ -87,7 +87,7 @@ public final class AnalyticInfo extends AggregateInfoBase {
* Creates complete AnalyticInfo for analyticExprs, including tuple descriptors and
* smaps.
*/
- static public AnalyticInfo create(ArrayList analyticExprs, Analyzer analyzer) {
+ public static AnalyticInfo create(ArrayList analyticExprs, Analyzer analyzer) {
Preconditions.checkState(analyticExprs != null && !analyticExprs.isEmpty());
Expr.removeDuplicates(analyticExprs);
AnalyticInfo result = new AnalyticInfo(analyticExprs);
diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/AnalyticWindow.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/AnalyticWindow.java
index b6bc06d3bb..4266d02888 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/analysis/AnalyticWindow.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/AnalyticWindow.java
@@ -55,6 +55,7 @@ public class AnalyticWindow {
public String toString() {
return description;
}
+
public TAnalyticWindowType toThrift() {
return this == ROWS ? TAnalyticWindowType.ROWS : TAnalyticWindowType.RANGE;
}
@@ -77,6 +78,7 @@ public class AnalyticWindow {
public String toString() {
return description;
}
+
public TAnalyticWindowBoundaryType toThrift() {
Preconditions.checkState(!isAbsolutePos());
@@ -140,6 +142,7 @@ public class AnalyticWindow {
public BoundaryType getType() {
return type;
}
+
public Expr getExpr() {
return expr;
}
@@ -243,12 +246,15 @@ public class AnalyticWindow {
public Type getType() {
return type;
}
+
public Boundary getLeftBoundary() {
return leftBoundary;
}
+
public Boundary getRightBoundary() {
return rightBoundary;
}
+
public Boundary setRightBoundary(Boundary b) {
return rightBoundary = b;
}
diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/Analyzer.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/Analyzer.java
index 2a598b77c1..997891253f 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/analysis/Analyzer.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/Analyzer.java
@@ -98,7 +98,7 @@ import java.util.stream.Collectors;
* simple.
*/
public class Analyzer {
- private final static Logger LOG = LogManager.getLogger(Analyzer.class);
+ private static final Logger LOG = LogManager.getLogger(Analyzer.class);
// used for contains inlineview analytic function's tuple changed
private ExprSubstitutionMap changeResSmap = new ExprSubstitutionMap();
@@ -969,7 +969,8 @@ public class Analyzer {
* At this time, vectorization cannot support this situation,
* so it is necessary to fall back to non-vectorization for processing.
* For example:
- * Query: select * from t1 left join (select k1, count(k2) as count_k2 from t2 group by k1) tmp on t1.k1=tmp.k1
+ * Query: select * from t1 left join
+ * (select k1, count(k2) as count_k2 from t2 group by k1) tmp on t1.k1=tmp.k1
* Origin: tmp.k1 not null, tmp.count_k2 not null
* Result: throw VecNotImplException
*/
@@ -1528,6 +1529,7 @@ public class Analyzer {
public Set getGlobalInDeDuplication() {
return Sets.newHashSet(globalState.globalInDeDuplication);
}
+
/**
* Makes the given semi-joined tuple visible such that its slots can be referenced.
* If tid is null, makes the currently visible semi-joined tuple invisible again.
@@ -2000,7 +2002,8 @@ public class Analyzer {
if (globalState.context == null) {
return false;
}
- return !globalState.context.getSessionVariable().isEnableJoinReorderBasedCost() && !globalState.context.getSessionVariable().isDisableJoinReorder();
+ return !globalState.context.getSessionVariable().isEnableJoinReorderBasedCost()
+ && !globalState.context.getSessionVariable().isDisableJoinReorder();
}
public boolean enableInferPredicate() {
@@ -2028,7 +2031,8 @@ public class Analyzer {
if (globalState.context == null) {
return false;
}
- return globalState.context.getSessionVariable().isEnableJoinReorderBasedCost() && !globalState.context.getSessionVariable().isDisableJoinReorder();
+ return globalState.context.getSessionVariable().isEnableJoinReorderBasedCost()
+ && !globalState.context.getSessionVariable().isDisableJoinReorder();
}
public boolean safeIsEnableFoldConstantByBe() {
@@ -2176,6 +2180,7 @@ public class Analyzer {
public List getUnassignedConjuncts(PlanNode node) {
return getUnassignedConjuncts(node.getTblRefIds());
}
+
/**
* Returns true if e must be evaluated by a join node. Note that it may still be
* safe to evaluate e elsewhere as well, but in any case the join must evaluate e.
@@ -2196,6 +2201,7 @@ public class Analyzer {
return false;
}
+
/**
* Mark all slots that are referenced in exprs as materialized.
*/
diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/ArithmeticExpr.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/ArithmeticExpr.java
index bd1ac2d0b7..8629d771ba 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/analysis/ArithmeticExpr.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/ArithmeticExpr.java
@@ -77,15 +77,19 @@ public class ArithmeticExpr extends Expr {
public String toString() {
return description;
}
+
public String getName() {
return name;
}
+
public OperatorPosition getPos() {
return pos;
}
+
public TExprOpcode getOpcode() {
return opcode;
}
+
public boolean isUnary() {
return pos == OperatorPosition.UNARY_PREFIX
|| pos == OperatorPosition.UNARY_POSTFIX;
diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/BackupStmt.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/BackupStmt.java
index 43dee100be..a1eef45ec2 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/analysis/BackupStmt.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/BackupStmt.java
@@ -28,8 +28,8 @@ import com.google.common.collect.Maps;
import java.util.Map;
public class BackupStmt extends AbstractBackupStmt {
- private final static String PROP_TYPE = "type";
- public final static String PROP_CONTENT = "content";
+ private static final String PROP_TYPE = "type";
+ public static final String PROP_CONTENT = "content";
public enum BackupType {
INCREMENTAL, FULL
diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/BinaryPredicate.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/BinaryPredicate.java
index 12d6aa7b24..7da7356b36 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/analysis/BinaryPredicate.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/BinaryPredicate.java
@@ -49,7 +49,7 @@ import java.util.Objects;
* Most predicates with two operands..
*/
public class BinaryPredicate extends Predicate implements Writable {
- private final static Logger LOG = LogManager.getLogger(BinaryPredicate.class);
+ private static final Logger LOG = LogManager.getLogger(BinaryPredicate.class);
// true if this BinaryPredicate is inferred from slot equivalences, false otherwise.
private boolean isInferred = false;
diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/BrokerDesc.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/BrokerDesc.java
index eb79f65267..f1155086fe 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/analysis/BrokerDesc.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/BrokerDesc.java
@@ -41,11 +41,11 @@ import java.util.Map;
// "password" = "password0"
// )
public class BrokerDesc extends StorageDesc implements Writable {
- private final static Logger LOG = LogManager.getLogger(BrokerDesc.class);
+ private static final Logger LOG = LogManager.getLogger(BrokerDesc.class);
// just for multi load
- public final static String MULTI_LOAD_BROKER = "__DORIS_MULTI_LOAD_BROKER__";
- public final static String MULTI_LOAD_BROKER_BACKEND_KEY = "__DORIS_MULTI_LOAD_BROKER_BACKEND__";
+ public static final String MULTI_LOAD_BROKER = "__DORIS_MULTI_LOAD_BROKER__";
+ public static final String MULTI_LOAD_BROKER_BACKEND_KEY = "__DORIS_MULTI_LOAD_BROKER_BACKEND__";
// Only used for recovery
private BrokerDesc() {
@@ -108,6 +108,7 @@ public class BrokerDesc extends StorageDesc implements Writable {
}
return TFileType.FILE_BROKER;
}
+
public StorageBackend.StorageType storageType() {
return storageType;
}
diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/BuiltinAggregateFunction.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/BuiltinAggregateFunction.java
index 00b99e87de..d14f3400d0 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/analysis/BuiltinAggregateFunction.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/BuiltinAggregateFunction.java
@@ -40,6 +40,7 @@ public class BuiltinAggregateFunction extends Function {
public boolean isAnalyticFn() {
return isAnalyticFn;
}
+
// TODO: this is not used yet until the planner understand this.
private org.apache.doris.catalog.Type intermediateType;
private boolean reqIntermediateTuple = false;
diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/CaseExpr.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/CaseExpr.java
index 787d658552..40f0016453 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/analysis/CaseExpr.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/CaseExpr.java
@@ -300,7 +300,8 @@ public class CaseExpr extends Expr {
// this method just compare literal value and not completely consistent with be,for two cases
// 1 not deal float
- // 2 just compare literal value with same type. for a example sql 'select case when 123 then '1' else '2' end as col'
+ // 2 just compare literal value with same type.
+ // for example, the sql 'select case when 123 then '1' else '2' end as col'
// for be will return '1', because be only regard 0 as false
// but for current LiteralExpr.compareLiteral, `123`' won't be regard as true
// the case which two values has different type left to be
@@ -349,7 +350,8 @@ public class CaseExpr extends Expr {
// early return when the `when expr` can't be converted to constants
Expr startExpr = expr.getChild(startIndex);
if ((!startExpr.isLiteral() || startExpr instanceof DecimalLiteral || startExpr instanceof FloatLiteral)
- || (!(startExpr instanceof NullLiteral) && !startExpr.getClass().toString().equals(caseExpr.getClass().toString()))) {
+ || (!(startExpr instanceof NullLiteral)
+ && !startExpr.getClass().toString().equals(caseExpr.getClass().toString()))) {
return expr;
}
@@ -363,7 +365,9 @@ public class CaseExpr extends Expr {
// 1 not literal
// 2 float
// 3 `case expr` and `when expr` don't have same type
- if ((!currentWhenExpr.isLiteral() || currentWhenExpr instanceof DecimalLiteral || currentWhenExpr instanceof FloatLiteral)
+ if ((!currentWhenExpr.isLiteral()
+ || currentWhenExpr instanceof DecimalLiteral
+ || currentWhenExpr instanceof FloatLiteral)
|| !currentWhenExpr.getClass().toString().equals(caseExpr.getClass().toString())) {
// remove the expr which has been evaluated
List exprLeft = new ArrayList<>();
diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/CastExpr.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/CastExpr.java
index f5dd01c112..5b4ed33e1c 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/analysis/CastExpr.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/CastExpr.java
@@ -160,7 +160,8 @@ public class CastExpr extends Expr {
if (toType.isNull() || disableRegisterCastingFunction(fromType, toType)) {
continue;
}
- String beClass = toType.isDecimalV2() || fromType.isDecimalV2() ? "DecimalV2Operators" : "CastFunctions";
+ String beClass = toType.isDecimalV2()
+ || fromType.isDecimalV2() ? "DecimalV2Operators" : "CastFunctions";
if (fromType.isTime()) {
beClass = "TimeOperators";
}
diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/ChannelDescription.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/ChannelDescription.java
index 13ff1438e6..86f7482f02 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/analysis/ChannelDescription.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/ChannelDescription.java
@@ -62,7 +62,8 @@ public class ChannelDescription implements Writable {
@SerializedName(value = "channelId")
private long channelId;
- public ChannelDescription(String srcDatabase, String srcTableName, String targetTable, PartitionNames partitionNames, List colNames) {
+ public ChannelDescription(String srcDatabase, String srcTableName, String targetTable,
+ PartitionNames partitionNames, List colNames) {
this.srcDatabase = srcDatabase;
this.srcTableName = srcTableName;
this.targetTable = targetTable;
diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/ColumnDef.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/ColumnDef.java
index cffa8df492..e98f4c2dfc 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/analysis/ColumnDef.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/ColumnDef.java
@@ -50,8 +50,8 @@ public class ColumnDef {
* k1 INT NULL DEFAULT NULL
*
* ColumnnDef will be transformed to Column in Analysis phase, and in Column, default value is a String.
- * No matter does the user set the default value as NULL explicitly, or not set default value,
- * the default value in Column will be "null", so that Doris can not distinguish between "not set" and "set as null".
+ * Whether the user sets the default value as NULL explicitly or does not set a default value, the default value
+ * in Column will be "null", so that Doris can not distinguish between "not set" and "set as null".
*
* But this is OK because Column has another attribute "isAllowNull".
* If the column is not allowed to be null, and user does not set the default value,
@@ -113,6 +113,7 @@ public class ColumnDef {
this.comment = "";
this.defaultValue = DefaultValue.NOT_SET;
}
+
public ColumnDef(String name, TypeDef typeDef, boolean isKey, AggregateType aggregateType,
boolean isAllowNull, DefaultValue defaultValue, String comment) {
this(name, typeDef, isKey, aggregateType, isAllowNull, defaultValue, comment, true);
@@ -146,7 +147,8 @@ public class ColumnDef {
}
public static ColumnDef newSequenceColumnDef(Type type, AggregateType aggregateType) {
- return new ColumnDef(Column.SEQUENCE_COL, new TypeDef(type), false, aggregateType, true, DefaultValue.NULL_DEFAULT_VALUE,
+ return new ColumnDef(Column.SEQUENCE_COL, new TypeDef(type), false,
+ aggregateType, true, DefaultValue.NULL_DEFAULT_VALUE,
"sequence column hidden column", false);
}
diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/CompoundPredicate.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/CompoundPredicate.java
index d2175117a0..2672d5564c 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/analysis/CompoundPredicate.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/CompoundPredicate.java
@@ -40,7 +40,7 @@ import java.util.Objects;
* &&, ||, ! predicates.
*/
public class CompoundPredicate extends Predicate {
- private final static Logger LOG = LogManager.getLogger(CompoundPredicate.class);
+ private static final Logger LOG = LogManager.getLogger(CompoundPredicate.class);
private final Operator op;
public static void initBuiltins(FunctionSet functionSet) {
diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/CreateDataSyncJobStmt.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/CreateDataSyncJobStmt.java
index cbf0bdbf13..67159630c3 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/analysis/CreateDataSyncJobStmt.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/CreateDataSyncJobStmt.java
@@ -96,10 +96,12 @@ public class CreateDataSyncJobStmt extends DdlStmt {
Database db = Catalog.getCurrentCatalog().getDbOrAnalysisException(dbName);
OlapTable olapTable = db.getOlapTableOrAnalysisException(tableName);
if (olapTable.getKeysType() != KeysType.UNIQUE_KEYS) {
- throw new AnalysisException("Table: " + tableName + " is not a unique table, key type: " + olapTable.getKeysType());
+ throw new AnalysisException("Table: " + tableName
+ + " is not a unique table, key type: " + olapTable.getKeysType());
}
if (!olapTable.hasDeleteSign()) {
- throw new AnalysisException("Table: " + tableName + " don't support batch delete. Please upgrade it to support, see `help alter table`.");
+ throw new AnalysisException("Table: " + tableName
+ + " don't support batch delete. Please upgrade it to support, see `help alter table`.");
}
}
}
diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/CreateDbStmt.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/CreateDbStmt.java
index 3ee94b70ae..90b4354844 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/analysis/CreateDbStmt.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/CreateDbStmt.java
@@ -65,7 +65,8 @@ public class CreateDbStmt extends DdlStmt {
dbName = ClusterNamespace.getFullName(getClusterName(), dbName);
if (!Catalog.getCurrentCatalog().getAuth().checkDbPriv(ConnectContext.get(), dbName, PrivPredicate.CREATE)) {
- ErrorReport.reportAnalysisException(ErrorCode.ERR_DBACCESS_DENIED_ERROR, analyzer.getQualifiedUser(), dbName);
+ ErrorReport.reportAnalysisException(
+ ErrorCode.ERR_DBACCESS_DENIED_ERROR, analyzer.getQualifiedUser(), dbName);
}
}
diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/CreateFunctionStmt.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/CreateFunctionStmt.java
index 5cbbc5102a..9115a25391 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/analysis/CreateFunctionStmt.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/CreateFunctionStmt.java
@@ -64,9 +64,9 @@ import java.net.URL;
import java.net.URLClassLoader;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
-import java.util.HashMap;
import java.time.LocalDate;
import java.time.LocalDateTime;
+import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
@@ -246,10 +246,12 @@ public class CreateFunctionStmt extends DdlStmt {
}
private void analyzeUda() throws AnalysisException {
- AggregateFunction.AggregateFunctionBuilder builder = AggregateFunction.AggregateFunctionBuilder.createUdfBuilder();
+ AggregateFunction.AggregateFunctionBuilder builder
+ = AggregateFunction.AggregateFunctionBuilder.createUdfBuilder();
- builder.name(functionName).argsType(argsDef.getArgTypes()).retType(returnType.getType()).
- hasVarArgs(argsDef.isVariadic()).intermediateType(intermediateType.getType()).location(URI.create(userFile));
+ builder.name(functionName).argsType(argsDef.getArgTypes()).retType(returnType.getType())
+ .hasVarArgs(argsDef.isVariadic()).intermediateType(intermediateType.getType())
+ .location(URI.create(userFile));
String initFnSymbol = properties.get(INIT_KEY);
if (initFnSymbol == null && !(binaryType == TFunctionBinaryType.JAVA_UDF)) {
throw new AnalysisException("No 'init_fn' in properties");
diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/CreateMaterializedViewStmt.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/CreateMaterializedViewStmt.java
index b8125417ee..61e33892a4 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/analysis/CreateMaterializedViewStmt.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/CreateMaterializedViewStmt.java
@@ -87,7 +87,8 @@ public class CreateMaterializedViewStmt extends DdlStmt {
private String baseIndexName;
private String dbName;
private KeysType mvKeysType = KeysType.DUP_KEYS;
- //if process is replaying log, isReplay is true, otherwise is false, avoid replay process error report, only in Rollup or MaterializedIndexMeta is true
+ // isReplay is true when this statement is created while replaying an edit log, false otherwise; it is used to
+ // avoid error reporting during replay. It is true only when created from Rollup or MaterializedIndexMeta.
private boolean isReplay = false;
public CreateMaterializedViewStmt(String mvName, SelectStmt selectStmt, Map properties) {
@@ -317,7 +318,8 @@ public class CreateMaterializedViewStmt extends DdlStmt {
for (; theBeginIndexOfValue < mvColumnItemList.size(); theBeginIndexOfValue++) {
MVColumnItem column = mvColumnItemList.get(theBeginIndexOfValue);
keySizeByte += column.getType().getIndexSize();
- if (theBeginIndexOfValue + 1 > FeConstants.shortkey_max_column_count || keySizeByte > FeConstants.shortkey_maxsize_bytes) {
+ if (theBeginIndexOfValue + 1 > FeConstants.shortkey_max_column_count
+ || keySizeByte > FeConstants.shortkey_maxsize_bytes) {
if (theBeginIndexOfValue == 0 && column.getType().getPrimitiveType().isCharFamily()) {
column.setIsKey(true);
theBeginIndexOfValue++;
@@ -413,8 +415,7 @@ public class CreateMaterializedViewStmt extends DdlStmt {
default:
throw new AnalysisException("Unsupported function:" + functionName);
}
- MVColumnItem mvColumnItem = new MVColumnItem(mvColumnName, type, mvAggregateType, false, defineExpr, baseColumnName);
- return mvColumnItem;
+ return new MVColumnItem(mvColumnName, type, mvAggregateType, false, defineExpr, baseColumnName);
}
public Map parseDefineExprWithoutAnalyze() throws AnalysisException {
diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/CreateRoutineLoadStmt.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/CreateRoutineLoadStmt.java
index eff6a8bb7c..a55d61d945 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/analysis/CreateRoutineLoadStmt.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/CreateRoutineLoadStmt.java
@@ -417,7 +417,8 @@ public class CreateRoutineLoadStmt extends DdlStmt {
throw new AnalysisException(optional.get() + " is invalid property");
}
- desiredConcurrentNum = ((Long) Util.getLongPropertyOrDefault(jobProperties.get(DESIRED_CONCURRENT_NUMBER_PROPERTY),
+ desiredConcurrentNum = ((Long) Util.getLongPropertyOrDefault(
+ jobProperties.get(DESIRED_CONCURRENT_NUMBER_PROPERTY),
Config.max_routine_load_task_concurrent_num, DESIRED_CONCURRENT_NUMBER_PRED,
DESIRED_CONCURRENT_NUMBER_PROPERTY + " should > 0")).intValue();
diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/CreateSqlBlockRuleStmt.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/CreateSqlBlockRuleStmt.java
index a458a468ea..4d2018cf3d 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/analysis/CreateSqlBlockRuleStmt.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/CreateSqlBlockRuleStmt.java
@@ -123,12 +123,17 @@ public class CreateSqlBlockRuleStmt extends DdlStmt {
SqlBlockUtil.checkSqlAndSqlHashSetBoth(sql, sqlHash);
SqlBlockUtil.checkPropertiesValidate(sql, sqlHash, partitionNumString, tabletNumString, cardinalityString);
- this.partitionNum = Util.getLongPropertyOrDefault(partitionNumString, 0L, null, SCANNED_PARTITION_NUM + " should be a long");
- this.tabletNum = Util.getLongPropertyOrDefault(tabletNumString, 0L, null, SCANNED_TABLET_NUM + " should be a long");
- this.cardinality = Util.getLongPropertyOrDefault(cardinalityString, 0L, null, SCANNED_CARDINALITY + " should be a long");
+ this.partitionNum = Util.getLongPropertyOrDefault(partitionNumString, 0L, null,
+ SCANNED_PARTITION_NUM + " should be a long");
+ this.tabletNum = Util.getLongPropertyOrDefault(tabletNumString, 0L, null,
+ SCANNED_TABLET_NUM + " should be a long");
+ this.cardinality = Util.getLongPropertyOrDefault(cardinalityString, 0L, null,
+ SCANNED_CARDINALITY + " should be a long");
- this.global = Util.getBooleanPropertyOrDefault(properties.get(GLOBAL_PROPERTY), false, GLOBAL_PROPERTY + " should be a boolean");
- this.enable = Util.getBooleanPropertyOrDefault(properties.get(ENABLE_PROPERTY), true, ENABLE_PROPERTY + " should be a boolean");
+ this.global = Util.getBooleanPropertyOrDefault(properties.get(GLOBAL_PROPERTY),
+ false, GLOBAL_PROPERTY + " should be a boolean");
+ this.enable = Util.getBooleanPropertyOrDefault(properties.get(ENABLE_PROPERTY),
+ true, ENABLE_PROPERTY + " should be a boolean");
}
public static void checkCommonProperties(Map properties) throws UserException {
diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/CreateTableLikeStmt.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/CreateTableLikeStmt.java
index b65aaa4d28..1b1bf6dac6 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/analysis/CreateTableLikeStmt.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/CreateTableLikeStmt.java
@@ -47,7 +47,8 @@ public class CreateTableLikeStmt extends DdlStmt {
private final ArrayList rollupNames;
private final boolean withAllRollup;
- public CreateTableLikeStmt(boolean ifNotExists, TableName tableName, TableName existedTableName, ArrayList rollupNames, boolean withAllRollup) throws DdlException {
+ public CreateTableLikeStmt(boolean ifNotExists, TableName tableName, TableName existedTableName,
+ ArrayList rollupNames, boolean withAllRollup) throws DdlException {
this.ifNotExists = ifNotExists;
this.tableName = tableName;
this.existedTableName = existedTableName;
diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/CreateTableStmt.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/CreateTableStmt.java
index 44de0d27f9..56f20d2113 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/analysis/CreateTableStmt.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/CreateTableStmt.java
@@ -381,7 +381,8 @@ public class CreateTableStmt extends DdlStmt {
if (columnDef.getType().isArrayType()) {
if (columnDef.getAggregateType() != null && columnDef.getAggregateType() != AggregateType.NONE) {
- throw new AnalysisException("Array column can't support aggregation " + columnDef.getAggregateType());
+ throw new AnalysisException("Array column can't support aggregation "
+ + columnDef.getAggregateType());
}
if (columnDef.isKey()) {
throw new AnalysisException("Array can only be used in the non-key column of"
@@ -409,7 +410,8 @@ public class CreateTableStmt extends DdlStmt {
if (partitionDesc instanceof ListPartitionDesc || partitionDesc instanceof RangePartitionDesc) {
partitionDesc.analyze(columnDefs, properties);
} else {
- throw new AnalysisException("Currently only support range and list partition with engine type olap");
+ throw new AnalysisException("Currently only support range"
+ + " and list partition with engine type olap");
}
}
diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/CreateUserStmt.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/CreateUserStmt.java
index 25bdf87b93..180d3f532f 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/analysis/CreateUserStmt.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/CreateUserStmt.java
@@ -134,7 +134,8 @@ public class CreateUserStmt extends DdlStmt {
}
// check if current user has GRANT priv on GLOBAL or DATABASE level.
- if (!Catalog.getCurrentCatalog().getAuth().checkHasPriv(ConnectContext.get(), PrivPredicate.GRANT, PrivLevel.GLOBAL, PrivLevel.DATABASE)) {
+ if (!Catalog.getCurrentCatalog().getAuth().checkHasPriv(ConnectContext.get(),
+ PrivPredicate.GRANT, PrivLevel.GLOBAL, PrivLevel.DATABASE)) {
ErrorReport.reportAnalysisException(ErrorCode.ERR_SPECIFIC_ACCESS_DENIED_ERROR, "GRANT");
}
}
diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/DataDescription.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/DataDescription.java
index bda93a8651..86d7722305 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/analysis/DataDescription.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/DataDescription.java
@@ -130,13 +130,14 @@ public class DataDescription {
* For hadoop load, this param is also used to persistence.
* The function in this param is copied from 'parsedColumnExprList'
*/
- private final Map>> columnToHadoopFunction = Maps.newTreeMap(String.CASE_INSENSITIVE_ORDER);
+ private final Map>> columnToHadoopFunction
+ = Maps.newTreeMap(String.CASE_INSENSITIVE_ORDER);
private boolean isHadoopLoad = false;
private LoadTask.MergeType mergeType = LoadTask.MergeType.APPEND;
private final Expr deleteCondition;
- private Map properties;
+ private final Map properties;
public DataDescription(String tableName,
PartitionNames partitionNames,
@@ -572,7 +573,8 @@ public class DataDescription {
* columnToHadoopFunction = {"col3": "strftime("%Y-%m-%d %H:%M:%S", tmp_col3)"}
*/
private void analyzeColumns() throws AnalysisException {
- if ((fileFieldNames == null || fileFieldNames.isEmpty()) && (columnsFromPath != null && !columnsFromPath.isEmpty())) {
+ if ((fileFieldNames == null || fileFieldNames.isEmpty())
+ && (columnsFromPath != null && !columnsFromPath.isEmpty())) {
throw new AnalysisException("Can not specify columns_from_path without column_list");
}
@@ -719,7 +721,8 @@ public class DataDescription {
}
// check olapTable schema and sequenceCol
if (olapTable.hasSequenceCol() && !hasSequenceCol()) {
- throw new AnalysisException("Table " + olapTable.getName() + " has sequence column, need to specify the sequence column");
+ throw new AnalysisException("Table " + olapTable.getName()
+ + " has sequence column, need to specify the sequence column");
}
if (hasSequenceCol() && !olapTable.hasSequenceCol()) {
throw new AnalysisException("There is no sequence column in the table " + olapTable.getName());
diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/DateLiteral.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/DateLiteral.java
index a59952f55e..24f552e100 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/analysis/DateLiteral.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/DateLiteral.java
@@ -84,10 +84,10 @@ public class DateLiteral extends LiteralExpr {
private static Map MONTH_NAME_DICT = Maps.newHashMap();
private static Map MONTH_ABBR_NAME_DICT = Maps.newHashMap();
private static Map WEEK_DAY_NAME_DICT = Maps.newHashMap();
- private final static int[] DAYS_IN_MONTH = new int[] {0, 31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31};
- private final static int ALLOW_SPACE_MASK = 4 | 64;
- private final static int MAX_DATE_PARTS = 8;
- private final static int YY_PART_YEAR = 70;
+ private static final int[] DAYS_IN_MONTH = new int[] {0, 31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31};
+ private static final int ALLOW_SPACE_MASK = 4 | 64;
+ private static final int MAX_DATE_PARTS = 8;
+ private static final int YY_PART_YEAR = 70;
static {
try {
@@ -148,6 +148,7 @@ public class DateLiteral extends LiteralExpr {
//Regex used to determine if the TIME field exists int date_format
private static final Pattern HAS_TIME_PART = Pattern.compile("^.*[HhIiklrSsTp]+.*$");
+
//Date Literal persist type in meta
private enum DateLiteralType {
DATETIME(0),
@@ -597,7 +598,9 @@ public class DateLiteral extends LiteralExpr {
case 'v': // %v Week (01..53), where Monday is the first day of the week; used with %x
builder.appendWeekOfWeekyear(2);
break;
- case 'x': // %x Year for the week, where Monday is the first day of the week, numeric, four digits; used with %v
+ case 'x':
+ // %x Year for the week, where Monday is the first day of the week,
+ // numeric, four digits; used with %v
builder.appendWeekyear(4, 4);
break;
case 'W': // %W Weekday name (Sunday..Saturday)
@@ -614,9 +617,12 @@ public class DateLiteral extends LiteralExpr {
case 'U': // %U Week (00..53), where Sunday is the first day of the week
case 'u': // %u Week (00..53), where Monday is the first day of the week
case 'V': // %V Week (01..53), where Sunday is the first day of the week; used with %X
- case 'X': // %X Year for the week where Sunday is the first day of the week, numeric, four digits; used with %V
+ case 'X':
+ // %X Year for the week where Sunday is the first day of the week,
+ // numeric, four digits; used with %V
case 'D': // %D Day of the month with English suffix (0th, 1st, 2nd, 3rd, …)
- throw new AnalysisException(String.format("%%%s not supported in date format string", character));
+ throw new AnalysisException(
+ String.format("%%%s not supported in date format string", character));
case '%': // %% A literal "%" character
builder.appendLiteral('%');
break;
@@ -957,7 +963,8 @@ public class DateLiteral extends LiteralExpr {
}
} else if (format.charAt(fp) != ' ') {
if (format.charAt(fp) != value.charAt(vp)) {
- throw new InvalidFormatException("Invalid char: " + value.charAt(vp) + ", expected: " + format.charAt(fp));
+ throw new InvalidFormatException("Invalid char: " + value.charAt(vp)
+ + ", expected: " + format.charAt(fp));
}
fp++;
vp++;
@@ -1053,6 +1060,7 @@ public class DateLiteral extends LiteralExpr {
|| hour > MAX_DATETIME.hour || minute > MAX_DATETIME.minute || second > MAX_DATETIME.second
|| microsecond > MAX_MICROSECOND;
}
+
private boolean checkDate() {
if (month != 0 && day > DAYS_IN_MONTH[((int) month)]) {
if (month == 2 && day == 29 && Year.isLeap(year)) {
@@ -1195,7 +1203,8 @@ public class DateLiteral extends LiteralExpr {
int start = pre;
int tempVal = 0;
boolean scanToDelim = (!isIntervalFormat) && (fieldIdx != 6);
- while (pre < dateStr.length() && Character.isDigit(dateStr.charAt(pre)) && (scanToDelim || fieldLen-- != 0)) {
+ while (pre < dateStr.length() && Character.isDigit(dateStr.charAt(pre))
+ && (scanToDelim || fieldLen-- != 0)) {
tempVal = tempVal * 10 + (dateStr.charAt(pre++) - '0');
}
dateVal[fieldIdx] = tempVal;
diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/DecimalLiteral.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/DecimalLiteral.java
index 87c22b5242..e938a46361 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/analysis/DecimalLiteral.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/DecimalLiteral.java
@@ -107,7 +107,8 @@ public class DecimalLiteral extends LiteralExpr {
@Override
public ByteBuffer getHashValue(PrimitiveType type) {
ByteBuffer buffer;
- // no need to consider the overflow when cast decimal to other type, because this func only be used when querying, not storing.
+ // no need to consider the overflow when cast decimal to other type,
+ // because this func only be used when querying, not storing.
// e.g. For column A with type INT, the data stored certainly no overflow.
switch (type) {
case TINYINT:
diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/DeleteStmt.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/DeleteStmt.java
index 9cfd26eece..465813177b 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/analysis/DeleteStmt.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/DeleteStmt.java
@@ -132,7 +132,8 @@ public class DeleteStmt extends DdlStmt {
int inElementNum = inPredicate.getInElementNum();
int maxAllowedInElementNumOfDelete = Config.max_allowed_in_element_num_of_delete;
if (inElementNum > maxAllowedInElementNumOfDelete) {
- throw new AnalysisException("Element num of in predicate should not be more than " + maxAllowedInElementNumOfDelete);
+ throw new AnalysisException("Element num of in predicate should not be more than "
+ + maxAllowedInElementNumOfDelete);
}
for (int i = 1; i <= inPredicate.getInElementNum(); i++) {
Expr expr = inPredicate.getChild(i);
@@ -142,7 +143,8 @@ public class DeleteStmt extends DdlStmt {
}
deleteConditions.add(inPredicate);
} else {
- throw new AnalysisException("Where clause only supports compound predicate, binary predicate, is_null predicate or in predicate");
+ throw new AnalysisException("Where clause only supports compound predicate,"
+ + " binary predicate, is_null predicate or in predicate");
}
}
diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/DescribeStmt.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/DescribeStmt.java
index a2c5139adb..257ea98c83 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/analysis/DescribeStmt.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/DescribeStmt.java
@@ -169,7 +169,8 @@ public class DescribeStmt extends ShowStmt {
column.getOriginType().toString(),
column.isAllowNull() ? "Yes" : "No",
((Boolean) column.isKey()).toString(),
- column.getDefaultValue() == null ? FeConstants.null_string : column.getDefaultValue(),
+ column.getDefaultValue() == null
+ ? FeConstants.null_string : column.getDefaultValue(),
extraStr,
((Boolean) column.isVisible()).toString()
);
@@ -221,6 +222,7 @@ public class DescribeStmt extends ShowStmt {
public String getTableName() {
return dbTableName.getTbl();
}
+
public String getDb() {
return dbTableName.getDb();
}
diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/DescriptorTable.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/DescriptorTable.java
index f733e3a9a6..00139913d9 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/analysis/DescriptorTable.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/DescriptorTable.java
@@ -41,7 +41,7 @@ import java.util.List;
* them unique ids..
*/
public class DescriptorTable {
- private final static Logger LOG = LogManager.getLogger(DescriptorTable.class);
+ private static final Logger LOG = LogManager.getLogger(DescriptorTable.class);
private final HashMap tupleDescs = new HashMap();
// List of referenced tables with no associated TupleDescriptor to ship to the BE.
diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/DropDbStmt.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/DropDbStmt.java
index 2c01a967c4..43cc9f5b47 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/analysis/DropDbStmt.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/DropDbStmt.java
@@ -61,7 +61,8 @@ public class DropDbStmt extends DdlStmt {
dbName = ClusterNamespace.getFullName(getClusterName(), dbName);
// Don't allowed to drop 'information_schema'
if (dbName.equalsIgnoreCase(ClusterNamespace.getFullName(getClusterName(), InfoSchemaDb.DATABASE_NAME))) {
- ErrorReport.reportAnalysisException(ErrorCode.ERR_DBACCESS_DENIED_ERROR, analyzer.getQualifiedUser(), dbName);
+ ErrorReport.reportAnalysisException(ErrorCode.ERR_DBACCESS_DENIED_ERROR,
+ analyzer.getQualifiedUser(), dbName);
}
if (!Catalog.getCurrentCatalog().getAuth().checkDbPriv(ConnectContext.get(), dbName, PrivPredicate.DROP)) {
diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/ExportStmt.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/ExportStmt.java
index 6df62c42f4..b8f2c2c9be 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/analysis/ExportStmt.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/ExportStmt.java
@@ -54,7 +54,7 @@ import java.util.UUID;
// [PROPERTIES("key"="value")]
// BY BROKER 'broker_name' [( $broker_attrs)]
public class ExportStmt extends StatementBase {
- private final static Logger LOG = LogManager.getLogger(ExportStmt.class);
+ private static final Logger LOG = LogManager.getLogger(ExportStmt.class);
public static final String TABLET_NUMBER_PER_TASK_PROP = "tablet_num_per_task";
public static final String LABEL = "label";
diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/Expr.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/Expr.java
index 5387539254..5c5a6d8c55 100755
--- a/fe/fe-core/src/main/java/org/apache/doris/analysis/Expr.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/Expr.java
@@ -60,7 +60,7 @@ import java.util.Set;
/**
* Root of the expr node hierarchy.
*/
-abstract public class Expr extends TreeNode implements ParseNode, Cloneable, Writable {
+public abstract class Expr extends TreeNode implements ParseNode, Cloneable, Writable {
private static final Logger LOG = LogManager.getLogger(Expr.class);
// Name of the function that needs to be implemented by every Expr that
@@ -70,10 +70,10 @@ abstract public class Expr extends TreeNode implements ParseNode, Cloneabl
// to be used where we can't come up with a better estimate
public static final double DEFAULT_SELECTIVITY = 0.1;
- public final static float FUNCTION_CALL_COST = 10;
+ public static final float FUNCTION_CALL_COST = 10;
// returns true if an Expr is a non-analytic aggregate.
- private final static com.google.common.base.Predicate IS_AGGREGATE_PREDICATE =
+ private static final com.google.common.base.Predicate IS_AGGREGATE_PREDICATE =
new com.google.common.base.Predicate() {
public boolean apply(Expr arg) {
return arg instanceof FunctionCallExpr
@@ -82,7 +82,7 @@ abstract public class Expr extends TreeNode implements ParseNode, Cloneabl
};
// Returns true if an Expr is a NOT CompoundPredicate.
- public final static com.google.common.base.Predicate IS_NOT_PREDICATE =
+ public static final com.google.common.base.Predicate IS_NOT_PREDICATE =
new com.google.common.base.Predicate() {
@Override
public boolean apply(Expr arg) {
@@ -92,7 +92,7 @@ abstract public class Expr extends TreeNode implements ParseNode, Cloneabl
};
// Returns true if an Expr is an OR CompoundPredicate.
- public final static com.google.common.base.Predicate IS_OR_PREDICATE =
+ public static final com.google.common.base.Predicate IS_OR_PREDICATE =
new com.google.common.base.Predicate() {
@Override
public boolean apply(Expr arg) {
@@ -102,7 +102,7 @@ abstract public class Expr extends TreeNode implements ParseNode, Cloneabl
};
// Returns true if an Expr is a scalar subquery
- public final static com.google.common.base.Predicate IS_SCALAR_SUBQUERY =
+ public static final com.google.common.base.Predicate IS_SCALAR_SUBQUERY =
new com.google.common.base.Predicate() {
@Override
public boolean apply(Expr arg) {
@@ -112,7 +112,7 @@ abstract public class Expr extends TreeNode implements ParseNode, Cloneabl
// Returns true if an Expr is an aggregate function that returns non-null on
// an empty set (e.g. count).
- public final static com.google.common.base.Predicate NON_NULL_EMPTY_AGG =
+ public static final com.google.common.base.Predicate NON_NULL_EMPTY_AGG =
new com.google.common.base.Predicate() {
@Override
public boolean apply(Expr arg) {
@@ -121,7 +121,7 @@ abstract public class Expr extends TreeNode implements ParseNode, Cloneabl
};
// Returns true if an Expr is a builtin aggregate function.
- public final static com.google.common.base.Predicate CORRELATED_SUBQUERY_SUPPORT_AGG_FN =
+ public static final com.google.common.base.Predicate CORRELATED_SUBQUERY_SUPPORT_AGG_FN =
new com.google.common.base.Predicate() {
@Override
public boolean apply(Expr arg) {
@@ -139,7 +139,7 @@ abstract public class Expr extends TreeNode implements ParseNode, Cloneabl
};
- public final static com.google.common.base.Predicate IS_TRUE_LITERAL =
+ public static final com.google.common.base.Predicate IS_TRUE_LITERAL =
new com.google.common.base.Predicate() {
@Override
public boolean apply(Expr arg) {
@@ -147,7 +147,7 @@ abstract public class Expr extends TreeNode implements ParseNode, Cloneabl
}
};
- public final static com.google.common.base.Predicate IS_FALSE_LITERAL =
+ public static final com.google.common.base.Predicate IS_FALSE_LITERAL =
new com.google.common.base.Predicate() {
@Override
public boolean apply(Expr arg) {
@@ -155,7 +155,7 @@ abstract public class Expr extends TreeNode implements ParseNode, Cloneabl
}
};
- public final static com.google.common.base.Predicate IS_EQ_BINARY_PREDICATE =
+ public static final com.google.common.base.Predicate IS_EQ_BINARY_PREDICATE =
new com.google.common.base.Predicate() {
@Override
public boolean apply(Expr arg) {
@@ -163,7 +163,7 @@ abstract public class Expr extends TreeNode implements ParseNode, Cloneabl
}
};
- public final static com.google.common.base.Predicate IS_BINARY_PREDICATE =
+ public static final com.google.common.base.Predicate IS_BINARY_PREDICATE =
new com.google.common.base.Predicate() {
@Override
public boolean apply(Expr arg) {
@@ -423,7 +423,7 @@ abstract public class Expr extends TreeNode implements ParseNode, Cloneabl
/**
* Does subclass-specific analysis. Subclasses should override analyzeImpl().
*/
- abstract protected void analyzeImpl(Analyzer analyzer) throws AnalysisException;
+ protected abstract void analyzeImpl(Analyzer analyzer) throws AnalysisException;
/**
* Set the expr to be analyzed and computes isConstant_.
@@ -665,6 +665,7 @@ abstract public class Expr extends TreeNode implements ParseNode, Cloneabl
}
}
}
+
/**
* Returns true if the list contains an aggregate expr.
*/
@@ -828,6 +829,7 @@ abstract public class Expr extends TreeNode implements ParseNode, Cloneabl
child.markAgg();
}
}
+
/**
* Returns the product of the given exprs' number of distinct values or -1 if any of
* the exprs have an invalid number of distinct values.
@@ -1354,7 +1356,8 @@ abstract public class Expr extends TreeNode implements ParseNode, Cloneabl
&& (this.type.isStringType() || this.type.isHllType())) {
return this;
}
- // Preconditions.checkState(PrimitiveType.isImplicitCast(type, targetType), "cast %s to %s", this.type, targetType);
+ // Preconditions.checkState(PrimitiveType.isImplicitCast(type, targetType),
+ // "cast %s to %s", this.type, targetType);
// TODO(zc): use implicit cast
if (!Type.canCastTo(this.type, targetType)) {
throw new AnalysisException("type not match, originType=" + this.type
diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/ExprId.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/ExprId.java
index 79b2fc721e..2045b71367 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/analysis/ExprId.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/ExprId.java
@@ -27,7 +27,7 @@ import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
public class ExprId extends Id {
- private final static Logger LOG = LogManager.getLogger(ExprId.class);
+ private static final Logger LOG = LogManager.getLogger(ExprId.class);
// Construction only allowed via an IdGenerator.
public ExprId(int id) {
diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/ExprSubstitutionMap.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/ExprSubstitutionMap.java
index 46b9caa0fa..966cfa7e0a 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/analysis/ExprSubstitutionMap.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/ExprSubstitutionMap.java
@@ -37,7 +37,7 @@ import java.util.List;
* See Expr.substitute() and related functions for details on the actual substitution.
*/
public final class ExprSubstitutionMap {
- private final static Logger LOG = LoggerFactory.getLogger(ExprSubstitutionMap.class);
+ private static final Logger LOG = LoggerFactory.getLogger(ExprSubstitutionMap.class);
private boolean checkAnalyzed = true;
private List lhs; // left-hand side
diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/ExpressionFunctions.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/ExpressionFunctions.java
index 220fe55299..035cd8a67b 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/analysis/ExpressionFunctions.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/ExpressionFunctions.java
@@ -75,7 +75,8 @@ public enum ExpressionFunctions {
// 2. Not in NonNullResultWithNullParamFunctions
// 3. Has null parameter
if ((fn.getNullableMode() == Function.NullableMode.DEPEND_ON_ARGUMENT
- || Catalog.getCurrentCatalog().isNullResultWithOneNullParamFunction(fn.getFunctionName().getFunction()))
+ || Catalog.getCurrentCatalog().isNullResultWithOneNullParamFunction(
+ fn.getFunctionName().getFunction()))
&& !fn.isUdf()) {
for (Expr e : constExpr.getChildren()) {
if (e instanceof NullLiteral) {
@@ -205,7 +206,8 @@ public enum ExpressionFunctions {
if (argType.isArray()) {
Preconditions.checkArgument(method.getParameterTypes().length == typeIndex + 1);
final List variableLengthExprs = Lists.newArrayList();
- for (int variableLengthArgIndex = typeIndex; variableLengthArgIndex < args.size(); variableLengthArgIndex++) {
+ for (int variableLengthArgIndex = typeIndex;
+ variableLengthArgIndex < args.size(); variableLengthArgIndex++) {
variableLengthExprs.add(args.get(variableLengthArgIndex));
}
LiteralExpr[] variableLengthArgs = createVariableLengthArgs(variableLengthExprs, typeIndex);
diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/FunctionCallExpr.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/FunctionCallExpr.java
index a9ca2504a1..26f55a321f 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/analysis/FunctionCallExpr.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/FunctionCallExpr.java
@@ -466,7 +466,8 @@ public class FunctionCallExpr extends Expr {
if (fnName.getFunction().equalsIgnoreCase("json_object")) {
if ((children.size() & 1) == 1 && (originChildSize == children.size())) {
- throw new AnalysisException("json_object can't be odd parameters, need even parameters: " + this.toSql());
+ throw new AnalysisException("json_object can't be odd parameters, need even parameters: "
+ + this.toSql());
}
String res = parseJsonDataType(true);
if (children.size() == originChildSize) {
@@ -505,7 +506,8 @@ public class FunctionCallExpr extends Expr {
if (children.size() > 2) {
if (!getChild(1).isConstant() || !getChild(2).isConstant()) {
throw new AnalysisException(
- "The default parameter (parameter 2 or parameter 3) of LEAD/LAG must be a constant: " + this.toSql());
+ "The default parameter (parameter 2 or parameter 3) of LEAD/LAG must be a constant: "
+ + this.toSql());
}
uncheckedCastChild(Type.BIGINT, 1);
if (!getChild(2).type.matchesType(getChild(0).type) && !getChild(2).type.matchesType(Type.NULL)) {
@@ -590,7 +592,8 @@ public class FunctionCallExpr extends Expr {
}
Type inputType = getChild(0).getType();
if (!inputType.isBitmapType()) {
- throw new AnalysisException(fnName + " function's argument should be of BITMAP type, but was " + inputType);
+ throw new AnalysisException(fnName
+ + " function's argument should be of BITMAP type, but was " + inputType);
}
return;
}
@@ -601,7 +604,8 @@ public class FunctionCallExpr extends Expr {
}
Type inputType = getChild(0).getType();
if (!inputType.isQuantileStateType()) {
- throw new AnalysisException(fnName + " function's argument should be of QUANTILE_STATE type, but was" + inputType);
+ throw new AnalysisException(fnName
+ + " function's argument should be of QUANTILE_STATE type, but was" + inputType);
}
}
@@ -885,7 +889,8 @@ public class FunctionCallExpr extends Expr {
}
for (int i = 3; i < children.size(); i++) {
if (children.get(i).type != Type.BOOLEAN) {
- throw new AnalysisException("The 4th and subsequent params of " + fnName + " function must be boolean");
+ throw new AnalysisException("The 4th and subsequent params of "
+ + fnName + " function must be boolean");
}
childTypes[i] = children.get(i).type;
}
@@ -919,7 +924,8 @@ public class FunctionCallExpr extends Expr {
if (fn == null) {
if (!analyzer.isUDFAllowed()) {
throw new AnalysisException(
- "Does not support non-builtin functions, or function does not exist: " + this.toSqlImpl());
+ "Does not support non-builtin functions, or function does not exist: "
+ + this.toSqlImpl());
}
String dbName = fnName.analyzeDb(analyzer);
@@ -1081,7 +1087,8 @@ public class FunctionCallExpr extends Expr {
List inputParamsExprs = retExpr.fnParams.exprs();
List parameters = ((AliasFunction) retExpr.fn).getParameters();
Preconditions.checkArgument(inputParamsExprs.size() == parameters.size(),
- "Alias function [" + retExpr.fn.getFunctionName().getFunction() + "] args number is not equal to it's definition");
+ "Alias function [" + retExpr.fn.getFunctionName().getFunction()
+ + "] args number is not equal to it's definition");
List oriParamsExprs = oriExpr.fnParams.exprs();
// replace origin function params exprs' with input params expr depending on parameter name
@@ -1108,7 +1115,8 @@ public class FunctionCallExpr extends Expr {
* @return
* @throws AnalysisException
*/
- private Expr replaceParams(List parameters, List inputParamsExprs, Expr oriExpr) throws AnalysisException {
+ private Expr replaceParams(List parameters, List inputParamsExprs, Expr oriExpr)
+ throws AnalysisException {
for (int i = 0; i < oriExpr.getChildren().size(); i++) {
Expr retExpr = replaceParams(parameters, inputParamsExprs, oriExpr.getChild(i));
oriExpr.setChild(i, retExpr);
diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/FunctionParams.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/FunctionParams.java
index 8234e25201..32cfba0351 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/analysis/FunctionParams.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/FunctionParams.java
@@ -58,7 +58,7 @@ public class FunctionParams implements Writable {
isDistinct = false;
}
- static public FunctionParams createStarParam() {
+ public static FunctionParams createStarParam() {
return new FunctionParams();
}
diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/GrantStmt.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/GrantStmt.java
index 18621f19b2..4e849e7c61 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/analysis/GrantStmt.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/GrantStmt.java
@@ -59,7 +59,8 @@ public class GrantStmt extends DdlStmt {
this.privileges = privs.toPrivilegeList();
}
- public GrantStmt(UserIdentity userIdent, String role, ResourcePattern resourcePattern, List privileges) {
+ public GrantStmt(UserIdentity userIdent, String role,
+ ResourcePattern resourcePattern, List privileges) {
this.userIdent = userIdent;
this.role = role;
this.tblPattern = null;
@@ -168,12 +169,14 @@ public class GrantStmt extends DdlStmt {
ErrorReport.reportAnalysisException(ErrorCode.ERR_SPECIFIC_ACCESS_DENIED_ERROR, "GRANT");
}
} else if (tblPattern.getPrivLevel() == PrivLevel.DATABASE) {
- if (!Catalog.getCurrentCatalog().getAuth().checkDbPriv(ConnectContext.get(), tblPattern.getQualifiedDb(), PrivPredicate.GRANT)) {
+ if (!Catalog.getCurrentCatalog().getAuth().checkDbPriv(ConnectContext.get(),
+ tblPattern.getQualifiedDb(), PrivPredicate.GRANT)) {
ErrorReport.reportAnalysisException(ErrorCode.ERR_SPECIFIC_ACCESS_DENIED_ERROR, "GRANT");
}
} else {
// table level
- if (!Catalog.getCurrentCatalog().getAuth().checkTblPriv(ConnectContext.get(), tblPattern.getQualifiedDb(), tblPattern.getTbl(), PrivPredicate.GRANT)) {
+ if (!Catalog.getCurrentCatalog().getAuth().checkTblPriv(ConnectContext.get(),
+ tblPattern.getQualifiedDb(), tblPattern.getTbl(), PrivPredicate.GRANT)) {
ErrorReport.reportAnalysisException(ErrorCode.ERR_SPECIFIC_ACCESS_DENIED_ERROR, "GRANT");
}
}
@@ -204,7 +207,8 @@ public class GrantStmt extends DdlStmt {
ErrorReport.reportAnalysisException(ErrorCode.ERR_SPECIFIC_ACCESS_DENIED_ERROR, "GRANT");
}
} else {
- if (!Catalog.getCurrentCatalog().getAuth().checkResourcePriv(ConnectContext.get(), resourcePattern.getResourceName(), PrivPredicate.GRANT)) {
+ if (!Catalog.getCurrentCatalog().getAuth().checkResourcePriv(ConnectContext.get(),
+ resourcePattern.getResourceName(), PrivPredicate.GRANT)) {
ErrorReport.reportAnalysisException(ErrorCode.ERR_SPECIFIC_ACCESS_DENIED_ERROR, "GRANT");
}
}
diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/GroupByClause.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/GroupByClause.java
index d74218c69e..f8d490977e 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/analysis/GroupByClause.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/GroupByClause.java
@@ -45,10 +45,10 @@ import java.util.stream.Collectors;
* In this class we produce the rule of generating rows base on the group by clause.
*/
public class GroupByClause implements ParseNode {
- private final static Logger LOG = LogManager.getLogger(GroupByClause.class);
+ private static final Logger LOG = LogManager.getLogger(GroupByClause.class);
// max num of distinct sets in grouping sets clause
- private final static int MAX_GROUPING_SETS_NUM = 64;
+ private static final int MAX_GROUPING_SETS_NUM = 64;
// max num of distinct expressions
private boolean analyzed = false;
private boolean exprGenerated = false;
diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/InlineViewRef.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/InlineViewRef.java
index 666b143305..f0f9c421b8 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/analysis/InlineViewRef.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/InlineViewRef.java
@@ -288,7 +288,8 @@ public class InlineViewRef extends TableRef {
false, null, selectItemExpr.isNullable(),
null, ""));
}
- InlineView inlineView = (view != null) ? new InlineView(view, columnList) : new InlineView(getExplicitAlias(), columnList);
+ InlineView inlineView = (view != null) ? new InlineView(view, columnList)
+ : new InlineView(getExplicitAlias(), columnList);
// Create the non-materialized tuple and set the fake table in it.
TupleDescriptor result = analyzer.getDescTbl().createTupleDescriptor();
diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/InsertStmt.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/InsertStmt.java
index 579d9d1512..62a8a9887e 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/analysis/InsertStmt.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/InsertStmt.java
@@ -143,7 +143,8 @@ public class InsertStmt extends DdlStmt {
isUserSpecifiedLabel = true;
}
- this.isValuesOrConstantSelect = (queryStmt instanceof SelectStmt && ((SelectStmt) queryStmt).getTableRefs().isEmpty());
+ this.isValuesOrConstantSelect = (queryStmt instanceof SelectStmt
+ && ((SelectStmt) queryStmt).getTableRefs().isEmpty());
}
// Ctor for CreateTableAsSelectStmt
@@ -187,7 +188,8 @@ public class InsertStmt extends DdlStmt {
return tblName.getTbl();
}
- public void getTables(Analyzer analyzer, Map tableMap, Set parentViewNameSet) throws AnalysisException {
+ public void getTables(Analyzer analyzer, Map tableMap, Set parentViewNameSet)
+ throws AnalysisException {
// get dbs of statement
queryStmt.getTables(analyzer, tableMap, parentViewNameSet);
tblName.analyze(analyzer);
@@ -269,8 +271,8 @@ public class InsertStmt extends DdlStmt {
if (!Catalog.getCurrentCatalog().getAuth().checkTblPriv(ConnectContext.get(), tblName.getDb(),
tblName.getTbl(), PrivPredicate.LOAD)) {
ErrorReport.reportAnalysisException(ErrorCode.ERR_TABLEACCESS_DENIED_ERROR, "LOAD",
- ConnectContext.get().getQualifiedUser(),
- ConnectContext.get().getRemoteIP(), tblName.getDb() + ": " + tblName.getTbl());
+ ConnectContext.get().getQualifiedUser(),
+ ConnectContext.get().getRemoteIP(), tblName.getDb() + ": " + tblName.getTbl());
}
// check partition
@@ -457,7 +459,8 @@ public class InsertStmt extends DdlStmt {
if (column.isNameWithPrefix(CreateMaterializedViewStmt.MATERIALIZED_VIEW_NAME_PREFIX)) {
SlotRef refColumn = column.getRefColumn();
if (refColumn == null) {
- ErrorReport.reportAnalysisException(ErrorCode.ERR_BAD_FIELD_ERROR, column.getName(), targetTable.getName());
+ ErrorReport.reportAnalysisException(ErrorCode.ERR_BAD_FIELD_ERROR,
+ column.getName(), targetTable.getName());
}
String origName = refColumn.getColumnName();
for (int originColumnIdx = 0; originColumnIdx < targetColumns.size(); originColumnIdx++) {
@@ -526,7 +529,8 @@ public class InsertStmt extends DdlStmt {
ExprSubstitutionMap smap = new ExprSubstitutionMap();
smap.getLhs().add(entry.second.getRefColumn());
smap.getRhs().add(queryStmt.getResultExprs().get(entry.first));
- Expr e = Expr.substituteList(Lists.newArrayList(entry.second.getDefineExpr()), smap, analyzer, false).get(0);
+ Expr e = Expr.substituteList(Lists.newArrayList(entry.second.getDefineExpr()),
+ smap, analyzer, false).get(0);
queryStmt.getResultExprs().add(e);
}
}
@@ -551,7 +555,8 @@ public class InsertStmt extends DdlStmt {
ExprSubstitutionMap smap = new ExprSubstitutionMap();
smap.getLhs().add(entry.second.getRefColumn());
smap.getRhs().add(queryStmt.getResultExprs().get(entry.first));
- Expr e = Expr.substituteList(Lists.newArrayList(entry.second.getDefineExpr()), smap, analyzer, false).get(0);
+ Expr e = Expr.substituteList(Lists.newArrayList(entry.second.getDefineExpr()),
+ smap, analyzer, false).get(0);
queryStmt.getBaseTblResultExprs().add(e);
}
}
@@ -605,7 +610,8 @@ public class InsertStmt extends DdlStmt {
ExprSubstitutionMap smap = new ExprSubstitutionMap();
smap.getLhs().add(entry.second.getRefColumn());
smap.getRhs().add(extentedRow.get(entry.first));
- extentedRow.add(Expr.substituteList(Lists.newArrayList(entry.second.getDefineExpr()), smap, analyzer, false).get(0));
+ extentedRow.add(Expr.substituteList(Lists.newArrayList(entry.second.getDefineExpr()),
+ smap, analyzer, false).get(0));
}
}
}
@@ -620,7 +626,8 @@ public class InsertStmt extends DdlStmt {
if (expr instanceof DefaultValueExpr) {
if (targetColumns.get(i).getDefaultValue() == null) {
- throw new AnalysisException("Column has no default value, column=" + targetColumns.get(i).getName());
+ throw new AnalysisException("Column has no default value, column="
+ + targetColumns.get(i).getName());
}
expr = new StringLiteral(targetColumns.get(i).getDefaultValue());
}
@@ -727,7 +734,8 @@ public class InsertStmt extends DdlStmt {
if (!isExplain() && targetTable instanceof OlapTable) {
((OlapTableSink) dataSink).complete();
// add table indexes to transaction state
- TransactionState txnState = Catalog.getCurrentGlobalTransactionMgr().getTransactionState(db.getId(), transactionId);
+ TransactionState txnState = Catalog.getCurrentGlobalTransactionMgr()
+ .getTransactionState(db.getId(), transactionId);
if (txnState == null) {
throw new DdlException("txn does not exist: " + transactionId);
}
diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/IsNullPredicate.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/IsNullPredicate.java
index a196c89f55..8c17ff9af7 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/analysis/IsNullPredicate.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/IsNullPredicate.java
@@ -143,6 +143,7 @@ public class IsNullPredicate extends Predicate {
public boolean isNullable() {
return false;
}
+
/**
* fix issue 6390
*/
diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/LargeIntLiteral.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/LargeIntLiteral.java
index 0e3fd0c7a8..33f13c74e1 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/analysis/LargeIntLiteral.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/LargeIntLiteral.java
@@ -39,7 +39,7 @@ import java.util.Objects;
// large int for the num that native types can not
public class LargeIntLiteral extends LiteralExpr {
- private final static Logger LOG = LogManager.getLogger(LargeIntLiteral.class);
+ private static final Logger LOG = LogManager.getLogger(LargeIntLiteral.class);
// -2^127
public static final BigInteger LARGE_INT_MIN = new BigInteger("-170141183460469231731687303715884105728");
diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/LoadStmt.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/LoadStmt.java
index 1c12ad69e5..469f4d4523 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/analysis/LoadStmt.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/LoadStmt.java
@@ -124,7 +124,7 @@ public class LoadStmt extends DdlStmt {
private EtlJobType etlJobType = EtlJobType.UNKNOWN;
- public final static ImmutableMap PROPERTIES_MAP = new ImmutableMap.Builder()
+ public static final ImmutableMap PROPERTIES_MAP = new ImmutableMap.Builder()
.put(TIMEOUT_PROPERTY, new Function() {
@Override
public @Nullable Long apply(@Nullable String s) {
@@ -337,7 +337,8 @@ public class LoadStmt extends DdlStmt {
}
Database db = Catalog.getCurrentCatalog().getDbOrAnalysisException(label.getDbName());
OlapTable table = db.getOlapTableOrAnalysisException(dataDescription.getTableName());
- if (dataDescription.getMergeType() != LoadTask.MergeType.APPEND && table.getKeysType() != KeysType.UNIQUE_KEYS) {
+ if (dataDescription.getMergeType() != LoadTask.MergeType.APPEND
+ && table.getKeysType() != KeysType.UNIQUE_KEYS) {
throw new AnalysisException("load by MERGE or DELETE is only supported in unique tables.");
}
if (dataDescription.getMergeType() != LoadTask.MergeType.APPEND && !table.hasDeleteSign()) {
diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/LockTable.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/LockTable.java
index 07b51caf1b..4fbaa33651 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/analysis/LockTable.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/LockTable.java
@@ -34,6 +34,7 @@ public class LockTable {
return desc;
}
}
+
private TableName tableName;
private String alias;
private LockType lockType;
diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/MVColumnBitmapUnionPattern.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/MVColumnBitmapUnionPattern.java
index 45bf9948f8..8dabace3a1 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/analysis/MVColumnBitmapUnionPattern.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/MVColumnBitmapUnionPattern.java
@@ -61,7 +61,8 @@ public class MVColumnBitmapUnionPattern implements MVColumnPattern {
@Override
public String toString() {
- return FunctionSet.BITMAP_UNION + "(" + FunctionSet.TO_BITMAP + "(column)), type of column could not be integer. "
+ return FunctionSet.BITMAP_UNION + "(" + FunctionSet.TO_BITMAP
+ + "(column)), type of column could not be integer. "
+ "Or " + FunctionSet.BITMAP_UNION + "(bitmap_column) in agg table";
}
}
diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/ModifyTablePropertiesClause.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/ModifyTablePropertiesClause.java
index 37b50fa73e..d91b770ec9 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/analysis/ModifyTablePropertiesClause.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/ModifyTablePropertiesClause.java
@@ -82,7 +82,8 @@ public class ModifyTablePropertiesClause extends AlterTableClause {
} else if (properties.containsKey("default." + PropertyAnalyzer.PROPERTIES_REPLICATION_NUM)
|| properties.containsKey("default." + PropertyAnalyzer.PROPERTIES_REPLICATION_ALLOCATION)) {
ReplicaAllocation replicaAlloc = PropertyAnalyzer.analyzeReplicaAllocation(properties, "default");
- properties.put("default." + PropertyAnalyzer.PROPERTIES_REPLICATION_ALLOCATION, replicaAlloc.toCreateStmt());
+ properties.put("default." + PropertyAnalyzer.PROPERTIES_REPLICATION_ALLOCATION,
+ replicaAlloc.toCreateStmt());
} else if (properties.containsKey(PropertyAnalyzer.PROPERTIES_INMEMORY)) {
this.needTableStable = false;
this.opType = AlterOpType.MODIFY_TABLE_PROPERTY_SYNC;
diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/OpcodeRegistry.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/OpcodeRegistry.java
deleted file mode 100644
index 8b3f9f8460..0000000000
--- a/fe/fe-core/src/main/java/org/apache/doris/analysis/OpcodeRegistry.java
+++ /dev/null
@@ -1,314 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements. See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership. The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License. You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing,
-// software distributed under the License is distributed on an
-// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-// KIND, either express or implied. See the License for the
-// specific language governing permissions and limitations
-// under the License.
-
-package org.apache.doris.analysis;
-
-/**
- * The OpcodeRegistry provides a mapping between function signatures and opcodes. The
- * supported functions are code-gen'ed and added to the registry with an assigned opcode.
- * The opcode is shared with the backend. The frontend can use the registry to look up
- * a function's opcode.
- *
- * The OpcodeRegistry also contains a mapping between function names (as strings) to
- * operators.
- *
- * The OpcodeRegistry is a singleton.
- *
- * TODO: The opcode registry should be versioned in the FE/BE.
- */
-public class OpcodeRegistry {
-//
-// private final static Logger LOG = LogManager.getLogger(OpcodeRegistry.class);
-// private static OpcodeRegistry instance = new OpcodeRegistry();
-// /**
-// * This is a mapping of Operator,#args to signatures with a fixed number of arguments.
-// * The signature is defined by the operator enum and the arguments
-// * and is a one to one mapping to opcodes.
-// * The map is structured this way to more efficiently look for signature matches.
-// * Signatures that have the same number of arguments have a potential to be matches
-// * by allowing types to be implicitly cast.
-// * Functions with a variable number of arguments are put into the varArgOperations map.
-// */
-// private final Map, List> operations;
-// /**
-// * This is a mapping of Operator,varArgType to signatures of vararg functions only.
-// * varArgType must be a maximum-resolution type.
-// * We use a separate map to be able to support multiple vararg signatures for the same
-// * FunctionOperator.
-// * Contains a special entry mapping from Operator,NULL_TYPE to signatures for each
-// * Operator to correctly match varag functions when all args are NULL.
-// * Limitations: Since we do not consider the number of arguments, each FunctionOperator
-// * is limited to having one vararg signature per maximum-resolution PrimitiveType.
-// * For example, one can have two signatures func(float, int ...) and func(string ...),
-// * but not func(float, int ...) and func (int ...).
-// */
-// private final Map, List>
-// varArgOperations;
-// /**
-// * This contains a mapping of function names to a FunctionOperator enum. This is used
-// * by FunctionCallExpr to go from the parser input to function opcodes.
-// * This is a many to one mapping (i.e. substr and substring both map to the same
-// * operation).
-// * The mappings are filled in in FunctionRegistry.java which is auto-generated.
-// */
-// private final HashMap functionNameMap;
-//
-// private final HashMap> funcByOp;
-//
-// // Singleton interface, don't call the constructor
-// private OpcodeRegistry() {
-// operations = Maps.newHashMap();
-// varArgOperations = Maps.newHashMap();
-// functionNameMap = Maps.newHashMap();
-// funcByOp = Maps.newHashMap();
-//
-// // Add all the function signatures to the registry and the function name(string)
-// // to FunctionOperator mapping
-// FunctionRegistry.InitFunctions(this);
-// }
-//
-// // Singleton interface
-// public static OpcodeRegistry instance() {
-// return instance;
-// }
-//
-// /**
-// * Static utility functions
-// */
-// public static boolean isBitwiseOperation(FunctionOperator operator) {
-// return operator == FunctionOperator.BITAND || operator == FunctionOperator.BITNOT ||
-// operator == FunctionOperator.BITOR || operator == FunctionOperator.BITXOR;
-// }
-//
-// /**
-// * Returns the set of function names.
-// *
-// * @return
-// */
-// public Set getFunctionNames() {
-// return functionNameMap.keySet();
-// }
-//
-// /**
-// * Returns the function operator enum. The lookup is case insensitive.
-// * (i.e. "Substring" --> TExprOperator.STRING_SUBSTR).
-// * Returns INVALID_OP is that function name is unknown.
-// */
-// public FunctionOperator getFunctionOperator(String fnName) {
-// String lookup = fnName.toLowerCase();
-// if (functionNameMap.containsKey(lookup)) {
-// return functionNameMap.get(lookup);
-// }
-// return FunctionOperator.INVALID_OPERATOR;
-// }
-//
-// /**
-// * Query for a function in the registry, specifying the operation, 'op', the arguments.
-// * If there is no matching signature, null will be returned.
-// * If there is a match, the matching signature will be returned.
-// * If 'allowImplicitCasts' is true the matching signature does not have to match the
-// * input identically, implicit type promotion is allowed.
-// */
-// public BuiltinFunction getFunctionInfo(
-// FunctionOperator op, boolean allowImplicitCasts,
-// boolean vectorFunction, PrimitiveType... argTypes) {
-// Pair lookup = Pair.create(op, argTypes.length);
-// List> varArgMatchTypes = null;
-// if (argTypes.length > 0) {
-// Set maxResolutionTypes = getMaxResolutionTypes(argTypes);
-// Preconditions.checkNotNull(maxResolutionTypes);
-// varArgMatchTypes = Lists.newArrayList();
-// for (PrimitiveType maxResolutionType : maxResolutionTypes) {
-// varArgMatchTypes.add(Pair.create(op, maxResolutionType));
-// }
-// }
-// List functions = null;
-// if (operations.containsKey(lookup)) {
-// functions = operations.get(lookup);
-// } else if (!varArgMatchTypes.isEmpty()) {
-// functions = Lists.newArrayList();
-// List matchedFunctions = null;
-// for (Pair varArgsMatchType : varArgMatchTypes) {
-// matchedFunctions = varArgOperations.get(varArgsMatchType);
-// if (matchedFunctions != null) {
-// functions.addAll(matchedFunctions);
-// }
-// }
-// }
-//
-// if (functions == null || functions.isEmpty()) {
-// // may be we can find from funByOp
-// if (funcByOp.containsKey(op)) {
-// functions = funcByOp.get(op);
-// } else {
-// return null;
-// }
-// }
-// Type[] args = new Type[argTypes.length];
-// int i = 0;
-// for (PrimitiveType type : argTypes) {
-// args[i] = Type.fromPrimitiveType(type);
-// i ++;
-// }
-// BuiltinFunction search = new BuiltinFunction(op, args);
-//
-// BuiltinFunction compatibleMatch = null;
-// List compatibleMatchFunctions = Lists.newArrayList();
-// // We firstly choose functions using IS_SUBTYPE(only check cast-method is implemented),
-// // if more than one functions are found, give priority to the assign-copatible one.
-// for (BuiltinFunction function : functions) {
-// if (function.compare(search, Function.CompareMode.IS_INDISTINGUISHABLE)) {
-// if (vectorFunction == function.vectorFunction) {
-// return function;
-// }
-// } else if (allowImplicitCasts
-// && function.compare(search, Function.CompareMode.IS_SUPERTYPE_OF)) {
-// if (vectorFunction == function.vectorFunction) {
-// compatibleMatchFunctions.add(function);
-// }
-// }
-// }
-//
-// // If there are many compatible functions, we priority to choose the non-loss-precision one.
-// for (BuiltinFunction function : compatibleMatchFunctions) {
-// if (function.compare(search, Function.CompareMode.IS_SUPERTYPE_OF)) {
-// compatibleMatch = function;
-// } else {
-// LOG.info(" false {} {}", function.getReturnType(), function.getArgs());
-// }
-// }
-// if (compatibleMatch == null && compatibleMatchFunctions.size() > 0) {
-// compatibleMatch = compatibleMatchFunctions.get(0);
-// }
-//
-// return compatibleMatch;
-// }
-//
-// /**
-// * Returns the max resolution type for each argType that is not a NULL_TYPE. If all
-// * argument types are NULL_TYPE then a set will be returned containing NULL_TYPE.
-// */
-// private Set getMaxResolutionTypes(PrimitiveType[] argTypes) {
-// Set maxResolutionTypes = Sets.newHashSet();
-// for (int i = 0; i < argTypes.length; ++i) {
-// if (!argTypes[i].isNull()) {
-// maxResolutionTypes.add(argTypes[i].getMaxResolutionType());
-// }
-// }
-// if (maxResolutionTypes.isEmpty()) {
-// maxResolutionTypes.add(PrimitiveType.NULL_TYPE);
-// }
-// return maxResolutionTypes;
-// }
-//
-// /**
-// * Add a function with the specified opcode/signature to the registry.
-// */
-//
-// public boolean add(boolean udfInterface, boolean vectorFunction, FunctionOperator op,
-// TExprOpcode opcode, boolean varArgs, PrimitiveType retType, PrimitiveType... args) {
-// List functions;
-// Pair lookup = Pair.create(op, args.length);
-// // Take the last argument's type as the vararg type.
-// Pair varArgsLookup = null;
-// // Special signature for vararg functions to handle matching when all args are NULL.
-// Pair varArgsNullLookup = null;
-// Preconditions.checkArgument((varArgs) ? args.length > 0 : true);
-// if (varArgs && args.length > 0) {
-// varArgsLookup = Pair.create(op, args[args.length - 1].getMaxResolutionType());
-// varArgsNullLookup = Pair.create(op, PrimitiveType.NULL_TYPE);
-// }
-// if (operations.containsKey(lookup)) {
-// functions = operations.get(lookup);
-// } else if (varArgsLookup != null && varArgOperations.containsKey(varArgsLookup)) {
-// functions = varArgOperations.get(varArgsLookup);
-// } else {
-// functions = new ArrayList();
-// if (varArgs) {
-// varArgOperations.put(varArgsLookup, functions);
-// varArgOperations.put(varArgsNullLookup, functions);
-// } else {
-// operations.put(lookup, functions);
-// }
-// }
-//
-// Type[] argsType = new Type[args.length];
-// int i = 0;
-// for (PrimitiveType type : args) {
-// argsType[i] = Type.fromPrimitiveType(type);
-// i ++;
-// }
-//
-// BuiltinFunction function =
-// new BuiltinFunction(udfInterface, vectorFunction, opcode, op, varArgs, Type.fromPrimitiveType(retType), argsType);
-// if (functions.contains(function)) {
-// LOG.error("OpcodeRegistry: Function already exists: " + opcode);
-// return false;
-// }
-// functions.add(function);
-//
-// // add to op map
-// if (funcByOp.containsKey(op)) {
-// functions = funcByOp.get(op);
-// } else {
-// functions = Lists.newArrayList();
-// funcByOp.put(op, functions);
-// }
-// functions.add(function);
-// return true;
-// }
-//
-// public boolean addFunctionMapping(String functionName, FunctionOperator op) {
-// if (functionNameMap.containsKey(functionName)) {
-// LOG.error("OpcodeRegistry: Function mapping already exists: " + functionName);
-// return false;
-// }
-// functionNameMap.put(functionName, op);
-// return true;
-// }
-//
-// /**
-// * Contains all the information about a builtin function.
-// * TODO: merge with Function and Udf
-// */
-// public static class BuiltinFunction extends Function {
-// // If true, this builtin is implemented against the Udf interface.
-// public final boolean udfInterface;
-// public final boolean vectorFunction;
-// public TExprOpcode opcode;
-// public FunctionOperator operator;
-//
-// // Constructor for searching, specifying the op and arguments
-// public BuiltinFunction(FunctionOperator operator, Type[] args) {
-// super(new FunctionName(operator.toString()), args, Type.INVALID, false);
-// this.operator = operator;
-// this.udfInterface = false;
-// this.vectorFunction = false;
-// this.setBinaryType(TFunctionBinaryType.BUILTIN);
-// }
-//
-// private BuiltinFunction(boolean udfInterface, boolean vectorFunction, TExprOpcode opcode,
-// FunctionOperator operator, boolean varArgs, Type ret, Type[] args) {
-// super(new FunctionName(opcode.toString()), args, ret, varArgs);
-// this.operator = operator;
-// this.opcode = opcode;
-// this.udfInterface = udfInterface;
-// this.vectorFunction = vectorFunction;
-// this.setBinaryType(TFunctionBinaryType.BUILTIN);
-// }
-// }
-}
diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/OrderByElement.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/OrderByElement.java
index c9d96ed043..39b8bd328d 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/analysis/OrderByElement.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/OrderByElement.java
@@ -60,11 +60,13 @@ public class OrderByElement {
public Boolean getNullsFirstParam() {
return nullsFirstParam;
}
+
public OrderByElement clone() {
OrderByElement clone = new OrderByElement(
expr.clone(), isAsc, nullsFirstParam);
return clone;
}
+
/**
* Returns a new list of OrderByElements with the same (cloned) expressions but the
* ordering direction reversed (asc becomes desc, nulls first becomes nulls last, etc.)
@@ -82,6 +84,7 @@ public class OrderByElement {
return result;
}
+
/**
* Extracts the order-by exprs from the list of order-by elements and returns them.
*/
@@ -111,6 +114,7 @@ public class OrderByElement {
return result;
}
+
public String toSql() {
StringBuilder strBuilder = new StringBuilder();
strBuilder.append(expr.toSql());
@@ -167,6 +171,7 @@ public class OrderByElement {
OrderByElement o = (OrderByElement) obj;
return expr.equals(o.expr) && isAsc == o.isAsc && nullsFirstParam == o.nullsFirstParam;
}
+
/**
* Compute nullsFirst.
*
diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/OutFileClause.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/OutFileClause.java
index 8443949d35..7f8b1436a0 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/analysis/OutFileClause.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/OutFileClause.java
@@ -226,8 +226,8 @@ public class OutFileClause {
switch (resultType.getPrimitiveType()) {
case BOOLEAN:
if (!type.equals("boolean")) {
- throw new AnalysisException("project field type is BOOLEAN, should use boolean, but the type of column "
- + i + " is " + type);
+ throw new AnalysisException("project field type is BOOLEAN, should use boolean,"
+ + " but the type of column " + i + " is " + type);
}
break;
case TINYINT:
@@ -248,14 +248,14 @@ public class OutFileClause {
break;
case FLOAT:
if (!type.equals("float")) {
- throw new AnalysisException("project field type is FLOAT, should use float, but the definition type of column "
- + i + " is " + type);
+ throw new AnalysisException("project field type is FLOAT, should use float,"
+ + " but the definition type of column " + i + " is " + type);
}
break;
case DOUBLE:
if (!type.equals("double")) {
- throw new AnalysisException("project field type is DOUBLE, should use double, but the definition type of column "
- + i + " is " + type);
+ throw new AnalysisException("project field type is DOUBLE, should use double,"
+ + " but the definition type of column " + i + " is " + type);
}
break;
case CHAR:
@@ -263,23 +263,26 @@ public class OutFileClause {
case STRING:
case DECIMALV2:
if (!type.equals("byte_array")) {
- throw new AnalysisException("project field type is CHAR/VARCHAR/STRING/DECIMAL, should use byte_array, "
- + "but the definition type of column " + i + " is " + type);
+ throw new AnalysisException("project field type is CHAR/VARCHAR/STRING/DECIMAL,"
+ + " should use byte_array, but the definition type of column " + i + " is " + type);
}
break;
case HLL:
case BITMAP:
- if (ConnectContext.get() != null && ConnectContext.get().getSessionVariable().isReturnObjectDataAsBinary()) {
+ if (ConnectContext.get() != null && ConnectContext.get()
+ .getSessionVariable().isReturnObjectDataAsBinary()) {
if (!type.equals("byte_array")) {
throw new AnalysisException("project field type is HLL/BITMAP, should use byte_array, "
+ "but the definition type of column " + i + " is " + type);
}
} else {
- throw new AnalysisException("Parquet format does not support column type: " + resultType.getPrimitiveType());
+ throw new AnalysisException("Parquet format does not support column type: "
+ + resultType.getPrimitiveType());
}
break;
default:
- throw new AnalysisException("Parquet format does not support column type: " + resultType.getPrimitiveType());
+ throw new AnalysisException("Parquet format does not support column type: "
+ + resultType.getPrimitiveType());
}
}
}
@@ -318,12 +321,14 @@ public class OutFileClause {
break;
case HLL:
case BITMAP:
- if (ConnectContext.get() != null && ConnectContext.get().getSessionVariable().isReturnObjectDataAsBinary()) {
+ if (ConnectContext.get() != null && ConnectContext.get()
+ .getSessionVariable().isReturnObjectDataAsBinary()) {
column.add("byte_array");
}
break;
default:
- throw new AnalysisException("currently parquet do not support column type: " + expr.getType().getPrimitiveType());
+ throw new AnalysisException("currently parquet do not support column type: "
+ + expr.getType().getPrimitiveType());
}
column.add("col" + i);
this.schema.add(column);
@@ -338,7 +343,8 @@ public class OutFileClause {
if (filePath.startsWith(LOCAL_FILE_PREFIX)) {
if (!Config.enable_outfile_to_local) {
throw new AnalysisException("Exporting results to local disk is not allowed."
- + " To enable this feature, you need to add `enable_outfile_to_local=true` in fe.conf and restart FE");
+ + " To enable this feature, you need to add `enable_outfile_to_local=true`"
+ + " in fe.conf and restart FE");
}
isLocalOutput = true;
filePath = filePath.substring(LOCAL_FILE_PREFIX.length() - 1); // leave last '/'
@@ -433,11 +439,12 @@ public class OutFileClause {
brokerProps.put(entry.getKey(), entry.getValue());
processedPropKeys.add(entry.getKey());
} else if (entry.getKey().contains(BrokerUtil.HADOOP_FS_NAME)
- && storageType == StorageBackend.StorageType.HDFS) {
+ && storageType == StorageBackend.StorageType.HDFS) {
brokerProps.put(entry.getKey(), entry.getValue());
processedPropKeys.add(entry.getKey());
- } else if ((entry.getKey().startsWith(HADOOP_FS_PROP_PREFIX) || entry.getKey().startsWith(HADOOP_PROP_PREFIX))
- && storageType == StorageBackend.StorageType.HDFS) {
+ } else if ((entry.getKey().startsWith(HADOOP_FS_PROP_PREFIX)
+ || entry.getKey().startsWith(HADOOP_PROP_PREFIX))
+ && storageType == StorageBackend.StorageType.HDFS) {
brokerProps.put(entry.getKey(), entry.getValue());
processedPropKeys.add(entry.getKey());
}
diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/Predicate.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/Predicate.java
index c34e9f7728..1c2f87b4f5 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/analysis/Predicate.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/Predicate.java
@@ -119,14 +119,15 @@ public abstract class Predicate extends Expr {
Preconditions.checkState(right != null);
// ATTN(cmy): Usually, the BinaryPredicate in the query will be rewritten through ExprRewriteRule,
- // and all SingleColumnPredicate will be rewritten as "column on the left and the constant on the right".
+ // and all SingleColumnPredicate will be rewritten as "column on the left and the constant on the right"
// So usually the right child is constant.
//
// But if there is a subquery in where clause, the planner will equal the subquery to join.
// During the equal, some auxiliary BinaryPredicate will be automatically generated,
// and these BinaryPredicates will not go through ExprRewriteRule.
// As a result, these BinaryPredicates may be as "column on the right and the constant on the left".
- // Example can be found in QueryPlanTest.java -> testJoinPredicateTransitivityWithSubqueryInWhereClause().
+ // Example can be found in QueryPlanTest.java
+ // -> testJoinPredicateTransitivityWithSubqueryInWhereClause().
//
// Because our current planner implementation is very error-prone, so when this happens,
// we simply assume that these kind of BinaryPredicates cannot be pushed down,
diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/QueryStmt.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/QueryStmt.java
index 5ca6044214..9e79548e6d 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/analysis/QueryStmt.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/QueryStmt.java
@@ -51,7 +51,7 @@ import java.util.stream.Collectors;
* analysis of the ORDER BY and LIMIT clauses.
*/
public abstract class QueryStmt extends StatementBase {
- private final static Logger LOG = LogManager.getLogger(QueryStmt.class);
+ private static final Logger LOG = LogManager.getLogger(QueryStmt.class);
/////////////////////////////////////////
// BEGIN: Members that need to be reset()
@@ -455,7 +455,8 @@ public abstract class QueryStmt extends StatementBase {
return resultExprs.get((int) pos - 1).clone();
}
- public void getWithClauseTables(Analyzer analyzer, Map tableMap, Set parentViewNameSet) throws AnalysisException {
+ public void getWithClauseTables(Analyzer analyzer, Map tableMap,
+ Set parentViewNameSet) throws AnalysisException {
if (withClause != null) {
withClause.getTables(analyzer, tableMap, parentViewNameSet);
}
@@ -532,8 +533,10 @@ public abstract class QueryStmt extends StatementBase {
// "select a.siteid, b.citycode, a.siteid from (select siteid, citycode from tmp) a " +
// "left join (select siteid, citycode from tmp) b on a.siteid = b.siteid;";
// tmp in child stmt "(select siteid, citycode from tmp)" do not contain with_Clause
- // so need to check is view name by parentViewNameSet. issue link: https://github.com/apache/incubator-doris/issues/4598
- public abstract void getTables(Analyzer analyzer, Map tables, Set parentViewNameSet) throws AnalysisException;
     + // so we need to check whether it is a view name via parentViewNameSet.
+ // issue link: https://github.com/apache/incubator-doris/issues/4598
+ public abstract void getTables(Analyzer analyzer, Map tables, Set parentViewNameSet)
+ throws AnalysisException;
// get TableRefs in this query, including physical TableRefs of this statement and
// nested statements of inline views and with_Clause.
diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/RecoverDbStmt.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/RecoverDbStmt.java
index 5be7c6bf24..05ccbf1fba 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/analysis/RecoverDbStmt.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/RecoverDbStmt.java
@@ -51,11 +51,10 @@ public class RecoverDbStmt extends DdlStmt {
dbName = ClusterNamespace.getFullName(getClusterName(), dbName);
if (!Catalog.getCurrentCatalog().getAuth().checkDbPriv(ConnectContext.get(), dbName,
- PrivPredicate.of(PrivBitSet.of(PaloPrivilege.ALTER_PRIV,
- PaloPrivilege.CREATE_PRIV,
- PaloPrivilege.ADMIN_PRIV),
- Operator.OR))) {
- ErrorReport.reportAnalysisException(ErrorCode.ERR_DBACCESS_DENIED_ERROR, analyzer.getQualifiedUser(), dbName);
+ PrivPredicate.of(PrivBitSet.of(
+ PaloPrivilege.ALTER_PRIV, PaloPrivilege.CREATE_PRIV, PaloPrivilege.ADMIN_PRIV), Operator.OR))) {
+ ErrorReport.reportAnalysisException(
+ ErrorCode.ERR_DBACCESS_DENIED_ERROR, analyzer.getQualifiedUser(), dbName);
}
}
diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/RecoverPartitionStmt.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/RecoverPartitionStmt.java
index eb1ef63eb2..312cd8129c 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/analysis/RecoverPartitionStmt.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/RecoverPartitionStmt.java
@@ -55,11 +55,8 @@ public class RecoverPartitionStmt extends DdlStmt {
public void analyze(Analyzer analyzer) throws AnalysisException, UserException {
dbTblName.analyze(analyzer);
if (!Catalog.getCurrentCatalog().getAuth().checkTblPriv(ConnectContext.get(), dbTblName.getDb(),
- dbTblName.getTbl(),
- PrivPredicate.of(PrivBitSet.of(PaloPrivilege.ALTER_PRIV,
- PaloPrivilege.CREATE_PRIV,
- PaloPrivilege.ADMIN_PRIV),
- Operator.OR))) {
+ dbTblName.getTbl(), PrivPredicate.of(PrivBitSet.of(
+ PaloPrivilege.ALTER_PRIV, PaloPrivilege.CREATE_PRIV, PaloPrivilege.ADMIN_PRIV), Operator.OR))) {
ErrorReport.reportAnalysisException(ErrorCode.ERR_TABLEACCESS_DENIED_ERROR, "RECOVERY",
ConnectContext.get().getQualifiedUser(),
ConnectContext.get().getRemoteIP(),
diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/RecoverTableStmt.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/RecoverTableStmt.java
index 5928b5fa44..dbe24f9e19 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/analysis/RecoverTableStmt.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/RecoverTableStmt.java
@@ -49,12 +49,10 @@ public class RecoverTableStmt extends DdlStmt {
public void analyze(Analyzer analyzer) throws AnalysisException, UserException {
dbTblName.analyze(analyzer);
- if (!Catalog.getCurrentCatalog().getAuth().checkTblPriv(ConnectContext.get(), dbTblName.getDb(),
- dbTblName.getTbl(),
- PrivPredicate.of(PrivBitSet.of(PaloPrivilege.ALTER_PRIV,
- PaloPrivilege.CREATE_PRIV,
- PaloPrivilege.ADMIN_PRIV),
- Operator.OR))) {
+ if (!Catalog.getCurrentCatalog().getAuth().checkTblPriv(
+ ConnectContext.get(), dbTblName.getDb(), dbTblName.getTbl(), PrivPredicate.of(
+ PrivBitSet.of(PaloPrivilege.ALTER_PRIV, PaloPrivilege.CREATE_PRIV, PaloPrivilege.ADMIN_PRIV),
+ Operator.OR))) {
ErrorReport.reportAnalysisException(ErrorCode.ERR_TABLEACCESS_DENIED_ERROR, "RECOVERY",
ConnectContext.get().getQualifiedUser(),
ConnectContext.get().getRemoteIP(),
diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/RefreshDbStmt.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/RefreshDbStmt.java
index 66a4cc9ee8..ff70985fe7 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/analysis/RefreshDbStmt.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/RefreshDbStmt.java
@@ -57,7 +57,8 @@ public class RefreshDbStmt extends DdlStmt {
// Don't allow dropping 'information_schema' database
if (dbName.equalsIgnoreCase(ClusterNamespace.getFullName(getClusterName(), InfoSchemaDb.DATABASE_NAME))) {
- ErrorReport.reportAnalysisException(ErrorCode.ERR_DBACCESS_DENIED_ERROR, analyzer.getQualifiedUser(), dbName);
+ ErrorReport.reportAnalysisException(
+ ErrorCode.ERR_DBACCESS_DENIED_ERROR, analyzer.getQualifiedUser(), dbName);
}
// check access
if (!Catalog.getCurrentCatalog().getAuth().checkDbPriv(ConnectContext.get(), dbName, PrivPredicate.DROP)) {
@@ -65,7 +66,8 @@ public class RefreshDbStmt extends DdlStmt {
ConnectContext.get().getQualifiedUser(), dbName);
}
if (!Catalog.getCurrentCatalog().getAuth().checkDbPriv(ConnectContext.get(), dbName, PrivPredicate.CREATE)) {
- ErrorReport.reportAnalysisException(ErrorCode.ERR_DBACCESS_DENIED_ERROR, analyzer.getQualifiedUser(), dbName);
+ ErrorReport.reportAnalysisException(
+ ErrorCode.ERR_DBACCESS_DENIED_ERROR, analyzer.getQualifiedUser(), dbName);
}
}
diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/ReplacePartitionClause.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/ReplacePartitionClause.java
index 666f1c0b18..6d7f88c089 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/analysis/ReplacePartitionClause.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/ReplacePartitionClause.java
@@ -44,8 +44,10 @@ public class ReplacePartitionClause extends AlterTableClause {
// Otherwise, the replaced partition's name will be the temp partitions name.
// This parameter is valid only when the number of partitions is the same as the number of temp partitions.
// For example:
- // 1. REPLACE PARTITION (p1, p2, p3) WITH TEMPORARY PARTITION(tp1, tp2) PROPERTIES("use_temp_partition_name" = "false");
- // "use_temp_partition_name" will take no effect after replacing, and the partition names will be "tp1" and "tp2".
+ // 1. REPLACE PARTITION (p1, p2, p3) WITH TEMPORARY PARTITION(tp1, tp2)
+ // PROPERTIES("use_temp_partition_name" = "false");
+ // "use_temp_partition_name" will take no effect after replacing,
+ // and the partition names will be "tp1" and "tp2".
//
// 2. REPLACE PARTITION (p1, p2) WITH TEMPORARY PARTITION(tp1, tp2) PROPERTIES("use_temp_partition_name" = "false");
// alter replacing, the partition names will be "p1" and "p2".
@@ -90,7 +92,8 @@ public class ReplacePartitionClause extends AlterTableClause {
throw new AnalysisException("Only support replace partitions with temp partitions");
}
- this.isStrictRange = PropertyAnalyzer.analyzeBooleanProp(properties, PropertyAnalyzer.PROPERTIES_STRICT_RANGE, true);
+ this.isStrictRange = PropertyAnalyzer.analyzeBooleanProp(
+ properties, PropertyAnalyzer.PROPERTIES_STRICT_RANGE, true);
this.useTempPartitionName = PropertyAnalyzer.analyzeBooleanProp(properties,
PropertyAnalyzer.PROPERTIES_USE_TEMP_PARTITION_NAME, false);
diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/ResourcePattern.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/ResourcePattern.java
index 059f4bfa6a..19771f68b5 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/analysis/ResourcePattern.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/ResourcePattern.java
@@ -39,6 +39,7 @@ public class ResourcePattern implements Writable {
private String resourceName;
public static ResourcePattern ALL;
+
static {
ALL = new ResourcePattern("*");
try {
diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/RestoreStmt.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/RestoreStmt.java
index 6a0bfe6f53..cb9b4e3bb4 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/analysis/RestoreStmt.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/RestoreStmt.java
@@ -32,10 +32,10 @@ import java.util.Map;
import java.util.Set;
public class RestoreStmt extends AbstractBackupStmt {
- private final static String PROP_ALLOW_LOAD = "allow_load";
- private final static String PROP_REPLICATION_NUM = "replication_num";
- private final static String PROP_BACKUP_TIMESTAMP = "backup_timestamp";
- private final static String PROP_META_VERSION = "meta_version";
+ private static final String PROP_ALLOW_LOAD = "allow_load";
+ private static final String PROP_REPLICATION_NUM = "replication_num";
+ private static final String PROP_BACKUP_TIMESTAMP = "backup_timestamp";
+ private static final String PROP_META_VERSION = "meta_version";
private boolean allowLoad = false;
private ReplicaAllocation replicaAlloc = ReplicaAllocation.DEFAULT_ALLOCATION;
diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/RevokeStmt.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/RevokeStmt.java
index 05e229d1a6..c84f490ddf 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/analysis/RevokeStmt.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/RevokeStmt.java
@@ -54,7 +54,8 @@ public class RevokeStmt extends DdlStmt {
this.privileges = privs.toPrivilegeList();
}
- public RevokeStmt(UserIdentity userIdent, String role, ResourcePattern resourcePattern, List privileges) {
+ public RevokeStmt(UserIdentity userIdent, String role,
+ ResourcePattern resourcePattern, List privileges) {
this.userIdent = userIdent;
this.role = role;
this.tblPattern = null;
diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/RoutineLoadDataSourceProperties.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/RoutineLoadDataSourceProperties.java
index d7dbfab53e..26d335a18e 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/analysis/RoutineLoadDataSourceProperties.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/RoutineLoadDataSourceProperties.java
@@ -49,7 +49,8 @@ public class RoutineLoadDataSourceProperties {
.add(CreateRoutineLoadStmt.KAFKA_DEFAULT_OFFSETS)
.build();
- private static final ImmutableSet CONFIGURABLE_DATA_SOURCE_PROPERTIES_SET = new ImmutableSet.Builder()
+ private static final ImmutableSet CONFIGURABLE_DATA_SOURCE_PROPERTIES_SET
+ = new ImmutableSet.Builder()
.add(CreateRoutineLoadStmt.KAFKA_BROKER_LIST_PROPERTY)
.add(CreateRoutineLoadStmt.KAFKA_TOPIC_PROPERTY)
.add(CreateRoutineLoadStmt.KAFKA_PARTITIONS_PROPERTY)
@@ -160,7 +161,8 @@ public class RoutineLoadDataSourceProperties {
* 4. other properties start with "property."
*/
private void checkKafkaProperties() throws UserException {
- ImmutableSet propertySet = isAlter ? CONFIGURABLE_DATA_SOURCE_PROPERTIES_SET : DATA_SOURCE_PROPERTIES_SET;
+ ImmutableSet propertySet = isAlter
+ ? CONFIGURABLE_DATA_SOURCE_PROPERTIES_SET : DATA_SOURCE_PROPERTIES_SET;
Optional optional = properties.keySet().stream()
.filter(entity -> !propertySet.contains(entity))
.filter(entity -> !entity.startsWith("property."))
@@ -170,7 +172,8 @@ public class RoutineLoadDataSourceProperties {
}
// check broker list
- kafkaBrokerList = Strings.nullToEmpty(properties.get(CreateRoutineLoadStmt.KAFKA_BROKER_LIST_PROPERTY)).replaceAll(" ", "");
+ kafkaBrokerList = Strings.nullToEmpty(properties.get(CreateRoutineLoadStmt.KAFKA_BROKER_LIST_PROPERTY))
+ .replaceAll(" ", "");
if (!isAlter && Strings.isNullOrEmpty(kafkaBrokerList)) {
throw new AnalysisException(CreateRoutineLoadStmt.KAFKA_BROKER_LIST_PROPERTY + " is a required property");
}
@@ -185,7 +188,8 @@ public class RoutineLoadDataSourceProperties {
}
// check topic
- kafkaTopic = Strings.nullToEmpty(properties.get(CreateRoutineLoadStmt.KAFKA_TOPIC_PROPERTY)).replaceAll(" ", "");
+ kafkaTopic = Strings.nullToEmpty(properties.get(CreateRoutineLoadStmt.KAFKA_TOPIC_PROPERTY))
+ .replaceAll(" ", "");
if (!isAlter && Strings.isNullOrEmpty(kafkaTopic)) {
throw new AnalysisException(CreateRoutineLoadStmt.KAFKA_TOPIC_PROPERTY + " is a required property");
}
@@ -223,20 +227,23 @@ public class RoutineLoadDataSourceProperties {
throw new AnalysisException("Only one of " + CreateRoutineLoadStmt.KAFKA_OFFSETS_PROPERTY
+ " and " + CreateRoutineLoadStmt.KAFKA_DEFAULT_OFFSETS + " can be set.");
}
- if (isAlter && kafkaPartitionsString != null && kafkaOffsetsString == null && kafkaDefaultOffsetString == null) {
+ if (isAlter && kafkaPartitionsString != null
+ && kafkaOffsetsString == null && kafkaDefaultOffsetString == null) {
// if this is an alter operation, the partition and (default)offset must be set together.
throw new AnalysisException("Must set offset or default offset with partition property");
}
if (kafkaOffsetsString != null) {
- this.isOffsetsForTimes = analyzeKafkaOffsetProperty(kafkaOffsetsString, this.kafkaPartitionOffsets, this.timezone);
+ this.isOffsetsForTimes = analyzeKafkaOffsetProperty(kafkaOffsetsString,
+ this.kafkaPartitionOffsets, this.timezone);
} else {
// offset is not set, check default offset.
this.isOffsetsForTimes = analyzeKafkaDefaultOffsetProperty(this.customKafkaProperties, this.timezone);
if (!this.kafkaPartitionOffsets.isEmpty()) {
// Case C
kafkaDefaultOffsetString = customKafkaProperties.get(CreateRoutineLoadStmt.KAFKA_DEFAULT_OFFSETS);
- setDefaultOffsetForPartition(this.kafkaPartitionOffsets, kafkaDefaultOffsetString, this.isOffsetsForTimes);
+ setDefaultOffsetForPartition(this.kafkaPartitionOffsets,
+ kafkaDefaultOffsetString, this.isOffsetsForTimes);
}
}
}
@@ -259,10 +266,12 @@ public class RoutineLoadDataSourceProperties {
}
// If the default offset is not set, set the default offset to OFFSET_END.
- // If the offset is in datetime format, convert it to a timestamp, and also save the origin datatime formatted offset
+ // If the offset is in datetime format, convert it to a timestamp,
     + // and also save the original datetime-formatted offset
// in "customKafkaProperties"
// return true if the offset is in datetime format.
- private static boolean analyzeKafkaDefaultOffsetProperty(Map customKafkaProperties, String timeZoneStr)
+ private static boolean analyzeKafkaDefaultOffsetProperty(
+ Map customKafkaProperties, String timeZoneStr)
throws AnalysisException {
customKafkaProperties.putIfAbsent(CreateRoutineLoadStmt.KAFKA_DEFAULT_OFFSETS, KafkaProgress.OFFSET_END);
String defaultOffsetStr = customKafkaProperties.get(CreateRoutineLoadStmt.KAFKA_DEFAULT_OFFSETS);
@@ -275,8 +284,10 @@ public class RoutineLoadDataSourceProperties {
customKafkaProperties.put(CreateRoutineLoadStmt.KAFKA_ORIGIN_DEFAULT_OFFSETS, defaultOffsetStr);
return true;
} else {
- if (!defaultOffsetStr.equalsIgnoreCase(KafkaProgress.OFFSET_BEGINNING) && !defaultOffsetStr.equalsIgnoreCase(KafkaProgress.OFFSET_END)) {
- throw new AnalysisException(CreateRoutineLoadStmt.KAFKA_DEFAULT_OFFSETS + " can only be set to OFFSET_BEGINNING, OFFSET_END or date time");
+ if (!defaultOffsetStr.equalsIgnoreCase(KafkaProgress.OFFSET_BEGINNING)
+ && !defaultOffsetStr.equalsIgnoreCase(KafkaProgress.OFFSET_END)) {
+ throw new AnalysisException(CreateRoutineLoadStmt.KAFKA_DEFAULT_OFFSETS
+ + " can only be set to OFFSET_BEGINNING, OFFSET_END or date time");
}
return false;
}
@@ -285,16 +296,17 @@ public class RoutineLoadDataSourceProperties {
// init "kafkaPartitionOffsets" with partition property.
// The offset will be set to OFFSET_END for now, and will be changed in later analysis process.
private static void analyzeKafkaPartitionProperty(String kafkaPartitionsString,
- List> kafkaPartitionOffsets) throws AnalysisException {
+ List> kafkaPartitionOffsets) throws AnalysisException {
kafkaPartitionsString = kafkaPartitionsString.replaceAll(" ", "");
if (kafkaPartitionsString.isEmpty()) {
- throw new AnalysisException(CreateRoutineLoadStmt.KAFKA_PARTITIONS_PROPERTY + " could not be a empty string");
+ throw new AnalysisException(CreateRoutineLoadStmt.KAFKA_PARTITIONS_PROPERTY
+ + " could not be a empty string");
}
String[] kafkaPartitionsStringList = kafkaPartitionsString.split(",");
for (String s : kafkaPartitionsStringList) {
try {
- kafkaPartitionOffsets.add(Pair.create(getIntegerValueFromString(s, CreateRoutineLoadStmt.KAFKA_PARTITIONS_PROPERTY),
- KafkaProgress.OFFSET_END_VAL));
+ kafkaPartitionOffsets.add(Pair.create(getIntegerValueFromString(
+ s, CreateRoutineLoadStmt.KAFKA_PARTITIONS_PROPERTY), KafkaProgress.OFFSET_END_VAL));
} catch (AnalysisException e) {
throw new AnalysisException(CreateRoutineLoadStmt.KAFKA_PARTITIONS_PROPERTY
+ " must be a number string with comma-separated");
@@ -304,8 +316,8 @@ public class RoutineLoadDataSourceProperties {
// Fill the partition's offset with given kafkaOffsetsString,
// Return true if offset is specified by timestamp.
- private static boolean analyzeKafkaOffsetProperty(String kafkaOffsetsString, List> kafkaPartitionOffsets,
- String timeZoneStr)
+ private static boolean analyzeKafkaOffsetProperty(String kafkaOffsetsString,
+ List> kafkaPartitionOffsets, String timeZoneStr)
throws UserException {
if (Strings.isNullOrEmpty(kafkaOffsetsString)) {
throw new AnalysisException(CreateRoutineLoadStmt.KAFKA_OFFSETS_PROPERTY + " could not be a empty string");
@@ -354,7 +366,8 @@ public class RoutineLoadDataSourceProperties {
} else if (NumberUtils.isDigits(kafkaOffsetsStr)) {
kafkaPartitionOffsets.get(i).second = Long.valueOf(NumberUtils.toLong(kafkaOffsetsStr));
} else {
- throw new AnalysisException(CreateRoutineLoadStmt.KAFKA_OFFSETS_PROPERTY + " must be an integer or a date time");
+ throw new AnalysisException(CreateRoutineLoadStmt.KAFKA_OFFSETS_PROPERTY
+ + " must be an integer or a date time");
}
}
}
@@ -368,7 +381,7 @@ public class RoutineLoadDataSourceProperties {
if (dataSourceProperty.getKey().startsWith("property.")) {
String propertyKey = dataSourceProperty.getKey();
String propertyValue = dataSourceProperty.getValue();
- String propertyValueArr[] = propertyKey.split("\\.");
+ String[] propertyValueArr = propertyKey.split("\\.");
if (propertyValueArr.length < 2) {
throw new AnalysisException("kafka property value could not be a empty string");
}
diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/SchemaTableType.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/SchemaTableType.java
index 2eb892ed1c..127285bcb5 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/analysis/SchemaTableType.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/SchemaTableType.java
@@ -72,8 +72,9 @@ public enum SchemaTableType {
fullSelectLists = new SelectList();
fullSelectLists.addItem(SelectListItem.createStarItem(null));
}
- private final String description;
- private final String tableName;
+
+ private final String description;
+ private final String tableName;
private final TSchemaTableType tableType;
SchemaTableType(String description, String tableName, TSchemaTableType tableType) {
diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/SelectListItem.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/SelectListItem.java
index db2e688b6a..643da0095a 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/analysis/SelectListItem.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/SelectListItem.java
@@ -62,7 +62,7 @@ public class SelectListItem {
}
// select list item corresponding to "[[db.]tbl.]*"
- static public SelectListItem createStarItem(TableName tblName) {
+ public static SelectListItem createStarItem(TableName tblName) {
return new SelectListItem(tblName);
}
@@ -115,6 +115,7 @@ public class SelectListItem {
return "*";
}
}
+
/**
* Return a column label for the select list item.
*/
diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/SelectStmt.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/SelectStmt.java
index e9ae2aa66e..ca450021b3 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/analysis/SelectStmt.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/SelectStmt.java
@@ -71,7 +71,7 @@ import java.util.stream.Collectors;
* clauses.
*/
public class SelectStmt extends QueryStmt {
- private final static Logger LOG = LogManager.getLogger(SelectStmt.class);
+ private static final Logger LOG = LogManager.getLogger(SelectStmt.class);
private UUID id = UUID.randomUUID();
// ///////////////////////////////////////
@@ -291,7 +291,8 @@ public class SelectStmt extends QueryStmt {
}
@Override
- public void getTables(Analyzer analyzer, Map tableMap, Set parentViewNameSet) throws AnalysisException {
+ public void getTables(Analyzer analyzer, Map tableMap,
+ Set parentViewNameSet) throws AnalysisException {
getWithClauseTables(analyzer, tableMap, parentViewNameSet);
for (TableRef tblRef : fromClause) {
if (tblRef instanceof InlineViewRef) {
@@ -962,7 +963,8 @@ public class SelectStmt extends QueryStmt {
if (groupByClause == null && !selectList.isDistinct()
&& !TreeNode.contains(resultExprs, Expr.isAggregatePredicate())
- && (havingClauseAfterAnaylzed == null || !havingClauseAfterAnaylzed.contains(Expr.isAggregatePredicate()))
+ && (havingClauseAfterAnaylzed == null || !havingClauseAfterAnaylzed.contains(
+ Expr.isAggregatePredicate()))
&& (sortInfo == null || !TreeNode.contains(sortInfo.getOrderingExprs(),
Expr.isAggregatePredicate()))) {
// We're not computing aggregates but we still need to register the HAVING
@@ -993,7 +995,8 @@ public class SelectStmt extends QueryStmt {
if (selectList.isDistinct()
&& (groupByClause != null
|| TreeNode.contains(resultExprs, Expr.isAggregatePredicate())
- || (havingClauseAfterAnaylzed != null && havingClauseAfterAnaylzed.contains(Expr.isAggregatePredicate())))) {
+ || (havingClauseAfterAnaylzed != null && havingClauseAfterAnaylzed.contains(
+ Expr.isAggregatePredicate())))) {
throw new AnalysisException("cannot combine SELECT DISTINCT with aggregate functions or GROUP BY");
}
@@ -1063,7 +1066,8 @@ public class SelectStmt extends QueryStmt {
? aggInfo.getSecondPhaseDistinctAggInfo()
: aggInfo;
groupingByTupleIds.add(finalAggInfo.getOutputTupleId());
- ExprSubstitutionMap combinedSmap = ExprSubstitutionMap.compose(countAllMap, finalAggInfo.getOutputSmap(), analyzer);
+ ExprSubstitutionMap combinedSmap = ExprSubstitutionMap.compose(
+ countAllMap, finalAggInfo.getOutputSmap(), analyzer);
// change select list, having and ordering exprs to point to agg output. We need
// to reanalyze the exprs at this point.
if (LOG.isDebugEnabled()) {
@@ -1084,7 +1088,8 @@ public class SelectStmt extends QueryStmt {
}
/*
- * All of columns of result and having clause are replaced by new slot ref which is bound by top tuple of agg info.
+ * All of columns of result and having clause are replaced by new slot ref
+ * which is bound by top tuple of agg info.
* For example:
* ResultExprs: SlotRef(k1), FunctionCall(sum(SlotRef(k2)))
* Having predicate: FunctionCall(sum(SlotRef(k2))) > subquery
@@ -1448,9 +1453,9 @@ public class SelectStmt extends QueryStmt {
* Aliases information of groupBy and orderBy clauses is recorded in `QueryStmt.aliasSMap`.
* The select clause has its own alias info in `SelectListItem.alias`.
*
- * Aliases expr in the `group by` and `order by` clauses are not analyzed, i.e. `Expr.isAnalyzed=false`
- * Subsequent constant folding will analyze the unanalyzed Expr before collecting the constant
- * expressions, preventing the `INVALID_TYPE` expr from being sent to BE.
+ * Aliases expr in the `group by` and `order by` clauses are not analyzed,
+ * i.e. `Expr.isAnalyzed=false`. Subsequent constant folding will analyze the unanalyzed Expr before
+ * collecting the constant expressions, preventing the `INVALID_TYPE` expr from being sent to BE.
*
* But when analyzing the alias, the meta information corresponding to the slot cannot be found
* in the catalog, an error will be reported.
diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/SetOperationStmt.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/SetOperationStmt.java
index 2062bf26df..21968fd19c 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/analysis/SetOperationStmt.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/SetOperationStmt.java
@@ -48,7 +48,7 @@ import java.util.Set;
* and we need to mark the slots of resolved exprs as materialized.
*/
public class SetOperationStmt extends QueryStmt {
- private final static Logger LOG = LogManager.getLogger(SetOperationStmt.class);
+ private static final Logger LOG = LogManager.getLogger(SetOperationStmt.class);
public enum Operation {
UNION,
@@ -213,7 +213,8 @@ public class SetOperationStmt extends QueryStmt {
}
@Override
- public void getTables(Analyzer analyzer, Map tableMap, Set parentViewNameSet) throws AnalysisException {
+ public void getTables(Analyzer analyzer, Map tableMap, Set parentViewNameSet)
+ throws AnalysisException {
getWithClauseTables(analyzer, tableMap, parentViewNameSet);
for (SetOperand op : operands) {
op.getQueryStmt().getTables(analyzer, tableMap, parentViewNameSet);
@@ -886,6 +887,7 @@ public class SetOperationStmt extends QueryStmt {
public Operation getOperation() {
return operation;
}
+
// Used for propagating DISTINCT.
public void setQualifier(Qualifier qualifier) {
this.qualifier = qualifier;
diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/SetVar.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/SetVar.java
index 3b1eeec8e9..b753a3ffc1 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/analysis/SetVar.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/SetVar.java
@@ -132,7 +132,8 @@ public class SetVar {
if (getVariable().equalsIgnoreCase(SessionVariable.PREFER_JOIN_METHOD)) {
String value = getValue().getStringValue();
if (!value.equalsIgnoreCase("broadcast") && !value.equalsIgnoreCase("shuffle")) {
- ErrorReport.reportAnalysisException(ErrorCode.ERR_WRONG_VALUE_FOR_VAR, SessionVariable.PREFER_JOIN_METHOD, value);
+ ErrorReport.reportAnalysisException(ErrorCode.ERR_WRONG_VALUE_FOR_VAR,
+ SessionVariable.PREFER_JOIN_METHOD, value);
}
}
diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/ShowAlterStmt.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/ShowAlterStmt.java
index 8bfc5ed59d..1b4dc7f4ac 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/analysis/ShowAlterStmt.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/ShowAlterStmt.java
@@ -46,7 +46,8 @@ import java.util.List;
/*
* ShowAlterStmt: used to show process state of alter statement.
* Syntax:
- * SHOW ALTER TABLE [COLUMN | ROLLUP] [FROM dbName] [WHERE TableName="xxx"] [ORDER BY CreateTime DESC] [LIMIT [offset,]rows]
+ * SHOW ALTER TABLE [COLUMN | ROLLUP] [FROM dbName] [WHERE TableName="xxx"]
+ * [ORDER BY CreateTime DESC] [LIMIT [offset,]rows]
*/
public class ShowAlterStmt extends ShowStmt {
private static final Logger LOG = LogManager.getLogger(ShowAlterStmt.class);
diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/ShowBackupStmt.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/ShowBackupStmt.java
index eec29684d1..6e58296c64 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/analysis/ShowBackupStmt.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/ShowBackupStmt.java
@@ -177,9 +177,11 @@ public class ShowBackupStmt extends ShowStmt {
return label -> true;
}
if (isAccurateMatch) {
- return CaseSensibility.LABEL.getCaseSensibility() ? label -> label.equals(labelValue) : label -> label.equalsIgnoreCase(labelValue);
+ return CaseSensibility.LABEL.getCaseSensibility()
+ ? label -> label.equals(labelValue) : label -> label.equalsIgnoreCase(labelValue);
} else {
- PatternMatcher patternMatcher = PatternMatcher.createMysqlPattern(labelValue, CaseSensibility.LABEL.getCaseSensibility());
+ PatternMatcher patternMatcher = PatternMatcher.createMysqlPattern(
+ labelValue, CaseSensibility.LABEL.getCaseSensibility());
return patternMatcher::match;
}
}
diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/ShowClusterStmt.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/ShowClusterStmt.java
index 0339c4f5ac..57b8766b4a 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/analysis/ShowClusterStmt.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/ShowClusterStmt.java
@@ -59,9 +59,7 @@ public class ShowClusterStmt extends ShowStmt {
@Override
public void analyze(Analyzer analyzer) throws AnalysisException {
if (!Catalog.getCurrentCatalog().getAuth().checkGlobalPriv(ConnectContext.get(),
- PrivPredicate.of(PrivBitSet.of(PaloPrivilege.ADMIN_PRIV,
- PaloPrivilege.NODE_PRIV),
- Operator.OR))) {
+ PrivPredicate.of(PrivBitSet.of(PaloPrivilege.ADMIN_PRIV, PaloPrivilege.NODE_PRIV), Operator.OR))) {
ErrorReport.reportAnalysisException(ErrorCode.ERR_SPECIFIC_ACCESS_DENIED_ERROR, "ADMIN");
}
}
diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/ShowExportStmt.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/ShowExportStmt.java
index 481550ab29..d826da8340 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/analysis/ShowExportStmt.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/ShowExportStmt.java
@@ -164,7 +164,8 @@ public class ShowExportStmt extends ShowStmt {
valid = true;
}
- } else if (whereExpr instanceof LikePredicate && ((LikePredicate) whereExpr).getOp() == LikePredicate.Operator.LIKE) {
+ } else if (whereExpr instanceof LikePredicate
+ && ((LikePredicate) whereExpr).getOp() == LikePredicate.Operator.LIKE) {
if ("label".equals(leftKey) && whereExpr.getChild(1) instanceof StringLiteral) {
label = whereExpr.getChild(1).getStringValue();
isLabelUseLike = true;
diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/ShowGrantsStmt.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/ShowGrantsStmt.java
index 197f61df59..fa765da9e4 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/analysis/ShowGrantsStmt.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/ShowGrantsStmt.java
@@ -46,6 +46,7 @@ import com.google.common.base.Preconditions;
public class ShowGrantsStmt extends ShowStmt {
private static final ShowResultSetMetaData META_DATA;
+
static {
ShowResultSetMetaData.Builder builder = ShowResultSetMetaData.builder();
for (String col : AuthProcDir.TITLE_NAMES) {
diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/ShowIndexStmt.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/ShowIndexStmt.java
index 58c35a9284..82616d3262 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/analysis/ShowIndexStmt.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/ShowIndexStmt.java
@@ -69,8 +69,8 @@ public class ShowIndexStmt extends ShowStmt {
}
tableName.analyze(analyzer);
- if (!Catalog.getCurrentCatalog().getAuth().checkTblPriv(ConnectContext.get(), tableName.getDb(), tableName.getTbl(),
- PrivPredicate.SHOW)) {
+ if (!Catalog.getCurrentCatalog().getAuth().checkTblPriv(
+ ConnectContext.get(), tableName.getDb(), tableName.getTbl(), PrivPredicate.SHOW)) {
ErrorReport.reportAnalysisException(ErrorCode.ERR_TABLEACCESS_DENIED_ERROR, analyzer.getQualifiedUser(),
tableName.getDb() + ": " + tableName.toString());
}
diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/ShowLoadProfileStmt.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/ShowLoadProfileStmt.java
index 50f8fcc28b..a30ef18b9f 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/analysis/ShowLoadProfileStmt.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/ShowLoadProfileStmt.java
@@ -29,7 +29,7 @@ import com.google.common.base.Strings;
// show load profile "/"; # list all saving load job ids
// show load profile "/10014" # show task ids of specified job
// show load profile "/10014/e0f7390f5363419e-b416a2a79996083e/" # show instance list of the task
-// show load profile "/10014/e0f7390f5363419e-b416a2a79996083e/e0f7390f5363419e-b416a2a799960906" # show instance tree graph
+// show load profile "/10014/e0f7390f5363419e-b416a2a79996083e/e0f7390f5363419e-b416a2a7999" # show instance's graph
public class ShowLoadProfileStmt extends ShowStmt {
private static final ShowResultSetMetaData META_DATA_TASK_IDS =
ShowResultSetMetaData.builder()
diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/ShowPolicyStmt.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/ShowPolicyStmt.java
index c7b84bcef7..81b9f294dc 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/analysis/ShowPolicyStmt.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/ShowPolicyStmt.java
@@ -18,8 +18,6 @@
package org.apache.doris.analysis;
import org.apache.doris.catalog.Catalog;
-import org.apache.doris.catalog.Column;
-import org.apache.doris.catalog.ScalarType;
import org.apache.doris.common.ErrorCode;
import org.apache.doris.common.ErrorReport;
import org.apache.doris.common.UserException;
diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/ShowQueryProfileStmt.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/ShowQueryProfileStmt.java
index 7c0994c434..15be652aed 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/analysis/ShowQueryProfileStmt.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/ShowQueryProfileStmt.java
@@ -29,7 +29,7 @@ import com.google.common.base.Strings;
// show query profile "/"; # list all saving query ids
// show query profile "/e0f7390f5363419e-b416a2a79996083e" # show graph of fragments of the query
// show query profile "/e0f7390f5363419e-b416a2a79996083e/0" # show instance list of the specified fragment
-// show query profile "/e0f7390f5363419e-b416a2a79996083e/0/e0f7390f5363419e-b416a2a799960906" # show graph of the instance
+// show query profile "/e0f7390f5363419e-b416a2a79996083e/0/e0f7390f5363419e-b416a2a799960906" # show instance's graph
public class ShowQueryProfileStmt extends ShowStmt {
// This should be same as ProfileManager.PROFILE_HEADERS
public static final ShowResultSetMetaData META_DATA_QUERY_IDS =
diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/ShowRestoreStmt.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/ShowRestoreStmt.java
index 0c42178415..4aa2007bd5 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/analysis/ShowRestoreStmt.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/ShowRestoreStmt.java
@@ -176,9 +176,11 @@ public class ShowRestoreStmt extends ShowStmt {
return label -> true;
}
if (isAccurateMatch) {
- return CaseSensibility.LABEL.getCaseSensibility() ? label -> label.equals(labelValue) : label -> label.equalsIgnoreCase(labelValue);
+ return CaseSensibility.LABEL.getCaseSensibility()
+ ? label -> label.equals(labelValue) : label -> label.equalsIgnoreCase(labelValue);
} else {
- PatternMatcher patternMatcher = PatternMatcher.createMysqlPattern(labelValue, CaseSensibility.LABEL.getCaseSensibility());
+ PatternMatcher patternMatcher = PatternMatcher.createMysqlPattern(
+ labelValue, CaseSensibility.LABEL.getCaseSensibility());
return patternMatcher::match;
}
}
diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/ShowRolesStmt.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/ShowRolesStmt.java
index ccf84e2c95..47b9bacda2 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/analysis/ShowRolesStmt.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/ShowRolesStmt.java
@@ -29,6 +29,7 @@ import org.apache.doris.qe.ShowResultSetMetaData;
public class ShowRolesStmt extends ShowStmt {
private static final ShowResultSetMetaData META_DATA;
+
static {
ShowResultSetMetaData.Builder builder = ShowResultSetMetaData.builder();
diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/ShowRoutineLoadTaskStmt.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/ShowRoutineLoadTaskStmt.java
index 553151165b..84047f0b3b 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/analysis/ShowRoutineLoadTaskStmt.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/ShowRoutineLoadTaskStmt.java
@@ -128,7 +128,8 @@ public class ShowRoutineLoadTaskStmt extends ShowStmt {
} // CHECKSTYLE IGNORE THIS LINE
if (!valid) {
- throw new AnalysisException("show routine load job only support one equal expr which is sames like JobName=\"ILoveDoris\"");
+ throw new AnalysisException("show routine load job only support one equal expr "
+ + "which is sames like JobName=\"ILoveDoris\"");
}
}
diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/ShowStreamLoadStmt.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/ShowStreamLoadStmt.java
index 1ecfa5e060..9ae208e936 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/analysis/ShowStreamLoadStmt.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/ShowStreamLoadStmt.java
@@ -68,7 +68,8 @@ public class ShowStreamLoadStmt extends ShowStmt {
.add("StartTime").add("FinishTime")
.build();
- public ShowStreamLoadStmt(String db, Expr labelExpr, List orderByElements, LimitElement limitElement) {
+ public ShowStreamLoadStmt(String db, Expr labelExpr,
+ List orderByElements, LimitElement limitElement) {
this.dbName = db;
this.whereClause = labelExpr;
this.orderByElements = orderByElements;
diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/ShowViewStmt.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/ShowViewStmt.java
index 2842c4f48d..9c7ea82381 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/analysis/ShowViewStmt.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/ShowViewStmt.java
@@ -89,7 +89,8 @@ public class ShowViewStmt extends ShowStmt {
tbl.analyze(analyzer);
String dbName = tbl.getDb();
- if (!Catalog.getCurrentCatalog().getAuth().checkTblPriv(ConnectContext.get(), dbName, getTbl(), PrivPredicate.SHOW)) {
+ if (!Catalog.getCurrentCatalog().getAuth().checkTblPriv(
+ ConnectContext.get(), dbName, getTbl(), PrivPredicate.SHOW)) {
ErrorReport.reportAnalysisException(ErrorCode.ERR_TABLEACCESS_DENIED_ERROR, "SHOW VIEW",
ConnectContext.get().getQualifiedUser(),
ConnectContext.get().getRemoteIP(),
diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/SortInfo.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/SortInfo.java
index fe287e3900..3e0703414b 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/analysis/SortInfo.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/SortInfo.java
@@ -40,7 +40,7 @@ import java.util.Set;
* particular input row (materialize all row slots)
*/
public class SortInfo {
- private final static Logger LOG = LogManager.getLogger(SortInfo.class);
+ private static final Logger LOG = LogManager.getLogger(SortInfo.class);
// All ordering exprs with cost greater than this will be materialized. Since we don't
// currently have any information about actual function costs, this value is intended to
// ensure that all expensive functions will be materialized while still leaving simple
diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/StmtRewriter.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/StmtRewriter.java
index 4f0ea9c059..66c864fb57 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/analysis/StmtRewriter.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/StmtRewriter.java
@@ -182,7 +182,8 @@ public class StmtRewriter {
* For example:
* Query: select cs_item_sk, sum(cs_sales_price) from catalog_sales a group by cs_item_sk having ...;
* Inline view:
- * from (select cs_item_sk $ColumnA, sum(cs_sales_price) $ColumnB from catalog_sales a group by cs_item_sk) $TableA
+ * from (select cs_item_sk $ColumnA, sum(cs_sales_price) $ColumnB
+ * from catalog_sales a group by cs_item_sk) $TableA
*
* Add missing aggregation columns in select list
* For example:
@@ -595,7 +596,7 @@ public class StmtRewriter {
lhsExprs, rhsExprs, updateGroupBy);
}
- /**
+ /*
* Situation: The expr is a uncorrelated subquery for outer stmt.
* Rewrite: Add a limit 1 for subquery.
* origin stmt: select * from t1 where exists (select * from table2);
diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/StorageBackend.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/StorageBackend.java
index 12e570296c..b073ff7067 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/analysis/StorageBackend.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/StorageBackend.java
@@ -38,7 +38,8 @@ public class StorageBackend extends StorageDesc implements ParseNode {
private StorageType storageType;
private Map properties;
- public StorageBackend(String storageName, String location, StorageType storageType, Map properties) {
+ public StorageBackend(String storageName, String location,
+ StorageType storageType, Map properties) {
this.name = storageName;
this.location = location;
this.storageType = storageType;
diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/StringLiteral.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/StringLiteral.java
index 16eaddf455..a215cc3132 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/analysis/StringLiteral.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/StringLiteral.java
@@ -206,7 +206,8 @@ public class StringLiteral extends LiteralExpr {
case LARGEINT:
if (VariableVarConverters.hasConverter(beConverted)) {
try {
- return new LargeIntLiteral(String.valueOf(VariableVarConverters.encode(beConverted, value)));
+ return new LargeIntLiteral(String.valueOf(
+ VariableVarConverters.encode(beConverted, value)));
} catch (DdlException e) {
throw new AnalysisException(e.getMessage());
}
diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/Subquery.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/Subquery.java
index d3056bbe2f..10b51b80db 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/analysis/Subquery.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/Subquery.java
@@ -41,7 +41,7 @@ import java.util.List;
* its own Analyzer context.
*/
public class Subquery extends Expr {
- private final static Logger LOG = LoggerFactory.getLogger(Subquery.class);
+ private static final Logger LOG = LoggerFactory.getLogger(Subquery.class);
// The QueryStmt of the subquery.
protected QueryStmt stmt;
diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/TablePattern.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/TablePattern.java
index f03a71fb3d..6f6a3d393a 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/analysis/TablePattern.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/TablePattern.java
@@ -41,6 +41,7 @@ public class TablePattern implements Writable {
boolean isAnalyzed = false;
public static TablePattern ALL;
+
static {
ALL = new TablePattern("*", "*");
try {
diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/TransactionBeginStmt.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/TransactionBeginStmt.java
index 287e45952c..58184b9bb2 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/analysis/TransactionBeginStmt.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/TransactionBeginStmt.java
@@ -24,12 +24,15 @@ import org.apache.doris.transaction.TransactionEntry;
public class TransactionBeginStmt extends TransactionStmt {
private String label = null;
+
public TransactionBeginStmt() {
this.label = "";
}
+
public TransactionBeginStmt(final String label) {
this.label = label;
}
+
@Override
public void analyze(Analyzer analyzer) throws AnalysisException, UserException {
if (label == null || label.isEmpty()) {
diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/TypeDef.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/TypeDef.java
index e8ed068a43..bdbb4c88fc 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/analysis/TypeDef.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/TypeDef.java
@@ -28,7 +28,6 @@ import org.apache.doris.catalog.StructField;
import org.apache.doris.catalog.StructType;
import org.apache.doris.catalog.Type;
import org.apache.doris.common.AnalysisException;
-import org.apache.doris.common.Config;
import com.google.common.base.Preconditions;
diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/UseStmt.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/UseStmt.java
index 8727418625..d0da7479aa 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/analysis/UseStmt.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/UseStmt.java
@@ -63,7 +63,8 @@ public class UseStmt extends StatementBase {
database = ClusterNamespace.getFullName(getClusterName(), database);
if (!Catalog.getCurrentCatalog().getAuth().checkDbPriv(ConnectContext.get(), database, PrivPredicate.SHOW)) {
- ErrorReport.reportAnalysisException(ErrorCode.ERR_DBACCESS_DENIED_ERROR, analyzer.getQualifiedUser(), database);
+ ErrorReport.reportAnalysisException(ErrorCode.ERR_DBACCESS_DENIED_ERROR,
+ analyzer.getQualifiedUser(), database);
}
}
diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/UserIdentity.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/UserIdentity.java
index b87878fa4e..ec24d06a41 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/analysis/UserIdentity.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/UserIdentity.java
@@ -98,7 +98,8 @@ public class UserIdentity implements Writable {
}
public static UserIdentity fromThrift(TUserIdentity tUserIdent) {
- UserIdentity userIdentity = new UserIdentity(tUserIdent.getUsername(), tUserIdent.getHost(), tUserIdent.is_domain);
+ UserIdentity userIdentity = new UserIdentity(tUserIdent.getUsername(),
+ tUserIdent.getHost(), tUserIdent.is_domain);
userIdentity.setIsAnalyzed();
return userIdentity;
}
diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/ValueList.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/ValueList.java
index a9f3951bff..49e7e92e35 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/analysis/ValueList.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/ValueList.java
@@ -32,6 +32,7 @@ public class ValueList {
rows = Lists.newArrayList();
rows.add(row);
}
+
public ValueList(List> rows) {
this.rows = rows;
}
diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/WithClause.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/WithClause.java
index 44ab177c25..98979fc028 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/analysis/WithClause.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/WithClause.java
@@ -113,7 +113,8 @@ public class WithClause implements ParseNode {
}
}
- public void getTables(Analyzer analyzer, Map tableMap, Set parentViewNameSet) throws AnalysisException {
+ public void getTables(Analyzer analyzer, Map tableMap,
+ Set parentViewNameSet) throws AnalysisException {
for (View view : views) {
QueryStmt stmt = view.getQueryStmt();
parentViewNameSet.add(view.getName());
diff --git a/fe/fe-core/src/main/java/org/apache/doris/backup/BackupHandler.java b/fe/fe-core/src/main/java/org/apache/doris/backup/BackupHandler.java
index 2f0b07e461..c529259fbc 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/backup/BackupHandler.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/backup/BackupHandler.java
@@ -192,7 +192,8 @@ public class BackupHandler extends MasterDaemon implements Writable {
public void createRepository(CreateRepositoryStmt stmt) throws DdlException {
if (!catalog.getBrokerMgr().containsBroker(stmt.getBrokerName())
&& stmt.getStorageType() == StorageBackend.StorageType.BROKER) {
- ErrorReport.reportDdlException(ErrorCode.ERR_COMMON_ERROR, "broker does not exist: " + stmt.getBrokerName());
+ ErrorReport.reportDdlException(ErrorCode.ERR_COMMON_ERROR,
+ "broker does not exist: " + stmt.getBrokerName());
}
BlobStorage storage = BlobStorage.create(stmt.getBrokerName(), stmt.getStorageType(), stmt.getProperties());
@@ -328,13 +329,15 @@ public class BackupHandler extends MasterDaemon implements Writable {
tbl.readLock();
try {
if (olapTbl.existTempPartitions()) {
- ErrorReport.reportDdlException(ErrorCode.ERR_COMMON_ERROR, "Do not support backup table with temp partitions");
+ ErrorReport.reportDdlException(ErrorCode.ERR_COMMON_ERROR,
+ "Do not support backup table with temp partitions");
}
PartitionNames partitionNames = tblRef.getPartitionNames();
if (partitionNames != null) {
if (partitionNames.isTemp()) {
- ErrorReport.reportDdlException(ErrorCode.ERR_COMMON_ERROR, "Do not support backup temp partitions");
+ ErrorReport.reportDdlException(ErrorCode.ERR_COMMON_ERROR,
+ "Do not support backup temp partitions");
}
for (String partName : partitionNames.getPartitionNames()) {
@@ -671,7 +674,8 @@ public class BackupHandler extends MasterDaemon implements Writable {
public void write(DataOutput out) throws IOException {
repoMgr.write(out);
- List jobs = dbIdToBackupOrRestoreJobs.values().stream().flatMap(Deque::stream).collect(Collectors.toList());
+ List jobs = dbIdToBackupOrRestoreJobs.values()
+ .stream().flatMap(Deque::stream).collect(Collectors.toList());
out.writeInt(jobs.size());
for (AbstractJob job : jobs) {
job.write(out);
diff --git a/fe/fe-core/src/main/java/org/apache/doris/backup/BackupJob.java b/fe/fe-core/src/main/java/org/apache/doris/backup/BackupJob.java
index 96d89a2707..da07f64342 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/backup/BackupJob.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/backup/BackupJob.java
@@ -162,7 +162,8 @@ public class BackupJob extends AbstractJob {
// snapshot task could not finish if status_code is OLAP_ERR_VERSION_ALREADY_MERGED,
// so cancel this job
if (request.getTaskStatus().getStatusCode() == TStatusCode.OLAP_ERR_VERSION_ALREADY_MERGED) {
- status = new Status(ErrCode.OLAP_VERSION_ALREADY_MERGED, "make snapshot failed, version already merged");
+ status = new Status(ErrCode.OLAP_VERSION_ALREADY_MERGED,
+ "make snapshot failed, version already merged");
cancelInternal();
}
return false;
@@ -427,7 +428,8 @@ public class BackupJob extends AbstractJob {
}
}
- private void prepareSnapshotTaskForOlapTable(OlapTable olapTable, TableRef backupTableRef, AgentBatchTask batchTask) {
+ private void prepareSnapshotTaskForOlapTable(OlapTable olapTable,
+ TableRef backupTableRef, AgentBatchTask batchTask) {
olapTable.readLock();
try {
// check backup table again
@@ -654,8 +656,8 @@ public class BackupJob extends AbstractJob {
File jobDir = new File(localJobDirPath.toString());
if (jobDir.exists()) {
// if dir exists, delete it first
- Files.walk(localJobDirPath,
- FileVisitOption.FOLLOW_LINKS).sorted(Comparator.reverseOrder()).map(Path::toFile).forEach(File::delete);
+ Files.walk(localJobDirPath, FileVisitOption.FOLLOW_LINKS).sorted(Comparator.reverseOrder())
+ .map(Path::toFile).forEach(File::delete);
}
if (!jobDir.mkdir()) {
status = new Status(ErrCode.COMMON_ERROR, "Failed to create tmp dir: " + localJobDirPath);
@@ -673,7 +675,8 @@ public class BackupJob extends AbstractJob {
localMetaInfoFilePath = metaInfoFile.getAbsolutePath();
// 3. save job info file
- jobInfo = BackupJobInfo.fromCatalog(createTime, label, dbName, dbId, getContent(), backupMeta, snapshotInfos);
+ jobInfo = BackupJobInfo.fromCatalog(createTime, label, dbName, dbId,
+ getContent(), backupMeta, snapshotInfos);
LOG.debug("job info: {}. {}", jobInfo, this);
File jobInfoFile = new File(jobDir, Repository.PREFIX_JOB_INFO + createTimeStr);
if (!jobInfoFile.createNewFile()) {
@@ -805,8 +808,8 @@ public class BackupJob extends AbstractJob {
try {
File jobDir = new File(localJobDirPath.toString());
if (jobDir.exists()) {
- Files.walk(localJobDirPath,
- FileVisitOption.FOLLOW_LINKS).sorted(Comparator.reverseOrder()).map(Path::toFile).forEach(File::delete);
+ Files.walk(localJobDirPath, FileVisitOption.FOLLOW_LINKS).sorted(Comparator.reverseOrder())
+ .map(Path::toFile).forEach(File::delete);
}
} catch (Exception e) {
LOG.warn("failed to clean the backup job dir: " + localJobDirPath.toString());
diff --git a/fe/fe-core/src/main/java/org/apache/doris/backup/BrokerStorage.java b/fe/fe-core/src/main/java/org/apache/doris/backup/BrokerStorage.java
index 9295095f40..f703634413 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/backup/BrokerStorage.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/backup/BrokerStorage.java
@@ -126,8 +126,8 @@ public class BrokerStorage extends BlobStorage {
File localFile = new File(localFilePath);
if (localFile.exists()) {
try {
- Files.walk(Paths.get(localFilePath),
- FileVisitOption.FOLLOW_LINKS).sorted(Comparator.reverseOrder()).map(Path::toFile).forEach(File::delete);
+ Files.walk(Paths.get(localFilePath), FileVisitOption.FOLLOW_LINKS)
+ .sorted(Comparator.reverseOrder()).map(Path::toFile).forEach(File::delete);
} catch (IOException e) {
return new Status(Status.ErrCode.COMMON_ERROR, "failed to delete exist local file: " + localFilePath);
}
@@ -294,7 +294,8 @@ public class BrokerStorage extends BlobStorage {
}
} finally {
Status closeStatus = closeWriter(client, address, fd);
- if (closeStatus.getErrCode() == Status.ErrCode.BAD_CONNECTION || status.getErrCode() == Status.ErrCode.BAD_CONNECTION) {
+ if (closeStatus.getErrCode() == Status.ErrCode.BAD_CONNECTION
+ || status.getErrCode() == Status.ErrCode.BAD_CONNECTION) {
ClientPool.brokerPool.invalidateObject(address, client);
} else {
ClientPool.brokerPool.returnObject(address, client);
@@ -340,7 +341,8 @@ public class BrokerStorage extends BlobStorage {
int tryTimes = 0;
while (tryTimes < 3) {
try {
- TBrokerPWriteRequest req = new TBrokerPWriteRequest(TBrokerVersion.VERSION_ONE, fd, writeOffset, bb);
+ TBrokerPWriteRequest req
+ = new TBrokerPWriteRequest(TBrokerVersion.VERSION_ONE, fd, writeOffset, bb);
TBrokerOperationStatus opst = client.pwrite(req);
if (opst.getStatusCode() != TBrokerOperationStatusCode.OK) {
// pwrite return failure.
diff --git a/fe/fe-core/src/main/java/org/apache/doris/backup/Repository.java b/fe/fe-core/src/main/java/org/apache/doris/backup/Repository.java
index 5a99017b2c..4b5b13ad70 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/backup/Repository.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/backup/Repository.java
@@ -366,7 +366,8 @@ public class Repository implements Writable {
// create remote tablet snapshot path
// eg:
- // /location/__palo_repository_repo_name/__ss_my_ss1/__ss_content/__db_10001/__tbl_10020/__part_10031/__idx_10032/__10023/__3481721
+ // /location/__palo_repository_repo_name/__ss_my_ss1/__ss_content/
+ // __db_10001/__tbl_10020/__part_10031/__idx_10032/__10023/__3481721
public String assembleRemoteSnapshotPath(String label, SnapshotInfo info) {
String path = Joiner.on(PATH_DELIMITER).join(location,
joinPrefix(PREFIX_REPO, name),
@@ -453,7 +454,8 @@ public class Repository implements Writable {
if (storage instanceof BrokerStorage) {
// this may be a retry, so we should first delete remote file
String tmpRemotePath = assembleFileNameWithSuffix(remoteFilePath, SUFFIX_TMP_FILE);
- LOG.debug("get md5sum of file: {}. tmp remote path: {}. final remote path: {}", localFilePath, tmpRemotePath, finalRemotePath);
+ LOG.debug("get md5sum of file: {}. tmp remote path: {}. final remote path: {}",
+ localFilePath, tmpRemotePath, finalRemotePath);
st = storage.delete(tmpRemotePath);
if (!st.ok()) {
return st;
diff --git a/fe/fe-core/src/main/java/org/apache/doris/backup/RestoreJob.java b/fe/fe-core/src/main/java/org/apache/doris/backup/RestoreJob.java
index 25267dbad8..78148acc1f 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/backup/RestoreJob.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/backup/RestoreJob.java
@@ -511,8 +511,7 @@ public class RestoreJob extends AbstractJob {
}
}
for (BackupJobInfo.BackupOdbcResourceInfo backupOdbcResourceInfo : jobInfo.newBackupObjects.odbcResources) {
- Resource resource = Catalog.getCurrentCatalog().getResourceMgr().
- getResource(backupOdbcResourceInfo.name);
+ Resource resource = Catalog.getCurrentCatalog().getResourceMgr().getResource(backupOdbcResourceInfo.name);
if (resource == null) {
continue;
}
@@ -538,7 +537,8 @@ public class RestoreJob extends AbstractJob {
// table already exist, check schema
if (localTbl.getType() != TableType.OLAP) {
status = new Status(ErrCode.COMMON_ERROR,
- "The type of local table should be same as type of remote table: " + remoteTbl.getName());
+ "The type of local table should be same as type of remote table: "
+ + remoteTbl.getName());
return;
}
OlapTable localOlapTbl = (OlapTable) localTbl;
@@ -554,8 +554,10 @@ public class RestoreJob extends AbstractJob {
}
LOG.debug("get intersect part names: {}, job: {}", intersectPartNames, this);
if (!localOlapTbl.getSignature(BackupHandler.SIGNATURE_VERSION, intersectPartNames)
- .equals(remoteOlapTbl.getSignature(BackupHandler.SIGNATURE_VERSION, intersectPartNames))) {
- status = new Status(ErrCode.COMMON_ERROR, "Table " + jobInfo.getAliasByOriginNameIfSet(tableName)
+ .equals(remoteOlapTbl.getSignature(
+ BackupHandler.SIGNATURE_VERSION, intersectPartNames))) {
+ status = new Status(ErrCode.COMMON_ERROR, "Table "
+ + jobInfo.getAliasByOriginNameIfSet(tableName)
+ " already exist but with different schema");
return;
}
@@ -571,10 +573,12 @@ public class RestoreJob extends AbstractJob {
if (localPartInfo.getType() == PartitionType.RANGE
|| localPartInfo.getType() == PartitionType.LIST) {
PartitionItem localItem = localPartInfo.getItem(localPartition.getId());
- PartitionItem remoteItem = remoteOlapTbl.getPartitionInfo().getItem(backupPartInfo.id);
+ PartitionItem remoteItem = remoteOlapTbl
+ .getPartitionInfo().getItem(backupPartInfo.id);
if (localItem.equals(remoteItem)) {
// Same partition, same range
- if (genFileMappingWhenBackupReplicasEqual(localPartInfo, localPartition, localTbl, backupPartInfo, partitionName, tblInfo)) {
+ if (genFileMappingWhenBackupReplicasEqual(localPartInfo, localPartition,
+ localTbl, backupPartInfo, partitionName, tblInfo)) {
return;
}
} else {
@@ -586,7 +590,8 @@ public class RestoreJob extends AbstractJob {
}
} else {
// If this is a single partitioned table.
- if (genFileMappingWhenBackupReplicasEqual(localPartInfo, localPartition, localTbl, backupPartInfo, partitionName, tblInfo)) {
+ if (genFileMappingWhenBackupReplicasEqual(localPartInfo, localPartition, localTbl,
+ backupPartInfo, partitionName, tblInfo)) {
return;
}
}
@@ -596,7 +601,8 @@ public class RestoreJob extends AbstractJob {
PartitionInfo localPartitionInfo = localOlapTbl.getPartitionInfo();
if (localPartitionInfo.getType() == PartitionType.RANGE
|| localPartitionInfo.getType() == PartitionType.LIST) {
- PartitionItem remoteItem = remoteOlapTbl.getPartitionInfo().getItem(backupPartInfo.id);
+ PartitionItem remoteItem = remoteOlapTbl.getPartitionInfo()
+ .getItem(backupPartInfo.id);
if (localPartitionInfo.getAnyIntersectItem(remoteItem, false) != null) {
status = new Status(ErrCode.COMMON_ERROR, "Partition " + partitionName
+ " in table " + localTbl.getName()
@@ -614,7 +620,8 @@ public class RestoreJob extends AbstractJob {
restoredPartitions.add(Pair.create(localOlapTbl.getName(), restorePart));
}
} else {
- // It is impossible that a single partitioned table exist without any existing partition
+ // It is impossible that a single partitioned table exist
+ // without any existing partition
status = new Status(ErrCode.COMMON_ERROR,
"No partition exist in single partitioned table " + localOlapTbl.getName());
return;
@@ -876,7 +883,8 @@ public class RestoreJob extends AbstractJob {
}
// check disk capacity
- org.apache.doris.common.Status st = Catalog.getCurrentSystemInfo().checkExceedDiskCapacityLimit(bePathsMap, true);
+ org.apache.doris.common.Status st = Catalog.getCurrentSystemInfo()
+ .checkExceedDiskCapacityLimit(bePathsMap, true);
if (!st.ok()) {
status = new Status(ErrCode.COMMON_ERROR, st.getErrorMsg());
return;
@@ -923,8 +931,8 @@ public class RestoreJob extends AbstractJob {
}
}
- private boolean genFileMappingWhenBackupReplicasEqual(PartitionInfo localPartInfo, Partition localPartition, Table localTbl,
- BackupPartitionInfo backupPartInfo, String partitionName, BackupOlapTableInfo tblInfo) {
+ private boolean genFileMappingWhenBackupReplicasEqual(PartitionInfo localPartInfo, Partition localPartition,
+ Table localTbl, BackupPartitionInfo backupPartInfo, String partitionName, BackupOlapTableInfo tblInfo) {
short restoreReplicaNum = replicaAlloc.getTotalReplicaNum();
short localReplicaNum = localPartInfo.getReplicaAllocation(localPartition.getId()).getTotalReplicaNum();
if (localReplicaNum != restoreReplicaNum) {
@@ -1116,7 +1124,8 @@ public class RestoreJob extends AbstractJob {
OlapTable remoteTbl = (OlapTable) backupMeta.getTable(entry.first);
PartitionInfo localPartitionInfo = localTbl.getPartitionInfo();
PartitionInfo remotePartitionInfo = remoteTbl.getPartitionInfo();
- BackupPartitionInfo backupPartitionInfo = jobInfo.getOlapTableInfo(entry.first).getPartInfo(restorePart.getName());
+ BackupPartitionInfo backupPartitionInfo = jobInfo.getOlapTableInfo(entry.first)
+ .getPartInfo(restorePart.getName());
long remotePartId = backupPartitionInfo.id;
DataProperty remoteDataProperty = remotePartitionInfo.getDataProperty(remotePartId);
localPartitionInfo.addPartition(restorePart.getId(), false, remotePartitionInfo.getItem(remotePartId),
@@ -1271,9 +1280,9 @@ public class RestoreJob extends AbstractJob {
MaterializedIndex idx = part.getIndex(info.getIndexId());
if (idx == null) {
- status = new Status(ErrCode.NOT_FOUND,
- "index " + info.getIndexId() + " does not exist in partion " + part.getName()
- + "of restored table " + tbl.getName());
+ status = new Status(ErrCode.NOT_FOUND, "index " + info.getIndexId()
+ + " does not exist in partion " + part.getName()
+ + "of restored table " + tbl.getName());
return;
}
@@ -1745,7 +1754,8 @@ public class RestoreJob extends AbstractJob {
for (Map.Entry entry : restoredVersionInfo.row(tblId).entrySet()) {
out.writeLong(entry.getKey());
out.writeLong(entry.getValue());
- // It is version hash in the past, but it useless but should compatible with old version so that write 0 here
+ // It is version hash in the past,
+ // but it useless but should compatible with old version so that write 0 here
out.writeLong(0L);
}
}
diff --git a/fe/fe-core/src/main/java/org/apache/doris/backup/S3Storage.java b/fe/fe-core/src/main/java/org/apache/doris/backup/S3Storage.java
index a67b0992e5..ae89175ebd 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/backup/S3Storage.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/backup/S3Storage.java
@@ -104,7 +104,8 @@ public class S3Storage extends BlobStorage {
// If not, it will not be converted ( https://github.com/aws/aws-sdk-java-v2/pull/763),
// but the endpoints of many cloud service providers for object storage do not start with s3,
// so they cannot be converted to virtual hosted-sytle.
- // Some of them, such as aliyun's oss, only support virtual hosted-sytle, and some of them(ceph) may only support
+ // Some of them, such as aliyun's oss, only support virtual hosted-sytle,
+ // and some of them(ceph) may only support
// path-style, so we need to do some additional conversion.
//
// use_path_style | !use_path_style
@@ -202,7 +203,8 @@ public class S3Storage extends BlobStorage {
}
try {
S3URI uri = S3URI.create(remoteFilePath, forceHostedStyle);
- GetObjectResponse response = getClient(uri.getVirtualBucket()).getObject(GetObjectRequest.builder().bucket(uri.getBucket()).key(uri.getKey()).build(), localFile.toPath());
+ GetObjectResponse response = getClient(uri.getVirtualBucket()).getObject(
+ GetObjectRequest.builder().bucket(uri.getBucket()).key(uri.getKey()).build(), localFile.toPath());
if (localFile.length() == fileSize) {
LOG.info(
"finished to download from {} to {} with size: {}. cost {} ms",
@@ -348,7 +350,9 @@ public class S3Storage extends BlobStorage {
return Status.OK;
}
for (FileStatus fileStatus : files) {
- RemoteFile remoteFile = new RemoteFile(fileNameOnly ? fileStatus.getPath().getName() : fileStatus.getPath().toString(), !fileStatus.isDirectory(), fileStatus.isDirectory() ? -1 : fileStatus.getLen());
+ RemoteFile remoteFile = new RemoteFile(
+ fileNameOnly ? fileStatus.getPath().getName() : fileStatus.getPath().toString(),
+ !fileStatus.isDirectory(), fileStatus.isDirectory() ? -1 : fileStatus.getLen());
result.add(remoteFile);
}
} catch (FileNotFoundException e) {
diff --git a/fe/fe-core/src/main/java/org/apache/doris/blockrule/SqlBlockRuleMgr.java b/fe/fe-core/src/main/java/org/apache/doris/blockrule/SqlBlockRuleMgr.java
index 3bf66989fd..db99f4de43 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/blockrule/SqlBlockRuleMgr.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/blockrule/SqlBlockRuleMgr.java
@@ -307,4 +307,4 @@ public class SqlBlockRuleMgr implements Writable {
String json = Text.readString(in);
return GsonUtils.GSON.fromJson(json, SqlBlockRuleMgr.class);
}
-}
\ No newline at end of file
+}
diff --git a/fe/fe-core/src/main/java/org/apache/doris/catalog/AggregateFunction.java b/fe/fe-core/src/main/java/org/apache/doris/catalog/AggregateFunction.java
index efa1c0c2f1..d0b5e28733 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/catalog/AggregateFunction.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/catalog/AggregateFunction.java
@@ -101,18 +101,19 @@ public class AggregateFunction extends Function {
}
public AggregateFunction(FunctionName fnName, List argTypes,
- Type retType, Type intermediateType,
- URI location, String updateFnSymbol, String initFnSymbol,
- String serializeFnSymbol, String mergeFnSymbol, String getValueFnSymbol,
- String removeFnSymbol, String finalizeFnSymbol) {
+ Type retType, Type intermediateType,
+ URI location, String updateFnSymbol, String initFnSymbol,
+ String serializeFnSymbol, String mergeFnSymbol, String getValueFnSymbol,
+ String removeFnSymbol, String finalizeFnSymbol) {
this(fnName, argTypes, retType, intermediateType, location, updateFnSymbol, initFnSymbol, serializeFnSymbol,
mergeFnSymbol, getValueFnSymbol, removeFnSymbol, finalizeFnSymbol, false);
}
public AggregateFunction(FunctionName fnName, List argTypes,
- Type retType, Type intermediateType, boolean hasVarArgs) {
+ Type retType, Type intermediateType, boolean hasVarArgs) {
super(fnName, argTypes, retType, hasVarArgs);
- this.intermediateType = (intermediateType != null && intermediateType.equals(retType)) ? null : intermediateType;
+ this.intermediateType = (intermediateType != null && intermediateType.equals(retType))
+ ? null : intermediateType;
ignoresDistinct = false;
isAnalyticFn = false;
isAggregateFn = true;
@@ -120,19 +121,21 @@ public class AggregateFunction extends Function {
}
public static AggregateFunction createBuiltin(String name,
- List argTypes, Type retType, Type intermediateType,
- boolean ignoresDistinct,
- boolean isAnalyticFn,
- boolean returnsNonNullOnEmpty) {
- return createBuiltin(name, argTypes, retType, intermediateType, false, ignoresDistinct, isAnalyticFn, returnsNonNullOnEmpty);
+ List argTypes, Type retType, Type intermediateType,
+ boolean ignoresDistinct,
+ boolean isAnalyticFn,
+ boolean returnsNonNullOnEmpty) {
+ return createBuiltin(name, argTypes, retType, intermediateType, false,
+ ignoresDistinct, isAnalyticFn, returnsNonNullOnEmpty);
}
public static AggregateFunction createBuiltin(String name,
- List argTypes, Type retType, Type intermediateType,
- boolean hasVarArgs, boolean ignoresDistinct,
- boolean isAnalyticFn,
- boolean returnsNonNullOnEmpty) {
- AggregateFunction fn = new AggregateFunction(new FunctionName(name), argTypes, retType, intermediateType, hasVarArgs);
+ List argTypes, Type retType, Type intermediateType,
+ boolean hasVarArgs, boolean ignoresDistinct,
+ boolean isAnalyticFn,
+ boolean returnsNonNullOnEmpty) {
+ AggregateFunction fn = new AggregateFunction(new FunctionName(name),
+ argTypes, retType, intermediateType, hasVarArgs);
fn.setBinaryType(TFunctionBinaryType.BUILTIN);
fn.ignoresDistinct = ignoresDistinct;
fn.isAnalyticFn = isAnalyticFn;
@@ -146,7 +149,8 @@ public class AggregateFunction extends Function {
URI location, String updateFnSymbol, String initFnSymbol,
String serializeFnSymbol, String mergeFnSymbol, String getValueFnSymbol,
String removeFnSymbol, String finalizeFnSymbol, boolean vectorized) {
- this(fnName, argTypes, retType, intermediateType, false, location, updateFnSymbol, initFnSymbol, serializeFnSymbol,
+ this(fnName, argTypes, retType, intermediateType, false, location,
+ updateFnSymbol, initFnSymbol, serializeFnSymbol,
mergeFnSymbol, getValueFnSymbol, removeFnSymbol, finalizeFnSymbol, vectorized);
}
@@ -157,8 +161,10 @@ public class AggregateFunction extends Function {
String removeFnSymbol, String finalizeFnSymbol, boolean vectorized) {
// only `count` is always not nullable, other aggregate function is always nullable
super(fnName, argTypes, retType, hasVarArgs, vectorized,
- AggregateFunction.NOT_NULLABLE_AGGREGATE_FUNCTION_NAME_SET.contains(fnName.getFunction()) ? NullableMode.ALWAYS_NOT_NULLABLE :
- AggregateFunction.ALWAYS_NULLABLE_AGGREGATE_FUNCTION_NAME_SET.contains(fnName.getFunction()) ? NullableMode.ALWAYS_NULLABLE : NullableMode.DEPEND_ON_ARGUMENT);
+ AggregateFunction.NOT_NULLABLE_AGGREGATE_FUNCTION_NAME_SET.contains(fnName.getFunction())
+ ? NullableMode.ALWAYS_NOT_NULLABLE :
+ AggregateFunction.ALWAYS_NULLABLE_AGGREGATE_FUNCTION_NAME_SET.contains(fnName.getFunction())
+ ? NullableMode.ALWAYS_NULLABLE : NullableMode.DEPEND_ON_ARGUMENT);
setLocation(location);
this.intermediateType = (intermediateType.equals(retType)) ? null : intermediateType;
this.updateFnSymbol = updateFnSymbol;
@@ -175,30 +181,31 @@ public class AggregateFunction extends Function {
}
public static AggregateFunction createBuiltin(String name,
- List argTypes, Type retType, Type intermediateType,
- String initFnSymbol, String updateFnSymbol, String mergeFnSymbol,
- String serializeFnSymbol, String finalizeFnSymbol, boolean ignoresDistinct,
- boolean isAnalyticFn, boolean returnsNonNullOnEmpty) {
+ List argTypes, Type retType, Type intermediateType,
+ String initFnSymbol, String updateFnSymbol, String mergeFnSymbol,
+ String serializeFnSymbol, String finalizeFnSymbol, boolean ignoresDistinct,
+ boolean isAnalyticFn, boolean returnsNonNullOnEmpty) {
return createBuiltin(name, argTypes, retType, intermediateType,
initFnSymbol, updateFnSymbol, mergeFnSymbol,
serializeFnSymbol, finalizeFnSymbol, ignoresDistinct, isAnalyticFn, returnsNonNullOnEmpty, false);
}
+
public static AggregateFunction createBuiltin(String name,
- List argTypes, Type retType, Type intermediateType,
- String initFnSymbol, String updateFnSymbol, String mergeFnSymbol,
- String serializeFnSymbol, String finalizeFnSymbol, boolean ignoresDistinct,
- boolean isAnalyticFn, boolean returnsNonNullOnEmpty, boolean vectorized) {
+ List argTypes, Type retType, Type intermediateType,
+ String initFnSymbol, String updateFnSymbol, String mergeFnSymbol,
+ String serializeFnSymbol, String finalizeFnSymbol, boolean ignoresDistinct,
+ boolean isAnalyticFn, boolean returnsNonNullOnEmpty, boolean vectorized) {
return createBuiltin(name, argTypes, retType, intermediateType, initFnSymbol,
updateFnSymbol, mergeFnSymbol, serializeFnSymbol, null, null, finalizeFnSymbol,
ignoresDistinct, isAnalyticFn, returnsNonNullOnEmpty, vectorized);
}
public static AggregateFunction createBuiltin(String name,
- List argTypes, Type retType, Type intermediateType,
- String initFnSymbol, String updateFnSymbol, String mergeFnSymbol,
- String serializeFnSymbol, String getValueFnSymbol, String removeFnSymbol,
- String finalizeFnSymbol, boolean ignoresDistinct, boolean isAnalyticFn,
- boolean returnsNonNullOnEmpty) {
+ List argTypes, Type retType, Type intermediateType,
+ String initFnSymbol, String updateFnSymbol, String mergeFnSymbol,
+ String serializeFnSymbol, String getValueFnSymbol, String removeFnSymbol,
+ String finalizeFnSymbol, boolean ignoresDistinct, boolean isAnalyticFn,
+ boolean returnsNonNullOnEmpty) {
return createBuiltin(name, argTypes, retType, intermediateType,
initFnSymbol, updateFnSymbol, mergeFnSymbol,
serializeFnSymbol, getValueFnSymbol, removeFnSymbol,
@@ -206,11 +213,11 @@ public class AggregateFunction extends Function {
}
public static AggregateFunction createBuiltin(String name,
- List argTypes, Type retType, Type intermediateType,
- String initFnSymbol, String updateFnSymbol, String mergeFnSymbol,
- String serializeFnSymbol, String getValueFnSymbol, String removeFnSymbol,
- String finalizeFnSymbol, boolean ignoresDistinct, boolean isAnalyticFn,
- boolean returnsNonNullOnEmpty, boolean vectorized) {
+ List argTypes, Type retType, Type intermediateType,
+ String initFnSymbol, String updateFnSymbol, String mergeFnSymbol,
+ String serializeFnSymbol, String getValueFnSymbol, String removeFnSymbol,
+ String finalizeFnSymbol, boolean ignoresDistinct, boolean isAnalyticFn,
+ boolean returnsNonNullOnEmpty, boolean vectorized) {
return createBuiltin(name, argTypes, retType, intermediateType, false,
initFnSymbol, updateFnSymbol, mergeFnSymbol,
serializeFnSymbol, getValueFnSymbol, removeFnSymbol,
@@ -218,22 +225,22 @@ public class AggregateFunction extends Function {
}
public static AggregateFunction createBuiltin(String name,
- List argTypes, Type retType, Type intermediateType, boolean hasVarArgs,
- String initFnSymbol, String updateFnSymbol, String mergeFnSymbol,
- String serializeFnSymbol, String getValueFnSymbol, String removeFnSymbol,
- String finalizeFnSymbol, boolean ignoresDistinct, boolean isAnalyticFn,
- boolean returnsNonNullOnEmpty) {
+ List argTypes, Type retType, Type intermediateType, boolean hasVarArgs,
+ String initFnSymbol, String updateFnSymbol, String mergeFnSymbol,
+ String serializeFnSymbol, String getValueFnSymbol, String removeFnSymbol,
+ String finalizeFnSymbol, boolean ignoresDistinct, boolean isAnalyticFn,
+ boolean returnsNonNullOnEmpty) {
return createBuiltin(name, argTypes, retType, intermediateType, hasVarArgs, initFnSymbol, updateFnSymbol,
mergeFnSymbol, serializeFnSymbol, getValueFnSymbol, removeFnSymbol, finalizeFnSymbol, ignoresDistinct,
isAnalyticFn, returnsNonNullOnEmpty, false);
}
public static AggregateFunction createBuiltin(String name,
- List