From 42b3dd35bba0085aa70cdabbf80e7cc05bdd465f Mon Sep 17 00:00:00 2001
From: Guangdong Liu
Date: Thu, 7 Dec 2023 10:15:37 +0800
Subject: [PATCH] [regression test](broker load) add case for without filepath (#27658)

---
 .../apache/doris/analysis/BrokerLoadStmt.java |  2 +-
 .../org/apache/doris/analysis/ExportStmt.java |  2 +-
 .../org/apache/doris/analysis/LoadStmt.java   |  2 +-
 .../apache/doris/analysis/StorageBackend.java |  7 +-
 .../trees/plans/commands/ExportCommand.java   |  2 +-
 .../test_broker_load_without_filepath.groovy  | 97 +++++++++++++++++++
 6 files changed, 105 insertions(+), 7 deletions(-)
 create mode 100644 regression-test/suites/load_p0/broker_load/test_broker_load_without_filepath.groovy

diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/BrokerLoadStmt.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/BrokerLoadStmt.java
index 8fb813d562..55f2113504 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/analysis/BrokerLoadStmt.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/BrokerLoadStmt.java
@@ -88,7 +88,7 @@ public class BrokerLoadStmt extends InsertStmt {
                 String location = brokerDesc.getFileLocation(dataDescription.getFilePaths().get(i));
                 dataDescription.getFilePaths().set(i, location);
                 StorageBackend.checkPath(dataDescription.getFilePaths().get(i),
-                        brokerDesc.getStorageType());
+                        brokerDesc.getStorageType(), "DATA INFILE must be specified.");
                 dataDescription.getFilePaths().set(i, dataDescription.getFilePaths().get(i));
             }
         }
diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/ExportStmt.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/ExportStmt.java
index b6ec8df488..5034d01be1 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/analysis/ExportStmt.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/ExportStmt.java
@@ -178,7 +178,7 @@ public class ExportStmt extends StatementBase {
         }
 
         // check path is valid
-        StorageBackend.checkPath(path, brokerDesc.getStorageType());
+        StorageBackend.checkPath(path, brokerDesc.getStorageType(), null);
         if (brokerDesc.getStorageType() == StorageBackend.StorageType.BROKER) {
             BrokerMgr brokerMgr = analyzer.getEnv().getBrokerMgr();
             if (!brokerMgr.containsBroker(brokerDesc.getName())) {
diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/LoadStmt.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/LoadStmt.java
index ae2a18d47d..01a4490003 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/analysis/LoadStmt.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/LoadStmt.java
@@ -434,7 +434,7 @@ public class LoadStmt extends DdlStmt {
                 String location = brokerDesc.getFileLocation(dataDescription.getFilePaths().get(i));
                 dataDescription.getFilePaths().set(i, location);
                 StorageBackend.checkPath(dataDescription.getFilePaths().get(i),
-                        brokerDesc.getStorageType());
+                        brokerDesc.getStorageType(), "DATA INFILE must be specified.");
                 dataDescription.getFilePaths().set(i, dataDescription.getFilePaths().get(i));
             }
         }
diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/StorageBackend.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/StorageBackend.java
index 0e148c6c39..3b4cfefc99 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/analysis/StorageBackend.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/StorageBackend.java
@@ -35,9 +35,10 @@ public class StorageBackend implements ParseNode {
     private String location;
     private StorageDesc storageDesc;
 
-    public static void checkPath(String path, StorageBackend.StorageType type) throws AnalysisException {
+    public static void checkPath(String path, StorageBackend.StorageType type, String exceptionMsg)
+            throws AnalysisException {
         if (Strings.isNullOrEmpty(path)) {
-            throw new AnalysisException("No destination path specified.");
+            throw new AnalysisException(exceptionMsg == null ? "No destination path specified." : exceptionMsg);
         }
         checkUri(URI.create(path), type);
     }
@@ -120,7 +121,7 @@ public class StorageBackend implements ParseNode {
         if (Strings.isNullOrEmpty(location)) {
             throw new AnalysisException("You must specify a location on the repository");
         }
-        checkPath(location, storageType);
+        checkPath(location, storageType, null);
     }
 
     @Override
diff --git a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/ExportCommand.java b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/ExportCommand.java
index f2cd13a479..0aa34a0428 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/ExportCommand.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/nereids/trees/plans/commands/ExportCommand.java
@@ -220,7 +220,7 @@ public class ExportCommand extends Command implements ForwardWithSync {
 
     private void checkBrokerDesc(ConnectContext ctx) throws UserException {
         // check path is valid
-        StorageBackend.checkPath(this.path, this.brokerDesc.get().getStorageType());
+        StorageBackend.checkPath(this.path, this.brokerDesc.get().getStorageType(), null);
 
         if (brokerDesc.get().getStorageType() == StorageBackend.StorageType.BROKER) {
             BrokerMgr brokerMgr = ctx.getEnv().getBrokerMgr();
diff --git a/regression-test/suites/load_p0/broker_load/test_broker_load_without_filepath.groovy b/regression-test/suites/load_p0/broker_load/test_broker_load_without_filepath.groovy
new file mode 100644
index 0000000000..2dae37c615
--- /dev/null
+++ b/regression-test/suites/load_p0/broker_load/test_broker_load_without_filepath.groovy
@@ -0,0 +1,97 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements. See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership. The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License. You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied. See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+suite("test_broker_load_without_filepath", "load_p0") {
+    // define the test table
+    def testTable = "tbl_test_broker_load_without_filepath"
+
+    def a = 0;
+
+    def create_test_table = {testTablex ->
+        def result1 = sql """
+            CREATE TABLE IF NOT EXISTS ${testTable} (
+            `k1` BIGINT NOT NULL,
+            `k2` DATE NULL,
+            `k3` INT(11) NOT NULL,
+            `k4` INT(11) NOT NULL,
+            `v5` BIGINT SUM NULL DEFAULT "0"
+            ) ENGINE=OLAP
+            AGGREGATE KEY(`k1`, `k2`, `k3`, `k4`)
+            COMMENT 'OLAP'
+            DISTRIBUTED BY HASH(`k1`) BUCKETS 16
+            PROPERTIES (
+            "replication_allocation" = "tag.location.default: 1",
+            "storage_format" = "V2"
+            );
+            """
+
+        // DDL/DML statements return 1 row with 1 column; the only value is the updated row count
+        assertTrue(result1.size() == 1)
+        assertTrue(result1[0].size() == 1)
+        assertTrue(result1[0][0] == 0, "Create table should update 0 rows")
+
+        // insert 1 row to check whether the table is ok
+        def result2 = sql """ INSERT INTO ${testTable} VALUES
+            (1, '2023-09-01', 1, 1, 1)
+            """
+        assertTrue(result2.size() == 1)
+        assertTrue(result2[0].size() == 1)
+        assertTrue(result2[0][0] == 1, "Insert should update 1 row")
+    }
+
+    def load_from_hdfs_norm = {testTablex, label, hdfsFilePath, format, brokerName, hdfsUser, hdfsPasswd ->
+
+        test {
+            sql """
+            LOAD LABEL ${label} (
+                DATA INFILE("${hdfsFilePath}")
+                INTO TABLE ${testTablex}
+                COLUMNS TERMINATED BY ","
+                FORMAT as "${format}"
+            )
+            with BROKER "${brokerName}" (
+            "username"="${hdfsUser}",
+            "password"="${hdfsPasswd}")
+            PROPERTIES (
+            "timeout"="1200",
+            "max_filter_ratio"="0.1");
+            """
+            exception "DATA INFILE must be specified."
+        }
+
+    }
+
+    // If 'enableHdfs' in regression-conf.groovy has been set to true,
+    // the test will run the case below.
+    if (enableHdfs()) {
+        brokerName = getBrokerName()
+        hdfsUser = getHdfsUser()
+        hdfsPasswd = getHdfsPasswd()
+        def hdfs_csv_file_path = uploadToHdfs "load_p0/broker_load/broker_load_without_filepath.csv"
+
+        try {
+            sql "DROP TABLE IF EXISTS ${testTable}"
+            create_test_table.call(testTable)
+
+            def test_load_label = UUID.randomUUID().toString().replaceAll("-", "")
+            load_from_hdfs_norm.call(testTable, test_load_label, hdfs_csv_file_path, "csv",
+                brokerName, hdfsUser, hdfsPasswd)
+        } finally {
+            try_sql("DROP TABLE IF EXISTS ${testTable}")
+        }
+    }
+}
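
For reviewers, a minimal standalone Java sketch of the message-selection behavior that the new three-argument checkPath overload introduces. It is illustrative only: the class name and the simplified two-argument signature are invented for the sketch (the StorageType parameter and the follow-up checkUri(URI.create(path), type) call are omitted), and the nested AnalysisException stands in for the real Doris exception class.

// Illustrative sketch only -- not part of the patch. Doris internals
// (StorageType, checkUri, the real AnalysisException) are stubbed out.
public class CheckPathMessageSketch {

    static class AnalysisException extends Exception {
        AnalysisException(String msg) {
            super(msg);
        }
    }

    // Mirrors the patched logic: a null exceptionMsg falls back to the
    // original "No destination path specified." text.
    static void checkPath(String path, String exceptionMsg) throws AnalysisException {
        if (path == null || path.isEmpty()) {
            throw new AnalysisException(exceptionMsg == null ? "No destination path specified." : exceptionMsg);
        }
        // the real method goes on to validate the URI scheme for the storage type
    }

    public static void main(String[] args) {
        try {
            // Broker load callers (BrokerLoadStmt, LoadStmt) pass the load-specific message.
            checkPath("", "DATA INFILE must be specified.");
        } catch (AnalysisException e) {
            System.out.println(e.getMessage());   // prints: DATA INFILE must be specified.
        }
        try {
            // Export and repository callers pass null and keep the original message.
            checkPath("", null);
        } catch (AnalysisException e) {
            System.out.println(e.getMessage());   // prints: No destination path specified.
        }
    }
}

The new regression case asserts on the first branch: the broker load statement in test_broker_load_without_filepath.groovy is expected to fail analysis with "DATA INFILE must be specified." rather than the generic destination-path error.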