[fix](hierarchical-storage) Fix bug where the storage medium property changes back to SSD (#9158)
1. Fix the bug described in #9159.
2. Fix a `fill_tuple` bug introduced in #9173.
@@ -467,6 +467,7 @@ Status BrokerScanner::_convert_one_row(const Slice& line, Tuple* tuple, MemPool*
     RETURN_IF_ERROR(_line_to_src_tuple(line));
     if (!_success) {
         // If not success, which means we met an invalid row, return.
+        *fill_tuple = false;
         return Status::OK();
     }
 
@@ -141,5 +141,7 @@
         "lo": 10
     },
     "preferred_rowset_type": "BETA_ROWSET",
-    "tablet_type": "TABLET_TYPE_DISK"
+    "tablet_type": "TABLET_TYPE_DISK",
+    "storage_medium": "HDD",
+    "remote_storage_name": ""
 }
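This hunk extends the expected tablet-meta JSON: alongside `tablet_type`, the meta now records `storage_medium` and `remote_storage_name` explicitly, so the medium survives serialization instead of being re-derived from a default later. A tiny illustration of why an absent key is risky, using a hand-rolled lookup rather than Doris's real meta classes (everything below is a hypothetical sketch):

```java
// Illustrative only: shows why an absent "storage_medium" key is dangerous.
// A reader that falls back to a default medium will silently flip persisted
// HDD tablets back to that default; writing the field removes the ambiguity.
class TabletMetaMediumSketch {
    static String storageMediumOf(String metaJson, String defaultMedium) {
        int idx = metaJson.indexOf("\"storage_medium\"");
        if (idx < 0) {
            return defaultMedium; // old metas: silent fallback
        }
        int colon = metaJson.indexOf(':', idx);
        int firstQuote = metaJson.indexOf('"', colon + 1);
        int secondQuote = metaJson.indexOf('"', firstQuote + 1);
        return metaJson.substring(firstQuote + 1, secondQuote);
    }

    public static void main(String[] args) {
        String withField = "{ \"tablet_type\": \"TABLET_TYPE_DISK\", \"storage_medium\": \"HDD\" }";
        String withoutField = "{ \"tablet_type\": \"TABLET_TYPE_DISK\" }";
        System.out.println(storageMediumOf(withField, "SSD"));    // HDD
        System.out.println(storageMediumOf(withoutField, "SSD")); // SSD (fallback)
    }
}
```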
@@ -295,7 +295,6 @@ import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.AtomicBoolean;
 import java.util.concurrent.atomic.AtomicLong;
 import java.util.stream.Collectors;
 
 import javax.annotation.Nullable;
 
 public class Catalog {
@@ -4997,7 +4996,8 @@ public class Catalog {
                 if (dataProperty.getStorageMedium() == TStorageMedium.SSD
                         && dataProperty.getCooldownTimeMs() < currentTimeMs) {
                     // expire. change to HDD.
-                    partitionInfo.setDataProperty(partition.getId(), new DataProperty(TStorageMedium.HDD));
+                    DataProperty hddProperty = new DataProperty(TStorageMedium.HDD);
+                    partitionInfo.setDataProperty(partition.getId(), hddProperty);
                     storageMediumMap.put(partitionId, TStorageMedium.HDD);
                     LOG.debug("partition[{}-{}-{}] storage medium changed from SSD to HDD",
                             dbId, tableId, partitionId);
@@ -5006,7 +5006,7 @@ public class Catalog {
                     ModifyPartitionInfo info =
                             new ModifyPartitionInfo(db.getId(), olapTable.getId(),
                                     partition.getId(),
-                                    DataProperty.DEFAULT_DATA_PROPERTY,
+                                    hddProperty,
                                     ReplicaAllocation.NOT_SET,
                                     partitionInfo.getIsInMemory(partition.getId()));
                     editLog.logModifyPartition(info);
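The point of the two `Catalog` hunks is that the property written to the edit log must be the same one applied in memory: previously `DataProperty.DEFAULT_DATA_PROPERTY` was persisted, so replaying the journal flipped the cooled-down partition's medium back. A condensed sketch of the corrected flow; the classes below are simplified stand-ins, not the real Doris types:

```java
import java.util.HashMap;
import java.util.Map;

// Simplified stand-ins for DataProperty / PartitionInfo / EditLog; the real
// classes live in org.apache.doris.catalog and org.apache.doris.persist.
public class CooldownSketch {
    enum StorageMedium { SSD, HDD }

    static final class DataProperty {
        final StorageMedium medium;
        DataProperty(StorageMedium medium) { this.medium = medium; }
    }

    final Map<Long, DataProperty> partitionInfo = new HashMap<>(); // in-memory state
    final Map<Long, DataProperty> editLog = new HashMap<>();       // pretend journal

    void cooldown(long partitionId, DataProperty current, long nowMs, long cooldownTimeMs) {
        if (current.medium == StorageMedium.SSD && cooldownTimeMs < nowMs) {
            // Build the HDD property once and persist the SAME instance to the
            // edit log; logging a default property is what caused the medium
            // to change back to SSD on replay.
            DataProperty hddProperty = new DataProperty(StorageMedium.HDD);
            partitionInfo.put(partitionId, hddProperty);
            editLog.put(partitionId, hddProperty);
        }
    }
}
```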
@@ -67,7 +67,6 @@ import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.UUID;
 
 import javax.servlet.http.HttpServletRequest;
 import javax.servlet.http.HttpServletResponse;
 
@@ -129,6 +128,9 @@ public class TableQueryPlanAction extends RestBaseController {
             // status code should conforms to HTTP semantic
             resultMap.put("status", e.getCode().code());
             resultMap.put("exception", e.getMessage());
+        } catch (Exception e) {
+            resultMap.put("status", "1");
+            resultMap.put("exception", e.getMessage());
         }
         return ResponseEntityBuilder.ok(resultMap);
     }
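The added catch-all presumably keeps unexpected runtime failures inside the same JSON result shape the endpoint already uses for its typed HTTP exception, instead of letting them escape as an unformatted server error. A stand-alone approximation, with simplified stand-ins for the real exception and response types:

```java
import java.util.HashMap;
import java.util.Map;

// Approximation of the handler's error funnel; IllegalArgumentException
// stands in for DorisHttpException, and the Map for the JSON response body.
public class QueryPlanErrorFunnel {
    static Map<String, Object> handle(Runnable action) {
        Map<String, Object> resultMap = new HashMap<>();
        try {
            action.run();
            resultMap.put("status", 200);
        } catch (IllegalArgumentException e) {
            // Known, typed failure: report its own status code.
            resultMap.put("status", 400);
            resultMap.put("exception", e.getMessage());
        } catch (Exception e) {
            // New catch-all from this hunk: anything else still yields a
            // well-formed payload with the generic status "1".
            resultMap.put("status", "1");
            resultMap.put("exception", e.getMessage());
        }
        return resultMap;
    }

    public static void main(String[] args) {
        System.out.println(handle(() -> { throw new IllegalStateException("boom"); }));
    }
}
```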
@@ -252,7 +254,7 @@ public class TableQueryPlanAction extends RestBaseController {
         for (TScanRangeLocations scanRangeLocations : scanRangeLocationsList) {
             // only process palo(doris) scan range
             TPaloScanRange scanRange = scanRangeLocations.scan_range.palo_scan_range;
-            Node tabletRouting = new Node(Long.parseLong(scanRange.version), Integer.parseInt(scanRange.schema_hash));
+            Node tabletRouting = new Node(Long.parseLong(scanRange.version), 0 /* schema hash is not used */);
             for (TNetworkAddress address : scanRange.hosts) {
                 tabletRouting.addRouting(address.hostname + ":" + address.port);
             }
@@ -1,182 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements. See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership. The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License. You may obtain a copy of the License at
-//
-//   http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing,
-// software distributed under the License is distributed on an
-// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-// KIND, either express or implied. See the License for the
-// specific language governing permissions and limitations
-// under the License.
-
-package org.apache.doris.analysis;
-
-import org.apache.doris.catalog.Catalog;
-import org.apache.doris.catalog.Column;
-import org.apache.doris.catalog.Database;
-import org.apache.doris.catalog.KeysType;
-import org.apache.doris.catalog.OlapTable;
-import org.apache.doris.catalog.PrimitiveType;
-import org.apache.doris.catalog.SinglePartitionInfo;
-import org.apache.doris.catalog.View;
-import org.apache.doris.common.jmockit.Deencapsulation;
-import org.apache.doris.common.util.SqlParserUtils;
-import org.apache.doris.mysql.privilege.PaloAuth;
-import org.apache.doris.mysql.privilege.PrivPredicate;
-import org.apache.doris.persist.AlterViewInfo;
-import org.apache.doris.persist.CreateTableInfo;
-import org.apache.doris.persist.EditLog;
-import org.apache.doris.qe.ConnectContext;
-
-import com.google.common.collect.Lists;
-
-import org.junit.Assert;
-import org.junit.Before;
-import org.junit.Test;
-
-import java.io.StringReader;
-import java.util.LinkedList;
-import java.util.List;
-
-import mockit.Expectations;
-import mockit.Mock;
-import mockit.MockUp;
-import mockit.Mocked;
-
-public class AlterViewStmtTest {
-    private Analyzer analyzer;
-
-    private Catalog catalog;
-
-    @Mocked
-    EditLog editLog;
-
-    @Mocked
-    private ConnectContext connectContext;
-
-    @Mocked
-    private PaloAuth auth;
-
-    @Before
-    public void setUp() {
-        catalog = Deencapsulation.newInstance(Catalog.class);
-        analyzer = new Analyzer(catalog, connectContext);
-
-        Database db = new Database(50000L, "testCluster:testDb");
-
-        Column column1 = new Column("col1", PrimitiveType.BIGINT);
-        Column column2 = new Column("col2", PrimitiveType.DOUBLE);
-
-        List<Column> baseSchema = new LinkedList<Column>();
-        baseSchema.add(column1);
-        baseSchema.add(column2);
-
-        OlapTable table = new OlapTable(30000, "testTbl",
-                baseSchema, KeysType.AGG_KEYS, new SinglePartitionInfo(), null);
-        db.createTable(table);
-
-        new Expectations(auth) {
-            {
-                auth.checkGlobalPriv((ConnectContext) any, (PrivPredicate) any);
-                minTimes = 0;
-                result = true;
-
-                auth.checkDbPriv((ConnectContext) any, anyString, (PrivPredicate) any);
-                minTimes = 0;
-                result = true;
-
-                auth.checkTblPriv((ConnectContext) any, anyString, anyString, (PrivPredicate) any);
-                minTimes = 0;
-                result = true;
-            }
-        };
-
-        new Expectations(editLog) {
-            {
-                editLog.logCreateTable((CreateTableInfo) any);
-                minTimes = 0;
-
-                editLog.logModifyViewDef((AlterViewInfo) any);
-                minTimes = 0;
-            }
-        };
-
-        Deencapsulation.setField(catalog, "editLog", editLog);
-
-        new MockUp<Catalog>() {
-            @Mock
-            Catalog getCurrentCatalog() {
-                return catalog;
-            }
-            @Mock
-            PaloAuth getAuth() {
-                return auth;
-            }
-            @Mock
-            Database getDbOrDdlException(long dbId) {
-                return db;
-            }
-            @Mock
-            Database getDbOrDdlException(String dbName) {
-                return db;
-            }
-            @Mock
-            Database getDbOrAnalysisException(long dbId) {
-                return db;
-            }
-            @Mock
-            Database getDbOrAnalysisException(String dbName) {
-                return db;
-            }
-        };
-
-        new MockUp<Analyzer>() {
-            @Mock
-            String getClusterName() {
-                return "testCluster";
-            }
-        };
-    }
-
-    @Test
-    public void testNormal() throws Exception {
-        String originStmt = "select col1 as c1, sum(col2) as c2 from testDb.testTbl group by col1";
-        View view = new View(30000L, "testView", null);
-        view.setInlineViewDefWithSqlMode("select col1 as c1, sum(col2) as c2 from testDb.testTbl group by col1", 0L);
-        view.init();
-
-        Database db = analyzer.getCatalog().getDbOrAnalysisException("testDb");
-        db.createTable(view);
-
-        Assert.assertEquals(originStmt, view.getInlineViewDef());
-
-        String alterStmt = "with testTbl_cte (w1, w2) as (select col1, col2 from testDb.testTbl) select w1 as c1, sum(w2) as c2 from testTbl_cte where w1 > 10 group by w1 order by w1";
-        SqlParser parser = new SqlParser(new SqlScanner(new StringReader(alterStmt)));
-        QueryStmt alterQueryStmt = (QueryStmt) SqlParserUtils.getFirstStmt(parser);
-
-        ColWithComment col1 = new ColWithComment("h1", null);
-        ColWithComment col2 = new ColWithComment("h2", null);
-
-        AlterViewStmt alterViewStmt = new AlterViewStmt(new TableName("testDb", "testView"), Lists.newArrayList(col1, col2), alterQueryStmt);
-        alterViewStmt.analyze(analyzer);
-        Catalog catalog1 = analyzer.getCatalog();
-        if (catalog1 == null) {
-            System.out.println("cmy get null");
-            return;
-        }
-        catalog1.alterView(alterViewStmt);
-
-        View newView = (View) db.getTableOrAnalysisException("testView");
-        Assert.assertEquals("WITH testTbl_cte(w1, w2) AS (SELECT `col1` AS `col1`, `col2` AS `col2` FROM `testCluster:testDb`.`testTbl`)" +
-                " SELECT `w1` AS `h1`, sum(`w2`) AS `h2` FROM `testTbl_cte` WHERE `w1` > 10 GROUP BY `w1` ORDER BY `w1`",
-                newView.getInlineViewDef());
-    }
-}
@@ -22,9 +22,6 @@ import org.apache.doris.qe.ConnectContext;
 import org.apache.doris.utframe.UtFrameUtils;
 
 import org.junit.Assert;
 import org.junit.Test;
 
 import java.util.UUID;
 
 public class ExplainTest {
@@ -75,13 +72,6 @@ public class ExplainTest {
         Assert.assertEquals(dropDbStmt.toSql(), dropSchemaStmt.toSql());
     }
-
-    public void testExplainInsertInto() throws Exception {
-        String sql = "explain insert into test_explain.explain_t1 select * from test_explain.explain_t2";
-        String explainString = UtFrameUtils.getSQLPlanOrErrorMsg(ctx, sql, true);
-        System.out.println(explainString);
-        Assert.assertTrue(explainString.contains("CAST"));
-    }
 
     public void testExplainSelect() throws Exception {
         String sql = "explain select * from test_explain.explain_t1 where dt = '1001';";
         String explainString = UtFrameUtils.getSQLPlanOrErrorMsg(ctx, sql, false);
@@ -89,6 +79,13 @@ public class ExplainTest {
         Assert.assertFalse(explainString.contains("CAST"));
     }
+
+    public void testExplainInsertInto() throws Exception {
+        String sql = "explain verbose insert into test_explain.explain_t1 select * from test_explain.explain_t2";
+        String explainString = UtFrameUtils.getSQLPlanOrErrorMsg(ctx, sql, true);
+        System.out.println(explainString);
+        Assert.assertTrue(explainString.contains("CAST"));
+    }
 
     public void testExplainVerboseSelect() throws Exception {
         String queryStr = "explain verbose select * from test_explain.explain_t1 where dt = '1001';";
         String explainString = UtFrameUtils.getSQLPlanOrErrorMsg(ctx, queryStr, true);
@@ -17,6 +17,7 @@
 
 package org.apache.doris.catalog;
 
+import org.apache.doris.analysis.AlterViewStmt;
 import org.apache.doris.analysis.CreateDbStmt;
 import org.apache.doris.analysis.CreateTableStmt;
 import org.apache.doris.analysis.CreateViewStmt;
@@ -129,4 +130,22 @@ public class CreateViewTest {
         System.out.println(explainString);
         Assert.assertTrue(explainString.contains("OlapScanNode"));
     }
+
+    @Test
+    public void testAlterView() throws Exception {
+        String originStmt = "select k1 as kc1, sum(k2) as kc2 from test.tbl1 group by kc1";
+        ExceptionChecker.expectThrowsNoException(
+                () -> createView("create view test.alter1 as " + originStmt));
+        Database db = Catalog.getCurrentCatalog().getDbOrDdlException("default_cluster:test");
+        View alter1 = (View) db.getTableOrDdlException("alter1");
+        Assert.assertEquals("SELECT `k1` AS `kc1`, sum(`k2`) AS `kc2` FROM `default_cluster:test`.`tbl1` GROUP BY `kc1`", alter1.getInlineViewDef());
+
+        String alterStmt = "alter view test.alter1 as with test1_cte (w1, w2) as (select k1, k2 from test.tbl1) select w1 as c1, sum(w2) as c2 from test1_cte where w1 > 10 group by w1 order by w1";
+        AlterViewStmt alterViewStmt = (AlterViewStmt) UtFrameUtils.parseAndAnalyzeStmt(alterStmt, connectContext);
+        Catalog.getCurrentCatalog().alterView(alterViewStmt);
+
+        alter1 = (View) db.getTableOrDdlException("alter1");
+        System.out.println(alter1.getInlineViewDef());
+        Assert.assertEquals("WITH test1_cte(w1, w2) AS (SELECT `k1` AS `k1`, `k2` AS `k2` FROM `default_cluster:test`.`tbl1`) SELECT `w1` AS `c1`, sum(`w2`) AS `c2` FROM `test1_cte` WHERE `w1` > 10 GROUP BY `w1` ORDER BY `w1` ASC", alter1.getInlineViewDef());
+    }
 }
@@ -48,7 +48,7 @@ public class TableQueryPlanActionTest extends DorisHttpTestCase {
     }
     @Test
     public void testQueryPlanAction() throws IOException, TException {
-        RequestBody body = RequestBody.create(JSON, "{ \"sql\" : \" select k1,k2 from " + DB_NAME + "." + TABLE_NAME + " \" }");
+        RequestBody body = RequestBody.create("{ \"sql\" : \" select k1,k2 from " + DB_NAME + "." + TABLE_NAME + " \" }", JSON);
         Request request = new Request.Builder()
                 .post(body)
                 .addHeader("Authorization", rootAuth)
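The swapped argument order matches OkHttp 4.x, where `RequestBody.create(MediaType, String)` is deprecated in favor of the Kotlin extension `String.toRequestBody`, which is exposed to Java as `RequestBody.create(String, MediaType)`. A self-contained sketch of the new call shape; the URL and payload are placeholders, not taken from the test:

```java
import okhttp3.MediaType;
import okhttp3.OkHttpClient;
import okhttp3.Request;
import okhttp3.RequestBody;
import okhttp3.Response;

public class QueryPlanRequestSketch {
    private static final MediaType JSON = MediaType.parse("application/json; charset=utf-8");

    public static void main(String[] args) throws Exception {
        // OkHttp 4.x style: content first, media type second.
        RequestBody body = RequestBody.create("{ \"sql\": \"select k1, k2 from db.tbl\" }", JSON);
        Request request = new Request.Builder()
                .url("http://127.0.0.1:8030/api/db/tbl/_query_plan") // placeholder FE address
                .post(body)
                .build();
        try (Response response = new OkHttpClient().newCall(request).execute()) {
            System.out.println(response.body().string());
        }
    }
}
```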
@@ -67,8 +67,6 @@ public class TableQueryPlanActionTest extends DorisHttpTestCase {
             Assert.assertNotNull(tabletObject.get("routings"));
             Assert.assertEquals(3, ((JSONArray) tabletObject.get("routings")).size());
-            Assert.assertEquals(testStartVersion, (long) tabletObject.get("version"));
-            Assert.assertEquals(testSchemaHash, (long) tabletObject.get("schemaHash"));
 
         }
         String queryPlan = (String) ((JSONObject) jsonObject.get("data")).get("opaqued_query_plan");
         Assert.assertNotNull(queryPlan);
@@ -1991,8 +1991,8 @@ public class QueryPlanTest {
     public void testExplainInsertInto() throws Exception {
         ExplainTest explainTest = new ExplainTest();
         explainTest.before(connectContext);
-        explainTest.testExplainInsertInto();
         explainTest.testExplainSelect();
+        explainTest.testExplainInsertInto();
         explainTest.testExplainVerboseSelect();
         explainTest.testExplainConcatSelect();
         explainTest.testExplainVerboseConcatSelect();
@@ -2097,7 +2097,8 @@ public class QueryPlanTest {
                 " (SELECT 4 AS bid)b ON (a.aid=b.bid)\n";
         String explainString = UtFrameUtils.getSQLPlanOrErrorMsg(connectContext, queryStr);
         Assert.assertFalse(explainString.contains("OUTPUT EXPRS:3 | 4"));
-        Assert.assertTrue(explainString.contains("OUTPUT EXPRS:CAST(`a`.`aid` AS INT) | 4"));
+        System.out.println(explainString);
+        Assert.assertTrue(explainString.contains("OUTPUT EXPRS:`a`.`aid` | 4"));
     }
 
     @Test
@@ -20,11 +20,11 @@
 // **Note**: default db will be create if not exist
 defaultDb = "regression_test"
 
-jdbcUrl = "jdbc:mysql://127.0.0.1:9030/?"
+jdbcUrl = "jdbc:mysql://127.0.0.1:9232/?"
 jdbcUser = "root"
 jdbcPassword = ""
 
-feHttpAddress = "127.0.0.1:8030"
+feHttpAddress = "127.0.0.1:8232"
 feHttpUser = "root"
 feHttpPassword = ""
 
@@ -1,6 +1,6 @@
 -- This file is automatically generated. You should know what you did if you want to edit this
 -- !select_tb --
-1 2 4 8 50string 500varchar c 65535varchar 0.000000 123456789012345678.123456789 2013-12-01 1900-01-01T00:00 1 2 4 8 50string 500varchar_replace c 65535varchar 12345678901234.123456 123456789012345678.123456789 1900-01-01 1900-01-01 1900-01-01 1900-01-01T00:00 1900-01-01T00:00 1900-01-01T00:00 0.4 0.8
+1 2 4 8 50string 500varchar c 65535varchar 0 123456789012345678.123456789 2013-12-01 1900-01-01T00:00 1 2 4 8 50string 500varchar_replace c 65535varchar 12345678901234.123456 123456789012345678.123456789 1900-01-01 1900-01-01 1900-01-01 1900-01-01T00:00 1900-01-01T00:00 1900-01-01T00:00 0.4 0.8
 
 -- !desc_tb --
 tinyint_key TINYINT No true \N
@@ -0,0 +1,7 @@
+-- This file is automatically generated. You should know what you did if you want to edit this
+-- !select_str_tb --
+\N \N \N
+ d41d8cd98f00b204e9800998ecf8427e 0
+1 1f44fb91f47cab16f711973af06294a0 65536
+2 3c514d3b89e26e2f983b7bd4cbb82055 1048576
+
@@ -6,4 +6,10 @@
 0
 
+-- !test_issue_8850_3 --
+0
+
+-- !test_issue_8850_4 --
+
+-- !test_issue_8850_5 --
 0
 
@@ -1,5 +1,5 @@
 -- This file is automatically generated. You should know what you did if you want to edit this
--- !explode_json_array --
+-- !explode_json_array1 --
 100 John 30 1 Street 1 40 30
 100 John 30 1 Street 1 80 30
 100 John 30 1 Street 1 40 60
@@ -17,17 +17,17 @@
 400 Dan 50 4 Street 4 40 60
 400 Dan 50 4 Street 4 80 60
 
--- !explode_json_array --
+-- !explode_json_array2 --
 30 8
 60 8
 
--- !explode_json_array --
+-- !explode_json_array3 --
 100 John 30 1 Street 1 \N
 200 Mary \N 1 Street 2 \N
 300 Mike 80 3 Street 3 \N
 400 Dan 50 4 Street 4 \N
 
--- !explode_json_array --
+-- !explode_json_array4 --
 100 John 30 1 Street 1 1.23 1
 100 John 30 1 Street 1 22.214 1
 100 John 30 1 Street 1 214.1 1
@@ -65,7 +65,7 @@
 400 Dan 50 4 Street 4 22.214 b
 400 Dan 50 4 Street 4 214.1 b
 
--- !outer_join_explode_json_array --
+-- !outer_join_explode_json_array5 --
 \N \N 1
 \N \N 3
 \N \N b
@@ -79,10 +79,10 @@
 \N 80 3
 \N 80 b
 
--- !explode_json_array --
+-- !explode_json_array6 --
 true
 
--- !explode_json_array --
+-- !explode_json_array7 --
 100 John 30 1 Street 1 40 30
 100 John 30 1 Street 1 80 30
 100 John 30 1 Street 1 40 60
@@ -100,17 +100,13 @@ true
 400 Dan 50 4 Street 4 40 60
 400 Dan 50 4 Street 4 80 60
 
--- !explode_json_array --
+-- !explode_json_array8 --
 30 8
 60 8
 
--- !explode_json_array --
-100 John 30 1 Street 1 \N
-200 Mary \N 1 Street 2 \N
-300 Mike 80 3 Street 3 \N
-400 Dan 50 4 Street 4 \N
+-- !explode_json_array9 --
 
--- !explode_json_array --
+-- !explode_json_array10 --
 100 John 30 1 Street 1 1.23 1
 100 John 30 1 Street 1 22.214 1
 100 John 30 1 Street 1 214.1 1
@@ -66,7 +66,8 @@ suite("test_create_table_with_bloom_filter", "bloom_filter") {
             PROPERTIES (
                 "bloom_filter_columns"="smallint_key,int_key,bigint_key,char_50_key,character_key,
                     char_key,character_most_key,decimal_key,decimal_most_key,
-                    date_key,datetime_key"
+                    date_key,datetime_key",
+                "replication_num" = "1"
             )
             """
         sql """
@@ -35,7 +35,7 @@ suite("test_aggregate_table", "data_model") {
             int_value_replace_if_not_null int replace_if_not_null
         )
         AGGREGATE KEY(k)
-        DISTRIBUTED BY HASH(k) BUCKETS 5
+        DISTRIBUTED BY HASH(k) BUCKETS 5 properties("replication_num" = "1");
         """
     sql """insert into int_agg values(0, 1, 1, 1, 1, 1)"""
     sql """insert into int_agg values(0, 2, 2, 2, 2, 2)"""
@@ -54,7 +54,7 @@ suite("test_aggregate_table", "data_model") {
             char_value_replace_if_not_null char(10) replace_if_not_null
         )
         AGGREGATE KEY(k)
-        DISTRIBUTED BY HASH(k) BUCKETS 5
+        DISTRIBUTED BY HASH(k) BUCKETS 5 properties("replication_num" = "1");
         """
     sql """insert into string_agg values(0, '1', '1', '1', '1')"""
     sql """insert into string_agg values(0, '2', '2', '2', '2')"""
@@ -74,7 +74,7 @@ suite("test_aggregate_table", "data_model") {
             date_value_replace_if_not_null date replace_if_not_null
         )
         AGGREGATE KEY(k)
-        DISTRIBUTED BY HASH(k) BUCKETS 5
+        DISTRIBUTED BY HASH(k) BUCKETS 5 properties("replication_num" = "1");
         """
     sql """insert into date_agg values(0, '2000-01-01', '2000-01-01', '2000-01-01', '2000-01-01')"""
     sql """insert into date_agg values(0, '2000-12-31', '2000-12-31', '2000-12-31', '2000-12-31')"""
@@ -34,7 +34,7 @@ suite("test_duplicate_table", "data_model") {
             date_value date
         )
         DUPLICATE KEY(k)
-        DISTRIBUTED BY HASH(k) BUCKETS 5
+        DISTRIBUTED BY HASH(k) BUCKETS 5 properties("replication_num" = "1")
         """
     sql "insert into ${tbName} values(0, 1, 'test char', '2000-01-01')"
     sql "insert into ${tbName} values(0, 2, 'test int', '2000-02-02')"
@@ -53,7 +53,7 @@ suite("test_duplicate_table", "data_model") {
             k3 int,
             int_value int
         )
-        DISTRIBUTED BY HASH(k1) BUCKETS 5
+        DISTRIBUTED BY HASH(k1) BUCKETS 5 properties("replication_num" = "1");
         """
     sql "insert into ${tbName1} values(0, 1, 2, 4)"
     sql "insert into ${tbName1} values(0, 1, 2, 5)"
@@ -34,7 +34,7 @@ suite("test_unique_table", "data_model") {
             date_value date
         )
         UNIQUE KEY(k)
-        DISTRIBUTED BY HASH(k) BUCKETS 5
+        DISTRIBUTED BY HASH(k) BUCKETS 5 properties("replication_num" = "1");
         """
     sql "insert into ${tbName} values(0, 1, 'test char', '2000-01-01')"
     sql "insert into ${tbName} values(0, 2, 'test int', '2000-02-02')"
@@ -19,7 +19,7 @@ suite("test_bitmap_int", "datatype") {
     sql "DROP TABLE IF EXISTS test_int_bitmap"
     sql """
         CREATE TABLE test_int_bitmap (`id` int, `bitmap_set` bitmap bitmap_union)
-        ENGINE=OLAP DISTRIBUTED BY HASH(`id`) BUCKETS 5
+        ENGINE=OLAP DISTRIBUTED BY HASH(`id`) BUCKETS 5 properties("replication_num" = "1");
         """
     sql "insert into test_int_bitmap values(1, bitmap_hash(1)), (2, bitmap_hash(2)), (3, bitmap_hash(3))"
 
@@ -19,7 +19,7 @@ suite("test_hll_int", "datatype") {
     sql "DROP TABLE IF EXISTS test_int_hll"
     sql """
         CREATE TABLE test_int_hll (`id` int COMMENT "", `hll_set` hll hll_union COMMENT "")
-        ENGINE=OLAP DISTRIBUTED BY HASH(`id`) BUCKETS 5
+        ENGINE=OLAP DISTRIBUTED BY HASH(`id`) BUCKETS 5 properties("replication_num" = "1");
         """
     sql "insert into test_int_hll values(1, hll_hash(1)), (2, hll_hash(2)), (3, hll_hash(3))"
     qt_sql1 "select hll_union_agg(hll_set), count(*) from test_int_hll"
@@ -19,22 +19,23 @@ suite("test_string_basic", "datatype") {
     sql "drop table if exists fail_tb1"
     // first column could not be string
     test {
-        sql """CREATE TABLE fail_tb1 (k1 STRING NOT NULL, v1 STRING NOT NULL) DISTRIBUTED BY HASH(k1) BUCKETS 5"""
+        sql """CREATE TABLE fail_tb1 (k1 STRING NOT NULL, v1 STRING NOT NULL) DISTRIBUTED BY HASH(k1) BUCKETS 5 properties("replication_num" = "1")"""
         exception "The olap table first column could not be float, double, string use decimal or varchar instead."
     }
     // string type should could not be key
     test {
         sql """
             CREATE TABLE fail_tb1 ( k1 INT NOT NULL, k2 STRING NOT NULL)
-            DUPLICATE KEY(k1,k2) DISTRIBUTED BY HASH(k1) BUCKETS 5
+            DUPLICATE KEY(k1,k2) DISTRIBUTED BY HASH(k1) BUCKETS 5 properties("replication_num" = "1")
             """
         exception "String Type should not be used in key column[k2]"
     }
     // create table with string column, insert and select ok
     def tbName = "str_tb"
     sql "drop table if exists ${tbName}"
     sql """
         CREATE TABLE ${tbName} (k1 VARCHAR(10) NULL, v1 STRING NULL)
-        UNIQUE KEY(k1) DISTRIBUTED BY HASH(k1) BUCKETS 5
+        UNIQUE KEY(k1) DISTRIBUTED BY HASH(k1) BUCKETS 5 properties("replication_num" = "1")
         """
     sql """
         INSERT INTO ${tbName} VALUES
@@ -43,6 +44,6 @@ suite("test_string_basic", "datatype") {
         (1, repeat("test1111", 8192)),
         (2, repeat("test1111", 131072))
         """
-    order_qt_select_str_tb "select k1, md5(v1), length(v1) from test_sys_string_basic_test_insert_load_tb"
+    order_qt_select_str_tb "select k1, md5(v1), length(v1) from ${tbName}"
 }
 
@@ -36,7 +36,7 @@ suite("test_list_partition", "partition") {
             PARTITION p1 VALUES IN ("1","2","3","4"),
             PARTITION p2 VALUES IN ("5","6","7","8","9","10","11","12","13","14"),
             PARTITION p3 VALUES IN ("15") )
-        DISTRIBUTED BY HASH(k1) BUCKETS 5
+        DISTRIBUTED BY HASH(k1) BUCKETS 5 properties("replication_num" = "1")
         """
     List<List<Object>> result1 = sql "show tables like 'list_par'"
     logger.info("${result1}")
@@ -6,4 +6,4 @@ set enable_lateral_view=true;
 
 with d as (select f1.bucket, bitmap_and(f1.members, f2.members) as members from (select f1.bucket, bitmap_and(f1.members, f2.members) as members from (select bucket, bitmap_union(members) as members from tag_map where partition_sign='2022-03-31-1' and tag_group=810004 and tag_value_id in (5524627,5524628,5524629) group by bucket) f1,(select bucket, bitmap_union(members) as members from tag_map where partition_sign='2022-03-31-1' and tag_group=810007 and tag_value_id in ('5525013_17357124_5525019','5525013_17357124_5525020','5525013_17357124_5525021','5525013_17357124_5525022','5525013_17357124_5525023') group by bucket) f2 where f1.bucket=f2.bucket) f1, (select f1.bucket, bitmap_and(f1.members, f2.members) as members from (select f1.bucket, bitmap_and(f1.members, f2.members) as members from (select f1.bucket, bitmap_and(f1.members, f2.members) as members from (select bucket, bitmap_union(members) as members from tag_map where partition_sign='2022-03-31-1' and tag_group=660004 and tag_value_id in (1392235) group by bucket) f1,(select bucket, bitmap_union(members) as members from tag_map where partition_sign='2022-03-31-1' and tag_group=630004 and tag_value_id in (5404632) group by bucket) f2 where f1.bucket=f2.bucket) f1,(select bucket, bitmap_union(members) as members from tag_map where partition_sign='2022-03-31-1' and tag_group=420004 and tag_value_id in (5404628) group by bucket) f2 where f1.bucket=f2.bucket) f1,(select bucket, bitmap_union(members) as members from tag_map where partition_sign='2022-03-31-1' and tag_group=240004 and tag_value_id in (14622211) group by bucket) f2 where f1.bucket=f2.bucket) f2 where f1.bucket=f2.bucket) select bucket, member_id from d lateral view explode_bitmap(members) t as member_id;
 
-DROP TABLE test_map;
+DROP TABLE tag_map;
@@ -38,7 +38,7 @@ suite("load") {
             `k12` string replace null comment "",
             `k13` largeint(40) replace null comment ""
         ) engine=olap
-        DISTRIBUTED BY HASH(`k1`) BUCKETS 5
+        DISTRIBUTED BY HASH(`k1`) BUCKETS 5 properties("replication_num" = "1")
         """
     sql """
         CREATE TABLE `test` (
@@ -57,7 +57,7 @@ suite("load") {
             `k12` string replace_if_not_null null comment "",
             `k13` largeint(40) replace null comment ""
         ) engine=olap
-        DISTRIBUTED BY HASH(`k1`) BUCKETS 5
+        DISTRIBUTED BY HASH(`k1`) BUCKETS 5 properties("replication_num" = "1")
         """
     streamLoad {
         table "baseall"
@@ -39,48 +39,48 @@ suite("explode_json_array") {
     sql """ set enable_lateral_view = true """
 
     // not vectorized
-    qt_explode_json_array """ SELECT * FROM ${tableName}
+    qt_explode_json_array1 """ SELECT * FROM ${tableName}
             LATERAL VIEW EXPLODE_JSON_ARRAY_INT('[30, 60]') t1 as c_age
             LATERAL VIEW EXPLODE_JSON_ARRAY_INT('[40, 80]') t2 as d_age
             ORDER BY id, c_age, d_age """
 
-    qt_explode_json_array """ SELECT c_age, COUNT(1) FROM ${tableName}
+    qt_explode_json_array2 """ SELECT c_age, COUNT(1) FROM ${tableName}
             LATERAL VIEW EXPLODE_JSON_ARRAY_INT('[30, 60]') t1 as c_age
             LATERAL VIEW EXPLODE_JSON_ARRAY_INT('[40, 80]') t2 as d_age
             GROUP BY c_age ORDER BY c_age """
 
-    qt_explode_json_array """ SELECT * FROM ${tableName}
+    qt_explode_json_array3 """ SELECT * FROM ${tableName}
             LATERAL VIEW EXPLODE_JSON_ARRAY_INT('[]') t1 AS c_age
             ORDER BY id, c_age """
 
-    qt_explode_json_array """ SELECT * FROM ${tableName}
+    qt_explode_json_array4 """ SELECT * FROM ${tableName}
             LATERAL VIEW EXPLODE_JSON_ARRAY_STRING('[1, "b", 3]') t1 as c
             LATERAL VIEW EXPLODE_JSON_ARRAY_DOUBLE('[1.23, 22.214, 214.1]') t2 as d
             ORDER BY id, c, d """
 
-    qt_outer_join_explode_json_array """SELECT id, age, e1 FROM (SELECT id, age, e1 FROM (SELECT b.id, a.age FROM
+    qt_outer_join_explode_json_array5 """SELECT id, age, e1 FROM (SELECT id, age, e1 FROM (SELECT b.id, a.age FROM
             ${tableName} a LEFT JOIN ${tableName} b ON a.id=b.age)T LATERAL VIEW EXPLODE_JSON_ARRAY_STRING('[1, "b", 3]')
             TMP AS e1) AS T ORDER BY age, e1"""
 
     // vectorized
     sql """ set enable_vectorized_engine = true """
 
-    qt_explode_json_array """ select @@enable_vectorized_engine """
-    qt_explode_json_array """ SELECT * FROM ${tableName}
+    qt_explode_json_array6 """ select @@enable_vectorized_engine """
+    qt_explode_json_array7 """ SELECT * FROM ${tableName}
             LATERAL VIEW EXPLODE_JSON_ARRAY_INT('[30, 60]') t1 as c_age
             LATERAL VIEW EXPLODE_JSON_ARRAY_INT('[40, 80]') t2 as d_age
             ORDER BY id, c_age, d_age """
 
-    qt_explode_json_array """ SELECT c_age, COUNT(1) FROM ${tableName}
+    qt_explode_json_array8 """ SELECT c_age, COUNT(1) FROM ${tableName}
             LATERAL VIEW EXPLODE_JSON_ARRAY_INT('[30, 60]') t1 as c_age
             LATERAL VIEW EXPLODE_JSON_ARRAY_INT('[40, 80]') t2 as d_age
             GROUP BY c_age ORDER BY c_age """
 
-    qt_explode_json_array """ SELECT * FROM ${tableName}
+    qt_explode_json_array9 """ SELECT * FROM ${tableName}
             LATERAL VIEW EXPLODE_JSON_ARRAY_INT('[]') t1 AS c_age
             ORDER BY id, c_age """
 
-    qt_explode_json_array """ SELECT * FROM ${tableName}
+    qt_explode_json_array10 """ SELECT * FROM ${tableName}
             LATERAL VIEW EXPLODE_JSON_ARRAY_STRING('[1, "b", 3]') t1 as c
             LATERAL VIEW EXPLODE_JSON_ARRAY_DOUBLE('[1.23, 22.214, 214.1]') t2 as d
             ORDER BY id, c, d """
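The `qt_` renames in this suite pair with the `-- !explode_json_array<N> --` headers renamed in the `.out` hunks above: the regression framework appears to key each query's expected rows by its tag, so ten blocks all named `explode_json_array` cannot be resolved deterministically, hence the numbering. A toy version of that lookup, assuming only the header format visible in these files (this is not the real framework code):

```java
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

// Toy model of how a `.out` file maps result blocks to qt_ tags.
// With duplicate "-- !explode_json_array --" headers, a later block would
// clobber an earlier one, which is why this commit numbers the tags.
class OutFileSketch {
    static Map<String, List<String>> parse(List<String> lines) {
        Map<String, List<String>> blocks = new LinkedHashMap<>();
        String current = null;
        for (String line : lines) {
            if (line.startsWith("-- !") && line.endsWith(" --")) {
                current = line.substring(4, line.length() - 3);
                blocks.put(current, new ArrayList<>()); // duplicate tag clobbers!
            } else if (current != null && !line.isEmpty()) {
                blocks.get(current).add(line);
            }
        }
        return blocks;
    }
}
```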