[ARRAY] Support array type load and select, not including access by index (#5980)

This is part of the array type support and has not been fully completed. 
The following functions are implemented
1. FE array type support and implementation of the array function, supporting array syntax analysis and planning
2. Support import array type data through insert into
3. Support select array type data
4. The array type is only supported in the value columns of the duplicate table

this pr merge some code from #4655 #4650 #4644 #4643 #4623 #2979
This commit is contained in:
Zhengguo Yang
2021-07-13 14:02:39 +08:00
committed by GitHub
parent 8fe5c75877
commit ed3ff470ce
115 changed files with 2919 additions and 754 deletions

View File

@ -34,6 +34,10 @@ import org.apache.doris.catalog.KeysType;
import org.apache.doris.catalog.PrimitiveType;
import org.apache.doris.catalog.ScalarType;
import org.apache.doris.catalog.Type;
import org.apache.doris.catalog.ArrayType;
import org.apache.doris.catalog.MapType;
import org.apache.doris.catalog.StructField;
import org.apache.doris.catalog.StructType;
import org.apache.doris.catalog.View;
import org.apache.doris.common.AnalysisException;
import org.apache.doris.common.Version;
@ -63,6 +67,7 @@ parser code {:
private boolean reportExpectedToken(Integer tokenId) {
if (SqlScanner.isKeyword(tokenId) ||
tokenId.intValue() == SqlParserSymbols.COMMA ||
tokenId.intValue() == SqlParserSymbols.DOT ||
tokenId.intValue() == SqlParserSymbols.IDENT) {
return true;
} else {
@ -228,10 +233,10 @@ parser code {:
:};
// Total keywords of doris
terminal String KW_ADD, KW_ADMIN, KW_AFTER, KW_AGGREGATE, KW_ALL, KW_ALTER, KW_AND, KW_ANTI, KW_APPEND, KW_AS, KW_ASC, KW_AUTHORS,
terminal String KW_ADD, KW_ADMIN, KW_AFTER, KW_AGGREGATE, KW_ALL, KW_ALTER, KW_AND, KW_ANTI, KW_APPEND, KW_AS, KW_ASC, KW_AUTHORS, KW_ARRAY,
KW_BACKEND, KW_BACKUP, KW_BETWEEN, KW_BEGIN, KW_BIGINT, KW_BITMAP, KW_BITMAP_UNION, KW_BOOLEAN, KW_BROKER, KW_BACKENDS, KW_BY, KW_BUILTIN,
KW_CANCEL, KW_CASE, KW_CAST, KW_CHAIN, KW_CHAR, KW_CHARSET, KW_CHECK, KW_CLUSTER, KW_CLUSTERS,
KW_COLLATE, KW_COLLATION, KW_COLUMN, KW_COLUMNS, KW_COMMENT, KW_COMMIT, KW_COMMITTED,
KW_COLLATE, KW_COLLATION, KW_COLUMN, KW_COLON, KW_COLUMNS, KW_COMMENT, KW_COMMIT, KW_COMMITTED,
KW_CONFIG, KW_CONNECTION, KW_CONNECTION_ID, KW_CONSISTENT, KW_CONVERT, KW_COUNT, KW_CREATE, KW_CROSS, KW_CUBE, KW_CURRENT, KW_CURRENT_USER,
KW_DATA, KW_DATABASE, KW_DATABASES, KW_DATE, KW_DATETIME, KW_DAY, KW_DECIMAL, KW_DECOMMISSION, KW_DEFAULT, KW_DESC, KW_DESCRIBE,
KW_DELETE, KW_DISTINCT, KW_DISTINCTPC, KW_DISTINCTPCSA, KW_DISTRIBUTED, KW_DISTRIBUTION, KW_DYNAMIC, KW_BUCKETS, KW_DIV, KW_DOUBLE, KW_DROP, KW_DROPP, KW_DUPLICATE,
@ -246,8 +251,8 @@ terminal String KW_ADD, KW_ADMIN, KW_AFTER, KW_AGGREGATE, KW_ALL, KW_ALTER, KW_A
KW_KEY, KW_KEYS, KW_KILL,
KW_LABEL, KW_LARGEINT, KW_LAST, KW_LEFT, KW_LESS, KW_LEVEL, KW_LIKE, KW_LIMIT, KW_LINK, KW_LIST, KW_LOAD,
KW_LOCAL, KW_LOCATION,
KW_MATERIALIZED, KW_MAX, KW_MAX_VALUE, KW_MERGE, KW_MIN, KW_MINUTE, KW_MINUS, KW_MIGRATE, KW_MIGRATIONS, KW_MODIFY, KW_MONTH,
KW_NAME, KW_NAMES, KW_NEGATIVE, KW_NO, KW_NOT, KW_NULL, KW_NULLS,
KW_MAP, KW_MATERIALIZED, KW_MAX, KW_MAX_VALUE, KW_MERGE, KW_MIN, KW_MINUTE, KW_MINUS, KW_MIGRATE, KW_MIGRATIONS, KW_MODIFY, KW_MONTH,
KW_NAME, KW_NAMED_STRUCT, KW_NAMES, KW_NEGATIVE, KW_NO, KW_NOT, KW_NULL, KW_NULLS,
KW_OBSERVER, KW_OFFSET, KW_ON, KW_ONLY, KW_OPEN, KW_OR, KW_ORDER, KW_OUTER, KW_OUTFILE, KW_OVER,
KW_PARTITION, KW_PARTITIONS, KW_PASSWORD, KW_PATH, KW_PAUSE, KW_PIPE, KW_PRECEDING,
KW_PLUGIN, KW_PLUGINS,
@ -257,7 +262,7 @@ terminal String KW_ADD, KW_ADMIN, KW_AFTER, KW_AGGREGATE, KW_ALL, KW_ALTER, KW_A
KW_REPAIR, KW_REPEATABLE, KW_REPOSITORY, KW_REPOSITORIES, KW_REPLACE, KW_REPLACE_IF_NOT_NULL, KW_REPLICA, KW_RESOURCE, KW_RESOURCES, KW_RESTORE, KW_RETURNS, KW_RESUME, KW_REVOKE,
KW_RIGHT, KW_ROLE, KW_ROLES, KW_ROLLBACK, KW_ROLLUP, KW_ROUTINE, KW_ROW, KW_ROWS,
KW_S3, KW_SCHEMA, KW_SCHEMAS, KW_SECOND, KW_SELECT, KW_SEMI, KW_SERIALIZABLE, KW_SESSION, KW_SET, KW_SETS, KW_SET_VAR, KW_SHOW, KW_SIGNED,
KW_SMALLINT, KW_SNAPSHOT, KW_SONAME, KW_SPLIT, KW_START, KW_STATUS, KW_STOP, KW_STORAGE, KW_STREAM, KW_STRING,
KW_SMALLINT, KW_SNAPSHOT, KW_SONAME, KW_SPLIT, KW_START, KW_STATUS, KW_STOP, KW_STORAGE, KW_STREAM, KW_STRING, KW_STRUCT,
KW_SUM, KW_SUPERUSER, KW_SYNC, KW_SYSTEM,
KW_TABLE, KW_TABLES, KW_TABLET, KW_TASK, KW_TEMPORARY, KW_TERMINATED, KW_THAN, KW_TIME, KW_THEN, KW_TIMESTAMP, KW_TINYINT,
KW_TO, KW_TRANSACTION, KW_TRIGGERS, KW_TRIM, KW_TRUE, KW_TRUNCATE, KW_TYPE, KW_TYPES,
@ -266,7 +271,7 @@ terminal String KW_ADD, KW_ADMIN, KW_AFTER, KW_AGGREGATE, KW_ALL, KW_ALTER, KW_A
KW_WARNINGS, KW_WEEK, KW_WHEN, KW_WHITELIST, KW_WHERE, KW_WITH, KW_WORK, KW_WRITE,
KW_YEAR;
terminal COMMA, DOT, DOTDOTDOT, AT, STAR, LPAREN, RPAREN, SEMICOLON, LBRACKET, RBRACKET, DIVIDE, MOD, ADD, SUBTRACT;
terminal COMMA, COLON, DOT, DOTDOTDOT, AT, STAR, LPAREN, RPAREN, SEMICOLON, LBRACKET, RBRACKET, DIVIDE, MOD, ADD, SUBTRACT;
terminal BITAND, BITOR, BITXOR, BITNOT;
terminal EQUAL, NOT, LESSTHAN, GREATERTHAN, SET_VAR;
terminal COMMENTED_PLAN_HINT_START, COMMENTED_PLAN_HINT_END;
@ -367,11 +372,14 @@ nonterminal LiteralExpr literal;
nonterminal CaseExpr case_expr;
nonterminal ArrayList<CaseWhenClause> case_when_clause_list;
nonterminal FunctionParams function_params;
nonterminal Expr function_call_expr;
nonterminal Expr function_call_expr, array_expr;
nonterminal StructField struct_field;
nonterminal ArrayList<StructField> struct_field_list;
nonterminal AnalyticWindow opt_window_clause;
nonterminal AnalyticWindow.Type window_type;
nonterminal AnalyticWindow.Boundary window_boundary;
nonterminal SlotRef column_ref;
nonterminal FunctionCallExpr column_subscript;
nonterminal ArrayList<TableRef> table_ref_list, base_table_ref_list;
nonterminal FromClause from_clause;
nonterminal TableRef table_ref;
@ -4087,6 +4095,12 @@ type ::=
:}
| KW_VARCHAR
{: RESULT = ScalarType.createVarcharType(-1); :}
| KW_ARRAY LESSTHAN type:value_type GREATERTHAN
{: RESULT = new ArrayType(value_type); :}
| KW_MAP LESSTHAN type:key_type COMMA type:value_type GREATERTHAN
{: RESULT = new MapType(key_type,value_type); :}
| KW_STRUCT LESSTHAN struct_field_list:fields GREATERTHAN
{: RESULT = new StructType(fields); :}
| KW_CHAR LPAREN INTEGER_LITERAL:len RPAREN
{: ScalarType type = ScalarType.createCharType(len.intValue());
type.setAssignedStrLenInColDefinition();
@ -4237,6 +4251,34 @@ function_call_expr ::=
:}
;
array_expr ::=
KW_ARRAY LPAREN function_params:params RPAREN
{:
RESULT = new FunctionCallExpr("array", params);
:}
| KW_ARRAY LPAREN RPAREN
{:
RESULT = new ArrayLiteral();
:}
;
struct_field ::=
ident:name COLON type:type
{: RESULT = new StructField(name, type); :}
;
struct_field_list ::=
struct_field:field
{:
RESULT = Lists.newArrayList(field);
:}
| struct_field_list:fields COMMA struct_field:field
{:
fields.add(field);
RESULT = fields;
:}
;
exists_predicate ::=
KW_EXISTS subquery:s
{: RESULT = new ExistsPredicate(s, false); :}
@ -4255,6 +4297,8 @@ non_pred_expr ::=
:}
| literal:l
{: RESULT = l; :}
| array_expr:a
{: RESULT = a; :}
| function_call_expr:e
{: RESULT = e; :}
| KW_DATE STRING_LITERAL:l
@ -4283,6 +4327,8 @@ non_pred_expr ::=
{: RESULT = c; :}
| column_ref:c
{: RESULT = c; :}
| column_subscript:c
{: RESULT = c; :}
| timestamp_arithmetic_expr:e
{: RESULT = e; :}
| arithmetic_expr:e
@ -4632,6 +4678,15 @@ column_ref ::=
{: RESULT = new SlotRef(new TableName(db, tbl), col); :}
;
column_subscript ::=
expr:e LBRACKET expr:index RBRACKET
{: ArrayList<Expr> list = new ArrayList<Expr>();
list.add(e);
list.add(index);
RESULT = new FunctionCallExpr("%element_extract%", list);
:}
;
privilege_type ::=
ident:name
{:
@ -4806,6 +4861,8 @@ keyword ::=
{: RESULT = id; :}
| KW_AUTHORS:id
{: RESULT = id; :}
| KW_ARRAY:id
{: RESULT = id; :}
| KW_BACKUP:id
{: RESULT = id; :}
| KW_BEGIN:id
@ -5086,6 +5143,8 @@ keyword ::=
{: RESULT = id; :}
| KW_FEATURE:id
{: RESULT = id; :}
| KW_MAP:id
{: RESULT = id; :}
;
// Identifier that contain keyword

View File

@ -0,0 +1,116 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package org.apache.doris.analysis;
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import org.apache.commons.lang.StringUtils;
import org.apache.doris.catalog.ArrayType;
import org.apache.doris.catalog.Type;
import org.apache.doris.thrift.TExprNode;
import org.apache.doris.thrift.TExprNodeType;
public class ArrayLiteral extends LiteralExpr {
public ArrayLiteral() {
this.type = new ArrayType(Type.NULL);
children = new ArrayList<>();
}
public ArrayLiteral(LiteralExpr... v) {
if (v.length < 1) {
this.type = new ArrayType(Type.NULL);
return;
}
this.type = new ArrayType(v[0].type);
children = new ArrayList<>(v.length);
children.addAll(Arrays.asList(v));
}
protected ArrayLiteral(ArrayLiteral other) {
super(other);
}
@Override
public boolean isMinValue() {
return false;
}
@Override
public int compareLiteral(LiteralExpr expr) {
return 0;
}
@Override
protected String toSqlImpl() {
List<String> list = new ArrayList<>(children.size());
children.forEach(v -> list.add(v.toSqlImpl()));
return "ARRAY(" + StringUtils.join(list, ", ") + ")";
}
@Override
public String getStringValue() {
List<String> list = new ArrayList<>(children.size());
children.forEach(v -> list.add(((LiteralExpr) v).getStringValue()));
return "ARRAY[" + StringUtils.join(list, ", ") + "]";
}
@Override
protected void toThrift(TExprNode msg) {
msg.node_type = TExprNodeType.ARRAY_LITERAL;
msg.setChildType(((ArrayType) type).getItemType().getPrimitiveType().toThrift());
}
@Override
public void write(DataOutput out) throws IOException {
super.write(out);
out.writeInt(children.size());
for (Expr e : children) {
Expr.writeTo(e, out);
}
}
@Override
public void readFields(DataInput in) throws IOException {
super.readFields(in);
int size = in.readInt();
children = new ArrayList<>(size);
for (int i = 0; i < size; i++) {
children.add(Expr.readIn(in));
}
}
public static ArrayLiteral read(DataInput in) throws IOException {
ArrayLiteral literal = new ArrayLiteral();
literal.readFields(in);
return literal;
}
@Override
public Expr clone() {
return new ArrayLiteral(this);
}
}

View File

@ -200,12 +200,30 @@ public class ColumnDef {
}
if (type.getPrimitiveType() == PrimitiveType.BITMAP) {
if (defaultValue.isSet) {
if (defaultValue.isSet && defaultValue != DefaultValue.NULL_DEFAULT_VALUE) {
throw new AnalysisException("Bitmap type column can not set default value");
}
defaultValue = DefaultValue.BITMAP_EMPTY_DEFAULT_VALUE;
}
if (type.getPrimitiveType() == PrimitiveType.ARRAY) {
if (defaultValue.isSet && defaultValue != DefaultValue.NULL_DEFAULT_VALUE) {
throw new AnalysisException("Array type column default value only support null");
}
}
if (type.getPrimitiveType() == PrimitiveType.MAP) {
if (defaultValue.isSet && defaultValue != DefaultValue.NULL_DEFAULT_VALUE) {
throw new AnalysisException("Map type column default value just support null");
}
}
if (type.getPrimitiveType() == PrimitiveType.STRUCT) {
if (defaultValue.isSet && defaultValue != DefaultValue.NULL_DEFAULT_VALUE) {
throw new AnalysisException("Struct type column default value just support null");
}
}
// If aggregate type is REPLACE_IF_NOT_NULL, we set it nullable.
// If default value is not set, we set it NULL
if (aggregateType == AggregateType.REPLACE_IF_NOT_NULL) {
@ -267,6 +285,12 @@ public class ColumnDef {
break;
case BITMAP:
break;
case ARRAY:
break;
case MAP:
break;
case STRUCT:
break;
case BOOLEAN:
BoolLiteral boolLiteral = new BoolLiteral(defaultValue);
break;

View File

@ -18,6 +18,7 @@
package org.apache.doris.analysis;
import org.apache.doris.catalog.AggregateType;
import org.apache.doris.catalog.ArrayType;
import org.apache.doris.catalog.Catalog;
import org.apache.doris.catalog.Column;
import org.apache.doris.catalog.Index;
@ -350,6 +351,21 @@ public class CreateTableStmt extends DdlStmt {
for (ColumnDef columnDef : columnDefs) {
columnDef.analyze(engineName.equals("olap"));
if (columnDef.getType().isArrayType()) {
ArrayType tp = (ArrayType) columnDef.getType();
if (!tp.getItemType().getPrimitiveType().isIntegerType() &&
!tp.getItemType().getPrimitiveType().isCharFamily()) {
throw new AnalysisException("Array column just support INT/VARCHAR sub-type");
}
if (columnDef.getAggregateType() != null && columnDef.getAggregateType() != AggregateType.NONE) {
throw new AnalysisException("Array column can't support aggregation " + columnDef.getAggregateType());
}
if (columnDef.isKey()) {
throw new AnalysisException("Array can only be used in the non-key column of" +
" the duplicate table at present.");
}
}
if (columnDef.getType().isHllType()) {
hasHll = true;
}

View File

@ -1582,7 +1582,8 @@ abstract public class Expr extends TreeNode<Expr> implements ParseNode, Cloneabl
DATE_LITERAL(9),
MAX_LITERAL(10),
BINARY_PREDICATE(11),
FUNCTION_CALL(12);
FUNCTION_CALL(12),
ARRAY_LITERAL(13);
private static Map<Integer, ExprSerCode> codeMap = Maps.newHashMap();
@ -1630,6 +1631,8 @@ abstract public class Expr extends TreeNode<Expr> implements ParseNode, Cloneabl
output.writeInt(ExprSerCode.BINARY_PREDICATE.getCode());
} else if (expr instanceof FunctionCallExpr) {
output.writeInt(ExprSerCode.FUNCTION_CALL.getCode());
} else if (expr instanceof ArrayLiteral) {
output.writeInt(ExprSerCode.ARRAY_LITERAL.getCode());
} else {
throw new IOException("Unknown class " + expr.getClass().getName());
}
@ -1671,6 +1674,8 @@ abstract public class Expr extends TreeNode<Expr> implements ParseNode, Cloneabl
return BinaryPredicate.read(in);
case FUNCTION_CALL:
return FunctionCallExpr.read(in);
case ARRAY_LITERAL:
return ArrayLiteral.read(in);
default:
throw new IOException("Unknown code: " + code);
}

View File

@ -19,12 +19,14 @@ package org.apache.doris.analysis;
import org.apache.doris.catalog.Catalog;
import org.apache.doris.catalog.Function;
import org.apache.doris.catalog.PrimitiveType;
import org.apache.doris.catalog.ScalarType;
import org.apache.doris.catalog.Type;
import org.apache.doris.common.AnalysisException;
import org.apache.doris.qe.ConnectContext;
import org.apache.doris.qe.VariableMgr;
import org.apache.doris.rewrite.FEFunction;
import org.apache.doris.rewrite.FEFunctionList;
import org.apache.doris.rewrite.FEFunctions;
import com.google.common.base.Joiner;
@ -96,7 +98,7 @@ public enum ExpressionFunctions {
argTypes.add((ScalarType) type);
}
FEFunctionSignature signature = new FEFunctionSignature(fn.functionName(),
argTypes.toArray(new ScalarType[argTypes.size()]), (ScalarType) fn.getReturnType());
argTypes.toArray(new ScalarType[argTypes.size()]), fn.getReturnType());
FEFunctionInvoker invoker = getFunction(signature);
if (invoker != null) {
try {
@ -149,22 +151,32 @@ public enum ExpressionFunctions {
new ImmutableMultimap.Builder<String, FEFunctionInvoker>();
Class clazz = FEFunctions.class;
for (Method method : clazz.getDeclaredMethods()) {
FEFunction annotation = method.getAnnotation(FEFunction.class);
if (annotation != null) {
String name = annotation.name();
ScalarType returnType = ScalarType.createType(annotation.returnType());
List<ScalarType> argTypes = new ArrayList<>();
for (String type : annotation.argTypes()) {
argTypes.add(ScalarType.createType(type));
FEFunctionList annotationList = method.getAnnotation(FEFunctionList.class);
if (annotationList != null) {
for (FEFunction f : annotationList.value()) {
registerFEFunction(mapBuilder, method, f);
}
FEFunctionSignature signature = new FEFunctionSignature(name,
argTypes.toArray(new ScalarType[argTypes.size()]), returnType);
mapBuilder.put(name, new FEFunctionInvoker(method, signature));
}
registerFEFunction(mapBuilder, method, method.getAnnotation(FEFunction.class));
}
this.functions = mapBuilder.build();
}
private void registerFEFunction(ImmutableMultimap.Builder<String, FEFunctionInvoker> mapBuilder,
Method method, FEFunction annotation) {
if (annotation != null) {
String name = annotation.name();
Type returnType = Type.fromPrimitiveType(PrimitiveType.valueOf(annotation.returnType()));
List<ScalarType> argTypes = new ArrayList<>();
for (String type : annotation.argTypes()) {
argTypes.add(ScalarType.createType(type));
}
FEFunctionSignature signature = new FEFunctionSignature(name,
argTypes.toArray(new ScalarType[argTypes.size()]), returnType);
mapBuilder.put(name, new FEFunctionInvoker(method, signature));
}
}
public static class FEFunctionInvoker {
private final Method method;
private final FEFunctionSignature signature;
@ -239,6 +251,12 @@ public enum ExpressionFunctions {
} else {
throw new IllegalArgumentException("Doris doesn't support type:" + argType);
}
// if args all is NullLiteral
long size = args.stream().filter(e -> e instanceof NullLiteral).count();
if (args.size() == size) {
exprs = new NullLiteral[args.size()];
}
args.toArray(exprs);
return exprs;
}
@ -247,9 +265,9 @@ public enum ExpressionFunctions {
public static class FEFunctionSignature {
private final String name;
private final ScalarType[] argTypes;
private final ScalarType returnType;
private final Type returnType;
public FEFunctionSignature(String name, ScalarType[] argTypes, ScalarType returnType) {
public FEFunctionSignature(String name, ScalarType[] argTypes, Type returnType) {
this.name = name;
this.argTypes = argTypes;
this.returnType = returnType;
@ -259,7 +277,7 @@ public enum ExpressionFunctions {
return argTypes;
}
public ScalarType getReturnType() {
public Type getReturnType() {
return returnType;
}

View File

@ -18,6 +18,7 @@
package org.apache.doris.analysis;
import org.apache.doris.catalog.AggregateFunction;
import org.apache.doris.catalog.ArrayType;
import org.apache.doris.catalog.Catalog;
import org.apache.doris.catalog.Database;
import org.apache.doris.catalog.Function;
@ -69,6 +70,7 @@ public class FunctionCallExpr extends Expr {
new ImmutableSortedSet.Builder(String.CASE_INSENSITIVE_ORDER)
.add("stddev").add("stddev_val").add("stddev_samp")
.add("variance").add("variance_pop").add("variance_pop").add("var_samp").add("var_pop").build();
private static final String ELEMENT_EXTRACT_FN_NAME = "%element_extract%";
public void setIsAnalyticFnCall(boolean v) {
isAnalyticFnCall = v;
@ -658,16 +660,18 @@ public class FunctionCallExpr extends Expr {
}
}
Type[] args = fn.getArgs();
if (args.length > 0) {
// Implicitly cast all the children to match the function if necessary
for (int i = 0; i < argTypes.length; ++i) {
// For varargs, we must compare with the last type in callArgs.argTypes.
int ix = Math.min(args.length - 1, i);
if (!argTypes[i].matchesType(args[ix]) && !(
argTypes[i].isDateType() && args[ix].isDateType())) {
uncheckedCastChild(args[ix], i);
//if (argTypes[i] != args[ix]) castChild(args[ix], i);
if (!fn.getFunctionName().getFunction().equals(ELEMENT_EXTRACT_FN_NAME)) {
Type[] args = fn.getArgs();
if (args.length > 0) {
// Implicitly cast all the children to match the function if necessary
for (int i = 0; i < argTypes.length; ++i) {
// For varargs, we must compare with the last type in callArgs.argTypes.
int ix = Math.min(args.length - 1, i);
if (!argTypes[i].matchesType(args[ix]) && !(
argTypes[i].isDateType() && args[ix].isDateType())) {
uncheckedCastChild(args[ix], i);
//if (argTypes[i] != args[ix]) castChild(args[ix], i);
}
}
}
}
@ -711,6 +715,18 @@ public class FunctionCallExpr extends Expr {
} else {
this.type = fn.getReturnType();
}
// rewrite return type if is nested type function
analyzeNestedFunction();
}
// if return type is nested type, need to be determined the sub-element type
private void analyzeNestedFunction() {
// array
if ("array".equalsIgnoreCase(fnName.getFunction())) {
if (children.size() > 0) {
this.type = new ArrayType(children.get(0).getType());
}
}
}
@Override
@ -798,4 +814,3 @@ public class FunctionCallExpr extends Expr {
return result;
}
}

View File

@ -93,7 +93,10 @@ public class RangePartitionDesc extends PartitionDesc {
partitionColumns.add(column);
find = true;
break;
}
if (column.getType().isComplexType()) {
throw new DdlException("Complex type column can't be partition column: "
+ column.getType().toString());
}
}
if (!find) {

View File

@ -20,10 +20,10 @@ package org.apache.doris.analysis;
import java.util.ArrayList;
import java.util.List;
import org.apache.doris.catalog.MultiRowType;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.doris.catalog.ArrayType;
import org.apache.doris.catalog.StructField;
import org.apache.doris.catalog.StructType;
import org.apache.doris.common.AnalysisException;
@ -102,8 +102,8 @@ public class Subquery extends Expr {
type = createStructTypeFromExprList();
}
// If the subquery returns many rows, set its type to ArrayType.
if (!((SelectStmt)stmt).returnsSingleRow()) type = new ArrayType(type);
// If the subquery returns many rows, set its type to MultiRowType.
if (!((SelectStmt)stmt).returnsSingleRow()) type = new MultiRowType(type);
// Preconditions.checkNotNull(type);
// type.analyze();

View File

@ -17,13 +17,20 @@
package org.apache.doris.analysis;
import org.apache.doris.catalog.ArrayType;
import org.apache.doris.catalog.MapType;
import org.apache.doris.catalog.PrimitiveType;
import org.apache.doris.catalog.ScalarType;
import org.apache.doris.catalog.StructField;
import org.apache.doris.catalog.StructType;
import org.apache.doris.catalog.Type;
import org.apache.doris.common.AnalysisException;
import org.apache.doris.common.Config;
import com.google.common.base.Preconditions;
import java.util.ArrayList;
/**
* Represents an anonymous type definition, e.g., used in DDL and CASTs.
*/
@ -38,15 +45,19 @@ public class TypeDef implements ParseNode {
public static TypeDef create(PrimitiveType type) {
return new TypeDef(ScalarType.createType(type));
}
public static TypeDef createDecimal(int precision, int scale) {
return new TypeDef(ScalarType.createDecimalV2Type(precision, scale));
}
public static TypeDef createVarchar(int len) {
return new TypeDef(ScalarType.createVarchar(len));
}
public static TypeDef createChar(int len) {
return new TypeDef(ScalarType.createChar(len));
}
@Override
public void analyze(Analyzer analyzer) throws AnalysisException {
if (isAnalyzed) {
@ -56,8 +67,8 @@ public class TypeDef implements ParseNode {
// a stack overflow.
if (parsedType.exceedsMaxNestingDepth()) {
throw new AnalysisException(String.format(
"Type exceeds the maximum nesting depth of %s:\n%s",
Type.MAX_NESTING_DEPTH, parsedType.toSql()));
"Type exceeds the maximum nesting depth of %s:\n%s",
Type.MAX_NESTING_DEPTH, parsedType.toSql()));
}
analyze(parsedType);
isAnalyzed = true;
@ -70,10 +81,49 @@ public class TypeDef implements ParseNode {
if (type.isScalarType()) {
analyzeScalarType((ScalarType) type);
}
if (type.isArrayType()) {
Type itemType = ((ArrayType) type).getItemType();
analyze(itemType);
}
if (type.isComplexType()) {
if (!Config.enable_complex_type_support) {
throw new AnalysisException("Unsupported data type: " + type.toSql());
}
if (type.isArrayType()) {
ScalarType itemType = (ScalarType) ((ArrayType) type).getItemType();
analyzeNestedType(itemType);
}
if (type.isMapType()) {
ScalarType keyType = (ScalarType) ((MapType) type).getKeyType();
ScalarType valueType = (ScalarType) ((MapType) type).getKeyType();
analyzeNestedType(keyType);
analyzeNestedType(valueType);
}
if (type.isStructType()) {
ArrayList<StructField> fields = ((StructType) type).getFields();
for (int i = 0; i < fields.size(); i++) {
ScalarType filedType = (ScalarType) fields.get(i).getType();
analyzeNestedType(filedType);
}
}
}
}
private void analyzeNestedType(ScalarType type) throws AnalysisException {
if (type.isNull()) {
throw new AnalysisException("Unsupported data type: " + type.toSql());
}
if (type.getPrimitiveType().isStringType()
&& !type.isAssignedStrLenInColDefinition()) {
type.setLength(1);
}
analyze(type);
}
private void analyzeScalarType(ScalarType scalarType)
throws AnalysisException {
throws AnalysisException {
PrimitiveType type = scalarType.getPrimitiveType();
switch (type) {
case CHAR:
@ -98,7 +148,7 @@ public class TypeDef implements ParseNode {
}
if (scalarType.getLength() > maxLen) {
throw new AnalysisException(
name + " size must be <= " + maxLen + ": " + len);
name + " size must be <= " + maxLen + ": " + len);
}
break;
}

View File

@ -17,27 +17,70 @@
package org.apache.doris.catalog;
import org.apache.doris.common.Config;
import org.apache.doris.thrift.TColumnType;
import org.apache.doris.thrift.TTypeDesc;
import org.apache.doris.thrift.TTypeNode;
import org.apache.doris.thrift.TTypeNodeType;
import com.google.common.base.Preconditions;
import com.google.common.base.Strings;
import com.google.gson.annotations.SerializedName;
/**
* Describes an ARRAY type.
*/
public class ArrayType extends Type {
private final Type itemType;
@SerializedName(value = "itemType")
private Type itemType;
public ArrayType() {
this.itemType = NULL;
}
public ArrayType(Type itemType) {
this.itemType = itemType;
}
public void setItemType(Type itemType) {
this.itemType = itemType;
}
public Type getItemType() {
return itemType;
}
@Override
public PrimitiveType getPrimitiveType() {
return PrimitiveType.ARRAY;
}
@Override
public boolean matchesType(Type t) {
if (equals(t)) {
return true;
}
if (!t.isArrayType()) {
return false;
}
if (itemType.isNull()) {
return true;
}
return itemType.matchesType(((ArrayType) t).itemType);
}
public static ArrayType create() {
return new ArrayType();
}
public static ArrayType create(Type type) {
return new ArrayType(type);
}
@Override
public String toSql(int depth) {
if (depth >= MAX_NESTING_DEPTH) {
@ -76,6 +119,46 @@ public class ArrayType extends Type {
structStr = structStr.substring(lpad);
return String.format("%sARRAY<%s>", leftPadding, structStr);
}
@Override
public boolean isSupported() {
if (!Config.enable_complex_type_support) {
return false;
}
if (itemType.isNull()) {
return false;
}
return true;
}
@Override
public String toString() {
return toSql(0);
}
@Override
public TColumnType toColumnTypeThrift() {
TColumnType thrift = new TColumnType();
thrift.type = PrimitiveType.ARRAY.toThrift();
return thrift;
}
@Override
public boolean isFixedLengthType() {
return false;
}
@Override
public boolean supportsTablePartitioning() {
if (!isSupported() || isComplexType()) {
return false;
}
return true;
}
@Override
public int getSlotSize() {
return PrimitiveType.ARRAY.getSlotSize();
}
}

View File

@ -50,6 +50,8 @@ public class Column implements Writable {
private static final Logger LOG = LogManager.getLogger(Column.class);
public static final String DELETE_SIGN = "__DORIS_DELETE_SIGN__";
public static final String SEQUENCE_COL = "__DORIS_SEQUENCE_COL__";
private static final String COLUMN_ARRAY_CHILDREN = "item";
@SerializedName(value = "name")
private String name;
@SerializedName(value = "type")
@ -75,6 +77,8 @@ public class Column implements Writable {
private String comment;
@SerializedName(value = "stats")
private ColumnStats stats; // cardinality and selectivity etc.
@SerializedName(value = "children")
private List<Column> children;
// Define expr may exist in two forms, one is analyzed, and the other is not analyzed.
// Currently, analyzed define expr is only used when creating materialized views, so the define expr in RollupJob must be analyzed.
// In other cases, such as define expr in `MaterializedIndexMeta`, it may not be analyzed after being relayed.
@ -89,6 +93,7 @@ public class Column implements Writable {
this.isKey = false;
this.stats = new ColumnStats();
this.visible = true;
this.children = new ArrayList<>(Type.MAX_NESTING_DEPTH);
}
public Column(String name, PrimitiveType dataType) {
@ -130,9 +135,10 @@ public class Column implements Writable {
this.isAllowNull = isAllowNull;
this.defaultValue = defaultValue;
this.comment = comment;
this.stats = new ColumnStats();
this.visible = visible;
this.children = new ArrayList<>(Type.MAX_NESTING_DEPTH);
createChildrenColumn(this.type, this);
}
public Column(Column column) {
@ -146,6 +152,22 @@ public class Column implements Writable {
this.comment = column.getComment();
this.stats = column.getStats();
this.visible = column.visible;
this.children = column.getChildren();
}
public void createChildrenColumn(Type type, Column column) {
if (type.isArrayType()) {
Column c = new Column(COLUMN_ARRAY_CHILDREN, ((ArrayType) type).getItemType());
column.addChildrenColumn(c);
}
}
public List<Column> getChildren() {
return children;
}
private void addChildrenColumn(Column column) {
this.children.add(column);
}
public void setName(String newName) {
@ -201,7 +223,12 @@ public class Column implements Writable {
public PrimitiveType getDataType() { return type.getPrimitiveType(); }
public Type getType() { return ScalarType.createType(type.getPrimitiveType()); }
public Type getType() {
if (type.isArrayType() || type.isMapType() || type.isStructType()) {
return type;
}
return ScalarType.createType(type.getPrimitiveType());
}
public void setType(Type type) {
this.type = type;
@ -209,9 +236,9 @@ public class Column implements Writable {
public Type getOriginType() { return type; }
public int getStrLen() { return ((ScalarType) type).getLength(); }
public int getPrecision() { return ((ScalarType) type).getScalarPrecision(); }
public int getScale() { return ((ScalarType) type).getScalarScale(); }
public int getStrLen() { return type.getLength(); }
public int getPrecision() { return type instanceof ScalarType ? ((ScalarType) type).getScalarPrecision() : -1; }
public int getScale() { return type instanceof ScalarType ? ((ScalarType) type).getScalarScale() : -1; }
public AggregateType getAggregationType() {
return this.aggregationType;
@ -291,12 +318,41 @@ public class Column implements Writable {
tColumn.setIsAllowNull(this.isAllowNull);
tColumn.setDefaultValue(this.defaultValue);
tColumn.setVisible(visible);
tColumn.setChildrenColumn(new ArrayList<>());
toChildrenThrift(this, tColumn);
// The define expr does not need to be serialized here for now.
// At present, only serialized(analyzed) define expr is directly used when creating a materialized view.
// It will not be used here, but through another structure `TAlterMaterializedViewParam`.
if (this.defineExpr != null) {
tColumn.setDefineExpr(this.defineExpr.treeToThrift());
}
return tColumn;
}
private void toChildrenThrift(Column column, TColumn tColumn) {
if (column.type.isArrayType()) {
Column children = column.getChildren().get(0);
TColumn childrenTColumn = new TColumn();
childrenTColumn.setColumnName(children.name);
TColumnType childrenTColumnType = new TColumnType();
childrenTColumnType.setType(children.getDataType().toThrift());
childrenTColumnType.setType(children.getDataType().toThrift());
childrenTColumnType.setLen(children.getStrLen());
childrenTColumnType.setPrecision(children.getPrecision());
childrenTColumnType.setScale(children.getScale());
childrenTColumnType.setIndexLen(children.getOlapColumnIndexSize());
childrenTColumn.setColumnType(childrenTColumnType);
tColumn.children_column.add(childrenTColumn);
toChildrenThrift(children, childrenTColumn);
}
}
public void checkSchemaChangeAllowed(Column other) throws DdlException {
if (Strings.isNullOrEmpty(other.name)) {
throw new DdlException("Dest column name is empty");
@ -475,6 +531,16 @@ public class Column implements Writable {
return false;
}
if (children.size() != other.children.size()) {
return false;
}
for (int i = 0; i < children.size(); i++) {
if (!children.get(i).equals(other.getChildren().get(i))) {
return false;
}
}
return true;
}
@ -550,6 +616,12 @@ public class Column implements Writable {
case DECIMALV2:
sb.append(String.format(typeStringMap.get(dataType), getPrecision(), getScale()));
break;
case ARRAY:
case MAP:
case STRUCT:
    // Complex types render their own SQL form; the shared format-string
    // table used by the default branch does not apply to them. The break
    // is required: without it each case fell through to the next and then
    // into default, appending the type string multiple times.
    sb.append(type.toString());
    break;
default:
sb.append(typeStringMap.get(dataType));
break;

View File

@ -111,26 +111,38 @@ public abstract class ColumnType {
}
/**
 * Serializes a catalog type for FE metadata persistence.
 * Layout: primitive-type name, then for scalar types scale/precision/length
 * plus a legacy boolean, or for ARRAY the item type written recursively.
 * Only scalar and ARRAY types are supported.
 *
 * @throws IOException on write failure
 */
public static void write(DataOutput out, Type type) throws IOException {
    Preconditions.checkArgument(type.isScalarType() || type.isArrayType(),
            "only support scalar type and array serialization");
    if (type.isScalarType()) {
        ScalarType scalarType = (ScalarType) type;
        Text.writeString(out, scalarType.getPrimitiveType().name());
        out.writeInt(scalarType.getScalarScale());
        out.writeInt(scalarType.getScalarPrecision());
        out.writeInt(scalarType.getLength());
        // varcharLimit is obsolete; always write true for backward compatibility.
        out.writeBoolean(true);
    } else if (type.isArrayType()) {
        ArrayType arrayType = (ArrayType) type;
        Text.writeString(out, arrayType.getPrimitiveType().name());
        // Recurse on the item type so nested arrays round-trip with read().
        write(out, arrayType.getItemType());
    }
}
public static Type read(DataInput in) throws IOException {
PrimitiveType primitiveType = PrimitiveType.valueOf(Text.readString(in));
int scale = in.readInt();
int precision = in.readInt();
int len = in.readInt();
if (Catalog.getCurrentCatalogJournalVersion() >= FeMetaVersion.VERSION_22) {
// Useless, just for back compatible
in.readBoolean();
if (primitiveType == PrimitiveType.ARRAY) {
Type itermType = read(in);
return ArrayType.create(itermType);
} else {
int scale = in.readInt();
int precision = in.readInt();
int len = in.readInt();
if (Catalog.getCurrentCatalogJournalVersion() >= FeMetaVersion.VERSION_22) {
// Useless, just for back compatible
in.readBoolean();
}
return ScalarType.createType(primitiveType, len, precision, scale);
}
return ScalarType.createType(primitiveType, len, precision, scale);
}
}

View File

@ -156,6 +156,10 @@ public class Function implements Writable {
return retType;
}
// Replaces this function's declared return type with the given type.
public void setReturnType(Type type) {
    this.retType = type;
}
// Returns the argument types. NOTE(review): this exposes the internal array
// directly — callers must not modify it; consider returning a copy.
public Type[] getArgs() {
    return argTypes;
}

View File

@ -30,7 +30,10 @@ import com.google.common.base.Strings;
public class MapType extends Type {
private final Type keyType;
private final Type valueType;
// No-arg constructor producing a MAP with NULL key/value types, used where a
// generic MAP placeholder is needed before the concrete types are known.
public MapType() {
    this.keyType = NULL;
    this.valueType = NULL;
}
public MapType(Type keyType, Type valueType) {
Preconditions.checkNotNull(keyType);
Preconditions.checkNotNull(valueType);
@ -38,6 +41,11 @@ public class MapType extends Type {
this.valueType = valueType;
}
// Maps every MapType instance to the MAP primitive-type tag.
@Override
public PrimitiveType getPrimitiveType() {
    return PrimitiveType.MAP;
}
// Key type of this MAP (NULL when built via the no-arg constructor).
public Type getKeyType() {
    return keyType;
}

View File

@ -0,0 +1,79 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package org.apache.doris.catalog;
import org.apache.doris.thrift.TTypeDesc;
import org.apache.doris.thrift.TTypeNode;
import org.apache.doris.thrift.TTypeNodeType;
import com.google.common.base.Preconditions;
import com.google.common.base.Strings;
/**
* Describes a multi-row type in sub-query.
*/
/**
 * Describes a multi-row type in sub-query. Rendered, serialized and
 * pretty-printed with the same ARRAY&lt;item&gt; representation as an array type.
 */
public class MultiRowType extends Type {
    private final Type itemType;

    public MultiRowType(Type itemType) {
        this.itemType = itemType;
    }

    public Type getItemType() {
        return itemType;
    }

    @Override
    public String toSql(int depth) {
        // Stop rendering once the nesting limit is reached to bound recursion
        // on deeply nested types.
        if (depth >= MAX_NESTING_DEPTH) {
            return "ARRAY<...>";
        }
        return String.format("ARRAY<%s>", itemType.toSql(depth + 1));
    }

    @Override
    public boolean equals(Object other) {
        if (!(other instanceof MultiRowType)) {
            return false;
        }
        MultiRowType otherMultiRowType = (MultiRowType) other;
        return otherMultiRowType.itemType.equals(itemType);
    }

    @Override
    public int hashCode() {
        // Required alongside equals(): equal item types must hash equally.
        return itemType.hashCode();
    }

    @Override
    public void toThrift(TTypeDesc container) {
        TTypeNode node = new TTypeNode();
        container.types.add(node);
        Preconditions.checkNotNull(itemType);
        // Thrift has no MULTI_ROW node; reuse the ARRAY representation.
        node.setType(TTypeNodeType.ARRAY);
        itemType.toThrift(container);
    }

    @Override
    protected String prettyPrint(int lpad) {
        String leftPadding = Strings.repeat(" ", lpad);
        if (!itemType.isStructType()) {
            return leftPadding + toSql();
        }
        // Pass in the padding to make sure nested fields are aligned properly,
        // even if we then strip the top-level padding.
        String structStr = itemType.prettyPrint(lpad);
        structStr = structStr.substring(lpad);
        return String.format("%sARRAY<%s>", leftPadding, structStr);
    }
}

View File

@ -54,6 +54,9 @@ public enum PrimitiveType {
TIME("TIME", 8, TPrimitiveType.TIME),
// we use OBJECT type represent BITMAP type in Backend
BITMAP("BITMAP", 16, TPrimitiveType.OBJECT),
ARRAY("ARRAY", 24, TPrimitiveType.ARRAY),
MAP("MAP", 24, TPrimitiveType.MAP),
// Description fixed: was "MAP", which made STRUCT's string form report MAP.
STRUCT("STRUCT", 24, TPrimitiveType.STRUCT),
// Unsupported scalar types.
BINARY("BINARY", -1, TPrimitiveType.BINARY);
@ -298,6 +301,8 @@ public enum PrimitiveType {
supportedTypes.add(TIME);
supportedTypes.add(DECIMALV2);
supportedTypes.add(BITMAP);
supportedTypes.add(ARRAY);
supportedTypes.add(MAP);
}
public static ArrayList<PrimitiveType> getIntegerTypes() {
@ -541,6 +546,12 @@ public enum PrimitiveType {
return HLL;
case OBJECT:
return BITMAP;
case ARRAY:
return ARRAY;
case MAP:
return MAP;
case STRUCT:
return STRUCT;
default:
return INVALID_TYPE;
}
@ -629,6 +640,10 @@ public enum PrimitiveType {
return (this == DATE || this == DATETIME);
}
// True when this primitive type is ARRAY.
public boolean isArrayType(){
    return this == ARRAY;
}
// True for string-like primitive types; HLL counts as a string type here.
public boolean isStringType() {
    return (this == VARCHAR || this == CHAR || this == HLL);
}

View File

@ -304,6 +304,9 @@ public class ScalarType extends Type {
case BITMAP:
stringBuilder.append(type.toString().toLowerCase());
break;
case ARRAY:
stringBuilder.append(type.toString().toLowerCase());
break;
default:
stringBuilder.append("unknown type: " + type.toString());
break;
@ -371,6 +374,8 @@ public class ScalarType extends Type {
@Override
public PrimitiveType getPrimitiveType() { return type; }

// Ordinal of the underlying primitive-type enum constant.
public int ordinal() { return type.ordinal(); }

// Declared length (e.g. for CHAR/VARCHAR); overrides Type's -1 default.
@Override
public int getLength() { return len; }

public void setLength(int len) {this.len = len; }

// Whether the column definition explicitly specified a string length.
public boolean isAssignedStrLenInColDefinition() { return isAssignedStrLenInColDefinition; }

View File

@ -114,5 +114,9 @@ public class StructType extends Type {
field.toThrift(container, node);
}
}
// Human-readable SQL form of this struct, starting at nesting depth 0.
@Override
public String toString() {
    return toSql(0);
}
}

View File

@ -45,11 +45,11 @@ import java.util.List;
public abstract class Type {
private static final Logger LOG = LogManager.getLogger(Type.class);
// Maximum nesting depth of a type. This limit was determined experimentally by
// generating and scanning deeply nested Parquet and Avro files. In those experiments,
// we exceeded the stack space in the scanner (which uses recursion for dealing with
// nested types) at a nesting depth between 200 and 300 (200 worked, 300 crashed).
// NOTE(review): was temporarily set to 2 with a garbled comment — evidently a
// debugging leftover; restored to the documented limit of 100.
public static int MAX_NESTING_DEPTH = 100;
// Static constant types for scalar types that don't require additional information.
public static final ScalarType INVALID = new ScalarType(PrimitiveType.INVALID_TYPE);
@ -69,12 +69,13 @@ public abstract class Type {
ScalarType.createDecimalV2Type(ScalarType.DEFAULT_PRECISION,
ScalarType.DEFAULT_SCALE);
public static final ScalarType DECIMALV2 = DEFAULT_DECIMALV2;
// (ScalarType) ScalarType.createDecimalTypeInternal(-1, -1);
// (ScalarType) ScalarType.createDecimalTypeInternal(-1, -1);
public static final ScalarType DEFAULT_VARCHAR = ScalarType.createVarcharType(-1);
public static final ScalarType VARCHAR = ScalarType.createVarcharType(-1);
public static final ScalarType HLL = ScalarType.createHllType();
public static final ScalarType CHAR = (ScalarType) ScalarType.createCharType(-1);
public static final ScalarType BITMAP = new ScalarType(PrimitiveType.BITMAP);
public static final MapType Map = new MapType();
private static ArrayList<ScalarType> integerTypes;
private static ArrayList<ScalarType> numericTypes;
@ -262,7 +263,7 @@ public abstract class Type {
}
public boolean isCollectionType() {
return isMapType() || isArrayType();
return isMapType() || isArrayType() || isMultiRowType();
}
public boolean isMapType() {
@ -273,6 +274,10 @@ public abstract class Type {
return this instanceof ArrayType;
}
// True when this type is a MultiRowType (the ARRAY-like sub-query type).
public boolean isMultiRowType() {
    return this instanceof MultiRowType;
}
// True when this type is a STRUCT type.
public boolean isStructType() {
    return this instanceof StructType;
}
@ -289,6 +294,8 @@ public abstract class Type {
return true;
}
// Length where applicable; -1 means no intrinsic length (ScalarType overrides this).
public int getLength() { return -1; }
/**
* Indicates whether we support partitioning tables on columns of this type.
*/
@ -352,6 +359,16 @@ public abstract class Type {
if (t1.isScalarType() && t2.isScalarType()) {
return ScalarType.isImplicitlyCastable((ScalarType) t1, (ScalarType) t2, strict);
}
if (t1.isComplexType() || t2.isComplexType()) {
if (t1.isArrayType() && t2.isArrayType()) {
return true;
} else if (t1.isMapType() && t2.isMapType()) {
return true;
} else if (t1.isStructType() && t2.isStructType()) {
return true;
}
return false;
}
return false;
}
@ -413,7 +430,7 @@ public abstract class Type {
if (d >= MAX_NESTING_DEPTH) return true;
if (isStructType()) {
StructType structType = (StructType) this;
for (StructField f: structType.getFields()) {
for (StructField f : structType.getFields()) {
if (f.getType().exceedsMaxNestingDepth(d + 1)) {
return true;
}
@ -423,6 +440,11 @@ public abstract class Type {
if (arrayType.getItemType().exceedsMaxNestingDepth(d + 1)) {
return true;
}
} else if (isMultiRowType()) {
MultiRowType multiRowType = (MultiRowType) this;
if (multiRowType.getItemType().exceedsMaxNestingDepth(d + 1)) {
return true;
}
} else if (isMapType()) {
MapType mapType = (MapType) this;
if (mapType.getValueType().exceedsMaxNestingDepth(d + 1)) {
@ -467,6 +489,12 @@ public abstract class Type {
return Type.VARCHAR;
case HLL:
return Type.HLL;
case ARRAY:
return ArrayType.create();
case MAP:
return new MapType();
case STRUCT:
return new StructType();
case BITMAP:
return Type.BITMAP;
default:
@ -890,8 +918,12 @@ public abstract class Type {
if (t1 == PrimitiveType.INVALID_TYPE ||
t2 == PrimitiveType.INVALID_TYPE) continue;
if (t1 == PrimitiveType.NULL_TYPE || t2 == PrimitiveType.NULL_TYPE) continue;
if (t1 == PrimitiveType.ARRAY || t2 == PrimitiveType.ARRAY) continue;
if (t1 == PrimitiveType.DECIMALV2 || t2 == PrimitiveType.DECIMALV2) continue;
if (t1 == PrimitiveType.TIME || t2 == PrimitiveType.TIME) continue;
if (t1 == PrimitiveType.ARRAY || t2 == PrimitiveType.ARRAY) continue;
if (t1 == PrimitiveType.MAP || t2 == PrimitiveType.MAP) continue;
if (t1 == PrimitiveType.STRUCT || t2 == PrimitiveType.STRUCT) continue;
Preconditions.checkNotNull(compatibilityMatrix[i][j]);
}
}

View File

@ -1235,6 +1235,11 @@ public class Config extends ConfigBase {
@ConfField(mutable = true, masterOnly = true)
public static int period_of_auto_resume_min = 5;
/*
* If set to true, Doris will support complex type
*/
@ConfField
public static boolean enable_complex_type_support = false;
/**
* If set to true, the backend will be automatically dropped after finishing decommission.
* If set to false, the backend will not be dropped and remaining in DECOMMISSION state.

View File

@ -72,6 +72,7 @@ public class Util {
TYPE_STRING_MAP.put(PrimitiveType.HLL, "varchar(%d)");
TYPE_STRING_MAP.put(PrimitiveType.BOOLEAN, "bool");
TYPE_STRING_MAP.put(PrimitiveType.BITMAP, "bitmap");
TYPE_STRING_MAP.put(PrimitiveType.ARRAY, "Array<%s>");
TYPE_STRING_MAP.put(PrimitiveType.NULL_TYPE, "null");
}

View File

@ -20,13 +20,16 @@ package org.apache.doris.persist.gson;
import org.apache.doris.alter.AlterJobV2;
import org.apache.doris.alter.RollupJobV2;
import org.apache.doris.alter.SchemaChangeJobV2;
import org.apache.doris.catalog.ArrayType;
import org.apache.doris.catalog.DistributionInfo;
import org.apache.doris.catalog.HashDistributionInfo;
import org.apache.doris.catalog.MapType;
import org.apache.doris.catalog.OdbcCatalogResource;
import org.apache.doris.catalog.RandomDistributionInfo;
import org.apache.doris.catalog.Resource;
import org.apache.doris.catalog.ScalarType;
import org.apache.doris.catalog.SparkResource;
import org.apache.doris.catalog.StructType;
import org.apache.doris.load.loadv2.LoadJob.LoadJobStateUpdateInfo;
import org.apache.doris.load.loadv2.SparkLoadJob.SparkLoadJobStateUpdateInfo;
@ -90,7 +93,10 @@ public class GsonUtils {
private static RuntimeTypeAdapterFactory<org.apache.doris.catalog.Type> columnTypeAdapterFactory = RuntimeTypeAdapterFactory
.of(org.apache.doris.catalog.Type.class, "clazz")
// TODO: register other sub type after Doris support more types.
.registerSubtype(ScalarType.class, ScalarType.class.getSimpleName());
.registerSubtype(ScalarType.class, ScalarType.class.getSimpleName())
.registerSubtype(ArrayType.class, ArrayType.class.getSimpleName())
.registerSubtype(MapType.class, MapType.class.getSimpleName())
.registerSubtype(StructType.class, StructType.class.getSimpleName());
// runtime adapter for class "DistributionInfo"
private static RuntimeTypeAdapterFactory<DistributionInfo> distributionInfoTypeAdapterFactory = RuntimeTypeAdapterFactory

View File

@ -0,0 +1,29 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package org.apache.doris.rewrite;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
 * Container annotation that registers one constant-folding method under
 * several {@code FEFunction} signatures at once.
 */
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.METHOD)
public @interface FEFunctionList {
    FEFunction[] value();
}

View File

@ -17,6 +17,10 @@
package org.apache.doris.rewrite;
import java.math.BigDecimal;
import java.math.BigInteger;
import org.apache.doris.analysis.ArrayLiteral;
import org.apache.doris.analysis.DateLiteral;
import org.apache.doris.analysis.DecimalLiteral;
import org.apache.doris.analysis.FloatLiteral;
@ -347,14 +351,6 @@ public class FEFunctions {
return new FloatLiteral(result, Type.DOUBLE);
}
@FEFunction(name = "add", argTypes = { "DECIMAL", "DECIMAL" }, returnType = "DECIMAL")
public static DecimalLiteral addDecimal(LiteralExpr first, LiteralExpr second) throws AnalysisException {
BigDecimal left = new BigDecimal(first.getStringValue());
BigDecimal right = new BigDecimal(second.getStringValue());
BigDecimal result = left.add(right);
return new DecimalLiteral(result);
}
@FEFunction(name = "add", argTypes = { "DECIMALV2", "DECIMALV2" }, returnType = "DECIMALV2")
public static DecimalLiteral addDecimalV2(LiteralExpr first, LiteralExpr second) throws AnalysisException {
BigDecimal left = new BigDecimal(first.getStringValue());
@ -384,14 +380,6 @@ public class FEFunctions {
return new FloatLiteral(result, Type.DOUBLE);
}
@FEFunction(name = "subtract", argTypes = { "DECIMAL", "DECIMAL" }, returnType = "DECIMAL")
public static DecimalLiteral subtractDecimal(LiteralExpr first, LiteralExpr second) throws AnalysisException {
BigDecimal left = new BigDecimal(first.getStringValue());
BigDecimal right = new BigDecimal(second.getStringValue());
BigDecimal result = left.subtract(right);
return new DecimalLiteral(result);
}
@FEFunction(name = "subtract", argTypes = { "DECIMALV2", "DECIMALV2" }, returnType = "DECIMALV2")
public static DecimalLiteral subtractDecimalV2(LiteralExpr first, LiteralExpr second) throws AnalysisException {
BigDecimal left = new BigDecimal(first.getStringValue());
@ -423,14 +411,6 @@ public class FEFunctions {
return new FloatLiteral(result, Type.DOUBLE);
}
@FEFunction(name = "multiply", argTypes = { "DECIMAL", "DECIMAL" }, returnType = "DECIMAL")
public static DecimalLiteral multiplyDecimal(LiteralExpr first, LiteralExpr second) throws AnalysisException {
BigDecimal left = new BigDecimal(first.getStringValue());
BigDecimal right = new BigDecimal(second.getStringValue());
BigDecimal result = left.multiply(right);
return new DecimalLiteral(result);
}
@FEFunction(name = "multiply", argTypes = { "DECIMALV2", "DECIMALV2" }, returnType = "DECIMALV2")
public static DecimalLiteral multiplyDecimalV2(LiteralExpr first, LiteralExpr second) throws AnalysisException {
BigDecimal left = new BigDecimal(first.getStringValue());
@ -457,17 +437,6 @@ public class FEFunctions {
return new FloatLiteral(result, Type.DOUBLE);
}
@FEFunction(name = "divide", argTypes = { "DECIMAL", "DECIMAL" }, returnType = "DECIMAL")
public static DecimalLiteral divideDecimal(LiteralExpr first, LiteralExpr second) throws AnalysisException {
BigDecimal left = new BigDecimal(first.getStringValue());
BigDecimal right = new BigDecimal(second.getStringValue());
if (right.compareTo(BigDecimal.ZERO) == 0) {
return null;
}
BigDecimal result = left.divide(right);
return new DecimalLiteral(result);
}
@FEFunction(name = "divide", argTypes = { "DECIMALV2", "DECIMALV2" }, returnType = "DECIMALV2")
public static DecimalLiteral divideDecimalV2(LiteralExpr first, LiteralExpr second) throws AnalysisException {
BigDecimal left = new BigDecimal(first.getStringValue());
@ -500,39 +469,25 @@ public class FEFunctions {
return new StringLiteral(resultBuilder.toString());
}
@FEFunction(name = "ifnull", argTypes = {"VARCHAR", "VARCHAR"}, returnType = "VARCHAR")
public static LiteralExpr ifNullString(LiteralExpr first, LiteralExpr second) throws AnalysisException {
/**
 * Constant-folds ifnull(first, second): returns first unless it is a NULL
 * literal, in which case second is returned. Registered for every supported
 * argument-type combination via the FEFunctionList container.
 */
@FEFunctionList({
    @FEFunction(name = "ifnull", argTypes = {"VARCHAR", "VARCHAR"}, returnType = "VARCHAR"),
    @FEFunction(name = "ifnull", argTypes = {"TINYINT", "TINYINT"}, returnType = "TINYINT"),
    @FEFunction(name = "ifnull", argTypes = {"INT", "INT"}, returnType = "INT"),
    @FEFunction(name = "ifnull", argTypes = {"BIGINT", "BIGINT"}, returnType = "BIGINT"),
    @FEFunction(name = "ifnull", argTypes = {"DATETIME", "DATETIME"}, returnType = "DATETIME"),
    @FEFunction(name = "ifnull", argTypes = { "DATE", "DATETIME" }, returnType = "DATETIME"),
    @FEFunction(name = "ifnull", argTypes = { "DATETIME", "DATE" }, returnType = "DATETIME")
})
public static LiteralExpr ifNull(LiteralExpr first, LiteralExpr second) throws AnalysisException {
    return first instanceof NullLiteral ? second : first;
}
@FEFunction(name = "ifnull", argTypes = {"TINYINT", "TINYINT"}, returnType = "TINYINT")
public static LiteralExpr ifNullTinyInt(LiteralExpr first, LiteralExpr second) throws AnalysisException {
return first instanceof NullLiteral ? second : first;
}
@FEFunction(name = "ifnull", argTypes = {"INT", "INT"}, returnType = "INT")
public static LiteralExpr ifNullInt(LiteralExpr first, LiteralExpr second) throws AnalysisException {
return first instanceof NullLiteral ? second : first;
}
@FEFunction(name = "ifnull", argTypes = {"BIGINT", "BIGINT"}, returnType = "BIGINT")
public static LiteralExpr ifNullBigInt(LiteralExpr first, LiteralExpr second) throws AnalysisException {
return first instanceof NullLiteral ? second : first;
}
@FEFunction(name = "ifnull", argTypes = { "DATETIME", "DATETIME" }, returnType = "DATETIME")
public static LiteralExpr ifNullDateTime(LiteralExpr first, LiteralExpr second) throws AnalysisException {
return first instanceof NullLiteral ? second : first;
}
@FEFunction(name = "ifnull", argTypes = { "DATE", "DATETIME" }, returnType = "DATETIME")
public static LiteralExpr ifNullDateDatetime(LiteralExpr first, LiteralExpr second) throws AnalysisException {
return first instanceof NullLiteral ? second : first;
}
@FEFunction(name = "ifnull", argTypes = { "DATETIME", "DATE" }, returnType = "DATETIME")
public static LiteralExpr ifNullDatetimeDate(LiteralExpr first, LiteralExpr second) throws AnalysisException {
return first instanceof NullLiteral ? second : first;
/**
 * Constant-folds array(...) into an ArrayLiteral holding the given literal
 * elements. NOTE(review): registered with single-argument INT/VARCHAR
 * signatures while the implementation is varargs — confirm the FEFunction
 * dispatcher supports variable arity for these signatures.
 */
@FEFunctionList({
    @FEFunction(name = "array", argTypes = {"INT"}, returnType = "ARRAY"),
    @FEFunction(name = "array", argTypes = {"VARCHAR"}, returnType = "ARRAY")
})
public static ArrayLiteral array(LiteralExpr... exprs) throws AnalysisException {
    return new ArrayLiteral(exprs);
}
}

View File

@ -100,6 +100,7 @@ import org.apache.doris.qe.SqlModeHelper;
keywordMap.put("as", new Integer(SqlParserSymbols.KW_AS));
keywordMap.put("asc", new Integer(SqlParserSymbols.KW_ASC));
keywordMap.put("authors", new Integer(SqlParserSymbols.KW_AUTHORS));
keywordMap.put("array", new Integer(SqlParserSymbols.KW_ARRAY));
keywordMap.put("backend", new Integer(SqlParserSymbols.KW_BACKEND));
keywordMap.put("backends", new Integer(SqlParserSymbols.KW_BACKENDS));
keywordMap.put("backup", new Integer(SqlParserSymbols.KW_BACKUP));
@ -249,6 +250,7 @@ import org.apache.doris.qe.SqlModeHelper;
keywordMap.put("load", new Integer(SqlParserSymbols.KW_LOAD));
keywordMap.put("local", new Integer(SqlParserSymbols.KW_LOCAL));
keywordMap.put("location", new Integer(SqlParserSymbols.KW_LOCATION));
keywordMap.put("map", new Integer(SqlParserSymbols.KW_MAP));
keywordMap.put("materialized", new Integer(SqlParserSymbols.KW_MATERIALIZED));
keywordMap.put("max", new Integer(SqlParserSymbols.KW_MAX));
keywordMap.put("maxvalue", new Integer(SqlParserSymbols.KW_MAX_VALUE));
@ -346,6 +348,7 @@ import org.apache.doris.qe.SqlModeHelper;
keywordMap.put("storage", new Integer(SqlParserSymbols.KW_STORAGE));
keywordMap.put("stream", new Integer(SqlParserSymbols.KW_STREAM));
keywordMap.put("string", new Integer(SqlParserSymbols.KW_STRING));
keywordMap.put("struct", new Integer(SqlParserSymbols.KW_STRUCT));
keywordMap.put("sum", new Integer(SqlParserSymbols.KW_SUM));
keywordMap.put("superuser", new Integer(SqlParserSymbols.KW_SUPERUSER));
keywordMap.put("sync", new Integer(SqlParserSymbols.KW_SYNC));

View File

@ -304,17 +304,6 @@ public class FEFunctionsTest {
Assert.assertEquals(expectedResult, actualResult);
}
@Test
public void addDecimalTest() throws AnalysisException {
DecimalLiteral actualResult = FEFunctions.addDecimal(new DecimalLiteral("2.2"), new DecimalLiteral("3.3"));
DecimalLiteral expectedResult = new DecimalLiteral("5.5");
Assert.assertEquals(expectedResult, actualResult);
actualResult = FEFunctions.addDecimal(new DecimalLiteral("-2.2"), new DecimalLiteral("3.3"));
expectedResult = new DecimalLiteral("1.1");
Assert.assertEquals(expectedResult, actualResult);
}
@Test
public void addDecimalV2Test() throws AnalysisException {
DecimalLiteral actualResult = FEFunctions.addDecimalV2(new DecimalLiteral("2.2"), new DecimalLiteral("3.3"));
@ -359,17 +348,6 @@ public class FEFunctionsTest {
Assert.assertEquals(expectedResult, actualResult);
}
@Test
public void subtractDecimalTest() throws AnalysisException {
DecimalLiteral actualResult = FEFunctions.subtractDecimal(new DecimalLiteral("2.2"), new DecimalLiteral("3.3"));
DecimalLiteral expectedResult = new DecimalLiteral("-1.1");
Assert.assertEquals(expectedResult, actualResult);
actualResult = FEFunctions.subtractDecimal(new DecimalLiteral("5.5"), new DecimalLiteral("3.3"));
expectedResult = new DecimalLiteral("2.2");
Assert.assertEquals(expectedResult, actualResult);
}
@Test
public void subtractDecimalV2Test() throws AnalysisException {
DecimalLiteral actualResult = FEFunctions.subtractDecimalV2(new DecimalLiteral("2.2"), new DecimalLiteral("3.3"));
@ -422,22 +400,6 @@ public class FEFunctionsTest {
Assert.assertEquals(expectedResult, actualResult);
}
@Test
public void multiplyDecimalTest() throws AnalysisException {
DecimalLiteral actualResult = FEFunctions.multiplyDecimal(new DecimalLiteral("1.1"), new DecimalLiteral("1.0"));
DecimalLiteral expectedResult = new DecimalLiteral("1.1");
Assert.assertEquals(expectedResult, actualResult);
actualResult = FEFunctions.multiplyDecimal(new DecimalLiteral("-1.1"), new DecimalLiteral("-10.0"));
expectedResult = new DecimalLiteral("11.0");
Assert.assertEquals(expectedResult, actualResult);
actualResult = FEFunctions.multiplyDecimal(new DecimalLiteral("-1.1"), new DecimalLiteral("-1.1"));
expectedResult = new DecimalLiteral("1.21");
Assert.assertEquals(expectedResult, actualResult);
}
@Test
public void multiplyDecimalV2Test() throws AnalysisException {
DecimalLiteral actualResult = FEFunctions.multiplyDecimalV2(new DecimalLiteral("1.1"), new DecimalLiteral("1.0"));
@ -479,17 +441,6 @@ public class FEFunctionsTest {
Assert.assertEquals(expectedResult, actualResult);
}
@Test
public void divideDecimalTest() throws AnalysisException {
DecimalLiteral actualResult = FEFunctions.divideDecimal(new DecimalLiteral("1.1"), new DecimalLiteral("1.0"));
DecimalLiteral expectedResult = new DecimalLiteral("1.1");
Assert.assertEquals(expectedResult, actualResult);
actualResult = FEFunctions.divideDecimal(new DecimalLiteral("-1.1"), new DecimalLiteral("-10.0"));
expectedResult = new DecimalLiteral("0.11");
Assert.assertEquals(expectedResult, actualResult);
}
@Test
public void divideDecimalV2Test() throws AnalysisException {
DecimalLiteral actualResult = FEFunctions.divideDecimalV2(new DecimalLiteral("1.1"), new DecimalLiteral("1.0"));