From debb58c2781150995153d359e241c9f1a8bdf93f Mon Sep 17 00:00:00 2001
From: ZHAO Chun
Date: Sat, 11 May 2019 21:46:37 +0800
Subject: [PATCH] Add SHOW FUNCTION and update docs for UDF (#1140)

---
 .../extending-doris/user-defined-function.md  | 87 +++++++++++++++++++
 .../sql-statements/create-function.md         | 70 +++++++++++++++
 .../sql-statements/drop-function.md           | 27 ++++++
 .../cn/sql-reference/sql-statements/insert.md |  6 +-
 .../sql-statements/show-function.md           | 36 ++++++++
 fe/src/main/cup/sql_parser.cup                |  4 +
 .../doris/analysis/CreateFunctionStmt.java    | 21 +++--
 .../doris/analysis/ShowFunctionStmt.java      | 72 +++++++++++++++
 .../doris/catalog/AggregateFunction.java      | 17 ++++
 .../org/apache/doris/catalog/Database.java    |  9 ++
 .../org/apache/doris/catalog/Function.java    | 22 ++++-
 .../apache/doris/catalog/ScalarFunction.java  | 13 +++
 .../org/apache/doris/qe/ShowExecutor.java     | 47 ++++++++++
 13 files changed, 418 insertions(+), 13 deletions(-)
 create mode 100644 docs/documentation/cn/extending-doris/user-defined-function.md
 create mode 100644 docs/documentation/cn/sql-reference/sql-statements/create-function.md
 create mode 100644 docs/documentation/cn/sql-reference/sql-statements/drop-function.md
 create mode 100644 docs/documentation/cn/sql-reference/sql-statements/show-function.md
 create mode 100644 fe/src/main/java/org/apache/doris/analysis/ShowFunctionStmt.java

diff --git a/docs/documentation/cn/extending-doris/user-defined-function.md b/docs/documentation/cn/extending-doris/user-defined-function.md
new file mode 100644
index 0000000000..24a51afbf7
--- /dev/null
+++ b/docs/documentation/cn/extending-doris/user-defined-function.md
@@ -0,0 +1,87 @@
+# USER DEFINED FUNCTION
+
+Users can extend the capabilities of Doris through the UDF mechanism. This document describes how to create your own UDF.
+
+## Writing a UDF
+
+Before a UDF can be used, it has to be implemented under Doris's UDF framework. A simple UDF demo can be found in `be/src/udf_samples/udf_sample.h|cpp`.
+
+Writing a UDF involves the following steps.
+
+### Implement the function
+
+Create the corresponding header and CPP files, and implement the logic you need in the CPP file. The signature of the implementation function in the CPP file must correspond to the UDF declaration, as described below.
+
+#### Fixed arguments
+
+For a UDF with a fixed argument list, the mapping between the two is straightforward.
+For example, the UDF `INT MyADD(INT, INT)` corresponds to `IntVal AddUdf(FunctionContext* context, const IntVal& arg1, const IntVal& arg2)`.
+
+1. `AddUdf` can be any name, as long as it is specified when the UDF is created.
+2. The first parameter of the implementation function is always `FunctionContext*`. Through this structure the implementer can obtain query-related information and allocate memory needed by the function. The available interfaces are defined in `udf/udf.h`.
+3. From the second parameter onward, the implementation function's parameters must correspond one-to-one to the UDF's arguments; for example, `IntVal` corresponds to `INT`. All of these parameters must be passed as `const` references.
+4. The return type must correspond to the UDF's return type in the same way.
+
+#### Variadic arguments
+
+For variadic arguments, see the following example: the UDF `String md5sum(String, ...)` corresponds to the
+implementation function `StringVal md5sumUdf(FunctionContext* ctx, int num_args, const StringVal* args)`.
+
+1. `md5sumUdf` can also be any name, specified when the function is created.
+2. As with fixed-argument functions, the first parameter is a `FunctionContext*`.
+3. The variadic part consists of two parameters: an integer that tells how many variadic arguments follow, and an array holding those arguments.
+
+A minimal sketch of both styles is shown after the type-mapping table below.
+
+#### Type mapping
+
+|UDF Type|Argument Type|
+|----|---------|
+|TinyInt|TinyIntVal|
+|SmallInt|SmallIntVal|
+|Int|IntVal|
+|BigInt|BigIntVal|
+|LargeInt|LargeIntVal|
+|Float|FloatVal|
+|Double|DoubleVal|
+|Date|DateTimeVal|
+|Datetime|DateTimeVal|
+|Char|StringVal|
+|Varchar|StringVal|
+|Decimal|DecimalVal|
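+
+The sketch below illustrates the two implementation styles described above, assuming only the types declared in `udf/udf.h`. The names `AddUdf` and `md5sumUdf` follow the examples in this document; the null handling and the stubbed md5 body are illustrative only and are not the actual sample code.
+
+```
+#include "udf/udf.h"
+
+namespace doris_udf {
+
+// Fixed arguments: INT MyADD(INT, INT)
+IntVal AddUdf(FunctionContext* context, const IntVal& arg1, const IntVal& arg2) {
+    if (arg1.is_null || arg2.is_null) {
+        return IntVal::null();
+    }
+    return IntVal(arg1.val + arg2.val);
+}
+
+// Variadic arguments: String md5sum(String, ...)
+// num_args is the number of variadic arguments, args points to an array holding them.
+StringVal md5sumUdf(FunctionContext* ctx, int num_args, const StringVal* args) {
+    // Compute the md5 over args[0..num_args-1] here; returning an empty value keeps the sketch short.
+    return StringVal();
+}
+
+}  // namespace doris_udf
+```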
+
+### Update CMakeLists.txt
+
+Add a rule for the new shared library to `be/src/udf_samples/CMakeLists.txt`, similar to `add_library(udfsample SHARED udf_sample.cpp)`. This rule adds a shared library named `udfsample`; all of the source files involved (header files excluded) must be listed after it.
+
+### Build
+
+Run `sh build.sh` at the top level of the source tree to produce the shared library. The library is generated under `be/build/src/udf_samples/`; for example, `udfsample` produces the file `be/build/src/udf_samples/libudfsample.so`.
+
+## Creating a UDF
+
+After the steps above you have a shared library. Put the library somewhere that can be reached over HTTP, then execute a CREATE FUNCTION statement to register the UDF inside Doris. You need the ADMIN privilege to complete this operation.
+
+```
+CREATE [AGGREGATE] FUNCTION
+	name ([argtype][,...])
+	RETURNS rettype
+	PROPERTIES (["key"="value"][,...])
+```
+Notes:
+
+1. `symbol` in PROPERTIES is the symbol of the entry function; this property is required. You can obtain the symbol with the `nm` command; for example, `nm libudfsample.so | grep AddUdf` prints `_ZN9doris_udf6AddUdfEPNS_15FunctionContextERKNS_6IntValES4_`, which is the symbol to use.
+2. `object_file` in PROPERTIES is the URL from which the shared library can be downloaded; this property is also required.
+3. name: a function always belongs to a database, and its name takes the form `dbName`.`funcName`. When `dbName` is not specified explicitly, the database of the current session is used.
+
+See `CREATE FUNCTION` for more details.
+
+## Using a UDF
+
+To use a UDF/UDAF, a user must have the `SELECT` privilege on the corresponding database.
+
+A UDF is used the same way as an ordinary function. The only difference is that a built-in function has global scope, while a UDF is scoped to its database. When the session is connected to a database, using the bare UDF name looks the UDF up in the current database; otherwise the database name must be given explicitly, e.g. `dbName`.`funcName`.
+
+
+## Dropping a UDF
+
+When a UDF is no longer needed, it can be removed with the statement described in `DROP FUNCTION`.
+
diff --git a/docs/documentation/cn/sql-reference/sql-statements/create-function.md b/docs/documentation/cn/sql-reference/sql-statements/create-function.md
new file mode 100644
index 0000000000..b2dbd9954f
--- /dev/null
+++ b/docs/documentation/cn/sql-reference/sql-statements/create-function.md
@@ -0,0 +1,70 @@
+# CREATE FUNCTION
+
+## Syntax
+
+```
+CREATE [AGGREGATE] FUNCTION function_name
+    (arg_type [, ...])
+    RETURNS ret_type
+    [INTERMEDIATE inter_type]
+    [PROPERTIES ("key" = "value" [, ...]) ]
+```
+
+## Description
+
+This statement creates a user-defined function. Executing this command requires the `ADMIN` privilege.
+
+If `function_name` contains a database name, the function is created in that database; otherwise it is created in the database of the current session. The name and arguments of the new function must not be identical to an existing function in the current namespace, or creation fails. A function with the same name but different arguments can be created successfully.
+
+## Parameters
+
+> `AGGREGATE`: if present, an aggregate function is created; otherwise a scalar function is created.
+>
+> `function_name`: the name of the function to create; it may include a database name, e.g. `db1.my_func`.
+>
+> `arg_type`: the argument types of the function, identical to the types used when creating a table. Variadic arguments are expressed with `, ...`; the variadic arguments have the same type as the last non-variadic argument.
+>
+> `ret_type`: the return type of the function.
+>
+> `inter_type`: the data type used for the intermediate stage of an aggregate function.
+>
+> `properties`: properties of the function. The keys that can be set are:
+>
+> "object_file": URL of the shared library containing the user-defined function. Only HTTP/HTTPS is currently supported, and the URL must stay valid for the entire lifetime of the function. Required.
+>
+> "symbol": symbol of the scalar function, used to locate the entry point inside the shared library. Required for scalar functions.
+>
+> "init_fn": symbol of the aggregate function's init function. Required for aggregate functions.
+>
+> "update_fn": symbol of the aggregate function's update function. Required for aggregate functions.
+>
+> "merge_fn": symbol of the aggregate function's merge function. Required for aggregate functions.
+>
+> "serialize_fn": symbol of the aggregate function's serialize function. Optional for aggregate functions; if not specified, the default serialize function is used.
+>
+> "finalize_fn": symbol of the aggregate function's finalize function, which produces the final result. Optional for aggregate functions; if not specified, the default finalize function is used.
+>
+> "md5": MD5 checksum of the shared library, used to verify that the downloaded content is correct. Optional.
+
+## Examples
+
+1. Create a custom scalar function
+
+```
+CREATE FUNCTION my_add(INT, INT) RETURNS INT PROPERTIES (
+    "symbol" = "_ZN9doris_udf6AddUdfEPNS_15FunctionContextERKNS_6IntValES4_",
+    "object_file" = "http://host:port/libmyadd.so"
+);
+```
+
+2. Create a custom aggregate function
+
+```
+CREATE AGGREGATE FUNCTION my_count (BIGINT) RETURNS BIGINT PROPERTIES (
+    "init_fn"="_ZN9doris_udf9CountInitEPNS_15FunctionContextEPNS_9BigIntValE",
+    "update_fn"="_ZN9doris_udf11CountUpdateEPNS_15FunctionContextERKNS_6IntValEPNS_9BigIntValE",
+    "merge_fn"="_ZN9doris_udf10CountMergeEPNS_15FunctionContextERKNS_9BigIntValEPS2_",
+    "finalize_fn"="_ZN9doris_udf13CountFinalizeEPNS_15FunctionContextERKNS_9BigIntValE",
+    "object_file"="http://host:port/libudasample.so"
+);
+```
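+
+For reference, a rough sketch of the C++ side that the aggregate example above points at (a count-like UDAF) is shown below. The function names match the mangled symbols used in the example; the bodies are illustrative only and are not the exact sample shipped with Doris.
+
+```
+#include "udf/udf.h"
+
+namespace doris_udf {
+
+// init_fn: set the intermediate value to zero
+void CountInit(FunctionContext* context, BigIntVal* val) {
+    val->is_null = false;
+    val->val = 0;
+}
+
+// update_fn: count one non-NULL input row
+void CountUpdate(FunctionContext* context, const IntVal& input, BigIntVal* val) {
+    if (!input.is_null) {
+        ++val->val;
+    }
+}
+
+// merge_fn: combine two intermediate values
+void CountMerge(FunctionContext* context, const BigIntVal& src, BigIntVal* dst) {
+    dst->val += src.val;
+}
+
+// finalize_fn: produce the final BIGINT result
+BigIntVal CountFinalize(FunctionContext* context, const BigIntVal& val) {
+    return val;
+}
+
+}  // namespace doris_udf
+```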
diff --git a/docs/documentation/cn/sql-reference/sql-statements/drop-function.md b/docs/documentation/cn/sql-reference/sql-statements/drop-function.md
new file mode 100644
index 0000000000..f228d11a3f
--- /dev/null
+++ b/docs/documentation/cn/sql-reference/sql-statements/drop-function.md
@@ -0,0 +1,27 @@
+# DROP FUNCTION
+
+## Syntax
+
+```
+DROP FUNCTION function_name
+    (arg_type [, ...])
+```
+
+## Description
+
+Drops a user-defined function. The function is dropped only when its name and argument types match exactly.
+
+## Parameters
+
+> `function_name`: the name of the function to drop
+>
+> `arg_type`: the argument list of the function to drop
+>
+
+## Examples
+
+1. Drop a function
+
+```
+DROP FUNCTION my_add(INT, INT)
+```
diff --git a/docs/documentation/cn/sql-reference/sql-statements/insert.md b/docs/documentation/cn/sql-reference/sql-statements/insert.md
index a2f6d709f5..cce2f194a1 100644
--- a/docs/documentation/cn/sql-reference/sql-statements/insert.md
+++ b/docs/documentation/cn/sql-reference/sql-statements/insert.md
@@ -1,4 +1,4 @@
-# insert
+# INSERT
 
 ## Syntax
 
@@ -33,6 +33,10 @@ column is the list of target columns and may appear in any order. If no target
 >
 > hint: indicators that influence the execution behavior of `INSERT`. `streaming` indicates that the `INSERT` statement is executed synchronously.
 
+## Note
+
+When an `INSERT` statement is executed, data that does not conform to the target table's schema (for example, an over-length string) is filtered out by default. For business scenarios that require data never to be filtered, the session variable `enable_insert_strict` can be set to `true` to ensure that the `INSERT` does not succeed when any data would be filtered out.
+
 ## Examples
 
 The `test` table contains two columns `c1`, `c2`.
diff --git a/docs/documentation/cn/sql-reference/sql-statements/show-function.md b/docs/documentation/cn/sql-reference/sql-statements/show-function.md
new file mode 100644
index 0000000000..ec1818d6de
--- /dev/null
+++ b/docs/documentation/cn/sql-reference/sql-statements/show-function.md
@@ -0,0 +1,36 @@
+# SHOW FUNCTION
+
+## Syntax
+
+```
+SHOW FUNCTION [FROM db]
+```
+
+## Description
+
+Lists all user-defined functions of a database. If a database is specified, the functions of that database are shown; otherwise the database of the current session is queried.
+
+The `SHOW` privilege on the database is required.
+
+## Parameters
+
+> `db`: the name of the database to query
+
+## Examples
+
+```
+mysql> show function in testDb\G
+*************************** 1. row ***************************
+        Signature: my_count(BIGINT)
+      Return Type: BIGINT
+    Function Type: Aggregate
+Intermediate Type: NULL
+       Properties: {"object_file":"http://host:port/libudasample.so","finalize_fn":"_ZN9doris_udf13CountFinalizeEPNS_15FunctionContextERKNS_9BigIntValE","init_fn":"_ZN9doris_udf9CountInitEPNS_15FunctionContextEPNS_9BigIntValE","merge_fn":"_ZN9doris_udf10CountMergeEPNS_15FunctionContextERKNS_9BigIntValEPS2_","md5":"37d185f80f95569e2676da3d5b5b9d2f","update_fn":"_ZN9doris_udf11CountUpdateEPNS_15FunctionContextERKNS_6IntValEPNS_9BigIntValE"}
+*************************** 2. 
row *************************** + Signature: my_add(INT,INT) + Return Type: INT + Function Type: Scalar +Intermediate Type: NULL + Properties: {"symbol":"_ZN9doris_udf6AddUdfEPNS_15FunctionContextERKNS_6IntValES4_","object_file":"http://host:port/libudfsample.so","md5":"cfe7a362d10f3aaf6c49974ee0f1f878"} +2 rows in set (0.00 sec) +``` diff --git a/fe/src/main/cup/sql_parser.cup b/fe/src/main/cup/sql_parser.cup index 1476aa84bb..949d966109 100644 --- a/fe/src/main/cup/sql_parser.cup +++ b/fe/src/main/cup/sql_parser.cup @@ -1894,6 +1894,10 @@ show_param ::= {: RESULT = new ShowRolesStmt(); :} + | KW_FUNCTION opt_db:dbName + {: + RESULT = new ShowFunctionStmt(dbName); + :} ; keys_or_index ::= diff --git a/fe/src/main/java/org/apache/doris/analysis/CreateFunctionStmt.java b/fe/src/main/java/org/apache/doris/analysis/CreateFunctionStmt.java index 6128fdd500..079d0e01c1 100644 --- a/fe/src/main/java/org/apache/doris/analysis/CreateFunctionStmt.java +++ b/fe/src/main/java/org/apache/doris/analysis/CreateFunctionStmt.java @@ -41,6 +41,16 @@ import java.util.Map; // create a user define function public class CreateFunctionStmt extends DdlStmt { + public static final String OBJECT_FILE_KEY = "object_file"; + public static final String SYMBOL_KEY = "symbol"; + public static final String MD5_CHECKSUM = "md5"; + public static final String INIT_KEY = "init_fn"; + public static final String UPDATE_KEY = "update_fn"; + public static final String MERGE_KEY = "merge_fn"; + public static final String SERIALIZE_KEY = "serialize_fn"; + public static final String FINALIZE_KEY = "finalize_fn"; + public static final String GET_VALUE_KEY = "get_value_fn"; + public static final String REMOVE_KEY = "remove_fn"; private final FunctionName functionName; private final boolean isAggregate; @@ -102,7 +112,6 @@ public class CreateFunctionStmt extends DdlStmt { intermediateType = returnType; } - String OBJECT_FILE_KEY = "object_file"; objectFile = properties.get(OBJECT_FILE_KEY); if (Strings.isNullOrEmpty(objectFile)) { throw new AnalysisException("No 'object_file' in properties"); @@ -113,7 +122,6 @@ public class CreateFunctionStmt extends DdlStmt { throw new AnalysisException("cannot to compute object's checksum"); } - String MD5_CHECKSUM = "md5"; String md5sum = properties.get(MD5_CHECKSUM); if (md5sum != null && !md5sum.equalsIgnoreCase(checksum)) { throw new AnalysisException("library's checksum is not equal with input, checksum=" + checksum); @@ -140,14 +148,6 @@ public class CreateFunctionStmt extends DdlStmt { } private void analyzeUda() throws AnalysisException { - final String INIT_KEY = "init_fn"; - final String UPDATE_KEY = "update_fn"; - final String MERGE_KEY = "merge_fn"; - final String SERIALIZE_KEY = "serialize_fn"; - final String FINALIZE_KEY = "finalize_fn"; - final String GET_VALUE_KEY = "get_value_fn"; - final String REMOVE_KEY = "remove_fn"; - AggregateFunction.AggregateFunctionBuilder builder = AggregateFunction.AggregateFunctionBuilder.createUdfBuilder(); builder.name(functionName).argsType(argsDef.getArgTypes()).retType(returnType.getType()) @@ -173,7 +173,6 @@ public class CreateFunctionStmt extends DdlStmt { } private void analyzeUdf() throws AnalysisException { - final String SYMBOL_KEY = "symbol"; String symbol = properties.get(SYMBOL_KEY); if (Strings.isNullOrEmpty(symbol)) { throw new AnalysisException("No 'symbol' in properties"); diff --git a/fe/src/main/java/org/apache/doris/analysis/ShowFunctionStmt.java b/fe/src/main/java/org/apache/doris/analysis/ShowFunctionStmt.java new file mode 
100644 index 0000000000..285eb7a757 --- /dev/null +++ b/fe/src/main/java/org/apache/doris/analysis/ShowFunctionStmt.java @@ -0,0 +1,72 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +package org.apache.doris.analysis; + +import com.google.common.base.Strings; +import org.apache.doris.catalog.Catalog; +import org.apache.doris.catalog.Column; +import org.apache.doris.catalog.ScalarType; +import org.apache.doris.cluster.ClusterNamespace; +import org.apache.doris.common.ErrorCode; +import org.apache.doris.common.ErrorReport; +import org.apache.doris.common.UserException; +import org.apache.doris.mysql.privilege.PrivPredicate; +import org.apache.doris.qe.ConnectContext; +import org.apache.doris.qe.ShowResultSetMetaData; + +public class ShowFunctionStmt extends ShowStmt { + private static final ShowResultSetMetaData META_DATA = + ShowResultSetMetaData.builder() + .addColumn(new Column("Signature", ScalarType.createVarchar(256))) + .addColumn(new Column("Return Type", ScalarType.createVarchar(32))) + .addColumn(new Column("Function Type", ScalarType.createVarchar(16))) + .addColumn(new Column("Intermediate Type", ScalarType.createVarchar(16))) + .addColumn(new Column("Properties", ScalarType.createVarchar(16))) + .build(); + + private String dbName; + + public ShowFunctionStmt(String dbName) { + this.dbName = dbName; + } + + public String getDbName() { return dbName; } + + @Override + public void analyze(Analyzer analyzer) throws UserException { + super.analyze(analyzer); + if (Strings.isNullOrEmpty(dbName)) { + dbName = analyzer.getDefaultDb(); + if (Strings.isNullOrEmpty(dbName)) { + ErrorReport.reportAnalysisException(ErrorCode.ERR_NO_DB_ERROR); + } + } else { + dbName = ClusterNamespace.getFullName(getClusterName(), dbName); + } + + if (!Catalog.getCurrentCatalog().getAuth().checkDbPriv(ConnectContext.get(), dbName, PrivPredicate.SHOW)) { + ErrorReport.reportAnalysisException( + ErrorCode.ERR_DB_ACCESS_DENIED, ConnectContext.get().getQualifiedUser(), dbName); + } + } + + @Override + public ShowResultSetMetaData getMetaData() { + return META_DATA; + } +} diff --git a/fe/src/main/java/org/apache/doris/catalog/AggregateFunction.java b/fe/src/main/java/org/apache/doris/catalog/AggregateFunction.java index 551f1ef6c1..02ea8f434b 100644 --- a/fe/src/main/java/org/apache/doris/catalog/AggregateFunction.java +++ b/fe/src/main/java/org/apache/doris/catalog/AggregateFunction.java @@ -20,6 +20,9 @@ package org.apache.doris.catalog; import static org.apache.doris.common.io.IOUtils.readOptionStringOrNull; import static org.apache.doris.common.io.IOUtils.writeOptionString; +import com.google.common.collect.Maps; +import com.google.gson.Gson; +import org.apache.doris.analysis.CreateFunctionStmt; import org.apache.doris.common.io.IOUtils; import 
org.apache.doris.analysis.FunctionName;
@@ -35,6 +38,7 @@ import java.io.DataOutput;
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;
+import java.util.Map;
 
 // import org.apache.doris.analysis.String;
 
@@ -412,5 +416,18 @@ public class AggregateFunction extends Function {
         isAggregateFn = input.readBoolean();
         returnsNonNullOnEmpty = input.readBoolean();
     }
+
+    @Override
+    public String getProperties() {
+        Map<String, String> properties = Maps.newHashMap();
+        properties.put(CreateFunctionStmt.OBJECT_FILE_KEY, getLocation().toString());
+        properties.put(CreateFunctionStmt.MD5_CHECKSUM, checksum);
+        properties.put(CreateFunctionStmt.INIT_KEY, initFnSymbol);
+        properties.put(CreateFunctionStmt.UPDATE_KEY, updateFnSymbol);
+        properties.put(CreateFunctionStmt.MERGE_KEY, mergeFnSymbol);
+        properties.put(CreateFunctionStmt.SERIALIZE_KEY, serializeFnSymbol);
+        properties.put(CreateFunctionStmt.FINALIZE_KEY, finalizeFnSymbol);
+        return new Gson().toJson(properties);
+    }
 }
diff --git a/fe/src/main/java/org/apache/doris/catalog/Database.java b/fe/src/main/java/org/apache/doris/catalog/Database.java
index ad1010162f..f042d9c7db 100644
--- a/fe/src/main/java/org/apache/doris/catalog/Database.java
+++ b/fe/src/main/java/org/apache/doris/catalog/Database.java
@@ -17,6 +17,7 @@
 package org.apache.doris.catalog;
 
+import com.google.common.collect.Lists;
 import org.apache.doris.catalog.MaterializedIndex.IndexState;
 import org.apache.doris.catalog.Replica.ReplicaState;
 import org.apache.doris.catalog.Table.TableType;
@@ -596,6 +597,14 @@ public class Database extends MetaObject implements Writable {
         return Function.getFunction(fns, desc, mode);
     }
 
+    public synchronized List<Function> getFunctions() {
+        List<Function> functions = Lists.newArrayList();
+        for (Map.Entry<String, List<Function>> entry : name2Function.entrySet()) {
+            functions.addAll(entry.getValue());
+        }
+        return functions;
+    }
+
     public boolean isInfoSchemaDb() {
         return ClusterNamespace.getNameFromFullName(fullQualifiedName).equalsIgnoreCase(InfoSchemaDb.DATABASE_NAME);
     }
diff --git a/fe/src/main/java/org/apache/doris/catalog/Function.java b/fe/src/main/java/org/apache/doris/catalog/Function.java
index 0df7bc951d..30e5e63a67 100644
--- a/fe/src/main/java/org/apache/doris/catalog/Function.java
+++ b/fe/src/main/java/org/apache/doris/catalog/Function.java
@@ -102,7 +102,7 @@ public class Function implements Writable {
     private TFunctionBinaryType binaryType;
 
     // library's checksum to make sure all backends use one library to serve user's request
-    private String checksum = "";
+    protected String checksum = "";
 
     // Only used for serialization
     protected Function() {
@@ -645,4 +645,24 @@ public class Function implements Writable {
         function.readFields(input);
         return function;
     }
+
+    public String getSignature() {
+        StringBuilder sb = new StringBuilder();
+        sb.append(name.getFunction()).append("(");
+        for (int i = 0; i < argTypes.length; ++i) {
+            if (i != 0) {
+                sb.append(',');
+            }
+            sb.append(argTypes[i].getPrimitiveType().toString());
+        }
+        if (hasVarArgs) {
+            sb.append(", ...");
+        }
+        sb.append(")");
+        return sb.toString();
+    }
+
+    public String getProperties() {
+        return "";
+    }
 }
diff --git a/fe/src/main/java/org/apache/doris/catalog/ScalarFunction.java b/fe/src/main/java/org/apache/doris/catalog/ScalarFunction.java
index 9a1e1928ee..6be2cfb0cd 100644
--- a/fe/src/main/java/org/apache/doris/catalog/ScalarFunction.java
+++ b/fe/src/main/java/org/apache/doris/catalog/ScalarFunction.java
@@ -21,6 +21,9 @@ import static org.apache.doris.common.io.IOUtils.writeOptionString;
 
 import com.google.common.base.Preconditions;
 import com.google.common.collect.Lists;
+import com.google.common.collect.Maps;
+import com.google.gson.Gson;
+import org.apache.doris.analysis.CreateFunctionStmt;
 import org.apache.doris.analysis.FunctionName;
 import org.apache.doris.analysis.HdfsURI;
 import org.apache.doris.common.io.Text;
@@ -35,6 +38,7 @@ import java.io.DataOutput;
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;
+import java.util.Map;
 
 // import org.apache.doris.thrift.TSymbolType;
 
@@ -293,4 +297,13 @@ public class ScalarFunction extends Function {
             closeFnSymbol = Text.readString(input);
         }
     }
+
+    @Override
+    public String getProperties() {
+        Map<String, String> properties = Maps.newHashMap();
+        properties.put(CreateFunctionStmt.OBJECT_FILE_KEY, getLocation().toString());
+        properties.put(CreateFunctionStmt.MD5_CHECKSUM, checksum);
+        properties.put(CreateFunctionStmt.SYMBOL_KEY, symbolName);
+        return new Gson().toJson(properties);
+    }
 }
diff --git a/fe/src/main/java/org/apache/doris/qe/ShowExecutor.java b/fe/src/main/java/org/apache/doris/qe/ShowExecutor.java
index b18331be99..c72cb17ff6 100644
--- a/fe/src/main/java/org/apache/doris/qe/ShowExecutor.java
+++ b/fe/src/main/java/org/apache/doris/qe/ShowExecutor.java
@@ -39,6 +39,7 @@ import org.apache.doris.analysis.ShowDeleteStmt;
 import org.apache.doris.analysis.ShowEnginesStmt;
 import org.apache.doris.analysis.ShowExportStmt;
 import org.apache.doris.analysis.ShowFrontendsStmt;
+import org.apache.doris.analysis.ShowFunctionStmt;
 import org.apache.doris.analysis.ShowGrantsStmt;
 import org.apache.doris.analysis.ShowLoadStmt;
 import org.apache.doris.analysis.ShowLoadWarningsStmt;
@@ -63,16 +64,20 @@ import org.apache.doris.backup.AbstractJob;
 import org.apache.doris.backup.BackupJob;
 import org.apache.doris.backup.Repository;
 import org.apache.doris.backup.RestoreJob;
+import org.apache.doris.catalog.AggregateFunction;
 import org.apache.doris.catalog.Catalog;
 import org.apache.doris.catalog.Column;
 import org.apache.doris.catalog.Database;
+import org.apache.doris.catalog.Function;
 import org.apache.doris.catalog.MaterializedIndex;
 import org.apache.doris.catalog.MetadataViewer;
 import org.apache.doris.catalog.OlapTable;
 import org.apache.doris.catalog.Partition;
+import org.apache.doris.catalog.ScalarFunction;
 import org.apache.doris.catalog.Table;
 import org.apache.doris.catalog.Tablet;
 import org.apache.doris.catalog.TabletInvertedIndex;
+import org.apache.doris.catalog.Type;
 import org.apache.doris.catalog.View;
 import org.apache.doris.cluster.BaseParam;
 import org.apache.doris.cluster.ClusterNamespace;
@@ -167,6 +172,8 @@ public class ShowExecutor {
             handleShowProcesslist();
         } else if (stmt instanceof ShowEnginesStmt) {
             handleShowEngines();
+        } else if (stmt instanceof ShowFunctionStmt) {
+            handleShowFunction();
         } else if (stmt instanceof ShowVariablesStmt) {
             handleShowVariables();
         } else if (stmt instanceof ShowColumnStmt) {
@@ -276,6 +283,46 @@ public class ShowExecutor {
         resultSet = new ShowResultSet(showStmt.getMetaData(), rowSet);
     }
 
+    // Handle show function
+    private void handleShowFunction() throws AnalysisException {
+        ShowFunctionStmt showStmt = (ShowFunctionStmt) stmt;
+
+        Database db = ctx.getCatalog().getDb(showStmt.getDbName());
+        if (db == null) {
+            ErrorReport.reportAnalysisException(ErrorCode.ERR_BAD_DB_ERROR, showStmt.getDbName());
+        }
+        List<Function> functions = db.getFunctions();
+
+        List<List<String>> rowSet = Lists.newArrayList();
+        for (Function function : functions) {
+            List<String> row = Lists.newArrayList();
+            // signature
+ row.add(function.getSignature()); + // return type + row.add(function.getReturnType().getPrimitiveType().toString()); + // function type + // intermediate type + if (function instanceof ScalarFunction) { + row.add("Scalar"); + row.add("NULL"); + } else { + row.add("Aggregate"); + AggregateFunction aggFunc = (AggregateFunction) function; + Type intermediateType = aggFunc.getIntermediateType(); + if (intermediateType != null) { + row.add(intermediateType.getPrimitiveType().toString()); + } else { + row.add("NULL"); + } + } + // property + row.add(function.getProperties()); + rowSet.add(row); + } + // Only success + resultSet = new ShowResultSet(showStmt.getMetaData(), rowSet); + } + private void handleShowProc() throws AnalysisException { ShowProcStmt showProcStmt = (ShowProcStmt) stmt; ShowResultSetMetaData metaData = showProcStmt.getMetaData();