Merge branch '2.3' into 2.4

Markus Mäkelä 2020-11-16 09:25:43 +02:00
commit 5a33d45d4d
No known key found for this signature in database
GPG Key ID: 5CE746D557ACC499
4 changed files with 48 additions and 52 deletions

View File

@@ -25,7 +25,7 @@ file locations, configuration options and version information.
},
"data": {
"attributes": {
"parameters": {
"parameters": { // Core MaxScale parameters
"libdir": "/usr/lib64/maxscale",
"datadir": "/var/lib/maxscale",
"process_datadir": "/var/lib/maxscale/data16218",
@@ -58,11 +58,11 @@ file locations, configuration options and version information.
"dump_last_statements": "never",
"load_persisted_configs": false
},
"version": "2.3.6",
"commit": "47158faf12c156775c39388652a77f8a8c542d28",
"started_at": "Thu, 04 Apr 2019 21:04:06 GMT",
"activated_at": "Thu, 04 Apr 2019 21:04:06 GMT",
"uptime": 337
"version": "2.3.6", // The MaxScale version
"commit": "47158faf12c156775c39388652a77f8a8c542d28", // Commit that MaxScale was built from
"started_at": "Thu, 04 Apr 2019 21:04:06 GMT", // The time when MaxScale was started
"activated_at": "Thu, 04 Apr 2019 21:04:06 GMT", // The last time when the `passive` parameter was set to `false`
"uptime": 337 // How many seconds MaxScale has been running
},
"id": "maxscale",
"type": "maxscale"
@@ -298,7 +298,7 @@ location where the log files are stored.
},
"data": {
"attributes": {
"parameters": {
"parameters": { // Logging parameters
"highprecision": false,
"maxlog": true,
"syslog": true,
@@ -312,8 +312,8 @@ location where the log files are stored.
"log_info": true,
"log_debug": false
},
"log_file": "/home/markusjm/build/log/maxscale/maxscale.log",
"log_priorities": [
"log_file": "/home/markusjm/build/log/maxscale/maxscale.log", // The file MaxScale is logging into if `maxlog` is enabled
"log_priorities": [ // Active log priorities
"error",
"warning",
"notice",
@@ -406,22 +406,22 @@ valid loaded module.
"description": "Firewall Filter",
"api": "filter",
"status": "GA",
"commands": [
"commands": [ // List of module commands
{
"id": "rules/reload",
"id": "rules/reload", // Name of the command
"type": "module_command",
"links": {
"self": "http://localhost:8989/v1/modules/dbfwfilter/rules/reload"
},
"attributes": {
"method": "POST",
"arg_min": 1,
"arg_max": 2,
"parameters": [
"method": "POST", // Whether POST or GET should be used to call this command
"arg_min": 1, // Minimum number of arguments
"arg_max": 2, // Maximum number of arguments
"parameters": [ // Parameter types for the command
{
"description": "Filter to reload",
"type": "FILTER",
"required": true
"description": "Filter to reload", // Parameter description
"type": "FILTER", // Value type
"required": true // Whether the parameter is required
},
{
"description": "Path to rule file",
@@ -432,15 +432,15 @@ valid loaded module.
}
}
],
"parameters": [
"parameters": [ // Module parameters
{
"name": "rules",
"type": "path"
"name": "rules", // Parameter name
"type": "path" // Parameter type
},
{
"name": "log_match",
"type": "bool",
"default_value": "false"
"default_value": "false" // Default value of the parameter
},
{
"name": "log_no_match",
@@ -451,7 +451,7 @@ valid loaded module.
"name": "action",
"type": "enum",
"default_value": "block",
"enum_values": [
"enum_values": [ // Accepted enumeration values
"allow",
"block",
"ignore"
@@ -559,7 +559,7 @@ Command with output:
"links": {
"self": "http://localhost:8989/v1/maxscale/modules/dbfwfilter/rules/json"
},
"meta": [
"meta": [ // Output of module command (module dependent)
{
"name": "test3",
"type": "COLUMN",
@@ -602,14 +602,12 @@ GET /v1/maxscale/query_classifier/classify?sql=SELECT+1
"id": "classify",
"type": "classify",
"attributes": {
"parameters": {
"parse_result": "QC_QUERY_PARSED",
"type_mask": "QUERY_TYPE_READ",
"operation": "QUERY_OP_SELECT",
"has_where_clause": false,
"fields": [],
"functions": []
}
"parse_result": "QC_QUERY_PARSED",
"type_mask": "QUERY_TYPE_READ",
"operation": "QUERY_OP_SELECT",
"has_where_clause": false,
"fields": [],
"functions": []
}
}
}
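
The reworked payload above no longer nests the classification result under a `parameters` object. A minimal Node.js sketch of reading the flattened fields, assuming a MaxScale REST API on localhost:8989, the default `admin:mariadb` credentials and Node 18+ for the built-in `fetch`:

// Hypothetical client code; host, port and credentials are assumptions.
const auth = 'Basic ' + Buffer.from('admin:mariadb').toString('base64');

async function classify(sql) {
    const url = 'http://localhost:8989/v1/maxscale/query_classifier/classify?sql='
        + encodeURIComponent(sql);
    const res = await fetch(url, {headers: {Authorization: auth}});
    const body = await res.json();

    // The classification fields now live directly under `attributes`
    const attr = body.data.attributes;
    console.log(attr.parse_result, attr.operation, attr.has_where_clause);
}

classify('SELECT 1');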

View File

@@ -59,7 +59,7 @@ MXS_BEGIN_DECLS
#define MYSQL_CHARSET_OFFSET 12
#define MYSQL_CLIENT_CAP_OFFSET 4
#define MYSQL_CLIENT_CAP_SIZE 4
-#define MARIADB_CAP_OFFSET MYSQL_CHARSET_OFFSET + 19
+#define MARIADB_CAP_OFFSET MYSQL_CHARSET_OFFSET + 20
#define GW_MYSQL_PROTOCOL_VERSION 10 // version is 10
#define GW_MYSQL_HANDSHAKE_FILLER 0x00
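
The corrected value follows from the client handshake response layout: the one-byte character set at MYSQL_CHARSET_OFFSET is followed by a 19-byte reserved filler, and the four MariaDB extended capability bytes come after that, i.e. 20 bytes past the charset rather than 19. A small sketch of reading the field from a raw packet buffer (Node.js; the helper name is illustrative):

// Offsets count from the start of the packet, including the 4-byte packet header.
const MYSQL_CHARSET_OFFSET = 12;                      // header(4) + client capabilities(4) + max packet size(4)
const MARIADB_CAP_OFFSET = MYSQL_CHARSET_OFFSET + 20; // charset(1) + 19-byte reserved filler

// Hypothetical helper; `packet` is a Buffer holding the whole handshake response.
function mariadbExtendedCaps(packet) {
    return packet.readUInt32LE(MARIADB_CAP_OFFSET);   // 4-byte little-endian capability bitmask
}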

View File

@@ -13,12 +13,12 @@
require('./common.js')()
const classify_fields = [
-{'Parse result': 'attributes.parameters.parse_result'},
-{'Type mask': 'attributes.parameters.type_mask'},
-{'Operation': 'attributes.parameters.operation'},
-{'Has where clause': 'attributes.parameters.has_where_clause'},
-{'Fields': 'attributes.parameters.fields'},
-{'Functions': 'attributes.parameters.functions'}
+{'Parse result': 'attributes.parse_result'},
+{'Type mask': 'attributes.type_mask'},
+{'Operation': 'attributes.operation'},
+{'Has where clause': 'attributes.has_where_clause'},
+{'Fields': 'attributes.fields'},
+{'Functions': 'attributes.functions'}
]
exports.command = 'classify <statement>'
@@ -28,12 +28,12 @@ exports.handler = function (argv) {
return doRequest(host,
'maxscale/query_classifier/classify?sql=' + argv.statement,
(res) => {
-if (res.data.attributes.parameters.functions) {
-var a = res.data.attributes.parameters.functions.map((f) => {
+if (res.data.attributes.functions) {
+var a = res.data.attributes.functions.map((f) => {
return f.name + ': (' + f.arguments.join(', ') + ')'
});
-res.data.attributes.parameters.functions = a;
+res.data.attributes.functions = a;
}
return formatResource(classify_fields, res.data)
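
The `map` above only flattens each entry of `functions` (an object with a `name` and an `arguments` array) into a display string before the resource is formatted. Applied to a hypothetical classification result:

// Illustrative input; real data comes from the classify endpoint.
var functions = [{name: 'concat', arguments: ['a', 'b']}, {name: 'length', arguments: ['a']}];
var formatted = functions.map((f) => {
    return f.name + ': (' + f.arguments.join(', ') + ')'
});
// formatted is now [ 'concat: (a, b)', 'length: (a)' ]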

View File

@@ -1555,32 +1555,30 @@ void append_function_info(json_t* pParams, GWBUF* pBuffer)
std::unique_ptr<json_t> qc_classify_as_json(const char* zHost, const std::string& statement)
{
-json_t* pParams = json_object();
+json_t* pAttributes = json_object();
std::unique_ptr<GWBUF> sBuffer(modutil_create_query(statement.c_str()));
GWBUF* pBuffer = sBuffer.get();
qc_parse_result_t result = qc_parse(pBuffer, QC_COLLECT_ALL);
-json_object_set_new(pParams, CN_PARSE_RESULT, json_string(qc_result_to_string(result)));
+json_object_set_new(pAttributes, CN_PARSE_RESULT, json_string(qc_result_to_string(result)));
if (result != QC_QUERY_INVALID)
{
char* zType_mask = qc_typemask_to_string(qc_get_type_mask(pBuffer));
-json_object_set_new(pParams, CN_TYPE_MASK, json_string(zType_mask));
+json_object_set_new(pAttributes, CN_TYPE_MASK, json_string(zType_mask));
MXS_FREE(zType_mask);
-json_object_set_new(pParams, CN_OPERATION, json_string(qc_op_to_string(qc_get_operation(pBuffer))));
+json_object_set_new(pAttributes, CN_OPERATION,
+json_string(qc_op_to_string(qc_get_operation(pBuffer))));
bool has_clause = qc_query_has_clause(pBuffer);
-json_object_set_new(pParams, CN_HAS_WHERE_CLAUSE, json_boolean(has_clause));
+json_object_set_new(pAttributes, CN_HAS_WHERE_CLAUSE, json_boolean(has_clause));
-append_field_info(pParams, pBuffer);
-append_function_info(pParams, pBuffer);
+append_field_info(pAttributes, pBuffer);
+append_function_info(pAttributes, pBuffer);
}
-json_t* pAttributes = json_object();
-json_object_set_new(pAttributes, CN_PARAMETERS, pParams);
json_t* pSelf = json_object();
json_object_set_new(pSelf, CN_ID, json_string(CN_CLASSIFY));
json_object_set_new(pSelf, CN_TYPE, json_string(CN_CLASSIFY));