Merge branch '2.0' into develop

@@ -291,7 +291,9 @@ static void unpack_datetime2(uint8_t *ptr, uint8_t decimals, struct tm *dest)
     dest->tm_hour = time >> 12;
     dest->tm_mday = date % (1 << 5);
     dest->tm_mon = yearmonth % 13;
-    dest->tm_year = yearmonth / 13;
+
+    /** struct tm stores the year as: Year - 1900 */
+    dest->tm_year = (yearmonth / 13) - 1900;
 }

 /** Unpack a "reverse" byte order value */

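A note on the change above: struct tm from <time.h> stores the year as an offset from 1900, so a raw calendar year has to be adjusted before it is stored, which is exactly what the added "- 1900" does. A minimal, self-contained sketch of the convention, independent of the MaxScale code:

    #include <stdio.h>
    #include <time.h>

    int main(void)
    {
        /* struct tm counts years from 1900 and months from 0 */
        struct tm when = { 0 };
        when.tm_year = 2017 - 1900;   /* calendar year 2017 */
        when.tm_mon = 0;              /* January */
        when.tm_mday = 1;

        char buf[32];
        strftime(buf, sizeof(buf), "%Y-%m-%d", &when);
        printf("%s\n", buf);          /* prints 2017-01-01 */
        return 0;
    }
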
@@ -2304,6 +2304,24 @@ DBFW_USER* find_user_data(HASHTABLE *hash, const char *name, const char *remote)
     return user;
 }

+static bool command_is_mandatory(const GWBUF *buffer)
+{
+    switch (MYSQL_GET_COMMAND((uint8_t*)GWBUF_DATA(buffer)))
+    {
+    case MYSQL_COM_QUIT:
+    case MYSQL_COM_PING:
+    case MYSQL_COM_CHANGE_USER:
+    case MYSQL_COM_SET_OPTION:
+    case MYSQL_COM_FIELD_LIST:
+    case MYSQL_COM_PROCESS_KILL:
+    case MYSQL_COM_PROCESS_INFO:
+        return true;
+
+    default:
+        return false;
+    }
+}
+
 /**
  * The routeQuery entry point. This is passed the query buffer
  * to which the filter should be applied. Once processed the

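The new command_is_mandatory() helper dispatches on the MySQL command byte of the packet; judging by its name and the commands listed, these are session-level commands the firewall filter should always let through. In the client/server protocol a command packet starts with a 3-byte payload length and a 1-byte sequence number, so the command ID sits in the fifth byte of the buffer, which is presumably what MYSQL_GET_COMMAND(GWBUF_DATA(buffer)) extracts. A standalone sketch of the same kind of dispatch, with command codes taken from the MySQL protocol rather than MaxScale's headers:

    #include <stdint.h>
    #include <stdio.h>

    /* Command codes from the MySQL client/server protocol. */
    enum
    {
        COM_QUIT = 0x01,
        COM_FIELD_LIST = 0x04,
        COM_PROCESS_INFO = 0x0a,
        COM_PROCESS_KILL = 0x0c,
        COM_PING = 0x0e,
        COM_CHANGE_USER = 0x11,
        COM_SET_OPTION = 0x1b
    };

    /* The command byte follows the 3-byte payload length and 1-byte sequence number. */
    static uint8_t packet_command(const uint8_t *packet)
    {
        return packet[4];
    }

    int main(void)
    {
        /* A minimal COM_PING packet: payload length 1, sequence 0, command 0x0e. */
        uint8_t ping[] = { 0x01, 0x00, 0x00, 0x00, COM_PING };

        switch (packet_command(ping))
        {
        case COM_QUIT:
        case COM_PING:
        case COM_CHANGE_USER:
        case COM_SET_OPTION:
        case COM_FIELD_LIST:
        case COM_PROCESS_KILL:
        case COM_PROCESS_INFO:
            printf("session-level command, always passed through\n");
            break;
        default:
            printf("subject to rule matching\n");
            break;
        }
        return 0;
    }
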
@@ -2340,6 +2358,13 @@ routeQuery(MXS_FILTER *instance, MXS_FILTER_SESSION *session, GWBUF *queue)
         type = qc_get_type_mask(queue);
     }

+    uint32_t type = 0;
+
+    if (modutil_is_SQL(queue) || modutil_is_SQL_prepare(queue))
+    {
+        type = qc_get_type(queue);
+    }
+
     if (modutil_is_SQL(queue) && modutil_count_statements(queue) > 1)
     {
         GWBUF* err = gen_dummy_error(my_session, "This filter does not support "

@@ -2349,6 +2374,17 @@ routeQuery(MXS_FILTER *instance, MXS_FILTER_SESSION *session, GWBUF *queue)
         my_session->errmsg = NULL;
         rval = dcb->func.write(dcb, err);
     }
+    else if (QUERY_IS_TYPE(type, QUERY_TYPE_PREPARE_STMT) ||
+             QUERY_IS_TYPE(type, QUERY_TYPE_PREPARE_NAMED_STMT) ||
+             modutil_is_SQL_prepare(queue))
+    {
+        GWBUF* err = gen_dummy_error(my_session, "This filter does not support "
+                                     "prepared statements.");
+        gwbuf_free(queue);
+        free(my_session->errmsg);
+        my_session->errmsg = NULL;
+        rval = dcb->func.write(dcb, err);
+    }
     else
     {
         GWBUF* analyzed_queue = queue;

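For context on the new checks: the query classifier reports the statement's classification as a bitmask of QUERY_TYPE_* flags, and QUERY_IS_TYPE() tests whether a given type bit is set, which is how the filter rejects PREPARE statements above. A minimal sketch of that bit-test idiom; the flag values and the macro body here are illustrative stand-ins, not MaxScale's actual definitions:

    #include <stdbool.h>
    #include <stdint.h>
    #include <stdio.h>

    /* Illustrative flag values; the real QUERY_TYPE_* constants come from the query classifier. */
    #define QUERY_TYPE_PREPARE_STMT       0x0001
    #define QUERY_TYPE_PREPARE_NAMED_STMT 0x0002

    /* A bit test in the spirit of QUERY_IS_TYPE(mask, type). */
    #define IS_TYPE(mask, type) (((mask) & (type)) == (type))

    int main(void)
    {
        uint32_t type = QUERY_TYPE_PREPARE_NAMED_STMT;   /* e.g. "PREPARE stmt FROM ..." */

        bool is_prepare = IS_TYPE(type, QUERY_TYPE_PREPARE_STMT) ||
                          IS_TYPE(type, QUERY_TYPE_PREPARE_NAMED_STMT);

        printf("prepared statement: %s\n", is_prepare ? "yes" : "no");
        return 0;
    }
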
@@ -36,7 +36,7 @@ producer = KafkaProducer(bootstrap_servers=[opts.kafka_broker])

 while True:
     try:
-        buf = sys.stdin.read(128)
+        buf = sys.stdin.readline()

         if len(buf) == 0:
             break

@@ -48,6 +48,7 @@ while True:
             data = decoder.raw_decode(rbuf.decode('ascii'))
             rbuf = rbuf[data[1]:]
             producer.send(topic=opts.kafka_topic, value=json.dumps(data[0]).encode())
+            producer.flush()

     # JSONDecoder will return a ValueError if a partial JSON object is read
     except ValueError as err:

@@ -57,5 +58,3 @@ while True:
     except Exception as ex:
         print(ex)
         break
-
-producer.flush()

@@ -774,48 +774,54 @@ static bool avro_client_stream_data(AVRO_CLIENT *client)
     char filename[PATH_MAX + 1];
     snprintf(filename, PATH_MAX, "%s/%s", router->avrodir, client->avro_binfile);

+    bool ok = true;
+
     spinlock_acquire(&client->file_lock);
-    if (client->file_handle == NULL)
+    if (client->file_handle == NULL &&
+        (client->file_handle = maxavro_file_open(filename)) == NULL)
     {
-        client->file_handle = maxavro_file_open(filename);
+        ok = false;
     }
     spinlock_release(&client->file_lock);

-    switch (client->format)
+    if (ok)
     {
-    case AVRO_FORMAT_JSON:
-        /** Currently only JSON format supports seeking to a GTID */
-        if (client->requested_gtid &&
-            seek_to_index_pos(client, client->file_handle) &&
-            seek_to_gtid(client, client->file_handle))
-        {
-            client->requested_gtid = false;
-        }
-
-        read_more = stream_json(client);
-        break;
-
-    case AVRO_FORMAT_AVRO:
-        read_more = stream_binary(client);
-        break;
-
-    default:
-        MXS_ERROR("Unexpected format: %d", client->format);
-        break;
-    }
-
-    if (maxavro_get_error(client->file_handle) != MAXAVRO_ERR_NONE)
-    {
-        MXS_ERROR("Reading Avro file failed with error '%s'.",
-                  maxavro_get_error_string(client->file_handle));
-    }
-
-    /* update client struct */
-    memcpy(&client->avro_file, client->file_handle, sizeof(client->avro_file));
-
-    /* may be just use client->avro_file->records_read and remove this var */
-    client->last_sent_pos = client->avro_file.records_read;
+        switch (client->format)
+        {
+        case AVRO_FORMAT_JSON:
+            /** Currently only JSON format supports seeking to a GTID */
+            if (client->requested_gtid &&
+                seek_to_index_pos(client, client->file_handle) &&
+                seek_to_gtid(client, client->file_handle))
+            {
+                client->requested_gtid = false;
+            }
+
+            read_more = stream_json(client);
+            break;
+
+        case AVRO_FORMAT_AVRO:
+            read_more = stream_binary(client);
+            break;
+
+        default:
+            MXS_ERROR("Unexpected format: %d", client->format);
+            break;
+        }
+
+        if (maxavro_get_error(client->file_handle) != MAXAVRO_ERR_NONE)
+        {
+            MXS_ERROR("Reading Avro file failed with error '%s'.",
+                      maxavro_get_error_string(client->file_handle));
+        }
+
+        /* update client struct */
+        memcpy(&client->avro_file, client->file_handle, sizeof(client->avro_file));
+
+        /* may be just use client->avro_file->records_read and remove this var */
+        client->last_sent_pos = client->avro_file.records_read;
+    }
 }
 else
 {

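The restructuring above introduces an ok flag so that the lazy open of the Avro file and its failure check happen in one condition, and the format switch and the bookkeeping after it only run when a file handle is actually available. A generic sketch of the same open-once-under-a-lock pattern, using pthreads and stdio instead of MaxScale's spinlock and maxavro types:

    #include <pthread.h>
    #include <stdbool.h>
    #include <stdio.h>

    struct client
    {
        pthread_mutex_t lock;
        FILE *file;            /* lazily opened on first use */
    };

    /* Open the client's file on first use; return false if it cannot be opened. */
    static bool ensure_file_open(struct client *c, const char *path)
    {
        bool ok = true;

        pthread_mutex_lock(&c->lock);
        if (c->file == NULL && (c->file = fopen(path, "rb")) == NULL)
        {
            ok = false;
        }
        pthread_mutex_unlock(&c->lock);

        return ok;
    }

    int main(void)
    {
        struct client c = { PTHREAD_MUTEX_INITIALIZER, NULL };

        if (ensure_file_open(&c, "/tmp/example.avro"))   /* placeholder path */
        {
            /* ...stream records from c.file here... */
            fclose(c.file);
        }
        else
        {
            fprintf(stderr, "could not open file\n");
        }
        return 0;
    }
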
@@ -73,6 +73,7 @@ int index_query_cb(void *data, int rows, char** values, char** names)
 void avro_index_file(AVRO_INSTANCE *router, const char* filename)
 {
     MAXAVRO_FILE *file = maxavro_file_open(filename);
+
     if (file)
     {
         char *name = strrchr(filename, '/');

@@ -166,6 +167,10 @@ void avro_index_file(AVRO_INSTANCE *router, const char* filename)

         maxavro_file_close(file);
     }
+    else
+    {
+        MXS_ERROR("Failed to open file '%s' when generating file index.", filename);
+    }
 }

 /**

@@ -1,5 +1,5 @@
 if(BUILD_TESTS)
   add_executable(testbinlogrouter testbinlog.c ../blr.c ../blr_slave.c ../blr_master.c ../blr_file.c ../blr_cache.c)
   target_link_libraries(testbinlogrouter maxscale-common ${PCRE_LINK_FLAGS} uuid)
-  add_test(TestBinlogRouter ${CMAKE_CURRENT_BINARY_DIR}/testbinlogrouter)
+  add_test(NAME TestBinlogRouter COMMAND ./testbinlogrouter WORKING_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR})
 endif()