Add missing error handling to Avro file handling
Some of the JSON errors weren't handled, which could cause problems when a malformed schema definition is read. Also added more error messages for situations where opening a file fails.
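For context, the `schema` pointer checked in the hunk below comes from parsing the JSON schema text. A minimal sketch, assuming the text is parsed with Jansson's `json_loads()` (a reasonable guess for code using `json_t` and `json_unpack`, not confirmed by this hunk), of how a malformed definition surfaces as an error:

#include <jansson.h>
#include <stdio.h>

int main(void)
{
    /* Deliberately truncated schema text to trigger a parse error. */
    const char* json = "{\"type\": \"record\", \"fields\": ";
    json_error_t err;
    json_t* schema = json_loads(json, 0, &err);

    if (schema == NULL)
    {
        /* json_loads() returns NULL on malformed input and fills err. */
        fprintf(stderr, "Malformed schema at line %d: %s\n", err.line, err.text);
        return 1;
    }

    json_decref(schema);
    return 0;
}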
@@ -126,21 +126,35 @@ MAXAVRO_SCHEMA* maxavro_schema_alloc(const char* json)
     if (schema)
     {
         json_t *field_arr = NULL;
-        json_unpack(schema, "{s:o}", "fields", &field_arr);
-        size_t arr_size = json_array_size(field_arr);
-        rval->fields = malloc(sizeof(MAXAVRO_SCHEMA_FIELD) * arr_size);
-        rval->num_fields = arr_size;
 
-        for (int i = 0; i < arr_size; i++)
+        if (json_unpack(schema, "{s:o}", "fields", &field_arr) == 0)
         {
-            json_t *object = json_array_get(field_arr, i);
-            char *key;
-            json_t *value_obj;
+            size_t arr_size = json_array_size(field_arr);
+            rval->fields = malloc(sizeof(MAXAVRO_SCHEMA_FIELD) * arr_size);
+            rval->num_fields = arr_size;
 
-            json_unpack(object, "{s:s s:o}", "name", &key, "type", &value_obj);
-            rval->fields[i].name = strdup(key);
-            rval->fields[i].type = unpack_to_type(value_obj, &rval->fields[i]);
+            for (int i = 0; i < arr_size; i++)
+            {
+                json_t *object = json_array_get(field_arr, i);
+                char *key;
+                json_t *value_obj;
+
+                if (json_unpack(object, "{s:s s:o}", "name", &key, "type", &value_obj) == 0)
+                {
+                    rval->fields[i].name = strdup(key);
+                    rval->fields[i].type = unpack_to_type(value_obj, &rval->fields[i]);
+                }
+                else
+                {
+                    MXS_ERROR("Failed to unpack JSON Object \"name\": %s", json);
+                }
+            }
+        }
+        else
+        {
+            MXS_ERROR("Failed to unpack JSON Object \"fields\": %s", json);
         }
 
+
         json_decref(schema);
     }
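The hunk above only covers the schema-parsing half of the commit message; the other half, better error messages when opening a file fails, is not shown here. Below is a hypothetical sketch of that pattern. The function name is an assumption for illustration, and the real code would presumably log through the MXS_ERROR macro seen above rather than fprintf.

#include <errno.h>
#include <stdio.h>
#include <string.h>

/* Hypothetical sketch: report why opening an Avro file failed instead of
 * silently returning NULL. */
static FILE* open_avro_file(const char* filename)
{
    FILE* file = fopen(filename, "rb");

    if (file == NULL)
    {
        /* errno explains the failure: missing file, permissions, and so on. */
        fprintf(stderr, "Failed to open file '%s': %d, %s\n",
                filename, errno, strerror(errno));
    }

    return file;
}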