This is a security consideration: we don't want a client to be able to cause QEMU to allocate an arbitrary amount of memory. For now, we use a limit of 64MB, which should be large enough for any reasonably sized token.
This is important for parsing JSON from untrusted sources. Signed-off-by: Anthony Liguori <aliguori@us.ibm.com> diff --git a/json-lexer.c b/json-lexer.c index 834d7af..3462c89 100644 --- a/json-lexer.c +++ b/json-lexer.c @@ -18,6 +18,8 @@ #include "qemu-common.h" #include "json-lexer.h" +#define MAX_TOKEN_SIZE (64ULL << 20) + /* * \"([^\\\"]|(\\\"\\'\\\\\\/\\b\\f\\n\\r\\t\\u[0-9a-fA-F][0-9a-fA-F][0-9a-fA-F][0-9a-fA-F]))*\" * '([^\\']|(\\\"\\'\\\\\\/\\b\\f\\n\\r\\t\\u[0-9a-fA-F][0-9a-fA-F][0-9a-fA-F][0-9a-fA-F]))*' @@ -312,6 +314,17 @@ static int json_lexer_feed_char(JSONLexer *lexer, char ch) } lexer->state = new_state; } while (!char_consumed); + + /* Do not let a single token grow to an arbitrarily large size, + * this is a security consideration. + */ + if (lexer->token->length > MAX_TOKEN_SIZE) { + lexer->emit(lexer, lexer->token, lexer->state, lexer->x, lexer->y); + QDECREF(lexer->token); + lexer->token = qstring_new(); + lexer->state = IN_START; + } + return 0; } -- 1.7.0.4