path: root/qobject
author     Markus Armbruster <armbru@redhat.com>    2018-08-23 18:40:15 +0200
committer  Markus Armbruster <armbru@redhat.com>    2018-08-24 20:26:37 +0200
commit     da09cfbf9dcd07c48fe95bdfb2968305de9b9690
tree       97b894695b7170ca371fb2a3bd82eb7b65e34e34 /qobject
parent     dd98e8481992741a6b5ec0bdfcee05c1c8f602d6
json: Enforce token count and size limits more tightly
Token count and size limits exist to guard against excessive heap usage. We
check them only after we created the token on the heap. That's assigning a
cowboy to the barn to lasso the horse after it has bolted. Close the barn
door instead: check before we create the token.

Signed-off-by: Markus Armbruster <armbru@redhat.com>
Reviewed-by: Eric Blake <eblake@redhat.com>
Message-Id: <20180823164025.12553-49-armbru@redhat.com>
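As a rough, standalone illustration of the check-before-allocate idea the
message describes (this is not the QEMU code: the two limits are borrowed
from the patch, while struct token_budget and accept_token() are made up
for the example):

    /*
     * Sketch: reject a token *before* copying it to the heap, so abusive
     * input cannot push the allocation itself past the budget.
     */
    #include <stdio.h>
    #include <stdlib.h>
    #include <string.h>

    #define MAX_TOKEN_SIZE  (64ULL << 20)   /* 64 MiB of token text */
    #define MAX_TOKEN_COUNT (2ULL << 20)    /* 2 Mi tokens */

    struct token_budget {
        unsigned long long token_size;   /* bytes used by accepted tokens */
        unsigned long long token_count;  /* number of accepted tokens */
    };

    /* Return a heap copy of @text, or NULL if accepting it would exceed
     * a limit.  The checks run before malloc(), not after. */
    static char *accept_token(struct token_budget *b, const char *text)
    {
        size_t len = strlen(text);

        if (b->token_size + len + 1 > MAX_TOKEN_SIZE) {
            return NULL;                  /* size limit would be exceeded */
        }
        if (b->token_count + 1 > MAX_TOKEN_COUNT) {
            return NULL;                  /* count limit would be exceeded */
        }

        char *copy = malloc(len + 1);
        if (!copy) {
            return NULL;
        }
        memcpy(copy, text, len + 1);

        b->token_size += len + 1;
        b->token_count += 1;
        return copy;
    }

    int main(void)
    {
        struct token_budget b = { 0, 0 };
        char *tok = accept_token(&b, "\"hello\"");

        printf("accepted: %s (size %llu, count %llu)\n",
               tok ? tok : "(rejected)", b.token_size, b.token_count);
        free(tok);
        return 0;
    }

The ordering is the whole point: the test uses the size the token would
have (len + 1 for the terminating NUL), so an over-limit token is refused
before any allocation happens, mirroring the patch's
parser->token_size + input->len + 1 check below.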
Diffstat (limited to 'qobject')
-rw-r--r--  qobject/json-streamer.c | 36
1 file changed, 18 insertions(+), 18 deletions(-)
diff --git a/qobject/json-streamer.c b/qobject/json-streamer.c
index 674dfe6..810aae5 100644
--- a/qobject/json-streamer.c
+++ b/qobject/json-streamer.c
@@ -20,7 +20,7 @@
 
 #define MAX_TOKEN_SIZE (64ULL << 20)
 #define MAX_TOKEN_COUNT (2ULL << 20)
-#define MAX_NESTING (1ULL << 10)
+#define MAX_NESTING (1 << 10)
 
 static void json_message_free_token(void *token, void *opaque)
 {
@@ -71,6 +71,23 @@ void json_message_process_token(JSONLexer *lexer, GString *input,
         break;
     }
 
+    /*
+     * Security consideration, we limit total memory allocated per object
+     * and the maximum recursion depth that a message can force.
+     */
+    if (parser->token_size + input->len + 1 > MAX_TOKEN_SIZE) {
+        error_setg(&err, "JSON token size limit exceeded");
+        goto out_emit;
+    }
+    if (g_queue_get_length(parser->tokens) + 1 > MAX_TOKEN_COUNT) {
+        error_setg(&err, "JSON token count limit exceeded");
+        goto out_emit;
+    }
+    if (parser->bracket_count + parser->brace_count > MAX_NESTING) {
+        error_setg(&err, "JSON nesting depth limit exceeded");
+        goto out_emit;
+    }
+
     token = g_malloc(sizeof(JSONToken) + input->len + 1);
     token->type = type;
     memcpy(token->str, input->str, input->len);
@@ -91,23 +108,6 @@ void json_message_process_token(JSONLexer *lexer, GString *input,
         goto out_emit;
     }
 
-    /*
-     * Security consideration, we limit total memory allocated per object
-     * and the maximum recursion depth that a message can force.
-     */
-    if (parser->token_size > MAX_TOKEN_SIZE) {
-        error_setg(&err, "JSON token size limit exceeded");
-        goto out_emit;
-    }
-    if (g_queue_get_length(parser->tokens) > MAX_TOKEN_COUNT) {
-        error_setg(&err, "JSON token count limit exceeded");
-        goto out_emit;
-    }
-    if (parser->bracket_count + parser->brace_count > MAX_NESTING) {
-        error_setg(&err, "JSON nesting depth limit exceeded");
-        goto out_emit;
-    }
-
     return;
 
 out_emit: