4 * Copyright IBM, Corp. 2009
9 * This work is licensed under the terms of the GNU LGPL, version 2.1 or later.
10 * See the COPYING.LIB file in the top-level directory.
14 #include "qemu/osdep.h"
15 #include "qemu-common.h"
16 #include "qapi/qmp/json-lexer.h"
/* Upper bound on a single token's length: 64 MiB (64ULL << 20 bytes).
 * Enforced in json_lexer_feed_char() so hostile input cannot grow the
 * token buffer without bound. */
18 #define MAX_TOKEN_SIZE (64ULL << 20)
21 * Required by JSON (RFC 7159):
23 * \"([^\\\"]|\\[\"'\\/bfnrt]|\\u[0-9a-fA-F]{4})*\"
24 * -?(0|[1-9][0-9]*)(\.[0-9]+)?([eE][-+]?[0-9]+)?
26 * [a-z]+ # covers null, true, false
28 * Extension of '' strings:
30 * '([^\\']|\\[\"'\\/bfnrt]|\\u[0-9a-fA-F]{4})*'
32 * Extension for vararg handling in JSON construction:
34 * %((l|ll|I64)?d|[ipsf])
/* Lexer DFA states.  IN_ERROR must be 0: json_lexer[] below relies on
 * default (zero) initialization, so every table entry not explicitly
 * listed is a transition into the error state.
 * NOTE(review): this excerpt omits most enumerators between IN_ERROR
 * and IN_NEG_NONZERO_NUMBER, and the closing brace — do not infer the
 * full state set from what is visible here. */
38 enum json_lexer_state {
39 IN_ERROR = 0, /* must really be 0, see json_lexer[] */
59 IN_NEG_NONZERO_NUMBER,
/* Compile-time guarantee that every token state (JSON_MIN and above)
 * sorts strictly after the intermediate scanning states. */
71 QEMU_BUILD_BUG_ON((int)JSON_MIN <= (int)IN_START);
/* Blanket-fill an entire ASCII row (bytes 0..0x7F) with STATE.  Used for
 * rows where any "other" byte terminates the current token: that byte
 * belongs to the NEXT token and must be re-processed (lookahead). */
73 #define TERMINAL(state) [0 ... 0x7F] = (state)

75 /* Return whether TERMINAL is a terminal state and the transition to it
76 from OLD_STATE required lookahead. This happens whenever the table
77 below uses the TERMINAL macro. */
/* Probing column 0 works because that slot stays IN_ERROR (zero) unless
 * the row was blanket-filled by TERMINAL() above. */
78 #define TERMINAL_NEEDED_LOOKAHEAD(old_state, terminal) \
79 (json_lexer[(old_state)][0] == (terminal))
/* DFA transition table: json_lexer[current state][input byte] -> next
 * state.  Any entry not explicitly listed defaults to 0 == IN_ERROR.
 * NOTE(review): many row headers (e.g. [IN_DQ_STRING], the [IN_*_UCODE*]
 * rows, the number rows, [IN_START]) are missing from this excerpt; the
 * orphaned designators below belong to those rows — confirm against the
 * full source before editing. */
81 static const uint8_t json_lexer[][256] = {
82 /* Relies on default initialization to IN_ERROR! */

84 /* double quote string */
/* \uXXXX escape: four hex-digit rows chaining UCODE0 -> UCODE1 ->
 * UCODE2 -> UCODE3 and finally back into the string body.
 * NOTE(review): the [IN_DQ_UCODE*] row headers are not visible here. */
86 ['0' ... '9'] = IN_DQ_STRING,
87 ['a' ... 'f'] = IN_DQ_STRING,
88 ['A' ... 'F'] = IN_DQ_STRING,
91 ['0' ... '9'] = IN_DQ_UCODE3,
92 ['a' ... 'f'] = IN_DQ_UCODE3,
93 ['A' ... 'F'] = IN_DQ_UCODE3,
96 ['0' ... '9'] = IN_DQ_UCODE2,
97 ['a' ... 'f'] = IN_DQ_UCODE2,
98 ['A' ... 'F'] = IN_DQ_UCODE2,
101 ['0' ... '9'] = IN_DQ_UCODE1,
102 ['a' ... 'f'] = IN_DQ_UCODE1,
103 ['A' ... 'F'] = IN_DQ_UCODE1,
/* After '\\' inside a "..." string: the escapes JSON allows return to
 * the string body; 'u' starts the \uXXXX hex sequence above. */
105 [IN_DQ_STRING_ESCAPE] = {
106 ['b'] = IN_DQ_STRING,
107 ['f'] = IN_DQ_STRING,
108 ['n'] = IN_DQ_STRING,
109 ['r'] = IN_DQ_STRING,
110 ['t'] = IN_DQ_STRING,
111 ['/'] = IN_DQ_STRING,
112 ['\\'] = IN_DQ_STRING,
113 ['\''] = IN_DQ_STRING,
114 ['\"'] = IN_DQ_STRING,
115 ['u'] = IN_DQ_UCODE0,
/* String body: any byte except NUL; 0xC2..0xF4 admits UTF-8 lead bytes
 * (0xC0, 0xC1 and 0xF5..0xFF are never valid UTF-8, so they fall into
 * IN_ERROR).  The later designators override slots inside 1..0xBF.
 * NOTE(review): the closing-quote -> JSON_STRING entry for this row is
 * not visible in this excerpt. */
118 [1 ... 0xBF] = IN_DQ_STRING,
119 [0xC2 ... 0xF4] = IN_DQ_STRING,
120 ['\\'] = IN_DQ_STRING_ESCAPE,

124 /* single quote string */
/* Mirror of the "..." rows above, for the '...'-string extension. */
126 ['0' ... '9'] = IN_SQ_STRING,
127 ['a' ... 'f'] = IN_SQ_STRING,
128 ['A' ... 'F'] = IN_SQ_STRING,
131 ['0' ... '9'] = IN_SQ_UCODE3,
132 ['a' ... 'f'] = IN_SQ_UCODE3,
133 ['A' ... 'F'] = IN_SQ_UCODE3,
136 ['0' ... '9'] = IN_SQ_UCODE2,
137 ['a' ... 'f'] = IN_SQ_UCODE2,
138 ['A' ... 'F'] = IN_SQ_UCODE2,
141 ['0' ... '9'] = IN_SQ_UCODE1,
142 ['a' ... 'f'] = IN_SQ_UCODE1,
143 ['A' ... 'F'] = IN_SQ_UCODE1,
145 [IN_SQ_STRING_ESCAPE] = {
146 ['b'] = IN_SQ_STRING,
147 ['f'] = IN_SQ_STRING,
148 ['n'] = IN_SQ_STRING,
149 ['r'] = IN_SQ_STRING,
150 ['t'] = IN_SQ_STRING,
151 ['/'] = IN_SQ_STRING,
152 ['\\'] = IN_SQ_STRING,
153 ['\''] = IN_SQ_STRING,
154 ['\"'] = IN_SQ_STRING,
155 ['u'] = IN_SQ_UCODE0,
158 [1 ... 0xBF] = IN_SQ_STRING,
159 [0xC2 ... 0xF4] = IN_SQ_STRING,
160 ['\\'] = IN_SQ_STRING_ESCAPE,
161 ['\''] = JSON_STRING,

/* Number rows (most headers missing from this excerpt).  A leading '0'
 * may not be followed by further digits (JSON forbids leading zeros):
 * any byte other than a digit terminates the integer. */
166 TERMINAL(JSON_INTEGER),
167 ['0' ... '9'] = IN_ERROR,
/* Fraction digits after '.'; any non-digit terminates the float. */
173 TERMINAL(JSON_FLOAT),
174 ['0' ... '9'] = IN_DIGITS,
178 ['0' ... '9'] = IN_DIGITS,
184 ['0' ... '9'] = IN_DIGITS,
/* Exponent digits after [eE][-+]?; presumably reached from an
 * [IN_EXP_*] row not visible here — confirm against the full source. */
187 [IN_MANTISSA_DIGITS] = {
188 TERMINAL(JSON_FLOAT),
189 ['0' ... '9'] = IN_MANTISSA_DIGITS,
195 ['0' ... '9'] = IN_MANTISSA_DIGITS,
/* Integer starting 1-9: keep consuming digits until a non-digit. */
199 [IN_NONZERO_NUMBER] = {
200 TERMINAL(JSON_INTEGER),
201 ['0' ... '9'] = IN_NONZERO_NUMBER,
/* After a leading '-': must see a digit next.  NOTE(review): the '0'
 * entry for "-0..." is not visible in this excerpt. */
207 [IN_NEG_NONZERO_NUMBER] = {
209 ['1' ... '9'] = IN_NONZERO_NUMBER,
/* Keywords: a run of lower-case letters covers null/true/false. */
214 TERMINAL(JSON_KEYWORD),
215 ['a' ... 'z'] = IN_KEYWORD,
/* Whitespace row (header missing in excerpt): swallow blanks. */
221 [' '] = IN_WHITESPACE,
222 ['\t'] = IN_WHITESPACE,
223 ['\r'] = IN_WHITESPACE,
224 ['\n'] = IN_WHITESPACE,
/* Vararg-escape rows for the %-conversion extension
 * (%d, %ld, %lld, %I64d, ...); row headers missing in this excerpt. */
235 ['l'] = IN_ESCAPE_LL,
245 ['4'] = IN_ESCAPE_I64,
249 ['6'] = IN_ESCAPE_I6,
/* Dispatch on the first byte of a new token; presumably the [IN_START]
 * row (header not visible).  Single-byte structural tokens ([, ], and
 * likely {, }, comma, colon in lines missing here) map straight to
 * their JSON_* token states. */
265 ['"'] = IN_DQ_STRING,
266 ['\''] = IN_SQ_STRING,
268 ['1' ... '9'] = IN_NONZERO_NUMBER,
269 ['-'] = IN_NEG_NONZERO_NUMBER,
272 ['['] = JSON_LSQUARE,
273 [']'] = JSON_RSQUARE,
276 ['a' ... 'z'] = IN_KEYWORD,
278 [' '] = IN_WHITESPACE,
279 ['\t'] = IN_WHITESPACE,
280 ['\r'] = IN_WHITESPACE,
281 ['\n'] = IN_WHITESPACE,
/* Initialize LEXER: start state, a small empty token buffer, and the
 * x/y position counters zeroed.
 * NOTE(review): FUNC is unused in the visible lines — the statement
 * storing it (presumably lexer->emit = func, given that lexer->emit is
 * called in json_lexer_feed_char) is missing from this excerpt; confirm
 * against the full source. */
285 void json_lexer_init(JSONLexer *lexer, JSONLexerEmitter func)
288 lexer->state = IN_START;
289 lexer->token = g_string_sized_new(3);
290 lexer->x = lexer->y = 0;
/* Run one input character CH through the DFA.
 *
 * A transition into a token state emits the accumulated token via
 * lexer->emit and resets to IN_START.  Rows filled by TERMINAL() need
 * lookahead: CH was not consumed by the finished token, so the loop
 * re-processes it in the new state.  FLUSH short-circuits that loop so
 * a trailing token can be forced out at end of input.
 *
 * NOTE(review): this excerpt is missing several lines of the body
 * (braces, the do { opener, x/y bookkeeping, the dispatch on new_state,
 * and the return statement), so the comments below cover only what is
 * visible. */
293 static int json_lexer_feed_char(JSONLexer *lexer, char ch, bool flush)
295 int char_consumed, new_state;
/* NOTE(review): '<=' admits state == ARRAY_SIZE(json_lexer), one past
 * the last row — looks like it should be '<'; confirm upstream intent. */
304 assert(lexer->state <= ARRAY_SIZE(json_lexer));
305 new_state = json_lexer[lexer->state][(uint8_t)ch];
306 char_consumed = !TERMINAL_NEEDED_LOOKAHEAD(lexer->state, new_state);
/* Accumulate CH into the current token buffer. */
308 g_string_append_c(lexer->token, ch);
/* Token complete: emit it with its start position, then restart. */
323 lexer->emit(lexer, lexer->token, new_state, lexer->x, lexer->y);
326 g_string_truncate(lexer->token, 0);
327 new_state = IN_START;
330 /* XXX: To avoid having previous bad input leaving the parser in an
331 * unresponsive state where we consume unpredictable amounts of
332 * subsequent "good" input, percolate this error state up to the
333 * tokenizer/parser by forcing a NULL object to be emitted, then
336 * Also note that this handling is required for reliable channel
337 * negotiation between QMP and the guest agent, since chr(0xFF)
338 * is placed at the beginning of certain events to ensure proper
339 * delivery when the channel is in an unknown state. chr(0xFF) is
340 * never a valid ASCII/UTF-8 sequence, so this should reliably
341 * induce an error/flush state.
/* Error path: surface the bad input as a JSON_ERROR token and restart
 * from IN_START so one bad byte cannot wedge the lexer (see XXX above). */
343 lexer->emit(lexer, lexer->token, JSON_ERROR, lexer->x, lexer->y);
344 g_string_truncate(lexer->token, 0);
345 new_state = IN_START;
346 lexer->state = new_state;
/* Lookahead loop: repeat until CH is actually consumed — unless we are
 * flushing, in which case one pass is enough. */
351 lexer->state = new_state;
352 } while (!char_consumed && !flush);

354 /* Do not let a single token grow to an arbitrarily large size,
355 * this is a security consideration.
/* Oversized token: emit whatever we have in the current (non-terminal)
 * state and reset, capping memory at roughly MAX_TOKEN_SIZE. */
357 if (lexer->token->len > MAX_TOKEN_SIZE) {
358 lexer->emit(lexer, lexer->token, lexer->state, lexer->x, lexer->y);
359 g_string_truncate(lexer->token, 0);
360 lexer->state = IN_START;
/* Push SIZE bytes from BUFFER through the lexer, one character at a
 * time, with flush=false (partial tokens stay buffered across calls).
 * NOTE(review): the declarations of i/err, the error-propagation check,
 * and the return statement are missing from this excerpt. */
366 int json_lexer_feed(JSONLexer *lexer, const char *buffer, size_t size)
370 for (i = 0; i < size; i++) {
373 err = json_lexer_feed_char(lexer, buffer[i], false);
/* Force out any token still being accumulated by feeding a NUL byte
 * with flush=true; if the lexer is already in IN_START there is nothing
 * pending and we return 0 without touching the DFA. */
382 int json_lexer_flush(JSONLexer *lexer)
384 return lexer->state == IN_START ? 0 : json_lexer_feed_char(lexer, 0, true);
/* Free the token buffer allocated in json_lexer_init (TRUE frees the
 * character data as well as the GString wrapper). */
387 void json_lexer_destroy(JSONLexer *lexer)
389 g_string_free(lexer->token, true);