From 0580bd8eed161949889506e792979de2458852db Mon Sep 17 00:00:00 2001
From: Daniel Silverstone
Date: Mon, 6 May 2019 16:15:49 +0100
Subject: Consume insert_buf when resuming a parse

Signed-off-by: Daniel Silverstone
---
 src/tokeniser/tokeniser.c | 18 ++++++++++++++++++
 1 file changed, 18 insertions(+)

diff --git a/src/tokeniser/tokeniser.c b/src/tokeniser/tokeniser.c
index a7e67a1..2d9c4ed 100644
--- a/src/tokeniser/tokeniser.c
+++ b/src/tokeniser/tokeniser.c
@@ -393,6 +393,24 @@ hubbub_error hubbub_tokeniser_setopt(hubbub_tokeniser *tokeniser,
 		} else {
 			if (tokeniser->paused == true) {
 				tokeniser->paused = false;
+				/* When unpausing, if we have had something
+				 * akin to document.write() happen while
+				 * we were paused, then the insert_buf will
+				 * have some content.
+				 * In this case, we need to prepend it to
+				 * the input buffer before we resume parsing,
+				 * discarding the insert_buf as we go.
+				 */
+				if (tokeniser->insert_buf->length > 0) {
+					parserutils_inputstream_insert(
+							tokeniser->input,
+							tokeniser->insert_buf->data,
+							tokeniser->insert_buf->length);
+					parserutils_buffer_discard(
+							tokeniser->insert_buf, 0,
+							tokeniser->insert_buf->length);
+				}
+
 				err = hubbub_tokeniser_run(tokeniser);
 			}
 		}
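
For context, here is a minimal caller-side sketch of the sequence this change caters for: pause the tokeniser, feed it script-generated markup (the document.write() case mentioned in the comment), then unpause so that hubbub_tokeniser_setopt() flushes insert_buf into the input stream before hubbub_tokeniser_run() continues. Only hubbub_tokeniser_setopt() and hubbub_tokeniser_run() appear in the patch itself; HUBBUB_TOKENISER_PAUSE, the pause_parse parameter, hubbub_tokeniser_insert_chunk(), the include path and the helper name are assumptions about the libhubbub-internal tokeniser API and may differ, so treat this as an illustration rather than documented usage.

/* Hypothetical usage sketch, not part of the patch.
 * Names other than hubbub_tokeniser_setopt()/hubbub_tokeniser_run()
 * are assumptions about the in-tree tokeniser API.
 */
#include "tokeniser/tokeniser.h"	/* assumed in-tree internal header */

static hubbub_error resume_after_document_write(hubbub_tokeniser *tok)
{
	hubbub_tokeniser_optparams params;
	const uint8_t markup[] = "<p>written while paused</p>";
	hubbub_error err;

	/* Pause tokenisation, e.g. while a <script> is executing. */
	params.pause_parse = true;
	err = hubbub_tokeniser_setopt(tok, HUBBUB_TOKENISER_PAUSE, &params);
	if (err != HUBBUB_OK)
		return err;

	/* Markup produced while paused accumulates in insert_buf
	 * (assumed entry point akin to document.write()). */
	err = hubbub_tokeniser_insert_chunk(tok, markup,
			sizeof(markup) - 1 /* exclude trailing NUL */);
	if (err != HUBBUB_OK)
		return err;

	/* Unpause: with this patch, setopt() first prepends insert_buf
	 * to the input stream and discards it, then resumes via
	 * hubbub_tokeniser_run(), so the inserted markup is tokenised
	 * ahead of any remaining network data. */
	params.pause_parse = false;
	return hubbub_tokeniser_setopt(tok, HUBBUB_TOKENISER_PAUSE, &params);
}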