diff options
author | John-Mark Bell <jmb@netsurf-browser.org> | 2023-09-16 20:30:02 +0100 |
---|---|---|
committer | John-Mark Bell <jmb@netsurf-browser.org> | 2023-09-16 22:20:13 +0100 |
commit | d0aa58c3647593044286a565294e94700c9a50a0 (patch) | |
tree | 0ee55ee5d231fb9d104b608412ea14b38e589080 | |
parent | 873ed6e236f7669afd3ef44259c34addc6dc95b6 (diff) | |
download | libhubbub-d0aa58c3647593044286a565294e94700c9a50a0.tar.gz libhubbub-d0aa58c3647593044286a565294e94700c9a50a0.tar.bz2 |
tests/tokenizer[23]: fix handling of CHARACTER tokens
Where a CHARACTER token is emitted but the expected data is shorter
than the data in the token, we want to consume the expected data
and then process the remaining token data as if it were emitted
separately. Sadly this didn't happen as we never initialised the
replacement token correctly. Make this so, and also tell the
compiler that the recursive call is actually a tail call so it
can optimise it appropriately.
-rw-r--r-- | test/tokeniser2.c | 4 | ||||
-rw-r--r-- | test/tokeniser3.c | 4 |
2 files changed, 4 insertions, 4 deletions
diff --git a/test/tokeniser2.c b/test/tokeniser2.c
index f38f7ab..f468d1c 100644
--- a/test/tokeniser2.c
+++ b/test/tokeniser2.c
@@ -438,7 +438,7 @@ hubbub_error token_handler(const hubbub_token *token, void *pw)
 			/* Expected token only contained part of the data
 			 * Calculate how much is left, then try again with
 			 * the next expected token */
-			hubbub_token t;
+			hubbub_token t = *token;

 			t.type = HUBBUB_TOKEN_CHARACTER;
 			t.data.character.ptr += len;
@@ -446,7 +446,7 @@ hubbub_error token_handler(const hubbub_token *token, void *pw)

 			ctx->char_off = 0;

-			token_handler(&t, pw);
+			return token_handler(&t, pw);
 		} else if (strlen(expstr + ctx->char_off) >
 				token->data.character.len) {
 			/* Tokeniser output only contained part of the data
diff --git a/test/tokeniser3.c b/test/tokeniser3.c
index 416ff5d..eb921ce 100644
--- a/test/tokeniser3.c
+++ b/test/tokeniser3.c
@@ -447,7 +447,7 @@ hubbub_error token_handler(const hubbub_token *token, void *pw)
 			/* Expected token only contained part of the data
 			 * Calculate how much is left, then try again with
 			 * the next expected token */
-			hubbub_token t;
+			hubbub_token t = *token;

 			t.type = HUBBUB_TOKEN_CHARACTER;
 			t.data.character.ptr += len;
@@ -455,7 +455,7 @@ hubbub_error token_handler(const hubbub_token *token, void *pw)

 			ctx->char_off = 0;

-			token_handler(&t, pw);
+			return token_handler(&t, pw);
 		} else if (strlen(expstr + ctx->char_off) >
 				token->data.character.len) {
 			/* Tokeniser output only contained part of the data