From d0aa58c3647593044286a565294e94700c9a50a0 Mon Sep 17 00:00:00 2001
From: John-Mark Bell
Date: Sat, 16 Sep 2023 20:30:02 +0100
Subject: tests/tokenizer[23]: fix handling of CHARACTER tokens

Where a CHARACTER token is emitted but the expected data is shorter than
the data in the token, we want to consume the expected data and then
process the remaining token data as if it were emitted separately.

Sadly this didn't happen as we never initialised the replacement token
correctly. Make this so, and also tell the compiler that the recursive
call is actually a tail call so it can optimise it appropriately.
---
 test/tokeniser2.c | 4 ++--
 test/tokeniser3.c | 4 ++--
 2 files changed, 4 insertions(+), 4 deletions(-)

diff --git a/test/tokeniser2.c b/test/tokeniser2.c
index f38f7ab..f468d1c 100644
--- a/test/tokeniser2.c
+++ b/test/tokeniser2.c
@@ -438,7 +438,7 @@ hubbub_error token_handler(const hubbub_token *token, void *pw)
 			/* Expected token only contained part of the data
 			 * Calculate how much is left, then try again with
 			 * the next expected token */
-			hubbub_token t;
+			hubbub_token t = *token;
 
 			t.type = HUBBUB_TOKEN_CHARACTER;
 			t.data.character.ptr += len;
@@ -446,7 +446,7 @@ hubbub_error token_handler(const hubbub_token *token, void *pw)
 
 			ctx->char_off = 0;
 
-			token_handler(&t, pw);
+			return token_handler(&t, pw);
 		} else if (strlen(expstr + ctx->char_off) >
 				token->data.character.len) {
 			/* Tokeniser output only contained part of the data
diff --git a/test/tokeniser3.c b/test/tokeniser3.c
index 416ff5d..eb921ce 100644
--- a/test/tokeniser3.c
+++ b/test/tokeniser3.c
@@ -447,7 +447,7 @@ hubbub_error token_handler(const hubbub_token *token, void *pw)
 			/* Expected token only contained part of the data
 			 * Calculate how much is left, then try again with
 			 * the next expected token */
-			hubbub_token t;
+			hubbub_token t = *token;
 
 			t.type = HUBBUB_TOKEN_CHARACTER;
 			t.data.character.ptr += len;
@@ -455,7 +455,7 @@ hubbub_error token_handler(const hubbub_token *token, void *pw)
 
 			ctx->char_off = 0;
 
-			token_handler(&t, pw);
+			return token_handler(&t, pw);
 		} else if (strlen(expstr + ctx->char_off) >
 				token->data.character.len) {
 			/* Tokeniser output only contained part of the data
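
For context, the pattern this change restores can be sketched outside of hubbub. The names below (tok_t, handle_token, the hard-coded "remainder" string) are simplified stand-ins, not hubbub's real API; the sketch only demonstrates the two fixes: copy the incoming token before adjusting it so every field is initialised, and return the result of the recursive call so the leftover data is processed and the compiler can treat the call as a tail call.

#include <string.h>

/* Simplified stand-in for a character token; not hubbub's hubbub_token. */
typedef struct {
	const char *ptr;
	size_t len;
} tok_t;

/* Compare token data against the currently expected string. When the
 * expected string is shorter than the token, consume that much and
 * re-dispatch the remainder as if it had been emitted separately. */
static int handle_token(const tok_t *token, const char *expected)
{
	size_t explen = strlen(expected);

	if (explen < token->len) {
		/* Copy the whole token first so no field is left
		 * uninitialised, then step past the matched prefix. */
		tok_t t = *token;
		t.ptr += explen;
		t.len -= explen;

		/* In the real test harness the next expected token comes
		 * from the test context; "remainder" is hard-coded here.
		 * Returning the call puts it in tail position. */
		return handle_token(&t, "remainder");
	}

	return memcmp(token->ptr, expected, token->len) == 0;
}

int main(void)
{
	tok_t tok = { "abcremainder", 12 };

	return handle_token(&tok, "abc") ? 0 : 1;
}

Returning the recursive call, rather than merely making it, both propagates the result of matching the remainder and mirrors the change to token_handler() in the patch above.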