Diffstat (limited to 'test/tokeniser2.c')
-rw-r--r--  test/tokeniser2.c  107
1 file changed, 51 insertions, 56 deletions
diff --git a/test/tokeniser2.c b/test/tokeniser2.c
index 14ac71d..eb369b4 100644
--- a/test/tokeniser2.c
+++ b/test/tokeniser2.c
@@ -4,11 +4,12 @@
#include <json.h>
+#include <parserutils/input/inputstream.h>
+
#include <hubbub/hubbub.h>
#include "utils/utils.h"
-#include "input/inputstream.h"
#include "tokeniser/tokeniser.h"
#include "testutils.h"
@@ -29,7 +30,6 @@ typedef struct context {
} context;
static void run_test(context *ctx);
-static void buffer_handler(const uint8_t *buffer, size_t len, void *pw);
static void token_handler(const hubbub_token *token, void *pw);
static void *myrealloc(void *ptr, size_t len, void *pw)
@@ -74,6 +74,7 @@ int main(int argc, char **argv)
ctx.last_start_tag = NULL;
ctx.content_model = NULL;
+ ctx.process_cdata = false;
/* Extract settings */
for (entry = json_object_get_object(test)->head; entry;
@@ -119,7 +120,7 @@ int main(int argc, char **argv)
void run_test(context *ctx)
{
- hubbub_inputstream *stream;
+ parserutils_inputstream *stream;
hubbub_tokeniser *tok;
hubbub_tokeniser_optparams params;
int i, max_i;
@@ -138,7 +139,7 @@ void run_test(context *ctx)
ctx->output_index = 0;
ctx->char_off = 0;
- stream = hubbub_inputstream_create("UTF-8", "UTF-8",
+ stream = parserutils_inputstream_create("UTF-8", 0, NULL,
myrealloc, NULL);
assert(stream != NULL);
@@ -152,7 +153,7 @@ void run_test(context *ctx)
snprintf((char *) buf, sizeof buf, "<%s>",
ctx->last_start_tag);
- assert(hubbub_inputstream_append(stream,
+ assert(parserutils_inputstream_append(stream,
buf, strlen(ctx->last_start_tag) + 2) ==
HUBBUB_OK);
@@ -166,12 +167,6 @@ void run_test(context *ctx)
&params) == HUBBUB_OK);
}
- params.buffer_handler.handler = buffer_handler;
- params.buffer_handler.pw = ctx;
- assert(hubbub_tokeniser_setopt(tok,
- HUBBUB_TOKENISER_BUFFER_HANDLER,
- &params) == HUBBUB_OK);
-
params.token_handler.handler = token_handler;
params.token_handler.pw = ctx;
assert(hubbub_tokeniser_setopt(tok,
@@ -204,10 +199,10 @@ void run_test(context *ctx)
HUBBUB_TOKENISER_CONTENT_MODEL,
&params) == HUBBUB_OK);
- assert(hubbub_inputstream_append(stream,
+ assert(parserutils_inputstream_append(stream,
ctx->input, ctx->input_len) == HUBBUB_OK);
- assert(hubbub_inputstream_append(stream, NULL, 0) ==
+ assert(parserutils_inputstream_append(stream, NULL, 0) ==
HUBBUB_OK);
printf("Input: '%.*s' (%d)\n", (int) ctx->input_len,
@@ -218,19 +213,10 @@ void run_test(context *ctx)
hubbub_tokeniser_destroy(tok);
- hubbub_inputstream_destroy(stream);
+ parserutils_inputstream_destroy(stream);
}
}
-void buffer_handler(const uint8_t *buffer, size_t len, void *pw)
-{
- context *ctx = (context *) pw;
-
- UNUSED(len);
-
- ctx->pbuffer = buffer;
-}
-
void token_handler(const hubbub_token *token, void *pw)
{
static const char *token_names[] = {
@@ -277,7 +263,7 @@ void token_handler(const hubbub_token *token, void *pw)
items = json_object_get_array(obj);
- printf("%s: %s\n", token_names[token->type],
+ printf("got %s: expected %s\n", token_names[token->type],
json_object_get_string((struct json_object *)
array_list_get_idx(items, 0)));
@@ -297,9 +283,8 @@ void token_handler(const hubbub_token *token, void *pw)
array_list_get_idx(items, 3));
bool expquirks = !json_object_get_boolean(
array_list_get_idx(items, 4));
- char *gotname = (char *) (ctx->pbuffer +
- token->data.doctype.name.data.off);
- char *gotpub, *gotsys;
+ const char *gotname = (const char *)token->data.doctype.name.ptr;
+ const char *gotpub, *gotsys;
printf("'%.*s' %sids:\n",
(int) token->data.doctype.name.len,
@@ -311,34 +296,36 @@ void token_handler(const hubbub_token *token, void *pw)
gotpub = NULL;
printf("\tpublic: missing\n");
} else {
- gotpub = (char *) (ctx->pbuffer +
- token->data.doctype.public_id.data.off);
- printf("\tpublic: '%.*s'\n",
+ gotpub = (const char *) token->data.doctype.public_id.ptr;
+ printf("\tpublic: '%.*s' (%d)\n",
(int) token->data.doctype.public_id.len,
- gotpub);
+ gotpub,
+ (int) token->data.doctype.public_id.len);
}
if (token->data.doctype.system_missing) {
gotsys = NULL;
printf("\tsystem: missing\n");
} else {
- gotsys = (char *) (ctx->pbuffer +
- token->data.doctype.system_id.data.off);
- printf("\tsystem: '%.*s'\n",
+ gotsys = (const char *) token->data.doctype.system_id.ptr;
+ printf("\tsystem: '%.*s' (%d)\n",
(int) token->data.doctype.system_id.len,
- gotsys);
+ gotsys,
+ token->data.doctype.system_id.len);
}
assert(token->data.doctype.name.len == strlen(expname));
assert(strncmp(gotname, expname, strlen(expname)) == 0);
- assert((exppub == NULL) == (gotpub == NULL));
+ assert((exppub == NULL) ==
+ (token->data.doctype.public_missing == true));
if (exppub) {
assert(token->data.doctype.public_id.len == strlen(exppub));
assert(strncmp(gotpub, exppub, strlen(exppub)) == 0);
}
- assert((expsys == NULL) == (gotsys == NULL));
+ assert((expsys == NULL) ==
+ (token->data.doctype.system_missing == true));
if (gotsys) {
assert(token->data.doctype.system_id.len == strlen(expsys));
assert(strncmp(gotsys, expsys, strlen(expsys)) == 0);
@@ -356,16 +343,22 @@ void token_handler(const hubbub_token *token, void *pw)
bool self_closing = json_object_get_boolean(
array_list_get_idx(items, 3));
- char *tagname = (char *) (ctx->pbuffer +
- token->data.tag.name.data.off);
+ const char *tagname = (const char *)
+ token->data.tag.name.ptr;
- printf("'%.*s' %s%s\n",
+ printf("expected: '%s' %s\n",
+ expname,
+ (self_closing) ? "(self-closing) " : "");
+
+ printf(" got: '%.*s' %s\n",
(int) token->data.tag.name.len,
tagname,
(token->data.tag.self_closing) ?
- "(self-closing) " : "",
- (token->data.tag.n_attributes > 0) ?
- "attributes:" : "");
+ "(self-closing) " : "");
+
+ if (token->data.tag.n_attributes > 0) {
+ printf("attributes:\n");
+ }
assert(token->data.tag.name.len == strlen(expname));
assert(strncmp(tagname, expname, strlen(expname)) == 0);
@@ -379,12 +372,12 @@ void token_handler(const hubbub_token *token, void *pw)
char *expname = (char *) expattrs->k;
char *expval = json_object_get_string(
(struct json_object *) expattrs->v);
- char *gotname = (char *) (ctx->pbuffer +
- token->data.tag.attributes[i].name.data.off);
+ const char *gotname = (const char *)
+ token->data.tag.attributes[i].name.ptr;
size_t namelen =
token->data.tag.attributes[i].name.len;
- char *gotval = (char *) (ctx->pbuffer +
- token->data.tag.attributes[i].value.data.off);
+ const char *gotval = (const char *)
+ token->data.tag.attributes[i].value.ptr;
size_t vallen =
token->data.tag.attributes[i].value.len;
@@ -408,8 +401,8 @@ void token_handler(const hubbub_token *token, void *pw)
{
char *expname = json_object_get_string(
array_list_get_idx(items, 1));
- char *tagname = (char *) (ctx->pbuffer +
- token->data.tag.name.data.off);
+ const char *tagname = (const char *)
+ token->data.tag.name.ptr;
printf("'%.*s' %s\n",
(int) token->data.tag.name.len,
@@ -425,10 +418,12 @@ void token_handler(const hubbub_token *token, void *pw)
{
char *expstr = json_object_get_string(
array_list_get_idx(items, 1));
- char *gotstr = (char *) (ctx->pbuffer +
- token->data.comment.data.off);
+ const char *gotstr = (const char *)
+ token->data.comment.ptr;
- printf("'%.*s'\n", (int) token->data.comment.len, gotstr);
+ printf("expected: '%s'\n", expstr);
+ printf(" got: '%.*s'\n",
+ (int) token->data.comment.len, gotstr);
assert(token->data.comment.len == strlen(expstr));
assert(strncmp(gotstr, expstr, strlen(expstr)) == 0);
@@ -439,9 +434,9 @@ void token_handler(const hubbub_token *token, void *pw)
int expstrlen;
char *expstr = json_object_get_string_len(
array_list_get_idx(items, 1), &expstrlen);
- char *gotstr = (char *) (ctx->pbuffer +
- token->data.character.data.off);
- size_t len = min(token->data.character.len,
+ const char *gotstr = (const char *)
+ token->data.character.ptr;
+ size_t len = min(token->data.character.len,
expstrlen - ctx->char_off);
printf("expected: '%.*s'\n", (int) len, expstr + ctx->char_off);
@@ -457,7 +452,7 @@ void token_handler(const hubbub_token *token, void *pw)
hubbub_token t;
t.type = HUBBUB_TOKEN_CHARACTER;
- t.data.character.data.off += len;
+ t.data.character.ptr += len;
t.data.character.len -= len;
ctx->char_off = 0;
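
For reference, a minimal sketch of the parserutils inputstream lifecycle the test switches to, assuming only the calls and signatures visible in the hunks above (create with an allocator callback, append data, append NULL/0 to mark end of input, destroy); feed_input and the myrealloc shim here are illustrative stand-ins, not part of the commit:

#include <assert.h>
#include <stdint.h>
#include <stdlib.h>

#include <parserutils/input/inputstream.h>

/* Allocator shim in the same shape the test already uses. */
static void *myrealloc(void *ptr, size_t len, void *pw)
{
	(void) pw;
	return realloc(ptr, len);
}

static void feed_input(const uint8_t *data, size_t len)
{
	/* "UTF-8" input, 0 = no charset source hint, NULL = no
	 * detection callback, as in the patched run_test(). */
	parserutils_inputstream *stream = parserutils_inputstream_create(
			"UTF-8", 0, NULL, myrealloc, NULL);
	assert(stream != NULL);

	/* Append the document data, then an empty append to signal EOF. */
	parserutils_inputstream_append(stream, data, len);
	parserutils_inputstream_append(stream, NULL, 0);

	parserutils_inputstream_destroy(stream);
}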