Diffstat (limited to 'test/tokeniser.c')
-rw-r--r--   test/tokeniser.c   54
1 file changed, 20 insertions, 34 deletions
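
The diff below switches this test from hubbub's own input stream to libparserutils' inputstream: the stream is created, fed and destroyed through the parserutils_inputstream_* calls, and the buffer-handler option disappears because token strings no longer reference a shared buffer. As orientation, here is a minimal, self-contained sketch of the post-change setup, built only from the calls that appear in the diff; the in-memory input string, the argc check and the pass-through myrealloc body are illustrative scaffolding, not code taken from the test.

#include <inttypes.h>
#include <stdio.h>
#include <stdlib.h>
#include <assert.h>

#include <parserutils/input/inputstream.h>
#include <hubbub/hubbub.h>
#include "tokeniser/tokeniser.h"

static void *myrealloc(void *ptr, size_t len, void *pw)
{
	/* simple pass-through allocator; the test's own body is not shown
	 * in the diff, so this is an assumption */
	(void) pw;
	return realloc(ptr, len);
}

static void token_handler(const hubbub_token *token, void *pw)
{
	(void) token;
	(void) pw;
	/* the real test prints every token; see the handler in the diff */
}

int main(int argc, char **argv)
{
	parserutils_inputstream *stream;
	hubbub_tokeniser *tok;
	hubbub_tokeniser_optparams params;
	const uint8_t input[] = "<p class='x'>Hi</p>";

	/* argv[1] is the encoding Aliases file, as in the test proper */
	assert(argc == 2);
	assert(hubbub_initialise(argv[1], myrealloc, NULL) == HUBBUB_OK);

	/* new-style stream: encoding name, source value 0, no detection
	 * callback, then the allocator pair */
	stream = parserutils_inputstream_create("UTF-8", 0, NULL,
			myrealloc, NULL);
	assert(stream != NULL);

	tok = hubbub_tokeniser_create(stream, myrealloc, NULL);
	assert(tok != NULL);

	/* only the token handler is registered; the buffer handler is gone */
	params.token_handler.handler = token_handler;
	params.token_handler.pw = NULL;
	assert(hubbub_tokeniser_setopt(tok, HUBBUB_TOKENISER_TOKEN_HANDLER,
			&params) == HUBBUB_OK);

	/* feed the data, signal end of input with a NULL/0 append, then run */
	assert(parserutils_inputstream_append(stream, input,
			sizeof(input) - 1) == HUBBUB_OK);
	assert(parserutils_inputstream_append(stream, NULL, 0) == HUBBUB_OK);
	assert(hubbub_tokeniser_run(tok) == HUBBUB_OK);

	hubbub_tokeniser_destroy(tok);
	parserutils_inputstream_destroy(stream);

	assert(hubbub_finalise(myrealloc, NULL) == HUBBUB_OK);

	return 0;
}
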
diff --git a/test/tokeniser.c b/test/tokeniser.c
index 2d9577b..0ccf264 100644
--- a/test/tokeniser.c
+++ b/test/tokeniser.c
@@ -1,18 +1,16 @@
#include <inttypes.h>
#include <stdio.h>
+#include <parserutils/input/inputstream.h>
+
#include <hubbub/hubbub.h>
#include "utils/utils.h"
-#include "input/inputstream.h"
#include "tokeniser/tokeniser.h"
#include "testutils.h"
-static const uint8_t *pbuffer;
-
-static void buffer_handler(const uint8_t *buffer, size_t len, void *pw);
static void token_handler(const hubbub_token *token, void *pw);
static void *myrealloc(void *ptr, size_t len, void *pw)
@@ -24,7 +22,7 @@ static void *myrealloc(void *ptr, size_t len, void *pw)
int main(int argc, char **argv)
{
- hubbub_inputstream *stream;
+ parserutils_inputstream *stream;
hubbub_tokeniser *tok;
hubbub_tokeniser_optparams params;
FILE *fp;
@@ -40,17 +38,13 @@ int main(int argc, char **argv)
/* Initialise library */
assert(hubbub_initialise(argv[1], myrealloc, NULL) == HUBBUB_OK);
- stream = hubbub_inputstream_create("UTF-8", "UTF-8", myrealloc, NULL);
+ stream = parserutils_inputstream_create("UTF-8", 0, NULL,
+ myrealloc, NULL);
assert(stream != NULL);
tok = hubbub_tokeniser_create(stream, myrealloc, NULL);
assert(tok != NULL);
- params.buffer_handler.handler = buffer_handler;
- params.buffer_handler.pw = NULL;
- assert(hubbub_tokeniser_setopt(tok, HUBBUB_TOKENISER_BUFFER_HANDLER,
- &params) == HUBBUB_OK);
-
params.token_handler.handler = token_handler;
params.token_handler.pw = NULL;
assert(hubbub_tokeniser_setopt(tok, HUBBUB_TOKENISER_TOKEN_HANDLER,
@@ -69,7 +63,7 @@ int main(int argc, char **argv)
while (len >= CHUNK_SIZE) {
fread(buf, 1, CHUNK_SIZE, fp);
- assert(hubbub_inputstream_append(stream,
+ assert(parserutils_inputstream_append(stream,
buf, CHUNK_SIZE) == HUBBUB_OK);
len -= CHUNK_SIZE;
@@ -80,12 +74,12 @@ int main(int argc, char **argv)
if (len > 0) {
fread(buf, 1, len, fp);
- assert(hubbub_inputstream_append(stream,
+ assert(parserutils_inputstream_append(stream,
buf, len) == HUBBUB_OK);
len = 0;
- assert(hubbub_inputstream_append(stream, NULL, 0) ==
+ assert(parserutils_inputstream_append(stream, NULL, 0) ==
HUBBUB_OK);
assert(hubbub_tokeniser_run(tok) == HUBBUB_OK);
@@ -95,7 +89,7 @@ int main(int argc, char **argv)
hubbub_tokeniser_destroy(tok);
- hubbub_inputstream_destroy(stream);
+ parserutils_inputstream_destroy(stream);
assert(hubbub_finalise(myrealloc, NULL) == HUBBUB_OK);
@@ -104,14 +98,6 @@ int main(int argc, char **argv)
return 0;
}
-void buffer_handler(const uint8_t *buffer, size_t len, void *pw)
-{
- UNUSED(len);
- UNUSED(pw);
-
- pbuffer = buffer;
-}
-
void token_handler(const hubbub_token *token, void *pw)
{
static const char *token_names[] = {
@@ -128,7 +114,7 @@ void token_handler(const hubbub_token *token, void *pw)
case HUBBUB_TOKEN_DOCTYPE:
printf("'%.*s' %sids:\n",
(int) token->data.doctype.name.len,
- pbuffer + token->data.doctype.name.data.off,
+ token->data.doctype.name.ptr,
token->data.doctype.force_quirks ?
"(force-quirks) " : "");
@@ -137,20 +123,20 @@ void token_handler(const hubbub_token *token, void *pw)
else
printf("\tpublic: '%.*s'\n",
(int) token->data.doctype.public_id.len,
- pbuffer + token->data.doctype.public_id.data.off);
+ token->data.doctype.public_id.ptr);
if (token->data.doctype.system_missing)
printf("\tsystem: missing\n");
else
printf("\tsystem: '%.*s'\n",
(int) token->data.doctype.system_id.len,
- pbuffer + token->data.doctype.system_id.data.off);
+ token->data.doctype.system_id.ptr);
break;
case HUBBUB_TOKEN_START_TAG:
printf("'%.*s' %s%s\n",
(int) token->data.tag.name.len,
- pbuffer + token->data.tag.name.data.off,
+ token->data.tag.name.ptr,
(token->data.tag.self_closing) ?
"(self-closing) " : "",
(token->data.tag.n_attributes > 0) ?
@@ -158,15 +144,15 @@ void token_handler(const hubbub_token *token, void *pw)
for (i = 0; i < token->data.tag.n_attributes; i++) {
printf("\t'%.*s' = '%.*s'\n",
(int) token->data.tag.attributes[i].name.len,
- pbuffer + token->data.tag.attributes[i].name.data.off,
+ token->data.tag.attributes[i].name.ptr,
(int) token->data.tag.attributes[i].value.len,
- pbuffer + token->data.tag.attributes[i].value.data.off);
+ token->data.tag.attributes[i].value.ptr);
}
break;
case HUBBUB_TOKEN_END_TAG:
printf("'%.*s' %s%s\n",
(int) token->data.tag.name.len,
- pbuffer + token->data.tag.name.data.off,
+ token->data.tag.name.ptr,
(token->data.tag.self_closing) ?
"(self-closing) " : "",
(token->data.tag.n_attributes > 0) ?
@@ -174,18 +160,18 @@ void token_handler(const hubbub_token *token, void *pw)
for (i = 0; i < token->data.tag.n_attributes; i++) {
printf("\t'%.*s' = '%.*s'\n",
(int) token->data.tag.attributes[i].name.len,
- pbuffer + token->data.tag.attributes[i].name.data.off,
+ token->data.tag.attributes[i].name.ptr,
(int) token->data.tag.attributes[i].value.len,
- pbuffer + token->data.tag.attributes[i].value.data.off);
+ token->data.tag.attributes[i].value.ptr);
}
break;
case HUBBUB_TOKEN_COMMENT:
printf("'%.*s'\n", (int) token->data.comment.len,
- pbuffer + token->data.comment.data.off);
+ token->data.comment.ptr);
break;
case HUBBUB_TOKEN_CHARACTER:
printf("'%.*s'\n", (int) token->data.character.len,
- pbuffer + token->data.character.data.off);
+ token->data.character.ptr);
break;
case HUBBUB_TOKEN_EOF:
printf("\n");
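
The other half of the change shows up in token_handler: hubbub_string values now expose their text through a direct ptr/len pair instead of an offset (data.off) into the buffer previously published via the buffer handler, which is why the pbuffer global and the handler that kept it up to date are removed. A small illustrative helper showing that access pattern (print_hubbub_string is a hypothetical name, not part of the test), assuming <hubbub/hubbub.h> provides the hubbub_string type as the test's includes suggest:

#include <stdio.h>
#include <hubbub/hubbub.h>

/* hypothetical helper: hubbub_string is not NUL-terminated, so the
 * length is passed to printf alongside the new direct pointer */
static void print_hubbub_string(const char *label, const hubbub_string *str)
{
	printf("%s'%.*s'\n", label, (int) str->len, str->ptr);
}

/* e.g., inside token_handler:
 *     print_hubbub_string("\tpublic: ", &token->data.doctype.public_id);
 */
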