author	John Mark Bell <jmb@netsurf-browser.org>	2008-08-13 11:07:16 +0000
committer	John Mark Bell <jmb@netsurf-browser.org>	2008-08-13 11:07:16 +0000
commit	1e55c5ba81e9a8883c41d0e991d245c5397c44b9 (patch)
tree	2168987f417cb1b48963c524e0b2b044c62cfb08 /src/tokeniser
parent	704cd8b4f290c2afb0579b940773f1d73c0a2cc5 (diff)
Sanity checking for string data
svn path=/trunk/hubbub/; revision=5080
Diffstat (limited to 'src/tokeniser')
-rw-r--r--	src/tokeniser/tokeniser.c	39
1 file changed, 39 insertions(+), 0 deletions(-)
diff --git a/src/tokeniser/tokeniser.c b/src/tokeniser/tokeniser.c
index 9144c38..1188e2d 100644
--- a/src/tokeniser/tokeniser.c
+++ b/src/tokeniser/tokeniser.c
@@ -2969,6 +2969,45 @@ hubbub_error hubbub_tokeniser_emit_token(hubbub_tokeniser *tokeniser,
 	assert(tokeniser != NULL);
 	assert(token != NULL);
 
+#ifndef NDEBUG
+	/* Sanity checks */
+	switch (token->type) {
+	case HUBBUB_TOKEN_DOCTYPE:
+		assert(memchr(token->data.doctype.name.ptr, 0xff,
+				token->data.doctype.name.len) == NULL);
+		if (token->data.doctype.public_missing == false)
+			assert(memchr(token->data.doctype.public_id.ptr, 0xff,
+				token->data.doctype.public_id.len) == NULL);
+		if (token->data.doctype.system_missing == false)
+			assert(memchr(token->data.doctype.system_id.ptr, 0xff,
+				token->data.doctype.system_id.len) == NULL);
+		break;
+	case HUBBUB_TOKEN_START_TAG:
+	case HUBBUB_TOKEN_END_TAG:
+		assert(memchr(token->data.tag.name.ptr, 0xff,
+				token->data.tag.name.len) == NULL);
+		for (uint32_t i = 0; i < token->data.tag.n_attributes; i++) {
+			hubbub_attribute *attr = &token->data.tag.attributes[i];
+
+			assert(memchr(attr->name.ptr, 0xff, attr->name.len) ==
+					NULL);
+			assert(memchr(attr->value.ptr, 0xff, attr->value.len) ==
+					NULL);
+		}
+		break;
+	case HUBBUB_TOKEN_COMMENT:
+		assert(memchr(token->data.comment.ptr, 0xff,
+				token->data.comment.len) == NULL);
+		break;
+	case HUBBUB_TOKEN_CHARACTER:
+		assert(memchr(token->data.character.ptr, 0xff,
+				token->data.character.len) == NULL);
+		break;
+	case HUBBUB_TOKEN_EOF:
+		break;
+	}
+#endif
+
 	/* Emit the token */
 	if (tokeniser->token_handler) {
 		err = tokeniser->token_handler(token, tokeniser->token_pw);
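The whole check is memchr(ptr, 0xff, len) == NULL over every string carried by the emitted token: finding a 0xff byte, which never occurs in well-formed UTF-8, presumably signals corrupted or uninitialised string data before it reaches the token handler. A minimal standalone sketch of the same pattern follows; it is not part of the libhubbub sources, and the helper name and test buffers are illustrative assumptions only.

/*
 * Standalone sketch (not libhubbub API): the same memchr()-based
 * sanity check as in the patch above, applied to an arbitrary
 * byte range.
 */
#include <assert.h>
#include <stddef.h>
#include <stdint.h>
#include <string.h>

/* Returns non-zero if the byte range contains no 0xff marker bytes. */
static int string_is_sane(const uint8_t *ptr, size_t len)
{
	return memchr(ptr, 0xff, len) == NULL;
}

int main(void)
{
	const uint8_t good[] = { 'h', 't', 'm', 'l' };
	const uint8_t bad[] = { 'h', 0xff, 'm', 'l' };

	assert(string_is_sane(good, sizeof good));	/* clean data passes */
	assert(!string_is_sane(bad, sizeof bad));	/* stray 0xff is caught */

	return 0;
}

As in the patch, such assertions would normally be compiled out of release builds by defining NDEBUG, so the cost is paid only in debug builds.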