summaryrefslogtreecommitdiff
path: root/src/tokeniser
diff options
context:
space:
mode:
authorAndrew Sidwell <andy@entai.co.uk>2008-08-13 15:33:02 +0000
committerAndrew Sidwell <andy@entai.co.uk>2008-08-13 15:33:02 +0000
commit22245bce3db591076a4e7ccb352757d994fb8adc (patch)
tree48f2f82b2b28a4b10a92eb9b55334dd37113015c /src/tokeniser
parent523dc4f249106cfcd8547ad0743b76bb1a4d68cf (diff)
downloadlibhubbub-22245bce3db591076a4e7ccb352757d994fb8adc.tar.gz
libhubbub-22245bce3db591076a4e7ccb352757d994fb8adc.tar.bz2
Optimise comment states slightly, taking advantage of the fact that buffers store their own length and when emitting the comment, the buffer contains the whole comment and nothing else.
svn path=/trunk/hubbub/; revision=5095
Diffstat (limited to 'src/tokeniser')
-rw-r--r--src/tokeniser/tokeniser.c21
1 file changed, 1 insertion, 20 deletions
diff --git a/src/tokeniser/tokeniser.c b/src/tokeniser/tokeniser.c
index b01ada9..755b3b5 100644
--- a/src/tokeniser/tokeniser.c
+++ b/src/tokeniser/tokeniser.c
@@ -1587,8 +1587,6 @@ hubbub_error hubbub_tokeniser_handle_self_closing_start_tag(
/* this state expects tokeniser->context.chars to be empty on first entry */
hubbub_error hubbub_tokeniser_handle_bogus_comment(hubbub_tokeniser *tokeniser)
{
- hubbub_string *comment = &tokeniser->context.current_comment;
-
size_t len;
uintptr_t cptr = parserutils_inputstream_peek(tokeniser->input,
tokeniser->context.pending, &len);
@@ -1597,8 +1595,6 @@ hubbub_error hubbub_tokeniser_handle_bogus_comment(hubbub_tokeniser *tokeniser)
return HUBBUB_OOD;
} else if (cptr == PARSERUTILS_INPUTSTREAM_EOF) {
tokeniser->state = STATE_DATA;
- tokeniser->context.current_comment.ptr =
- tokeniser->buffer->data;
return emit_current_comment(tokeniser);
}
@@ -1614,7 +1610,6 @@ hubbub_error hubbub_tokeniser_handle_bogus_comment(hubbub_tokeniser *tokeniser)
} else if (c == '\0') {
parserutils_buffer_append(tokeniser->buffer,
u_fffd, sizeof(u_fffd));
- comment->len += sizeof(u_fffd);
} else if (c == '\r') {
cptr = parserutils_inputstream_peek(
tokeniser->input,
@@ -1627,14 +1622,12 @@ hubbub_error hubbub_tokeniser_handle_bogus_comment(hubbub_tokeniser *tokeniser)
CHAR(cptr) != '\n') {
parserutils_buffer_append(tokeniser->buffer,
&lf, sizeof(lf));
- comment->len += sizeof(lf);
}
tokeniser->context.pending += len;
} else {
parserutils_buffer_append(tokeniser->buffer,
(uint8_t *)cptr, len);
- comment->len += len;
}
return HUBBUB_OK;
@@ -1709,8 +1702,6 @@ hubbub_error hubbub_tokeniser_handle_match_comment(hubbub_tokeniser *tokeniser)
hubbub_error hubbub_tokeniser_handle_comment(hubbub_tokeniser *tokeniser)
{
- hubbub_string *comment = &tokeniser->context.current_comment;
-
size_t len;
uintptr_t cptr = parserutils_inputstream_peek(
tokeniser->input, tokeniser->context.pending, &len);
@@ -1718,8 +1709,6 @@ hubbub_error hubbub_tokeniser_handle_comment(hubbub_tokeniser *tokeniser)
if (cptr == PARSERUTILS_INPUTSTREAM_OOD) {
return HUBBUB_OOD;
} else if (cptr == PARSERUTILS_INPUTSTREAM_EOF) {
- tokeniser->context.current_comment.ptr =
- tokeniser->buffer->data;
tokeniser->state = STATE_DATA;
return emit_current_comment(tokeniser);
}
@@ -1732,8 +1721,6 @@ hubbub_error hubbub_tokeniser_handle_comment(hubbub_tokeniser *tokeniser)
tokeniser->context.pending += len;
/** \todo parse error if state != COMMENT_END */
- tokeniser->context.current_comment.ptr =
- tokeniser->buffer->data;
tokeniser->state = STATE_DATA;
return emit_current_comment(tokeniser);
} else if (c == '-') {
@@ -1748,7 +1735,6 @@ hubbub_error hubbub_tokeniser_handle_comment(hubbub_tokeniser *tokeniser)
} else if (tokeniser->state == STATE_COMMENT_END) {
parserutils_buffer_append(tokeniser->buffer,
(uint8_t *) "-", SLEN("-"));
- comment->len += SLEN("-");
}
tokeniser->context.pending += len;
@@ -1757,17 +1743,14 @@ hubbub_error hubbub_tokeniser_handle_comment(hubbub_tokeniser *tokeniser)
tokeniser->state == STATE_COMMENT_END_DASH) {
parserutils_buffer_append(tokeniser->buffer,
(uint8_t *) "-", SLEN("-"));
- comment->len += SLEN("-");
} else if (tokeniser->state == STATE_COMMENT_END) {
parserutils_buffer_append(tokeniser->buffer,
(uint8_t *) "--", SLEN("--"));
- comment->len += SLEN("--");
}
if (c == '\0') {
parserutils_buffer_append(tokeniser->buffer,
u_fffd, sizeof(u_fffd));
- comment->len += sizeof(u_fffd);
} else if (c == '\r') {
cptr = parserutils_inputstream_peek(
tokeniser->input,
@@ -1779,12 +1762,10 @@ hubbub_error hubbub_tokeniser_handle_comment(hubbub_tokeniser *tokeniser)
CHAR(cptr) != '\n') {
parserutils_buffer_append(tokeniser->buffer,
&lf, sizeof(lf));
- comment->len += sizeof(lf);
}
} else {
parserutils_buffer_append(tokeniser->buffer,
(uint8_t *)cptr, len);
- comment->len += len;
}
tokeniser->context.pending += len;
@@ -2937,8 +2918,8 @@ hubbub_error emit_current_comment(hubbub_tokeniser *tokeniser)
hubbub_token token;
token.type = HUBBUB_TOKEN_COMMENT;
- token.data.comment = tokeniser->context.current_comment;
token.data.comment.ptr = tokeniser->buffer->data;
+ token.data.comment.len = tokeniser->buffer->length;
return hubbub_tokeniser_emit_token(tokeniser, &token);
}