author     Andrew Sidwell <andy@entai.co.uk>   2008-08-04 01:15:19 +0000
committer  Andrew Sidwell <andy@entai.co.uk>   2008-08-04 01:15:19 +0000
commit     f88f034a4f9e0db06a64326198e19b2ef5a0283f (patch)
tree       8469c805584826d8f9b128dcc6cc87e7e86d093c /src/tokeniser
parent     b4760da5d776ca41efb612f373b0545590ef4df3 (diff)
download   libhubbub-f88f034a4f9e0db06a64326198e19b2ef5a0283f.tar.gz
           libhubbub-f88f034a4f9e0db06a64326198e19b2ef5a0283f.tar.bz2
Fix previous commit.
svn path=/trunk/hubbub/; revision=4893
Diffstat (limited to 'src/tokeniser')
-rw-r--r--  src/tokeniser/tokeniser.c  20
1 file changed, 14 insertions, 6 deletions
diff --git a/src/tokeniser/tokeniser.c b/src/tokeniser/tokeniser.c
index 1f1ae40..54ad118 100644
--- a/src/tokeniser/tokeniser.c
+++ b/src/tokeniser/tokeniser.c
@@ -2811,17 +2811,25 @@ bool hubbub_tokeniser_handle_named_entity(hubbub_tokeniser *tokeniser)
                 return false;
 
         cptr = parserutils_inputstream_peek(tokeniser->input,
-                        ctx->match_entity.offset + ctx->match_entity.length - 1,
+                        ctx->match_entity.offset + ctx->match_entity.length,
                         &len);
         c = CHAR(cptr);
 
         if ((tokeniser->context.match_entity.return_state ==
                         STATE_CHARACTER_REFERENCE_IN_ATTRIBUTE_VALUE) &&
-                        (c != ';') &&
-                        ((0x0030 <= c && c <= 0x0039) ||
-                        (0x0041 <= c && c <= 0x005A) ||
-                        (0x0061 <= c && c <= 0x007A))) {
-                ctx->match_entity.codepoint = 0;
+                        (c != ';')) {
+
+                cptr = parserutils_inputstream_peek(tokeniser->input,
+                                ctx->match_entity.offset +
+                                        ctx->match_entity.length,
+                                &len);
+                c = CHAR(cptr);
+
+                if ((0x0030 <= c && c <= 0x0039) ||
+                                (0x0041 <= c && c <= 0x005A) ||
+                                (0x0061 <= c && c <= 0x007A)) {
+                        ctx->match_entity.codepoint = 0;
+                }
         }
 
         /* Flag completion */
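
For context, parserutils_inputstream_peek looks ahead in the input without consuming it, so after this change the tokeniser inspects the character immediately following the matched entity (offset + length) rather than the entity's last character. Only when that character is not ';' and the match was made inside an attribute value does it then test for an ASCII alphanumeric; if the test fires, match_entity.codepoint is zeroed and the text is emitted literally instead of as a character reference. A minimal standalone sketch of that rule follows; the helper name and simplified types are hypothetical and not part of hubbub's API:

    #include <stdbool.h>
    #include <stdint.h>

    /* Hypothetical helper (illustration only, not hubbub's API): given the
     * character that follows a matched named entity inside an attribute
     * value, decide whether the match should be dropped so the text is
     * kept literal.  Mirrors the committed logic: a match not followed by
     * ';' is discarded when the next character is an ASCII alphanumeric,
     * so e.g. in href="?lang=en&copy2=1" the "&copy" is not expanded
     * because the following character is the digit '2'. */
    static bool discard_attr_entity_match(uint32_t next)
    {
            if (next == ';')
                    return false;   /* reference is properly terminated */

            return (0x0030 <= next && next <= 0x0039) ||    /* 0-9 */
                   (0x0041 <= next && next <= 0x005A) ||    /* A-Z */
                   (0x0061 <= next && next <= 0x007A);      /* a-z */
    }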