author    John Mark Bell <jmb@netsurf-browser.org>  2008-11-09 17:40:40 +0000
committer John Mark Bell <jmb@netsurf-browser.org>  2008-11-09 17:40:40 +0000
commit    9c6e96d743c993f1b92f0cc2f07164d44780536e (patch)
tree      9067578eeb652fd998ddc5462b7ff8c7fb8abd57 /src
parent    6c0e77c643e96436d85aebb9195c83cb403c25fa (diff)
Return errors from tokeniser constructor/destructor
svn path=/trunk/hubbub/; revision=5664
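
In practice, callers switch from testing the returned pointer for NULL to
checking a hubbub_error and receiving the tokeniser through an out-parameter.
A minimal sketch of the new-style call, assuming a realloc-style allocator
and illustrative include paths (neither is part of this commit):

#include <stdlib.h>

#include <parserutils/input/inputstream.h>

#include "tokeniser/tokeniser.h"	/* in-tree header touched below */

/* realloc-style allocator matching the hubbub_alloc calls in the diff:
 * alloc(NULL, size, pw) allocates, alloc(ptr, 0, pw) frees. */
static void *my_alloc(void *ptr, size_t size, void *pw)
{
	(void) pw;
	return realloc(ptr, size);
}

static hubbub_error make_tokeniser(parserutils_inputstream *stream,
		hubbub_tokeniser **out)
{
	hubbub_error error;

	/* Old style:  tok = hubbub_tokeniser_create(stream, my_alloc, NULL);
	 *             if (tok == NULL) ...
	 * New style:  inspect the returned hubbub_error instead. */
	error = hubbub_tokeniser_create(stream, my_alloc, NULL, out);
	if (error != HUBBUB_OK)
		return error;	/* HUBBUB_BADPARM or HUBBUB_NOMEM */

	return HUBBUB_OK;
}
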
Diffstat (limited to 'src')
-rw-r--r--  src/parser.c               8
-rw-r--r--  src/tokeniser/tokeniser.c  35
-rw-r--r--  src/tokeniser/tokeniser.h  6
3 files changed, 30 insertions(+), 19 deletions(-)
diff --git a/src/parser.c b/src/parser.c
index 342f19b..b67e9d1 100644
--- a/src/parser.c
+++ b/src/parser.c
@@ -41,6 +41,7 @@ hubbub_parser *hubbub_parser_create(const char *enc, bool fix_enc,
hubbub_alloc alloc, void *pw)
{
parserutils_error perror;
+ hubbub_error error;
hubbub_parser *parser;
if (alloc == NULL)
@@ -71,11 +72,12 @@ hubbub_parser *hubbub_parser_create(const char *enc, bool fix_enc,
return NULL;
}
- parser->tok = hubbub_tokeniser_create(parser->stream, alloc, pw);
- if (parser->tok == NULL) {
+ error = hubbub_tokeniser_create(parser->stream, alloc, pw,
+ &parser->tok);
+ if (error != HUBBUB_OK) {
parserutils_inputstream_destroy(parser->stream);
alloc(parser, 0, pw);
return NULL;
}
parser->tb = hubbub_treebuilder_create(parser->tok, alloc, pw);
diff --git a/src/tokeniser/tokeniser.c b/src/tokeniser/tokeniser.c
index b5d9e07..88e767d 100644
--- a/src/tokeniser/tokeniser.c
+++ b/src/tokeniser/tokeniser.c
@@ -13,6 +13,7 @@
#include <parserutils/charset/utf8.h>
+#include "utils/parserutilserror.h"
#include "utils/utils.h"
#include "tokeniser/entities.h"
@@ -273,28 +274,31 @@ static hubbub_error hubbub_tokeniser_emit_token(hubbub_tokeniser *tokeniser,
/**
* Create a hubbub tokeniser
*
- * \param input Input stream instance
- * \param alloc Memory (de)allocation function
- * \param pw Pointer to client-specific private data (may be NULL)
- * \return Pointer to tokeniser instance, or NULL on failure
+ * \param input Input stream instance
+ * \param alloc Memory (de)allocation function
+ * \param pw Pointer to client-specific private data (may be NULL)
+ * \param tokeniser Pointer to location to receive tokeniser instance
+ * \return HUBBUB_OK on success,
+ * HUBBUB_BADPARM on bad parameters,
+ * HUBBUB_NOMEM on memory exhaustion
*/
-hubbub_tokeniser *hubbub_tokeniser_create(parserutils_inputstream *input,
- hubbub_alloc alloc, void *pw)
+hubbub_error hubbub_tokeniser_create(parserutils_inputstream *input,
+ hubbub_alloc alloc, void *pw, hubbub_tokeniser **tokeniser)
{
parserutils_error perror;
hubbub_tokeniser *tok;
- if (input == NULL || alloc == NULL)
- return NULL;
+ if (input == NULL || alloc == NULL || tokeniser == NULL)
+ return HUBBUB_BADPARM;
tok = alloc(NULL, sizeof(hubbub_tokeniser), pw);
if (tok == NULL)
- return NULL;
+ return HUBBUB_NOMEM;
perror = parserutils_buffer_create(alloc, pw, &tok->buffer);
if (perror != PARSERUTILS_OK) {
alloc(tok, 0, pw);
- return NULL;
+ return hubbub_error_from_parserutils_error(perror);
}
tok->state = STATE_DATA;
@@ -316,18 +320,21 @@ hubbub_tokeniser *hubbub_tokeniser_create(parserutils_inputstream *input,
memset(&tok->context, 0, sizeof(hubbub_tokeniser_context));
- return tok;
+ *tokeniser = tok;
+
+ return HUBBUB_OK;
}
/**
* Destroy a hubbub tokeniser
*
* \param tokeniser The tokeniser instance to destroy
+ * \return HUBBUB_OK on success, appropriate error otherwise
*/
-void hubbub_tokeniser_destroy(hubbub_tokeniser *tokeniser)
+hubbub_error hubbub_tokeniser_destroy(hubbub_tokeniser *tokeniser)
{
if (tokeniser == NULL)
- return;
+ return HUBBUB_BADPARM;
if (tokeniser->context.current_tag.attributes != NULL) {
tokeniser->alloc(tokeniser->context.current_tag.attributes,
@@ -337,6 +344,8 @@ void hubbub_tokeniser_destroy(hubbub_tokeniser *tokeniser)
parserutils_buffer_destroy(tokeniser->buffer);
tokeniser->alloc(tokeniser, 0, tokeniser->alloc_pw);
+
+ return HUBBUB_OK;
}
/**
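
The newly included utils/parserutilserror.h supplies
hubbub_error_from_parserutils_error(), used above to translate the
parserutils_buffer_create() failure instead of collapsing it to NULL. Its
body is not part of this diff; a hypothetical sketch of such a mapping,
assuming the usual parserutils error codes, might look like:

#include <parserutils/errors.h>

#include <hubbub/errors.h>

/* Hypothetical translation helper: the real routine lives in
 * utils/parserutilserror.h and may differ in detail. */
static inline hubbub_error map_parserutils_error(parserutils_error error)
{
	switch (error) {
	case PARSERUTILS_OK:
		return HUBBUB_OK;
	case PARSERUTILS_NOMEM:
		return HUBBUB_NOMEM;
	case PARSERUTILS_BADPARM:
		return HUBBUB_BADPARM;
	default:
		return HUBBUB_UNKNOWN;
	}
}
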
diff --git a/src/tokeniser/tokeniser.h b/src/tokeniser/tokeniser.h
index ffc10fe..a5bc8de 100644
--- a/src/tokeniser/tokeniser.h
+++ b/src/tokeniser/tokeniser.h
@@ -51,10 +51,10 @@ typedef union hubbub_tokeniser_optparams {
} hubbub_tokeniser_optparams;
/* Create a hubbub tokeniser */
-hubbub_tokeniser *hubbub_tokeniser_create(parserutils_inputstream *input,
- hubbub_alloc alloc, void *pw);
+hubbub_error hubbub_tokeniser_create(parserutils_inputstream *input,
+ hubbub_alloc alloc, void *pw, hubbub_tokeniser **tokeniser);
/* Destroy a hubbub tokeniser */
-void hubbub_tokeniser_destroy(hubbub_tokeniser *tokeniser);
+hubbub_error hubbub_tokeniser_destroy(hubbub_tokeniser *tokeniser);
/* Configure a hubbub tokeniser */
hubbub_error hubbub_tokeniser_setopt(hubbub_tokeniser *tokeniser,
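
The destructor change is symmetric: a caller that previously had nothing to
check can now propagate the result of tear-down. A short illustrative helper
(the function name is hypothetical):

/* Hypothetical teardown path: propagate the destructor's result rather
 * than discarding it, as the old void return type forced callers to do. */
static hubbub_error teardown_tokeniser(hubbub_tokeniser *tok)
{
	hubbub_error error;

	error = hubbub_tokeniser_destroy(tok);
	if (error != HUBBUB_OK)
		return error;	/* e.g. HUBBUB_BADPARM when tok is NULL */

	return HUBBUB_OK;
}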