summary refs log tree commit diff
path: root/test
diff options
context:
space:
mode:
Diffstat (limited to 'test')
-rw-r--r-- test/tokeniser2.c | 14
-rw-r--r-- test/tokeniser3.c | 14
2 files changed, 14 insertions, 14 deletions
diff --git a/test/tokeniser2.c b/test/tokeniser2.c
index c8ab9c0..f468d1c 100644
--- a/test/tokeniser2.c
+++ b/test/tokeniser2.c
@@ -48,7 +48,7 @@ int main(int argc, char **argv)
}
json = json_object_from_file(argv[1]);
- assert(!is_error(json));
+ assert(json != NULL);
assert(strcmp((char *) ((json_object_get_object(json)->head)->k),
"tests") == 0);
@@ -57,7 +57,7 @@ int main(int argc, char **argv)
tests = json_object_get_array((struct json_object *)
(json_object_get_object(json)->head)->v);
- for (i = 0; i < array_list_length(tests); i++) {
+ for (i = 0; i < (int)array_list_length(tests); i++) {
/* Get test */
struct json_object *test =
(struct json_object *) array_list_get_idx(tests, i);
@@ -216,7 +216,7 @@ hubbub_error token_handler(const hubbub_token *token, void *pw)
struct json_object *obj = NULL;
struct array_list *items;
- for (; ctx->output_index < array_list_length(ctx->output);
+ for (; ctx->output_index < (int)array_list_length(ctx->output);
ctx->output_index++) {
/* Get object for index */
obj = (struct json_object *)
@@ -236,11 +236,11 @@ hubbub_error token_handler(const hubbub_token *token, void *pw)
* produced more tokens than expected. We allow for the generation
* of a terminating EOF token, however. */
assert("too many tokens" &&
- (ctx->output_index < array_list_length(ctx->output) ||
+ (ctx->output_index < (int)array_list_length(ctx->output) ||
token->type == HUBBUB_TOKEN_EOF));
/* Got a terminating EOF -- no error */
- if (ctx->output_index >= array_list_length(ctx->output))
+ if (ctx->output_index >= (int)array_list_length(ctx->output))
return HUBBUB_OK;
/* Now increment the output index so we don't re-expect this token */
@@ -438,7 +438,7 @@ hubbub_error token_handler(const hubbub_token *token, void *pw)
/* Expected token only contained part of the data
* Calculate how much is left, then try again with
* the next expected token */
- hubbub_token t;
+ hubbub_token t = *token;
t.type = HUBBUB_TOKEN_CHARACTER;
t.data.character.ptr += len;
@@ -446,7 +446,7 @@ hubbub_error token_handler(const hubbub_token *token, void *pw)
ctx->char_off = 0;
- token_handler(&t, pw);
+ return token_handler(&t, pw);
} else if (strlen(expstr + ctx->char_off) >
token->data.character.len) {
/* Tokeniser output only contained part of the data
diff --git a/test/tokeniser3.c b/test/tokeniser3.c
index e33d018..eb921ce 100644
--- a/test/tokeniser3.c
+++ b/test/tokeniser3.c
@@ -46,7 +46,7 @@ int main(int argc, char **argv)
}
json = json_object_from_file(argv[1]);
- assert(!is_error(json));
+ assert(json != NULL);
assert(strcmp((char *) ((json_object_get_object(json)->head)->k),
"tests") == 0);
@@ -55,7 +55,7 @@ int main(int argc, char **argv)
tests = json_object_get_array((struct json_object *)
(json_object_get_object(json)->head)->v);
- for (i = 0; i < array_list_length(tests); i++) {
+ for (i = 0; i < (int)array_list_length(tests); i++) {
/* Get test */
struct json_object *test =
(struct json_object *) array_list_get_idx(tests, i);
@@ -221,7 +221,7 @@ hubbub_error token_handler(const hubbub_token *token, void *pw)
struct json_object *obj = NULL;
struct array_list *items;
- for (; ctx->output_index < array_list_length(ctx->output);
+ for (; ctx->output_index < (int)array_list_length(ctx->output);
ctx->output_index++) {
/* Get object for index */
obj = (struct json_object *)
@@ -241,11 +241,11 @@ hubbub_error token_handler(const hubbub_token *token, void *pw)
* produced more tokens than expected. We allow for the generation
* of a terminating EOF token, however. */
assert("too many tokens" &&
- (ctx->output_index < array_list_length(ctx->output) ||
+ (ctx->output_index < (int)array_list_length(ctx->output) ||
token->type == HUBBUB_TOKEN_EOF));
/* Got a terminating EOF -- no error */
- if (ctx->output_index >= array_list_length(ctx->output))
+ if (ctx->output_index >= (int)array_list_length(ctx->output))
return HUBBUB_OK;
/* Now increment the output index so we don't re-expect this token */
@@ -447,7 +447,7 @@ hubbub_error token_handler(const hubbub_token *token, void *pw)
/* Expected token only contained part of the data
* Calculate how much is left, then try again with
* the next expected token */
- hubbub_token t;
+ hubbub_token t = *token;
t.type = HUBBUB_TOKEN_CHARACTER;
t.data.character.ptr += len;
@@ -455,7 +455,7 @@ hubbub_error token_handler(const hubbub_token *token, void *pw)
ctx->char_off = 0;
- token_handler(&t, pw);
+ return token_handler(&t, pw);
} else if (strlen(expstr + ctx->char_off) >
token->data.character.len) {
/* Tokeniser output only contained part of the data