From 1db020fb0e52995b2938512496740c11b81d61a3 Mon Sep 17 00:00:00 2001
From: Andrew Sidwell
Date: Thu, 19 Jun 2008 01:20:15 +0000
Subject: Add a tokeniser3, which tests the tokeniser byte-by-byte rather than with all the data at once.

svn path=/trunk/hubbub/; revision=4409
---
 test/Makefile | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/test/Makefile b/test/Makefile
index 3d5788c..00ca279 100644
--- a/test/Makefile
+++ b/test/Makefile
@@ -39,7 +39,7 @@ LDFLAGS := $(LDFLAGS) `$(PKGCONFIG) $(PKGCONFIGFLAGS) --libs json`
 
 # Tests
 TESTS_$(d) := aliases cscodec csdetect dict entities filter hubbub \
-	inputstream parser parser-utf16 tokeniser tokeniser2 \
+	inputstream parser parser-utf16 tokeniser tokeniser2 tokeniser3 \
 	tree
 TESTS_$(d) := $(TESTS_$(d)) regression/cscodec-segv regression/filter-segv \
 	regression/stream-nomem
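
For illustration, the byte-by-byte feeding pattern the commit message describes looks roughly like the sketch below. It uses a toy whitespace tokeniser with made-up names (toy_tok, toy_feed); these are not hubbub's API, only a self-contained stand-in for the idea of appending one byte and running the tokeniser each time, rather than handing over the whole buffer at once.

#include <stdio.h>
#include <string.h>

/* Toy stand-in for a tokeniser that can be fed input incrementally:
 * it buffers bytes and emits a "token" whenever it sees whitespace. */
struct toy_tok {
	char buf[64];
	size_t len;
};

static void toy_feed(struct toy_tok *t, const char *data, size_t len)
{
	for (size_t i = 0; i < len; i++) {
		if (data[i] == ' ' || data[i] == '\n') {
			if (t->len > 0) {
				printf("token: %.*s\n", (int) t->len, t->buf);
				t->len = 0;
			}
		} else if (t->len < sizeof(t->buf)) {
			t->buf[t->len++] = data[i];
		}
	}
}

int main(void)
{
	const char *input = "one two three\n";
	struct toy_tok t = { .len = 0 };

	/* Byte-by-byte driving loop: feed a single byte at a time rather
	 * than the whole string, so state carried across feed boundaries
	 * gets exercised. */
	for (size_t i = 0; i < strlen(input); i++)
		toy_feed(&t, &input[i], 1);

	return 0;
}

Presumably the real tokeniser3 test applies the same pattern to hubbub's tokeniser and its existing test inputs, which is what distinguishes it from tokeniser2's feed-everything-at-once approach.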