From: Martin Mares
Date: Sat, 24 Nov 2007 14:38:39 +0000 (+0100)
Subject: Fixed a bug in tokenization.
X-Git-Tag: python-dummy-working~259
X-Git-Url: http://mj.ucw.cz/gitweb/?a=commitdiff_plain;h=43b2cd49673f6fc77e5ff8b4483560e812c1ef9a;p=eval.git

Fixed a bug in tokenization.
---

diff --git a/judge/judge-shuff.c b/judge/judge-shuff.c
index fe6933e..99df3d5 100644
--- a/judge/judge-shuff.c
+++ b/judge/judge-shuff.c
@@ -34,7 +34,7 @@ struct tokbuf {
   char *read_pos;
 };
 
-#define TOKBUF_PAGE 256
+#define TOKBUF_PAGE 65536
 
 static void init_tokbuf(struct tokbuf *tb)
 {
@@ -47,6 +47,8 @@ static void add_token(struct tokbuf *tb, char *token, int l)
   struct tokpage *pg = tb->last_page;
   if (!pg || pg->end - pg->pos < l)
     {
+      if (pg)
+        pg->end = pg->pos;
       int size = TOKBUF_PAGE - sizeof(struct tokbuf);
       if (l > size/5)
         size = l;
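
For context, a minimal sketch of why the added clamp matters. This is an assumption-laden illustration, not the real judge-shuff.c code: the page layout, the field names, the 64-byte page size and the reader loop are all invented for the example. The point it shows: pos is the fill pointer and end initially marks the end of the allocated page, so when a token no longer fits and a fresh page is started, the old page's end must be pulled back to pos; otherwise a later reader walking each page up to end would treat the uninitialized tail as token data.

/*
 * Sketch of a paged token buffer (assumed layout, not the real
 * judge-shuff.c structures).  Tokens are appended through `pos`;
 * `end` starts at the end of the page and is clamped to `pos`
 * when the page is closed, so readers know the valid extent.
 */
#include <stdio.h>
#include <stdlib.h>
#include <string.h>

struct page {
  struct page *next;
  char *pos;                    /* fill pointer: next free byte           */
  char *end;                    /* end of usable/valid data in this page  */
  char data[64];                /* tiny page size to force page turns     */
};

struct tokbuf {
  struct page *first, *last;
};

static struct page *new_page(struct tokbuf *tb)
{
  struct page *pg = malloc(sizeof(*pg));
  if (!pg)
    abort();
  pg->next = NULL;
  pg->pos = pg->data;
  pg->end = pg->data + sizeof(pg->data);
  if (tb->last)
    tb->last->next = pg;
  else
    tb->first = pg;
  tb->last = pg;
  return pg;
}

static void add_token(struct tokbuf *tb, const char *token)
{
  int l = (int)strlen(token) + 1;       /* store the terminating NUL, too */
  struct page *pg = tb->last;
  if (!pg || pg->end - pg->pos < l)
    {
      if (pg)
        pg->end = pg->pos;      /* the fix: record the real extent of the old page */
      pg = new_page(tb);
    }
  memcpy(pg->pos, token, l);
  pg->pos += l;
}

int main(void)
{
  struct tokbuf tb = { NULL, NULL };
  for (int i = 0; i < 20; i++)
    {
      char t[16];
      snprintf(t, sizeof(t), "tok%d", i);
      add_token(&tb, t);
    }
  if (tb.last)
    tb.last->end = tb.last->pos;        /* close the final page before reading */

  /* Reader: walk every page up to its `end`.  Without the clamp in
     add_token(), this loop would also scan the unwritten tail of each
     closed page and emit garbage "tokens". */
  for (struct page *pg = tb.first; pg; pg = pg->next)
    for (char *p = pg->data; p < pg->end; p += strlen(p) + 1)
      puts(p);
  return 0;
}

The real code additionally sizes new pages relative to TOKBUF_PAGE (now 65536) and the length of the incoming token; the sketch keeps a fixed page size only to make the page-turn case easy to trigger.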