about summary refs log tree commit diff homepage
path: root/tokenizer.cpp
diff options
context:
space:
mode:
authorGravatar ridiculousfish <corydoras@ridiculousfish.com>2014-11-24 01:20:57 -0800
committerGravatar ridiculousfish <corydoras@ridiculousfish.com>2014-11-24 01:23:42 -0800
commiteafd5776292c37d37870fc6013029f7146f34f70 (patch)
tree6d9d81452eef02560933c42734ce92562407d875 /tokenizer.cpp
parent196a7c9d188304cd6b189b1bcf4e2c088fcf3434 (diff)
Hack the tokenizer to compress multiple adjacent newlines into one
This slightly reduces the size of parse trees, and is otherwise a minor optimization
Diffstat (limited to 'tokenizer.cpp')
-rw-r--r--tokenizer.cpp12
1 file changed, 10 insertions, 2 deletions
diff --git a/tokenizer.cpp b/tokenizer.cpp
index 17999356..29db04bd 100644
--- a/tokenizer.cpp
+++ b/tokenizer.cpp
@@ -621,14 +621,22 @@ void tok_next(tokenizer_t *tok)
switch (*tok->buff)
{
-
case L'\0':
tok->last_type = TOK_END;
/*fwprintf( stderr, L"End of string\n" );*/
tok->has_next = false;
break;
- case 13:
+ case 13: // carriage return
case L'\n':
+ // Hack: when we get a newline, swallow as many as we can
+ // This compresses multiple subsequent newlines into a single one
+ while (*tok->buff == L'\n' || *tok->buff == 13 || *tok->buff == ' ' || *tok->buff == '\t')
+ {
+ tok->buff++;
+ }
+ tok->last_type = TOK_END;
+ break;
+
case L';':
tok->last_type = TOK_END;
tok->buff++;