path: root/src/tokenizer.cpp
author    ridiculousfish <corydoras@ridiculousfish.com>  2015-07-25 23:05:47 -0700
committer ridiculousfish <corydoras@ridiculousfish.com>  2015-07-25 23:05:47 -0700
commit    618896c0436e6ca70feb2fa317b34171cc4e1a81 (patch)
tree      4f6d8130f52fe7c9523e034ab85fe3b49fd0ea0b /src/tokenizer.cpp
parent    0dbd83ffaf571dce9b1e8449c28e3ae0040d4e75 (diff)
Early reworking of tokenizer interface
Diffstat (limited to 'src/tokenizer.cpp')
-rw-r--r--  src/tokenizer.cpp | 20 ++++++++++++++++++--
1 file changed, 18 insertions(+), 2 deletions(-)
diff --git a/src/tokenizer.cpp b/src/tokenizer.cpp
index 55e9dc9c..22412c47 100644
--- a/src/tokenizer.cpp
+++ b/src/tokenizer.cpp
@@ -55,7 +55,7 @@ segments.
/**
Set the latest tokens string to be the specified error message
*/
-static void tok_call_error(tokenizer_t *tok, int error_type, const wchar_t *error_message)
+static void tok_call_error(tokenizer_t *tok, enum tokenizer_error error_type, const wchar_t *error_message)
{
tok->last_type = TOK_ERROR;
tok->error = error_type;
@@ -67,7 +67,7 @@ int tok_get_error(tokenizer_t *tok)
return tok->error;
}
-tokenizer_t::tokenizer_t(const wchar_t *b, tok_flags_t flags) : buff(NULL), orig_buff(NULL), last_type(TOK_NONE), last_pos(0), has_next(false), accept_unfinished(false), show_comments(false), show_blank_lines(false), error(0), squash_errors(false), continue_line_after_comment(false)
+tokenizer_t::tokenizer_t(const wchar_t *b, tok_flags_t flags) : buff(NULL), orig_buff(NULL), last_type(TOK_NONE), last_pos(0), has_next(false), accept_unfinished(false), show_comments(false), show_blank_lines(false), error(TOK_ERROR_NONE), squash_errors(false), continue_line_after_comment(false)
{
CHECK(b,);
@@ -81,6 +81,22 @@ tokenizer_t::tokenizer_t(const wchar_t *b, tok_flags_t flags) : buff(NULL), orig
tok_next(this);
}
+bool tokenizer_t::next(struct tok_t *result)
+{
+ assert(result != NULL);
+ if (! this->has_next)
+ {
+ return false;
+ }
+ result->text = this->last_token;
+ result->type = this->last_type;
+ result->offset = last_pos;
+ assert(this->buff >= this->orig_buff);
+ result->length = this->buff - this->orig_buff;
+ tok_next(this);
+ return true;
+}
+
enum token_type tok_last_type(tokenizer_t *tok)
{
CHECK(tok, TOK_ERROR);
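
The hunks above add a loop-oriented entry point: instead of polling the tok_last_type()/tok_last() accessors after each tok_next() call, a caller can now pull one filled-in tok_t per iteration. Below is a minimal usage sketch, not part of the commit: it assumes tokenizer.h declares the types shown in the diff, that tok_t::text is a wcstring as elsewhere in fish, and that passing 0 for tok_flags_t means "no flags"; dump_tokens and its output format are illustrative only.

    // Hypothetical consumer of the new tokenizer_t::next() interface.
    #include "tokenizer.h"
    #include <cwchar>

    static void dump_tokens(const wchar_t *input)
    {
        tokenizer_t tok(input, 0);
        struct tok_t token;
        // next() copies the current token into *result and advances,
        // returning false once has_next goes false, so each token is
        // visited exactly once.
        while (tok.next(&token))
        {
            fwprintf(stderr, L"type %d at offset %lu: %ls\n",
                     (int)token.type, (unsigned long)token.offset,
                     token.text.c_str());
        }
    }

Compared with the older accessor-per-field style, handing back a single struct keeps a token's fields consistent with one another and keeps the has_next bookkeeping private to the tokenizer.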