lexer now produces a vector of heap-allocated tokens
This removes the problem of possibly expensive copies happening when working with tokens produced by the lexer (copies that C++ just... does implicitly): now we hold pointers, which are trivially cheap to copy. I want expensive work to be done by me and for a reason: I want to be holding the shotgun.
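As a sketch of the pattern (the real token_t fields are not shown in this diff; the struct below is an assumption for illustration, using the line/column names that do appear):

    #include <cstddef>
    #include <string>
    #include <vector>

    struct token_t
    {
      std::string content; // assumed heap-backed payload: copying this is the expensive part
      size_t line, column;
    };

    // Before: push_back copies the whole token, including its payload.
    void push_by_value(std::vector<token_t> &tokens, const token_t &t)
    {
      tokens.push_back(t); // implicit, possibly expensive copy
    }

    // After: one deliberate copy onto the heap; the vector then holds
    // pointers, and copying a pointer costs next to nothing.
    void push_by_pointer(std::vector<token_t *> &tokens, const token_t &t)
    {
      token_t *acc = new token_t(t); // the one explicit copy
      tokens.push_back(acc);         // cheap: just a pointer
    }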
@@ -310,7 +310,7 @@ token_t tokenise_literal_string(string_view &source, size_t &column, size_t end)
   return token;
 }
 
-lerr_t tokenise_buffer(string_view source, std::vector<token_t> &tokens)
+lerr_t tokenise_buffer(string_view source, std::vector<token_t *> &tokens)
 {
   size_t column = 0, line = 1;
   while (source.size() > 0)
@@ -393,8 +393,9 @@ lerr_t tokenise_buffer(string_view source, std::vector<token_t> &tokens)
     }
     if (is_token)
     {
       t.line = line;
-      tokens.push_back(t);
+      token_t *acc = new token_t(t);
+      tokens.push_back(acc);
     }
   }
   return lerr_t::OK;
@@ -91,6 +91,6 @@ enum class lerr_t
 };
 const char *lerr_as_cstr(lerr_t);
 
-lerr_t tokenise_buffer(std::string_view, std::vector<token_t> &);
+lerr_t tokenise_buffer(std::string_view, std::vector<token_t *> &);
 
 #endif
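One caveat worth noting (assumed, since no cleanup appears in this diff): with raw new-ed pointers the caller now owns every token, so a consumer would need to free them explicitly. A hypothetical helper, not part of this commit:

    // Assumed caller-side cleanup: the vector holds raw heap pointers,
    // so whoever consumes the tokens must delete them when done.
    void free_tokens(std::vector<token_t *> &tokens)
    {
      for (token_t *t : tokens)
        delete t;
      tokens.clear();
    }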