Preprocesser: Fix memory leak when including another file
Fixed a memory leak that occurred when an included file hit a lexing error: the freshly allocated tokens were not added to the token bag before the error was handled, so they were never freed.
@@ -129,14 +129,14 @@ namespace Preprocesser
 Lexer::Err lexer_err = Lexer::tokenise_buffer(tokens[i + 1]->content,
                                               content.value(), body);
 
-if (lexer_err.type != LET::OK)
-  return new Err{ET::IN_FILE_LEXING, token, nullptr, lexer_err};
-
-// Here we add the tokens, freshly allocated, to the bag so we can
-// free it later
+// Add tokens to the bag for deallocation later
+// NOTE: We do this before errors so no memory leaks happen
 new_token_bag.insert(std::end(new_token_bag), std::begin(body),
                      std::end(body));
 
+if (lexer_err.type != LET::OK)
+  return new Err{ET::IN_FILE_LEXING, token, nullptr, lexer_err};
+
 file_map[name].body = body;
 std::vector<Unit> body_units;
 Err *err = preprocess(body, body_units, new_token_bag, const_map,
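
The change above moves the bag insertion ahead of the error check, so tokens produced by a partial lex are always owned by the bag before any early return. Below is a minimal, self-contained sketch of that ownership pattern; all names here (Token, LexResult, lex_included_file, include_file) are hypothetical stand-ins, not the project's actual API.

#include <vector>

struct Token {};

struct LexResult
{
  bool ok;
  std::vector<Token *> tokens; // heap-allocated, produced even on failure
};

// Stub lexer: pretend it allocated two tokens, then hit an error.
LexResult lex_included_file()
{
  return {false, {new Token{}, new Token{}}};
}

bool include_file(std::vector<Token *> &token_bag)
{
  LexResult res = lex_included_file();

  // Register allocations BEFORE checking for errors, so the partial
  // tokens are owned by the bag (and freed later) on every path.
  token_bag.insert(token_bag.end(), res.tokens.begin(), res.tokens.end());

  if (!res.ok)
    return false; // error path no longer leaks the partial tokens

  // ... continue preprocessing res.tokens ...
  return true;
}

int main()
{
  std::vector<Token *> token_bag;
  include_file(token_bag);

  // Single cleanup point frees everything the bag owns.
  for (Token *t : token_bag)
    delete t;
}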