From d54bea3276cfd823e5a0e52365d64a9cd7e81d51 Mon Sep 17 00:00:00 2001 From: Aryadev Chavali Date: Wed, 10 Jul 2024 01:35:42 +0100 Subject: [PATCH] Preprocesser: Fix memory leak when including another file Fixed a memory leak that occurred when an included file had an error in lexing: its freshly allocated tokens were not added to the token bag before error handling, so they were never freed. --- src/preprocesser.cpp | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/src/preprocesser.cpp b/src/preprocesser.cpp index 2712aa5..d1dd892 100644 --- a/src/preprocesser.cpp +++ b/src/preprocesser.cpp @@ -129,14 +129,14 @@ namespace Preprocesser Lexer::Err lexer_err = Lexer::tokenise_buffer(tokens[i + 1]->content, content.value(), body); - if (lexer_err.type != LET::OK) - return new Err{ET::IN_FILE_LEXING, token, nullptr, lexer_err}; - - // Here we add the tokens, freshly allocated, to the bag so we can - // free it later + // Add tokens to the bag for deallocation later + // NOTE: We do this before errors so no memory leaks happen new_token_bag.insert(std::end(new_token_bag), std::begin(body), std::end(body)); + if (lexer_err.type != LET::OK) + return new Err{ET::IN_FILE_LEXING, token, nullptr, lexer_err}; + file_map[name].body = body; std::vector body_units; Err *err = preprocess(body, body_units, new_token_bag, const_map,