diff --git a/src/Importer.cpp b/src/Importer.cpp
index c332f8c..bb26551 100644
--- a/src/Importer.cpp
+++ b/src/Importer.cpp
@@ -14,7 +14,6 @@ std::vector Importer::imported_files;
 
 TokenStream Importer::evaluate(const TokenStream& original)
 {
-    benchmark();
     int i = 0;
     auto ret_tk = original;
     TokenStream new_tokens;
diff --git a/src/Lexer.cpp b/src/Lexer.cpp
index 47ccd98..c75150f 100644
--- a/src/Lexer.cpp
+++ b/src/Lexer.cpp
@@ -77,7 +77,6 @@ bool Lexer::is_in_string(const std::string& string, const char& character)
 
 TokenStream Lexer::lex(const std::string& text)
 {
-    benchmark();
     TokenStream result;
     bool comment = false;
     current_lexed_text = text;
diff --git a/src/Normalizer.cpp b/src/Normalizer.cpp
index baa2e25..f048457 100644
--- a/src/Normalizer.cpp
+++ b/src/Normalizer.cpp
@@ -3,7 +3,6 @@
 
 TokenStream Normalizer::normalize(const TokenStream& input)
 {
-    benchmark();
     TokenStream result;
     int i = 0;
     while (i < input.size())
diff --git a/src/sapphire.cpp b/src/sapphire.cpp
index dbf67c1..10340c7 100644
--- a/src/sapphire.cpp
+++ b/src/sapphire.cpp
@@ -4,6 +4,7 @@
 #include "Lexer.h"
 #include "Normalizer.h"
 #include "sapphirepch.h"
+#include "utils.h"
 
 int main(int argc, char** argv)
 {
@@ -12,12 +13,23 @@ int main(int argc, char** argv)
     std::string fname = Arguments::input_fname;
     std::string contents = FileIO::read_all(fname);
 
-    TokenStream result = Lexer::make_lexer(fname)->lex(contents);
+    std::unique_ptr<Lexer> lexer = Lexer::make_lexer(fname);
+
+    TokenStream result;
+    {
+        benchmark("Lexing");
+        result = lexer->lex(contents);
+    }
 
     Importer::init(FileIO::remove_file_extension(fname));
-
-    result = Importer::evaluate(result);
-    result = Normalizer::normalize(result);
+    {
+        benchmark("Importing");
+        result = Importer::evaluate(result);
+    }
+    {
+        benchmark("Normalizing");
+        result = Normalizer::normalize(result);
+    }
 
     for (int i = 0; i < result.size(); i++)
     {