Moved all benchmark code to main()
parent 1f11d8ce4d
commit 416ad3dd85
@@ -14,7 +14,6 @@ std::vector<std::string> Importer::imported_files;
 
 TokenStream Importer::evaluate(const TokenStream& original)
 {
-    benchmark();
     int i = 0;
     auto ret_tk = original;
     TokenStream new_tokens;
@@ -77,7 +77,6 @@ bool Lexer::is_in_string(const std::string& string, const char& character)
 
 TokenStream Lexer::lex(const std::string& text)
 {
-    benchmark();
     TokenStream result;
     bool comment = false;
     current_lexed_text = text;
@@ -3,7 +3,6 @@
 
 TokenStream Normalizer::normalize(const TokenStream& input)
 {
-    benchmark();
     TokenStream result;
     int i = 0;
     while (i < input.size())
@@ -4,6 +4,7 @@
 #include "Lexer.h"
 #include "Normalizer.h"
 #include "sapphirepch.h"
+#include "utils.h"
 
 int main(int argc, char** argv)
 {
@@ -12,12 +13,23 @@ int main(int argc, char** argv)
     std::string fname = Arguments::input_fname;
     std::string contents = FileIO::read_all(fname);
 
-    TokenStream result = Lexer::make_lexer(fname)->lex(contents);
+    std::unique_ptr<Lexer> lexer = Lexer::make_lexer(fname);
+
+    TokenStream result;
+    {
+        benchmark("Lexing");
+        result = lexer->lex(contents);
+    }
 
     Importer::init(FileIO::remove_file_extension(fname));
-    result = Importer::evaluate(result);
-    result = Normalizer::normalize(result);
+    {
+        benchmark("Importing");
+        result = Importer::evaluate(result);
+    }
+    {
+        benchmark("Normalizing");
+        result = Normalizer::normalize(result);
+    }
 
     for (int i = 0; i < result.size(); i++)
     {
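The braces added around each stage in main() only pay off if benchmark(...) binds a timer to the enclosing scope, so the measurement stops when the block ends. The helper itself is not part of this diff (it presumably comes from the newly included utils.h), so the following is only a minimal sketch of a scope-bound timer that would behave the way these blocks suggest; the class name ScopedBenchmark, the macro shape, and the output format are illustrative assumptions, not the project's actual code.

#include <chrono>
#include <cstdio>
#include <string>

// Illustrative sketch only: starts timing on construction and reports on
// destruction, so it measures exactly the block it is declared in.
class ScopedBenchmark
{
public:
    explicit ScopedBenchmark(std::string label)
        : m_label(std::move(label)), m_start(std::chrono::steady_clock::now())
    {
    }

    ~ScopedBenchmark()
    {
        const auto end = std::chrono::steady_clock::now();
        const auto us =
            std::chrono::duration_cast<std::chrono::microseconds>(end - m_start).count();
        std::printf("%s took %lld us\n", m_label.c_str(), static_cast<long long>(us));
    }

private:
    std::string m_label;
    std::chrono::steady_clock::time_point m_start;
};

// One plausible reading of the calls in main(): benchmark(name) expands to a
// local timer object whose destructor fires at the closing brace of the block.
#define benchmark(name) ScopedBenchmark scoped_benchmark_timer(name)

int main()
{
    {
        benchmark("Lexing"); // prints its timing when this block ends
        // ... the lexing stage would run here ...
    }
    return 0;
}

Under that assumption, wrapping Lexing, Importing, and Normalizing in separate blocks makes each timer stop as soon as its stage finishes rather than at the end of main(), which is what moving the benchmark calls out of the individual functions achieves.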