#include "Importer.h"
|
|
#include "Arguments.h"
|
|
#include "Error.h"
|
|
#include "FileIO.h"
|
|
#include "sapphirepch.h"
|
|
#include "utils.h"
|
|
#include <fstream>
|
|
#define MAX_IMPORTS 100
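
// import_count counts evaluate() passes; import_stack keeps the shared
// Location objects alive for the rest of the compilation; imported_files
// records which files were already imported so duplicates can be skipped.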
int Importer::import_count = 0;

std::vector<std::shared_ptr<Location>> Importer::import_stack;
std::vector<std::string> Importer::imported_files;
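
// Expands every `import <file>;` statement by lexing the named file and
// splicing its tokens into the stream. Runs again recursively until a pass
// introduces no new imports, so imported files may themselves import files.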
TokenStream Importer::evaluate(const TokenStream& original)
{
    int i = 0;
    auto ret_tk = original;
    TokenStream new_tokens;

    while (original[i].tk_type != TT_EOF)
    {
        Token current_token = original[i];

        if (current_token.tk_type == TT_Import)
        {
            Token next_token = original[i + 1];
            if (next_token.tk_type == TT_EOF)
                Error::throw_error(current_token.loc, current_token.line(),
                                   "did not expect EOF after import statement");
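
            // Bare identifiers and path tokens are handled identically: both
            // name a .sp file, resolved relative to the working directory.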
            if (next_token.tk_type == TT_Identifier || next_token.tk_type == TT_Path) // TODO: add support for strings
            {
                Token last_token = original[i + 2];

                if (last_token.tk_type != TT_Semicolon)
                    Error::throw_error(last_token.loc, last_token.line(), "expected a semicolon");
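
                // Skip files that were already imported; this also stops
                // mutually importing files from expanding forever.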
                if (std::find(imported_files.begin(), imported_files.end(), next_token.string_value) !=
                    imported_files.end())
                {
                    if (Arguments::wimport)
                        Error::throw_warning(next_token.loc, next_token.line(), "file already imported, skipping");
                    Token::erase(ret_tk[i]);
                    Token::erase(ret_tk[i + 1]);
                    Token::erase(ret_tk[i + 2]);
                    ++i;
                    continue;
                }
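
                // import_count is bumped once per evaluate() pass, so this
                // effectively bounds the nesting depth of imports.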
                if (import_count > MAX_IMPORTS)
                    Error::throw_error(current_token.loc, current_token.line(), "maximum import depth exceeded");

                std::string input_file_name = next_token.string_value + ".sp";

                std::ifstream input_file(input_file_name); // only used to check if it exists, thus closed afterwards
                if (!input_file.good()) Error::throw_error(next_token.loc, next_token.line(), "file not found");
                input_file.close();

                auto file_contents = FileIO::read_all(input_file_name);
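
                // Build a Location for the import site; imported tokens point
                // back to it so diagnostics can print the chain of imports.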
                auto top_location = std::make_shared<Location>(current_token.loc.line, current_token.loc.column,
                                                               current_token.loc.fname);
                top_location->parent = current_token.loc.parent;

                import_stack.push_back(top_location); // Keep ref_count above 0, just in case

                auto import_lexer = Lexer::make_lexer(input_file_name);
                Lexer::assign_parent_location(import_lexer, top_location);

                TokenStream imported_tokens = import_lexer->lex(file_contents);
                imported_tokens.pop_back(); // remove EOF at end of token stream

                for (auto& tk : imported_tokens)
                {
                    tk.loc.parent = top_location;
                }

                imported_files.push_back(next_token.string_value);
                new_tokens.insert(new_tokens.end(), imported_tokens.begin(), imported_tokens.end());
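
                // Erase the `import <file> ;` tokens from the returned stream.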
                Token::erase(ret_tk[i]);
                Token::erase(ret_tk[i + 1]);
                Token::erase(ret_tk[i + 2]);
            }
            else
                Error::throw_error(next_token.loc, next_token.line(),
                                   "import keyword should be followed by an identifier or path");
        }

        ++i;
    }
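
    // If this pass spliced in any imports, prepend them and evaluate again:
    // the imported files may themselves contain import statements.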
    if (!new_tokens.empty())
    {
        new_tokens.insert(new_tokens.end(), ret_tk.begin(), ret_tk.end());
        import_count++;
        return evaluate(new_tokens);
    }

    return ret_tk;
}
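
// Register the entry-point file as already imported, so it is skipped if
// another file tries to import it back.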
void Importer::init(std::string init_file)
{
    imported_files.push_back(init_file);
}