used clang-format :)

commit 4f6a1235a0
parent 8fe5297933
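Several of the header hunks below appear to differ only in whitespace, which is exactly what the UseTab switch in the first hunk controls. As a rough illustration (my own sketch, not a line taken from the repository), a declaration such as ASTNode would be laid out like this once clang-format replaces tab indentation with spaces, per TabWidth: '4' and UseTab: Never:

// Illustrative only: four-space indentation as produced by UseTab: Never with TabWidth: '4'.
class ASTNode
{
public:
    ASTNode();
    ~ASTNode();
};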
@@ -10,6 +10,6 @@ SpaceAfterTemplateKeyword: 'false'
 SpacesInCStyleCastParentheses: 'false'
 SpacesInSquareBrackets: 'false'
 TabWidth: '4'
-UseTab: ForIndentation
+UseTab: Never

 ...
@@ -3,7 +3,7 @@

 class ASTNode
 {
 public:
 ASTNode();
 ~ASTNode();
 };
@@ -1,6 +1,6 @@
 #include "BinaryOpNode.h"

-BinaryOpNode::BinaryOpNode(std::shared_ptr<ExprNode> left,std::shared_ptr<ExprNode> right)
+BinaryOpNode::BinaryOpNode(std::shared_ptr<ExprNode> left, std::shared_ptr<ExprNode> right)
 : left(left), right(right), ExprNode()
 {
 }
@@ -3,10 +3,11 @@

 class BinaryOpNode : public ExprNode
 {
 protected:
 std::shared_ptr<ExprNode> left;
 std::shared_ptr<ExprNode> right;
-public:
-BinaryOpNode(std::shared_ptr<ExprNode> left,std::shared_ptr<ExprNode> right);
+
+public:
+BinaryOpNode(std::shared_ptr<ExprNode> left, std::shared_ptr<ExprNode> right);
 ~BinaryOpNode();
 };
@@ -1,16 +1,13 @@
 #include "ExprNode.h"

-ExprNode::ExprNode(std::shared_ptr<ExprNode> child): child(child), ASTNode()
+ExprNode::ExprNode(std::shared_ptr<ExprNode> child) : child(child), ASTNode()
 {
-
 }

-ExprNode::ExprNode(): ASTNode()
+ExprNode::ExprNode() : ASTNode()
 {
-
 }

 ExprNode::~ExprNode()
 {
-
 }
@@ -4,7 +4,8 @@
 class ExprNode : public ASTNode
 {
 std::shared_ptr<ExprNode> child;
-public:
+
+public:
 ExprNode(std::shared_ptr<ExprNode> child);
 ExprNode();
 ~ExprNode();
@@ -1,7 +1,7 @@
 #include "MulNode.h"

-MulNode::MulNode(std::shared_ptr<ExprNode> left,std::shared_ptr<ExprNode> right, char op)
-: BinaryOpNode(left,right), op(op)
+MulNode::MulNode(std::shared_ptr<ExprNode> left, std::shared_ptr<ExprNode> right, char op)
+: BinaryOpNode(left, right), op(op)
 {
 }

@@ -4,7 +4,8 @@
 class MulNode final : public BinaryOpNode
 {
 char op;
-public:
-MulNode(std::shared_ptr<ExprNode> left,std::shared_ptr<ExprNode> right, char op);
+
+public:
+MulNode(std::shared_ptr<ExprNode> left, std::shared_ptr<ExprNode> right, char op);
 ~MulNode();
 };
@@ -1,2 +1 @@
 #include "NumberNode.h"
-
@@ -4,7 +4,7 @@

 class NumberNode : public ExprNode
 {
 public:
 NumberNode();
 ~NumberNode();
 };
@@ -1,11 +1,9 @@
 #include "StatementNode.h"

-StatementNode::StatementNode(std::shared_ptr<ExprNode> child): child(child), ASTNode()
+StatementNode::StatementNode(std::shared_ptr<ExprNode> child) : child(child), ASTNode()
 {
-
 }

 StatementNode::~StatementNode()
 {
-
 }
@@ -4,7 +4,8 @@
 class StatementNode : public ASTNode
 {
 std::shared_ptr<ExprNode> child;
-public:
+
+public:
 StatementNode(std::shared_ptr<ExprNode> child);
 ~StatementNode();
 };
@@ -1,7 +1,7 @@
 #include "SumNode.h"

-SumNode::SumNode(std::shared_ptr<ExprNode> left,std::shared_ptr<ExprNode> right, char op)
-: BinaryOpNode(left,right), op(op)
+SumNode::SumNode(std::shared_ptr<ExprNode> left, std::shared_ptr<ExprNode> right, char op)
+: BinaryOpNode(left, right), op(op)
 {
 }

@@ -4,7 +4,8 @@
 class SumNode final : public BinaryOpNode
 {
 char op;
-public:
-SumNode(std::shared_ptr<ExprNode> left,std::shared_ptr<ExprNode> right, char op);
+
+public:
+SumNode(std::shared_ptr<ExprNode> left, std::shared_ptr<ExprNode> right, char op);
 ~SumNode();
 };
@@ -9,19 +9,21 @@ bool Arguments::wimport;
 llvm::Triple Arguments::TargetTriple;
 std::string Arguments::cpu;

-void Arguments::parse(int argc, char **argv)
+void Arguments::parse(int argc, char** argv)
 {
-try {
-TCLAP::CmdLine command_line("The Sapphire compiler.",' ',"0.1");
+try
+{
+TCLAP::CmdLine command_line("The Sapphire compiler.", ' ', "0.1");

-TCLAP::UnlabeledValueArg<std::string> input_fname_arg("file","Input file.",true,"test.sp","string");
+TCLAP::UnlabeledValueArg<std::string> input_fname_arg("file", "Input file.", true, "test.sp", "string");

-TCLAP::ValueArg<std::string> output_fname_arg("o","output","Output file.",false,"sp-output","string");
-TCLAP::ValueArg<std::string> march_arg("","march","Architecture to compile for.",false,"native","string");
-TCLAP::ValueArg<std::string> mcpu_arg("","mcpu","CPU to compile for.",false,"generic","string");
-TCLAP::ValueArg<std::string> msystem_arg("","msystem","Operating System to compile for.",false,"native","string");
+TCLAP::ValueArg<std::string> output_fname_arg("o", "output", "Output file.", false, "sp-output", "string");
+TCLAP::ValueArg<std::string> march_arg("", "march", "Architecture to compile for.", false, "native", "string");
+TCLAP::ValueArg<std::string> mcpu_arg("", "mcpu", "CPU to compile for.", false, "generic", "string");
+TCLAP::ValueArg<std::string> msystem_arg("", "msystem", "Operating System to compile for.", false, "native",
+"string");

-TCLAP::SwitchArg wimport_arg("","wimport","Show a warning when trying to import an already imported file.");
+TCLAP::SwitchArg wimport_arg("", "wimport", "Show a warning when trying to import an already imported file.");

 command_line.add(wimport_arg);

@@ -32,7 +34,7 @@ void Arguments::parse(int argc, char **argv)
 command_line.add(mcpu_arg);
 command_line.add(msystem_arg);

-command_line.parse(argc,argv);
+command_line.parse(argc, argv);

 input_fname = input_fname_arg.getValue();
 output_fname = output_fname_arg.getValue();
@@ -40,9 +42,10 @@ void Arguments::parse(int argc, char **argv)

 cpu = mcpu_arg.getValue();

-setTriple(march_arg.getValue(),msystem_arg.getValue());
-
-} catch (TCLAP::ArgException &e) {
+setTriple(march_arg.getValue(), msystem_arg.getValue());
+}
+catch (TCLAP::ArgException& e)
+{
 Error::throw_error_without_location(e.error());
 }
 }
@@ -50,18 +53,18 @@ void Arguments::parse(int argc, char **argv)
 void Arguments::setTriple(const std::string& arch, const std::string& system)
 {
 std::string triple = llvm::sys::getDefaultTargetTriple();
-llvm::Triple tgTriple(triple);
+llvm::Triple targetTriple(triple);

-if(arch != "native")
+if (arch != "native")
 {
-tgTriple.setArchName(arch);
+targetTriple.setArchName(arch);
 }
-if(system != "native")
+if (system != "native")
 {
-tgTriple.setOSAndEnvironmentName(system);
+targetTriple.setOSAndEnvironmentName(system);
 }

-tgTriple.setVendor(llvm::Triple::VendorType::UnknownVendor); // let's leave it like that
+targetTriple.setVendor(llvm::Triple::VendorType::UnknownVendor); // let's leave it like that

-TargetTriple = tgTriple;
+TargetTriple = targetTriple;
 }
@@ -1,6 +1,6 @@
 #pragma once
-#include <string>
 #include <llvm/ADT/Triple.h>
+#include <string>

 struct Arguments
 {
@@ -14,6 +14,7 @@ struct Arguments
 static std::string cpu;

 static llvm::Triple TargetTriple;
-private:
+
+private:
 static void setTriple(const std::string& arch, const std::string& system);
 };
@@ -1,13 +1,16 @@
 #include "Error.h"
-#include "StringConversion.h"
 #include "Importer.h"
-#include <iostream>
+#include "StringConversion.h"
 #include <algorithm>
+#include <iostream>

 std::string Error::get_spaces(const int& num)
 {
 std::string output;
-for(int i = 0; i < num; i++) { output += " "; }
+for (int i = 0; i < num; i++)
+{
+output += " ";
+}
 return output;
 }

@@ -25,28 +28,29 @@ void Error::show_import_line(const Location& loc, std::ostream& output_stream)
 output_stream << std::endl;
 }

-void Error::show_import_lines(const Location& loc, void(*import_line_printer)(const Location&, std::ostream&), std::ostream& stream)
+void Error::show_import_lines(const Location& loc, void (*import_line_printer)(const Location&, std::ostream&),
+std::ostream& stream)
 {
 std::vector<Location> locations;
 Location scanned_loc = loc;
-while(scanned_loc.parent)
+while (scanned_loc.parent)
 {
 locations.push_back(*scanned_loc.parent.get());

 scanned_loc = *scanned_loc.parent.get();
 }

-std::reverse(locations.begin(),locations.end());
+std::reverse(locations.begin(), locations.end());

-for(const auto& location : locations)
+for (const auto& location : locations)
 {
-import_line_printer(location,stream);
+import_line_printer(location, stream);
 }
 }

 [[noreturn]] void Error::throw_error(const Location& loc, const std::string line_text, const std::string& details)
 {
-show_import_lines(loc,show_import_line,std::cerr);
+show_import_lines(loc, show_import_line, std::cerr);

 std::string linestr = int_to_string(loc.line);

@@ -89,12 +93,11 @@ void Error::show_import_lines(const Location& loc, void(*import_line_printer)(co
 std::cerr << std::endl;

 exit(1);
-
 }

 void Error::throw_warning(const Location& loc, const std::string line_text, const std::string& details)
 {
-show_import_lines(loc,show_import_line,std::cout);
+show_import_lines(loc, show_import_line, std::cout);

 std::string linestr = int_to_string(loc.line);

src/Error.h
@@ -3,15 +3,16 @@

 namespace Error
 {
 void show_import_line(const Location& loc, std::ostream& output_stream);

 [[noreturn]] void throw_error(const Location& loc, const std::string line_text, const std::string& details);

 [[noreturn]] void throw_error_without_location(const std::string& details);

 void throw_warning(const Location& loc, const std::string line_text, const std::string& details);

-void show_import_lines(const Location& loc, void(*import_line_printer)(const Location&, std::ostream&), std::ostream& stream);
+void show_import_lines(const Location& loc, void (*import_line_printer)(const Location&, std::ostream&),
+std::ostream& stream);

 std::string get_spaces(const int& num);
-}
+} // namespace Error
@@ -1,34 +1,36 @@
 #include "FileIO.h"
 #include "Error.h"
+#include <cstring>
+#include <errno.h>
+#include <filesystem>
 #include <fstream>
 #include <vector>
-#include <errno.h>
-#include <cstring>
-#include <filesystem>

 std::string FileIO::read_all(const std::string& filename)
 {
-if(std::filesystem::is_directory(std::filesystem::status(filename))) Error::throw_error_without_location("unable to open file "+ filename + ": Is a directory");
+if (std::filesystem::is_directory(std::filesystem::status(filename)))
+Error::throw_error_without_location("unable to open file " + filename + ": Is a directory");
 std::ifstream file;
 file.exceptions(std::ios::badbit | std::ios::failbit);
 try
 {
 file.open(filename);
 }
-catch(const std::exception& e)
+catch (const std::exception& e)
 {
-Error::throw_error_without_location("unable to open file "+ filename + ": " + strerror(errno));
+Error::throw_error_without_location("unable to open file " + filename + ": " + strerror(errno));
 return "";
 }
 file.exceptions(std::ios::goodbit);
 std::vector<char> file_chars;
 char fchar;
-while (file.good()) {
+while (file.good())
+{
 fchar = file.get();
-if(fchar != -1 ) file_chars.push_back(fchar);
+if (fchar != -1) file_chars.push_back(fchar);
 }
 file.close();
-return std::string(file_chars.begin(),file_chars.end());
+return std::string(file_chars.begin(), file_chars.end());
 }

 void FileIO::write_all(const std::string& filename, const std::string& contents)
@@ -37,3 +39,8 @@ void FileIO::write_all(const std::string& filename, const std::string& contents)
 file << contents;
 file.close();
 }
+
+std::string FileIO::remove_file_extension(const std::string& filename)
+{
+return filename.substr(0, filename.find_last_of('.'));
+}
src/FileIO.h
@@ -4,8 +4,10 @@
 /* Namespace for simple file operations. */
 namespace FileIO
 {
 /* Helper function to read all of a file's contents. */
 std::string read_all(const std::string& filename);
 /* Helper function to write a string to a file. */
 void write_all(const std::string& filename, const std::string& contents);
-}
+/* Return a filename without its extension. */
+std::string remove_file_extension(const std::string& filename);
+} // namespace FileIO
src/Importer.cpp
@@ -1,7 +1,7 @@
 #include "Importer.h"
+#include "Arguments.h"
 #include "Error.h"
 #include "FileIO.h"
-#include "Arguments.h"
 #include <algorithm>
 #include <fstream>
 #include <iostream>
@@ -18,139 +18,142 @@ TokenStream Importer::evaluate(const TokenStream& original)
 auto ret_tk = original;
 TokenStream new_tokens;

-while(original[i].tk_type != TT_EOF)
+while (original[i].tk_type != TT_EOF)
 {
 Token current_token = original[i];

-if(current_token.tk_type == TT_Import)
+if (current_token.tk_type == TT_Import)
 {
-Token next_token = original[i+1];
-if(next_token.tk_type == TT_EOF)
-Error::throw_error(current_token.loc,current_token.line(),"did not expect EOF after import statement");
+Token next_token = original[i + 1];
+if (next_token.tk_type == TT_EOF)
+Error::throw_error(current_token.loc, current_token.line(),
+"did not expect EOF after import statement");

-if(next_token.tk_type == TT_Identifier) // TODO: add support for strings
+if (next_token.tk_type == TT_Identifier) // TODO: add support for strings
 {
-Token last_token = original[i+2];
+Token last_token = original[i + 2];

-if(last_token.tk_type != TT_Semicolon)
-Error::throw_error(last_token.loc,last_token.line(),"expected a semicolon");
+if (last_token.tk_type != TT_Semicolon)
+Error::throw_error(last_token.loc, last_token.line(), "expected a semicolon");

-if(std::find(imported_files.begin(),imported_files.end(),next_token.string_value) != imported_files.end())
+if (std::find(imported_files.begin(), imported_files.end(), next_token.string_value) !=
+imported_files.end())
 {
-if(Arguments::wimport)
-Error::throw_warning(next_token.loc,next_token.line(),"file already imported, skipping");
+if (Arguments::wimport)
+Error::throw_warning(next_token.loc, next_token.line(), "file already imported, skipping");
 Token::erase(ret_tk[i]);
-Token::erase(ret_tk[i+1]);
-Token::erase(ret_tk[i+2]);
+Token::erase(ret_tk[i + 1]);
+Token::erase(ret_tk[i + 2]);
 ++i;
 continue;
 }

-if(import_count > MAX_IMPORTS)
-Error::throw_error(current_token.loc,current_token.line(),"maximum import depth exceeded");
+if (import_count > MAX_IMPORTS)
+Error::throw_error(current_token.loc, current_token.line(), "maximum import depth exceeded");

 std::string input_file_name = next_token.string_value + ".sp";

 std::ifstream input_file(input_file_name); // only used to check if it exists, thus closed afterwards
-if(!input_file.good())
-Error::throw_error(next_token.loc,next_token.line(),"file not found");
+if (!input_file.good()) Error::throw_error(next_token.loc, next_token.line(), "file not found");
 input_file.close();

 auto file_contents = FileIO::read_all(input_file_name);

-auto top_location = std::make_shared<Location>(current_token.loc.line,current_token.loc.column,current_token.loc.fname);
+auto top_location = std::make_shared<Location>(current_token.loc.line, current_token.loc.column,
+current_token.loc.fname);
 top_location.get()->parent = current_token.loc.parent;

 import_stack.push_back(top_location); // Keep ref_count above 0, just in case

 auto import_lexer = Lexer::make_lexer(input_file_name);

-Lexer::assign_parent_location(import_lexer,top_location);
+Lexer::assign_parent_location(import_lexer, top_location);

 TokenStream imported_tokens = import_lexer->lex(file_contents);

 imported_tokens.pop_back(); // remove EOF at end of token stream

-for(auto& tk : imported_tokens)
+for (auto& tk : imported_tokens)
 {
 tk.loc.parent = top_location;
 }

 imported_files.push_back(next_token.string_value);

-new_tokens.insert(new_tokens.end(),imported_tokens.begin(),imported_tokens.end());
+new_tokens.insert(new_tokens.end(), imported_tokens.begin(), imported_tokens.end());

 Token::erase(ret_tk[i]);
-Token::erase(ret_tk[i+1]);
-Token::erase(ret_tk[i+2]);
-} else if(next_token.tk_type == TT_Path)
+Token::erase(ret_tk[i + 1]);
+Token::erase(ret_tk[i + 2]);
+}
+else if (next_token.tk_type == TT_Path)
 {
-Token last_token = original[i+2];
+Token last_token = original[i + 2];

-if(last_token.tk_type != TT_Semicolon)
-Error::throw_error(last_token.loc,last_token.line(),"expected a semicolon");
+if (last_token.tk_type != TT_Semicolon)
+Error::throw_error(last_token.loc, last_token.line(), "expected a semicolon");

-if(std::find(imported_files.begin(),imported_files.end(),next_token.string_value) != imported_files.end())
+if (std::find(imported_files.begin(), imported_files.end(), next_token.string_value) !=
+imported_files.end())
 {
-if(Arguments::wimport)
-Error::throw_warning(next_token.loc,next_token.line(),"file already imported, skipping");
+if (Arguments::wimport)
+Error::throw_warning(next_token.loc, next_token.line(), "file already imported, skipping");
 Token::erase(ret_tk[i]);
-Token::erase(ret_tk[i+1]);
-Token::erase(ret_tk[i+2]);
+Token::erase(ret_tk[i + 1]);
+Token::erase(ret_tk[i + 2]);
 ++i;
 continue;
 }

-if(import_count > MAX_IMPORTS)
-Error::throw_error(current_token.loc,current_token.line(),"maximum import depth exceeded");
+if (import_count > MAX_IMPORTS)
+Error::throw_error(current_token.loc, current_token.line(), "maximum import depth exceeded");

 std::string input_file_name = next_token.string_value + ".sp";

 std::ifstream input_file(input_file_name); // only used to check if it exists, thus closed afterwards
-if(!input_file.good())
-Error::throw_error(next_token.loc,next_token.line(),"file not found");
+if (!input_file.good()) Error::throw_error(next_token.loc, next_token.line(), "file not found");
 input_file.close();

 auto file_contents = FileIO::read_all(input_file_name);

-auto top_location = std::make_shared<Location>(current_token.loc.line,current_token.loc.column,current_token.loc.fname);
+auto top_location = std::make_shared<Location>(current_token.loc.line, current_token.loc.column,
+current_token.loc.fname);
 top_location.get()->parent = current_token.loc.parent;

 import_stack.push_back(top_location); // Keep ref_count above 0, just in case

 auto import_lexer = Lexer::make_lexer(input_file_name);

-Lexer::assign_parent_location(import_lexer,top_location);
+Lexer::assign_parent_location(import_lexer, top_location);

 TokenStream imported_tokens = import_lexer->lex(file_contents);

 imported_tokens.pop_back(); // remove EOF at end of token stream

-for(auto& tk : imported_tokens)
+for (auto& tk : imported_tokens)
 {
 tk.loc.parent = top_location;
 }

 imported_files.push_back(next_token.string_value);

-new_tokens.insert(new_tokens.end(),imported_tokens.begin(),imported_tokens.end());
+new_tokens.insert(new_tokens.end(), imported_tokens.begin(), imported_tokens.end());

 Token::erase(ret_tk[i]);
-Token::erase(ret_tk[i+1]);
-Token::erase(ret_tk[i+2]);
-} else
-Error::throw_error(next_token.loc,next_token.line(),"import keyword should be followed by an identifier");
+Token::erase(ret_tk[i + 1]);
+Token::erase(ret_tk[i + 2]);
+}
+else
+Error::throw_error(next_token.loc, next_token.line(),
+"import keyword should be followed by an identifier");
 }

 ++i;


 }

-if(new_tokens.size() != 0)
+if (new_tokens.size() != 0)
 {
-new_tokens.insert(new_tokens.end(),ret_tk.begin(),ret_tk.end());
+new_tokens.insert(new_tokens.end(), ret_tk.begin(), ret_tk.end());
 import_count++;
 return evaluate(new_tokens);
 }
@@ -1,12 +1,12 @@
 #pragma once
-#include "Token.h"
 #include "Lexer.h"
+#include "Token.h"

 namespace Importer
 {
 extern int import_count;
 extern std::vector<std::shared_ptr<Location>> import_stack;
 extern std::vector<std::string> imported_files;
 TokenStream evaluate(const TokenStream& original);
 void init(std::string init_file);
-}
+} // namespace Importer
src/Lexer.cpp
@@ -7,10 +7,10 @@
 #define IDENTIFIERS "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWYZ_0123456789"
 #define DIGITS "0123456789"

-const std::array<std::string,TYPE_COUNT> Lexer::types = {"void","bool","str","i8","i16","i32","i64","u8","u16","u32","u64","f32","f64","f128"};
+const std::array<std::string, TYPE_COUNT> Lexer::types = {"void", "bool", "str", "i8", "i16", "i32", "i64",
+"u8", "u16", "u32", "u64", "f32", "f64", "f128"};

-Lexer::Lexer(const std::string& fname)
-: loc(1,0,fname), index(-1), prev_loc(1,0,fname)
+Lexer::Lexer(const std::string& fname) : loc(1, 0, fname), index(-1), prev_loc(1, 0, fname)
 {
 }

@@ -23,10 +23,10 @@ int Lexer::advance()
 prev_loc = loc;
 ++index;
 loc.advance();
-if(index >= current_lexed_text.size()) return 0;
+if (index >= current_lexed_text.size()) return 0;
 current_char = current_lexed_text[index];
 loc.pos_from_char(current_char);
-if(current_char == '\n')
+if (current_char == '\n')
 {
 previous_line_text = current_line_text;
 current_line_text = this->recalculate_current_line(current_lexed_text);
@@ -38,8 +38,8 @@ int Lexer::rewind()
 {
 loc = prev_loc;
 --index;
-if(index == -1) return 0;
-if(current_char == '\n')
+if (index == -1) return 0;
+if (current_char == '\n')
 {
 current_line_text = previous_line_text;
 }
@@ -52,7 +52,7 @@ std::string Lexer::recalculate_current_line(const std::string& text)
 int idx = index;
 std::string final_str;
 ++idx;
-while(idx != text.size() && text[idx] != '\n')
+while (idx != text.size() && text[idx] != '\n')
 {
 final_str += text[idx];
 ++idx;
@@ -82,99 +82,100 @@ TokenStream Lexer::lex(const std::string& text)
 current_lexed_text = text;
 current_line_text = this->recalculate_current_line(current_lexed_text);

-while(this->advance())
+while (this->advance())
 {
-if(this->current_char == '\n') comment = false;
+if (this->current_char == '\n') comment = false;

-if(comment) continue;
+if (comment) continue;

-if(is_in_string(WHITESPACE,current_char)) continue;
+if (is_in_string(WHITESPACE, current_char)) continue;

-else if(is_in_string(LETTERS,current_char))
+else if (is_in_string(LETTERS, current_char))
 {
 result.push_back(create_identifier());
 }

-else if(is_in_string(DIGITS,current_char))
+else if (is_in_string(DIGITS, current_char))
 {
 result.push_back(create_number());
 }

-else if(current_char == '\'')
+else if (current_char == '\'')
 {
 result.push_back(create_string());
 }

-else switch(current_char)
+else
+switch (current_char)
 {
 case '/':
-if(index + 1 != current_lexed_text.size())
+if (index + 1 != current_lexed_text.size())
 {
-if(current_lexed_text[index+1] == '/')
+if (current_lexed_text[index + 1] == '/')
 {
 comment = true;
 break;
 }
 }
-result.push_back(Token::make_with_line({TT_Div,loc},current_line_text));
+result.push_back(Token::make_with_line({TT_Div, loc}, current_line_text));
 break;
 case '+':
-result.push_back(Token::make_with_line({TT_Plus,loc},current_line_text));
+result.push_back(Token::make_with_line({TT_Plus, loc}, current_line_text));
 break;
 case '-':
-result.push_back(Token::make_with_line({TT_Minus,loc},current_line_text));
+result.push_back(Token::make_with_line({TT_Minus, loc}, current_line_text));
 break;
 case '*':
-result.push_back(Token::make_with_line({TT_Mul,loc},current_line_text));
+result.push_back(Token::make_with_line({TT_Mul, loc}, current_line_text));
 break;
 case '@':
-result.push_back(Token::make_with_line({TT_At,loc},current_line_text));
+result.push_back(Token::make_with_line({TT_At, loc}, current_line_text));
 break;
 case '=':
-result.push_back(Token::make_with_line({TT_Equal,loc},current_line_text));
+result.push_back(Token::make_with_line({TT_Equal, loc}, current_line_text));
 break;
 case '>':
-result.push_back(Token::make_with_line({TT_GreaterThan,loc},current_line_text));
+result.push_back(Token::make_with_line({TT_GreaterThan, loc}, current_line_text));
 break;
 case '<':
-result.push_back(Token::make_with_line({TT_LessThan,loc},current_line_text));
+result.push_back(Token::make_with_line({TT_LessThan, loc}, current_line_text));
 break;
 case '(':
-result.push_back(Token::make_with_line({TT_LParen,loc},current_line_text));
+result.push_back(Token::make_with_line({TT_LParen, loc}, current_line_text));
 break;
 case ')':
-result.push_back(Token::make_with_line({TT_RParen,loc},current_line_text));
+result.push_back(Token::make_with_line({TT_RParen, loc}, current_line_text));
 break;
 case '{':
-result.push_back(Token::make_with_line({TT_RBracket,loc},current_line_text));
+result.push_back(Token::make_with_line({TT_RBracket, loc}, current_line_text));
 break;
 case '}':
-result.push_back(Token::make_with_line({TT_LBracket,loc},current_line_text));
+result.push_back(Token::make_with_line({TT_LBracket, loc}, current_line_text));
 break;
 case ';':
-result.push_back(Token::make_with_line({TT_Semicolon,loc},current_line_text));
+result.push_back(Token::make_with_line({TT_Semicolon, loc}, current_line_text));
 break;
 case '.':
-result.push_back(Token::make_with_line({TT_Period,loc},current_line_text));
+result.push_back(Token::make_with_line({TT_Period, loc}, current_line_text));
 break;
 case ',':
-result.push_back(Token::make_with_line({TT_Comma,loc},current_line_text));
+result.push_back(Token::make_with_line({TT_Comma, loc}, current_line_text));
 break;
 case '!':
-result.push_back(Token::make_with_line({TT_Exclamation,loc},current_line_text));
+result.push_back(Token::make_with_line({TT_Exclamation, loc}, current_line_text));
 break;
 case '[':
-result.push_back(Token::make_with_line({TT_Exclamation,loc},current_line_text));
+result.push_back(Token::make_with_line({TT_Exclamation, loc}, current_line_text));
 break;
 case ']':
-result.push_back(Token::make_with_line({TT_Exclamation,loc},current_line_text));
+result.push_back(Token::make_with_line({TT_Exclamation, loc}, current_line_text));
 break;
 default:
-Error::throw_error(loc,current_line_text,"unknown character");
+Error::throw_error(loc, current_line_text, "unknown character");
 }
 }

-result.push_back(Token(TT_EOF,loc));
+result.push_back(Token(TT_EOF, loc));

 return result;
 }
@@ -191,22 +192,24 @@ Token Lexer::create_identifier()

 characters.push_back(current_char);

-while(this->advance())
+while (this->advance())
 {
-if(is_in_string(IDENTIFIERS,current_char))
+if (is_in_string(IDENTIFIERS, current_char))
 {
 characters.push_back(current_char);
 last_was_path = false;
 }
-else if(current_char == '/')
+else if (current_char == '/')
+{
+if (last_was_path)
 {
-if(last_was_path) {
 characters.pop_back();
 this->loc = saved_loc;
 this->prev_loc = saved_prev_loc;
 this->rewind();
 std::string identifier(characters.begin(), characters.end());
-return Token::make_with_line({TT_Path,identifier,{prev_line,prev_column,loc.fname}},current_line_text);
+return Token::make_with_line({TT_Path, identifier, {prev_line, prev_column, loc.fname}},
+current_line_text);
 }

 saved_loc = this->loc;
@@ -220,40 +223,62 @@ Token Lexer::create_identifier()
 {
 this->rewind();
 std::string identifier(characters.begin(), characters.end());
-if(is_path) return Token::make_with_line({TT_Path,identifier,{prev_line,prev_column,loc.fname}},current_line_text);
-auto location = std::find(types.begin(),types.end(),identifier);
-if(location != types.end())
+if (is_path)
+return Token::make_with_line({TT_Path, identifier, {prev_line, prev_column, loc.fname}},
+current_line_text);
+auto location = std::find(types.begin(), types.end(), identifier);
+if (location != types.end())
 {
-return Token::make_with_line({TT_Type,identifier,{prev_line,prev_column,loc.fname}},current_line_text);
+return Token::make_with_line({TT_Type, identifier, {prev_line, prev_column, loc.fname}},
+current_line_text);
 }
-if (identifier == "import") return Token::make_with_line({TT_Import,{prev_line,prev_column,loc.fname}},current_line_text);
-if (identifier == "syscall0") return Token::make_with_line({TT_Syscall0,{prev_line,prev_column,loc.fname}},current_line_text);
-if (identifier == "syscall1") return Token::make_with_line({TT_Syscall1,{prev_line,prev_column,loc.fname}},current_line_text);
-if (identifier == "syscall2") return Token::make_with_line({TT_Syscall2,{prev_line,prev_column,loc.fname}},current_line_text);
-if (identifier == "syscall3") return Token::make_with_line({TT_Syscall3,{prev_line,prev_column,loc.fname}},current_line_text);
-if (identifier == "syscall4") return Token::make_with_line({TT_Syscall4,{prev_line,prev_column,loc.fname}},current_line_text);
-if (identifier == "syscall5") return Token::make_with_line({TT_Syscall5,{prev_line,prev_column,loc.fname}},current_line_text);
-if( identifier == "compmacro" ) return Token::make_with_line({TT_CompilerMacro,{prev_line,prev_column,loc.fname}},current_line_text);
-return Token::make_with_line({TT_Identifier,identifier,{prev_line,prev_column,loc.fname}},current_line_text);
+if (identifier == "import")
+return Token::make_with_line({TT_Import, {prev_line, prev_column, loc.fname}}, current_line_text);
+if (identifier == "syscall0")
+return Token::make_with_line({TT_Syscall0, {prev_line, prev_column, loc.fname}}, current_line_text);
+if (identifier == "syscall1")
+return Token::make_with_line({TT_Syscall1, {prev_line, prev_column, loc.fname}}, current_line_text);
+if (identifier == "syscall2")
+return Token::make_with_line({TT_Syscall2, {prev_line, prev_column, loc.fname}}, current_line_text);
+if (identifier == "syscall3")
+return Token::make_with_line({TT_Syscall3, {prev_line, prev_column, loc.fname}}, current_line_text);
+if (identifier == "syscall4")
+return Token::make_with_line({TT_Syscall4, {prev_line, prev_column, loc.fname}}, current_line_text);
+if (identifier == "syscall5")
+return Token::make_with_line({TT_Syscall5, {prev_line, prev_column, loc.fname}}, current_line_text);
+if (identifier == "compmacro")
+return Token::make_with_line({TT_CompilerMacro, {prev_line, prev_column, loc.fname}},
+current_line_text);
+return Token::make_with_line({TT_Identifier, identifier, {prev_line, prev_column, loc.fname}},
+current_line_text);
 }
 }

 std::string identifier(characters.begin(), characters.end());
-if(is_path) return Token::make_with_line({TT_Path,identifier,{prev_line,prev_column,loc.fname}},current_line_text);
-auto location = std::find(types.begin(),types.end(),identifier);
-if(location != types.end())
+if (is_path)
+return Token::make_with_line({TT_Path, identifier, {prev_line, prev_column, loc.fname}}, current_line_text);
+auto location = std::find(types.begin(), types.end(), identifier);
+if (location != types.end())
 {
-return Token::make_with_line({TT_Type,identifier,{prev_line,prev_column,loc.fname}},current_line_text);
+return Token::make_with_line({TT_Type, identifier, {prev_line, prev_column, loc.fname}}, current_line_text);
 }
-if (identifier == "import") return Token::make_with_line({TT_Import,{prev_line,prev_column,loc.fname}},current_line_text);
-if (identifier == "syscall0") return Token::make_with_line({TT_Syscall0,{prev_line,prev_column,loc.fname}},current_line_text);
-if (identifier == "syscall1") return Token::make_with_line({TT_Syscall1,{prev_line,prev_column,loc.fname}},current_line_text);
-if (identifier == "syscall2") return Token::make_with_line({TT_Syscall2,{prev_line,prev_column,loc.fname}},current_line_text);
-if (identifier == "syscall3") return Token::make_with_line({TT_Syscall3,{prev_line,prev_column,loc.fname}},current_line_text);
-if (identifier == "syscall4") return Token::make_with_line({TT_Syscall4,{prev_line,prev_column,loc.fname}},current_line_text);
-if (identifier == "syscall5") return Token::make_with_line({TT_Syscall5,{prev_line,prev_column,loc.fname}},current_line_text);
-if( identifier == "compmacro" ) return Token::make_with_line({TT_CompilerMacro,{prev_line,prev_column,loc.fname}},current_line_text);
-return Token::make_with_line({TT_Identifier,identifier,{prev_line,prev_column,loc.fname}},current_line_text);
+if (identifier == "import")
+return Token::make_with_line({TT_Import, {prev_line, prev_column, loc.fname}}, current_line_text);
+if (identifier == "syscall0")
+return Token::make_with_line({TT_Syscall0, {prev_line, prev_column, loc.fname}}, current_line_text);
+if (identifier == "syscall1")
+return Token::make_with_line({TT_Syscall1, {prev_line, prev_column, loc.fname}}, current_line_text);
+if (identifier == "syscall2")
+return Token::make_with_line({TT_Syscall2, {prev_line, prev_column, loc.fname}}, current_line_text);
+if (identifier == "syscall3")
+return Token::make_with_line({TT_Syscall3, {prev_line, prev_column, loc.fname}}, current_line_text);
+if (identifier == "syscall4")
+return Token::make_with_line({TT_Syscall4, {prev_line, prev_column, loc.fname}}, current_line_text);
+if (identifier == "syscall5")
+return Token::make_with_line({TT_Syscall5, {prev_line, prev_column, loc.fname}}, current_line_text);
+if (identifier == "compmacro")
+return Token::make_with_line({TT_CompilerMacro, {prev_line, prev_column, loc.fname}}, current_line_text);
+return Token::make_with_line({TT_Identifier, identifier, {prev_line, prev_column, loc.fname}}, current_line_text);
 }

 Token Lexer::create_number()
@@ -265,46 +290,49 @@ Token Lexer::create_number()

 characters.push_back(current_char);

-while(this->advance())
+while (this->advance())
 {
-if(is_in_string(DIGITS,current_char))
+if (is_in_string(DIGITS, current_char))
 {
 characters.push_back(current_char);
 }
 else if (current_char == '.')
 {
-if(dot_count == 0)
+if (dot_count == 0)
 {
 characters.push_back(current_char);
 ++dot_count;
 }
-else {
-Error::throw_warning(loc,current_line_text,"floats can only have one dot");
+else
+{
+Error::throw_warning(loc, current_line_text, "floats can only have one dot");
 this->rewind();
 float tk_value = std::stof(std::string(characters.begin(), characters.end()).c_str());
-return Token::make_with_line({TT_Float,tk_value,{prev_line,prev_column,loc.fname}},current_line_text);
+return Token::make_with_line({TT_Float, tk_value, {prev_line, prev_column, loc.fname}},
+current_line_text);
 }
 }
 else
 {
 this->rewind();
-if(dot_count != 0)
+if (dot_count != 0)
 {
 float tk_value = std::stof(std::string(characters.begin(), characters.end()).c_str());
-return Token::make_with_line({TT_Float,tk_value,{prev_line,prev_column,loc.fname}},current_line_text);
+return Token::make_with_line({TT_Float, tk_value, {prev_line, prev_column, loc.fname}},
+current_line_text);
 }
 int tk_value = atoi(std::string(characters.begin(), characters.end()).c_str());
-return Token::make_with_line({TT_Number,tk_value,{prev_line,prev_column,loc.fname}},current_line_text);
+return Token::make_with_line({TT_Number, tk_value, {prev_line, prev_column, loc.fname}}, current_line_text);
 }
 }

-if(dot_count != 0)
+if (dot_count != 0)
 {
 float tk_value = std::stof(std::string(characters.begin(), characters.end()).c_str());
-return Token::make_with_line({TT_Float,tk_value,{prev_line,prev_column,loc.fname}},current_line_text);
+return Token::make_with_line({TT_Float, tk_value, {prev_line, prev_column, loc.fname}}, current_line_text);
 }
 int tk_value = atoi(std::string(characters.begin(), characters.end()).c_str());
-return Token::make_with_line({TT_Number,tk_value,{prev_line,prev_column,loc.fname}},current_line_text);
+return Token::make_with_line({TT_Number, tk_value, {prev_line, prev_column, loc.fname}}, current_line_text);
 }

 Token Lexer::create_string()
@@ -313,25 +341,26 @@ Token Lexer::create_string()
 int prev_line = loc.line;
 int prev_column = loc.column;

-while(this->advance())
+while (this->advance())
 {
-if(current_char == '\n')
+if (current_char == '\n')
 {
 this->rewind();
-Error::throw_error(loc,current_line_text,"expected end of string but got newline");
+Error::throw_error(loc, current_line_text, "expected end of string but got newline");
 }
-if(current_char == '\'')
+if (current_char == '\'')
 {
 std::string identifier(characters.begin(), characters.end());
-return Token::make_with_line({TT_String,identifier,{prev_line,prev_column,loc.fname}},current_line_text);
+return Token::make_with_line({TT_String, identifier, {prev_line, prev_column, loc.fname}},
+current_line_text);
 }
-if(current_char == '\\')
+if (current_char == '\\')
 {
-if(index + 1 == current_lexed_text.size())
+if (index + 1 == current_lexed_text.size())
 {
-Error::throw_error(loc,current_line_text,"unfinished escape sequence");
+Error::throw_error(loc, current_line_text, "unfinished escape sequence");
 }
-switch(current_lexed_text[index+1])
+switch (current_lexed_text[index + 1])
 {
 case 'n':
 characters.push_back('\n');
@@ -343,7 +372,7 @@ Token Lexer::create_string()
 characters.push_back('\\');
 break;
 default:
-Error::throw_error(loc,current_line_text,"unknown escape sequence");
+Error::throw_error(loc, current_line_text, "unknown escape sequence");
 }
 ++index;
 ++loc.column;
@@ -353,7 +382,7 @@ Token Lexer::create_string()
 characters.push_back(current_char);
 }
 this->rewind();
-Error::throw_error(loc,current_line_text,"expected end of string but got EOF");
+Error::throw_error(loc, current_line_text, "expected end of string but got EOF");

-return Token(TT_Null,loc); // unreachable since Error::throw_error calls exit()
+return Token(TT_Null, loc); // unreachable since Error::throw_error calls exit()
 }
src/Lexer.h
@@ -1,19 +1,20 @@
 #pragma once
 #include "Token.h"
-#include <memory>
-#include <vector>
-#include <string>
 #include <array>
+#include <memory>
+#include <string>
+#include <vector>

 /* Let's redefine TokenStream, as if it wasn't already defined in Token.h*/
 typedef std::vector<Token> TokenStream;
 /* The number of data types currently in Sapphire. */
 #define TYPE_COUNT 14

-/* The Lexer for the Sapphire compiler. A Lexer reads source code from a file, and turns it into a stream of tokens the compiler can understand. */
+/* The Lexer for the Sapphire compiler. A Lexer reads source code from a file, and turns it into a stream of tokens the
+ * compiler can understand. */
 class Lexer
 {
 private:
 Location loc;
 Location prev_loc;

@@ -36,9 +37,10 @@ private:
 Token create_identifier();

 bool is_in_string(const std::string& string, const char& character);
-public:
+
+public:
 /* An array containing Sapphire's current data types. */
-static const std::array<std::string,TYPE_COUNT> types;
+static const std::array<std::string, TYPE_COUNT> types;

 ~Lexer();

@@ -2,8 +2,7 @@
 #include "StringConversion.h"
 #include <sstream>

-Location::Location(int ln, int col, std::string file)
-: line(ln), column(col), fname(file)
+Location::Location(int ln, int col, std::string file) : line(ln), column(col), fname(file)
 {
 }

@@ -34,14 +33,14 @@ void Location::advance()

 void Location::pos_from_char(const char& character)
 {
-if(character == '\n')
+if (character == '\n')
 {
 ++line;
 column = 0;
 }
 }

-void Location::operator=(const Location &other)
+void Location::operator=(const Location& other)
 {
 this->parent = other.parent;
 this->line = other.line;
@@ -49,7 +48,7 @@ void Location::operator=(const Location &other)
 this->fname.assign(other.fname.c_str());
 }

-void Location::copy(const Location &other)
+void Location::copy(const Location& other)
 {
 this->operator=(other);
 }
@@ -1,6 +1,6 @@
 #pragma once
-#include <string>
 #include <memory>
+#include <string>

 /* Struct to represent a location in a file. */
 struct Location
@@ -7,16 +7,16 @@ TokenStream Normalizer::normalize(const TokenStream& input)
 while (i < input.size())
 {
 Token current = input[i];
-if(current.tk_type == TT_Null)
+if (current.tk_type == TT_Null)
 {
 i++;
 continue;
 }
-if(current.tk_type == TT_Equal)
+if (current.tk_type == TT_Equal)
 {
-if(i+1 != input.size())
+if (i + 1 != input.size())
 {
-if(input[i+1].tk_type == TT_Equal)
+if (input[i + 1].tk_type == TT_Equal)
 {
 i += 2;
 result.push_back(current.copy_with_new_type(TT_Equals));
@ -27,11 +27,11 @@ TokenStream Normalizer::normalize(const TokenStream& input)
|
|||||||
result.push_back(current);
|
result.push_back(current);
|
||||||
continue;
|
continue;
|
||||||
}
|
}
|
||||||
if(current.tk_type == TT_Exclamation)
|
if (current.tk_type == TT_Exclamation)
|
||||||
{
|
{
|
||||||
if(i+1 != input.size())
|
if (i + 1 != input.size())
|
||||||
{
|
{
|
||||||
if(input[i+1].tk_type == TT_Equal)
|
if (input[i + 1].tk_type == TT_Equal)
|
||||||
{
|
{
|
||||||
i += 2;
|
i += 2;
|
||||||
result.push_back(current.copy_with_new_type(TT_NEqual));
|
result.push_back(current.copy_with_new_type(TT_NEqual));
|
||||||
@ -42,11 +42,11 @@ TokenStream Normalizer::normalize(const TokenStream& input)
|
|||||||
result.push_back(current);
|
result.push_back(current);
|
||||||
continue;
|
continue;
|
||||||
}
|
}
|
||||||
if(current.tk_type == TT_GreaterThan)
|
if (current.tk_type == TT_GreaterThan)
|
||||||
{
|
{
|
||||||
if(i+1 != input.size())
|
if (i + 1 != input.size())
|
||||||
{
|
{
|
||||||
if(input[i+1].tk_type == TT_Equal)
|
if (input[i + 1].tk_type == TT_Equal)
|
||||||
{
|
{
|
||||||
i += 2;
|
i += 2;
|
||||||
result.push_back(current.copy_with_new_type(TT_GTE));
|
result.push_back(current.copy_with_new_type(TT_GTE));
|
||||||
@ -57,11 +57,11 @@ TokenStream Normalizer::normalize(const TokenStream& input)
|
|||||||
result.push_back(current);
|
result.push_back(current);
|
||||||
continue;
|
continue;
|
||||||
}
|
}
|
||||||
if(current.tk_type == TT_LessThan)
|
if (current.tk_type == TT_LessThan)
|
||||||
{
|
{
|
||||||
if(i+1 != input.size())
|
if (i + 1 != input.size())
|
||||||
{
|
{
|
||||||
if(input[i+1].tk_type == TT_Equal)
|
if (input[i + 1].tk_type == TT_Equal)
|
||||||
{
|
{
|
||||||
i += 2;
|
i += 2;
|
||||||
result.push_back(current.copy_with_new_type(TT_LTE));
|
result.push_back(current.copy_with_new_type(TT_LTE));
|
||||||
|
src/Normalizer.h
@@ -1,14 +1,14 @@
 #pragma once
-#include "Token.h"
 #include "Lexer.h" // for TokenStream
+#include "Token.h"

 /* Namespace to normalize a TokenStream. */
 namespace Normalizer
 {
     /* Some tokens are difficult for the Lexer to parse right, or maybe I'm just lazy.
-       Anyways, this function transforms > and = tokens next to each other into a single >=, which has a different meaning, etc...
-       For example: = + = : ==, < + = : <=...
+       Anyways, this function transforms > and = tokens next to each other into a single >=, which has a different meaning,
+       etc... For example: = + = : ==, < + = : <=...

        It also takes blank tokens and removes them. */
     TokenStream normalize(const TokenStream& input);
-}
+} // namespace Normalizer
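The normalize() routine documented above is implemented in Normalizer.cpp as four near-identical blocks (==, !=, >=, <=). Not part of this commit, but a hedged sketch of how the same pair-folding could be table-driven; PairRule and fold_pair are invented names, everything they touch (TokenStream, Token, tk_type, copy_with_new_type, the TT_ enumerators) comes from the headers in this diff:

#include "Lexer.h" // for TokenStream
#include "Token.h"

// Hypothetical helper, not in this commit: folds "X followed by =" token pairs
// (= =, ! =, > =, < =) into their combined types (==, !=, >=, <=).
struct PairRule
{
    TokenType first;
    TokenType combined;
};

static const PairRule pair_rules[] = {
    { TT_Equal, TT_Equals },
    { TT_Exclamation, TT_NEqual },
    { TT_GreaterThan, TT_GTE },
    { TT_LessThan, TT_LTE },
};

// Appends the folded token and returns true when input[i] starts a matching pair;
// the caller then advances i by 2 instead of 1.
static bool fold_pair(const TokenStream& input, size_t i, TokenStream& result)
{
    if (i + 1 == input.size() || input[i + 1].tk_type != TT_Equal) return false;

    for (const PairRule& rule : pair_rules)
    {
        if (input[i].tk_type == rule.first)
        {
            Token current = input[i]; // copy_with_new_type() is non-const, so work on a copy
            result.push_back(current.copy_with_new_type(rule.combined));
            return true;
        }
    }
    return false;
}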
src/Parser.cpp
@@ -1,7 +1,6 @@
 #include "Parser.h"

-Parser::Parser(const TokenStream& tokens)
-    : tokens(tokens)
+Parser::Parser(const TokenStream& tokens) : tokens(tokens)
 {
 }

@@ -11,14 +10,15 @@ Parser::~Parser()

 std::shared_ptr<Parser> Parser::new_parser(const TokenStream& tokens)
 {
-    return std::shared_ptr<Parser>(new Parser(tokens)); // As always, not using std::make_shared 'cause constructor is private
+    return std::shared_ptr<Parser>(
+        new Parser(tokens)); // As always, not using std::make_shared 'cause constructor is private
 }

 std::shared_ptr<ASTNode> Parser::parse()
 {
     auto result = walk_expr();

-    if(result.is_error())
+    if (result.is_error())
     {
         result.ethrow();
     }
@@ -28,7 +28,8 @@ std::shared_ptr<ASTNode> Parser::parse()

 Parser::ErrorOr<ExprNode> Parser::walk_expr()
 {
-    return ErrorOr<ExprNode>(new ExprNode()); // constructor does not want to accept a shared_ptr<T> in the argument list, thats why im not using make_shared here
+    return ErrorOr<ExprNode>(new ExprNode()); // constructor does not want to accept a shared_ptr<T> in the argument
+                                              // list, thats why im not using make_shared here
 }

 Parser::ErrorOr<NumberNode> Parser::walk_number()
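The comment in new_parser() reflects a real constraint rather than a style preference: std::make_shared has to invoke the constructor itself, and it is neither a member nor a friend of Parser, so it cannot reach the private constructor. A member function can:

// Inside a Parser member such as new_parser(), this compiles:
std::shared_ptr<Parser> p(new Parser(tokens));

// This would not, because std::make_shared must call the private constructor itself:
// auto p = std::make_shared<Parser>(tokens); // error: 'Parser::Parser(const TokenStream&)' is private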
src/Parser.h
@@ -1,16 +1,16 @@
 #pragma once
-#include <memory>
-#include "Lexer.h"
 #include "AST/NumberNode.h"
 #include "Error.h"
+#include "Lexer.h"
 #include <cassert>
+#include <memory>

 /* Parser class for the Sapphire compiler. */
 class Parser
 {
-    /* Struct to store a parsing result which can be either a parsing error or a success, in which case it contains a pointer to the result. */
-    template<typename T>
-    struct ErrorOr
+    /* Struct to store a parsing result which can be either a parsing error or a success, in which case it contains a
+     * pointer to the result. */
+    template<typename T> struct ErrorOr
     {
         /* Return the stored pointer. */
         std::shared_ptr<T> get()
@@ -19,20 +19,29 @@ class Parser
             return m_ptr;
         }

-        /* Call Error::throw_error() with the stored error's location, line text, and the error string provided to this struct instance. */
+        /* Call Error::throw_error() with the stored error's location, line text, and the error string provided to this
+         * struct instance. */
         void ethrow()
         {
             assert(m_is_error);
-            Error::throw_error(error_tok->loc,error_tok->line(),m_error);
+            Error::throw_error(error_tok->loc, error_tok->line(), m_error);
         }

         /* Construct a new successful ErrorOr with a heap-allocated pointer to the result class. */
-        ErrorOr(T* ptr) : m_ptr(ptr), m_is_error(false) {}
+        ErrorOr(T* ptr) : m_ptr(ptr), m_is_error(false)
+        {
+        }
         /* Construct a new failed ErrorOr with the error details and the token where parsing failed. */
-        ErrorOr(const std::string& error, const Token& error_tok) : m_error(error), m_is_error(true), error_tok(error_tok) {}
+        ErrorOr(const std::string& error, const Token& error_tok)
+            : m_error(error), m_is_error(true), error_tok(error_tok)
+        {
+        }

         /* Is this ErrorOr instance successful or failed? */
-        bool is_error() { return m_is_error; }
+        bool is_error()
+        {
+            return m_is_error;
+        }

     private:
         bool m_is_error;
@@ -40,7 +49,8 @@ class Parser
         std::shared_ptr<Token> error_tok;
         std::shared_ptr<T> m_ptr;
     };
-private:
+
+private:
     Parser(const TokenStream& tokens);
     TokenStream tokens;

@@ -52,7 +62,8 @@ private:

     void save_current_position();
     void restore_current_position();
-public:
+
+public:
     ~Parser();

     /* Construct a new Parser with the given TokenStream. */
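Pieced together from Parser.cpp and the declarations above, the intended ErrorOr flow looks roughly like this (a fragment rather than a standalone program; the failure message and the surrounding routine are made up):

// Producing a result inside a parsing routine:
return ErrorOr<ExprNode>(new ExprNode());                    // success: ErrorOr owns the node via its shared_ptr
return ErrorOr<ExprNode>("expected an expression", current); // failure: error text plus the offending token

// Consuming it, as Parser::parse() does:
auto result = walk_expr();
if (result.is_error())
{
    result.ethrow(); // forwards the token's location, line text and message to Error::throw_error()
}
auto node = result.get(); // on success, the stored shared_ptr<ExprNode>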
src/StringConversion.cpp
@@ -4,13 +4,13 @@
 std::string int_to_string(const int& value)
 {
     char buffer[12];
-    std::sprintf(buffer,"%d",value);
+    std::sprintf(buffer, "%d", value);
     return {buffer};
 }

 std::string float_to_string(const float& value)
 {
     char buffer[50];
-    std::sprintf(buffer,"%f",value);
+    std::sprintf(buffer, "%f", value);
     return {buffer};
 }
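Not part of this commit, but worth noting while these helpers are being touched: the same conversions can be written with std::to_string, which removes the need to size buffer[12] and buffer[50] by hand and, for floats, produces the same six-decimal "%f"-style output as the sprintf calls above. A possible alternative implementation:

#include <string>

std::string int_to_string(const int& value)
{
    return std::to_string(value); // equivalent to sprintf "%d"
}

std::string float_to_string(const float& value)
{
    return std::to_string(value); // equivalent to sprintf "%f" (six decimal places)
}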
src/Token.cpp
@@ -1,63 +1,26 @@
 #include "Token.h"
-#include "StringConversion.h"
 #include "FormatString/FormatString.hpp"
+#include "StringConversion.h"
 #include "replace.h"

 const std::string token_strings[] = {
-    "TT_IDENTIFIER",
-    "TT_NUMBER",
-    "TT_FLOAT",
-    "TT_KEYWORD",
-    "TT_STRING",
-    "TT_PLUS",
-    "TT_MINUS",
-    "TT_MUL",
-    "TT_DIV",
-    "TT_AT",
-    "TT_EQUAL",
-    "TT_LESSTHAN",
-    "TT_GREATERTHAN",
-    "TT_LPAREN",
-    "TT_RPAREN",
-    "TT_LBRACKET",
-    "TT_RBRACKET",
-    "TT_SEMICOLON",
-    "TT_LOADEDSTRING",
-    "TT_EOF",
-    "TT_NULL",
-    "TT_EQUALS",
-    "TT_GTE",
-    "TT_LTE",
-    "TT_PERIOD",
-    "TT_COMMA",
-    "TT_PATH",
-    "TT_EXCLAMATION",
-    "TT_NEQUAL",
-    "TT_LSQB",
-    "TT_RSQB",
-    "TT_TYPE",
-    "TT_IMPORT",
-    "TT_SYSCALL0",
-    "TT_SYSCALL1",
-    "TT_SYSCALL2",
-    "TT_SYSCALL3",
-    "TT_SYSCALL4",
-    "TT_SYSCALL5",
-    "TT_COMPILERMACRO"
-};
+    "TT_IDENTIFIER", "TT_NUMBER", "TT_FLOAT", "TT_KEYWORD", "TT_STRING", "TT_PLUS",
+    "TT_MINUS", "TT_MUL", "TT_DIV", "TT_AT", "TT_EQUAL", "TT_LESSTHAN",
+    "TT_GREATERTHAN", "TT_LPAREN", "TT_RPAREN", "TT_LBRACKET", "TT_RBRACKET", "TT_SEMICOLON",
+    "TT_LOADEDSTRING", "TT_EOF", "TT_NULL", "TT_EQUALS", "TT_GTE", "TT_LTE",
+    "TT_PERIOD", "TT_COMMA", "TT_PATH", "TT_EXCLAMATION", "TT_NEQUAL", "TT_LSQB",
+    "TT_RSQB", "TT_TYPE", "TT_IMPORT", "TT_SYSCALL0", "TT_SYSCALL1", "TT_SYSCALL2",
+    "TT_SYSCALL3", "TT_SYSCALL4", "TT_SYSCALL5", "TT_COMPILERMACRO"};

-Token::Token(const TokenType& type)
-    : tk_type(type), loc(0,0,"")
+Token::Token(const TokenType& type) : tk_type(type), loc(0, 0, "")
 {
 }

-Token::Token(const TokenType& type, const Location& location)
-    : tk_type(type), loc(location)
+Token::Token(const TokenType& type, const Location& location) : tk_type(type), loc(location)
 {
 }

-Token::Token(const TokenType& type, const std::string& val)
-    : tk_type(type), loc(0,0,""), string_value(val)
+Token::Token(const TokenType& type, const std::string& val) : tk_type(type), loc(0, 0, ""), string_value(val)
 {
 }

@@ -82,7 +45,7 @@ Token::~Token()

 Token Token::copy_with_new_type(const TokenType& type)
 {
-    Token result(type,loc);
+    Token result(type, loc);

     result.int_value = int_value;
     result.float_value = float_value;
@@ -96,32 +59,32 @@ Token Token::copy_with_new_type(const TokenType& type)
 std::string Token::to_string() const
 {
     std::string details = loc.to_parenthesized_string();
-    if(tk_type == TT_Number)
+    if (tk_type == TT_Number)
     {
-        return format_string("INT:%d %s",int_value,details);
+        return format_string("INT:%d %s", int_value, details);
     }
     else if (tk_type == TT_Float)
     {
-        return format_string("FLOAT:%f %s",float_value,details);
+        return format_string("FLOAT:%f %s", float_value, details);
     }
     else if (tk_type == TT_Identifier)
     {
-        return format_string("ID:%s %s",string_value,details);
+        return format_string("ID:%s %s", string_value, details);
     }
     else if (tk_type == TT_Keyword)
     {
-        return format_string("KEYWORD:%s %s",string_value,details);
+        return format_string("KEYWORD:%s %s", string_value, details);
     }
     else if (tk_type == TT_Type)
     {
-        return format_string("TYPE:%s %s",string_value,details);
+        return format_string("TYPE:%s %s", string_value, details);
     }
     else if (tk_type == TT_String)
     {
-        replace(const_cast<std::string&>(string_value),"\n","\\n");
-        return format_string("STRING:'%s' %s",string_value,details);
+        replace(const_cast<std::string&>(string_value), "\n", "\\n");
+        return format_string("STRING:'%s' %s", string_value, details);
     }
-    switch(tk_type)
+    switch (tk_type)
     {
         case TT_EOF:
             return "EOF " + details;
@@ -200,7 +163,7 @@ std::string Token::line() const

 Token Token::make_with_line(const Token& origin, const std::string& line_text)
 {
-    Token result(origin.tk_type,origin.loc);
+    Token result(origin.tk_type, origin.loc);

     result.int_value = origin.int_value;
     result.float_value = origin.float_value;
@@ -227,11 +190,11 @@ void Token::erase(Token& tk)

 bool Token::match_token_types(const std::vector<Token>& a, const std::vector<Token>& b, int count)
 {
-    int size = [](int a, int b){ return a > b ? b : a; }(a.size() - count,b.size());
+    int size = [](int a, int b) { return a > b ? b : a; }(a.size() - count, b.size());

-    for(int i = 0; i < size; ++i)
+    for (int i = 0; i < size; ++i)
     {
-        if(a[i+count].tk_type != b[i].tk_type)
+        if (a[i + count].tk_type != b[i].tk_type)
         {
             return false;
         }
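One hazard the reflowed token_strings initializer makes easier to miss: the array has to stay index-aligned with the TokenType enum. A compile-time guard along these lines could catch drift, assuming a trailing TT_COUNT sentinel were added to the enum (this codebase does not necessarily define one):

// Hypothetical guard, not in this commit; TT_COUNT would be a final "count" enumerator in TokenType.
static_assert(sizeof(token_strings) / sizeof(token_strings[0]) == TT_COUNT,
              "token_strings must have exactly one entry per TokenType value");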
src/Token.h
@@ -94,11 +94,11 @@ struct Token
     /* Return a copy of this Token, but with its TokenType changed. */
     Token copy_with_new_type(const TokenType& type);

-    /* Iterate over two vectors of Tokens, starting from count for vector A, starting from 0 for vector B, checking if the current Tokens' types match.
-       If at any point they don't, return false. Else, return true. */
+    /* Iterate over two vectors of Tokens, starting from count for vector A, starting from 0 for vector B, checking if
+       the current Tokens' types match. If at any point they don't, return false. Else, return true. */
     static bool match_token_types(const std::vector<Token>& a, const std::vector<Token>& b, int count);

 private:
     std::string line_text;
 };

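A small usage sketch of match_token_types() as documented above; 'stream' is assumed to be a std::vector<Token> already in scope, and the pattern is invented:

// Does the token sequence starting at index 2 of 'stream' begin with "identifier ="?
std::vector<Token> pattern = { Token(TT_Identifier), Token(TT_Equal) };
bool looks_like_assignment = Token::match_token_types(stream, pattern, 2);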
src/replace.cpp
@@ -1,9 +1,9 @@
 #include "replace.h"

-bool replace(std::string& str, const std::string& from, const std::string& to) {
+bool replace(std::string& str, const std::string& from, const std::string& to)
+{
     size_t start_pos = str.find(from);
-    if(start_pos == std::string::npos)
-        return false;
+    if (start_pos == std::string::npos) return false;
     str.replace(start_pos, from.length(), to);
     return true;
 }
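Worth noting while this helper is being touched: replace() rewrites only the first occurrence of 'from', so Token::to_string above can still emit a raw newline when a string literal contains more than one. A possible replace_all variant (not part of this commit) would loop until find() fails:

#include <string>

// Hypothetical helper, not in this commit: rewrites every occurrence of 'from'.
void replace_all(std::string& str, const std::string& from, const std::string& to)
{
    if (from.empty()) return; // avoid an infinite loop on an empty needle

    size_t start_pos = 0;
    while ((start_pos = str.find(from, start_pos)) != std::string::npos)
    {
        str.replace(start_pos, from.length(), to);
        start_pos += to.length(); // skip past the insertion so 'to' is never re-matched
    }
}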
src/main.cpp
@@ -1,24 +1,26 @@
-#include "Lexer.h"
+#include "Arguments.h"
 #include "FileIO.h"
 #include "Importer.h"
-#include "Arguments.h"
+#include "Lexer.h"
 #include "Normalizer.h"
 #include <iostream>

 int main(int argc, char** argv)
 {
-    Arguments::parse(argc,argv);
+    Arguments::parse(argc, argv);
     std::string fname = Arguments::input_fname;
     std::string contents = FileIO::read_all(fname);

-    TokenStream res = Lexer::make_lexer(fname)->lex(contents);
-    Importer::init(fname.substr(0,fname.find_last_of('.')));
-    res = Importer::evaluate(res);
-    res = Normalizer::normalize(res);
+    TokenStream result = Lexer::make_lexer(fname)->lex(contents);
+
+    Importer::init(FileIO::remove_file_extension(fname));
+
+    result = Importer::evaluate(result);
+    result = Normalizer::normalize(result);

-    for(int i = 0; i < res.size(); i++)
+    for (int i = 0; i < result.size(); i++)
     {
-        std::cout << res[i].to_string() << std::endl;
+        std::cout << result[i].to_string() << std::endl;
     }

     std::cout << "Output filename: " << Arguments::output_fname << std::endl;