diff --git a/headers/AST.h b/headers/AST.h
new file mode 100644
index 0000000..e69de29
diff --git a/src/Lexer.cpp b/src/Lexer.cpp
index 2901089..15a93c0 100644
--- a/src/Lexer.cpp
+++ b/src/Lexer.cpp
@@ -64,12 +64,14 @@ std::vector<Token> Lexer::Tokenize(std::string source){
         }
         else if(t == ' ' || t == '\t' || t == '\n')
         {
-            src.erase(src.begin());
+            src.erase(src.begin()); //ignore t
         }
         else
         {
             throw std::runtime_error("Unknown Token: '" + std::string(1, t) + "'");
         }
+
+
     }
 }
 
diff --git a/src/bob.cpp b/src/bob.cpp
index fc2879d..9bfb3c2 100644
--- a/src/bob.cpp
+++ b/src/bob.cpp
@@ -3,38 +3,83 @@
 #include <fstream>
 #include "../headers/Lexer.h"
 
+#define VERSION "0.0.1"
+
 using namespace std;
 
+class Bob
+{
+public:
+    Lexer lexer;
+
+public:
+    void runFile(string path)
+    {
+        ifstream file = ifstream(path);
+
+        string source = "";
+
+        if(file.is_open()){
+            source = string(istreambuf_iterator<char>(file), istreambuf_iterator<char>());
+        }
+        else
+        {
+            cout << "File not found" << endl;
+            return;
+        }
+
+        this->run(source);
+    }
+
+    void runPrompt()
+    {
+        cout << "Bob v" << VERSION << ", 2023" << endl;
+        for(;;)
+        {
+            string line;
+            cout << "-> ";
+            std::getline(std::cin, line);
+
+            if(std::cin.eof())
+            {
+                break;
+            }
+
+            this->run(line);
+        }
+    }
+
+
+private:
+    bool hadError = false;
+
+private:
+    void run(string source)
+    {
+        vector<Token> tokens = lexer.Tokenize(source);
+
+        for(Token t : tokens){
+            cout << "{type: " << t.type << ", value: " << t.value << "}" << endl;
+        }
+    }
+};
 
 int main(){
-    string TokenTypeMappings[] = {
-        "Identifier",
-        "Number",
-        "Equals",
-        "OpenParen",
-        "CloseParen",
-        "BinaryOperator",
-        "TestKeyword"
-    };
-    Lexer l;
-
-    string path = "source.bob";
-    ifstream file = ifstream(path);
+    // string TokenTypeMappings[] = {
+    //     "Identifier",
+    //     "Number",
+    //     "Equals",
+    //     "OpenParen",
+    //     "CloseParen",
+    //     "BinaryOperator",
+    //     "TestKeyword"
+    // };
 
-    string source = "";
+    Bob bobLang;
 
-    if(file.is_open()){
-        source = string(istreambuf_iterator<char>(file), istreambuf_iterator<char>());
-    }
-    else
-    {
-        cout << "File not found" << endl;
-    }
+    //bobLang.runFile("source.bob");
+    bobLang.runPrompt();
 
-    vector<Token> tokens = l.Tokenize(source);
-    for(Token t : tokens){
-        cout << "Type: " << TokenTypeMappings[t.type] << ", Value: " + t.value << endl;
-    }
 
     return 0;
 }