Initial Commit
commit 650771e6fe
2 .gitignore vendored Normal file
@@ -0,0 +1,2 @@
.vscode/
build/
39 Makefile Normal file
@@ -0,0 +1,39 @@
# Makefile

# Compiler
CXX = g++

# Compiler flags
CXXFLAGS = -Wall -Wextra -std=c++11

# Source directory
SRC_DIR = ./src

# Output directory
BUILD_DIR = ./build

# Get all .cpp files in the source directory
CPP_FILES := $(wildcard $(SRC_DIR)/*.cpp)

# Generate object file names by replacing the source directory with the build directory
OBJ_FILES := $(patsubst $(SRC_DIR)/%.cpp,$(BUILD_DIR)/%.o,$(CPP_FILES))

# Default target
all: $(BUILD_DIR)/bob

# Create the build directory if it doesn't exist
# (a shell call evaluated at parse time, not a rule, so it runs before any recipe)
$(shell mkdir -p $(BUILD_DIR))

# Rule to compile object files
$(BUILD_DIR)/%.o: $(SRC_DIR)/%.cpp
	$(CXX) $(CXXFLAGS) -c $< -o $@

# Rule to link object files into the final executable
$(BUILD_DIR)/bob: $(OBJ_FILES)
	$(CXX) $(CXXFLAGS) $^ -o $@

# Clean build directory
.PHONY: clean
clean:
	rm -rf $(BUILD_DIR)/*
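Typical usage, assuming GNU make and g++ are on the PATH: running make from the repository root compiles every .cpp under src/ into build/ and links build/bob; make clean wipes the build directory.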
32 headers/Lexer.h Normal file
@@ -0,0 +1,32 @@
#pragma once

#include <string>
#include <map>
#include <vector>

enum TokenType{
    Identifier,
    Number,
    Equals,
    OpenParenthesis,
    CloseParenthesis,
    BinaryOperator,
    Test
};

const std::map<std::string, TokenType> KEYWORDS {
    {"test", Test}
};

struct Token
{
    TokenType type;
    std::string value;
};


class Lexer{
public:
    std::vector<Token> Tokenize(std::string source);

};
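A minimal sketch of how this interface is driven (not part of the commit; the real entry point is src/bob.cpp further down, which reads source.bob instead of a literal):

#include "headers/Lexer.h"
#include <iostream>

int main(){
    Lexer lexer;
    // Tokenize a literal and dump each token's enum value and text
    for(Token t : lexer.Tokenize("x = 1 + 2")){
        std::cout << t.type << " -> " << t.value << std::endl;
    }
    return 0;
}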
14 headers/test.h Normal file
@@ -0,0 +1,14 @@
#pragma once

#include <string>
#include <iostream>

class Test
{
public:
    std::string message;

    Test(std::string msg);

    void Hello();
};
9 source.bob Normal file
@@ -0,0 +1,9 @@
x = 45 * ( 4 / 3 ) * (45 / (20 * 3))
x = 45 * ( 4 / 3 ) * (45 / (20 * 3))
x = 45 * ( 4 / 3 ) * (45 / (20 * 3))
x = 45 * ( 4 / 3 ) * (45 / (20 * 3))
x = 45 * ( 4 / 3 ) * (45 / (20 * 3))
x = 45 * ( 4 / 3 ) * (45 / (20 * 3))
x = 45 * ( 4 / 3 ) * (45 / (20 * 3))
x = 45 * ( 4 / 3 ) * (45 / (20 * 3))
x = 45 * ( 4 / 3 ) * (45 / (20 * 3))
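Every line of this file lexes identically; for reference, the opening tokens of each line, printed the way src/bob.cpp below formats them, come out as:

Type: Identifier, Value: x
Type: Equals, Value: =
Type: Number, Value: 45
Type: BinaryOperator, Value: *
Type: OpenParen, Value: (
Type: Number, Value: 4
Type: BinaryOperator, Value: /
Type: Number, Value: 3
Type: CloseParen, Value: )
...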
81 src/Lexer.cpp Normal file
@@ -0,0 +1,81 @@
#include "../headers/Lexer.h"
#include <iostream>
#include <cctype>
#include <stdexcept>

std::vector<Token> Lexer::Tokenize(std::string source){
    std::vector<Token> tokens;
    std::vector<char> src{source.begin(), source.end()};

    while(src.size() > 0)
    {
        char t = src[0];
        if(t == '(')
        {
            tokens.push_back(Token{OpenParenthesis, std::string(1, t)}); // aggregate (brace) initialization of Token
            src.erase(src.begin());
        }
        else if(t == ')')
        {
            tokens.push_back(Token{CloseParenthesis, std::string(1, t)});
            src.erase(src.begin());
        }
        else if(t == '+' || t == '-' || t == '*' || t == '/')
        {
            tokens.push_back(Token{BinaryOperator, std::string(1, t)});
            src.erase(src.begin());
        }
        else if(t == '=')
        {
            tokens.push_back(Token{Equals, std::string(1, t)});
            src.erase(src.begin());
        }
        else
        {
            // Multi-character tokens.
            // Cast to unsigned char before calling std::isdigit/std::isalpha:
            // passing a plain (possibly negative) char is undefined behavior.
            if(std::isdigit(static_cast<unsigned char>(t)))
            {
                std::string num = "";
                while(src.size() > 0 && std::isdigit(static_cast<unsigned char>(src[0])))
                {
                    num += src[0];
                    src.erase(src.begin());
                }

                tokens.push_back(Token{Number, num});
            }
            else if(std::isalpha(static_cast<unsigned char>(t)))
            {
                std::string ident = "";
                while(src.size() > 0 && std::isalpha(static_cast<unsigned char>(src[0])))
                {
                    ident += src[0];
                    src.erase(src.begin());
                }

                if(KEYWORDS.find(ident) != KEYWORDS.end()) // identifier is a reserved keyword
                {
                    tokens.push_back(Token{KEYWORDS.at(ident), ident});
                }
                else
                {
                    tokens.push_back(Token{Identifier, ident});
                }

            }
            else if(t == ' ' || t == '\t' || t == '\n')
            {
                src.erase(src.begin());
            }
            else
            {
                throw std::runtime_error("Unknown Token: '" + std::string(1, t) + "'");
            }
        }

    }

    return tokens;
}
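One thing worth flagging: src.erase(src.begin()) shifts every remaining character on each call, so the scan is quadratic in the input length. A cursor index over the original string does the same job in linear time. A self-contained sketch of the idea (a hypothetical alternative that only counts tokens over a simplified character set, not what the commit does):

#include <string>
#include <cctype>
#include <iostream>

// Index-based scan: O(n) overall, versus O(n^2) for repeated erase(begin()).
int main(){
    std::string source = "x = 45 * ( 4 / 3 )";
    size_t pos = 0;
    int tokenCount = 0;
    while(pos < source.size()){
        unsigned char c = source[pos];
        if(std::isspace(c)){ pos++; continue; }  // skip whitespace by moving the cursor
        tokenCount++;
        if(std::isdigit(c) || std::isalpha(c)){
            // consume the whole multi-character token
            while(pos < source.size() && (std::isdigit((unsigned char)source[pos]) ||
                                          std::isalpha((unsigned char)source[pos]))) pos++;
        } else {
            pos++;  // single-character token
        }
    }
    std::cout << tokenCount << " tokens" << std::endl;  // prints "9 tokens"
    return 0;
}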
43 src/bob.cpp Normal file
@@ -0,0 +1,43 @@
#include <iostream>
#include <fstream>
#include <string>
#include <iterator>
#include "../headers/Lexer.h"

using namespace std;

int main(){

    // Order must mirror the TokenType enum in headers/Lexer.h
    string TokenTypeMappings[] = {
        "Identifier",
        "Number",
        "Equals",
        "OpenParen",
        "CloseParen",
        "BinaryOperator",
        "TestKeyword"
    };
    Lexer l;

    string path = "source.bob";
    ifstream file(path);

    string source = "";

    if(file.is_open()){
        source = string(istreambuf_iterator<char>(file), istreambuf_iterator<char>());
    }
    else
    {
        cerr << "File not found" << endl;
        return 1;
    }


    vector<Token> tokens = l.Tokenize(source);
    for(Token t : tokens){
        cout << "Type: " << TokenTypeMappings[t.type] << ", Value: " << t.value << endl;
    }
    return 0;
}
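A quick sanity check on the lexer and the driver's assumptions could look like this (a hypothetical test, not part of the commit):

#include <cassert>
#include "../headers/Lexer.h"

int main(){
    Lexer lexer;
    std::vector<Token> tokens = lexer.Tokenize("x = 1 + 2");
    assert(tokens.size() == 5);                                   // x, =, 1, +, 2
    assert(tokens[0].type == Identifier && tokens[0].value == "x");
    assert(tokens[1].type == Equals);
    assert(tokens[4].type == Number && tokens[4].value == "2");
    return 0;
}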
9 src/test.cpp Normal file
@@ -0,0 +1,9 @@
#include "../headers/test.h"

Test::Test(std::string msg){
    this->message = msg;
}

void Test::Hello(){
    std::cout << this->message << std::endl;
}
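The Test class is scaffolding that nothing in the commit calls yet; hypothetical usage would be:

#include "../headers/test.h"

int main(){
    Test t("hello from bob");  // store the message
    t.Hello();                 // print it to stdout
    return 0;
}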