
refactoring, hashmap, token list (to ensure deletion of tokens), more meaningful token types

Kajetan Johannes Hammerle 6 years ago
parent
commit
011b73cd4e

+ 7 - 24
Main.cpp

@@ -1,41 +1,24 @@
 #include <iostream>
 #include "File.h"
-#include "Tokenizer.h"
+#include "compiler/Tokenizer.h"
 #include "Exception.h"
+#include "data/HashMap.h"
+#include "compiler/Compiler.h"
 
 using namespace std;
 
 int main(int argc, char** argv) 
 {    
-    File f("tests/if.snuvi");
+    File f("tests/vars.snuvi");
     if(f.exists())
     {
-        Tokenizer t(f.read());
-        ArrayList<Token*> tokens;
-        try
-        {
-            t.tokenize(tokens);
-        }
-        catch(Exception ex)
-        {
-            ex.print();
-        }
-        
-        tokens.forEach([](Token* t) 
-        {
-            cout << *t << endl;
-        });
-        
-        tokens.forEach([](Token* t) 
-        {
-            delete t;
-        });
-        tokens.clear();
+        Compiler c(f.read());
+        c.compile();
     }
     else
     {
         cout << "no" << endl;
-    }
+    }    
     return 0;
 }
 

+ 2 - 2
Makefile

@@ -2,8 +2,8 @@
 run: all
 	./LonelyTiger
 
-all: *.cpp *.h
-	g++ -Wall -std=c++14 *.cpp -o LonelyTiger
+all: *.cpp *.h compiler/*.cpp compiler/*.h
+	g++ -Wall -std=c++14 *.cpp compiler/*.cpp -o LonelyTiger
 	
 clean:
 	rm LonelyTiger

+ 0 - 70
TokenType.cpp

@@ -1,70 +0,0 @@
-#include "TokenType.h"
-
-std::ostream& operator<<(std::ostream& os, const Tokens::Type& c)
-{
-    switch(c)
-    {
-        case Tokens::FLOAT: os << "float"; break;
-        case Tokens::TRUE: os << "true"; break;
-        case Tokens::FALSE: os << "false"; break;
-        case Tokens::TNULL: os << "null"; break;
-        case Tokens::TEXT: os << "String"; break;
-        case Tokens::LABEL: os << "Label"; break;
-        case Tokens::VAR: os << "var"; break;
-        case Tokens::GLOBAL: os << "$"; break;
-        case Tokens::INC: os << "++"; break;
-        case Tokens::DEC: os << "--"; break;
-        case Tokens::INVERT: os << "!"; break;
-        case Tokens::BIT_INVERT: os << "~"; break;
-        case Tokens::MUL: os << "*"; break;
-        case Tokens::DIV: os << "/"; break;
-        case Tokens::MOD: os << "%"; break;
-        case Tokens::ADD: os << "+"; break;
-        case Tokens::SUB: os << "-"; break;
-        case Tokens::LEFT_SHIFT: os << "<<"; break;
-        case Tokens::RIGHT_SHIFT: os << ">>"; break;
-        case Tokens::LESS: os << "<"; break;
-        case Tokens::LESS_EQUAL: os << "<="; break;
-        case Tokens::GREATER: os << ">"; break;
-        case Tokens::GREATER_EQUAL: os << ">="; break;
-        case Tokens::EQUAL: os << "=="; break;
-        case Tokens::NOT_EQUAL: os << "!="; break;
-        case Tokens::BIT_AND: os << "&"; break;
-        case Tokens::BIT_XOR: os << "^"; break;
-        case Tokens::BIT_OR: os << "|"; break;
-        case Tokens::AND: os << "&&"; break;
-        case Tokens::OR: os << "||"; break;
-        case Tokens::SET: os << "="; break;
-        case Tokens::ADD_SET: os << "+="; break;
-        case Tokens::SUB_SET: os << "-="; break;
-        case Tokens::MUL_SET: os << "*="; break;
-        case Tokens::DIV_SET: os << "/="; break;
-        case Tokens::MOD_SET: os << "%="; break;
-        case Tokens::LEFT_SHIFT_SET: os << "<<="; break;
-        case Tokens::RIGHT_SHIFT_SET: os << ">>="; break;
-        case Tokens::BIT_AND_SET: os << "&="; break;
-        case Tokens::BIT_XOR_SET: os << "^="; break;
-        case Tokens::BIT_OR_SET: os << "|="; break;
-        case Tokens::COMMA: os << ","; break;
-        case Tokens::OPEN_BRACKET: os << ")"; break;
-        case Tokens::CLOSE_BRACKET: os << ")"; break;
-        case Tokens::OPEN_SQUARE_BRACKET: os << "["; break;
-        case Tokens::CLOSE_SQUARE_BRACKET: os << "]"; break;
-        case Tokens::OPEN_CURVED_BRACKET: os << "{"; break;
-        case Tokens::CLOSE_CURVED_BRACKET: os << "}"; break;
-        case Tokens::SEMICOLON: os <<";"; break;
-        case Tokens::IF: os << "if"; break;
-        case Tokens::ELSE_IF: os << "else if"; break;
-        case Tokens::ELSE: os << "else"; break;
-        case Tokens::FOR: os << "for"; break;
-        case Tokens::WHILE: os << "while"; break;
-        case Tokens::FUNCTION: os << "function"; break;
-        case Tokens::BREAK: os << "break"; break;
-        case Tokens::CONTINUE: os << "continue"; break;
-        case Tokens::RETURN: os << "return"; break;
-        case Tokens::TRY: os << "try"; break;
-        case Tokens::CATCH: os << "catch"; break;
-        case Tokens::END_OF_FILE: os << "end_of_file"; break;
-    }
-    return os;
-}

+ 0 - 80
TokenType.h

@@ -1,80 +0,0 @@
-#ifndef TOKENTYPE_H
-#define TOKENTYPE_H
-
-#include <iostream>
-
-namespace Tokens
-{
-    enum Type 
-    {
-        FLOAT, // double
-        TRUE, // true
-        FALSE, // false
-        TNULL, // null
-        TEXT, // String
-        LABEL, // Label
-        VAR, // var
-        GLOBAL, // $
-
-        INC, // ++
-        DEC, // --
-        INVERT, // ! 
-        BIT_INVERT, // ~ 
-        MUL, // * 
-        DIV, // / 
-        MOD, // % 
-        ADD, // + 
-        SUB, // - 
-        LEFT_SHIFT, // << 
-        RIGHT_SHIFT, // >> 
-        LESS, // < 
-        LESS_EQUAL, // <= 
-        GREATER, // > 
-        GREATER_EQUAL, // >= 
-        EQUAL, // == 
-        NOT_EQUAL, // != 
-        BIT_AND, // & 
-        BIT_XOR, // ^ 
-        BIT_OR, // | 
-        AND, // && 
-        OR, // || 
-        SET, // = 
-        ADD_SET, // += 
-        SUB_SET, // -= 
-        MUL_SET, // *= 
-        DIV_SET, // /= 
-        MOD_SET, // %= 
-        LEFT_SHIFT_SET, // <<= 
-        RIGHT_SHIFT_SET, // >>= 
-        BIT_AND_SET, // &= 
-        BIT_XOR_SET, // ^= 
-        BIT_OR_SET, // |= 
-        COMMA, // , 
-        OPEN_BRACKET, // ) 
-        CLOSE_BRACKET, // ) 
-        OPEN_SQUARE_BRACKET, // [ 
-        CLOSE_SQUARE_BRACKET, // ] 
-        OPEN_CURVED_BRACKET, // { 
-        CLOSE_CURVED_BRACKET, // } 
-        SEMICOLON, // ; 
-
-        IF, // if
-        ELSE_IF, // else if
-        ELSE, // else
-        FOR, // for
-        WHILE, // while
-        FUNCTION, // function
-        BREAK, // break
-        CONTINUE, // continue
-        RETURN, // return
-        TRY, // try
-        CATCH, // catch
-
-        END_OF_FILE // end_of_file      
-    };
-};
-
-std::ostream& operator<<(std::ostream& os, const Tokens::Type& c);
-
-#endif
-

+ 0 - 30
Tokenizer.h

@@ -1,30 +0,0 @@
-#ifndef TOKENIZER_H
-#define TOKENIZER_H
-
-#include <string>
-#include "ArrayList.h"
-#include "Token.h"
-
-using namespace std;
-
-class Tokenizer 
-{
-public:
-    Tokenizer(string s);
-    Tokenizer(const Tokenizer& orig);
-    virtual ~Tokenizer();
-    
-    void tokenize(ArrayList<Token*>& tokens);
-private:
-    string data;
-    int pos;
-    int line;
-    int length;
-    
-    void tokenize(ArrayList<Token*>& tokens, char c, Tokens::Type type1, Tokens::Type type2);
-    void tokenize(ArrayList<Token*>& tokens, Tokens::Type type1, char c2, Tokens::Type type2, char c3, Tokens::Type type3);
-    void tokenize(ArrayList<Token*>& tokens, Tokens::Type type1, char c2, char c3, Tokens::Type type2, Tokens::Type type3, char c4, Tokens::Type type4);
-};
-
-#endif
-

+ 32 - 0
compiler/Compiler.cpp

@@ -0,0 +1,32 @@
+#include "Compiler.h"
+#include "../Exception.h"
+
+Compiler::Compiler(string s) : tokenizer(s)
+{
+}
+
+Compiler::~Compiler()
+{
+}
+
+void Compiler::compile()
+{
+    TokenList tokens;
+    try
+    {
+        tokenizer.tokenize(tokens);
+        compile(tokens);
+    }
+    catch(Exception ex)
+    {
+        ex.print();
+    }
+}
+
+void Compiler::compile(TokenList& tokens)
+{
+    tokens.forEach([](Token* t) 
+    {
+        cout << *t << endl;
+    });
+}

+ 25 - 0
compiler/Compiler.h

@@ -0,0 +1,25 @@
+#ifndef COMPILER_H
+#define COMPILER_H
+
+#include <string>
+#include "../data/ArrayList.h"
+#include "Token.h"
+#include "Tokenizer.h"
+
+using namespace std;
+
+class Compiler
+{
+public:
+    Compiler(string s);
+    virtual ~Compiler();
+    
+    void compile();
+private:
+    Tokenizer tokenizer;
+    
+    void compile(TokenList& tokens);
+};
+
+#endif
+

+ 3 - 4
Token.cpp → compiler/Token.cpp

@@ -1,11 +1,10 @@
 #include "Token.h"
-#include "Exception.h"
+#include "../Exception.h"
 #include <iostream>
 #include "TokenType.h"
 
-Token::Token(Tokens::Type type, int line)
+Token::Token(TokenType type, int line) : type(type)
 {
-    this->type = type;
     this->line = line;
     f = 0.0f;
     s = "";
@@ -57,7 +56,7 @@ int Token::getLine() const
     return line;
 }
 
-Tokens::Type Token::getType() const
+TokenType Token::getType() const
 {
     return type;
 }

+ 3 - 3
Token.h → compiler/Token.h

@@ -9,7 +9,7 @@ using namespace std;
 class Token
 {
 public:
-    Token(Tokens::Type type, int line);
+    Token(TokenType type, int line);
     virtual ~Token();
     
     void setFloat(float f);
@@ -20,9 +20,9 @@ public:
     bool getBool() const;
     string getString() const;
     int getLine() const;
-    Tokens::Type getType() const;
+    TokenType getType() const;
 private:
-    Tokens::Type type;
+    TokenType type;
     float f;
     string s;
     int line;

+ 33 - 0
compiler/TokenList.cpp

@@ -0,0 +1,33 @@
+#include "TokenList.h"
+
+TokenList::TokenList()
+{
+}
+
+TokenList::TokenList(const TokenList& orig)
+{
+}
+
+TokenList::~TokenList()
+{
+    list.forEach([](Token* t) 
+    {
+        delete t;
+    });
+}
+
+void TokenList::add(Token* t)
+{
+    list.add(t);
+}
+
+void TokenList::remove(int index)
+{
+    list.remove(index);
+}
+
+void TokenList::forEach(void (*f) (Token*))
+{
+    list.forEach(f);
+}
+

+ 22 - 0
compiler/TokenList.h

@@ -0,0 +1,22 @@
+#ifndef TOKENLIST_H
+#define TOKENLIST_H
+
+#include "Token.h"
+#include "../data/ArrayList.h"
+
+class TokenList
+{
+public:
+    TokenList();
+    TokenList(const TokenList& orig);
+    virtual ~TokenList();
+    
+    void add(Token* t);
+    void remove(int index);
+    void forEach(void (*f) (Token*));
+private:
+    ArrayList<Token*> list;
+};
+
+#endif
+
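
The TokenList wrapper is what the commit message's "token list (to ensure deletion of tokens)" refers to: its destructor walks the underlying ArrayList and deletes every stored Token*, so callers no longer run the manual delete loop that the old Main.cpp did. A minimal ownership sketch, assuming the Token constructor from compiler/Token.h; the demo function and the chosen token types are illustrative only:

#include <iostream>
#include "compiler/TokenList.h"
#include "compiler/TokenType.h"

void demo()
{
    TokenList tokens;
    // The list takes ownership of the raw pointers.
    tokens.add(new Token(Tokens::VAR, 1));
    tokens.add(new Token(Tokens::SET, 1));
    tokens.add(new Token(Tokens::FLOAT, 1));

    // forEach takes a plain function pointer, so a capture-less lambda works.
    tokens.forEach([](Token* t)
    {
        std::cout << *t << std::endl;
    });
}   // ~TokenList() deletes all remaining tokens here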

+ 103 - 0
compiler/TokenType.cpp

@@ -0,0 +1,103 @@
+#include "TokenType.h"
+
+#include <iostream>
+
+TokenTypeClass::TokenTypeClass(string name, int level) : typeName(name)
+{
+    this->level = level;
+}
+
+TokenTypeClass::TokenTypeClass(string name) : typeName(name)
+{
+    level = -1;
+}
+
+TokenTypeClass::TokenTypeClass(const TokenTypeClass& orig)
+{
+}
+
+TokenTypeClass::~TokenTypeClass()
+{
+}
+
+string TokenTypeClass::getName() const
+{
+    return typeName;
+}
+
+int TokenTypeClass::getLevel() const
+{
+    return level;
+}
+
+std::ostream& operator<<(std::ostream& os, TokenType& c)
+{
+    return os << c->getName();
+}
+
+namespace Tokens
+{
+    TokenType FLOAT = new const TokenTypeClass("float");
+    TokenType TRUE = new const TokenTypeClass("true");
+    TokenType FALSE = new const TokenTypeClass("false");
+    TokenType TNULL = new const TokenTypeClass("null");
+    TokenType TEXT = new const TokenTypeClass("String");
+    TokenType LABEL = new const TokenTypeClass("Label");
+    TokenType VAR = new const TokenTypeClass("var");
+    TokenType GLOBAL = new const TokenTypeClass("$");
+    
+    TokenType INC = new const TokenTypeClass("++", 2);
+    TokenType DEC = new const TokenTypeClass("--", 2);
+    TokenType INVERT = new const TokenTypeClass("!", 2);
+    TokenType BIT_INVERT = new const TokenTypeClass("~", 2);
+    TokenType MUL = new const TokenTypeClass("*", 3);
+    TokenType DIV = new const TokenTypeClass("/", 3);
+    TokenType MOD = new const TokenTypeClass("%", 3);
+    TokenType ADD = new const TokenTypeClass("+", 4);
+    TokenType SUB = new const TokenTypeClass("-", 4);
+    TokenType LEFT_SHIFT = new const TokenTypeClass("<<", 5);
+    TokenType RIGHT_SHIFT = new const TokenTypeClass(">>", 5);
+    TokenType LESS = new const TokenTypeClass("<", 6);
+    TokenType LESS_EQUAL = new const TokenTypeClass("<=", 6);
+    TokenType GREATER = new const TokenTypeClass(">", 6);
+    TokenType GREATER_EQUAL = new const TokenTypeClass(">=", 6);
+    TokenType EQUAL = new const TokenTypeClass("==", 7);
+    TokenType NOT_EQUAL = new const TokenTypeClass("!=", 7);
+    TokenType BIT_AND = new const TokenTypeClass("&", 8);
+    TokenType BIT_XOR = new const TokenTypeClass("^", 9);
+    TokenType BIT_OR = new const TokenTypeClass("|", 10);
+    TokenType AND = new const TokenTypeClass("&&", 11);
+    TokenType OR = new const TokenTypeClass("||", 12);
+    TokenType SET = new const TokenTypeClass("=", 14);
+    TokenType ADD_SET = new const TokenTypeClass("+=", 14);
+    TokenType SUB_SET = new const TokenTypeClass("-=", 14);
+    TokenType MUL_SET = new const TokenTypeClass("*=", 14);
+    TokenType DIV_SET = new const TokenTypeClass("/=", 14);
+    TokenType MOD_SET = new const TokenTypeClass("%=", 14);
+    TokenType LEFT_SHIFT_SET = new const TokenTypeClass("<<=", 14);
+    TokenType RIGHT_SHIFT_SET = new const TokenTypeClass(">>=", 14);
+    TokenType BIT_AND_SET = new const TokenTypeClass("&=", 14);
+    TokenType BIT_XOR_SET = new const TokenTypeClass("^=", 14);
+    TokenType BIT_OR_SET = new const TokenTypeClass("|=", 14);
+    TokenType COMMA = new const TokenTypeClass(",", 15);
+    TokenType OPEN_BRACKET = new const TokenTypeClass("(", 1);
+    TokenType CLOSE_BRACKET = new const TokenTypeClass(")", 1);
+    TokenType OPEN_SQUARE_BRACKET = new const TokenTypeClass("[", 1);
+    TokenType CLOSE_SQUARE_BRACKET = new const TokenTypeClass("]", 1);
+    TokenType OPEN_CURVED_BRACKET = new const TokenTypeClass("{");
+    TokenType CLOSE_CURVED_BRACKET = new const TokenTypeClass("}");
+    TokenType SEMICOLON = new const TokenTypeClass(";");
+    
+    TokenType IF = new const TokenTypeClass("if");
+    TokenType ELSE_IF = new const TokenTypeClass("else if");
+    TokenType ELSE = new const TokenTypeClass("else");
+    TokenType FOR = new const TokenTypeClass("for");
+    TokenType WHILE = new const TokenTypeClass("while");
+    TokenType FUNCTION = new const TokenTypeClass("function");
+    TokenType BREAK = new const TokenTypeClass("break");
+    TokenType CONTINUE = new const TokenTypeClass("continue");
+    TokenType RETURN = new const TokenTypeClass("return");
+    TokenType TRY = new const TokenTypeClass("try");
+    TokenType CATCH = new const TokenTypeClass("catch");
+    TokenType END_OF_FILE = new const TokenTypeClass("end_of_file");  
+};

+ 96 - 0
compiler/TokenType.h

@@ -0,0 +1,96 @@
+#ifndef TOKENTYPE_H
+#define TOKENTYPE_H
+
+#include <iostream>
+#include <string>
+
+using namespace std;
+
+class TokenTypeClass
+{
+public:
+    TokenTypeClass(string name, int level);
+    TokenTypeClass(string name);
+    TokenTypeClass(const TokenTypeClass& orig);
+    virtual ~TokenTypeClass();
+
+    string getName() const;
+    int getLevel() const;
+private:
+    string typeName;
+    int level;
+};
+
+typedef const TokenTypeClass* const TokenType;
+
+std::ostream& operator<<(std::ostream& os, TokenType& c);
+
+namespace Tokens
+{
+    extern TokenType FLOAT;
+    extern TokenType TRUE;
+    extern TokenType FALSE;
+    extern TokenType TNULL;
+    extern TokenType TEXT;
+    extern TokenType LABEL;
+    extern TokenType VAR;
+    extern TokenType GLOBAL;
+    
+    extern TokenType INC;
+    extern TokenType DEC;
+    extern TokenType INVERT;
+    extern TokenType BIT_INVERT;
+    extern TokenType MUL;
+    extern TokenType DIV;
+    extern TokenType MOD;
+    extern TokenType ADD;
+    extern TokenType SUB;
+    extern TokenType LEFT_SHIFT;
+    extern TokenType RIGHT_SHIFT;
+    extern TokenType LESS;
+    extern TokenType LESS_EQUAL;
+    extern TokenType GREATER;
+    extern TokenType GREATER_EQUAL;
+    extern TokenType EQUAL;
+    extern TokenType NOT_EQUAL;
+    extern TokenType BIT_AND;
+    extern TokenType BIT_XOR;
+    extern TokenType BIT_OR;
+    extern TokenType AND;
+    extern TokenType OR;
+    extern TokenType SET;
+    extern TokenType ADD_SET;
+    extern TokenType SUB_SET;
+    extern TokenType MUL_SET;
+    extern TokenType DIV_SET;
+    extern TokenType MOD_SET;
+    extern TokenType LEFT_SHIFT_SET;
+    extern TokenType RIGHT_SHIFT_SET;
+    extern TokenType BIT_AND_SET;
+    extern TokenType BIT_XOR_SET;
+    extern TokenType BIT_OR_SET;
+    extern TokenType COMMA;
+    extern TokenType OPEN_BRACKET;
+    extern TokenType CLOSE_BRACKET;
+    extern TokenType OPEN_SQUARE_BRACKET;
+    extern TokenType CLOSE_SQUARE_BRACKET;
+    extern TokenType OPEN_CURVED_BRACKET;
+    extern TokenType CLOSE_CURVED_BRACKET;
+    extern TokenType SEMICOLON;
+    
+    extern TokenType IF;
+    extern TokenType ELSE_IF;
+    extern TokenType ELSE;
+    extern TokenType FOR;
+    extern TokenType WHILE;
+    extern TokenType FUNCTION;
+    extern TokenType BREAK;
+    extern TokenType CONTINUE;
+    extern TokenType RETURN;
+    extern TokenType TRY;
+    extern TokenType CATCH;
+    extern TokenType END_OF_FILE;
+};
+
+#endif
+
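
Each TokenType is now a pointer to a shared const TokenTypeClass instance that carries a printable name and an operator-precedence level (lower level apparently binds tighter, -1 for non-operators), which is what makes the token types "more meaningful" than the old bare enum. A hedged sketch of querying these properties; the bindsTighter helper is illustrative and not part of the commit:

#include <iostream>
#include "compiler/TokenType.h"

// Illustrative helper: decide which of two operators binds tighter,
// based on the precedence levels assigned in compiler/TokenType.cpp.
static bool bindsTighter(TokenType a, TokenType b)
{
    return a->getLevel() < b->getLevel();
}

int main()
{
    std::cout << Tokens::MUL << " has level " << Tokens::MUL->getLevel() << "\n"; // * has level 3
    std::cout << Tokens::ADD << " has level " << Tokens::ADD->getLevel() << "\n"; // + has level 4
    std::cout << bindsTighter(Tokens::MUL, Tokens::ADD) << "\n";                  // 1
    // Token types are shared instances, so identity comparison is enough.
    std::cout << (Tokens::MUL == Tokens::ADD) << "\n";                            // 0
    return 0;
}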

+ 6 - 6
Tokenizer.cpp → compiler/Tokenizer.cpp

@@ -1,6 +1,6 @@
 #include "Tokenizer.h"
-#include "Utils.h"
-#include "Exception.h"
+#include "../Utils.h"
+#include "../Exception.h"
 
 Tokenizer::Tokenizer(string s) 
 {
@@ -18,7 +18,7 @@ Tokenizer::~Tokenizer()
 {
 }
 
-void Tokenizer::tokenize(ArrayList<Token*>& tokens, char c, Tokens::Type type1, Tokens::Type type2)
+void Tokenizer::tokenize(TokenList& tokens, char c, const TokenTypeClass* type1, const TokenTypeClass* type2)
 {
     if(pos + 1 < length && data[pos + 1] == c)
     {
@@ -31,7 +31,7 @@ void Tokenizer::tokenize(ArrayList<Token*>& tokens, char c, Tokens::Type type1,
     }
 }
 
-void Tokenizer::tokenize(ArrayList<Token*>& tokens, Tokens::Type type1, char c2, Tokens::Type type2, char c3, Tokens::Type type3)
+void Tokenizer::tokenize(TokenList& tokens, const TokenTypeClass* type1, char c2, const TokenTypeClass* type2, char c3, const TokenTypeClass* type3)
 {
     if(pos + 1 >= length)
     {
@@ -53,7 +53,7 @@ void Tokenizer::tokenize(ArrayList<Token*>& tokens, Tokens::Type type1, char c2,
     }
 }
 
-void Tokenizer::tokenize(ArrayList<Token*>& tokens, Tokens::Type type1, char c2, char c3, Tokens::Type type2, Tokens::Type type3, char c4, Tokens::Type type4)
+void Tokenizer::tokenize(TokenList& tokens, const TokenTypeClass* type1, char c2, char c3, const TokenTypeClass* type2, const TokenTypeClass* type3, char c4, const TokenTypeClass* type4)
 {
     if(pos + 1 >= length)
     {
@@ -83,7 +83,7 @@ void Tokenizer::tokenize(ArrayList<Token*>& tokens, Tokens::Type type1, char c2,
     }
 }
 
-void Tokenizer::tokenize(ArrayList<Token*>& tokens)
+void Tokenizer::tokenize(TokenList& tokens)
 {
     line = 1;
     pos = 0;

+ 31 - 0
compiler/Tokenizer.h

@@ -0,0 +1,31 @@
+#ifndef TOKENIZER_H
+#define TOKENIZER_H
+
+#include <string>
+#include "../data/ArrayList.h"
+#include "Token.h"
+#include "TokenList.h"
+
+using namespace std;
+
+class Tokenizer 
+{
+public:
+    Tokenizer(string s);
+    Tokenizer(const Tokenizer& orig);
+    virtual ~Tokenizer();
+    
+    void tokenize(TokenList& tokens);
+private:
+    string data;
+    int pos;
+    int line;
+    int length;
+    
+    void tokenize(TokenList& tokens, char c, const TokenTypeClass* type1, const TokenTypeClass* type2);
+    void tokenize(TokenList& tokens, const TokenTypeClass* type1, char c2, const TokenTypeClass* type2, char c3, const TokenTypeClass* type3);
+    void tokenize(TokenList& tokens, const TokenTypeClass* type1, char c2, char c3, const TokenTypeClass* type2, const TokenTypeClass* type3, char c4, const TokenTypeClass* type4);
+};
+
+#endif
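
Tokenizer::tokenize now fills a TokenList rather than a raw ArrayList<Token*>, so the cleanup pass that the old Main.cpp ran is gone. A small end-to-end sketch that mirrors Compiler::compile(); the source string is just the contents of tests/vars.snuvi used as example input:

#include <iostream>
#include "compiler/Tokenizer.h"
#include "Exception.h"

int main()
{
    Tokenizer tokenizer("a = 3.0046;\nb = 3;");
    TokenList tokens;
    try
    {
        tokenizer.tokenize(tokens);
        // Print every token; a capture-less lambda converts to the
        // function pointer that TokenList::forEach expects.
        tokens.forEach([](Token* t)
        {
            std::cout << *t << std::endl;
        });
    }
    catch(Exception ex)
    {
        ex.print();
    }
    return 0;
}   // TokenList's destructor frees all tokens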
+

+ 1 - 0
ArrayList.h → data/ArrayList.h

@@ -2,6 +2,7 @@
 #define ARRAYLIST_H
 
 #include <cstring>
+#include <iostream>
 
 template<class T>
 class ArrayList

+ 340 - 0
data/HashMap.h

@@ -0,0 +1,340 @@
+#ifndef HASHMAP_H
+#define HASHMAP_H
+
+const static int PRIMES[26] = 
+{
+    17, 37, 79, 163, 331, 673, 1361, 2729, 5471, 10949, 21911, 43853, 87719, 
+    175447, 350899, 701819, 1403641, 2807303, 5614657, 11229331, 22458671,
+    44917381, 89834777, 179669557, 359339171, 718678369
+};
+
+template<class K, class V> 
+class HashMap
+{
+    class Node
+    {
+    public:
+        Node* next;
+        K k;
+        V v;
+        
+        Node(K k, V v)
+        {
+            next = nullptr;
+            this->k = k;
+            this->v = v;
+        }
+    };
+    
+private:
+    // hasher, key comparison
+    int (*hasher)(K); 
+    bool (*equal)(K, K);
+    
+    // size specs
+    int primeIndex;
+    int capacity;
+    int resizeCap;
+    
+    // entries
+    Node** data;
+    int entries;
+    
+    // found node
+    K lastKey;
+    Node* found;
+    int lastIndex;
+    
+    int getHigherPrimeIndex(int lower) const
+    {
+        int low = 0;
+        int high = 25;
+        int mid;
+        while(true)
+        {
+            if(low == high)
+            {
+                return low;
+            }
+            mid = (high + low) >> 1;
+            if(PRIMES[mid] >= lower)
+            {
+                high = mid;
+            }
+            else
+            {
+                low = mid + 1;
+            }
+        }
+    }
+    
+    int getHash(K k) const
+    {
+        int hash = ((*hasher)(k)) % capacity;
+        return hash < 0 ? hash + capacity : hash;
+    }
+    
+    void ensureCapacity()
+    {
+        if(entries < resizeCap)
+        {
+            return;
+        }
+        
+        primeIndex++;
+        if(primeIndex >= 26)
+        {
+            resizeCap = 2147483647;
+            return;
+        }
+        
+        int oldCapacity = capacity;
+        capacity = PRIMES[primeIndex];
+        
+        resizeCap = (capacity >> 2) * 3;
+        
+        Node** newData = new Node*[capacity];
+        Node* n;
+        Node* m;
+        
+        int hash;
+        for(int i = 0; i < oldCapacity; i++)
+        {
+            Node* old = data[i];
+            if(old != nullptr)
+            {
+                hash = getHash(old->k);
+                n = newData[hash];
+                if(n == nullptr)
+                {
+                    newData[hash] = old;
+                }
+                else
+                {
+                    while(n->next != nullptr)
+                    {
+                        n = n->next;
+                    }
+                    n->next = old;
+                }
+                
+                while(old->next != nullptr)
+                {
+                    n = old->next;
+                    old->next = nullptr;
+                    
+                    hash = getHash(n->k);
+                    m = newData[hash];
+                    if(m == nullptr)
+                    {
+                        newData[hash] = n;
+                    }
+                    else
+                    {
+                        while(m->next != nullptr)
+                        {
+                            m = m->next;
+                        }
+                        m->next = n;
+                    }
+                    
+                    old = n;
+                }
+            }
+        }
+        
+        delete[] data;
+        data = newData;
+    }
+
+public:
+    HashMap(int initialLoad, int (*hasher)(K), bool (*equal)(K, K))
+    {
+        this->hasher = hasher;
+        this->equal = equal;
+        this->hasher = hasher;
+        
+        primeIndex = getHigherPrimeIndex(initialLoad);
+        capacity = PRIMES[primeIndex];
+        resizeCap = (capacity >> 2) * 3;
+        
+        data = new Node*[capacity];
+        for(int i = 0; i < capacity; i++)
+        {
+            data[i] = nullptr;
+        }
+        entries = 0;
+        
+        found = nullptr;
+        lastIndex = -1;
+    }
+    
+    virtual ~HashMap()
+    {
+        for(int i = 0; i < capacity; i++)
+        {
+            Node* n = data[i];
+            while(n != nullptr)
+            {
+                Node* next = n->next;
+                delete n;
+                n = next;
+            }
+        }
+        delete[] data;
+    }
+    
+    void search(K k)
+    {
+        lastKey = k;
+        int hash = getHash(k);
+        Node* n = data[hash];
+        if(n == nullptr) // free slot, nothing found
+        {
+            found = nullptr;
+            // mark index for inserting
+            lastIndex = hash;
+        }
+        else
+        {
+            while(true)
+            {
+                if((*equal)(k, n->k)) // key was found
+                {
+                    // mark existing node for overwrite / deleting
+                    found = n;
+                    lastIndex = hash;
+                    return;
+                }
+                if(n->next == nullptr)
+                {
+                    break;
+                }
+                n = n->next;
+            }
+            // nothing found, mark last node for adding
+            found = n;
+            lastIndex = -1;
+        }
+    }
+    
+    void print() const
+    {
+        for(int i = 0; i < capacity; i++)
+        {
+            Node* n = data[i];
+            if(n != nullptr)
+            {
+                std::cout << n->k << " - " << n->v;
+                while(n->next != nullptr)
+                {
+                    n = n->next;
+                    std::cout << ", " << n->k << " - " << n->v;
+                }
+                std::cout << "\n";
+            }
+        }
+    }
+    
+    void insert(K k, V v)
+    {
+        search(k);
+        insert(v);
+    }
+    
+    void insert(V v)
+    {
+        if(found == nullptr)
+        {
+            if(lastIndex != -1)
+            {
+                // inserting into empty slot
+                data[lastIndex] = new Node(lastKey, v);
+                entries++;
+                lastIndex = -1;
+                ensureCapacity();
+            }
+        }
+        else
+        {
+            if(lastIndex != -1)
+            {
+                // overwriting old value
+                found->v = v;
+            }
+            else
+            {
+                // adding new node to list
+                found->next = new Node(lastKey, v);
+                entries++;
+                ensureCapacity();
+            }
+            found = nullptr;
+        }
+    }
+    
+    void remove()
+    {
+        if(found != nullptr && lastIndex != -1)
+        {
+            // search previous node
+            Node* n = data[lastIndex];
+            if(found == n)
+            {
+                data[lastIndex] = n->next;
+                delete n;
+            }
+            else
+            {
+                while(n->next != found)
+                {
+                    n = n->next;
+                }
+                n->next = found->next;
+                delete found;
+            }
+            entries--;
+            found = nullptr;
+        }
+    }
+    
+    bool isFound() const
+    {
+        return found != nullptr && lastIndex != -1;
+    }
+    
+    V getValue() const
+    {
+        return found->v;
+    }
+    
+    int getCapacity() const
+    {
+        return capacity;
+    }
+    
+    int getSize() const
+    {
+        return entries;
+    }
+    
+    void forEach(void (*w)(K, V)) const
+    {
+        for(int i = 0; i < capacity; i++)
+        {
+            Node* n = data[i];
+            if(n != nullptr)
+            {
+                (*w)(n->k, n->v);
+                while(n->next != nullptr)
+                {
+                    n = n->next;
+                    (*w)(n->k, n->v);
+                }
+            }
+        }
+    }
+};
+
+#endif
+
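
HashMap is a chained hash table built on caller-supplied hash and key-equality function pointers, with a two-step protocol: search(k) caches the matching node (or the free slot for a missing key), and insert, getValue and remove then act on that cached result. A minimal sketch with int keys and values; intHash and intEqual are illustrative callbacks, since the commit leaves them to the caller:

#include <iostream>
#include "data/HashMap.h"

// Illustrative hash and equality callbacks for int keys.
static int intHash(int k)
{
    return k;
}

static bool intEqual(int a, int b)
{
    return a == b;
}

int main()
{
    // An initial load of 16 rounds up to the first entry of PRIMES, 17.
    HashMap<int, int> map(16, intHash, intEqual);

    map.insert(1, 100);     // combined search(k) + insert(v)
    map.insert(2, 200);
    map.insert(1, 111);     // existing key: the old value is overwritten

    map.search(2);          // caches the node for key 2
    if(map.isFound())
    {
        std::cout << map.getValue() << "\n";    // 200
    }

    map.search(1);
    map.remove();           // deletes the cached node for key 1

    std::cout << map.getSize() << "/" << map.getCapacity() << "\n"; // 1/17
    return 0;
}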

+ 0 - 33
tests/if.snuvi

@@ -1,33 +0,0 @@
-a = 3.0046
-<<<<=< <=
->>>>=> >=
-b = 4;
-if(a < b)
-{
-    print(a % b);
-    a %= 3;
-}
-elseif(a == b)
-{
-    print(a);
-    print(" dgfdg dgdfg dgdf");
-}$
-else
-{
-    prin.t(!b);
-}
-@wusi_gdf
-a++;
-a--;
-
-for(i = 3; i <= 4; ++i)
-{
-    print(add(~a + b + i));
-}
-
-i = 5;
-while(i < 20)
-{
-    print(i);
-    i++;
-}

+ 2 - 0
tests/vars.snuvi

@@ -0,0 +1,2 @@
+a = 3.0046;
+b = 3;