#include <cstring>

#include "tokenizer/TokenStream.h"

TokenStream::TokenStream() : readIndex(0) {
}

bool TokenStream::hasToken() const {
    return readIndex < buffer.size();
}

// Formats the next buffered token as "(line, NAME)" plus the string or
// number payload when the token carries one.
String TokenStream::nextTokenString() {
    String s;
    s += '(';
    s += nextLine();
    s += ", ";
    Token token = nextToken();
    s += getTokenEnumName(token);
    if(token == STRING || token == LITERAL || token == LABEL) {
        s += ", \"";
        s += nextString();
        s += '"';
    }
    if(token == NUMBER) {
        s += ", ";
        s += nextDouble();
    }
    s += ')';
    return s;
}

// Copies length raw bytes out of the buffer and advances the read cursor.
void TokenStream::read(void* data, size_t length) {
    memcpy(data, buffer.data() + readIndex, length);
    readIndex += length;
}

Token TokenStream::nextToken() {
    Token token = EOF_TOKEN;
    // Read exactly the number of bytes that add() wrote for the token tag.
    read(&token, sizeof (Token));
    return token;
}

unsigned int TokenStream::nextLine() {
    unsigned int line = 0;
    read(&line, sizeof (unsigned int));
    return line;
}

// Returns a pointer to the next NUL-terminated string in the buffer and
// skips past it, including the terminator.
const char* TokenStream::nextString() {
    size_t offset = readIndex;
    readIndex += strlen(buffer.data() + readIndex) + 1;
    return buffer.data() + offset;
}

double TokenStream::nextDouble() {
    double d;
    read(&d, sizeof (double));
    return d;
}

// Appends length raw bytes to the end of the buffer.
void TokenStream::write(const void* data, size_t length) {
    const char* chars = static_cast<const char*>(data);
    for(size_t i = 0; i < length; i++) {
        buffer.push_back(chars[i]);
    }
}

// Every token record starts with its source line, followed by the token tag.
void TokenStream::add(Token token, unsigned int line) {
    write(&line, sizeof (unsigned int));
    write(&token, sizeof (Token));
}

void TokenStream::add(Token token, unsigned int line, double d) {
    add(token, line);
    write(&d, sizeof (double));
}

void TokenStream::add(Token token, unsigned int line, const char* text) {
    add(token, line);
    write(text, strlen(text) + 1);
}
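
/*
 * Illustrative round-trip sketch of the writer/reader pairing above. It
 * assumes NUMBER and STRING are Token enumerators declared in
 * TokenStream.h and that String exposes a c_str() accessor; both are
 * assumptions about the surrounding project, not guaranteed by this file.
 *
 *   TokenStream stream;
 *   stream.add(NUMBER, 1, 42.0);
 *   stream.add(STRING, 2, "hello");
 *   while(stream.hasToken()) {
 *       printf("%s\n", stream.nextTokenString().c_str());
 *   }
 */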