#include <dirent.h>
#include <stdbool.h>
#include <stddef.h>
#include <stdio.h>
#include <string.h>

#include "test/Test.h"
#include "utils/Path.h"
#include "tokenizer/TokenStream.h"
#include "tokenizer/Tokenizer.h"

static unsigned int done = 0;
static unsigned int tests = 0;

// Recursively walks 'path', pairing every extensionless regular file with a
// sibling file of the same name plus 'ending', and calls 'f' on each pair.
// Returns true on error.
static bool test_checkPath(Path* path, const char* ending, bool (*f)(const char*, const char*)) {
    DIR* dir = opendir(path->path);
    if(dir == NULL) {
        fprintf(stderr, "cannot open '%s': ", path->path);
        perror("");
        return true;
    }
    while(true) {
        struct dirent* entry = readdir(dir);
        if(entry == NULL) {
            break;
        } else if(strcmp(entry->d_name, ".") == 0 || strcmp(entry->d_name, "..") == 0) {
            continue;
        }
        if(entry->d_type == DT_DIR) {
            if(enterPath(path, entry->d_name)) {
                closedir(dir);
                return true;
            }
            if(test_checkPath(path, ending, f)) {
                leavePath(path);
                closedir(dir);
                return true;
            }
            leavePath(path);
        } else if(entry->d_type == DT_REG && strchr(entry->d_name, '.') == NULL) {
            Path inputPath;
            initPath(&inputPath, path->path);
            enterFile(&inputPath, entry->d_name, "");
            Path outputPath;
            initPath(&outputPath, path->path);
            enterFile(&outputPath, entry->d_name, ending);
            if(f(inputPath.path, outputPath.path)) {
                closedir(dir);
                return true;
            }
        }
    }
    if(closedir(dir)) {
        fprintf(stderr, "cannot close '%s': ", path->path);
        perror("");
        return true;
    }
    return false;
}

static bool test_forEachFile(const char* strPath, const char* ending, bool (*f)(const char*, const char*)) {
    Path path;
    initPath(&path, strPath);
    return test_checkPath(&path, ending, f);
}

// Reads one line (without the trailing newline) from 'f' into 'buffer' and
// always null-terminates; stops early when the buffer is full.
static void test_readLine(char* buffer, size_t capacity, FILE* f) {
    if(capacity == 0) {
        return;
    }
    size_t index = 0;
    while(index + 1 < capacity) {
        int c = fgetc(f);
        if(c == EOF || c == '\n') {
            break;
        }
        buffer[index++] = (char) c;
    }
    buffer[index] = '\0';
}

// Tokenizes 'input' and compares each token string against the expected
// output file line by line. Returns true if tokenizing itself failed.
static bool test_tokenizerTester(const char* input, const char* output) {
    FILE* f = fopen(output, "r");
    if(f == NULL) {
        fprintf(stderr, "cannot open '%s': ", output);
        perror("");
        return false;
    }
    tests++;
    TokenStream* tokenStream = newTokenStream();
    bool error = tokenize(tokenStream, input);
    bool mismatch = false;
    while(hasToken(tokenStream)) {
        char buffer[64];
        nextTokenString(tokenStream, buffer, sizeof(buffer));
        char expected[1024];
        test_readLine(expected, sizeof(expected), f);
        // A token string containing a newline spans two lines in the
        // expected output file; splice the second line back in.
        if(strchr(buffer, '\n') != NULL) {
            size_t length = strlen(expected);
            if(length + 1 < sizeof(expected)) {
                expected[length] = '\n';
                test_readLine(expected + length + 1, sizeof(expected) - length - 1, f);
            }
        }
        if(strcmp(buffer, expected) != 0) {
            printf("error in '%s'\n", input);
            printf("'%s' should be '%s'\n", buffer, expected);
            mismatch = true;
            break;
        }
    }
    if(!error && !mismatch) {
        done++;
    }
    fclose(f);
    deleteTokenStream(&tokenStream);
    return error;
}

static void test_testTokenizer(const char* path) {
    done = 0;
    tests = 0;
    test_forEachFile(path, ".tout", test_tokenizerTester);
    printf("%u / %u tokenizer tests succeeded\n", done, tests);
}

void startTests(const char* path) {
    test_testTokenizer(path);
    //testCompiler();
    //testOutput();
}
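/*
 * Usage sketch, assuming a typical driver: startTests() takes the root
 * directory of the test data. Every extensionless regular file below it is
 * tokenizer input, and the sibling file with the added ".tout" ending holds
 * the expected token strings, one per line (two lines for a token that
 * contains a newline). The directory name "tests" here is hypothetical.
 *
 *     int main(void) {
 *         startTests("tests");
 *         return 0;
 *     }
 */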