123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154 |
- #include <stdio.h>
- #include <stdbool.h>
- #include <stdlib.h>
- #include <dirent.h>
- #include <string.h>
- #include <unistd.h>
- #include "test/Test.h"
- #include "utils/Path.h"
- #include "tokenizer/TokenStream.h"
- #include "tokenizer/Tokenizer.h"
// Test-summary counters: 'done' = tests that succeeded, 'tests' = tests run.
// NOTE(review): currently only reset in test_testTokenizer; the active C code
// never increments them — only the commented-out C++ port below does. Confirm
// whether the counters should be wired into test_tokenizerTester.
static unsigned int done = 0;
static unsigned int tests = 0;
// Recursively walks the directory tree rooted at 'path'. For every regular
// file whose name has no extension it builds the matching expectation-file
// path (same name + 'ending') and invokes the tester callback 'f' with the
// input path and the expectation path.
// Returns true on the first error (unreadable directory, enterPath failure,
// 'f' reporting an error, or closedir failure), false otherwise.
static bool test_checkPath(Path* path, const char* ending, bool (*f)(const char*, const char*)) {
    DIR* dir = opendir(path->path);
    if(dir == NULL) {
        fprintf(stderr, "cannot open '%s': ", path->path);
        perror("");
        return true;
    }
    // Record errors instead of returning early so the DIR handle is closed
    // exactly once below (the previous version leaked it on every error path).
    bool error = false;
    struct dirent* entry;
    while(!error && (entry = readdir(dir)) != NULL) {
        if(strcmp(entry->d_name, ".") == 0 || strcmp(entry->d_name, "..") == 0) {
            continue; // skip self and parent entries
        }
        // NOTE(review): d_type is not guaranteed on all filesystems (may be
        // DT_UNKNOWN) — confirm test directories live on a supporting FS.
        if(entry->d_type == DT_DIR) {
            if(enterPath(path, entry->d_name)) {
                error = true;
            } else {
                error = test_checkPath(path, ending, f);
                leavePath(path);
            }
        } else if(entry->d_type == DT_REG && strchr(entry->d_name, '.') == NULL) {
            // Extension-less regular file: pair it with its expectation file.
            Path inputPath;
            initPath(&inputPath, path->path);
            enterFile(&inputPath, entry->d_name, "");
            Path outputPath;
            initPath(&outputPath, path->path);
            enterFile(&outputPath, entry->d_name, ending);
            error = f(inputPath.path, outputPath.path);
        }
    }
    if(closedir(dir)) {
        fprintf(stderr, "cannot close '%s': ", path->path);
        perror("");
        return true;
    }
    return error;
}
// Entry point for the directory walk: wraps the raw path string in a Path
// and hands it to the recursive worker. Returns true on error.
static bool test_forEachFile(const char* strPath, const char* ending, bool (*f)(const char*, const char*)) {
    Path root;
    initPath(&root, strPath);
    return test_checkPath(&root, ending, f);
}
// Reads characters from 'f' into 'buffer' until a newline, EOF, or the
// buffer is full (capacity includes the terminator). The newline is
// consumed but not stored; the result is always NUL-terminated.
// Characters beyond the capacity are left unread for the next call.
static void test_readLine(char* buffer, size_t capacity, FILE* f) {
    if(capacity == 0) {
        // No room even for the terminator; 'capacity - 1' below would
        // underflow to SIZE_MAX and write out of bounds.
        return;
    }
    size_t index = 0;
    while(index < capacity - 1) {
        int c = fgetc(f);
        if(c == EOF || c == '\n') {
            break;
        }
        buffer[index++] = (char)c;
    }
    buffer[index] = '\0';
}
// Tokenizes the file at 'input' and compares each token's string form with
// the expected strings stored one per line in the file at 'output'.
// Returns the result of tokenize() — presumably true on a tokenizer error
// (TODO confirm against tokenize()'s contract); a token/expectation
// mismatch is printed and stops the comparison but does not change the
// return value.
static bool test_tokenizerTester(const char* input, const char* output) {
    FILE* f = fopen(output, "r");
    if(f == NULL) {
        // Missing expectation file: report it but let the walk continue
        // (returning true would abort the whole directory traversal).
        perror("cannot open file");
        return false;
    }
    TokenStream* tokenStream = newTokenStream();
    bool b = tokenize(tokenStream, input);
    while(hasToken(tokenStream)) {
        const size_t bufferLength = 64;
        char buffer[bufferLength];
        nextTokenString(tokenStream, buffer, bufferLength);
        char expected[1024];
        test_readLine(expected, 1024, f);
        // A token whose text contains a newline spans two lines in the
        // expectation file: re-insert the '\n' and append the second line.
        if(strchr(buffer, '\n') != NULL) {
            size_t length = strlen(expected);
            // NOTE(review): if length == 1023 the remaining capacity passed
            // below is 0 and test_readLine's 'capacity - 1' underflows —
            // confirm expectation lines are always far shorter than 1024.
            expected[length] = '\n';
            test_readLine(expected + length + 1, 1024 - length - 1, f);
        }
        if(strcmp(buffer, expected) != 0) {
            printf("error in '%s'\n", input);
            printf("'%s' should be '%s'\n", buffer, expected);
            break;
        }
    }
    fclose(f);
    deleteTokenStream(&tokenStream);
    return b;
}
// Runs the tokenizer test suite over every extension-less file found under
// 'path', comparing against the matching '.tout' expectation files.
static void test_testTokenizer(const char* path) {
    // Reset the summary counters for this suite run.
    tests = 0;
    done = 0;
    test_forEachFile(path, ".tout", test_tokenizerTester);
    // Original C++ implementation kept for reference until the port is done:
    /*forEachFile(path, ".tout", [](const std::string& input, const std::string & output) {
        tests++;
        Tokenizer::if32stream iStream;
        iStream.open(input);
        std::ifstream oStream;
        oStream.open(output);
        if(!iStream.good() || !oStream.good()) {
            return false;
        }
        TokenStream tokens;
        if(Tokenizer::tokenize(tokens, iStream)) {
            return false;
        }
        logger.reset();
        while(tokens.hasToken()) {
            std::string s = tokens.nextTokenString();
            logger.print(&s);
        }
        if(logger.check(input, oStream)) {
            done++;
        }
        return true;
    });
    std::cout << done << " / " << tests << " tokenizer tests succeeded" << std::endl;*/
}
// Public test entry point: runs every available test suite rooted at 'path'.
// Only the tokenizer suite is active; the others are not ported yet.
void startTests(const char* path) {
    test_testTokenizer(path);
    //testCompiler();
    //testOutput();
}
|