Test.c

#include <stdio.h>
#include <stdbool.h>
#include <stdlib.h>
#include <dirent.h>
#include <string.h>
#include <unistd.h>

#include "test/Test.h"
#include "utils/Path.h"
#include "tokenizer/TokenStream.h"
#include "tokenizer/Tokenizer.h"

// Counters for the test summary: 'done' counts passed tests, 'tests' counts all tests run.
static unsigned int done = 0;
static unsigned int tests = 0;
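
// Recursively walks the directory tree rooted at 'path'. For every regular file
// without an extension it builds the input path and the matching expected-output
// path (same name plus 'ending') and calls the test callback 'f'.
// Returns true if a filesystem error occurs or the callback reports an error.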
static bool test_checkPath(Path* path, const char* ending, bool (*f)(const char*, const char*)) {
    DIR* dir = opendir(path->path);
    if(dir == NULL) {
        fprintf(stderr, "cannot open '%s': ", path->path);
        perror("");
        return true;
    }
    struct dirent* entry = NULL;
    while(true) {
        entry = readdir(dir);
        if(entry == NULL) {
            break;
        } else if(strcmp(entry->d_name, ".") == 0 || strcmp(entry->d_name, "..") == 0) {
            continue;
        }
        if(entry->d_type == DT_DIR) {
            if(enterPath(path, entry->d_name)) {
                closedir(dir);
                return true;
            }
            if(test_checkPath(path, ending, f)) {
                leavePath(path);
                closedir(dir);
                return true;
            }
            leavePath(path);
        } else if(entry->d_type == DT_REG && strchr(entry->d_name, '.') == NULL) {
            Path inputPath;
            initPath(&inputPath, path->path);
            enterFile(&inputPath, entry->d_name, "");
            Path outputPath;
            initPath(&outputPath, path->path);
            enterFile(&outputPath, entry->d_name, ending);
            if(f(inputPath.path, outputPath.path)) {
                closedir(dir);
                return true;
            }
        }
    }
    if(closedir(dir)) {
        fprintf(stderr, "cannot close '%s': ", path->path);
        perror("");
        return true;
    }
    return false;
}
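
// Convenience wrapper: starts the recursive walk at 'strPath'.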
static bool test_forEachFile(const char* strPath, const char* ending, bool (*f)(const char*, const char*)) {
    Path path;
    initPath(&path, strPath);
    return test_checkPath(&path, ending, f);
}
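
// Reads one line from 'f' into 'buffer' (at most capacity - 1 characters),
// stopping at a newline or end of file; the newline itself is not stored.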
static void test_readLine(char* buffer, size_t capacity, FILE* f) {
    size_t index = 0;
    while(index < capacity - 1) {
        int c = fgetc(f);
        if(c == EOF || c == '\n') {
            break;
        }
        buffer[index++] = c;
    }
    buffer[index] = '\0';
}
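
// Test callback: tokenizes the file at 'input' and compares every token's string
// representation with the corresponding line(s) of the expected-output file 'output'.
// A token that contains a newline spans two lines in the expected-output file.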
static bool test_tokenizerTester(const char* input, const char* output) {
    tests++;
    FILE* f = fopen(output, "r");
    if(f == NULL) {
        fprintf(stderr, "cannot open '%s': ", output);
        perror("");
        return false;
    }
    TokenStream* tokenStream = newTokenStream();
    bool b = tokenize(tokenStream, input);
    bool mismatch = false;
    while(hasToken(tokenStream)) {
        char buffer[64];
        nextTokenString(tokenStream, buffer, sizeof(buffer));
        char expected[1024];
        test_readLine(expected, sizeof(expected), f);
        if(strchr(buffer, '\n') != NULL) {
            // the token contains a newline, so its expected text spans two lines
            size_t length = strlen(expected);
            expected[length] = '\n';
            test_readLine(expected + length + 1, sizeof(expected) - length - 1, f);
        }
        if(strcmp(buffer, expected) != 0) {
            printf("error in '%s'\n", input);
            printf("'%s' should be '%s'\n", buffer, expected);
            mismatch = true;
            break;
        }
    }
    if(!b && !mismatch) {
        done++;
    }
    fclose(f);
    deleteTokenStream(&tokenStream);
    return b;
}
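
// Runs all tokenizer tests found under 'path' and prints a pass/total summary.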
static void test_testTokenizer(const char* path) {
    done = 0;
    tests = 0;
    test_forEachFile(path, ".tout", test_tokenizerTester);
    printf("%u / %u tokenizer tests succeeded\n", done, tests);
    /* old C++ implementation, kept for reference:
    forEachFile(path, ".tout", [](const std::string& input, const std::string& output) {
        tests++;
        Tokenizer::if32stream iStream;
        iStream.open(input);
        std::ifstream oStream;
        oStream.open(output);
        if(!iStream.good() || !oStream.good()) {
            return false;
        }
        TokenStream tokens;
        if(Tokenizer::tokenize(tokens, iStream)) {
            return false;
        }
        logger.reset();
        while(tokens.hasToken()) {
            std::string s = tokens.nextTokenString();
            logger.print(&s);
        }
        if(logger.check(input, oStream)) {
            done++;
        }
        return true;
    });
    std::cout << done << " / " << tests << " tokenizer tests succeeded" << std::endl; */
}
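
// Entry point of the test suite.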
void startTests(const char* path) {
    test_testTokenizer(path);
    //testCompiler();
    //testOutput();
}