#include <iostream>
#include <fstream>
#include <cstring>
#include <vector>
#include <dirent.h>

#include "test/Test.h"
#include "test/TestLogger.h"
#include "tokenizer/Tokenizer.h"
#include "tokenizer/TokenStream.h"

// Number of tests that passed so far.
static unsigned int done = 0;
// Total number of tests that were executed.
static unsigned int tests = 0;
// Collects test output and compares it against the expected-output files.
static TestLogger logger;
// Set to false by a test callback to abort the remaining directory walk.
static bool run = true;
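// Recursively walks "path" and invokes "f" for every regular file whose name
// contains no '.' (these extension-less files are the test inputs). The second
// argument passed to "f" is the input path with "ending" appended, i.e. the
// file holding the expected output. Iteration stops as soon as a callback
// returns false.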
static void forEachFile(const std::string& path, const std::string& ending,
                        bool (*f)(const std::string&, const std::string&)) {
    DIR* dir = opendir(path.c_str());
    struct dirent* entry = nullptr;
    if(dir != nullptr) {
        while(run && (entry = readdir(dir)) != nullptr) {
            // Skip the current and parent directory entries.
            if(strcmp(entry->d_name, ".") == 0 || strcmp(entry->d_name, "..") == 0) {
                continue;
            }
            if(entry->d_type == DT_DIR) {
                // Descend into subdirectories.
                forEachFile(path + "/" + entry->d_name, ending, f);
            } else if(entry->d_type == DT_REG) {
                // Only files without an extension count as test inputs; their
                // expected output sits next to them with "ending" appended.
                if(strchr(entry->d_name, '.') == nullptr) {
                    std::string pathInputFile = path + "/" + entry->d_name;
                    std::string pathOutputFile = pathInputFile + ending;
                    run = f(pathInputFile, pathOutputFile);
                }
            }
        }
        closedir(dir);
    }
}
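// Runs the tokenizer over every test input below "path" and compares the
// resulting token stream, token by token, with the matching ".tout" file.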
static void testTokenizer(const char* path) {
    done = 0;
    tests = 0;
    run = true;
    forEachFile(path, ".tout", [](const std::string& input, const std::string& output) {
        tests++;
        // Open the test input and the expected-output file.
        Tokenizer::if32stream iStream;
        iStream.open(input);
        std::ifstream oStream;
        oStream.open(output);
        if(!iStream.good() || !oStream.good()) {
            // A missing or unreadable file aborts the whole test run.
            return false;
        }
        TokenStream tokens;
        if(Tokenizer::tokenize(tokens, iStream)) {
            // A true return from tokenize() is treated as failure; abort the run.
            return false;
        }
        // Print every token into the logger and compare the collected output
        // with the expected ".tout" file.
        logger.reset();
        while(tokens.hasToken()) {
            std::string s = tokens.nextTokenString();
            logger.print(&s);
        }
        if(logger.check(input, oStream)) {
            done++;
        }
        return true;
    });
    std::cout << done << " / " << tests << " tokenizer tests succeeded" << std::endl;
}
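// The compiler and output tests below are still the old Java implementations,
// kept here commented out, presumably as a reference for a later C++ port.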
//void Test::testCompiler()
//{
//    done = 0;
//    tests = 0;
//    final Compiler c = new Compiler();
//    forEachFile(new File("./test"), ".cout", (inFile, checkFile) ->
//    {
//        tests++;
//        try
//        {
//            try(FileInputStream in = new FileInputStream(inFile))
//            {
//                Tokenizer tokenizer = new Tokenizer();
//                LOGGER.reset();
//                Instruction[] instr = c.compile(tokenizer.tokenize(in),
//                        new HashMap<>(), new HashMap<>(), new HashMap<>(),
//                        new HashMap<>());
//                for(Instruction i : instr)
//                {
//                    LOGGER.print(i.toString(), null, null, null, null, -1);
//                }
//                if(LOGGER.check(checkFile))
//                {
//                    done++;
//                }
//            }
//        }
//        catch(Exception ex)
//        {
//            System.out.println("_________________________________________");
//            System.out.println(inFile + " failed:");
//            System.out.println(ex.getMessage());
//            ex.printStackTrace();
//        }
//    });
//    System.out.println(String.format("%d / %d compiler tests succeeded", done, tests));
//}
//
//void Test::testOutput()
//{
//    done = 0;
//    tests = 0;
//    forEachFile(new File("./test"), ".out", (inFile, checkFile) ->
//    {
//        tests++;
//
//        LOGGER.reset();
//
//        Script sc = new Script(PARSER, null, null, inFile.getName(), inFile.getPath());
//        sc.run();
//
//        if(LOGGER.check(checkFile))
//        {
//            done++;
//        }
//    });
//    System.out.println(String.format("%d / %d output tests succeeded", done, tests));
//}
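// Entry point of the test suite; currently only the tokenizer tests are active.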
void Test::start(const char* path) {
    testTokenizer(path);
    //testCompiler();
    //testOutput();
}
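// Hypothetical usage sketch (not part of this file): a test runner could call
//
//     int main() {
//         Test::start("./test"); // "./test" mirrors the directory used by the old Java tests
//         return 0;
//     }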