// NOTE(review): the original paste contained stray VCS blame timestamps
// ("2022-03-18 23:16:53 +08:00", "2022-03-23 18:01:59 +08:00") and '|'
// filler lines interleaved with the code; they are preserved here as a
// comment so the translation unit compiles. Every original #include is kept.
#include <iostream>
#include <fstream>
#include <string>
#include <unordered_map>
#include <algorithm>
#include <set>
#include <vector>
#include <iomanip>
#include <sstream>

#include <graphics.h>        // EasyX graphics (Windows) — draws the syntax tree

#include "LexicalAnalyzer.h"
#include "Parsing_RD.h"

using namespace std;
int main()
|
|
|
|
|
{
|
|
|
|
|
LexicalAnalyzer lexicalanalyzer;
|
|
|
|
|
lexicalanalyzer.getTokenList();
|
|
|
|
|
int count = lexicalanalyzer.TokenList.size();
|
|
|
|
|
|
|
|
|
|
//for (int i = 0; i < count; i++)
|
|
|
|
|
//{
|
|
|
|
|
// cout<< "<22><>" << lexicalanalyzer.TokenList[i]->lineShow
|
|
|
|
|
// << "<22><> <" << ha.at(lexicalanalyzer.TokenList[i]->word.Lex)
|
|
|
|
|
// << "," << lexicalanalyzer.TokenList[i]->word.Sem
|
|
|
|
|
// << ">" << endl;
|
|
|
|
|
//}
|
|
|
|
|
|
|
|
|
|
ofstream file;
|
|
|
|
|
file.open("tokenList.txt");
|
|
|
|
|
for (int i = 0; i < count; i++)
|
|
|
|
|
{
|
|
|
|
|
file << lexicalanalyzer.TokenList[i]->lineShow
|
|
|
|
|
<< ' ' << lexicalanalyzer.TokenList[i]->word.Lex
|
|
|
|
|
<< ' ' << lexicalanalyzer.TokenList[i]->word.Sem
|
|
|
|
|
<< endl;
|
|
|
|
|
}
|
|
|
|
|
file.close();
|
|
|
|
|
|
|
|
|
|
RecursiveDescentParsing rd;
|
|
|
|
|
rd.initial();
|
|
|
|
|
TreeNode* root;
|
|
|
|
|
root = rd.parse();
|
2022-03-23 18:01:59 +08:00
|
|
|
|
|
|
|
|
|
{
|
|
|
|
|
initgraph(1300, 640); // <20><><EFBFBD><EFBFBD>ͼ<EFBFBD>ν<EFBFBD><CEBD><EFBFBD>
|
|
|
|
|
for (int y = 0; y <= 640; y++)
|
|
|
|
|
{
|
|
|
|
|
setcolor(RGB(255, 255, 255));
|
|
|
|
|
line(0, y, 1300, y);
|
|
|
|
|
} //<2F><>ɫ<EFBFBD><C9AB><EFBFBD><EFBFBD>
|
|
|
|
|
rd.printTree(root);
|
|
|
|
|
saveimage(_T("treeFile.bmp"));
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
closegraph();
|
2022-03-18 23:16:53 +08:00
|
|
|
|
rd.fileClose();
|
|
|
|
|
cout << "<EFBFBD><EFBFBD><EFBFBD>гɹ<EFBFBD>" << endl;
|
|
|
|
|
}
|