Course_Design_of_Compiling/the_main.cpp

#include <iostream>
#include <fstream>
#include <string>
#include <unordered_map>
#include <algorithm>
#include <set>
#include <vector>
#include <iomanip>
#include <sstream>
#include <graphics.h>   // EasyX graphics library (Windows-only)
#include"LexicalAnalyzer.h"
#include"Parsing_RD.h"
#include"SemanticAnalysis.h"
using namespace std;
int main()
{
/*====== Lexical Analysis ======*/
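// The lexer scans the input source and fills TokenList; judging by how it is used
// below, each entry records the source line (lineShow), a token-category code
// (word.Lex) and the lexeme/value string (word.Sem).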
LexicalAnalyzer lexicalanalyzer;
lexicalanalyzer.getTokenList();
int count = lexicalanalyzer.TokenList.size();
//for (int i = 0; i < count; i++)
//{
//	cout << "Line " << lexicalanalyzer.TokenList[i]->lineShow
//		<< ": <" << ha.at(lexicalanalyzer.TokenList[i]->word.Lex)
//		<< "," << lexicalanalyzer.TokenList[i]->word.Sem
//		<< ">" << endl;
//}
/*====== Syntax Analysis ======*/
ofstream file;
file.open("tokenList.txt");
for (int i = 0; i < count; i++)
{
file << lexicalanalyzer.TokenList[i]->lineShow
<< ' ' << lexicalanalyzer.TokenList[i]->word.Lex
<< ' ' << lexicalanalyzer.TokenList[i]->word.Sem
<< endl;
}
file.close();
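// tokenList.txt now holds one token per line in the form "<line> <Lex> <Sem>";
// presumably this is the file the recursive-descent parser reads back in rd.initial().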
RecursiveDescentParsing rd;
rd.initial();
TreeNode* root;
root = rd.parse();
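// rd.parse() builds the syntax tree and returns its root, which is reused below
// both for drawing the tree and for semantic analysis.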
{
initgraph(1300, 640); // create a 1300x640 graphics window
for (int y = 0; y <= 640; y++)
{
setcolor(RGB(255, 255, 255));
line(0, y, 1300, y);
} // fill the canvas with a white background
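// Note: an assumed simpler alternative to the line-by-line fill above would be
// EasyX's setbkcolor(WHITE); cleardevice();, which clears the whole canvas at once.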
rd.printTree(root);
saveimage(_T("treeFile.bmp"));
}
//rd.out_SynTree(root, 0);
closegraph();
rd.fileClose();
/*====== Semantic Analysis ======*/
SemanticAnalysis sa;
sa.initial();
sa.analyze(root);
sa.PrintSymbTable();
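// SemanticAnalysis traverses the syntax tree and builds the symbol table;
// PrintSymbTable() then writes the table out, presumably to a file opened in
// initial() and closed by fileClose() below.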
sa.fileClose();
cout << "Run finished successfully" << endl;
}