test.py
import glob
import os
import subprocess as sp
import sys
import unittest

from src.generator import Generator
from src.grapher import Grapher
from src.lexer import Lexer
from src.parser import Parser
from src.symbolizer import Symbolizer


class Tests(unittest.TestCase):
    def test_lexer(self):
        for path in glob.glob("test/grader/*/src.pas"):
            with open(path, 'r') as source:
                try:
                    text = source.read()
                    lexer = Lexer(text)
                    lexer.lex()
                except Exception as ex:
                    self.fail(f"Failed to lex {path}\n{ex}")
self.assertTrue(True)
    def test_parser(self):
        for path in glob.glob("test/grader/*/src.pas"):
            with open(path, 'r') as source:
                try:
                    text = source.read()
                    lexer = Lexer(text)
                    tokens = lexer.lex()
                    parser = Parser(tokens)
                    parser.parse()
                except Exception as ex:
                    self.fail(f"Failed to parse {path}\n{ex}")
self.assertTrue(True)
def test_grapher(self):
for path in glob.glob("test/grader/*/src.pas"):
with open(path, 'r') as source:
print(f"testing {path}")
text = source.read()
lexer = Lexer(text)
tokens = lexer.lex()
parser = Parser(tokens)
ast = parser.parse()
grapher = Grapher(ast)
dot = grapher.graph()
grapher.save()
self.assertTrue(True)
def test_symbolizer(self):
for path in glob.glob("test/grader/*/src.pas"):
with open(path, 'r') as source:
print(f"testing {path}")
text = source.read()
lexer = Lexer(text)
tokens = lexer.lex()
parser = Parser(tokens)
ast = parser.parse()
symbolizer = Symbolizer(ast)
symbolizer.symbolize()
self.assertTrue(True)
def test_generator(self):
for path in glob.glob("test/grader/*/src.pas"):
            dir = os.path.dirname(path)
            # Every case except the one whose directory name ends in '16' is
            # expected to fail compilation or execution.
            should_fail = not dir.endswith('16')
with open(path, 'r') as source:
print(f"testing {path}")
text = source.read()
lexer = Lexer(text)
tokens = lexer.lex()
parser = Parser(tokens)
ast = parser.parse()
symbolizer = Symbolizer(ast)
symbolizer.symbolize()
                generator = Generator(ast, symbolizer)
                generator.generate()
                sol = os.path.join(dir, 'src.c')
                out = os.path.join(dir, 'out')
                if os.path.exists(sol):
                    os.remove(sol)
                if os.path.exists(out):
                    os.remove(out)
                generator.write(sol)
                # Compile the generated C source with gcc; a failed compile (or
                # a non-zero exit code) is only acceptable for should_fail cases.
                try:
                    p = sp.Popen(['gcc', sol, '-o', out], stdout=sp.PIPE)
                    ret_code = p.wait()
                    self.assertEqual(ret_code, 0)
                    p.stdout.close()
                except Exception:
                    self.assertFalse(should_fail)
                # Run the compiled binary on each numbered .in file and compare
                # its stdout against the matching .out file.
                for i in range(1, 5):
                    in_file = os.path.join(dir, str(i) + '.in')
                    out_file = os.path.join(dir, str(i) + '.out')
                    with open(in_file, 'r') as fin, open(out_file, 'r') as fout:
                        in_text = fin.read()
                        expected = fout.read()
                    try:
                        of = sp.Popen([out], stdin=sp.PIPE, stdout=sp.PIPE)
                        # communicate() feeds stdin and collects stdout in one
                        # call, avoiding the pipe deadlock a manual
                        # write/wait/read sequence can hit on large outputs.
                        stdout_bytes, _ = of.communicate(in_text.encode('utf-8'))
                        self.assertEqual(of.returncode, 0)
                        if not should_fail:
                            self.assertEqual(stdout_bytes.decode('utf-8'), expected)
                    except Exception:
                        self.assertFalse(should_fail)
self.assertTrue(True)
#Tests().test_grapher()
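
# Optional entry point (not part of the graded interface): lets the suite be
# run directly with `python test.py`, equivalent to `python -m unittest test`.
if __name__ == '__main__':
    unittest.main()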