-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy path: main.p
87 lines (71 loc) · 1.92 KB
/
main.p
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
code = ""
def cleanup():
    # Mirror main.py into main.p, converting indentation characters.
    # NOTE(review): .replace(" ", "\t") rewrites EVERY space in the file
    # (inside strings and comments too), not only leading indentation.
    with open("main.py", "r") as src:
        converted = src.read().replace(" ", "\t")
    with open("main.p", "w") as dst:
        dst.write(converted)
cleanup()
# Sample "fard" program fed to the Lexer below (reassigns the module-level
# `code`; only the quoted string and the digit run are actually tokenized).
code = """
# hehe fard dis is a fardy comment
FARDING # begin program
refard FardFunction& # no arguments bcuz fard
fard "fardy string"
fardyNumber = 0
fardyNumber += 1
fard "%fardyNumber"
STOP_FARDING # end program
"""
class Token:
    """A single lexical token: a value, a type tag, and source positions.

    Attributes:
        value: the token's text (str).
        type: token category, e.g. "STRING" or "NUMBER"; defaults to
            "identifier".
        start: index of the token's first character in the source.
        end: index where scanning of the token stopped.
    """

    def __init__(self, token, typef="identifier", start=0, end=0):
        # BUG FIX: the default was the misspelled "identifer".
        self.value = token
        self.start = start
        self.end = end
        self.type = typef

    def __repr__(self):
        # Multi-line banner format used by the driver's print at module level.
        return f"\n-----------\nTOKEN (\n\tType:{self.type},\n\tValue:{self.value},\n\tfrom {self.start} to {self.end}\n)\n-----------"
class Lexer:
    """Character-stream tokenizer producing STRING and NUMBER Tokens.

    Double-quoted strings (a backslash immediately before a quote keeps the
    quote inside the string) and runs of ASCII digits are tokenized; every
    other character is skipped.
    """

    def __init__(self, code):
        self.code = code
        self.ind = 0  # current index into self.code
        self.cur = self.code[self.ind] if self.ind < len(self.code) else None
        # Was "01234567890" (duplicated '0'); membership tests are unchanged.
        self.DIGITS = "0123456789"

    def advance(self):
        """Move to the next character; self.cur becomes None at end of input."""
        self.ind += 1
        self.cur = self.code[self.ind] if self.ind < len(self.code) else None

    def back(self):
        """Step one character backwards; self.cur becomes None before index 0."""
        self.ind -= 1
        self.cur = self.code[self.ind] if self.ind >= 0 else None

    def tokenizeFard(self):
        """Scan the whole input and return the list of Tokens found.

        Returns:
            list[Token]: STRING and NUMBER tokens in source order.
        """
        toks = []
        while self.cur:
            if self.cur == '"':
                strng = ""
                start = self.ind
                self.advance()
                while self.cur:
                    if self.cur == '"':
                        # An escaped quote (preceded by a backslash) stays in
                        # the string; the backslash itself is kept too.
                        if len(strng) > 0 and strng[-1] == "\\":
                            pass
                        else:
                            break
                    strng += self.cur
                    self.advance()
                toks.append(Token(strng, "STRING", start, self.ind))
            elif self.cur in self.DIGITS:
                num = self.cur
                start = self.ind
                self.advance()
                # BUG FIX: guard against None — the original
                # `while self.cur in self.DIGITS` raised TypeError when a
                # number ran to the end of the input.
                while self.cur is not None and self.cur in self.DIGITS:
                    num += self.cur
                    self.advance()
                # BUG FIX: the original appended `strng` (the last string
                # scanned — or a NameError if none had been) instead of the
                # number just read.
                toks.append(Token(num, "NUMBER", start, self.ind))
                # The digit loop stopped on the first non-digit; step back so
                # the loop-end advance() revisits it instead of skipping it.
                self.back()
            self.advance()
        return toks
# Tokenize the sample program and print each token's banner repr.
lexer = Lexer(code)
print("\n".join(repr(token) for token in lexer.tokenizeFard()))
#fine