-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathInterpreter.cpp
More file actions
221 lines (198 loc) · 4.79 KB
/
Interpreter.cpp
File metadata and controls
221 lines (198 loc) · 4.79 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
/**
 * @cite The Interpreter Pattern states that in order to process/interpret some textual information, the text has to be converted into tokens, and then the tokens have to be parsed into something that can be operated on.
 *
 * @brief The Interpreter Pattern can be exemplified by an Arithmetic Expression Interpreter that takes expressions as string input and produces the results.
 */
#include <cctype>
#include <iostream>
#include <memory>
#include <sstream>
#include <string>
#include <vector>
/**
 * @brief Keeps track of an individual token in the text.
 */
struct Token
{
    // Denotes all the allowed tokens in the text.
    enum TokenType
    {
        integer,
        plus,
        minus,
        lparen,
        rparen
    } type;
    std::string token; // The raw text of the token.

    /**
     * @brief Builds a token from its type and raw text.
     * @param type  token category
     * @param token raw text; sink parameter — taken by value and moved
     *              into place instead of copied.
     */
    Token(TokenType type, std::string token) : type(type), token(std::move(token)) {}

    // Streams the token text single-quoted, e.g. '13'. Takes the token by
    // const reference so const Tokens are printable too (non-const callers
    // still bind, so this is backward-compatible).
    friend std::ostream &operator<<(std::ostream &oss, const Token &t)
    {
        return oss << "'" << t.token << "'";
    }
};
/**
* @brief Travereses the Text and divides it into the required Tokens
*/
std::vector<Token> lexer(std::string input)
{
std::vector<Token> res;
for (int i = 0; i < input.size(); i++)
{
switch (input[i])
{
case '+':
{
res.push_back(Token(Token::plus, "+"));
break;
}
case '-':
{
res.push_back(Token(Token::minus, "-"));
break;
}
case '(':
{
res.push_back(Token(Token::lparen, "("));
break;
}
case ')':
{
res.push_back(Token(Token::rparen, ")"));
break;
}
default:
{
std::ostringstream oss;
oss << input[i];
for (int j = i + 1; j < input.size(); j++)
{
if (isdigit(input[j]))
{
oss << input[j];
++i;
}
else
{
res.push_back(Token{Token::integer, oss.str()});
break;
}
}
}
}
}
return res;
}
/**
 * @brief Abstraction for the OOP-based notation of all the tokens.
 * Provides evaluation functionality to all the tokens.
 */
struct Element
{
    // Virtual destructor: this is a polymorphic base, so deleting a derived
    // object through an Element* must be well-defined (it was UB before).
    virtual ~Element() = default;
    // Evaluates this node of the expression tree to its integer value.
    virtual int eval() = 0;
};
/**
 * @brief Wraps an integer literal from the text as an evaluatable object.
 * Leaf node of the expression tree.
 */
struct Integer : Element
{
    int value{0}; // the literal's numeric value
    Integer(int val) : value(val) {}
    // A literal evaluates to itself.
    int eval() override { return value; }
};
/**
* @brief Stores all the binary operator present in the text as an object.
*/
struct BinaryOperation : Element
{
Element *lhs, *rhs;
enum Type
{
addition,
subtraction
} type;
BinaryOperation() {}
int eval() override
{
switch (type)
{
case addition:
return lhs->eval() + rhs->eval();
case subtraction:
return lhs->eval() - rhs->eval();
}
return 0;
}
};
/**
 * @brief Parses the tokens into their OOP counterparts that can be evaluated.
 *
 * Builds a single left-to-right BinaryOperation; a parenthesised group is
 * parsed recursively as a sub-expression. The matching ')' is now found by
 * depth counting, so nested parentheses work (the original stopped at the
 * first ')' it saw). Unary operations, e.g. negative numbers, remain
 * unsupported.
 *
 * @param tokens token stream produced by lexer()
 * @return heap-allocated expression tree; the caller owns it (nothing here
 *         frees the nodes).
 */
Element *parse(std::vector<Token> &tokens)
{
    auto res = new BinaryOperation();
    auto have_lhs{false};
    for (std::size_t i = 0; i < tokens.size(); i++)
    {
        auto &token = tokens[i];
        switch (token.type)
        {
        case Token::integer:
        {
            int val = std::stoi(token.token);
            auto integer = new Integer{val};
            // First operand seen fills lhs; every later one overwrites rhs.
            if (!have_lhs)
            {
                res->lhs = integer;
                have_lhs = true;
            }
            else
            {
                res->rhs = integer;
            }
            break;
        }
        case Token::plus:
        {
            res->type = BinaryOperation::addition;
            break;
        }
        case Token::minus:
        {
            res->type = BinaryOperation::subtraction;
            break;
        }
        case Token::lparen:
        {
            // Find the matching ')' by depth counting (handles nesting).
            std::size_t j = i + 1;
            int depth = 1;
            for (; j < tokens.size(); j++)
            {
                if (tokens[j].type == Token::lparen)
                    ++depth;
                else if (tokens[j].type == Token::rparen && --depth == 0)
                    break;
            }
            // Copy the tokens strictly between the parentheses. Iterator
            // arithmetic is safe even when the range is empty or j == size(),
            // unlike the original &tokens[i + 1] / &tokens[j] (UB at size()).
            std::vector<Token> sub_expr(tokens.begin() + (i + 1), tokens.begin() + j);
            auto sub_expr_parsed = parse(sub_expr);
            if (have_lhs)
            {
                res->rhs = sub_expr_parsed;
            }
            else
            {
                res->lhs = sub_expr_parsed;
                have_lhs = true;
            }
            i = j; // resume after the matching ')'
            break;
        }
        case Token::rparen:
            // Consumed by the lparen case above; nothing to do here.
            break;
        }
    }
    return res;
}
/**
 * @brief Demo driver: tokenizes a fixed expression, echoes the token
 * stream, then parses and prints the evaluated result.
 */
int main()
{
    std::string input("(13-10)-(12-8)");
    auto tokens = lexer(input);
    // Echo each token quoted, separated by spaces.
    for (auto &tok : tokens)
        std::cout << tok << " ";
    // Build the expression tree and print "<input>=<value>".
    auto parsed = parse(tokens);
    std::cout << "\n" << input << "=" << parsed->eval();
    return 0;
}