Commit 832e82a

Fix: grammar.g : constrain main function naming (#15)
* Fix: grammar.g : constrain main function naming
* updated lexer support
* Fixed: lexer_test
* Added: codecov.yml

Co-authored-by: Shuvayan Ghosh Dastidar <[email protected]>
1 parent 7a498df commit 832e82a

File tree

6 files changed: +235, -2 lines changed


codecov.yml

Lines changed: 28 additions & 0 deletions
@@ -0,0 +1,28 @@
+codecov:
+  require_ci_to_pass: yes
+
+coverage:
+  precision: 2
+  round: down
+  range: "70...100"
+  status:
+    project:
+      default:
+        target: auto
+        threshold: 5%
+    patch:
+      default:
+        target: 80%
+
+parsers:
+  gcov:
+    branch_detection:
+      conditional: yes
+      loop: yes
+      method: no
+      macro: no
+
+comment:
+  layout: "reach, diff, flags, files, footer"
+  behavior: default
+  require_changes: no
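As configured here (reading the file with standard Codecov semantics, noted only as an aid and not stated anywhere in the commit): `target: auto` with `threshold: 5%` compares project coverage against the base commit and tolerates a drop of up to 5 percentage points, while the patch status requires at least 80% of the changed lines to be covered. For example, if the base commit sits at 85% project coverage, a result of 80% or higher still passes the project check, but a patch whose new lines are only 70% covered fails the patch check.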

src/grammar/grammar.g

Lines changed: 2 additions & 2 deletions
@@ -7,7 +7,7 @@
else float if int void
( ) { } * + - / % ,
<< >> < > <= >= = == != ;
-identifier integer_constant float_constant
+identifier integer_constant float_constant main
%

## Non Terminals

@@ -93,5 +93,5 @@ identifier integer_constant float_constant
<selection_statement> : if ( <expression> ) <statement> else <statement>

## Main
-<program> : <type_specifier> identifier ( ) <compound_statement>
+<program> : <type_specifier> main ( ) <compound_statement>
%
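With this change the entry function must be spelled with the dedicated `main` terminal; an arbitrary identifier is no longer accepted in that position. Illustratively (a made-up input, not a file from this commit), an input of the shape

    int main ( ) { ... }

(with any body that <compound_statement> permits) still derives from <program>, whereas the same program with the function renamed to, say, `compute` no longer does, because only `main` may follow the type specifier.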

src/include/lexer/lexer.h

Lines changed: 15 additions & 0 deletions
@@ -46,6 +46,21 @@ enum Token {
  TOK_LITERAL = -25,    // "c++"
  TOK_CHARACTER = -26,  // 'c'

+  // arithmetic tokens
+
+  TOK_PLUS = -29,      // +
+  TOK_MINUS = -30,     // -
+  TOK_MULTIPLY = -31,  // *
+  TOK_DIVIDE = -32,    // /
+  TOK_MODULUS = -33,   // %
+
+  // RELATIONAL
+
+  TOK_GREATER_THAN_OR_EQUALS = -34,  // >=
+  TOK_LESS_THAN_OR_EQUALS = -35,     // <=
+  TOK_NOT = -36,                     // !
+  TOK_NOT_EQUAL_TO = -37,            // !=
+
  // cout, cin

  TOK_COUT = -27,  // cout
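The new codes follow the header's existing convention of one negative enum value per lexeme; the arithmetic block starts at -29, presumably because -27 and -28 belong to the cout/cin group shown further down. A small sketch, purely illustrative and not part of this commit, of a hypothetical helper that maps the new operator codes back to their source spellings for debugging output:

    #include <iostream>
    #include <string>

    // Hypothetical helper (not in the repository): maps the token codes added
    // above back to the operator text they were produced from.
    std::string TokenSpelling(int token) {
      switch (token) {
        case -29: return "+";   // TOK_PLUS
        case -30: return "-";   // TOK_MINUS
        case -31: return "*";   // TOK_MULTIPLY
        case -32: return "/";   // TOK_DIVIDE
        case -33: return "%";   // TOK_MODULUS
        case -34: return ">=";  // TOK_GREATER_THAN_OR_EQUALS
        case -35: return "<=";  // TOK_LESS_THAN_OR_EQUALS
        case -36: return "!";   // TOK_NOT
        case -37: return "!=";  // TOK_NOT_EQUAL_TO
        default:  return "?";
      }
    }

    int main() {
      std::cout << TokenSpelling(-34) << '\n';  // prints ">="
    }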

src/lexer/lexer.cpp

Lines changed: 28 additions & 0 deletions
@@ -120,6 +120,18 @@ int Lexer::GetToken(std::ifstream &is) {
  } else if (last_char == '.') {
    is.get(last_char);
    ret_token = TOK_DOT;
+  } else if (last_char == '+') {
+    is.get(last_char);
+    ret_token = TOK_PLUS;
+  } else if (last_char == '-') {
+    is.get(last_char);
+    ret_token = TOK_MINUS;
+  } else if (last_char == '*') {
+    is.get(last_char);
+    ret_token = TOK_MULTIPLY;
+  } else if (last_char == '%') {
+    is.get(last_char);
+    ret_token = TOK_MODULUS;
  } else if (last_char == '{') {
    current_nesting_level_++;
    is.get(last_char);

@@ -143,11 +155,22 @@ int Lexer::GetToken(std::ifstream &is) {
    } else {
      ret_token = TOK_ASSIGNMENT;
    }
+  } else if (last_char == '!') {
+    is.get(last_char);
+    if (last_char == '=') {
+      is.get(last_char);
+      ret_token = TOK_NOT_EQUAL_TO;
+    } else {
+      ret_token = TOK_NOT;
+    }
  } else if (last_char == '<') {
    is.get(last_char);
    if (last_char == '<') {
      is.get(last_char);
      ret_token = TOK_LEFT_SHIFT;
+    } else if (last_char == '=') {
+      is.get(last_char);
+      ret_token = TOK_LESS_THAN_OR_EQUALS;
    } else {
      ret_token = TOK_LESS_THAN;
    }

@@ -156,6 +179,9 @@ int Lexer::GetToken(std::ifstream &is) {
    if (last_char == '>') {
      is.get(last_char);
      ret_token = TOK_RIGHT_SHIFT;
+    } else if (last_char == '=') {
+      is.get(last_char);
+      ret_token = TOK_GREATER_THAN_OR_EQUALS;
    } else {
      ret_token = TOK_GREATER_THAN;
    }

@@ -205,6 +231,8 @@ int Lexer::GetToken(std::ifstream &is) {
      } while (!is.eof() && last_char != '\n' && last_char != '\r');
      is.get(last_char);
      ret_token = TOK_COMMENT;
+    } else {
+      ret_token = TOK_DIVIDE;
    }
  } else {
    error_string_ = "Unexpected Token\n";
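Each new two-character operator is recognised with the same one-character-lookahead pattern the lexer already uses for `<<`: consume the first character, then branch on the next one to choose between the long token (`<=`, `>=`, `!=`, or a `//` comment) and the one-character fallback (`<`, `>`, `!`, `/`). A condensed sketch of that approach, written as a standalone toy scanner rather than the repository's Lexer:

    #include <iostream>
    #include <sstream>
    #include <string>
    #include <vector>

    // Toy illustration of one-character lookahead: read a char, peek at the
    // next, and emit either the two-character token or the single-character
    // fallback. Not the repository's Lexer.
    std::vector<std::string> Scan(std::istream &is) {
      std::vector<std::string> tokens;
      char c;
      while (is.get(c)) {
        if (c == '<' || c == '>' || c == '!') {
          if (is.peek() == '=') {
            is.get();  // consume '=' and emit "<=", ">=" or "!="
            tokens.push_back(std::string(1, c) + "=");
          } else {
            tokens.push_back(std::string(1, c));
          }
        } else if (c == '/') {
          if (is.peek() == '/') {
            std::string rest;
            std::getline(is, rest);  // "//" starts a comment: skip to end of line
            tokens.push_back("comment");
          } else {
            tokens.push_back("/");   // otherwise plain division
          }
        }
        // whitespace and everything else is ignored in this sketch
      }
      return tokens;
    }

    int main() {
      std::istringstream in("a <= b / c // trailing note");
      for (const auto &t : Scan(in)) std::cout << t << '\n';  // <=  /  comment
    }

The last hunk adds the divide fallback, so a `/` that does not start a `//` comment now yields TOK_DIVIDE.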

test/lexer/arithmetic.txt

Lines changed: 20 additions & 0 deletions
@@ -0,0 +1,20 @@
+
+
+
+
+int main(){
+    int a = 5;
+    if ( a > 45){
+        cout <<a;
+    }
+    else{
+        if (a !=10 ){
+            a = (a*5) + 4 - 6;
+            // ooops , error not captured by lexer.
+            a = a / 0;
+        }
+        if ( a >= 6){
+            cout << a;
+        }
+    }
+}

test/lexer/lexer_test.cpp

Lines changed: 142 additions & 0 deletions
@@ -311,4 +311,146 @@ TEST(lexer, lexer5) {
  ASSERT_EQ(jucc::lexer::TOK_CURLY_CLOSE, token);
  token = lex.GetToken(is);
  ASSERT_EQ(jucc::lexer::TOK_EOF, token);
+  is.close();
+}
+
+TEST(lexer, lexer6) {
+  std::string filename("../test/lexer/arithmetic.txt");
+  Lexer lex = Lexer();
+
+  std::ifstream is(filename);
+  int token;
+  token = lex.GetToken(is);
+  ASSERT_EQ(jucc::lexer::TOK_INT, token);
+  token = lex.GetToken(is);
+  ASSERT_EQ(jucc::lexer::TOK_IDENTIFIER, token);
+  token = lex.GetToken(is);
+  ASSERT_EQ(jucc::lexer::TOK_PAREN_OPEN, token);
+  token = lex.GetToken(is);
+  ASSERT_EQ(jucc::lexer::TOK_PAREN_CLOSE, token);
+  token = lex.GetToken(is);
+  ASSERT_EQ(jucc::lexer::TOK_CURLY_OPEN, token);
+  token = lex.GetToken(is);
+  ASSERT_EQ(jucc::lexer::TOK_INT, token);
+  token = lex.GetToken(is);
+  ASSERT_EQ(jucc::lexer::TOK_IDENTIFIER, token);
+  token = lex.GetToken(is);
+  ASSERT_EQ(jucc::lexer::TOK_ASSIGNMENT, token);
+  token = lex.GetToken(is);
+  ASSERT_EQ(jucc::lexer::TOK_DECIMAL, token);
+  token = lex.GetToken(is);
+  ASSERT_EQ(jucc::lexer::TOK_SEMICOLON, token);
+  token = lex.GetToken(is);
+  ASSERT_EQ(jucc::lexer::TOK_IF, token);
+  token = lex.GetToken(is);
+  ASSERT_EQ(jucc::lexer::TOK_PAREN_OPEN, token);
+  token = lex.GetToken(is);
+  ASSERT_EQ(jucc::lexer::TOK_IDENTIFIER, token);
+  token = lex.GetToken(is);
+  ASSERT_EQ(jucc::lexer::TOK_GREATER_THAN, token);
+  token = lex.GetToken(is);
+  ASSERT_EQ(jucc::lexer::TOK_DECIMAL, token);
+  token = lex.GetToken(is);
+  ASSERT_EQ(jucc::lexer::TOK_PAREN_CLOSE, token);
+  token = lex.GetToken(is);
+  ASSERT_EQ(jucc::lexer::TOK_CURLY_OPEN, token);
+  token = lex.GetToken(is);
+  ASSERT_EQ(jucc::lexer::TOK_COUT, token);
+  token = lex.GetToken(is);
+  ASSERT_EQ(jucc::lexer::TOK_LEFT_SHIFT, token);
+  token = lex.GetToken(is);
+  ASSERT_EQ(jucc::lexer::TOK_IDENTIFIER, token);
+  token = lex.GetToken(is);
+  ASSERT_EQ(jucc::lexer::TOK_SEMICOLON, token);
+  token = lex.GetToken(is);
+  ASSERT_EQ(jucc::lexer::TOK_CURLY_CLOSE, token);
+  token = lex.GetToken(is);
+  ASSERT_EQ(jucc::lexer::TOK_ELSE, token);
+  token = lex.GetToken(is);
+  ASSERT_EQ(jucc::lexer::TOK_CURLY_OPEN, token);
+  token = lex.GetToken(is);
+  ASSERT_EQ(jucc::lexer::TOK_IF, token);
+  token = lex.GetToken(is);
+  ASSERT_EQ(jucc::lexer::TOK_PAREN_OPEN, token);
+  token = lex.GetToken(is);
+  ASSERT_EQ(jucc::lexer::TOK_IDENTIFIER, token);
+  token = lex.GetToken(is);
+  ASSERT_EQ(jucc::lexer::TOK_NOT_EQUAL_TO, token);
+  token = lex.GetToken(is);
+  ASSERT_EQ(jucc::lexer::TOK_DECIMAL, token);
+  token = lex.GetToken(is);
+  ASSERT_EQ(jucc::lexer::TOK_PAREN_CLOSE, token);
+  token = lex.GetToken(is);
+  ASSERT_EQ(jucc::lexer::TOK_CURLY_OPEN, token);
+  token = lex.GetToken(is);
+  ASSERT_EQ(jucc::lexer::TOK_IDENTIFIER, token);
+  token = lex.GetToken(is);
+  ASSERT_EQ(jucc::lexer::TOK_ASSIGNMENT, token);
+  token = lex.GetToken(is);
+  ASSERT_EQ(jucc::lexer::TOK_PAREN_OPEN, token);
+  token = lex.GetToken(is);
+  ASSERT_EQ(jucc::lexer::TOK_IDENTIFIER, token);
+  token = lex.GetToken(is);
+  ASSERT_EQ(jucc::lexer::TOK_MULTIPLY, token);
+  token = lex.GetToken(is);
+  ASSERT_EQ(jucc::lexer::TOK_DECIMAL, token);
+  token = lex.GetToken(is);
+  ASSERT_EQ(jucc::lexer::TOK_PAREN_CLOSE, token);
+  token = lex.GetToken(is);
+  ASSERT_EQ(jucc::lexer::TOK_PLUS, token);
+  token = lex.GetToken(is);
+  ASSERT_EQ(jucc::lexer::TOK_DECIMAL, token);
+  token = lex.GetToken(is);
+  ASSERT_EQ(jucc::lexer::TOK_MINUS, token);
+  token = lex.GetToken(is);
+  ASSERT_EQ(jucc::lexer::TOK_DECIMAL, token);
+  token = lex.GetToken(is);
+  ASSERT_EQ(jucc::lexer::TOK_SEMICOLON, token);
+  token = lex.GetToken(is);
+  ASSERT_EQ(jucc::lexer::TOK_COMMENT, token);
+  token = lex.GetToken(is);
+  ASSERT_EQ(jucc::lexer::TOK_IDENTIFIER, token);
+  token = lex.GetToken(is);
+  ASSERT_EQ(jucc::lexer::TOK_ASSIGNMENT, token);
+  token = lex.GetToken(is);
+  ASSERT_EQ(jucc::lexer::TOK_IDENTIFIER, token);
+  token = lex.GetToken(is);
+  ASSERT_EQ(jucc::lexer::TOK_DIVIDE, token);
+  token = lex.GetToken(is);
+  ASSERT_EQ(jucc::lexer::TOK_DECIMAL, token);
+  token = lex.GetToken(is);
+  ASSERT_EQ(jucc::lexer::TOK_SEMICOLON, token);
+  token = lex.GetToken(is);
+  ASSERT_EQ(jucc::lexer::TOK_CURLY_CLOSE, token);
+  token = lex.GetToken(is);
+  ASSERT_EQ(jucc::lexer::TOK_IF, token);
+  token = lex.GetToken(is);
+  ASSERT_EQ(jucc::lexer::TOK_PAREN_OPEN, token);
+  token = lex.GetToken(is);
+  ASSERT_EQ(jucc::lexer::TOK_IDENTIFIER, token);
+  token = lex.GetToken(is);
+  ASSERT_EQ(jucc::lexer::TOK_GREATER_THAN_OR_EQUALS, token);
+  token = lex.GetToken(is);
+  ASSERT_EQ(jucc::lexer::TOK_DECIMAL, token);
+  token = lex.GetToken(is);
+  ASSERT_EQ(jucc::lexer::TOK_PAREN_CLOSE, token);
+  token = lex.GetToken(is);
+  ASSERT_EQ(jucc::lexer::TOK_CURLY_OPEN, token);
+  token = lex.GetToken(is);
+  ASSERT_EQ(jucc::lexer::TOK_COUT, token);
+  token = lex.GetToken(is);
+  ASSERT_EQ(jucc::lexer::TOK_LEFT_SHIFT, token);
+  token = lex.GetToken(is);
+  ASSERT_EQ(jucc::lexer::TOK_IDENTIFIER, token);
+  token = lex.GetToken(is);
+  ASSERT_EQ(jucc::lexer::TOK_SEMICOLON, token);
+  token = lex.GetToken(is);
+  ASSERT_EQ(jucc::lexer::TOK_CURLY_CLOSE, token);
+  token = lex.GetToken(is);
+  ASSERT_EQ(jucc::lexer::TOK_CURLY_CLOSE, token);
+  token = lex.GetToken(is);
+  ASSERT_EQ(jucc::lexer::TOK_CURLY_CLOSE, token);
+  token = lex.GetToken(is);
+  ASSERT_EQ(jucc::lexer::TOK_EOF, token);
+  is.close();
}
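lexer6 spells out one ASSERT_EQ per expected token, which keeps failures easy to localize but is long. A sketch of an alternative form (illustrative only, not code from this commit; it assumes Lexer lives in the jucc::lexer namespace alongside the token enum, and the "lexer/lexer.h" include path is a guess at the project's layout) that expresses the same check as a table-driven loop over the expected token codes:

    #include <cstddef>
    #include <fstream>
    #include <vector>

    #include "gtest/gtest.h"
    #include "lexer/lexer.h"  // assumed include path; adjust to the project's layout

    // Illustrative only: the same arithmetic.txt expectations as lexer6,
    // expressed as a loop over a vector of expected token codes.
    TEST(lexer, lexer6_table_driven) {
      using namespace jucc::lexer;
      const std::vector<int> expected = {
          TOK_INT, TOK_IDENTIFIER, TOK_PAREN_OPEN, TOK_PAREN_CLOSE, TOK_CURLY_OPEN,
          TOK_INT, TOK_IDENTIFIER, TOK_ASSIGNMENT, TOK_DECIMAL, TOK_SEMICOLON,
          TOK_IF, TOK_PAREN_OPEN, TOK_IDENTIFIER, TOK_GREATER_THAN, TOK_DECIMAL,
          TOK_PAREN_CLOSE, TOK_CURLY_OPEN, TOK_COUT, TOK_LEFT_SHIFT, TOK_IDENTIFIER,
          TOK_SEMICOLON, TOK_CURLY_CLOSE, TOK_ELSE, TOK_CURLY_OPEN, TOK_IF,
          TOK_PAREN_OPEN, TOK_IDENTIFIER, TOK_NOT_EQUAL_TO, TOK_DECIMAL,
          TOK_PAREN_CLOSE, TOK_CURLY_OPEN, TOK_IDENTIFIER, TOK_ASSIGNMENT,
          TOK_PAREN_OPEN, TOK_IDENTIFIER, TOK_MULTIPLY, TOK_DECIMAL, TOK_PAREN_CLOSE,
          TOK_PLUS, TOK_DECIMAL, TOK_MINUS, TOK_DECIMAL, TOK_SEMICOLON, TOK_COMMENT,
          TOK_IDENTIFIER, TOK_ASSIGNMENT, TOK_IDENTIFIER, TOK_DIVIDE, TOK_DECIMAL,
          TOK_SEMICOLON, TOK_CURLY_CLOSE, TOK_IF, TOK_PAREN_OPEN, TOK_IDENTIFIER,
          TOK_GREATER_THAN_OR_EQUALS, TOK_DECIMAL, TOK_PAREN_CLOSE, TOK_CURLY_OPEN,
          TOK_COUT, TOK_LEFT_SHIFT, TOK_IDENTIFIER, TOK_SEMICOLON, TOK_CURLY_CLOSE,
          TOK_CURLY_CLOSE, TOK_CURLY_CLOSE, TOK_EOF};
      Lexer lex = Lexer();
      std::ifstream is("../test/lexer/arithmetic.txt");
      for (std::size_t i = 0; i < expected.size(); ++i) {
        ASSERT_EQ(expected[i], lex.GetToken(is)) << "mismatch at token index " << i;
      }
      is.close();
    }

Either form exercises the same new tokens; the table-driven version mainly trades per-token assertion lines for brevity.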
