Add some fflush() calls to avoid losing some characters
[TP_AL_C.git] / lexer / syntactic_analyzer.c
1 /* Syntactic analyzer */
2
#include <stdbool.h>
#include <stdarg.h>
#include <stdio.h>
#include <stdlib.h>

#include "global_vars.h"
#include "lexical_analyzer.h"
#include "print_helper.h"
10
11 #define PRINT_TOKEN 0
12
13 /* Syntactic analyzer functions implementation */
14
15 enum TokenType tokenType;
16
17 static void analyze_TEXT() {
18 #if DEBUG
19 fprintf(stdout, "entering %s\n", __func__);
20 #endif
21 if (tokenType == MOT) {
22 scanner();
23 #if PRINT_TOKEN
24 wprint_token();
25 #endif
26 token[tokenFound].type = tokenTypestr[tokenType];
27 tokenFound++;
28 analyze_TEXT();
29 } else if (tokenType != MOTCLE && tokenType != NPARA && tokenType != SECTION && \
30 tokenType != SSECTION && tokenType != FIN) {
31 fprintf(stderr, "%s follows error on %s\n", __func__, tokenTypestr[tokenType]);
32 fflush(stderr);
33 exit(EXIT_FAILURE);
34 }
35 #if DEBUG
36 fprintf(stdout, "leaving %s\n", __func__);
37 #endif
38 }
39
40 static void analyze_P() {
41 #if DEBUG
42 fprintf(stdout, "entering %s\n", __func__);
43 #endif
44 if (tokenType == NPARA) {
45 scanner();
46 #if PRINT_TOKEN
47 wprint_token();
48 #endif
49 token[tokenFound].type = tokenTypestr[tokenType];
50 tokenFound++;
51 if (tokenType == MOT) {
52 scanner();
53 #if PRINT_TOKEN
54 wprint_token();
55 #endif
56 token[tokenFound].type = tokenTypestr[tokenType];
57 tokenFound++;
58 analyze_TEXT();
59 analyze_P();
60 } else if (tokenType != SECTION && tokenType != SSECTION && tokenType != FIN) {
61 fprintf(stderr, "%s follows error on %s\n", __func__, tokenTypestr[tokenType]);
62 fflush(stderr);
63 exit(EXIT_FAILURE);
64 }
65 } else if (tokenType != SECTION && tokenType != SSECTION && tokenType != FIN) {
66 fprintf(stderr, "%s follows error on %s\n", __func__, tokenTypestr[tokenType]);
67 fflush(stderr);
68 exit(EXIT_FAILURE);
69 }
70 #if DEBUG
71 fprintf(stdout, "leaving %s\n", __func__);
72 #endif
73 }
74
75 static void analyze_HEAD() {
76 #if DEBUG
77 fprintf(stdout, "entering %s\n", __func__);
78 #endif
79 if (tokenType == MOTCLE) {
80 scanner();
81 #if PRINT_TOKEN
82 wprint_token();
83 #endif
84 token[tokenFound].type = tokenTypestr[tokenType];
85 tokenFound++;
86 analyze_TEXT();
87 if (tokenType == MOTCLE) {
88 scanner();
89 #if PRINT_TOKEN
90 wprint_token();
91 #endif /* PRINT_TOKEN */
92 token[tokenFound].type = tokenTypestr[tokenType];
93 tokenFound++;
94 analyze_TEXT();
95 } else if (tokenType != NPARA && tokenType != SECTION && tokenType != FIN) {
96 fprintf(stderr, "%s follows error on %s\n", __func__, tokenTypestr[tokenType]);
97 fflush(stderr);
98 exit(EXIT_FAILURE);
99 }
100 } else if (tokenType != NPARA && tokenType != SECTION && tokenType != FIN) {
101 fprintf(stderr, "%s follows error on %s\n", __func__, tokenTypestr[tokenType]);
102 fflush(stderr);
103 exit(EXIT_FAILURE);
104 }
105 #if DEBUG
106 fprintf(stdout, "leaving %s\n", __func__);
107 #endif
108 }
109
110 static void analyze_H1() {
111 #if DEBUG
112 fprintf(stdout, "entering %s\n", __func__);
113 #endif
114 if (tokenType == SECTION) {
115 scanner();
116 #if PRINT_TOKEN
117 wprint_token();
118 #endif /* PRINT_TOKEN */
119 token[tokenFound].type = tokenTypestr[tokenType];
120 tokenFound++;
121 analyze_TEXT();
122 }
123 #if DEBUG
124 fprintf(stdout, "leaving %s\n", __func__);
125 #endif
126 }
127
128 static void analyze_H2() {
129 #if DEBUG
130 fprintf(stdout, "entering %s\n", __func__);
131 #endif
132 if (tokenType == SSECTION) {
133 scanner();
134 #if PRINT_TOKEN
135 wprint_token();
136 #endif /* PRINT_TOKEN */
137 token[tokenFound].type = tokenTypestr[tokenType];
138 tokenFound++;
139 analyze_TEXT();
140 }
141 #if DEBUG
142 fprintf(stdout, "leaving %s\n", __func__);
143 #endif
144 }
145
146 static void analyze_S2() {
147 #if DEBUG
148 fprintf(stdout, "entering %s\n", __func__);
149 #endif
150 if (tokenType == SSECTION) {
151 analyze_H2();
152 analyze_P();
153 analyze_S2();
154 } else if (tokenType != SECTION && tokenType != FIN) {
155 fprintf(stderr, "%s follows error on %s\n", __func__, tokenTypestr[tokenType]);
156 fflush(stderr);
157 exit(EXIT_FAILURE);
158 }
159 #if DEBUG
160 fprintf(stdout, "leaving %s\n", __func__);
161 #endif
162 }
163
164 static void analyze_S1() {
165 #if DEBUG
166 fprintf(stdout, "entering %s\n", __func__);
167 #endif
168 if (tokenType == SECTION) {
169 analyze_H1();
170 analyze_P();
171 analyze_S2();
172 analyze_S1();
173 } else if (tokenType != FIN) {
174 fprintf(stderr, "%s follows error on %s\n", __func__, tokenTypestr[tokenType]);
175 fflush(stderr);
176 exit(EXIT_FAILURE);
177 }
178 #if DEBUG
179 fprintf(stdout, "leaving %s\n", __func__);
180 #endif
181 }
182
/*
 * BODY -> P S1
 *
 * Parses the document body: the leading paragraphs, then the sections.
 * Error handling is delegated to analyze_P/analyze_S1, which exit on a
 * syntax error.
 *
 * Only change from the original: `(void)` replaces the old-style empty
 * parameter list.
 */
static void analyze_BODY(void) {
#if DEBUG
    fprintf(stdout, "entering %s\n", __func__);
#endif
    analyze_P();
    analyze_S1();
#if DEBUG
    fprintf(stdout, "leaving %s\n", __func__);
#endif
}
193
194 void analyze_AXIOME() {
195 #if DEBUG
196 fprintf(stdout, "entering %s\n", __func__);
197 #endif
198 scanner();
199 /* print the lexical analysis result */
200 #if PRINT_TOKEN
201 wprint_token();
202 #endif /* PRINT_TOKEN */
203 token[tokenFound].type = tokenTypestr[tokenType];
204 tokenFound++;
205 analyze_HEAD();
206 analyze_BODY();
207 if (tokenType != FIN) {
208 fprintf(stderr, "%s follows error on %s\n", __func__, tokenTypestr[tokenType]);
209 fflush(stderr);
210 exit(EXIT_FAILURE);
211 }
212 fprintf(stdout, "successful syntactic analysis\n");
213 #if DEBUG
214 fprintf(stdout, "leaving %s\n", __func__);
215 #endif
216 fflush(stdout);
217 }