/* lexer/syntactic_analyzer.c — repository TP_AL_C (git.piment-noir.org) */
1 /* Syntactic analyzer */
2
#include <stdarg.h>
#include <stdbool.h>
#include <stdio.h>
#include <stdlib.h>

#include "global_vars.h"
#include "lexical_analyzer.h"
#include "print_helper.h"
10
11 #define PRINT_TOKEN 0
12
13 /* Syntactic analyzer functions implementation */
14
15 enum TokenType tokenType;
16
17 static void analyze_TEXT() {
18 #if DEBUG
19 fprintf(stdout, "entering %s\n", __func__);
20 #endif
21 if (tokenType == MOT) {
22 scanner();
23 #if PRINT_TOKEN
24 wprint_token();
25 #endif
26 token[tokenFound].type = tokenTypestr[tokenType];
27 tokenFound++;
28 analyze_TEXT();
29 } else if (tokenType != MOTCLE && tokenType != NPARA && tokenType != SECTION && \
30 tokenType != SSECTION && tokenType != FIN) {
31 fprintf(stderr, "%s follows error on %s\n", __func__, tokenTypestr[tokenType]);
32 exit(EXIT_FAILURE);
33 }
34 #if DEBUG
35 fprintf(stdout, "leaving %s\n", __func__);
36 #endif
37 }
38
39 static void analyze_P() {
40 #if DEBUG
41 fprintf(stdout, "entering %s\n", __func__);
42 #endif
43 if (tokenType == NPARA) {
44 scanner();
45 #if PRINT_TOKEN
46 wprint_token();
47 #endif
48 token[tokenFound].type = tokenTypestr[tokenType];
49 tokenFound++;
50 if (tokenType == MOT) {
51 scanner();
52 #if PRINT_TOKEN
53 wprint_token();
54 #endif
55 token[tokenFound].type = tokenTypestr[tokenType];
56 tokenFound++;
57 analyze_TEXT();
58 analyze_P();
59 } else if (tokenType != SECTION && tokenType != SSECTION && tokenType != FIN) {
60 fprintf(stderr, "%s follows error on %s\n", __func__, tokenTypestr[tokenType]);
61 exit(EXIT_FAILURE);
62 }
63 } else if (tokenType != SECTION && tokenType != SSECTION && tokenType != FIN) {
64 fprintf(stderr, "%s follows error on %s\n", __func__, tokenTypestr[tokenType]);
65 exit(EXIT_FAILURE);
66 }
67 #if DEBUG
68 fprintf(stdout, "leaving %s\n", __func__);
69 #endif
70 }
71
72 static void analyze_HEAD() {
73 #if DEBUG
74 fprintf(stdout, "entering %s\n", __func__);
75 #endif
76 if (tokenType == MOTCLE) {
77 scanner();
78 #if PRINT_TOKEN
79 wprint_token();
80 #endif
81 token[tokenFound].type = tokenTypestr[tokenType];
82 tokenFound++;
83 analyze_TEXT();
84 if (tokenType == MOTCLE) {
85 scanner();
86 #if PRINT_TOKEN
87 wprint_token();
88 #endif /* PRINT_TOKEN */
89 token[tokenFound].type = tokenTypestr[tokenType];
90 tokenFound++;
91 analyze_TEXT();
92 } else if (tokenType != NPARA && tokenType != SECTION && tokenType != FIN) {
93 fprintf(stderr, "%s follows error on %s\n", __func__, tokenTypestr[tokenType]);
94 exit(EXIT_FAILURE);
95 }
96 } else if (tokenType != NPARA && tokenType != SECTION && tokenType != FIN) {
97 fprintf(stderr, "%s follows error on %s\n", __func__, tokenTypestr[tokenType]);
98 exit(EXIT_FAILURE);
99 }
100 #if DEBUG
101 fprintf(stdout, "leaving %s\n", __func__);
102 #endif
103 }
104
105 static void analyze_H1() {
106 #if DEBUG
107 fprintf(stdout, "entering %s\n", __func__);
108 #endif
109 if (tokenType == SECTION) {
110 scanner();
111 #if PRINT_TOKEN
112 wprint_token();
113 #endif /* PRINT_TOKEN */
114 token[tokenFound].type = tokenTypestr[tokenType];
115 tokenFound++;
116 analyze_TEXT();
117 }
118 #if DEBUG
119 fprintf(stdout, "leaving %s\n", __func__);
120 #endif
121 }
122
123 static void analyze_H2() {
124 #if DEBUG
125 fprintf(stdout, "entering %s\n", __func__);
126 #endif
127 if (tokenType == SSECTION) {
128 scanner();
129 #if PRINT_TOKEN
130 wprint_token();
131 #endif /* PRINT_TOKEN */
132 token[tokenFound].type = tokenTypestr[tokenType];
133 tokenFound++;
134 analyze_TEXT();
135 }
136 #if DEBUG
137 fprintf(stdout, "leaving %s\n", __func__);
138 #endif
139 }
140
141 static void analyze_S2() {
142 #if DEBUG
143 fprintf(stdout, "entering %s\n", __func__);
144 #endif
145 if (tokenType == SSECTION) {
146 analyze_H2();
147 analyze_P();
148 analyze_S2();
149 } else if (tokenType != SECTION && tokenType != FIN) {
150 fprintf(stderr, "%s follows error on %s\n", __func__, tokenTypestr[tokenType]);
151 exit(EXIT_FAILURE);
152 }
153 #if DEBUG
154 fprintf(stdout, "leaving %s\n", __func__);
155 #endif
156 }
157
158 static void analyze_S1() {
159 #if DEBUG
160 fprintf(stdout, "entering %s\n", __func__);
161 #endif
162 if (tokenType == SECTION) {
163 analyze_H1();
164 analyze_P();
165 analyze_S2();
166 analyze_S1();
167 } else if (tokenType != FIN) {
168 fprintf(stderr, "%s follows error on %s\n", __func__, tokenTypestr[tokenType]);
169 exit(EXIT_FAILURE);
170 }
171 #if DEBUG
172 fprintf(stdout, "leaving %s\n", __func__);
173 #endif
174 }
175
/*
 * BODY -> P S1
 *
 * The document body: leading paragraphs followed by the section chain.
 * All error handling is delegated to analyze_P()/analyze_S1().
 */
static void analyze_BODY(void) {
#if DEBUG
    fprintf(stdout, "entering %s\n", __func__);
#endif
    analyze_P();
    analyze_S1();
#if DEBUG
    fprintf(stdout, "leaving %s\n", __func__);
#endif
}
186
187 void analyze_AXIOME() {
188 #if DEBUG
189 fprintf(stdout, "entering %s\n", __func__);
190 #endif
191 scanner();
192 /* print the lexical analysis result */
193 #if PRINT_TOKEN
194 wprint_token();
195 #endif /* PRINT_TOKEN */
196 token[tokenFound].type = tokenTypestr[tokenType];
197 tokenFound++;
198 analyze_HEAD();
199 analyze_BODY();
200 if (tokenType != FIN) {
201 fprintf(stderr, "%s follows error on %s\n", __func__, tokenTypestr[tokenType]);
202 exit(EXIT_FAILURE);
203 }
204 fprintf(stdout, "successful syntactic analysis\n");
205 #if DEBUG
206 fprintf(stdout, "leaving %s\n", __func__);
207 #endif
208 }