Syntactic analyzer implementation with HTML conversion code
[TP_AL_C.git] / lexer / syntactic_analyzer.c
/* Syntactic analyzer */

#include <stdbool.h>
#include <stdarg.h>
#include <stdlib.h>
#include <stdio.h>
#include <wchar.h>

#include "global_vars.h"
#include "lexical_analyzer.h"
#include "print_helper.h"

#define PRINT_TOKEN 0

/* Syntactic analyzer functions implementation */

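/*
 * Grammar recognized by the recursive-descent functions below, as
 * reconstructed from their call structure (each analyze_* function
 * parses one non-terminal and emits the matching HTML to `target`):
 *
 *   AXIOME -> HEAD BODY FIN
 *   HEAD   -> MOTCLE TEXT [ MOTCLE TEXT ] | empty
 *   BODY   -> P S1
 *   S1     -> H1 P S2 S1 | empty
 *   S2     -> H2 P S2 | empty
 *   H1     -> SECTION TEXT
 *   H2     -> SSECTION TEXT
 *   P      -> NPARA MOT TEXT P | empty
 *   TEXT   -> MOT TEXT | empty
 */
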
enum TokenType tokenType;

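/* TEXT -> MOT TEXT | empty: copies consecutive word tokens to the output,
 * separated by spaces, and rejects any follower that is not MOTCLE, NPARA,
 * SECTION, SSECTION or FIN. */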
static void analyze_TEXT() {
#if DEBUG
    fprintf(stdout, "entering %s\n", __func__);
#endif
    if (tokenType == MOT) {
        scanner();
        if (target != NULL) {
            fputws((const wchar_t*)token[tokenFound].value, target);
            fputws(L" ", target);
        }
#if PRINT_TOKEN
        wprint_token_stdout();
#endif
        token[tokenFound].type = tokenTypestr[tokenType];
        tokenFound++;
        analyze_TEXT();
    } else if (tokenType != MOTCLE && tokenType != NPARA && tokenType != SECTION &&
               tokenType != SSECTION && tokenType != FIN) {
        fprintf(stderr, "%s follows error on %s\n", __func__, tokenTypestr[tokenType]);
        fflush(stderr);
        exit(EXIT_FAILURE);
    }
#if DEBUG
    fprintf(stdout, "leaving %s\n", __func__);
#endif
}

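/* P -> NPARA MOT TEXT P | empty: each paragraph marker opens a <p>...</p>
 * block whose words are copied to the output. */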
static void analyze_P() {
#if DEBUG
    fprintf(stdout, "entering %s\n", __func__);
#endif
    if (tokenType == NPARA) {
        scanner();
#if PRINT_TOKEN
        wprint_token_stdout();
#endif
        if (target != NULL) {
            fputws(L"<p>\n", target);
            fputws((const wchar_t*)token[tokenFound].value, target);
            fputws(L" ", target);
        }
        token[tokenFound].type = tokenTypestr[tokenType];
        tokenFound++;
        if (tokenType == MOT) {
            scanner();
#if PRINT_TOKEN
            wprint_token_stdout();
#endif
            if (target != NULL) {
                fputws((const wchar_t*)token[tokenFound].value, target);
                fputws(L" ", target);
            }
            token[tokenFound].type = tokenTypestr[tokenType];
            tokenFound++;
            analyze_TEXT();
            if (target != NULL) {
                fputws(L"\n</p>\n", target);
            }
            analyze_P();
        } else if (tokenType != SECTION && tokenType != SSECTION && tokenType != FIN) {
            fprintf(stderr, "%s follows error on %s\n", __func__, tokenTypestr[tokenType]);
            fflush(stderr);
            exit(EXIT_FAILURE);
        }
    }
#if DEBUG
    fprintf(stdout, "leaving %s\n", __func__);
#endif
}

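/* HEAD -> MOTCLE TEXT [ MOTCLE TEXT ] | empty: the first keyword introduces
 * the title (emitted as <title>...</title>), the optional second one the author. */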
static void analyze_HEAD() {
#if DEBUG
    fprintf(stdout, "entering %s\n", __func__);
#endif
    if (tokenType == MOTCLE) {
        scanner();
#if PRINT_TOKEN
        wprint_token_stdout();
#endif /* PRINT_TOKEN */
        token[tokenFound].type = tokenTypestr[tokenType];
        tokenFound++;
        //FIXME: Check if the MOTCLE token value is set to >Titre
        if (target != NULL) {
            fputws(L"<title>\n", target);
            fputws((const wchar_t*)token[tokenFound].value, target);
            fputws(L" ", target);
        }
        analyze_TEXT();
        if (target != NULL) {
            fputws(L"\n</title>\n", target);
        }
        if (tokenType == MOTCLE) {
            scanner();
#if PRINT_TOKEN
            wprint_token_stdout();
#endif /* PRINT_TOKEN */
            token[tokenFound].type = tokenTypestr[tokenType];
            tokenFound++;
            // The TEXT that follows contains the author
            analyze_TEXT();
        } else if (tokenType != NPARA && tokenType != SECTION && tokenType != FIN) {
            fprintf(stderr, "%s follows error on %s\n", __func__, tokenTypestr[tokenType]);
            fflush(stderr);
            exit(EXIT_FAILURE);
        }
    }
#if DEBUG
    fprintf(stdout, "leaving %s\n", __func__);
#endif
}

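/* H1 -> SECTION TEXT: a section heading, emitted as <h1>...</h1>. */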
static void analyze_H1() {
#if DEBUG
    fprintf(stdout, "entering %s\n", __func__);
#endif
    if (tokenType == SECTION) {
        scanner();
#if PRINT_TOKEN
        wprint_token_stdout();
#endif /* PRINT_TOKEN */
        if (target != NULL) {
            fputws(L"<h1>\n", target);
            fputws((const wchar_t*)token[tokenFound].value, target);
            fputws(L" ", target);
        }
        token[tokenFound].type = tokenTypestr[tokenType];
        tokenFound++;
        analyze_TEXT();
        if (target != NULL) {
            fputws(L"\n</h1>\n", target);
        }
    }
#if DEBUG
    fprintf(stdout, "leaving %s\n", __func__);
#endif
}

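/* H2 -> SSECTION TEXT: a subsection heading, emitted as <h2>...</h2>. */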
static void analyze_H2() {
#if DEBUG
    fprintf(stdout, "entering %s\n", __func__);
#endif
    if (tokenType == SSECTION) {
        scanner();
#if PRINT_TOKEN
        wprint_token_stdout();
#endif /* PRINT_TOKEN */
        if (target != NULL) {
            fputws(L"<h2>\n", target);
            fputws((const wchar_t*)token[tokenFound].value, target);
            fputws(L" ", target);
        }
        token[tokenFound].type = tokenTypestr[tokenType];
        tokenFound++;
        analyze_TEXT();
        if (target != NULL) {
            fputws(L"\n</h2>\n", target);
        }
    }
#if DEBUG
    fprintf(stdout, "leaving %s\n", __func__);
#endif
}

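/* S2 -> H2 P S2 | empty: a possibly empty sequence of subsections,
 * each made of a heading followed by its paragraphs. */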
static void analyze_S2() {
#if DEBUG
    fprintf(stdout, "entering %s\n", __func__);
#endif
    //FIXME: This test is probably useless
    if (tokenType == SSECTION) {
        analyze_H2();
        analyze_P();
        analyze_S2();
    } else if (tokenType != SECTION && tokenType != FIN) {
        fprintf(stderr, "%s follows error on %s\n", __func__, tokenTypestr[tokenType]);
        fflush(stderr);
        exit(EXIT_FAILURE);
    }
#if DEBUG
    fprintf(stdout, "leaving %s\n", __func__);
#endif
}

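/* S1 -> H1 P S2 S1 | empty: a possibly empty sequence of sections,
 * each made of a heading, its paragraphs and its subsections. */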
static void analyze_S1() {
#if DEBUG
    fprintf(stdout, "entering %s\n", __func__);
#endif
    //FIXME: This test is probably useless
    if (tokenType == SECTION) {
        analyze_H1();
        analyze_P();
        analyze_S2();
        analyze_S1();
    } else if (tokenType != FIN) {
        fprintf(stderr, "%s follows error on %s\n", __func__, tokenTypestr[tokenType]);
        fflush(stderr);
        exit(EXIT_FAILURE);
    }
#if DEBUG
    fprintf(stdout, "leaving %s\n", __func__);
#endif
}

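/* BODY -> P S1: leading paragraphs followed by the sections. */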
static void analyze_BODY() {
#if DEBUG
    fprintf(stdout, "entering %s\n", __func__);
#endif
    analyze_P();
    analyze_S1();
#if DEBUG
    fprintf(stdout, "leaving %s\n", __func__);
#endif
}

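/* AXIOME -> HEAD BODY FIN: parser entry point. Primes the lookahead with a
 * first call to scanner(), wraps the head in <head>...</head> and the body
 * in <body>...</body>, and requires the input to end on FIN. */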
void analyze_AXIOME() {
#if DEBUG
    fprintf(stdout, "entering %s\n", __func__);
#endif
    scanner();
    /* print the lexical analysis result */
#if PRINT_TOKEN
    wprint_token_stdout();
#endif /* PRINT_TOKEN */
    token[tokenFound].type = tokenTypestr[tokenType];
    tokenFound++;
    if (target != NULL) {
        fputws(L"<head>\n", target);
    }
    analyze_HEAD();
    if (target != NULL) {
        fputws(L"\n</head>\n", target);
    }
    if (target != NULL) {
        fputws(L"<body>\n", target);
    }
    analyze_BODY();
    if (target != NULL) {
        fputws(L"\n</body>\n", target);
    }
    if (tokenType != FIN) {
        fprintf(stderr, "%s follows error on %s\n", __func__, tokenTypestr[tokenType]);
        fflush(stderr);
        exit(EXIT_FAILURE);
    }
    fprintf(stdout, "successful syntactic analysis\n");
#if DEBUG
    fprintf(stdout, "leaving %s\n", __func__);
#endif
    fflush(stdout);
}
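
/*
 * Illustrative use (hypothetical sketch, not part of this file; the real
 * driver lives elsewhere in the repository). `target` is the FILE* this
 * parser writes its HTML to, presumably declared in global_vars.h, and the
 * lexical analyzer is assumed to have been pointed at the input beforehand:
 *
 *   target = fopen("out.html", "w");   // or NULL to skip HTML generation
 *   analyze_AXIOME();                  // exits on the first syntax error
 *   if (target != NULL) {
 *       fclose(target);
 *   }
 */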