Avoid filling token values twice.
/* Syntactic analyzer */

#include <stdbool.h>
#include <stdio.h>
#include <stdlib.h>
#include <wchar.h>

#include "global_vars.h"
#include "lexical_analyzer.h"
#include "print_helper.h"

#define PRINT_TOKEN 1

/* Syntactic analyzer functions implementation */

enum TokenType tokenType;

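/* TEXT -> MOT TEXT | epsilon (production inferred from the recursion below).
 * Consumes a run of MOT tokens, echoing each word plus a trailing space to
 * the target; any other token must belong to TEXT's follow set (MOTCLE,
 * NPARA, SECTION, SSECTION, FIN), otherwise the parse is aborted. */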
static void analyze_TEXT() {
#if DEBUG
    fprintf(stdout, "entering %s\n", __func__);
#endif
    if (tokenType == MOT) {
#if PRINT_TOKEN
        wprint_ctoken(stdout);
#endif
        if (target != NULL) {
            fputws((const wchar_t*)tokenValue, target);
            fputws(L" ", target);
        }
        scanner();
        analyze_TEXT();
    } else if (tokenType != MOTCLE && tokenType != NPARA && tokenType != SECTION &&
               tokenType != SSECTION && tokenType != FIN) {
        fprintf(stderr, "%s: FOLLOW error on %s\n", __func__, tokenTypestr[tokenType]);
        fflush(stderr);
        exit(EXIT_FAILURE);
    }
#if DEBUG
    fprintf(stdout, "leaving %s\n", __func__);
#endif
}

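/* P -> NPARA TEXT P | epsilon (as suggested by the checks below).
 * Each paragraph marker opens a <p> element, parses its text, closes the
 * element, then recurses; the follow set is SECTION, SSECTION, FIN. */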
static void analyze_P() {
#if DEBUG
    fprintf(stdout, "entering %s\n", __func__);
#endif
    if (tokenType == NPARA) {
#if PRINT_TOKEN
        wprint_ctoken(stdout);
#endif /* PRINT_TOKEN */
        if (target != NULL) {
            fputws(L"<p>\n", target);
        }
        scanner();
        analyze_TEXT();
        if (target != NULL) {
            fputws(L"\n</p>\n", target);
        }
        analyze_P();
    } else if (tokenType != SECTION && tokenType != SSECTION && tokenType != FIN) {
        fprintf(stderr, "%s: FOLLOW error on %s\n", __func__, tokenTypestr[tokenType]);
        fflush(stderr);
        exit(EXIT_FAILURE);
    }
#if DEBUG
    fprintf(stdout, "leaving %s\n", __func__);
#endif
}

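/* HEAD handles the document head: a ">Titre" keyword followed by its TEXT,
 * optionally followed by an ">Auteur" keyword and its TEXT (structure read
 * off the checks below). The title becomes a <title> element, the author an
 * author <meta> tag. */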
static void analyze_HEAD() {
#if DEBUG
    fprintf(stdout, "entering %s\n", __func__);
#endif
    if (tokenType == MOTCLE && wcscmp(L">Titre", (const wchar_t*)tokenValue) == 0) {
        if (target != NULL) {
            fputws(L"<title>\n", target);
            fputws(L" ", target);
        }
        scanner();
        analyze_TEXT();
        if (target != NULL) {
            fputws(L"\n</title>\n", target);
        }
        if (tokenType == MOTCLE && wcscmp(L">Auteur", (const wchar_t*)tokenValue) == 0) {
#if PRINT_TOKEN
            wprint_ctoken(stdout);
#endif /* PRINT_TOKEN */
            if (target != NULL) {
                fputws(L"<meta name=\"author\" content=\"", target);
            }
            scanner();
            analyze_TEXT();
            if (target != NULL) {
                fputws(L"\">", target);
            }
        } else if (tokenType != NPARA && tokenType != SECTION && tokenType != FIN) {
            fprintf(stderr, "%s: FOLLOW error on %s\n", __func__, tokenTypestr[tokenType]);
            fflush(stderr);
            exit(EXIT_FAILURE);
        }
    }
#if DEBUG
    fprintf(stdout, "leaving %s\n", __func__);
#endif
}

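/* H1 -> SECTION TEXT (inferred from the check below).
 * A section marker becomes an <h1> element wrapping its text; any other
 * token here is a syntax error. */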
static void analyze_H1() {
#if DEBUG
    fprintf(stdout, "entering %s\n", __func__);
#endif
    if (tokenType == SECTION) {
#if PRINT_TOKEN
        wprint_ctoken(stdout);
#endif /* PRINT_TOKEN */
        if (target != NULL) {
            fputws(L"<h1>\n", target);
        }
        scanner();
        analyze_TEXT();
        if (target != NULL) {
            fputws(L"\n</h1>\n", target);
        }
    } else {
        fprintf(stderr, "%s error\n", __func__);
        fflush(stderr);
        exit(EXIT_FAILURE);
    }
#if DEBUG
    fprintf(stdout, "leaving %s\n", __func__);
#endif
}

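/* H2 -> SSECTION TEXT (inferred from the check below).
 * A subsection marker becomes an <h2> element wrapping its text; any other
 * token is simply left for the caller, which only invokes this rule when an
 * SSECTION token is current. */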
static void analyze_H2() {
#if DEBUG
    fprintf(stdout, "entering %s\n", __func__);
#endif
    if (tokenType == SSECTION) {
#if PRINT_TOKEN
        wprint_ctoken(stdout);
#endif /* PRINT_TOKEN */
        if (target != NULL) {
            fputws(L"<h2>\n", target);
        }
        scanner();
        analyze_TEXT();
        if (target != NULL) {
            fputws(L"\n</h2>\n", target);
        }
    }
#if DEBUG
    fprintf(stdout, "leaving %s\n", __func__);
#endif
}

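/* S2 -> H2 P S2 | epsilon (inferred from the recursion below).
 * A list of subsections, each a heading followed by its paragraphs; the
 * follow set is SECTION, FIN. */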
static void analyze_S2() {
#if DEBUG
    fprintf(stdout, "entering %s\n", __func__);
#endif
    if (tokenType == SSECTION) {
        analyze_H2();
        analyze_P();
        analyze_S2();
    } else if (tokenType != SECTION && tokenType != FIN) {
        fprintf(stderr, "%s: FOLLOW error on %s\n", __func__, tokenTypestr[tokenType]);
        fflush(stderr);
        exit(EXIT_FAILURE);
    }
#if DEBUG
    fprintf(stdout, "leaving %s\n", __func__);
#endif
}

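/* S1 -> H1 P S2 S1 | epsilon (inferred from the recursion below).
 * A list of sections, each a heading, its paragraphs and its subsections;
 * the follow set is FIN. */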
static void analyze_S1() {
#if DEBUG
    fprintf(stdout, "entering %s\n", __func__);
#endif
    if (tokenType == SECTION) {
        analyze_H1();
        analyze_P();
        analyze_S2();
        analyze_S1();
    } else if (tokenType != FIN) {
        fprintf(stderr, "%s: FOLLOW error on %s\n", __func__, tokenTypestr[tokenType]);
        fflush(stderr);
        exit(EXIT_FAILURE);
    }
#if DEBUG
    fprintf(stdout, "leaving %s\n", __func__);
#endif
}

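/* BODY -> P S1 (inferred from the calls below): leading paragraphs followed
 * by the list of sections. */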
static void analyze_BODY() {
#if DEBUG
    fprintf(stdout, "entering %s\n", __func__);
#endif
    analyze_P();
    analyze_S1();
#if DEBUG
    fprintf(stdout, "leaving %s\n", __func__);
#endif
}

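/* AXIOME is the start symbol: HEAD then BODY, terminated by FIN (as the
 * calls and the final check below suggest). It also wraps the output in the
 * <head> and <body> HTML elements. */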
void analyze_AXIOME() {
#if DEBUG
    fprintf(stdout, "entering %s\n", __func__);
#endif
    /* print the lexical analysis result */
#if PRINT_TOKEN
    wprint_ctoken(stdout);
#endif /* PRINT_TOKEN */
    if (target != NULL) {
        fputws(L"<head>\n", target);
    }
    analyze_HEAD();
    if (target != NULL) {
        fputws(L"\n</head>\n", target);
    }
    if (target != NULL) {
        fputws(L"<body>\n", target);
    }
    analyze_BODY();
    if (target != NULL) {
        fputws(L"\n</body>\n", target);
    }
#if PRINT_TOKEN
    wprint_ctoken(stdout);
#endif /* PRINT_TOKEN */
    if (tokenType != FIN) {
        fprintf(stderr, "%s: FOLLOW error on %s\n", __func__, tokenTypestr[tokenType]);
        fflush(stderr);
        exit(EXIT_FAILURE);
    }
    fprintf(stdout, "successful syntactic analysis\n");
#if DEBUG
    fprintf(stdout, "leaving %s\n", __func__);
#endif
    fflush(stdout);
}