/* Syntactic analyzer */

#include <stdbool.h>
#include <stdio.h>
#include <stdlib.h>
#include <wchar.h>

#include "global_vars.h"
#include "print_helper.h"
#include "syntactic_analyzer.h"
#include "lexical_analyzer.h"

#define PRINT_TOKEN 1

/* Syntactic analyzer functions implementation */

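/* Type of the current token: the parser's one-token lookahead, advanced by scanner(). */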
enum TokenType tokenType;

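/* TEXT -> MOT TEXT | epsilon: copies each word (plus a trailing space) to the target. */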
static void analyze_TEXT() {
#if DEBUG
    fprintf(stdout, "entering %s\n", __func__);
#endif
    if (tokenType == MOT) {
#if PRINT_TOKEN
        wprint_ctoken(stdout);
#endif
        if (target != NULL) {
            fputws((const wchar_t*)tokenValue, target);
            fputws(L" ", target);
        }
        scanner();
        analyze_TEXT();
    } else if (tokenType != MOTCLE && tokenType != NPARA && tokenType != SECTION &&
               tokenType != SSECTION && tokenType != FIN) {
        fprintf(stderr, "%s follows error on %s\n", __func__, tokenTypestr[tokenType]);
        fflush(stderr);
        exit(EXIT_FAILURE);
    }
#if DEBUG
    fprintf(stdout, "leaving %s\n", __func__);
#endif
}

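/* P -> NPARA TEXT P | epsilon: each paragraph is emitted between <p> and </p>. */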
static void analyze_P() {
#if DEBUG
    fprintf(stdout, "entering %s\n", __func__);
#endif
    if (tokenType == NPARA) {
#if PRINT_TOKEN
        wprint_ctoken(stdout);
#endif /* PRINT_TOKEN */
        if (target != NULL) {
            fputws(L"<p>\n", target);
        }
        scanner();
        analyze_TEXT();
        if (target != NULL) {
            fputws(L"\n</p>\n", target);
        }
        analyze_P();
    } else if (tokenType != SECTION && tokenType != SSECTION && tokenType != FIN) {
        fprintf(stderr, "%s follows error on %s\n", __func__, tokenTypestr[tokenType]);
        fflush(stderr);
        exit(EXIT_FAILURE);
    }
#if DEBUG
    fprintf(stdout, "leaving %s\n", __func__);
#endif
}

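/* HEAD: an optional '>Titre' TEXT, optionally followed by '>Auteur' TEXT; emits <title> and an author <meta> tag. */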
static void analyze_HEAD() {
#if DEBUG
    fprintf(stdout, "entering %s\n", __func__);
#endif
    if (tokenType == MOTCLE && wcscmp(L">Titre", (const wchar_t*)tokenValue) == 0) {
        if (target != NULL) {
            fputws(L"<title>\n", target);
            fputws(L" ", target);
        }
        scanner();
        analyze_TEXT();
        if (target != NULL) {
            fputws(L"\n</title>\n", target);
        }
        if (tokenType == MOTCLE && wcscmp(L">Auteur", (const wchar_t*)tokenValue) == 0) {
#if PRINT_TOKEN
            wprint_ctoken(stdout);
#endif /* PRINT_TOKEN */
            if (target != NULL) {
                fputws(L"<meta name=\"author\" content=\"", target);
            }
            scanner();
            analyze_TEXT();
            if (target != NULL) {
                fputws(L"\">", target);
            }
        } else if (tokenType != NPARA && tokenType != SECTION && tokenType != FIN) {
            fprintf(stderr, "%s follows error on %s\n", __func__, tokenTypestr[tokenType]);
            fflush(stderr);
            exit(EXIT_FAILURE);
        }
    }
#if DEBUG
    fprintf(stdout, "leaving %s\n", __func__);
#endif
}

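/* H1 -> SECTION TEXT: a section title, emitted as an <h1> heading. */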
static void analyze_H1() {
#if DEBUG
    fprintf(stdout, "entering %s\n", __func__);
#endif
    if (tokenType == SECTION) {
#if PRINT_TOKEN
        wprint_ctoken(stdout);
#endif /* PRINT_TOKEN */
        if (target != NULL) {
            fputws(L"<h1>\n", target);
        }
        scanner();
        analyze_TEXT();
        if (target != NULL) {
            fputws(L"\n</h1>\n", target);
        }
    } else {
        fprintf(stderr, "%s error\n", __func__);
        fflush(stderr);
        exit(EXIT_FAILURE);
    }
#if DEBUG
    fprintf(stdout, "leaving %s\n", __func__);
#endif
}

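/* H2 -> SSECTION TEXT: a subsection title, emitted as an <h2> heading. */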
static void analyze_H2() {
#if DEBUG
    fprintf(stdout, "entering %s\n", __func__);
#endif
    if (tokenType == SSECTION) {
#if PRINT_TOKEN
        wprint_ctoken(stdout);
#endif /* PRINT_TOKEN */
        if (target != NULL) {
            fputws(L"<h2>\n", target);
        }
        scanner();
        analyze_TEXT();
        if (target != NULL) {
            fputws(L"\n</h2>\n", target);
        }
    }
#if DEBUG
    fprintf(stdout, "leaving %s\n", __func__);
#endif
}

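/* S2 -> H2 P S2 | epsilon: a list of subsections, each a heading followed by paragraphs. */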
static void analyze_S2() {
#if DEBUG
    fprintf(stdout, "entering %s\n", __func__);
#endif
    if (tokenType == SSECTION) {
        analyze_H2();
        analyze_P();
        analyze_S2();
    } else if (tokenType != SECTION && tokenType != FIN) {
        fprintf(stderr, "%s follows error on %s\n", __func__, tokenTypestr[tokenType]);
        fflush(stderr);
        exit(EXIT_FAILURE);
    }
#if DEBUG
    fprintf(stdout, "leaving %s\n", __func__);
#endif
}

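/* S1 -> H1 P S2 S1 | epsilon: a list of sections, each with paragraphs and subsections. */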
static void analyze_S1() {
#if DEBUG
    fprintf(stdout, "entering %s\n", __func__);
#endif
    if (tokenType == SECTION) {
        analyze_H1();
        analyze_P();
        analyze_S2();
        analyze_S1();
    } else if (tokenType != FIN) {
        fprintf(stderr, "%s follows error on %s\n", __func__, tokenTypestr[tokenType]);
        fflush(stderr);
        exit(EXIT_FAILURE);
    }
#if DEBUG
    fprintf(stdout, "leaving %s\n", __func__);
#endif
}

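/* BODY -> P S1: leading paragraphs followed by the sections of the document. */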
static void analyze_BODY() {
#if DEBUG
    fprintf(stdout, "entering %s\n", __func__);
#endif
    analyze_P();
    analyze_S1();
#if DEBUG
    fprintf(stdout, "leaving %s\n", __func__);
#endif
}

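/* AXIOME -> HEAD BODY FIN: top-level rule; wraps the output in <head> and <body> elements. */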
void analyze_AXIOME(void) {
#if DEBUG
    fprintf(stdout, "entering %s\n", __func__);
#endif
    /* print the lexical analysis result */
#if PRINT_TOKEN
    wprint_ctoken(stdout);
#endif /* PRINT_TOKEN */
    if (target != NULL) {
        fputws(L"<head>\n", target);
    }
    analyze_HEAD();
    if (target != NULL) {
        fputws(L"\n</head>\n", target);
    }
    if (target != NULL) {
        fputws(L"<body>\n", target);
    }
    analyze_BODY();
    if (target != NULL) {
        fputws(L"\n</body>\n", target);
    }
#if PRINT_TOKEN
    wprint_ctoken(stdout);
#endif /* PRINT_TOKEN */
    if (tokenType != FIN) {
        fprintf(stderr, "%s follows error on %s\n", __func__, tokenTypestr[tokenType]);
        fflush(stderr);
        exit(EXIT_FAILURE);
    }
    fprintf(stdout, "successful syntactic analysis\n");
#if DEBUG
    fprintf(stdout, "leaving %s\n", __func__);
#endif
    fflush(stdout);
}