#include "lexical_analyzer.h"
#include "print_helper.h"
-#define PRINT_TOKEN 0
+#define PRINT_TOKEN 1
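+/* When PRINT_TOKEN is set, each token is echoed on stdout as it is consumed */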
/* Syntactic analyzer functions implementation */
fprintf(stdout, "entering %s\n", __func__);
#endif
if (tokenType == MOT) {
- scanner();
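+ /* print and write the current token value before scanner() advances to the next token */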
+ #if PRINT_TOKEN
+ wprint_ctoken(stdout);
+ #endif
if (target != NULL) {
- fputws((const wchar_t*)token[tokenFound].value, target);
+ fputws((const wchar_t*)tokenValue, target);
fputws(L" ", target);
}
- #if PRINT_TOKEN
- wprint_token(stdout);
- #endif
- token[tokenFound].type = tokenTypestr[tokenType];
- tokenFound++;
+ scanner();
analyze_TEXT();
} else if (tokenType != MOTCLE && tokenType != NPARA && tokenType != SECTION && \
tokenType != SSECTION && tokenType != FIN) {
fprintf(stdout, "entering %s\n", __func__);
#endif
if (tokenType == NPARA) {
- scanner();
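+ /* new paragraph: emit the opening <p> tag, then let analyze_TEXT write the words */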
#if PRINT_TOKEN
- wprint_token(stdout);
+ wprint_ctoken(stdout);
#endif
+ scanner();
if (target != NULL) {
fputws(L"<p>\n", target);
- fputws((const wchar_t*)token[tokenFound].value, target);
- fputws(L" ", target);
}
- token[tokenFound].type = tokenTypestr[tokenType];
- tokenFound++;
- if (tokenType == MOT) {
- scanner();
- #if PRINT_TOKEN
- wprint_token(stdout);
- #endif
- if (target != NULL) {
- fputws((const wchar_t*)token[tokenFound].value, target);
- fputws(L" ", target);
- }
- token[tokenFound].type = tokenTypestr[tokenType];
- tokenFound++;
- analyze_TEXT();
- if (target != NULL) {
- fputws(L"\n</p>\n", target);
- }
- analyze_P();
+ analyze_TEXT();
+ if (target != NULL) {
+ fputws(L"\n</p>\n", target);
+ }
+ analyze_P();
} else if (tokenType != SECTION && tokenType != SSECTION && tokenType != FIN) {
fprintf(stderr, "%s follows error on %s\n", __func__, tokenTypestr[tokenType]);
fflush(stderr);
exit(EXIT_FAILURE);
}
- }
#if DEBUG
fprintf(stdout, "leaving %s\n", __func__);
#endif
fprintf(stdout, "entering %s\n", __func__);
#endif
if (tokenType == MOTCLE) {
- scanner();
- #if PRINT_TOKEN
- wprint_token(stdout);
- #endif /* PRINT_TOKEN */
- token[tokenFound].type = tokenTypestr[tokenType];
- tokenFound++;
//FIXME: Check if the MOTCLE token value is set to >Titre
if (target != NULL) {
fputws(L"<title>\n", target);
- fputws((const wchar_t*)token[tokenFound].value, target);
fputws(L" ", target);
}
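+ /* consume the MOTCLE (expected to be >Titre); analyze_TEXT then writes the title words */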
+ scanner();
analyze_TEXT();
if (target != NULL) {
fputws(L"\n</title>\n", target);
}
if (tokenType == MOTCLE) {
- scanner();
#if PRINT_TOKEN
- wprint_token(stdout);
+ wprint_ctoken(stdout);
#endif /* PRINT_TOKEN */
- token[tokenFound].type = tokenTypestr[tokenType];
- tokenFound++;
+ scanner();
//The text contains the author
+ fputws(L"<meta name=\"author\" content=\"", target);
analyze_TEXT();
+ fputws(L"\">", target);
} else if (tokenType != NPARA && tokenType != SECTION && tokenType != FIN) {
fprintf(stderr, "%s follows error on %s\n", __func__, tokenTypestr[tokenType]);
fflush(stderr);
fprintf(stdout, "entering %s\n", __func__);
#endif
if (tokenType == SECTION) {
- scanner();
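+ /* section heading: emit <h1> around the heading text produced by analyze_TEXT */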
#if PRINT_TOKEN
- wprint_token(stdout);
+ wprint_ctoken(stdout);
#endif /* PRINT_TOKEN */
if (target != NULL) {
fputws(L"<h1>\n", target);
- fputws((const wchar_t*)token[tokenFound].value, target);
- fputws(L" ", target);
}
- token[tokenFound].type = tokenTypestr[tokenType];
- tokenFound++;
+ scanner();
analyze_TEXT();
if (target != NULL) {
fputws(L"\n</h1>\n", target);
}
+ } else {
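+ /* a section heading must start with a SECTION token */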
+ fprintf(stderr, "%s error\n", __func__);
+ fflush(stderr);
+ exit(EXIT_FAILURE);
}
#if DEBUG
fprintf(stdout, "leaving %s\n", __func__);
fprintf(stdout, "entering %s\n", __func__);
#endif
if (tokenType == SSECTION) {
- scanner();
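+ /* sub-section heading: emit <h2> around the heading text produced by analyze_TEXT */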
#if PRINT_TOKEN
- wprint_token(stdout);
+ wprint_ctoken(stdout);
#endif /* PRINT_TOKEN */
if (target != NULL) {
fputws(L"<h2>\n", target);
- fputws((const wchar_t*)token[tokenFound].value, target);
- fputws(L" ", target);
}
- token[tokenFound].type = tokenTypestr[tokenType];
- tokenFound++;
+ scanner();
analyze_TEXT();
if (target != NULL) {
fputws(L"\n</h2>\n", target);
#if DEBUG
fprintf(stdout, "entering %s\n", __func__);
#endif
- //FIXME: This test is probably useless
if (tokenType == SSECTION) {
analyze_H2();
analyze_P();
#if DEBUG
fprintf(stdout, "entering %s\n", __func__);
#endif
- //FIXME: This test is probably useless
if (tokenType == SECTION) {
analyze_H1();
analyze_P();
#if DEBUG
fprintf(stdout, "entering %s\n", __func__);
#endif
- scanner();
/* print the lexical analysis result */
#if PRINT_TOKEN
- wprint_token(stdout);
+ wprint_ctoken(stdout);
#endif /* PRINT_TOKEN */
- token[tokenFound].type = tokenTypestr[tokenType];
- tokenFound++;
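+ /* open the HTML head element in the output */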
if (target != NULL) {
fputws(L"<head>\n", target);
}