X-Git-Url: https://git.piment-noir.org/?p=TP_AL_C.git;a=blobdiff_plain;f=lexer%2Fsyntactic_analyzer.c;h=0f1b1818b3a6c2ced9a65c8cc912fe3bd61385e2;hp=2d3880ff963fa1cccac02f2c3371d9d989b751cd;hb=2d2a5978e84c354bf8167a3ab0b1088d264049bf;hpb=e70feb8ccefed05878cb3f714ea34ad8f8d8ddfa

diff --git a/lexer/syntactic_analyzer.c b/lexer/syntactic_analyzer.c
index 2d3880f..0f1b181 100644
--- a/lexer/syntactic_analyzer.c
+++ b/lexer/syntactic_analyzer.c
@@ -1,14 +1,14 @@
 /* Syntactic analyzer */

 #include
-#include
 #include

 #include "global_vars.h"
-#include "lexical_analyzer.h"
 #include "print_helper.h"
+#include "syntactic_analyzer.h"
+#include "lexical_analyzer.h"

-#define PRINT_TOKEN 0
+#define PRINT_TOKEN 1

 /* Syntactic analyzer functions implementation */

@@ -19,16 +19,19 @@ static void analyze_TEXT() {
     fprintf(stdout, "entering %s\n", __func__);
     #endif
     if (tokenType == MOT) {
-        scanner();
         #if PRINT_TOKEN
-        wprint_token();
+        wprint_ctoken(stdout);
         #endif
-        token[tokenFound].type = tokenTypestr[tokenType];
-        tokenFound++;
+        if (target != NULL) {
+            fputws((const wchar_t*)tokenValue, target);
+            fputws(L" ", target);
+        }
+        scanner();
         analyze_TEXT();
     } else if (tokenType != MOTCLE && tokenType != NPARA && tokenType != SECTION && \
                tokenType != SSECTION && tokenType != FIN) {
         fprintf(stderr, "%s follows error on %s\n", __func__, tokenTypestr[tokenType]);
+        fflush(stderr);
         exit(EXIT_FAILURE);
     }
     #if DEBUG
@@ -41,29 +44,23 @@ static void analyze_P() {
     fprintf(stdout, "entering %s\n", __func__);
     #endif
     if (tokenType == NPARA) {
-        scanner();
         #if PRINT_TOKEN
-        wprint_token();
-        #endif
-        token[tokenFound].type = tokenTypestr[tokenType];
-        tokenFound++;
-        if (tokenType == MOT) {
-            scanner();
-            #if PRINT_TOKEN
-            wprint_token();
-            #endif
-            token[tokenFound].type = tokenTypestr[tokenType];
-            tokenFound++;
-            analyze_TEXT();
-            analyze_P();
+        wprint_ctoken(stdout);
+        #endif /* PRINT_TOKEN */
+        if (target != NULL) {
+            fputws(L"<p>\n", target);
+        }
+        scanner();
+        analyze_TEXT();
+        if (target != NULL) {
+            fputws(L"\n</p>\n", target);
+        }
+        analyze_P();
         } else if (tokenType != SECTION && tokenType != SSECTION && tokenType != FIN) {
             fprintf(stderr, "%s follows error on %s\n", __func__, tokenTypestr[tokenType]);
+            fflush(stderr);
             exit(EXIT_FAILURE);
         }
-    } else if (tokenType != SECTION && tokenType != SSECTION && tokenType != FIN) {
-        fprintf(stderr, "%s follows error on %s\n", __func__, tokenTypestr[tokenType]);
-        exit(EXIT_FAILURE);
-    }
     #if DEBUG
     fprintf(stdout, "leaving %s\n", __func__);
     #endif
@@ -73,29 +70,29 @@ static void analyze_HEAD() {
     #if DEBUG
     fprintf(stdout, "entering %s\n", __func__);
     #endif
-    if (tokenType == MOTCLE) {
+    if (tokenType == MOTCLE && wcscmp(L">Titre", (const wchar_t*)tokenValue) == 0) {
+        if (target != NULL) {
+            fputws(L"<title>\n", target);
+            fputws(L" ", target);
+        }
         scanner();
-        #if PRINT_TOKEN
-        wprint_token();
-        #endif
-        token[tokenFound].type = tokenTypestr[tokenType];
-        tokenFound++;
         analyze_TEXT();
-        if (tokenType == MOTCLE) {
-            scanner();
+        if (target != NULL) {
+            fputws(L"\n</title>\n", target);
+        }
+        if (tokenType == MOTCLE && wcscmp(L">Auteur", (const wchar_t*)tokenValue) == 0) {
             #if PRINT_TOKEN
-            wprint_token();
+            wprint_ctoken(stdout);
             #endif /* PRINT_TOKEN */
-            token[tokenFound].type = tokenTypestr[tokenType];
-            tokenFound++;
+            fputws(L"", target);
         } else if (tokenType != NPARA && tokenType != SECTION && tokenType != FIN) {
             fprintf(stderr, "%s follows error on %s\n", __func__, tokenTypestr[tokenType]);
+            fflush(stderr);
             exit(EXIT_FAILURE);
         }
-    } else if (tokenType != NPARA && tokenType != SECTION && tokenType != FIN) {
-        fprintf(stderr, "%s follows error on %s\n", __func__, tokenTypestr[tokenType]);
-        exit(EXIT_FAILURE);
-    }
     }
     #if DEBUG
     fprintf(stdout, "leaving %s\n", __func__);
     #endif
@@ -107,13 +104,21 @@ static void analyze_H1() {
     fprintf(stdout, "entering %s\n", __func__);
     #endif
     if (tokenType == SECTION) {
-        scanner();
         #if PRINT_TOKEN
-        wprint_token();
+        wprint_ctoken(stdout);
         #endif /* PRINT_TOKEN */
-        token[tokenFound].type = tokenTypestr[tokenType];
-        tokenFound++;
+        if (target != NULL) {
+            fputws(L"<h1>\n", target);
+        }
+        scanner();
         analyze_TEXT();
+        if (target != NULL) {
+            fputws(L"\n</h1>\n", target);
+        }
+    } else {
+        fprintf(stderr, "%s error\n", __func__);
+        fflush(stderr);
+        exit(EXIT_FAILURE);
     }
     #if DEBUG
     fprintf(stdout, "leaving %s\n", __func__);
     #endif
@@ -125,13 +130,17 @@ static void analyze_H2() {
     fprintf(stdout, "entering %s\n", __func__);
     #endif
     if (tokenType == SSECTION) {
-        scanner();
         #if PRINT_TOKEN
-        wprint_token();
+        wprint_ctoken(stdout);
         #endif /* PRINT_TOKEN */
-        token[tokenFound].type = tokenTypestr[tokenType];
-        tokenFound++;
+        if (target != NULL) {
+            fputws(L"<h2>\n", target);
+        }
+        scanner();
         analyze_TEXT();
+        if (target != NULL) {
+            fputws(L"\n</h2>\n", target);
+        }
     }
     #if DEBUG
     fprintf(stdout, "leaving %s\n", __func__);
     #endif
@@ -148,6 +157,7 @@ static void analyze_S2() {
         analyze_S2();
     } else if (tokenType != SECTION && tokenType != FIN) {
         fprintf(stderr, "%s follows error on %s\n", __func__, tokenTypestr[tokenType]);
+        fflush(stderr);
         exit(EXIT_FAILURE);
     }
     #if DEBUG
@@ -166,6 +176,7 @@ static void analyze_S1() {
         analyze_S1();
     } else if (tokenType != FIN) {
         fprintf(stderr, "%s follows error on %s\n", __func__, tokenTypestr[tokenType]);
+        fflush(stderr);
         exit(EXIT_FAILURE);
     }
     #if DEBUG
@@ -184,25 +195,39 @@ static void analyze_BODY() {
     #endif
 }

-void analyze_AXIOME() {
+void analyze_AXIOME(void) {
     #if DEBUG
     fprintf(stdout, "entering %s\n", __func__);
     #endif
-    scanner();
     /* print the lexical analysis result */
     #if PRINT_TOKEN
-    wprint_token();
+    wprint_ctoken(stdout);
     #endif /* PRINT_TOKEN */
-    token[tokenFound].type = tokenTypestr[tokenType];
-    tokenFound++;
+    if (target != NULL) {
+        fputws(L"<head>\n", target);
+    }
     analyze_HEAD();
+    if (target != NULL) {
+        fputws(L"\n</head>\n", target);
+    }
+    if (target != NULL) {
+        fputws(L"<body>\n", target);
+    }
     analyze_BODY();
+    if (target != NULL) {
+        fputws(L"\n</body>\n", target);
+    }
+    #if PRINT_TOKEN
+    wprint_ctoken(stdout);
+    #endif /* PRINT_TOKEN */
     if (tokenType != FIN) {
         fprintf(stderr, "%s follows error on %s\n", __func__, tokenTypestr[tokenType]);
+        fflush(stderr);
         exit(EXIT_FAILURE);
     }
     fprintf(stdout, "successful syntactic analysis\n");
     #if DEBUG
     fprintf(stdout, "leaving %s\n", __func__);
     #endif
+    fflush(stdout);
 }
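A note on how the rewritten entry point is meant to be driven: the diff removes the initial scanner() call from analyze_AXIOME(), so the first token now has to be fetched by the caller before the analysis starts, presumably in main.c, which is not part of this diff. The sketch below is a minimal hypothetical caller under that assumption; only target, tokenType and tokenValue are visible in the hunks above, while source, its declaration in global_vars.h, and the command-line handling are illustrative guesses rather than the project's actual code.

    /* main_sketch.c -- hypothetical driver, not part of the commit above. */
    #include <stdio.h>
    #include <stdlib.h>
    #include <locale.h>

    #include "global_vars.h"        /* assumed to declare FILE *source, *target */
    #include "lexical_analyzer.h"   /* scanner() */
    #include "syntactic_analyzer.h" /* analyze_AXIOME() */

    int main(int argc, char *argv[])
    {
        if (argc < 3) {
            fprintf(stderr, "usage: %s input output.html\n", argv[0]);
            return EXIT_FAILURE;
        }
        setlocale(LC_ALL, "");            /* wide-character output (fputws) needs a locale */
        source = fopen(argv[1], "r");     /* read by scanner(); the global's name is assumed */
        target = fopen(argv[2], "w");     /* HTML written here by the analyze_* functions */
        if (source == NULL || target == NULL) {
            perror("fopen");
            return EXIT_FAILURE;
        }
        scanner();          /* prime the first token: analyze_AXIOME() no longer does it */
        analyze_AXIOME();   /* exits with EXIT_FAILURE on a syntax error */
        fclose(source);
        fclose(target);
        return EXIT_SUCCESS;
    }

With PRINT_TOKEN now set to 1, such a run would also echo each recognised token to stdout through wprint_ctoken() while the generated HTML goes to target.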