X-Git-Url: https://git.piment-noir.org/?a=blobdiff_plain;f=lexer%2Fsyntactic_analyzer.c;h=ddef90b12707dc5f09009e2d62f182a46899cd0c;hb=f196dae589431452a3eb37a2aa985d882f695407;hp=8d2d2e93837565b1fe4f60085ee01623c377180f;hpb=6c47be3237ab266b1cba4e3051afaaaa55e07e52;p=TP_AL_C.git

diff --git a/lexer/syntactic_analyzer.c b/lexer/syntactic_analyzer.c
index 8d2d2e9..ddef90b 100644
--- a/lexer/syntactic_analyzer.c
+++ b/lexer/syntactic_analyzer.c
@@ -20,8 +20,12 @@ static void analyze_TEXT() {
 #endif
     if (tokenType == MOT) {
         scanner();
+        if (target != NULL) {
+            fputws((const wchar_t*)token[tokenFound].value, target);
+            fputws(L" ", target);
+        }
 #if PRINT_TOKEN
-        wprint_token();
+        wprint_token(stdout);
 #endif
         token[tokenFound].type = tokenTypestr[tokenType];
         tokenFound++;
@@ -44,28 +48,36 @@ static void analyze_P() {
     if (tokenType == NPARA) {
         scanner();
 #if PRINT_TOKEN
-        wprint_token();
+        wprint_token(stdout);
 #endif
+        if (target != NULL) {
+            fputws(L"<p>\n", target);
+            fputws((const wchar_t*)token[tokenFound].value, target);
+            fputws(L" ", target);
+        }
         token[tokenFound].type = tokenTypestr[tokenType];
         tokenFound++;
         if (tokenType == MOT) {
             scanner();
 #if PRINT_TOKEN
-            wprint_token();
+            wprint_token(stdout);
 #endif
+            if (target != NULL) {
+                fputws((const wchar_t*)token[tokenFound].value, target);
+                fputws(L" ", target);
+            }
             token[tokenFound].type = tokenTypestr[tokenType];
             tokenFound++;
             analyze_TEXT();
+            if (target != NULL) {
+                fputws(L"\n</p>\n", target);
+            }
             analyze_P();
         } else if (tokenType != SECTION && tokenType != SSECTION && tokenType != FIN) {
             fprintf(stderr, "%s follows error on %s\n", __func__, tokenTypestr[tokenType]);
             fflush(stderr);
             exit(EXIT_FAILURE);
         }
-    } else if (tokenType != SECTION && tokenType != SSECTION && tokenType != FIN) {
-        fprintf(stderr, "%s follows error on %s\n", __func__, tokenTypestr[tokenType]);
-        fflush(stderr);
-        exit(EXIT_FAILURE);
     }
 #if DEBUG
     fprintf(stdout, "leaving %s\n", __func__);
@@ -79,28 +91,34 @@ static void analyze_HEAD() {
     if (tokenType == MOTCLE) {
         scanner();
 #if PRINT_TOKEN
-        wprint_token();
-        #endif
+        wprint_token(stdout);
+        #endif /* PRINT_TOKEN */
         token[tokenFound].type = tokenTypestr[tokenType];
         tokenFound++;
+        //FIXME: Check if the MOTCLE token value is set to >Titre
+        if (target != NULL) {
+            fputws(L"<title>\n", target);
+            fputws((const wchar_t*)token[tokenFound].value, target);
+            fputws(L" ", target);
+        }
         analyze_TEXT();
+        if (target != NULL) {
+            fputws(L"\n</title>\n", target);
+        }
         if (tokenType == MOTCLE) {
             scanner();
 #if PRINT_TOKEN
-            wprint_token();
+            wprint_token(stdout);
 #endif /* PRINT_TOKEN */
             token[tokenFound].type = tokenTypestr[tokenType];
             tokenFound++;
+            //The text contain the author
             analyze_TEXT();
         } else if (tokenType != NPARA && tokenType != SECTION && tokenType != FIN) {
             fprintf(stderr, "%s follows error on %s\n", __func__, tokenTypestr[tokenType]);
             fflush(stderr);
             exit(EXIT_FAILURE);
         }
-    } else if (tokenType != NPARA && tokenType != SECTION && tokenType != FIN) {
-        fprintf(stderr, "%s follows error on %s\n", __func__, tokenTypestr[tokenType]);
-        fflush(stderr);
-        exit(EXIT_FAILURE);
     }
 #if DEBUG
     fprintf(stdout, "leaving %s\n", __func__);
@@ -114,11 +132,19 @@ static void analyze_H1() {
     if (tokenType == SECTION) {
         scanner();
 #if PRINT_TOKEN
-        wprint_token();
+        wprint_token(stdout);
 #endif /* PRINT_TOKEN */
+        if (target != NULL) {
+            fputws(L"<h1>\n", target);
+            fputws((const wchar_t*)token[tokenFound].value, target);
+            fputws(L" ", target);
+        }
         token[tokenFound].type = tokenTypestr[tokenType];
         tokenFound++;
         analyze_TEXT();
+        if (target != NULL) {
+            fputws(L"\n</h1>\n", target);
+        }
     }
 #if DEBUG
     fprintf(stdout, "leaving %s\n", __func__);
@@ -132,11 +158,19 @@ static void analyze_H2() {
     if (tokenType == SSECTION) {
         scanner();
 #if PRINT_TOKEN
-        wprint_token();
+        wprint_token(stdout);
 #endif /* PRINT_TOKEN */
+        if (target != NULL) {
+            fputws(L"<h2>\n", target);
+            fputws((const wchar_t*)token[tokenFound].value, target);
+            fputws(L" ", target);
+        }
         token[tokenFound].type = tokenTypestr[tokenType];
         tokenFound++;
         analyze_TEXT();
+        if (target != NULL) {
+            fputws(L"\n</h2>\n", target);
+        }
     }
 #if DEBUG
     fprintf(stdout, "leaving %s\n", __func__);
@@ -147,6 +181,7 @@ static void analyze_S2() {
 #if DEBUG
     fprintf(stdout, "entering %s\n", __func__);
 #endif
+    //FIXME: This test is probably useless
     if (tokenType == SSECTION) {
         analyze_H2();
         analyze_P();
@@ -165,6 +200,7 @@ static void analyze_S1() {
 #if DEBUG
     fprintf(stdout, "entering %s\n", __func__);
 #endif
+    //FIXME: This test is probably useless
    if (tokenType == SECTION) {
         analyze_H1();
         analyze_P();
@@ -198,12 +234,24 @@ void analyze_AXIOME() {
     scanner();
     /* print the lexical analysis result */
 #if PRINT_TOKEN
-    wprint_token();
+    wprint_token(stdout);
 #endif /* PRINT_TOKEN */
     token[tokenFound].type = tokenTypestr[tokenType];
     tokenFound++;
+    if (target != NULL) {
+        fputws(L"<head>\n", target);
+    }
     analyze_HEAD();
+    if (target != NULL) {
+        fputws(L"\n</head>\n", target);
+    }
+    if (target != NULL) {
+        fputws(L"<body>\n", target);
+    }
     analyze_BODY();
+    if (target != NULL) {
+        fputws(L"\n</body>\n", target);
+    }
    if (tokenType != FIN) {
         fprintf(stderr, "%s follows error on %s\n", __func__, tokenTypestr[tokenType]);
         fflush(stderr);