From: Jérôme Benoit Date: Sat, 16 Dec 2017 16:13:01 +0000 (+0100) Subject: Simplify a bit a print helper function. X-Git-Url: https://git.piment-noir.org/?a=commitdiff_plain;h=f196dae589431452a3eb37a2aa985d882f695407;p=TP_AL_C.git Simplify a bit a print helper function. Signed-off-by: Jérôme Benoit --- diff --git a/lexer/main.c b/lexer/main.c index f2b6df4..7c31ee3 100644 --- a/lexer/main.c +++ b/lexer/main.c @@ -13,7 +13,7 @@ void do_lexical_analysis() { c = fgetwc(source); // lecture du premier caractere do { scanner(); - wprint_token_target(); + wprint_token(target); token[tokenFound].type = tokenTypestr[tokenType]; tokenFound++; } while (tokenType != FIN); // tant que la fin du fichier n'est pas atteinte diff --git a/lexer/print_helper.c b/lexer/print_helper.c index f7ab04f..630985b 100644 --- a/lexer/print_helper.c +++ b/lexer/print_helper.c @@ -1,7 +1,7 @@ -#include #include #include +#include "print_helper.h" #include "global_vars.h" void pr_warning(const char* format, ...) { @@ -53,19 +53,11 @@ void wpr_error(const wchar_t *format, ...) 
{ fflush(stderr); } -void wprint_token_stdout() { +void wprint_token(FILE* out_file) { if (tokenType == MOT || tokenType == MOTCLE) { - fwprintf(stdout, L"%20s: %ls\n", tokenTypestr[tokenType], token[tokenFound].value); + fwprintf(out_file, L"%20s: %ls\n", tokenTypestr[tokenType], token[tokenFound].value); } else { - fwprintf(stdout, L"%20s\n", tokenTypestr[tokenType]); - } - fflush(stdout); -} - -void wprint_token_target() { - if (tokenType == MOT || tokenType == MOTCLE) { - fwprintf(target, L"%20s: %ls\n", tokenTypestr[tokenType], token[tokenFound].value); - } else { - fwprintf(target, L"%20s\n", tokenTypestr[tokenType]); + fwprintf(out_file, L"%20s\n", tokenTypestr[tokenType]); } + fflush(out_file); } diff --git a/lexer/print_helper.h b/lexer/print_helper.h index 396afd4..080ea9d 100644 --- a/lexer/print_helper.h +++ b/lexer/print_helper.h @@ -1,6 +1,8 @@ #ifndef PRINT_HELPER_H_ #define PRINT_HELPER_H_ +#include <stdio.h> + void pr_warning(const char *format, ...); void pr_error(const char *format, ...); @@ -9,7 +11,6 @@ void pr_debug(const char *format, ...); void wpr_warning(const wchar_t *format, ...); void wpr_error(const wchar_t *format, ...); -void wprint_token_stdout(); -void wprint_token_target(); +void wprint_token(FILE* out_file); #endif /* PRINT_HELPER_H_ */ diff --git a/lexer/syntactic_analyzer.c b/lexer/syntactic_analyzer.c index 29524f0..ddef90b 100644 --- a/lexer/syntactic_analyzer.c +++ b/lexer/syntactic_analyzer.c @@ -25,7 +25,7 @@ static void analyze_TEXT() { fputws(L" ", target); } #if PRINT_TOKEN - wprint_token_stdout(); + wprint_token(stdout); #endif token[tokenFound].type = tokenTypestr[tokenType]; tokenFound++; @@ -48,7 +48,7 @@ static void analyze_P() { if (tokenType == NPARA) { scanner(); #if PRINT_TOKEN - wprint_token_stdout(); + wprint_token(stdout); #endif if (target != NULL) { fputws(L"

\n", target); @@ -60,7 +60,7 @@ static void analyze_P() { if (tokenType == MOT) { scanner(); #if PRINT_TOKEN - wprint_token_stdout(); + wprint_token(stdout); #endif if (target != NULL) { fputws((const wchar_t*)token[tokenFound].value, target); @@ -91,7 +91,7 @@ static void analyze_HEAD() { if (tokenType == MOTCLE) { scanner(); #if PRINT_TOKEN - wprint_token_stdout(); + wprint_token(stdout); #endif /* PRINT_TOKEN */ token[tokenFound].type = tokenTypestr[tokenType]; tokenFound++; @@ -108,7 +108,7 @@ static void analyze_HEAD() { if (tokenType == MOTCLE) { scanner(); #if PRINT_TOKEN - wprint_token_stdout(); + wprint_token(stdout); #endif /* PRINT_TOKEN */ token[tokenFound].type = tokenTypestr[tokenType]; tokenFound++; @@ -132,7 +132,7 @@ static void analyze_H1() { if (tokenType == SECTION) { scanner(); #if PRINT_TOKEN - wprint_token_stdout(); + wprint_token(stdout); #endif /* PRINT_TOKEN */ if (target != NULL) { fputws(L"

\n", target); @@ -158,7 +158,7 @@ static void analyze_H2() { if (tokenType == SSECTION) { scanner(); #if PRINT_TOKEN - wprint_token_stdout(); + wprint_token(stdout); #endif /* PRINT_TOKEN */ if (target != NULL) { fputws(L"

\n", target); @@ -234,7 +234,7 @@ void analyze_AXIOME() { scanner(); /* print the lexical analysis result */ #if PRINT_TOKEN - wprint_token_stdout(); + wprint_token(stdout); #endif /* PRINT_TOKEN */ token[tokenFound].type = tokenTypestr[tokenType]; tokenFound++;