Signed-off-by: Jérôme Benoit <jerome.benoit@piment-noir.org>
# Automatic dependencies code from:
# http://make.mad-scientist.net/papers/advanced-auto-dependency-generation/#tldr
BINARY_NAME=lexer

#BUILD_TYPE=debug
BUILD_TYPE=release

# ====================================
# DO NOT CHANGE STUFF BEYOND THIS LINE
# ====================================

ifeq ($(BUILD_TYPE),debug)
BUILDDIR := .build/debug
# DEBUG feeds -DDEBUG=…; the C code tests `#if DEBUG`, so it must always
# expand to 0 or 1 (an empty value would make `#if DEBUG` a syntax error).
DEBUG := 1
DEBUG_FLAG = -g
STRIP_FLAG =
OPTI_FLAG = -O0
else
BUILDDIR := .build/release
DEBUG := 0
DEBUG_FLAG =
STRIP_FLAG = -s
OPTI_FLAG = -O3
endif

CFLAGS := -DDEBUG=$(DEBUG) $(CFLAGS) $(WARN_FLAGS) $(STD_FLAG) $(OPTI_FLAG) $(DEBUG_FLAG)
LDFLAGS := $(LDFLAGS) $(STRIP_FLAG)
OBJDIR := $(BUILDDIR)/objs
error:
if (tokenType == MOT || tokenType == MOTCLE) {
error:
if (tokenType == MOT || tokenType == MOTCLE) {
- wpr_error(L"%s error with token type: %s and value: %ls\n",
+ fwprintf(stderr, L"%s error with token type: %s and value: %ls\n",
__func__,
tokenTypestr[tokenType],
token[tokenFound].value);
} else {
__func__,
tokenTypestr[tokenType],
token[tokenFound].value);
} else {
- wpr_error(L"%s error with token type: %s\n",
+ fwprintf(stderr, L"%s error with token type: %s\n",
__func__,
tokenTypestr[tokenType]);
}
__func__,
tokenTypestr[tokenType]);
}
c = fgetwc(source); // lecture du premier caractere
do {
scanner();
c = fgetwc(source); // lecture du premier caractere
do {
scanner();
- if (tokenType == MOT || tokenType == MOTCLE) {
- fwprintf(target, L"%20s: %ls\n", tokenTypestr[tokenType], token[tokenFound].value);
- } else {
- fwprintf(target, L"%20s\n", tokenTypestr[tokenType]);
- }
token[tokenFound].type = tokenTypestr[tokenType];
tokenFound++;
} while (tokenType != FIN); // tant que la fin du fichier n'est pas atteinte
token[tokenFound].type = tokenTypestr[tokenType];
tokenFound++;
} while (tokenType != FIN); // tant que la fin du fichier n'est pas atteinte
void do_syntactic_analysis() {
c = fgetwc(source); // lecture du premier caractere
void do_syntactic_analysis() {
c = fgetwc(source); // lecture du premier caractere
- do {
- analyze_AXIOME();
- } while (tokenType != FIN);
}
void print_usage(const char* name) {
}
void print_usage(const char* name) {
#include <stdio.h>
#include <wchar.h>
#include <stdio.h>
#include <wchar.h>
-void pr_warning(const char *format, ...) {
+#include "global_vars.h"
+
+void pr_warning(const char* format, ...) {
va_list args;
va_start(args, format);
va_list args;
va_start(args, format);
#if DEBUG
/* Debug-only printf-style helper that writes to stderr.
 * Fixed: a va_list must be consumed with vfprintf(); passing it to
 * fprintf() as an ordinary variadic argument is undefined behavior. */
void pr_debug(const char *format, ...) {
    va_list args;

    va_start(args, format);
    vfprintf(stderr, format, args);
    va_end(args);
}
#else
/* No-op stub: a bare re-declaration here would leave pr_debug undefined
 * at link time whenever DEBUG is disabled but call sites remain. */
void pr_debug(const char *format, ...) {
    (void)format;
}
#endif /* DEBUG */
+
/* printf-style warning helper for wide strings, writing to stderr.
 * NOTE(review): reconstructed from duplicated diff context. The visible
 * original passed the raw va_list to fwprintf (undefined behavior) and
 * showed no va_start; restored the standard va_start/vfwprintf/va_end
 * sequence — confirm against the full file. */
void wpr_warning(const wchar_t *format, ...) {
    va_list args;

    va_start(args, format);
    vfwprintf(stderr, format, args);
    va_end(args);
}
+
+void wprint_token() {
+ if (tokenType == MOT || tokenType == MOTCLE) {
+ fwprintf(target, L"%20s: %ls\n", tokenTypestr[tokenType], token[tokenFound].value);
+ } else {
+ fwprintf(target, L"%20s\n", tokenTypestr[tokenType]);
+ }
+}
/* printf-style helpers (narrow strings), all writing to stderr. */
void pr_warning(const char *format, ...);
void pr_error(const char *format, ...);
void pr_debug(const char *format, ...);

/* wprintf-style helpers (wide strings), all writing to stderr. */
void wpr_warning(const wchar_t *format, ...);
void wpr_error(const wchar_t *format, ...);

/* Print the current token to `target`; defined in print_helper.c and
 * called from the syntactic analyzer — was missing a declaration. */
void wprint_token(void);

#endif /* PRINT_HELPER_H_ */
/* NOTE(review): the analyzer hunks call fprintf/exit/EXIT_FAILURE but no
 * <stdio.h>/<stdlib.h> was visible in this chunk — added both; drop them
 * if global_vars.h already provides them. */
#include <stdio.h>
#include <stdlib.h>
#include <stdbool.h>
#include <stdarg.h>

#include "global_vars.h"
#include "lexical_analyzer.h"
#include "print_helper.h"

/* Set to 1 to echo each token through wprint_token() while parsing. */
#define PRINT_TOKEN 0

/* Syntactic analyzer functions implementation */
enum TokenType tokenType;
-static bool analyze_TEXT() {
- bool rtval = true;
+static void analyze_TEXT() {
+ #if DEBUG
+ fprintf(stdout, "entering %s\n", __func__);
+ #endif
if (tokenType == MOT) {
scanner();
if (tokenType == MOT) {
scanner();
- rtval = analyze_TEXT();
+ #if PRINT_TOKEN
+ wprint_token();
+ #endif
+ token[tokenFound].type = tokenTypestr[tokenType];
+ tokenFound++;
+ analyze_TEXT();
} else if (tokenType != MOTCLE && tokenType != NPARA && tokenType != SECTION && \
tokenType != SSECTION && tokenType != FIN) {
} else if (tokenType != MOTCLE && tokenType != NPARA && tokenType != SECTION && \
tokenType != SSECTION && tokenType != FIN) {
+ fprintf(stderr, "%s follows error on %s\n", __func__, tokenTypestr[tokenType]);
+ exit(EXIT_FAILURE);
+ #if DEBUG
+ fprintf(stdout, "leaving %s\n", __func__);
+ #endif
-static bool analyze_P() {
- bool rtval = true;
+static void analyze_P() {
+ #if DEBUG
+ fprintf(stdout, "entering %s\n", __func__);
+ #endif
if (tokenType == NPARA) {
scanner();
if (tokenType == NPARA) {
scanner();
+ #if PRINT_TOKEN
+ wprint_token();
+ #endif
+ token[tokenFound].type = tokenTypestr[tokenType];
+ tokenFound++;
if (tokenType == MOT) {
scanner();
if (tokenType == MOT) {
scanner();
- rtval = analyze_TEXT();
- rtval = analyze_P();
+ #if PRINT_TOKEN
+ wprint_token();
+ #endif
+ token[tokenFound].type = tokenTypestr[tokenType];
+ tokenFound++;
+ analyze_TEXT();
+ analyze_P();
+ } else if (tokenType != SECTION && tokenType != SSECTION && tokenType != FIN) {
+ fprintf(stderr, "%s follows error on %s\n", __func__, tokenTypestr[tokenType]);
+ exit(EXIT_FAILURE);
}
} else if (tokenType != SECTION && tokenType != SSECTION && tokenType != FIN) {
}
} else if (tokenType != SECTION && tokenType != SSECTION && tokenType != FIN) {
+ fprintf(stderr, "%s follows error on %s\n", __func__, tokenTypestr[tokenType]);
+ exit(EXIT_FAILURE);
+ #if DEBUG
+ fprintf(stdout, "leaving %s\n", __func__);
+ #endif
-static bool analyze_HEAD() {
- bool rtval = true;
+static void analyze_HEAD() {
+ #if DEBUG
+ fprintf(stdout, "entering %s\n", __func__);
+ #endif
if (tokenType == MOTCLE) {
scanner();
if (tokenType == MOTCLE) {
scanner();
- rtval = analyze_TEXT();
+ #if PRINT_TOKEN
+ wprint_token();
+ #endif
+ token[tokenFound].type = tokenTypestr[tokenType];
+ tokenFound++;
+ analyze_TEXT();
if (tokenType == MOTCLE) {
scanner();
if (tokenType == MOTCLE) {
scanner();
- rtval = analyze_TEXT();
- } else {
- rtval = false;
+ #if PRINT_TOKEN
+ wprint_token();
+ #endif /* PRINT_TOKEN */
+ token[tokenFound].type = tokenTypestr[tokenType];
+ tokenFound++;
+ analyze_TEXT();
+ } else if (tokenType != NPARA && tokenType != SECTION && tokenType != FIN) {
+ fprintf(stderr, "%s follows error on %s\n", __func__, tokenTypestr[tokenType]);
+ exit(EXIT_FAILURE);
- } else {
- rtval = false;
+ } else if (tokenType != NPARA && tokenType != SECTION && tokenType != FIN) {
+ fprintf(stderr, "%s follows error on %s\n", __func__, tokenTypestr[tokenType]);
+ exit(EXIT_FAILURE);
+ #if DEBUG
+ fprintf(stdout, "leaving %s\n", __func__);
+ #endif
-static bool analyze_H1() {
- bool rtval = true;
+static void analyze_H1() {
+ #if DEBUG
+ fprintf(stdout, "entering %s\n", __func__);
+ #endif
if (tokenType == SECTION) {
scanner();
if (tokenType == SECTION) {
scanner();
- rtval = analyze_TEXT();
- } else {
- rtval = false;
+ #if PRINT_TOKEN
+ wprint_token();
+ #endif /* PRINT_TOKEN */
+ token[tokenFound].type = tokenTypestr[tokenType];
+ tokenFound++;
+ analyze_TEXT();
+ #if DEBUG
+ fprintf(stdout, "leaving %s\n", __func__);
+ #endif
-static bool analyze_H2() {
- bool rtval = true;
+static void analyze_H2() {
+ #if DEBUG
+ fprintf(stdout, "entering %s\n", __func__);
+ #endif
if (tokenType == SSECTION) {
scanner();
if (tokenType == SSECTION) {
scanner();
- rtval = analyze_TEXT();
- } else {
- rtval = false;
+ #if PRINT_TOKEN
+ wprint_token();
+ #endif /* PRINT_TOKEN */
+ token[tokenFound].type = tokenTypestr[tokenType];
+ tokenFound++;
+ analyze_TEXT();
+ #if DEBUG
+ fprintf(stdout, "leaving %s\n", __func__);
+ #endif
-static bool analyze_S2() {
- bool rtval = true;
- if (analyze_H2()) {
- rtval = analyze_P();
- rtval = analyze_S2();
+static void analyze_S2() {
+ #if DEBUG
+ fprintf(stdout, "entering %s\n", __func__);
+ #endif
+ if (tokenType == SSECTION) {
+ analyze_H2();
+ analyze_P();
+ analyze_S2();
} else if (tokenType != SECTION && tokenType != FIN) {
} else if (tokenType != SECTION && tokenType != FIN) {
- rtval = false;
- } else {
- rtval = false;
+ fprintf(stderr, "%s follows error on %s\n", __func__, tokenTypestr[tokenType]);
+ exit(EXIT_FAILURE);
+ #if DEBUG
+ fprintf(stdout, "leaving %s\n", __func__);
+ #endif
-static bool analyze_S1() {
- bool rtval = true;
- if (analyze_H1()) {
- rtval = analyze_P();
- rtval = analyze_S2();
- rtval = analyze_S1();
+static void analyze_S1() {
+ #if DEBUG
+ fprintf(stdout, "entering %s\n", __func__);
+ #endif
+ if (tokenType == SECTION) {
+ analyze_H1();
+ analyze_P();
+ analyze_S2();
+ analyze_S1();
} else if (tokenType != FIN) {
} else if (tokenType != FIN) {
- rtval = false;
- } else {
- rtval = false;
+ fprintf(stderr, "%s follows error on %s\n", __func__, tokenTypestr[tokenType]);
+ exit(EXIT_FAILURE);
+ #if DEBUG
+ fprintf(stdout, "leaving %s\n", __func__);
+ #endif
-static bool analyze_BODY() {
- bool rtval = true;
- rtval = analyze_P();
- rtval = analyze_S1();
- return rtval;
+static void analyze_BODY() {
+ #if DEBUG
+ fprintf(stdout, "entering %s\n", __func__);
+ #endif
+ analyze_P();
+ analyze_S1();
+ #if DEBUG
+ fprintf(stdout, "leaving %s\n", __func__);
+ #endif
-bool analyze_AXIOME() {
- bool rtval = true;
+void analyze_AXIOME() {
+ #if DEBUG
+ fprintf(stdout, "entering %s\n", __func__);
+ #endif
- rtval = analyze_HEAD();
- rtval = analyze_BODY();
+ /* print the lexical analysis result */
+ #if PRINT_TOKEN
+ wprint_token();
+ #endif /* PRINT_TOKEN */
+ token[tokenFound].type = tokenTypestr[tokenType];
+ tokenFound++;
+ analyze_HEAD();
+ analyze_BODY();
+ fprintf(stderr, "%s follows error on %s\n", __func__, tokenTypestr[tokenType]);
+ exit(EXIT_FAILURE);
+ fprintf(stdout, "successful syntactic analysis\n");
+ #if DEBUG
+ fprintf(stdout, "leaving %s\n", __func__);
+ #endif
#ifndef SYNTACTIC_ANALYZER_H_
#define SYNTACTIC_ANALYZER_H_

/* Syntactic analyzer functions declarations */
/* Grammar axiom; the patch changed its return type from bool to void
 * (errors now terminate via exit(EXIT_FAILURE) inside the analyzer). */
void analyze_AXIOME(void);

#endif /* SYNTACTIC_ANALYZER_H_ */