(#303) Implement read_expr_from_file

master
rexim 2018-08-28 19:55:01 +07:00
parent a56532a9f0
commit 8ab120b87f
7 changed files with 233 additions and 57 deletions

CMakeLists.txt

@@ -93,11 +93,26 @@ set(HEADER_FILES
add_executable(nothing ${SOURCE_FILES} ${HEADER_FILES})
add_executable(nothing_test
  src/script/expr.c
  src/script/expr.h
  src/script/parser.c
  src/script/parser.h
  src/script/tokenizer.c
  src/script/tokenizer.h
  src/system/lt.c
  src/system/lt.h
  src/system/error.c
  src/system/error.h
  src/system/lt/lt_adapters.c
  src/system/lt/lt_adapters.h
  src/system/lt/lt_slot.c
  src/system/lt/lt_slot.h
  test/main.c
  test/test.h
  test/tokenizer_suite.h
)
target_link_libraries(nothing ${SDL2_LIBRARY} ${SDL2_MIXER_LIBRARY})
target_link_libraries(nothing_test ${SDL2_LIBRARY} ${SDL2_MIXER_LIBRARY})
if(("${CMAKE_CXX_COMPILER_ID}" STREQUAL "GNU") OR ("${CMAKE_CXX_COMPILER_ID}" STREQUAL "CLANG"))
set(CMAKE_C_FLAGS
@@ -137,3 +152,4 @@ endif()
file(COPY ${CMAKE_CURRENT_SOURCE_DIR}/sounds DESTINATION ${CMAKE_CURRENT_BINARY_DIR})
file(COPY ${CMAKE_CURRENT_SOURCE_DIR}/fonts DESTINATION ${CMAKE_CURRENT_BINARY_DIR})
file(COPY ${CMAKE_CURRENT_SOURCE_DIR}/test-data DESTINATION ${CMAKE_CURRENT_BINARY_DIR})

src/script/parser.c

@@ -1,9 +1,16 @@
#include <assert.h>
#include <ctype.h>
#include <errno.h>
#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>

#include "script/parser.h"
#include "system/lt.h"
#include "system/lt/lt_adapters.h"

#define MAX_BUFFER_LENGTH (5 * 1000 * 1000)

static struct ParseResult parse_expr(struct Token current_token);
@@ -163,7 +170,52 @@ struct ParseResult read_expr_from_string(const char *str)
 struct ParseResult read_expr_from_file(const char *filename)
 {
     assert(filename);
-    return parse_failure("not implemented", NULL);
+
+    Lt *lt = create_lt();
+    if (lt == NULL) {
+        return parse_failure("Could not create Lt object", NULL);
+    }
+
+    FILE *stream = PUSH_LT(lt, fopen(filename, "rb"), fclose_lt);
+    if (!stream) {
+        /* TODO: ParseResult should not be used for reporting IO failures */
+        RETURN_LT(lt, parse_failure(strerror(errno), NULL));
+    }
+
+    if (fseek(stream, 0, SEEK_END) != 0) {
+        RETURN_LT(lt, parse_failure("Could not find the end of the file", NULL));
+    }
+
+    const long int buffer_length = ftell(stream);
+
+    if (buffer_length < 0) {
+        RETURN_LT(lt, parse_failure("Couldn't get the size of file", NULL));
+    }
+
+    if (buffer_length == 0) {
+        RETURN_LT(lt, parse_failure("File is empty", NULL));
+    }
+
+    if (buffer_length >= MAX_BUFFER_LENGTH) {
+        RETURN_LT(lt, parse_failure("File is too big", NULL));
+    }
+
+    if (fseek(stream, 0, SEEK_SET) != 0) {
+        RETURN_LT(lt, parse_failure("Could not find the beginning of the file", NULL));
+    }
+
+    char * const buffer = PUSH_LT(lt, malloc((size_t) buffer_length + 1), free);
+    if (buffer == NULL) {
+        RETURN_LT(lt, parse_failure(strerror(errno), NULL));
+    }
+
+    if (fread(buffer, 1, (size_t) buffer_length, stream) != (size_t) buffer_length) {
+        RETURN_LT(lt, parse_failure("Could not read the file", NULL));
+    }
+
+    struct ParseResult result = read_expr_from_string(buffer);
+
+    RETURN_LT(lt, result);
 }

 struct ParseResult parse_success(struct Expr expr,
@@ -199,10 +251,13 @@ void print_parse_error(FILE *stream,
         return;
     }

-    fprintf(stream, "%s\n", str);
-    for (size_t i = 0; i < (size_t) (result.end - str); ++i) {
-        fprintf(stream, " ");
-    }
-    fprintf(stream, "^\n");
+    if (result.end) {
+        fprintf(stream, "%s\n", str);
+        for (size_t i = 0; i < (size_t) (result.end - str); ++i) {
+            fprintf(stream, " ");
+        }
+        fprintf(stream, "^\n");
+    }
+
     fprintf(stream, "%s\n", result.error_message);
 }
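For orientation, here is a minimal sketch of a caller for the new entry point. This is a hypothetical example, not part of the commit; it assumes only the ParseResult fields exercised by the parser suite added below (is_error, error_message, expr) and destroy_expr, which is presumably declared in script/expr.h.

/* Hypothetical caller, not part of this commit. */
#include <stdio.h>

#include "script/expr.h"
#include "script/parser.h"

int main(void)
{
    struct ParseResult result = read_expr_from_file("test-data/simple-sum.lisp");

    if (result.is_error) {
        /* ParseResult carries the message for both parse and IO failures,
         * as noted in the TODO in the hunk above */
        fprintf(stderr, "Parse error: %s\n", result.error_message);
        return -1;
    }

    /* result.expr owns the parsed tree; the caller releases it */
    destroy_expr(result.expr);
    return 0;
}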

test-data/simple-sum.lisp (new file)

@@ -0,0 +1 @@
(+ 1 2 3)

test/main.c

@@ -1,59 +1,10 @@
-#include <assert.h>
-#include <stdio.h>
-#include <string.h>
-
-#include "test.h"
-#include "script/tokenizer.h"
-
-TEST(tokenizer_number_list_test)
-{
-    struct Token token = next_token("(1 2 3)");
-    ASSERT_STREQN("(", token.begin, (size_t) (token.end - token.begin));
-
-    token = next_token(token.end);
-    ASSERT_STREQN("1", token.begin, (size_t) (token.end - token.begin));
-
-    token = next_token(token.end);
-    ASSERT_STREQN("2", token.begin, (size_t) (token.end - token.begin));
-
-    token = next_token(token.end);
-    ASSERT_STREQN("3", token.begin, (size_t) (token.end - token.begin));
-
-    token = next_token(token.end);
-    ASSERT_STREQN(")", token.begin, (size_t) (token.end - token.begin));
-
-    return 0;
-}
-
-TEST(tokenizer_string_list_test)
-{
-    struct Token token = next_token("(\"foo\" \"bar\" \"baz\")");
-    ASSERT_STREQN("(", token.begin, (size_t) (token.end - token.begin));
-
-    token = next_token(token.end);
-    ASSERT_STREQN("\"foo\"", token.begin, (size_t) (token.end - token.begin));
-
-    token = next_token(token.end);
-    ASSERT_STREQN("\"bar\"", token.begin, (size_t) (token.end - token.begin));
-
-    token = next_token(token.end);
-    ASSERT_STREQN("\"baz\"", token.begin, (size_t) (token.end - token.begin));
-
-    token = next_token(token.end);
-    ASSERT_STREQN(")", token.begin, (size_t) (token.end - token.begin));
-
-    return 0;
-}
-
-TEST_SUITE(tokenizer_suite)
-{
-    TEST_RUN(tokenizer_number_list_test);
-    TEST_RUN(tokenizer_string_list_test);
-    return 0;
-}
+#include "tokenizer_suite.h"
+#include "parser_suite.h"
+
+TEST_MAIN()
+{
+    TEST_RUN(tokenizer_suite);
+    TEST_RUN(parser_suite);
+    return 0;
+}

test/parser_suite.h (new file, 55 lines)

@@ -0,0 +1,55 @@
#ifndef PARSER_SUITE_H_
#define PARSER_SUITE_H_

#include "test.h"
#include "script/parser.h"

TEST(read_expr_from_file_test)
{
    struct ParseResult result = read_expr_from_file("test-data/simple-sum.lisp");
    ASSERT_TRUE(!result.is_error, result.error_message);

    struct Expr head = result.expr;
    struct Expr expr = head;

    ASSERT_INTEQ(EXPR_CONS, expr.type);
    ASSERT_INTEQ(EXPR_ATOM, expr.cons->car.type);
    ASSERT_INTEQ(ATOM_SYMBOL, expr.cons->car.atom->type);
    ASSERT_STREQ("+", expr.cons->car.atom->sym);

    expr = expr.cons->cdr;
    ASSERT_INTEQ(EXPR_CONS, expr.type);
    ASSERT_INTEQ(EXPR_ATOM, expr.cons->car.type);
    ASSERT_INTEQ(ATOM_NUMBER, expr.cons->car.atom->type);
    ASSERT_FLOATEQ(1.0f, expr.cons->car.atom->num, 1e-3f);

    expr = expr.cons->cdr;
    ASSERT_INTEQ(EXPR_CONS, expr.type);
    ASSERT_INTEQ(EXPR_ATOM, expr.cons->car.type);
    ASSERT_INTEQ(ATOM_NUMBER, expr.cons->car.atom->type);
    ASSERT_FLOATEQ(2.0f, expr.cons->car.atom->num, 1e-3f);

    expr = expr.cons->cdr;
    ASSERT_INTEQ(EXPR_CONS, expr.type);
    ASSERT_INTEQ(EXPR_ATOM, expr.cons->car.type);
    ASSERT_INTEQ(ATOM_NUMBER, expr.cons->car.atom->type);
    ASSERT_FLOATEQ(3.0f, expr.cons->car.atom->num, 1e-3f);

    expr = expr.cons->cdr;
    ASSERT_INTEQ(EXPR_ATOM, expr.type);
    ASSERT_INTEQ(ATOM_SYMBOL, expr.atom->type);
    ASSERT_STREQ("nil", expr.atom->sym);

    destroy_expr(head);

    return 0;
}

TEST_SUITE(parser_suite)
{
    TEST_RUN(read_expr_from_file_test);
    return 0;
}

#endif // PARSER_SUITE_H_
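For reference, the chain of asserts above walks the standard dotted-pair reading of the test input; the parser is expected to terminate the list with a nil symbol atom rather than a dedicated nil type:

(+ 1 2 3)  ==  (+ . (1 . (2 . (3 . nil))))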

test/test.h

@@ -1,6 +1,8 @@
#ifndef TEST_H_
#define TEST_H_

#include "math.h"

#define TEST_RUN(name) \
    if (name() < 0) { \
        return -1; \
@@ -20,6 +22,8 @@
    } \
    static int name##_body(void)

// TODO: ASSERT_* macros evaluate expressions several times

#define ASSERT_STREQN(expected, actual, n) \
    if (strncmp(expected, actual, n) != 0) { \
        fprintf(stderr, "\n%s:%d: ASSERT_STREQN: \n", \
@@ -33,6 +37,42 @@
        return -1; \
    }

#define ASSERT_STREQ(expected, actual) \
    if (strcmp(expected, actual) != 0) { \
        fprintf(stderr, "\n%s:%d: ASSERT_STREQ: \n", \
                __FILE__, __LINE__); \
        fprintf(stderr, " Expected: %s\n", expected); \
        fprintf(stderr, " Actual: %s\n", actual); \
        return -1; \
    }

#define ASSERT_INTEQ(expected, actual) \
    if (expected != actual) { \
        fprintf(stderr, "\n%s:%d: ASSERT_INTEQ: \n", \
                __FILE__, __LINE__); \
        fprintf(stderr, " Expected: %d\n", expected); \
        fprintf(stderr, " Actual: %d\n", actual); \
        return -1; \
    }

#define ASSERT_FLOATEQ(expected, actual, margin) \
    if (fabsf(expected - actual) > margin) { \
        fprintf(stderr, "\n%s:%d: ASSERT_FLOATEQ: \n", \
                __FILE__, __LINE__); \
        fprintf(stderr, " Expected: %f\n", expected); \
        fprintf(stderr, " Actual: %f\n", actual); \
        fprintf(stderr, " Margin: %f\n", margin); \
        return -1; \
    }

#define ASSERT_TRUE(condition, message) \
    if (!condition) { \
        fprintf(stderr, "\n%s:%d: ASSERT_TRUE: false\n", \
                __FILE__, __LINE__); \
        fprintf(stderr, "%s\n", message); \
        return -1; \
    }
#define TEST_SUITE(name) \
static int name##_body(void); \
static int name(void) { \
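A note on the TODO above: because each ASSERT_* macro repeats its arguments in the failure branch, an argument with side effects is evaluated more than once. A hypothetical illustration of the pitfall, not part of this change, using a made-up side-effecting helper:

/* Hypothetical example of the multiple-evaluation issue flagged by the TODO. */
static int calls = 0;
static int next_number(void) { return ++calls; }

TEST(multiple_evaluation_pitfall_test)
{
    /* The first evaluation of next_number() returns 1, so the comparison
     * against 2 fails; the macro then evaluates next_number() again for the
     * "Actual:" printout and reports 2, a value that was never compared. */
    ASSERT_INTEQ(2, next_number());
    return 0;
}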

test/tokenizer_suite.h (new file, 58 lines)

@@ -0,0 +1,58 @@
#ifndef TOKENIZER_SUITE_H_
#define TOKENIZER_SUITE_H_

#include <assert.h>
#include <stdio.h>
#include <string.h>

#include "test.h"
#include "script/tokenizer.h"

TEST(tokenizer_number_list_test)
{
    struct Token token = next_token("(1 2 3)");
    ASSERT_STREQN("(", token.begin, (size_t) (token.end - token.begin));

    token = next_token(token.end);
    ASSERT_STREQN("1", token.begin, (size_t) (token.end - token.begin));

    token = next_token(token.end);
    ASSERT_STREQN("2", token.begin, (size_t) (token.end - token.begin));

    token = next_token(token.end);
    ASSERT_STREQN("3", token.begin, (size_t) (token.end - token.begin));

    token = next_token(token.end);
    ASSERT_STREQN(")", token.begin, (size_t) (token.end - token.begin));

    return 0;
}

TEST(tokenizer_string_list_test)
{
    struct Token token = next_token("(\"foo\" \"bar\" \"baz\")");
    ASSERT_STREQN("(", token.begin, (size_t) (token.end - token.begin));

    token = next_token(token.end);
    ASSERT_STREQN("\"foo\"", token.begin, (size_t) (token.end - token.begin));

    token = next_token(token.end);
    ASSERT_STREQN("\"bar\"", token.begin, (size_t) (token.end - token.begin));

    token = next_token(token.end);
    ASSERT_STREQN("\"baz\"", token.begin, (size_t) (token.end - token.begin));

    token = next_token(token.end);
    ASSERT_STREQN(")", token.begin, (size_t) (token.end - token.begin));

    return 0;
}

TEST_SUITE(tokenizer_suite)
{
    TEST_RUN(tokenizer_number_list_test);
    TEST_RUN(tokenizer_string_list_test);
    return 0;
}

#endif // TOKENIZER_SUITE_H_