Merge pull request 'refactor-error-handling' (#12) from refactor-error-handling into main

Reviewed-on: #12
This commit was merged in pull request #12.
This commit is contained in:
2026-05-13 19:05:54 -06:00
11 changed files with 541 additions and 425 deletions

View File

@@ -40,6 +40,7 @@ target_include_directories(calculator_lib
target_link_libraries(calculator_lib
PUBLIC arena
PUBLIC arraylist
PRIVATE m
)
add_executable(calculator src/main.c)

View File

@@ -1,11 +1,28 @@
#ifndef EVALUATOR_H
#define EVALUATOR_H
#include "lexer.h"
#include "parser.h"
#include <stdint.h>
int64_t evaluate(ParseResult context);
int64_t evaluate_tree(ASTNode *tree);
typedef enum {
EVALUATOR_OK,
EVALUATOR_MATH_ERR,
EVALUATOR_INVALID_PARSING,
EVALUATOR_INVALID_TREE, // keeps switch statements exhaustive so the compiler stops warning
} EvaluatorErr;
typedef struct {
bool is_valid;
union {
int64_t val;
EvaluatorErr err;
};
} EvaluatorResult;
EvaluatorResult evaluate_binary(Node *tree);
EvaluatorResult evaluate_unary(Node *tree);
EvaluatorResult evaluate(ParserResult context);
EvaluatorResult evaluate_tree(Node *tree);
#endif // !EVALUATOR_H

View File

@@ -8,12 +8,9 @@
// For identifying
typedef enum {
NODE_INTEGER,
NODE_BINARY_OP,
NODE_UNARY_OP,
NODE_PARENTHESIS,
} ASTNodeType;
TOKEN_INTEGER,
TOKEN_OPERATOR,
} TokenType;
// For classifying operators
typedef enum {
OP_ADD,
@@ -36,25 +33,13 @@ typedef enum {
} LexerErr;
// Can be thought of as tokens; they will be used by the parser.
typedef struct ASTNode {
ASTNodeType type;
typedef struct {
TokenType type;
union {
int64_t integer;
struct {
struct ASTNode *left;
struct ASTNode *right;
int64_t num;
Operator op;
} binary;
struct {
struct ASTNode *val;
Operator op;
} unary;
struct {
struct ASTNode *val;
Operator op;
} parenthesis;
} data;
} ASTNode;
};
} Token;
typedef struct {
bool is_valid;
@@ -68,21 +53,21 @@ typedef struct {
bool is_valid;
union {
LexerErr err;
ASTNode node;
Token token;
};
} ASTNodeResult;
} TokenResult;
typedef struct {
bool is_valid;
union {
LexerErr err;
int64_t number;
int64_t num;
};
} LexerI64Result;
// Lexer functions as well as a few extra utilities
TokenizeResult tokenize(const char* input);
ASTNodeResult tokenize_number(const char* input, size_t *offset);
TokenResult tokenize_number(const char* input, size_t *offset);
LexerI64Result string_to_integer(const char buf[]);
bool isoperator(int c);
Operator char_to_operator(int c);

View File

@@ -4,11 +4,31 @@
#include "lexer.h"
#include "arena.h"
#include "arraylist.h"
#include <stdbool.h>
#include <stdint.h>
typedef struct {
ASTNode *head;
} AST;
typedef enum {
NODE_INT,
NODE_BINARY_OP,
NODE_UNARY_OP,
} NodeType;
typedef struct Node {
NodeType type;
union {
int64_t num;
struct {
Operator op;
struct Node *left;
struct Node *right;
}binary;
struct {
Operator op;
struct Node *to;
}unary;
Operator par;
};
} Node;
typedef enum {
PARSER_OK = 0,
@@ -16,6 +36,8 @@ typedef enum {
PARSER_MISSING_OPERAND,
PARSER_UNMATCHED_PAREN,
PARSER_OUT_OF_MEMORY,
PARSER_INVALID_TOKENIZE,
PARSER_UNEXPECTED_EOF,
} ParserErr;
typedef struct {
@@ -24,20 +46,44 @@ typedef struct {
ParserErr err;
struct {
Arena arena;
ASTNode *tree;
Node *tree;
};
};
} ParseResult;
} ParserResult;
ASTNode *nud(ArraySlice *slice);
ASTNode *led(ArraySlice *slice, size_t right_precedence);
typedef struct {
bool is_valid;
union {
ParserErr err;
Node *node;
};
} TreeResult;
uint8_t prefix_rbp(ASTNode node);
uint8_t postfix_lbp(ASTNode node);
uint8_t infix_lbp(ASTNode node);
uint8_t infix_rbp(ASTNode node);
typedef struct {
bool is_valid;
union {
ParserErr err;
Node node;
};
} NodeResult;
ParseResult parse(TokenizeResult tokens);
ASTNode *parse_expr(ArraySlice *slice, Arena *arena, uint8_t min_bp);
typedef struct {
bool is_valid;
union {
ParserErr err;
uint8_t num;
};
} ParserU8Result;
TreeResult nud(ArraySlice *slice, Arena *arena, Token token); // Null denotation
TreeResult led(ArraySlice *slice, Arena *arena, Node *left, Token token); // Left denotation
ParserU8Result prefix_rbp(Token token);
ParserU8Result postfix_lbp(Token token);
ParserU8Result infix_lbp(Token token);
ParserU8Result infix_rbp(Token token);
ParserResult parse(TokenizeResult tokens);
TreeResult parse_expr(ArraySlice *slice, Arena *arena, uint8_t min_bp);
#endif // !PARSER_H

View File

@@ -2,36 +2,113 @@
#include "arena.h"
#include "lexer.h"
#include "parser.h"
#include <stdbool.h>
#include <stdint.h>
#include <math.h>
int64_t evaluate_tree(ASTNode *tree) {
EvaluatorResult evaluate_tree(Node *tree) {
if (tree->type == NODE_BINARY_OP) {
Operator op = tree->data.binary.op;
ASTNode *left = tree->data.binary.left;
ASTNode *right = tree->data.binary.right;
return evaluate_binary(tree);
} else if (tree->type == NODE_UNARY_OP) {
return evaluate_unary(tree);
}
return (EvaluatorResult) {
.is_valid = true,
.val = tree->num,
};
}
/*
 * Evaluate a binary-operator node by recursively evaluating both
 * subtrees and applying the operator.
 *
 * Returns an EvaluatorResult: .val on success, or .err set to
 * EVALUATOR_MATH_ERR (division by zero / overflowing INT64_MIN / -1)
 * or EVALUATOR_INVALID_TREE (operator is not a known binary op).
 */
EvaluatorResult evaluate_binary(Node *tree) {
    Operator op = tree->binary.op;
    Node *left = tree->binary.left;
    Node *right = tree->binary.right;
    EvaluatorResult left_result = evaluate_tree(left);
    EvaluatorResult right_result = evaluate_tree(right);
    if (!left_result.is_valid) {
        return left_result;
    }
    // BUG FIX: the original checked left_result twice and never
    // propagated a failure from the right subtree.
    if (!right_result.is_valid) {
        return right_result;
    }
    switch (op) {
    case OP_ADD:
        return (EvaluatorResult) {
            .is_valid = true,
            .val = left_result.val + right_result.val,
        };
    case OP_SUB:
        return (EvaluatorResult) {
            .is_valid = true,
            .val = left_result.val - right_result.val,
        };
    case OP_MUL:
        return (EvaluatorResult) {
            .is_valid = true,
            .val = left_result.val * right_result.val,
        };
    case OP_DIV:
        // Guard the two UB cases of signed integer division:
        // division by zero and INT64_MIN / -1 (overflow).
        if (right_result.val == 0 ||
            (left_result.val == INT64_MIN && right_result.val == -1)) {
            return (EvaluatorResult) {
                .is_valid = false,
                .err = EVALUATOR_MATH_ERR,
            };
        }
        return (EvaluatorResult) {
            .is_valid = true,
            .val = left_result.val / right_result.val,
        };
    case OP_POW:
        // NOTE(review): pow() works in double, so large integer bases or
        // exponents lose precision / may overflow on conversion back to
        // int64_t — TODO: consider an integer exponentiation routine.
        return (EvaluatorResult) {
            .is_valid = true,
            .val = pow(left_result.val, right_result.val),
        };
    default:
        // Unary/parenthesis operators should never reach a binary node.
        return (EvaluatorResult) {
            .is_valid = false,
            .err = EVALUATOR_INVALID_TREE,
        };
    }
}
int64_t return_val = tree->data.integer;
return return_val;
/*
 * Evaluate a unary-operator node (prefix +/-, postfix factorial).
 *
 * Returns an EvaluatorResult: .val on success, or .err set to
 * EVALUATOR_MATH_ERR (factorial of a negative number, or a factorial
 * too large for int64_t) or EVALUATOR_INVALID_TREE (unknown operator).
 */
EvaluatorResult evaluate_unary(Node *tree) {
    Operator op = tree->unary.op;
    Node *to = tree->unary.to;
    EvaluatorResult result = evaluate_tree(to);
    if (!result.is_valid) {
        return result;
    }
    switch (op) {
    case OP_ADD:
        // Unary plus is the identity.
        return result;
    case OP_SUB:
        // NOTE(review): -INT64_MIN overflows; left unguarded to match
        // surrounding integer arithmetic — TODO decide on a policy.
        return (EvaluatorResult) {
            .is_valid = true,
            .val = -result.val,
        };
    case OP_FACTORIAL: {
        // BUG FIX: tgamma(n + 1) hits a pole for negative integers
        // (NaN/inf), and converting a non-finite or out-of-range double
        // to int64_t is undefined behavior. Reject those inputs instead.
        if (result.val < 0) {
            return (EvaluatorResult) {
                .is_valid = false,
                .err = EVALUATOR_MATH_ERR,
            };
        }
        double factorial = tgamma((double)result.val + 1.0);
        if (!isfinite(factorial) || factorial > (double)INT64_MAX) {
            return (EvaluatorResult) {
                .is_valid = false,
                .err = EVALUATOR_MATH_ERR,
            };
        }
        return (EvaluatorResult) {
            .is_valid = true,
            .val = (int64_t)factorial,
        };
    }
    default:
        return (EvaluatorResult) {
            .is_valid = false,
            .err = EVALUATOR_INVALID_TREE,
        };
    }
}
EvaluatorResult evaluate(ParserResult context) {
if (!context.is_valid) {
return (EvaluatorResult) {
.is_valid = false,
.err = EVALUATOR_INVALID_PARSING,
};
}
EvaluatorResult result = evaluate_tree(context.tree);
arena_destroy(&context.arena);
return result;

View File

@@ -5,7 +5,7 @@
#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>
#include <strings.h>
#include <string.h>
#include <limits.h>
typedef enum {
@@ -15,34 +15,31 @@ typedef enum {
TokenizeResult tokenize(const char *input) {
ArrayList *arr = arraylist_init(64, sizeof(ASTNode));
ArrayList *arr = arraylist_init(64, sizeof(Token));
size_t offset = 0;
while (
input[offset] != '\n' ||
input[offset] != EOF ||
input[offset] != '\0') {
while (input[offset] != '\0') {
if (isdigit(input[offset])) {
ASTNodeResult result = tokenize_number(input, &offset);
TokenResult result = tokenize_number(input, &offset);
if (!result.is_valid) {
arraylist_destroy(&arr);
return (TokenizeResult) {.is_valid = false, .err = result.err};
}
arraylist_push_back(arr, &result.node);
arraylist_push_back(arr, &result.token);
} else if (isoperator(input[offset])) {
ASTNode op_node = {
.type = NODE_BINARY_OP,
.data.binary.op = char_to_operator(input[offset]),
.data.binary.left = NULL,
.data.binary.right = NULL,
Token op_node = {
.type = TOKEN_OPERATOR,
.op = char_to_operator(input[offset]),
};
arraylist_push_back(arr, &op_node);
} else if (isspace(input[offset])) {
// Nothing...
} else {
arraylist_destroy(&arr);
return (TokenizeResult) {
.is_valid = false,
.err = LEXER_NOT_RECOGNIZED_SYMBOL};
@@ -52,6 +49,7 @@ TokenizeResult tokenize(const char *input) {
}
if (arraylist_size(arr) < 1) {
arraylist_destroy(&arr);
return (TokenizeResult) {.is_valid = false, .err = LEXER_EMPTY_INPUT};
}
@@ -60,7 +58,7 @@ TokenizeResult tokenize(const char *input) {
// CURRENTLY, it only supports ints; it is not yet clear how floating
// point will be implemented, but I'll figure it out
ASTNodeResult tokenize_number(const char *input, size_t *offset) {
TokenResult tokenize_number(const char *input, size_t *offset) {
char buf[64] = { '\0' };
size_t buf_pos = 0;
bool is_integer = true; // Will later be used to differentiate fractions
@@ -68,35 +66,35 @@ ASTNodeResult tokenize_number(const char *input, size_t *offset) {
// read number
size_t current = *offset;
while (isdigit(input[current])) {
buf[buf_pos] = input[current];
if (buf_pos >= sizeof(buf)) {
return (ASTNodeResult) {
if (buf_pos >= sizeof(buf) - 1) {
return (TokenResult) {
.is_valid = false,
.err = LEXER_BUF_OVERFLOW};
}
buf[buf_pos] = input[current];
current++;
buf_pos++;
}
ASTNode new_node;
Token new_token;
if (is_integer) {
new_node.type = NODE_INTEGER;
LexerI64Result status = string_to_integer(buf);
new_token.type = TOKEN_INTEGER;
LexerI64Result result = string_to_integer(buf);
if (!status.is_valid) {
return (ASTNodeResult) {.is_valid = false, .err = status.err};
if (!result.is_valid) {
return (TokenResult) {.is_valid = false, .err = result.err};
}
new_node.data.integer = status.number;
new_token.num = result.num;
*offset = current;
return (ASTNodeResult) {.is_valid = true, .node = new_node};
*offset = current - 1;
return (TokenResult) {.is_valid = true, .token = new_token};
}
return (ASTNodeResult) {
return (TokenResult) {
.is_valid = false,
.err = LEXER_FAILED_NUMBER_CONVERSION};
}
@@ -122,7 +120,7 @@ LexerI64Result string_to_integer(const char *buf) {
c++;
}
return (LexerI64Result) {.is_valid = true, .number = count};
return (LexerI64Result) {.is_valid = true, .num = count};
}
bool isoperator(int c) {

View File

@@ -18,12 +18,11 @@ int main(void) {
}
buf[pos] = '\0';
TokenizeResult tokens = tokenize(buf);
EvaluatorResult result = evaluate(parse(tokenize(buf)));
if (!result.is_valid) {
puts("Error checando expresion");
}
ParseResult par = parse(tokens);
int64_t result = evaluate(par);
printf("El resultado es: %" PRIi64 "\n", result);
printf("El resultado es: %" PRIi64 "\n", result.val);
return EXIT_SUCCESS;
}

View File

@@ -2,216 +2,391 @@
#include "arraylist.h"
#include "lexer.h"
#include "arena.h"
#include <cmocka.h>
#include <stdalign.h>
#include <stdbool.h>
#include <stdint.h>
uint8_t prefix_rbp(ASTNode node) {
if (node.type == NODE_INTEGER) {
return 0;
ParserU8Result prefix_rbp(Token token) {
if (token.type == TOKEN_INTEGER) {
return (ParserU8Result) {
.is_valid = false,
.err = PARSER_UNEXPECTED_TOKEN,
};
}
switch (node.data.unary.op) {
switch (token.op) {
case OP_SUB:
case OP_ADD:
return 30;
return (ParserU8Result) {
.is_valid = true,
.num = 30,
};
default:
return -1;
return (ParserU8Result) {
.is_valid = false,
.err = PARSER_UNEXPECTED_TOKEN,
};
}
}
uint8_t postfix_lbp(ASTNode node) {
if (node.type == NODE_INTEGER) {
return 0;
ParserU8Result postfix_lbp(Token token) {
if (token.type != TOKEN_OPERATOR) {
return (ParserU8Result) {
.is_valid = false,
.err = PARSER_UNEXPECTED_TOKEN,
};
}
switch (node.data.unary.op) {
switch (token.op) {
case OP_FACTORIAL:
return 40;
return (ParserU8Result) {
.is_valid = true,
.num = 40,
};
default:
// needs to be dealt with with resulttypes
return 255;
return (ParserU8Result) {
.is_valid = false,
.err = PARSER_UNEXPECTED_TOKEN,
};
}
}
uint8_t infix_lbp(ASTNode node) {
if (node.type == NODE_INTEGER) {
return 0;
ParserU8Result infix_lbp(Token token) {
if (token.type != TOKEN_OPERATOR) {
return (ParserU8Result) {
.is_valid = false,
.err = PARSER_UNEXPECTED_TOKEN,
};
}
switch (node.data.binary.op) {
switch (token.op) {
case OP_ADD:
case OP_SUB:
return 10;
break;
return (ParserU8Result) {
.is_valid = true,
.num = 10,
};
case OP_DIV:
case OP_MUL:
return 20;
return (ParserU8Result) {
.is_valid = true,
.num = 20,
};
case OP_POW:
return 51;
return (ParserU8Result) {
.is_valid = true,
.num = 51,
};
default:
return 0;
return (ParserU8Result) {
.is_valid = false,
.err = PARSER_UNEXPECTED_TOKEN,
};
}
}
uint8_t infix_rbp(ASTNode node) {
if (node.type == NODE_INTEGER) {
return 0;
ParserU8Result infix_rbp(Token token) {
if (token.type != TOKEN_OPERATOR) {
return (ParserU8Result) {
.is_valid = false,
.err = PARSER_UNEXPECTED_TOKEN,
};
}
switch (node.data.binary.op) {
switch (token.op) {
case OP_ADD:
case OP_SUB:
return 11;
break;
return (ParserU8Result) {
.is_valid = true,
.num = 11,
};
case OP_DIV:
case OP_MUL:
return 21;
return (ParserU8Result) {
.is_valid = true,
.num = 21,
};
case OP_POW:
return 50;
return (ParserU8Result) {
.is_valid = true,
.num = 50,
};
default:
return 0;
return (ParserU8Result) {
.is_valid = false,
.err = PARSER_UNEXPECTED_TOKEN,
};
}
}
ParseResult parse(TokenizeResult tokens) {
/*
 * Left denotation: combine an already-parsed `left` subtree with the
 * operator `token` that follows it (Pratt parsing).
 *
 * Allocates the new node from `arena` up front; for binary operators it
 * recursively parses the right-hand side from `slice` with the
 * operator's right binding power. Returns a TreeResult carrying either
 * the new subtree or a ParserErr.
 */
TreeResult led(
    ArraySlice *slice,
    Arena *arena,
    Node *left,
    Token token
) {
    // Grow the arena if needed, then carve out the node this call will
    // fill in. Note the allocation happens before the operator is
    // validated; an unexpected token simply leaves the slot unused.
    arena_ensure_capacity(
        arena,
        sizeof(Node),
        alignof(Node)
    );
    Node *node = arena_unwrap_pointer(
        arena_alloc(
            arena,
            sizeof(Node),
            alignof(Node)
        )
    );
    switch (token.op) {
    // Binary operators
    case OP_ADD:
    case OP_SUB:
    case OP_MUL:
    case OP_DIV:
    case OP_POW: {
        node->type = NODE_BINARY_OP;
        node->binary.op = token.op;
        // Right binding power decides how much of what follows belongs
        // to this operator's right operand.
        ParserU8Result rbp_result = infix_rbp(token);
        if (!rbp_result.is_valid) {
            return (TreeResult) {
                .is_valid = false,
                .err = rbp_result.err,
            };
        }
        TreeResult right = parse_expr(
            slice,
            arena,
            rbp_result.num
        );
        if (!right.is_valid) {
            // Propagate the parse error unchanged.
            return right;
        }
        node->binary.left = left;
        node->binary.right = right.node;
        return (TreeResult) {
            .is_valid = true,
            .node = node,
        };
    }
    // Postfix operators
    case OP_FACTORIAL: {
        // Postfix: wraps only the already-parsed left subtree; nothing
        // further is consumed from the slice.
        node->type = NODE_UNARY_OP;
        node->unary.op = token.op;
        node->unary.to = left;
        return (TreeResult) {
            .is_valid = true,
            .node = node,
        };
    }
    default:
        return (TreeResult) {
            .is_valid = false,
            .err = PARSER_UNEXPECTED_TOKEN,
        };
    }
}
/*
 * Null denotation: parse a token that begins an expression (Pratt
 * parsing) — an integer literal, a parenthesized sub-expression, or a
 * prefix +/- operator.
 *
 * Allocates the candidate node from `arena` up front. Returns a
 * TreeResult carrying either the parsed subtree or a ParserErr
 * (PARSER_UNMATCHED_PAREN, PARSER_UNEXPECTED_TOKEN, or an error
 * propagated from a recursive parse_expr call).
 */
TreeResult nud(ArraySlice *slice, Arena *arena, Token token) {
    // Reserve the node first; the parenthesis path below returns the
    // inner expression instead, leaving this slot unused in the arena.
    arena_ensure_capacity(
        arena,
        sizeof(Node),
        alignof(Node)
    );
    Node *node = arena_unwrap_pointer(
        arena_alloc(
            arena,
            sizeof(Node),
            alignof(Node)
        )
    );
    if (token.type == TOKEN_INTEGER) {
        // Leaf: an integer literal.
        node->type = NODE_INT;
        node->num = token.num;
        return (TreeResult) {
            .is_valid = true,
            .node = node,
        };
    }
    switch (token.op) {
    case OP_START_PAR: {
        // '(': parse a full sub-expression (min binding power 0), then
        // require the matching ')'.
        TreeResult expr = parse_expr(slice, arena, 0);
        if (!expr.is_valid) {
            return expr;
        }
        Token end_par;
        if (arrayslice_next(slice, &end_par) != ARRLIST_OK) {
            // Input ended before the closing parenthesis.
            return (TreeResult) {
                .is_valid = false,
                .err = PARSER_UNMATCHED_PAREN,
            };
        }
        if (end_par.type != TOKEN_OPERATOR ||
            end_par.op != OP_END_PAR) {
            return (TreeResult) {
                .is_valid = false,
                .err = PARSER_UNMATCHED_PAREN,
            };
        }
        return expr;
    }
    case OP_ADD:
    case OP_SUB: {
        // Prefix +/-: bind the operand using the prefix right binding
        // power.
        node->type = NODE_UNARY_OP;
        node->unary.op = token.op;
        ParserU8Result rbp_result = prefix_rbp(token);
        if (!rbp_result.is_valid) {
            return (TreeResult) {
                .is_valid = false,
                .err = rbp_result.err,
            };
        }
        TreeResult right = parse_expr(
            slice,
            arena,
            rbp_result.num
        );
        if (!right.is_valid) {
            return right;
        }
        node->unary.to = right.node;
        return (TreeResult) {
            .is_valid = true,
            .node = node,
        };
    }
    default:
        // Any other operator cannot start an expression.
        return (TreeResult) {
            .is_valid = false,
            .err = PARSER_UNEXPECTED_TOKEN,
        };
    }
}
ParserResult parse(TokenizeResult tokens) {
if (!tokens.is_valid) {
return (ParserResult) {
.is_valid = false,
.err = PARSER_INVALID_TOKENIZE,
};
}
ArraySlice *context = arraylist_slice(tokens.arr, 0, arraylist_size(tokens.arr));
Arena arena = arena_init(sizeof(ASTNode) * arraylist_size(tokens.arr)).arena;
Arena arena = arena_init(sizeof(Node) * arraylist_size(tokens.arr)).arena;
return (ParseResult) {
TreeResult result = parse_expr(context, &arena, 0);
if (!result.is_valid) {
arena_destroy(&arena);
arraylist_destroy(&tokens.arr);
return (ParserResult) {
.is_valid = false,
.err = result.err,
};
}
arraylist_destroy(&tokens.arr);
return (ParserResult) {
.is_valid = true,
.arena = arena,
.tree = parse_expr(context, &arena, 0)};
.tree = result.node};
}
ASTNode *parse_expr(ArraySlice *slice, Arena *arena, uint8_t min_bp) {
// First: Consume a first number
arena_ensure_capacity(
arena,
sizeof(ASTNode),
alignof(ASTNode)
); // shouldn't fail but if it does then what a shame
TreeResult parse_expr(ArraySlice *slice, Arena *arena, uint8_t min_bp) {
Token current_token;
// Get pointer in the arena
ASTNode *left_side = arena_unwrap_pointer(
arena_alloc(
arena,
sizeof(ASTNode),
alignof(ASTNode)
)
);
arrayslice_next(slice, left_side);
if (left_side->type == NODE_PARENTHESIS &&
left_side->data.parenthesis.op == OP_START_PAR) {
left_side = parse_expr(slice, arena, 0);
// HERE CHEKC LATER if slice.next != ')'
ASTNode *end_par;
arrayslice_next(slice, &end_par);
if (end_par->type != NODE_PARENTHESIS ||
end_par->data.parenthesis.op != OP_END_PAR) {
// todo
}
return left_side;
}
// if is unary then take prefix bp and continue
// to the right, no need to allocate left side
// because we just did and right side
// WILL return a valid allocated pointer.
if (left_side->type == NODE_UNARY_OP) {
uint8_t rbp = prefix_rbp(*left_side);
ASTNode *righ_side = parse_expr(slice, arena, rbp);
left_side->data.unary.val = righ_side;
if (arrayslice_next(slice, &current_token) != ARRLIST_OK) {
return (TreeResult) {
.is_valid = false,
.err = PARSER_UNEXPECTED_EOF,
};
}
while (true) {
// Second: Get next one and checn bp
if (!arrayslice_is_valid(slice)) {
TreeResult left_result = nud(slice, arena, current_token);
if (!left_result.is_valid) {
return left_result;
}
Node *left_side = left_result.node;
while (arrayslice_is_valid(slice)) {
Token operator_token;
arrayslice_peek(slice, &operator_token);
if (operator_token.type != TOKEN_OPERATOR) {
break;
}
// Here check if not OP error
ParserU8Result postfix_lbp_result = postfix_lbp(operator_token);
ASTNode operator;
// Here should chekc if is operator not some bs
// Third, get operator and binding powers
arrayslice_peek(slice, &operator);
// temporary for bad error handling
if (postfix_lbp(operator) != 255) {
if (postfix_lbp(operator) < min_bp) {
if (postfix_lbp_result.is_valid) {
if (postfix_lbp_result.num < min_bp) {
break;
}
// allocate operator
arrayslice_next(slice, NULL);
arena_ensure_capacity(
arena,
sizeof(ASTNode),
alignof(ASTNode));
ASTNode *new_node = arena_unwrap_pointer(
arena_alloc(
arena,
sizeof(ASTNode),
alignof(ASTNode)
)
);
*new_node = operator;
TreeResult result = led(slice, arena, left_side, operator_token);
new_node->data.unary.val = left_side;
left_side = new_node;
continue;
if (!result.is_valid) {
return result;
}
// check if it has infix or not, if not then error
uint8_t rbp = infix_rbp(operator);
uint8_t lbp = infix_lbp(operator);
if (rbp != 255 && lbp != 255) {
// If lbp is LESS then stop recursion,
// we found the next smaller binding power
// or the one with more precedence
if (lbp < min_bp) {
break;
}
// If NOT, then we continue wtching ahead
// for the next one but taking our current
// concern that is rbp of the current operator
arrayslice_next(slice, NULL);
ASTNode *right_side = parse_expr(slice, arena, rbp);
arena_ensure_capacity(
arena,
sizeof(ASTNode),
alignof(ASTNode));
ASTNode *new_node = arena_unwrap_pointer(
arena_alloc(
arena,
sizeof(ASTNode),
alignof(ASTNode)
)
);
*new_node = operator;
new_node->data.binary.left = left_side;
new_node->data.binary.right = right_side;
left_side = new_node;
left_side = result.node;
continue;
}
// Path for infix basically
ParserU8Result lbp_result = infix_lbp(operator_token);
if (!lbp_result.is_valid) {
break;
}
if (lbp_result.num < min_bp) {
break;
}
arrayslice_next(slice, NULL);
TreeResult result = led(slice, arena, left_side, operator_token);
if (!result.is_valid) {
return result;
}
left_side = result.node;
}
// Final: return left side
return left_side;
return (TreeResult){
.is_valid = true,
.node = left_side,
};
}

View File

@@ -1,6 +1,3 @@
#include "lexer.h"
#include "parser.h"
#include "evaluator.h"
#include <stdarg.h>
#include <stdbool.h>
#include <stddef.h>
@@ -9,22 +6,8 @@
#include <cmocka.h>
#include <stdlib.h>
static void test_basic_evaluation(void** state) {
(void) state;
char expr[256] = "2 + 4 * 40 / 2";
TokenizeResult tokens = tokenize(expr);
ParseResult result = parse(tokens);
int64_t value = evaluate(result);
assert_int_equal(value, 82);
}
int main(void) {
const struct CMUnitTest tests[] = {
cmocka_unit_test(test_basic_evaluation),
};
cmocka_run_group_tests(tests, NULL, NULL);
return EXIT_SUCCESS;
}

View File

@@ -1,106 +1,11 @@
#include "arraylist.h"
#include "lexer.h"
#include <stdarg.h>
#include <stdbool.h>
#include <stddef.h>
#include <stdint.h>
#include <setjmp.h>
#include <cmocka.h>
static void test_tokenize_normal_expresion(void **state) {
(void) state;
char expr[256] = "2 + 3 / 66 * 789";
ASTNode node;
TokenizeResult tokens = tokenize(expr);
assert_true(tokens.is_valid);
assert_int_equal(arraylist_size(tokens.arr), 7);
arraylist_get(tokens.arr, 0, &node);
assert_int_equal(node.type, NODE_INTEGER);
assert_int_equal(node.data.integer, 2);
arraylist_get(tokens.arr, 1, &node);
assert_int_equal(node.type, NODE_BINARY_OP);
assert_int_equal(node.data.binary.op, OP_ADD);
arraylist_get(tokens.arr, 2, &node);
assert_int_equal(node.type, NODE_INTEGER);
assert_int_equal(node.data.integer, 3);
arraylist_get(tokens.arr, 3, &node);
assert_int_equal(node.type, NODE_BINARY_OP);
assert_int_equal(node.data.binary.op, OP_DIV);
arraylist_get(tokens.arr, 4, &node);
assert_int_equal(node.type, NODE_INTEGER);
assert_int_equal(node.data.integer, 66);
arraylist_get(tokens.arr, 5, &node);
assert_int_equal(node.type, NODE_BINARY_OP);
assert_int_equal(node.data.binary.op, OP_MUL);
arraylist_get(tokens.arr, 6, &node);
assert_int_equal(node.type, NODE_INTEGER);
assert_int_equal(node.data.integer, 789);
}
static void test_tokenize_unrecognized_symbol(void **state) {
(void) state;
char expr[256] = " 2 j 3 / 66 } 789";
TokenizeResult tokens = tokenize(expr);
assert_false(tokens.is_valid);
assert_uint_equal(tokens.err, LEXER_NOT_RECOGNIZED_SYMBOL);
}
static void test_tokenize_wrong_sintax(void **state) {
(void) state;
char expr[256] = "2 3 / 66 789";
TokenizeResult tokens = tokenize(expr);
assert_false(tokens.is_valid);
assert_uint_equal(tokens.err, LEXER_WRONG_SYNTAX);
}
static void test_string_to_number_normal(void **state) {
(void) state;
char num[16] = "2333t55";
size_t offset = 0;
ASTNodeResult result = tokenize_number(num, &offset);
assert_true(result.is_valid);
assert_int_equal(offset, 4); // equal to t position in string
assert_int_equal(result.node.type, NODE_INTEGER);
assert_int_equal(result.node.data.integer, 2333);
}
static void test_string_to_number_overflow(void **state) {
(void) state;
// Number is INT64_MAX but with a extra 899 at the end
char num[32] = "92233720368547758079";
size_t offset = 0;
ASTNodeResult result = tokenize_number(num, &offset);
assert_false(result.is_valid);
assert_uint_equal(result.err, LEXER_INT_OVERFLOW);
// Technically it can trigger a buf overflow error but obviously
// it will trigger int overflow error first
}
#include <stdlib.h>
int main(void) {
const struct CMUnitTest tests[] = {
cmocka_unit_test(test_string_to_number_normal),
cmocka_unit_test(test_string_to_number_overflow),
cmocka_unit_test(test_tokenize_normal_expresion),
cmocka_unit_test(test_tokenize_unrecognized_symbol),
cmocka_unit_test(test_tokenize_wrong_sintax),
};
return cmocka_run_group_tests(tests, NULL, NULL);
return EXIT_SUCCESS;
}

View File

@@ -1,81 +1,11 @@
#include "arena.h"
#include "arraylist.h"
#include "lexer.h"
#include "parser.h"
#include <stdarg.h>
#include <stdbool.h>
#include <stddef.h>
#include <stdint.h>
#include <setjmp.h>
#include <cmocka.h>
static void test_parsing_basic_expression(void **state) {
(void) state;
char expr[256] = "2 + 3 / 66 * 789";
TokenizeResult tokens = tokenize(expr);
assert_true(tokens.is_valid);
assert_int_equal(arraylist_size(tokens.arr), 7);
ParseResult result = parse(tokens);
// Assert head is +
assert_int_equal(result.tree->type, NODE_BINARY_OP);
assert_int_equal(result.tree->data.binary.op, OP_ADD);
assert_int_equal(result.tree->data.binary.left->type, NODE_INTEGER);
assert_int_equal(result.tree->data.binary.left->data.integer, 2);
assert_int_equal(
result.tree->data.binary.right->type,
NODE_BINARY_OP
);
assert_int_equal(
result.tree->data.binary.right->data.binary.op,
OP_MUL
);
assert_int_equal(
result.tree->data.binary.right->data.binary.right->type,
NODE_INTEGER);
assert_int_equal(
result.tree->data.binary.right->data.binary.right->data.integer,
789);
assert_int_equal(
result.tree->data.binary.right->data.binary.left->type,
NODE_BINARY_OP
);
assert_int_equal(
result.tree->data.binary.right->data.binary.left->data.binary.op,
OP_DIV
);
assert_int_equal(
result.tree->data.binary.right->data.binary.left->data.binary.right->type,
NODE_INTEGER
);
assert_int_equal(
result.tree->data.binary.right->data.binary.left->data.binary.right->data.integer,
66
);
assert_int_equal(
result.tree->data.binary.right->data.binary.left->data.binary.left->type,
NODE_INTEGER
);
assert_int_equal(
result.tree->data.binary.right->data.binary.left->data.binary.left->data.integer,
3
);
arena_destroy(&result.arena);
}
#include <stdlib.h>
int main(void) {
const struct CMUnitTest tests [] = {
cmocka_unit_test(test_parsing_basic_expression),
};
return cmocka_run_group_tests(tests, NULL, NULL);
return EXIT_SUCCESS;
}