Compare commits

..

30 Commits

Author SHA1 Message Date
9ea1da549f Merge pull request 'refactor-error-handling' (#12) from refactor-error-handling into main
Reviewed-on: #12
2026-05-13 19:05:54 -06:00
ab791dbc9b fix: tests and main 2026-05-13 18:48:14 -06:00
3ec73559ee refactor: evaluator separated into evaluate bin and un
It is nicer this way; it also made it more beautiful to look at and therefore
easier to understand.
2026-05-13 18:06:01 -06:00
b56a368244 refactor: bp functions take tokens now
necessary for cleanness
2026-05-13 17:48:03 -06:00
6294121e91 refactor: nud and led have different responsibilities
So now nud and led do what they were supposed to do, I guess; now I
think it is just a matter of adjusting infix and postfix and all the bp
functions to act on the operator instead.
2026-05-13 17:35:52 -06:00
70ab06964c addition: nud and led token to node distinction
I think I should instead have a nud and led function, I guess; I may do
that next
2026-05-13 16:09:03 -06:00
90c426f3a4 refactor: moved NodeResult to TreeResult 2026-05-13 12:19:17 -06:00
efa0e3bacd refactor: evaluator incomplete.
SO, I forgot to implement nud and led correctly, and the parser can't tell
- as unary apart from - as binary (+ as well); I need to correct
that, and move Node * to TreeResult so as to use NodeResult with nud and led
2026-05-13 12:13:07 -06:00
542a94ef81 refactor: All of parser.c
DAMN, it wasn't that difficult; it just bothers me a bit that the part that
checks whether both lbp and rbp of the infix are valid does the validation
twice, but it's fine I guess — maybe using an else? I'll see if I change
it; for now I need to change the evaluator
2026-05-13 11:09:22 -06:00
80e05a9acf refactor: changed parser.h, added Node
So I just added Node back, but now clearly separated into tokens and nodes of
the AST as it should be; now to really rework the mess that is the parser
2026-05-13 10:02:55 -06:00
f3373123e1 refactor: adapted lexer to work with new tokens
Now it's fine; I find the code clear if one just sits down to read it
for a moment. Next is the parser, which REALLY needs reworking, plus a few
helper structs.
2026-05-13 09:49:28 -06:00
2a73f5f9d6 refactor: delete ASTNode, add Token to lexer
So, total refactor — now we're serious. I feel ASTNode was getting very
bloated, so we need to rewrite and adapt everything; for now let's get the
lexer working again, as it is already well written, for me at least.
2026-05-13 09:37:15 -06:00
e3d64596ab Merge pull request 'refactor-lexer' (#11) from refactor-lexer into main
Reviewed-on: #11
2026-05-12 20:08:39 -06:00
56c80fa071 addition: Managing of parenthesis
It's a mess — I was writing straight nonsense, but it conceptually
should work; I just need to refactor it thoroughly to make it way
cleaner than it actually is, and also later fix the evaluator,
which really doesn't hold up now (not that bad really, it is nice, but obviously
doesn't work; I like my code a lot :)
2026-05-12 20:04:41 -06:00
7f390a8c6b addition: postfix operator capability, may work 2026-05-12 19:40:42 -06:00
e30b3d7175 addition: processing of prefix op 2026-05-12 18:33:52 -06:00
59f99059bb refactor: changes and additions to parser 2026-05-12 18:15:36 -06:00
c41847e120 refactor: rewrote tokenize and modified other funcs
Well, I wanted to wildly change a lot of things about the lexer, thinking
I could do something better, but really all I found were automatic lexers
that at least for me don't really fit the project — so a manual one it is;
I guess it is technically an automaton. Whatever, it's good enough.
2026-04-30 21:34:27 -06:00
f2c906c6aa initial-commit 2026-04-30 10:40:17 -06:00
fee33ff1f0 Merge pull request 'refactor-generic-array' (#9) from refactor-generic-array into main
Reviewed-on: #9
all gut
2026-04-30 10:05:21 -06:00
ac2e783ccc fix: tests and implementation of lexer
Just a few details here and there, nothing wrong, everything else is
going well.
2026-04-30 09:58:27 -06:00
630d9f53e1 test: changed lexer tests 2026-04-24 09:36:03 -06:00
b7e1cdf3a6 refactor: made parser work with arrayslices and new result types 2026-04-24 09:06:47 -06:00
cef046f7db refactor: changed string to int and tokenize number 2026-04-24 08:09:31 -06:00
19c84c382b refactor: changed function definitions, modified tokenize 2026-04-24 07:17:35 -06:00
855d683005 addition: Result structs for rework 2026-04-24 07:02:00 -06:00
576bcd9504 addition: starting to refactor NodeArray into ArrayList 2026-04-23 15:37:16 -06:00
e6420cb1c9 add/fix: Added arena implementation and cmake rework 2026-04-23 12:39:04 -06:00
f50546bd07 Merge pull request 'feature-AST-using-arena' (#8) from feature-AST-using-arena into main
Reviewed-on: #8
2026-04-23 12:34:30 -06:00
c99f307827 Merge pull request 'feature-AST-using-arena' (#5) from feature-AST-using-arena into main
Reviewed-on: #5
2026-04-13 08:58:47 -06:00
16 changed files with 681 additions and 648 deletions

1
.gitignore vendored
View File

@@ -9,6 +9,7 @@ out/Release/
# Cmake files
CMakeCache.txt
cmake
CMakeFiles/
cmake_install.cmake
CTestTestfile.cmake

View File

@@ -3,16 +3,8 @@ project(calculator VERSION 1.0 LANGUAGES C)
set(CMAKE_C_STANDARD 11)
set(CMAKE_C_STANDARD_REQUIRED ON)
include(FetchContent)
FetchContent_Declare(
arena
GIT_REPOSITORY https://laentropia-homelab.tail7368da.ts.net/laentropia/Arena.git
GIT_TAG main
SOURCE_DIR ${CMAKE_SOURCE_DIR}/external/arena
)
# Export compile_commands.json (para clangd)
# clangd
set(CMAKE_EXPORT_COMPILE_COMMANDS ON)
add_compile_options(
@@ -21,22 +13,24 @@ add_compile_options(
-Wpedantic
)
include(cmake/CPM.cmake)
FetchContent_MakeAvailable(arena)
add_library(arena STATIC
external/arena/src/arena.c
CPMAddPackage(
NAME arena
GIT_REPOSITORY https://laentropia-homelab.tail7368da.ts.net/laentropia/Arena.git
GIT_TAG main
)
target_include_directories(arena
PUBLIC ${CMAKE_SOURCE_DIR}/external/arena/include
CPMAddPackage(
NAME arraylist
GIT_REPOSITORY https://laentropia-homelab.tail7368da.ts.net/laentropia/ArrayList.git
GIT_TAG main
)
add_library(calculator_lib
src/lexer.c
src/parser.c
src/evaluator.c
src/ASTNodeArray.c
)
target_include_directories(calculator_lib
@@ -45,16 +39,12 @@ target_include_directories(calculator_lib
target_link_libraries(calculator_lib
PUBLIC arena
PUBLIC arraylist
PRIVATE m
)
add_executable(calculator src/main.c)
target_link_libraries(calculator calculator_lib)
# ------------------------
# Testing
# ------------------------
enable_testing()
add_subdirectory(test)

1
external/arena vendored

Submodule external/arena deleted from 3d3b8596cc

View File

@@ -1,11 +1,28 @@
#ifndef EVALUATOR_H
#define EVALUATOR_H
#include "lexer.h"
#include "parser.h"
#include <stdint.h>
int64_t evaluate(ParseResult context);
int64_t evaluate_tree(ASTNode *tree);
typedef enum {
EVALUATOR_OK,
EVALUATOR_MATH_ERR,
EVALUATOR_INVALID_PARSING,
EVALUATOR_INVALID_TREE, // just to shut up the compiler with the swithces
} EvaluatorErr;
typedef struct {
bool is_valid;
union {
int64_t val;
EvaluatorErr err;
};
} EvaluatorResult;
EvaluatorResult evaluate_binary(Node *tree);
EvaluatorResult evaluate_unary(Node *tree);
EvaluatorResult evaluate(ParserResult context);
EvaluatorResult evaluate_tree(Node *tree);
#endif // !EVALUATOR_H

View File

@@ -1,76 +1,74 @@
#ifndef LEXER_H
#define LEXER_H
#include "arraylist.h"
#include <stddef.h>
#include <stdbool.h>
#include <stdint.h>
// For identifing
typedef enum {
NODE_INTEGER,
NODE_BINARY_OP,
} ASTNodeType;
TOKEN_INTEGER,
TOKEN_OPERATOR,
} TokenType;
// For classify operators
typedef enum {
OP_ADD,
OP_SUB,
OP_MUL,
OP_DIV
OP_DIV,
OP_POW,
OP_FACTORIAL,
OP_START_PAR,
OP_END_PAR,
} Operator;
typedef enum {
ARRAY_OK = 0,
ARRAY_NULL,
ARRAY_EMPTY,
ARRAY_OUT_OF_BOUNDS,
ARRAY_NULL_ARG,
ARRAY_ALLOC,
} ASTNodeArrayErr;
typedef enum {
LEXER_OK = 0,
LEXER_INT_OVERFLOW,
LEXER_FAILED_NUMBER_CONVERSION,
LEXER_NOT_RECOGNIZED_SYMBOL,
LEXER_EMPTY_INPUT,
LEXER_NULL_ARG,
LEXER_WRONG_SYNTAX,
LEXER_BUF_OVERFLOW,
} LexerErr;
// Can be thought as tokens, they will be used by the parser.
typedef struct ASTNode {
ASTNodeType type;
union {
int64_t integer;
struct {
struct ASTNode *left;
struct ASTNode *right;
Operator op;
} binary;
} data;
} ASTNode;
// I prefer ot have a dynamic array for storing the "tokens"
typedef struct {
size_t len;
size_t cap;
ASTNode *data;
} ASTNodeArray;
TokenType type;
union {
int64_t num;
Operator op;
};
} Token;
ASTNodeArray ASTNodeArray_init(size_t size);
void ASTNodeArray_free(ASTNodeArray *arr);
ASTNodeArrayErr ASTNodeArray_push(ASTNodeArray *arr, ASTNode node);
ASTNodeArrayErr ASTNodeArray_get(const ASTNodeArray *arr, size_t index, ASTNode *out);
// Out in pop can be NULL so it doesn't return anything
ASTNodeArrayErr ASTNodeArray_pop(ASTNodeArray *arr, size_t index, ASTNode *out);
size_t ASTNodeArray_len(ASTNodeArray *arr);
typedef struct {
bool is_valid;
union {
LexerErr err;
ArrayList *arr;
};
} TokenizeResult;
typedef struct {
bool is_valid;
union {
LexerErr err;
Token token;
};
} TokenResult;
typedef struct {
bool is_valid;
union {
LexerErr err;
int64_t num;
};
} LexerI64Result;
// Lexer funtions as well as few functionality
LexerErr tokenize(const char* input, ASTNodeArray *out);
LexerErr tokenize_number(const char* input, size_t *offset, ASTNode *out);
LexerErr string_to_integer(const char buf[], int64_t *number);
TokenizeResult tokenize(const char* input);
TokenResult tokenize_number(const char* input, size_t *offset);
LexerI64Result string_to_integer(const char buf[]);
bool isoperator(int c);
Operator char_to_operator(int c);
char operator_to_char(Operator op);

View File

@@ -3,33 +3,87 @@
#include "lexer.h"
#include "arena.h"
#include "arraylist.h"
#include <stdbool.h>
#include <stdint.h>
typedef struct {
ASTNode *head;
} AST;
typedef enum {
NODE_INT,
NODE_BINARY_OP,
NODE_UNARY_OP,
} NodeType;
typedef struct Node {
NodeType type;
union {
int64_t num;
struct {
Operator op;
struct Node *left;
struct Node *right;
}binary;
struct {
Operator op;
struct Node *to;
}unary;
Operator par;
};
} Node;
typedef enum {
PARSER_OK = 0,
PARSER_UNEXPECTED_TOKEN,
PARSER_MISSING_OPERAND,
PARSER_UNMATCHED_PAREN,
PARSER_OUT_OF_MEMORY,
PARSER_INVALID_TOKENIZE,
PARSER_UNEXPECTED_EOF,
} ParserErr;
typedef struct {
ASTNodeArray *arr;
size_t pos;
} ASTNodeSlice;
bool is_valid;
union {
ParserErr err;
struct {
Arena arena;
Node *tree;
};
};
} ParserResult;
typedef struct {
bool is_valid;
union {
ParserErr err;
Node *node;
};
} TreeResult;
typedef struct {
Arena arena;
ASTNode *tree;
} ParseResult;
bool is_valid;
union {
ParserErr err;
Node node;
};
} NodeResult;
ASTNode ASTNodeSlice_peek(ASTNodeSlice *slice);
ASTNode ASTNodeSlice_next(ASTNodeSlice *slice);
bool ASTNodeSlice_is_valid(ASTNodeSlice *slice);
typedef struct {
bool is_valid;
union {
ParserErr err;
uint8_t num;
};
} ParserU8Result;
ASTNode *nud(ASTNodeSlice *slice);
ASTNode *led(ASTNodeSlice *slice, size_t right_precedence);
TreeResult nud(ArraySlice *slice, Arena *arena, Token token); // Null denotation
TreeResult led(ArraySlice *slice, Arena *arena, Node *left, Token token); // Left denotation
uint8_t node_lbp(ASTNode node);
uint8_t node_rbp(ASTNode node);
ParserU8Result prefix_rbp(Token token);
ParserU8Result postfix_lbp(Token token);
ParserU8Result infix_lbp(Token token);
ParserU8Result infix_rbp(Token token);
ParseResult parse(ASTNodeArray *arr);
ASTNode *parse_expr(ASTNodeSlice *slice, Arena *arena, uint8_t min_bp);
ParserResult parse(TokenizeResult tokens);
TreeResult parse_expr(ArraySlice *slice, Arena *arena, uint8_t min_bp);
#endif // !PARSER_H

View File

@@ -1,108 +0,0 @@
#include "lexer.h"
#include <stdlib.h>
#define NODE_ARRAY_DEFAULT_SIZE 64
// Helps state machine for the lexer :)
typedef enum {
WAIT_FOR_NUMBER,
WAIT_FOR_OPERATOR,
} LexerState;
ASTNodeArray ASTNodeArray_init(size_t size) {
ASTNodeArray new;
new.len = 0; // if 0 then use default
new.cap = size == 0 ? NODE_ARRAY_DEFAULT_SIZE : size;
new.data = malloc(new.cap * sizeof(ASTNode));
return new;
}
void ASTNodeArray_free(ASTNodeArray *arr) {
free(arr->data);
arr->cap = 0;
arr->len = 0;
}
ASTNodeArrayErr ASTNodeArray_get(const ASTNodeArray *arr, size_t index, ASTNode *out) {
if (arr == NULL) {
return ARRAY_NULL;
}
if (out == NULL) {
return ARRAY_NULL_ARG;
}
if (arr->len == 0) {
return ARRAY_EMPTY;
}
if (index >= arr->len) {
return ARRAY_OUT_OF_BOUNDS;
}
*out = arr->data[index];
return ARRAY_OK;
}
ASTNodeArrayErr ASTNodeArray_push(ASTNodeArray *arr, ASTNode node) {
if (arr == NULL) {
return ARRAY_NULL;
}
if (arr->len >= arr->cap) {
size_t new_cap = arr->cap * 2;
ASTNode *tmp = realloc(arr->data, new_cap * sizeof(ASTNode));
if (tmp == NULL) {
return ARRAY_ALLOC;
}
arr->data = tmp;
arr->cap = new_cap;
}
arr->data[arr->len] = node;
arr->len = arr->len + 1;
return ARRAY_OK;
}
ASTNodeArrayErr ASTNodeArray_pop(ASTNodeArray *arr, size_t index, ASTNode *out) {
if (arr == NULL) {
return ARRAY_NULL;
}
if (arr->len == 0) {
return ARRAY_EMPTY;
}
if (index >= arr->len) {
return ARRAY_OUT_OF_BOUNDS;
}
if (arr->cap / 4 > arr->len) {
size_t new_cap = arr->cap / 2;
ASTNode *tmp = realloc(arr->data, new_cap * sizeof(ASTNode));
if (tmp == NULL) {
return ARRAY_ALLOC;
}
arr->data = tmp;
arr->cap = new_cap;
}
if (out != NULL) {
ASTNode node_to_delete = arr->data[index];
*out = node_to_delete;
}
for (size_t i = index; i < arr->len - 1; i++) {
arr->data[index] = arr->data[index + 1];
}
return ARRAY_OK;
}
size_t ASTNodeArray_len(ASTNodeArray *arr) {
if (arr == NULL) {
return 0;
}
return arr->len;
}

View File

@@ -2,34 +2,113 @@
#include "arena.h"
#include "lexer.h"
#include "parser.h"
#include <stdbool.h>
#include <stdint.h>
#include <math.h>
int64_t evaluate_tree(ASTNode *tree) {
EvaluatorResult evaluate_tree(Node *tree) {
if (tree->type == NODE_BINARY_OP) {
Operator op = tree->data.binary.op;
ASTNode *left = tree->data.binary.left;
ASTNode *right = tree->data.binary.right;
switch (op) {
case OP_ADD:
return evaluate_tree(left) + evaluate_tree(right);
case OP_SUB:
return evaluate_tree(left) - evaluate_tree(right);
case OP_MUL:
return evaluate_tree(left) * evaluate_tree(right);
case OP_DIV:
return evaluate_tree(left) / evaluate_tree(right);
}
return evaluate_binary(tree);
} else if (tree->type == NODE_UNARY_OP) {
return evaluate_unary(tree);
}
int64_t return_val = tree->data.integer;
return return_val;
return (EvaluatorResult) {
.is_valid = true,
.val = tree->num,
};
}
int64_t evaluate(ParseResult context) {
int64_t result = evaluate_tree(context.tree);
EvaluatorResult evaluate_binary(Node *tree) {
Operator op = tree->binary.op;
Node *left = tree->binary.left;
Node *right = tree->binary.right;
EvaluatorResult left_result = evaluate_tree(left);
EvaluatorResult right_result = evaluate_tree(right);
if (!left_result.is_valid) {
return left_result;
}
if (!left_result.is_valid) {
return left_result;
}
switch (op) {
case OP_ADD:
return (EvaluatorResult) {
.is_valid = true,
.val = left_result.val + right_result.val,
};
case OP_SUB:
return (EvaluatorResult) {
.is_valid = true,
.val = left_result.val - right_result.val,
};
case OP_MUL:
return (EvaluatorResult) {
.is_valid = true,
.val = left_result.val * right_result.val,
};
case OP_DIV:
return (EvaluatorResult) {
.is_valid = true,
.val = left_result.val / right_result.val,
};
case OP_POW:
return (EvaluatorResult) {
.is_valid = true,
.val = pow(left_result.val, right_result.val),
};
default:
return (EvaluatorResult) {
.is_valid = false,
.err = EVALUATOR_INVALID_TREE,
};
}
}
EvaluatorResult evaluate_unary(Node *tree) {
Operator op = tree->unary.op;
Node *to = tree->unary.to;
EvaluatorResult result = evaluate_tree(to);
if (!result.is_valid) {
return result;
}
switch (op) {
case OP_ADD:
return result;
case OP_SUB:
return (EvaluatorResult) {
.is_valid = true,
.val = -result.val,
};
case OP_FACTORIAL:
return (EvaluatorResult) {
.is_valid = true,
.val = tgamma(result.val + 1),
};
default:
return (EvaluatorResult) {
.is_valid = false,
.err = EVALUATOR_INVALID_TREE,
};
}
}
EvaluatorResult evaluate(ParserResult context) {
if (!context.is_valid) {
return (EvaluatorResult) {
.is_valid = false,
.err = EVALUATOR_INVALID_PARSING,
};
}
EvaluatorResult result = evaluate_tree(context.tree);
arena_destroy(&context.arena);
return result;

View File

@@ -1,9 +1,11 @@
#include "lexer.h"
#include "arraylist.h"
#include <ctype.h>
#include <math.h>
#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>
#include <strings.h>
#include <string.h>
#include <limits.h>
typedef enum {
@@ -12,110 +14,113 @@ typedef enum {
} LexerState;
LexerErr tokenize(const char *input, ASTNodeArray *out) {
TokenizeResult tokenize(const char *input) {
ArrayList *arr = arraylist_init(64, sizeof(Token));
size_t offset = 0;
LexerState state = WAIT_FOR_NUMBER;
ASTNodeArray arr = ASTNodeArray_init(0); // 0 defaults to 64
while (input[offset] != '\n' && input[offset] != '\0') {
int current = input[offset];
while (input[offset] != '\0') {
if (isdigit(current)) {
if (state != WAIT_FOR_NUMBER) {
ASTNodeArray_free(&arr);
return LEXER_WRONG_SYNTAX;
}
ASTNode new_node;
LexerErr result = tokenize_number(input, &offset, &new_node);
if (isdigit(input[offset])) {
TokenResult result = tokenize_number(input, &offset);
if (result != LEXER_OK) {
ASTNodeArray_free(&arr);
return result;
if (!result.is_valid) {
arraylist_destroy(&arr);
return (TokenizeResult) {.is_valid = false, .err = result.err};
}
ASTNodeArray_push(&arr, new_node);
state = WAIT_FOR_OPERATOR;
} else if (isoperator(current)) {
if (state != WAIT_FOR_OPERATOR) {
return LEXER_WRONG_SYNTAX;
}
ASTNode new_node = {
.type = NODE_BINARY_OP,
.data.binary.op = char_to_operator(current),
.data.binary.right = NULL,
.data.binary.left = NULL,
arraylist_push_back(arr, &result.token);
} else if (isoperator(input[offset])) {
Token op_node = {
.type = TOKEN_OPERATOR,
.op = char_to_operator(input[offset]),
};
ASTNodeArray_push(&arr, new_node);
state = WAIT_FOR_NUMBER;
} else if (isspace(current)) {
arraylist_push_back(arr, &op_node);
} else if (isspace(input[offset])) {
// Nothing...
} else {
ASTNodeArray_free(&arr);
return LEXER_NOT_RECOGNIZED_SYMBOL;
arraylist_destroy(&arr);
return (TokenizeResult) {
.is_valid = false,
.err = LEXER_NOT_RECOGNIZED_SYMBOL};
}
offset++;
}
if (arr.len < 1) {
return LEXER_EMPTY_INPUT;
if (arraylist_size(arr) < 1) {
arraylist_destroy(&arr);
return (TokenizeResult) {.is_valid = false, .err = LEXER_EMPTY_INPUT};
}
*out = arr;
return LEXER_OK;
return (TokenizeResult) {.is_valid = true, .arr = arr};
}
// CURRENTLY, it only supports ints, not clear how floating
// point is implemented but i'll figure it out
LexerErr tokenize_number(const char *input, size_t *offset, ASTNode *out) {
char buf[128] = { '\0' };
TokenResult tokenize_number(const char *input, size_t *offset) {
char buf[64] = { '\0' };
size_t buf_pos = 0;
bool is_integer = true; // Will later be used to differentiate fractions
// read number
size_t current = *offset;
while (isdigit(input[current])) {
if (buf_pos >= sizeof(buf) - 1) {
return (TokenResult) {
.is_valid = false,
.err = LEXER_BUF_OVERFLOW};
}
buf[buf_pos] = input[current];
if (buf_pos >= sizeof(buf)) {
return LEXER_BUF_OVERFLOW;
}
current++;
buf_pos++;
}
ASTNode new_node;
Token new_token;
if (is_integer) {
new_node.type = NODE_INTEGER;
LexerErr status = string_to_integer(buf, &new_node.data.integer);
if (status == LEXER_OK) {
*out = new_node;
new_token.type = TOKEN_INTEGER;
LexerI64Result result = string_to_integer(buf);
if (!result.is_valid) {
return (TokenResult) {.is_valid = false, .err = result.err};
}
*offset = current;
return status;
new_token.num = result.num;
*offset = current - 1;
return (TokenResult) {.is_valid = true, .token = new_token};
}
return LEXER_FAILED_NUMBER_CONVERSION;
return (TokenResult) {
.is_valid = false,
.err = LEXER_FAILED_NUMBER_CONVERSION};
}
LexerErr string_to_integer(const char *buf, int64_t *number) {
LexerI64Result string_to_integer(const char *buf) {
int c = 0;
int64_t count = 0;
while (buf[c] != '\0') {
// Extracts number from char
int digit = buf[c] - '0';
if (count > (INT64_MAX - digit) / 10) {
return LEXER_INT_OVERFLOW;
return (LexerI64Result) {
.is_valid = false,
.err = LEXER_INT_OVERFLOW};
}
count = count * 10;
count += digit;
c++;
}
*number = count;
return LEXER_OK;
return (LexerI64Result) {.is_valid = true, .num = count};
}
bool isoperator(int c) {
@@ -124,6 +129,10 @@ bool isoperator(int c) {
case '-':
case '/':
case '*':
case '^':
case '!':
case '(':
case ')':
return true;
default:
return false;
@@ -144,6 +153,18 @@ Operator char_to_operator(int c) {
case '/':
return OP_DIV;
break;
case '^':
return OP_POW;
break;
case '!':
return OP_FACTORIAL;
break;
case '(':
return OP_START_PAR;
break;
case ')':
return OP_END_PAR;
break;
default: // I mean shouldn't be used, we assume
return -1;
}
@@ -159,5 +180,15 @@ char operator_to_char(Operator op) {
return '*';
case OP_DIV:
return '/';
case OP_POW:
return '^';
case OP_FACTORIAL:
return '!';
case OP_START_PAR:
return '(';
case OP_END_PAR:
return ')';
default:
return EOF;
}
}

View File

@@ -1,4 +1,3 @@
#include "arena.h"
#include "evaluator.h"
#include "lexer.h"
#include "parser.h"
@@ -19,13 +18,11 @@ int main(void) {
}
buf[pos] = '\0';
ASTNodeArray context;
tokenize(buf, &context);
EvaluatorResult result = evaluate(parse(tokenize(buf)));
if (!result.is_valid) {
puts("Error checando expresion");
}
ParseResult par = parse(&context);
int64_t result = evaluate(par);
printf("El resultado es: %" PRIi64 "\n", result);
printf("El resultado es: %" PRIi64 "\n", result.val);
return EXIT_SUCCESS;
}

View File

@@ -1,134 +1,392 @@
#include "parser.h"
#include "arraylist.h"
#include "lexer.h"
#include "arena.h"
#include <cmocka.h>
#include <stdalign.h>
#include <stdbool.h>
#include <stdint.h>
#include <stdlib.h>
ParserU8Result prefix_rbp(Token token) {
if (token.type == TOKEN_INTEGER) {
return (ParserU8Result) {
.is_valid = false,
.err = PARSER_UNEXPECTED_TOKEN,
};
}
switch (token.op) {
case OP_SUB:
case OP_ADD:
return (ParserU8Result) {
.is_valid = true,
.num = 30,
};
default:
return (ParserU8Result) {
.is_valid = false,
.err = PARSER_UNEXPECTED_TOKEN,
};
}
}
uint8_t node_lbp(ASTNode node) {
if (node.type == NODE_INTEGER) {
return 0;
ParserU8Result postfix_lbp(Token token) {
if (token.type != TOKEN_OPERATOR) {
return (ParserU8Result) {
.is_valid = false,
.err = PARSER_UNEXPECTED_TOKEN,
};
}
switch (node.data.binary.op) {
switch (token.op) {
case OP_FACTORIAL:
return (ParserU8Result) {
.is_valid = true,
.num = 40,
};
default:
return (ParserU8Result) {
.is_valid = false,
.err = PARSER_UNEXPECTED_TOKEN,
};
}
}
ParserU8Result infix_lbp(Token token) {
if (token.type != TOKEN_OPERATOR) {
return (ParserU8Result) {
.is_valid = false,
.err = PARSER_UNEXPECTED_TOKEN,
};
}
switch (token.op) {
case OP_ADD:
case OP_SUB:
return 10;
break;
return (ParserU8Result) {
.is_valid = true,
.num = 10,
};
case OP_DIV:
case OP_MUL:
return 20;
return (ParserU8Result) {
.is_valid = true,
.num = 20,
};
case OP_POW:
return (ParserU8Result) {
.is_valid = true,
.num = 51,
};
default:
return 0;
return (ParserU8Result) {
.is_valid = false,
.err = PARSER_UNEXPECTED_TOKEN,
};
}
}
uint8_t node_rbp(ASTNode node) {
if (node.type == NODE_INTEGER) {
return 0;
ParserU8Result infix_rbp(Token token) {
if (token.type != TOKEN_OPERATOR) {
return (ParserU8Result) {
.is_valid = false,
.err = PARSER_UNEXPECTED_TOKEN,
};
}
switch (node.data.binary.op) {
switch (token.op) {
case OP_ADD:
case OP_SUB:
return 11;
break;
return (ParserU8Result) {
.is_valid = true,
.num = 11,
};
case OP_DIV:
case OP_MUL:
return 21;
return (ParserU8Result) {
.is_valid = true,
.num = 21,
};
case OP_POW:
return (ParserU8Result) {
.is_valid = true,
.num = 50,
};
default:
return 0;
return (ParserU8Result) {
.is_valid = false,
.err = PARSER_UNEXPECTED_TOKEN,
};
}
}
ASTNode ASTNodeSlice_next(ASTNodeSlice *slice) {
return slice->arr->data[slice->pos++];
}
ASTNode ASTNodeSlice_peek(ASTNodeSlice *slice) {
return slice->arr->data[slice->pos];
}
bool ASTNodeSlice_is_valid(ASTNodeSlice *slice) {
if (slice->arr->len < 1) {
return false;
}
if (slice->pos >= slice->arr->len) {
return false;
}
return true;
}
ParseResult parse(ASTNodeArray *arr) {
ASTNodeSlice context = {
.arr = arr,
.pos = 0,
};
Arena arena = arena_init(sizeof(ASTNode) * arr->len).arena;
return (ParseResult) {
.arena = arena,
.tree = parse_expr(&context, &arena, 0)};
}
ASTNode *parse_expr(ASTNodeSlice *slice, Arena *arena, uint8_t min_bp) {
TreeResult led(
ArraySlice *slice,
Arena *arena,
Node *left,
Token token
) {
arena_ensure_capacity(
arena,
sizeof(ASTNode),
alignof(ASTNode)
sizeof(Node),
alignof(Node)
);
ASTNode *left_side = arena_unwrap_pointer(
Node *node = arena_unwrap_pointer(
arena_alloc(
arena,
sizeof(ASTNode),
alignof(ASTNode)
sizeof(Node),
alignof(Node)
)
);
*left_side = ASTNodeSlice_next(slice);
switch (token.op) {
while (true) {
if (!ASTNodeSlice_is_valid(slice)) {
break;
}
// Binary operators
case OP_ADD:
case OP_SUB:
case OP_MUL:
case OP_DIV:
case OP_POW: {
node->type = NODE_BINARY_OP;
node->binary.op = token.op;
ASTNode operator = ASTNodeSlice_peek(slice);
uint8_t rbp = node_rbp(operator);
uint8_t lbp = node_lbp(operator);
ParserU8Result rbp_result = infix_rbp(token);
if (!rbp_result.is_valid) {
return (TreeResult) {
.is_valid = false,
.err = rbp_result.err,
};
}
if (lbp < min_bp) {
break;
}
ASTNodeSlice_next(slice);
ASTNode *right_side = parse_expr(slice, arena, rbp);
arena_ensure_capacity(
arena,
sizeof(ASTNode),
alignof(ASTNode));
ASTNode *new_node = arena_unwrap_pointer(
arena_alloc(
TreeResult right = parse_expr(
slice,
arena,
sizeof(ASTNode),
alignof(ASTNode)
)
);
*new_node = operator;
rbp_result.num
);
new_node->data.binary.left = left_side;
new_node->data.binary.right = right_side;
if (!right.is_valid) {
return right;
}
left_side = new_node;
node->binary.left = left;
node->binary.right = right.node;
return (TreeResult) {
.is_valid = true,
.node = node,
};
}
// Postfix operators
case OP_FACTORIAL: {
node->type = NODE_UNARY_OP;
node->unary.op = token.op;
node->unary.to = left;
return (TreeResult) {
.is_valid = true,
.node = node,
};
}
default:
return (TreeResult) {
.is_valid = false,
.err = PARSER_UNEXPECTED_TOKEN,
};
}
}
TreeResult nud(ArraySlice *slice, Arena *arena, Token token) {
arena_ensure_capacity(
arena,
sizeof(Node),
alignof(Node)
);
Node *node = arena_unwrap_pointer(
arena_alloc(
arena,
sizeof(Node),
alignof(Node)
)
);
if (token.type == TOKEN_INTEGER) {
node->type = NODE_INT;
node->num = token.num;
return (TreeResult) {
.is_valid = true,
.node = node,
};
}
switch (token.op) {
case OP_START_PAR: {
TreeResult expr = parse_expr(slice, arena, 0);
if (!expr.is_valid) {
return expr;
}
return left_side;
Token end_par;
if (arrayslice_next(slice, &end_par) != ARRLIST_OK) {
return (TreeResult) {
.is_valid = false,
.err = PARSER_UNMATCHED_PAREN,
};
}
if (end_par.type != TOKEN_OPERATOR ||
end_par.op != OP_END_PAR) {
return (TreeResult) {
.is_valid = false,
.err = PARSER_UNMATCHED_PAREN,
};
}
return expr;
}
case OP_ADD:
case OP_SUB: {
node->type = NODE_UNARY_OP;
node->unary.op = token.op;
ParserU8Result rbp_result = prefix_rbp(token);
if (!rbp_result.is_valid) {
return (TreeResult) {
.is_valid = false,
.err = rbp_result.err,
};
}
TreeResult right = parse_expr(
slice,
arena,
rbp_result.num
);
if (!right.is_valid) {
return right;
}
node->unary.to = right.node;
return (TreeResult) {
.is_valid = true,
.node = node,
};
}
default:
return (TreeResult) {
.is_valid = false,
.err = PARSER_UNEXPECTED_TOKEN,
};
}
}
ParserResult parse(TokenizeResult tokens) {
if (!tokens.is_valid) {
return (ParserResult) {
.is_valid = false,
.err = PARSER_INVALID_TOKENIZE,
};
}
ArraySlice *context = arraylist_slice(tokens.arr, 0, arraylist_size(tokens.arr));
Arena arena = arena_init(sizeof(Node) * arraylist_size(tokens.arr)).arena;
TreeResult result = parse_expr(context, &arena, 0);
if (!result.is_valid) {
arena_destroy(&arena);
arraylist_destroy(&tokens.arr);
return (ParserResult) {
.is_valid = false,
.err = result.err,
};
}
arraylist_destroy(&tokens.arr);
return (ParserResult) {
.is_valid = true,
.arena = arena,
.tree = result.node};
}
TreeResult parse_expr(ArraySlice *slice, Arena *arena, uint8_t min_bp) {
Token current_token;
if (arrayslice_next(slice, &current_token) != ARRLIST_OK) {
return (TreeResult) {
.is_valid = false,
.err = PARSER_UNEXPECTED_EOF,
};
}
TreeResult left_result = nud(slice, arena, current_token);
if (!left_result.is_valid) {
return left_result;
}
Node *left_side = left_result.node;
while (arrayslice_is_valid(slice)) {
Token operator_token;
arrayslice_peek(slice, &operator_token);
if (operator_token.type != TOKEN_OPERATOR) {
break;
}
ParserU8Result postfix_lbp_result = postfix_lbp(operator_token);
if (postfix_lbp_result.is_valid) {
if (postfix_lbp_result.num < min_bp) {
break;
}
arrayslice_next(slice, NULL);
TreeResult result = led(slice, arena, left_side, operator_token);
if (!result.is_valid) {
return result;
}
left_side = result.node;
continue;
}
// Path for infix basically
ParserU8Result lbp_result = infix_lbp(operator_token);
if (!lbp_result.is_valid) {
break;
}
if (lbp_result.num < min_bp) {
break;
}
arrayslice_next(slice, NULL);
TreeResult result = led(slice, arena, left_side, operator_token);
if (!result.is_valid) {
return result;
}
left_side = result.node;
}
// Final: return left side
return (TreeResult){
.is_valid = true,
.node = left_side,
};
}

View File

@@ -1,15 +1,9 @@
find_package(cmocka REQUIRED)
add_executable(test_nodeArray test_ASTNodeArray.c)
add_executable(test_lexer test_lexer.c)
add_executable(test_parser test_parser.c)
add_executable(test_evaluator test_evaluator.c)
target_link_libraries(test_nodeArray
calculator_lib
cmocka::cmocka
)
target_link_libraries(test_lexer
calculator_lib
cmocka::cmocka
@@ -25,7 +19,6 @@ target_link_libraries(test_evaluator
cmocka::cmocka
)
add_test(NAME nodeArray_tests COMMAND test_nodeArray)
add_test(NAME lexer_tests COMMAND test_lexer)
add_test(NAME parser_tests COMMAND test_parser)
add_test(NAME evaluator_tests COMMAND test_evaluator)

View File

@@ -1,86 +0,0 @@
#include "lexer.h"
#include <stdarg.h>
#include <stdbool.h>
#include <stddef.h>
#include <stdint.h>
#include <setjmp.h>
#include <cmocka.h>
static void test_array_push(void **state) {
(void) state;
// We use 2 to force resize and checking anything wrong with malloc
ASTNodeArray arr = ASTNodeArray_init(2);
ASTNode node1 = {
.type = NODE_INTEGER,
.data = { .integer = 90 }
};
ASTNode node2 = {
.type = NODE_INTEGER,
.data = { .integer = 80 }
};
ASTNode node3 = {
.type = NODE_INTEGER,
.data = { .integer = 70 }
};
assert_int_equal(ASTNodeArray_push(&arr, node1), ARRAY_OK);
assert_int_equal(ASTNodeArray_len(&arr), 1);
assert_int_equal(ASTNodeArray_push(&arr, node2), ARRAY_OK);
assert_int_equal(ASTNodeArray_len(&arr), 2);
assert_int_equal(ASTNodeArray_push(&arr, node3), ARRAY_OK);
assert_int_equal(ASTNodeArray_len(&arr), 3);
ASTNodeArray_free(&arr);
}
static void test_array_pop(void **state) {
(void) state;
// Set to force desize
ASTNodeArray arr = ASTNodeArray_init(16);
ASTNode node1 = {
.type = NODE_INTEGER,
.data = { .integer = 90 }
};
ASTNode node2 = {
.type = NODE_INTEGER,
.data = { .integer = 80 }
};
ASTNode node3 = {
.type = NODE_INTEGER,
.data = { .integer = 70 }
};
assert_int_equal(ASTNodeArray_push(&arr, node1), ARRAY_OK);
assert_int_equal(ASTNodeArray_len(&arr), 1);
assert_int_equal(ASTNodeArray_push(&arr, node2), ARRAY_OK);
assert_int_equal(ASTNodeArray_len(&arr), 2);
assert_int_equal(ASTNodeArray_push(&arr, node3), ARRAY_OK);
assert_int_equal(ASTNodeArray_len(&arr), 3);
ASTNode node4;
assert_int_equal(ASTNodeArray_pop(&arr, 1, &node4), ARRAY_OK);
assert_int_equal(node4.type, NODE_INTEGER);
assert_int_equal(node4.data.integer, 80);
ASTNodeArray_free(&arr);
}
int main(void) {
const struct CMUnitTest tests[] = {
cmocka_unit_test(test_array_push),
cmocka_unit_test(test_array_pop),
};
return cmocka_run_group_tests(tests, NULL, NULL);
}

View File

@@ -1,6 +1,3 @@
#include "lexer.h"
#include "parser.h"
#include "evaluator.h"
#include <stdarg.h>
#include <stdbool.h>
#include <stddef.h>
@@ -9,24 +6,8 @@
#include <cmocka.h>
#include <stdlib.h>
static void test_basic_evaluation(void** state) {
(void) state;
char expr[256] = "2 + 4 * 40 / 2";
ASTNodeArray context;
tokenize(expr, &context);
ParseResult result = parse(&context);
int64_t value = evaluate(result);
assert_int_equal(value, 82);
}
int main(void) {
    const struct CMUnitTest tests[] = {
        cmocka_unit_test(test_basic_evaluation),
    };
    // Bug fix: the group result was discarded and the process always exited
    // with EXIT_SUCCESS, so failing tests could not fail the build/CI.
    // Propagate cmocka's result as the exit code instead.
    return cmocka_run_group_tests(tests, NULL, NULL);
}

View File

@@ -1,112 +1,11 @@
#include "lexer.h"
#include <stdarg.h>
#include <stdbool.h>
#include <stddef.h>
#include <stdint.h>
#include <setjmp.h>
#include <cmocka.h>
static void test_tokenize_normal_expresion(void **state) {
    (void) state;
    char expr[256] = "2 + 3 / 66 * 789";
    ASTNodeArray tokens;
    ASTNode node;
    assert_int_equal(tokenize(expr, &tokens), LEXER_OK);
    assert_int_equal(tokens.len, 7);
    // The token stream alternates: integers at even slots, operators at odd.
    const int64_t expected_ints[] = { 2, 3, 66, 789 };
    const int expected_ops[] = { OP_ADD, OP_DIV, OP_MUL };
    for (size_t i = 0; i < 7; i++) {
        ASTNodeArray_get(&tokens, i, &node);
        if (i % 2 == 0) {
            assert_int_equal(node.type, NODE_INTEGER);
            assert_int_equal(node.data.integer, expected_ints[i / 2]);
        } else {
            assert_int_equal(node.type, NODE_BINARY_OP);
            assert_int_equal(node.data.binary.op, expected_ops[i / 2]);
        }
    }
}
static void test_tokenize_unrecognized_symbol(void **state) {
    (void) state;
    // 'j' and '}' are not part of the expression grammar; lexing must reject.
    char expr[256] = " 2 j 3 / 66 } 789";
    // Zero-initialize so we can verify the lexer left the array untouched.
    ASTNodeArray tokens = {0};
    assert_int_equal(tokenize(expr, &tokens), LEXER_NOT_RECOGNIZED_SYMBOL);
    assert_int_equal(tokens.len, 0);
    assert_int_equal(tokens.cap, 0);
}
static void test_tokenize_wrong_sintax(void **state) {
    (void) state;
    // Two integers in a row ("2 3", "66 789") is a syntax error for the lexer.
    char expr[256] = "2 3 / 66 789";
    // Zero-initialize so we can verify the lexer left the array untouched.
    ASTNodeArray tokens = {0};
    assert_int_equal(tokenize(expr, &tokens), LEXER_WRONG_SYNTAX);
    assert_int_equal(tokens.len, 0);
    assert_int_equal(tokens.cap, 0);
}
static void test_string_to_number_normal(void **state) {
    (void) state;
    // Digits stop at the first non-digit ('t'); the trailing "55" is ignored.
    char input[16] = "2333t55";
    size_t pos = 0;
    ASTNode parsed;
    assert_int_equal(tokenize_number(input, &pos, &parsed), LEXER_OK);
    assert_int_equal(pos, 4); // offset lands on the 't'
    assert_int_equal(parsed.type, NODE_INTEGER);
    assert_int_equal(parsed.data.integer, 2333);
}
static void test_string_to_number_overflow(void **state) {
    (void) state;
    // INT64_MAX (9223372036854775807) with an extra trailing '8' digit,
    // which pushes the value past the representable range.
    char input[32] = "92233720368547758078yy7";
    size_t pos = 0;
    ASTNode parsed;
    assert_int_equal(tokenize_number(input, &pos, &parsed), LEXER_INT_OVERFLOW);
    // The 'y' could also trigger a symbol error, but the overflow is
    // detected first, so that is the status we expect.
}
#include <stdlib.h>
int main(void) {
    const struct CMUnitTest tests[] = {
        cmocka_unit_test(test_string_to_number_normal),
        cmocka_unit_test(test_string_to_number_overflow),
        cmocka_unit_test(test_tokenize_normal_expresion),
        cmocka_unit_test(test_tokenize_unrecognized_symbol),
        cmocka_unit_test(test_tokenize_wrong_sintax),
    };
    // Bug fix: removed the unreachable `return EXIT_SUCCESS;` that followed
    // this return — dead code left over from an earlier version.
    return cmocka_run_group_tests(tests, NULL, NULL);
}

View File

@@ -1,81 +1,11 @@
#include "arena.h"
#include "lexer.h"
#include "parser.h"
#include <stdarg.h>
#include <stdbool.h>
#include <stddef.h>
#include <stdint.h>
#include <setjmp.h>
#include <cmocka.h>
static void test_parsing_basic_expression(void **state) {
(void) state;
char expr[256] = "2 + 3 / 66 * 789";
ASTNodeArray tokens;
ASTNode node;
assert_int_equal(tokenize(expr, &tokens), LEXER_OK);
assert_int_equal(tokens.len, 7);
ParseResult result = parse(&tokens);
// Assert head is +
assert_int_equal(result.tree->type, NODE_BINARY_OP);
assert_int_equal(result.tree->data.binary.op, OP_ADD);
assert_int_equal(result.tree->data.binary.left->type, NODE_INTEGER);
assert_int_equal(result.tree->data.binary.left->data.integer, 2);
assert_int_equal(
result.tree->data.binary.right->type,
NODE_BINARY_OP
);
assert_int_equal(
result.tree->data.binary.right->data.binary.op,
OP_MUL
);
assert_int_equal(
result.tree->data.binary.right->data.binary.right->type,
NODE_INTEGER);
assert_int_equal(
result.tree->data.binary.right->data.binary.right->data.integer,
789);
assert_int_equal(
result.tree->data.binary.right->data.binary.left->type,
NODE_BINARY_OP
);
assert_int_equal(
result.tree->data.binary.right->data.binary.left->data.binary.op,
OP_DIV
);
assert_int_equal(
result.tree->data.binary.right->data.binary.left->data.binary.right->type,
NODE_INTEGER
);
assert_int_equal(
result.tree->data.binary.right->data.binary.left->data.binary.right->data.integer,
66
);
assert_int_equal(
result.tree->data.binary.right->data.binary.left->data.binary.left->type,
NODE_INTEGER
);
assert_int_equal(
result.tree->data.binary.right->data.binary.left->data.binary.left->data.integer,
3
);
arena_destroy(&result.arena);
}
#include <stdlib.h>
int main(void) {
    const struct CMUnitTest tests [] = {
        cmocka_unit_test(test_parsing_basic_expression),
    };
    // Bug fix: removed the unreachable `return EXIT_SUCCESS;` that followed
    // this return — dead code left over from an earlier version.
    return cmocka_run_group_tests(tests, NULL, NULL);
}