Initial upload

Author: r4, 2021-07-01 17:04:13 +02:00
Parent: 366bd4ad30
Commit: 574cf9219e
15 changed files with 958 additions and 0 deletions

Makefile (new file)
@@ -0,0 +1,40 @@
CC=gcc
CFLAGS=-Wall -O3 -march=native -std=c99
LDFLAGS=
EXE=str_eval
ODIR=obj
LIBS=-lm

DEPS=lex.h ast.h token.h token_list.h ptr_stack.h error.h
_OBJ=lex.c ast.c token.c token_list.c ptr_stack.c error.c main.c
OBJ=$(patsubst %.c,$(ODIR)/%.o,$(_OBJ))

all: makedepend odir $(EXE)

run: all
	./$(EXE)

$(ODIR)/%.o: %.c
	$(CC) -c -o $@ $< $(CFLAGS) $(LIBS)

$(EXE): $(OBJ)
	$(CC) -o $@ $^ $(CFLAGS) $(LIBS) $(LDFLAGS)

makedepend: $(_OBJ) $(DEPS)
	echo "# Automatically generated by makedepend" > $@
	makedepend -Y -f $@ -p $(ODIR)/ $(_OBJ) 2>/dev/null
	rm $@.bak

odir:
	mkdir -p $(ODIR)

.PHONY: clean makedepend

clean:
	rm -rf $(ODIR) $(EXE) makedepend

include makedepend

ast.c (new file)
@@ -0,0 +1,171 @@
/* vim: set filetype=c: */
#include "ast.h"
#include "ptr_stack.h"
#include <stdlib.h> /* malloc, free */
#include <assert.h>
#include <math.h>
void AST_init(AST* obj) {
obj->root = NULL;
}
static Token* new_default_ExprToken_() {
Token* tkn = malloc(sizeof(Token));
tkn->type = TokenTypeExpr;
tkn->data.expr.lhs = NULL; /* Left operand */
tkn->data.expr.op = NULL; /* Operator */
tkn->data.expr.rhs = NULL; /* Right operand */
return tkn;
}
static Token* new_NumToken_with_num_(NumToken val) {
Token* tkn = malloc(sizeof(Token));
tkn->type = TokenTypeNum;
tkn->data.num = val;
return tkn;
}
static Token* new_OpToken_with_sym_(OpToken val) {
Token* tkn = malloc(sizeof(Token));
tkn->type = TokenTypeOp;
tkn->data.op = val;
return tkn;
}
Result AST_parse_from_TokenList(AST* obj, const TokenList* tokens) {
/* If root weren't NULL, the AST was either not initialized, or
* it was already populated */
assert(obj->root == NULL);
obj->root = new_default_ExprToken_();
PtrStack node_stack; /* Always has curr_node at the top, followed by curr_node's parent, etc.. */
PtrStack_init(&node_stack);
Token* curr_node = obj->root; /* Node means ExprToken in this case */
PtrStack_push(&node_stack, curr_node);
/* Linearly iterate through every token the lexer generated */
const TokenListItem* curr;
for(curr = tokens->front; curr != NULL; curr = curr->next) {
switch(curr->val.type) {
default:
return Result_err("Found invalid token type while building AST");
break;
case TokenTypeSep: {
if(curr->val.data.sep.sym == '(') {
/* Create a new sub expression as a child of the current expression */
Token* new_tkn = new_default_ExprToken_();
/* The left and right hand operands get filled from left to right;
* insert the new sub expression into the next free operand slot,
* free meaning unassigned or NULL */
if(curr_node->data.expr.lhs == NULL)
curr_node->data.expr.lhs = new_tkn;
else if (curr_node->data.expr.rhs == NULL)
curr_node->data.expr.rhs = new_tkn;
else {
free(new_tkn); /* Free new_tkn, as it is unused due to an error */
return Result_err("Found more than 2 operands for 1 operator while building AST");
break;
}
/* Push the new curr_node onto the pointer stack so that we can go
* back up a layer when the matching RParen is encountered */
curr_node = new_tkn;
PtrStack_push(&node_stack, curr_node);
} else /* if(curr->val.data.sep.sym == ')') */ {
/* Go back a layer, effectively changing curr_node to its parent */
PtrStack_pop(&node_stack);
curr_node = node_stack.top->ptr;
}
break;
}
case TokenTypeNum: {
assert(curr_node->type == TokenTypeExpr);
Token* num_tkn = new_NumToken_with_num_(curr->val.data.num);
/* Fill the curr_node expression operands from left to right */
if(curr_node->data.expr.lhs == NULL)
curr_node->data.expr.lhs = num_tkn;
else if (curr_node->data.expr.rhs == NULL)
curr_node->data.expr.rhs = num_tkn;
else {
free(num_tkn); /* Free num_tkn, as it is unused due to an error */
return Result_err("Found more than 2 operands for 1 operator while building AST");
break;
}
break;
}
case TokenTypeOp: {
if(curr_node->data.expr.op == NULL) {
/* Fill the expression token's operator field with exactly the same
* data as the current token */
Token* op_tkn = new_OpToken_with_sym_(curr->val.data.op);
curr_node->data.expr.op = op_tkn;
} else {
return Result_err("Found more than 1 operator in a single expression while building the AST");
break;
}
break;
}
}
}
PtrStack_uninit(&node_stack);
return Result_noerr();
}
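/* Illustrative example (not in the original file): for the token stream the lexer
 * produces for "2+3*4" once all three precedence passes from main.c have run, i.e.
 *   ( 2 + ( 3 * 4 ) )
 * this function builds the tree (indentation marks children)
 *   (Expr)
 *       (Expr)
 *           (Num, 2)
 *           (Op, '+')
 *           (Expr)
 *               (Num, 3)
 *               (Op, '*')
 *               (Num, 4)
 * which AST_evaluate() then reduces to 14. */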
static long double AST_eval_node_(Token* node) {
assert(node->type == TokenTypeExpr);
long double a, b;
if(node->data.expr.lhs == NULL)
/* TODO: Error handling */
return 0;
if(node->data.expr.lhs->type == TokenTypeExpr)
a = AST_eval_node_(node->data.expr.lhs);
else /* if(node->data.expr.lhs->type == TokenTypeNum) */
a = node->data.expr.lhs->data.num;
if(node->data.expr.rhs == NULL)
/* If there is no right hand side expression, just return the left hand one */
return a;
if(node->data.expr.rhs->type == TokenTypeExpr)
b = AST_eval_node_(node->data.expr.rhs);
else /* if(node->data.expr.rhs->type == TokenTypeNum) */
b = node->data.expr.rhs->data.num;
if(node->data.expr.op == NULL)
/* TODO: Error handling (two operands without an operator) */
return a;
switch(node->data.expr.op->data.op) {
case '+':
return a + b;
break;
case '-':
return a - b;
break;
case '*':
return a * b;
break;
case '/':
return a / b;
break;
case '^':
return pow(a, b);
break;
default:
/* TODO: Error handling */
break;
}
return 0;
}
long double AST_evaluate(AST* obj) {
return AST_eval_node_(obj->root);
}
void AST_uninit(AST* obj) {
if(obj->root)
/* uninit and deallocate the root token and its children */
ExprToken_uninit_recursive(obj->root);
}

ast.h (new file)
@@ -0,0 +1,24 @@
/* vim: set filetype=c: */
#ifndef _AST_H_
#define _AST_H_
#include "token.h"
#include "token_list.h"
#include "error.h"
typedef struct AST AST;
struct AST {
Token* root;
};
extern void AST_init(AST* obj);
/* Can be called exactly once after AST_init */
extern Result AST_parse_from_TokenList(AST* obj, const TokenList* tokens);
extern long double AST_evaluate(AST* obj);
extern void AST_uninit(AST* obj);
#endif /* _AST_H_ */
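Usage sketch (an editor's illustration, not part of this commit): the intended call order for the AST API, assuming a TokenList that the lexer has already produced and parenthesized; eval_tokens is a hypothetical helper.

#include "ast.h"
#include <stdio.h>

/* tokens: assumed to come from Lex (see lex.h), with operator precedence already applied. */
static long double eval_tokens(const TokenList* tokens) {
    AST ast;
    AST_init(&ast);                                      /* root starts out as NULL */
    Result res = AST_parse_from_TokenList(&ast, tokens); /* exactly once per AST_init */
    Result_print_err_or(res, "Parser error: ", NULL, stderr, stdout);
    long double value = res.has_err ? 0.0L : AST_evaluate(&ast);
    AST_uninit(&ast);                                    /* frees the whole tree */
    return value;
}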

error.c (new file)
@@ -0,0 +1,30 @@
/* vim: set filetype=c: */
#include "error.h"
#include <assert.h>
Result Result_err(char* err_str) {
Result res;
res.has_err = true;
res.err_str = err_str;
return res;
}
Result Result_noerr(void) {
Result res;
res.has_err = false;
res.err_str = NULL;
return res;
}
void Result_print_err_or(const Result obj, const char* err_prefix, const char* no_err, FILE* err_file, FILE* no_err_file) {
if(obj.has_err) {
if(err_prefix != NULL)
fprintf(err_file, "%s", err_prefix);
assert(obj.err_str != NULL);
fprintf(err_file, "%s\n", obj.err_str);
}
else if(no_err != NULL)
fprintf(no_err_file, "%s\n", no_err);
}

error.h (new file)
@@ -0,0 +1,23 @@
/* vim: set filetype=c: */
#ifndef _ERROR_H_
#define _ERROR_H_
#include <stdbool.h>
#include <stdio.h>
typedef struct Result Result;
struct Result {
bool has_err;
char* err_str;
};
extern Result Result_err(char* err_str);
extern Result Result_noerr(void);
/* err_prefix: string to print before the error, if one occurred; prints nothing if it has value NULL
* no_err: string to print if no error occurred; prints nothing if it has value NULL
* err_file: where to output the string, if an error has occurred
* no_err_file: where to output, if no error has occurred */
extern void Result_print_err_or(const Result obj, const char* err_prefix, const char* no_err, FILE* err_file, FILE* no_err_file);
#endif /* _ERROR_H_ */
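Usage sketch (an editor's illustration, not part of this commit): how a function can report failure through Result and how the caller can print it; parse_digit is a hypothetical example.

#include "error.h"

static Result parse_digit(char c, int* out) {
    if(c < '0' || c > '9')
        return Result_err("Expected a digit");
    *out = c - '0';
    return Result_noerr();
}

int main(void) {
    int d;
    const Result res = parse_digit('7', &d);
    /* Prints "ok" to stdout on success, or "Demo error: <message>" to stderr on failure. */
    Result_print_err_or(res, "Demo error: ", "ok", stderr, stdout);
    return res.has_err;
}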

leakcheck.sh (new executable file)
@@ -0,0 +1,4 @@
#!/usr/bin/env sh
make
EXPR="2.5/(5+3)-2*4/3.3^4.1^5"
echo "$EXPR" | valgrind --leak-check=yes ./str_eval

lex.c (new file)
@@ -0,0 +1,186 @@
/* vim: set filetype=c: */
#include "lex.h"
#include <assert.h>
void Lex_init(Lex* obj) {
TokenList_init(&obj->tokens);
PtrStack_init(&obj->ptr_stack_);
obj->out_read_total = 0;
obj->out_written_total = 0;
obj->context_size_ = 0;
}
Result Lex_lex_char(Lex* obj, char in) {
/* If the current char is part of a Num token */
if((in >= '0' && in <= '9') || in == '.') {
/* Don't let contexts become as large as the predefined size, as
* we still need space for the null string terminator char \0 */
if(obj->context_size_ < LEX_MAX_CONTEXT_SIZE - 1) {
obj->context_[obj->context_size_] = in;
obj->context_size_++;
}
/* If the current char is not part of a number, but the
* previous one was, write the number as a token */
} else {
if(obj->context_size_) {
/* Add the context string terminator */
obj->context_[obj->context_size_] = '\0';
obj->context_size_++;
/* Write the token */
TokenListItem* itm = malloc(sizeof(TokenListItem));
itm->val.type = TokenTypeNum;
itm->val.data.num = strtold(obj->context_, NULL);
TokenList_push_back(&obj->tokens, itm);
obj->context_size_ = 0;
}
/* If the char is not a whitespace, newline, string terminator, etc.. */
if(!(in == ' ' || in == '\t' || in == '\n' || in == '\0')) {
switch(in) {
case '(':
case ')': {
TokenListItem* itm = malloc(sizeof(TokenListItem));
itm->val.type = TokenTypeSep;
itm->val.data.sep.sym = in;
if(in == '(')
PtrStack_push(&obj->ptr_stack_, (void*)itm);
else /* if in == ')' */ {
/* Throw an error, if the pointer stack has no top element */
if(obj->ptr_stack_.size == 0)
return Result_err("Found ')' without matching '('");
/* Matching LParen */
TokenListItem* match = obj->ptr_stack_.top->ptr;
/* Define the RParen's matching LParen */
itm->val.data.sep.matching = match;
/* Define the LParen's matching RParen */
match->val.data.sep.matching = itm;
/* Pop the top element */
PtrStack_pop(&obj->ptr_stack_);
}
TokenList_push_back(&obj->tokens, itm);
return Result_noerr();
break;
}
case '+':
case '-':
case '*':
case '/':
case '^': {
TokenListItem* itm = malloc(sizeof(TokenListItem));
itm->val.type = TokenTypeOp;
itm->val.data.op = in;
TokenList_push_back(&obj->tokens, itm);
return Result_noerr();
break;
}
default:
return Result_err("Unrecognized character");
break;
}
}
}
return Result_noerr();
}
Result Lex_finish_lex(Lex* obj) {
Result res = Lex_lex_char(obj, '\n');
if(res.has_err)
return res;
if(obj->ptr_stack_.size) {
return Result_err("Found '(' without matching ')'");
}
return Result_noerr();
}
static bool contains_char_(char c, const char* charset) {
size_t i;
for(i = 0; charset[i] != '\0'; i++) {
if(charset[i] == c)
return true;
}
return false;
}
Result Lex_apply_op_precedence(Lex* obj, const char* opsyms, bool right_to_left) {
/* Iterate through tokens, where curr is the current token */
TokenListItem* const begin = right_to_left ? obj->tokens.back : obj->tokens.front;
TokenListItem* curr;
for(curr = begin; curr != NULL; curr = right_to_left ? curr->prev : curr->next) {
if(curr->val.type != TokenTypeOp)
continue;
if(curr->prev == NULL || curr->next == NULL) {
return Result_err("Found an operator with at least one missing operand");
continue;
}
if(!contains_char_(curr->val.data.op, opsyms))
continue;
/* The items left and right of the operator, taking potential parentheses into account:
* For example, if we have (4+3)*5, lofop will be the position of the left paren,
* and rofop will be the position of the 5. This is possible due to each paren
* having a pointer to its matching paren (for details on how it works, refer to
* Lex_lex_char()). */
TokenListItem* lofop = NULL; /* Item left of operator */
TokenListItem* rofop = NULL; /* Item right of operator */
/* lofop */
if(curr->prev->val.type == TokenTypeNum)
lofop = curr->prev;
else if(curr->prev->val.type == TokenTypeSep)
/* Find the matching paren and position lofop there */
lofop = curr->prev->val.data.sep.matching;
else {
return Result_err("Invalid operand type left of operator");
}
/* rofop: essentially the same as lofop */
if(curr->next->val.type == TokenTypeNum)
rofop = curr->next;
else if(curr->next->val.type == TokenTypeSep)
rofop = curr->next->val.data.sep.matching;
else {
return Result_err("Invalid operand type right of operator");
}
assert(lofop != NULL);
assert(rofop != NULL);
/* Don't add any parens, if they would be redundant.
* We know that they are redundant, if lofop has an
* LParen left of it and rofop has an RParen right of it */
if(
/* Check for LParen left of lofop */
lofop->prev != NULL &&
lofop->prev->val.type == TokenTypeSep &&
lofop->prev->val.data.sep.sym == '(' &&
/* Check for RParen right of rofop */
rofop->next != NULL &&
rofop->next->val.type == TokenTypeSep &&
rofop->next->val.data.sep.sym == ')'
) continue;
/* Insert parens to make the AST builder handle precedence */
TokenListItem* lp = malloc(sizeof(TokenListItem));
TokenListItem* rp = malloc(sizeof(TokenListItem));
lp->val.type = TokenTypeSep;
lp->val.data.sep.sym = '(';
lp->val.data.sep.matching = rp;
TokenList_insert_before(&obj->tokens, lofop, lp);
rp->val.type = TokenTypeSep;
rp->val.data.sep.sym = ')';
rp->val.data.sep.matching = lp;
TokenList_insert_after(&obj->tokens, rofop, rp);
}
return Result_noerr();
}
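/* Illustrative trace (not in the original file): for the input "2+3*4" the lexer first
 * produces the token sequence
 *   2 + 3 * 4
 * and the three precedence passes made in main.c rewrite it step by step:
 *   pass "^"         (right to left): 2 + 3 * 4        (no '^' present, unchanged)
 *   pass for mul/div (left to right): 2 + ( 3 * 4 )
 *   pass "+-"        (left to right): ( 2 + ( 3 * 4 ) )
 * so AST_parse_from_TokenList() only ever sees explicitly parenthesized subexpressions. */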
void Lex_uninit(Lex* obj) {
TokenList_uninit(&obj->tokens);
PtrStack_uninit(&obj->ptr_stack_);
}

lex.h (new file)
@@ -0,0 +1,55 @@
/* vim: set filetype=c: */
#ifndef _LEX_H_
#define _LEX_H_
#include <stdlib.h>
#include <stdbool.h>
#include "token.h"
#include "token_list.h"
#include "ptr_stack.h"
#include "error.h"
#define LEX_MAX_CONTEXT_SIZE 256
typedef struct Lex Lex;
struct Lex {
/** Output **/
TokenList tokens;
/* Stats */
size_t out_written_total; /* Total number of tokens put out */
size_t out_read_total; /* Total number of read chars */
/** Internal **/
/* Stores a stack of pointers to LParen TokenListItems. Using this,
* we can always know where each paren's matching paren is located,
* which allows for a fast operator precedence implementation. */
PtrStack ptr_stack_;
/* Holds chars of text that haven't completely been lexed yet */
char context_[LEX_MAX_CONTEXT_SIZE];
size_t context_size_;
};
extern void Lex_init(Lex* obj);
extern Result Lex_lex_char(Lex* obj, char in);
extern Result Lex_finish_lex(Lex* obj);
/* This function MUST be called for every operator (each operator symbol must appear in
 * exactly one call), as it transforms the expression into a representation from which
 * the ternary AST can be built directly.
 * The order of calls to this function is what dictates the actual precedence.
 * Usage: Call this function once for each precedence level:
 * opsyms is a string with the operator characters to apply precedence to, for example "+-";
 * right_to_left specifies the associativity. For most math operators this should be false,
 * but for pow ('^'), for example, it should be true.
 * Call this function for the operators with the highest precedence first, i.e. pow first.
 * An example setup would be:
 * Lex_apply_op_precedence(&lex, "^", true);
 * Lex_apply_op_precedence(&lex, "*" "/", false); (adjacent literals; equal to the two-character string)
 * Lex_apply_op_precedence(&lex, "+-", false); */
extern Result Lex_apply_op_precedence(Lex* obj, const char* opsyms, bool right_to_left);
extern void Lex_uninit(Lex* obj);
#endif /* _LEX_H_ */
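Usage sketch (an editor's illustration, not part of this commit), mirroring what main.c does: feed a string through the lexer character by character, finish, and apply the precedence passes described above; lex_expression is a hypothetical helper.

#include "lex.h"

static Result lex_expression(Lex* lex, const char* expr) {
    size_t i;
    Result r;
    Lex_init(lex);
    for(i = 0; expr[i] != '\0'; i++) {
        r = Lex_lex_char(lex, expr[i]);
        if(r.has_err) return r;
    }
    r = Lex_finish_lex(lex);  /* flushes a pending number, checks for an unmatched '(' */
    if(r.has_err) return r;
    /* Highest precedence first; '^' associates right-to-left, the others left-to-right. */
    r = Lex_apply_op_precedence(lex, "^", true);
    if(r.has_err) return r;
    r = Lex_apply_op_precedence(lex, "*/", false);
    if(r.has_err) return r;
    return Lex_apply_op_precedence(lex, "+-", false); /* lex->tokens is now fully parenthesized */
}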

main.c (new file)
@@ -0,0 +1,72 @@
/* vim: set filetype=c: */
#include "lex.h"
#include "ast.h"
#include <stdlib.h>
#include <stdio.h>
#define INPUT_SIZE 512
static void print_tokens(const Lex* lex) {
const TokenListItem* curr;
for(curr = lex->tokens.front; curr != NULL; curr = curr->next) {
Token_print(&curr->val, stdout);
fputc('\n', stdout);
}
}
static void handle_normal_err(const Result res, const char* domain) {
Result_print_err_or(res, domain, NULL, stderr, stdout);
if(res.has_err)
exit(EXIT_FAILURE);
}
int main() {
size_t i;
Result res;
char in[INPUT_SIZE];
Lex lex;
Lex_init(&lex);
printf("Enter an expression:\n");
for(i = 0;;i++) {
const int c = fgetc(stdin); /* int, so EOF can be detected reliably */
if(i >= INPUT_SIZE - 1 || c == '\n' || c == EOF) {
in[i] = '\0'; /* String terminator */
break;
}
in[i] = (char)c;
}
for(i = 0; in[i] != '\0'; i++) {
res = Lex_lex_char(&lex, in[i]);
handle_normal_err(res, "Lexer error: ");
}
res = Lex_finish_lex(&lex);
handle_normal_err(res, "Lexer error: ");
printf("\n**Tokens**\n");
print_tokens(&lex);
res = Lex_apply_op_precedence(&lex, "^", true);
handle_normal_err(res, "Lexer error: ");
res = Lex_apply_op_precedence(&lex, "*/", false);
handle_normal_err(res, "Lexer error: ");
res = Lex_apply_op_precedence(&lex, "+-", false);
handle_normal_err(res, "Lexer error: ");
printf("\n**Tokens (with operator precedence)**\n");
print_tokens(&lex);
AST ast;
AST_init(&ast);
res = AST_parse_from_TokenList(&ast, &lex.tokens);
handle_normal_err(res, "Parser error: ");
printf("\n**AST**\n");
Token_print_as_tree(ast.root, stdout);
long double result = AST_evaluate(&ast);
printf("Result: %Lf\n", result);
AST_uninit(&ast);
Lex_uninit(&lex);
return EXIT_SUCCESS;
}

ptr_stack.c (new file)
@@ -0,0 +1,33 @@
/* vim: set filetype=c: */
#include "ptr_stack.h"
#include <stdlib.h> /* malloc, free */
void PtrStack_init(PtrStack* obj) {
obj->top = NULL;
obj->size = 0;
}
void PtrStack_push(PtrStack* obj, void* ptr) {
PtrStackItem* itm = malloc(sizeof(PtrStackItem));
itm->ptr = ptr;
itm->prev = obj->top;
obj->top = itm;
obj->size++;
}
void PtrStack_pop(PtrStack* obj) {
PtrStackItem* itm = obj->top;
obj->top = itm->prev;
free(itm);
obj->size--;
}
void PtrStack_uninit(PtrStack* obj) {
PtrStackItem* curr = obj->top;
while(curr != NULL) {
PtrStackItem* prev = curr->prev;
free(curr);
curr = prev;
}
}

ptr_stack.h (new file)
@@ -0,0 +1,31 @@
/* vim: set filetype=c: */
#ifndef _PTR_STACK_H_
#define _PTR_STACK_H_
#include <stddef.h>
/* THIS STRUCT IS ONLY FOR HOLDING POINTERS AS REFERENCES, NOT
* HEAP ALLOCATED OBJECTS. IT DOES NOT AUTOMATICALLY ALLOC OR FREE
* ANYTHING BUT INTERNAL OBJECTS. */
typedef struct PtrStackItem PtrStackItem;
struct PtrStackItem {
PtrStackItem* prev;
void* ptr;
};
typedef struct PtrStack PtrStack;
struct PtrStack {
PtrStackItem* top;
size_t size;
};
extern void PtrStack_init(PtrStack* obj);
extern void PtrStack_push(PtrStack* obj, void* ptr);
extern void PtrStack_pop(PtrStack* obj);
extern void PtrStack_uninit(PtrStack* obj);
#endif /* _PTR_STACK_H_ */
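Usage sketch (an editor's illustration, not part of this commit): the stack only stores addresses it is given; it never allocates or frees the pointed-to objects.

#include "ptr_stack.h"
#include <assert.h>

int main(void) {
    int a = 1, b = 2;
    PtrStack stack;
    PtrStack_init(&stack);
    PtrStack_push(&stack, &a);          /* stores the address of a, nothing is copied */
    PtrStack_push(&stack, &b);
    assert(*(int*)stack.top->ptr == 2); /* top is the most recently pushed pointer */
    PtrStack_pop(&stack);               /* frees only the internal PtrStackItem */
    assert(*(int*)stack.top->ptr == 1);
    PtrStack_uninit(&stack);            /* frees the remaining internal items */
    return 0;
}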

token.c (new file)
@@ -0,0 +1,71 @@
/* vim: set filetype=c: */
#include "token.h"
#include <assert.h>
#include <stdlib.h> /* free */
void ExprToken_uninit_recursive(Token* obj) {
if(obj->type == TokenTypeExpr) {
/* Uninit lhs recursively */
if(obj->data.expr.lhs)
ExprToken_uninit_recursive(obj->data.expr.lhs);
/* Uninit op */
if(obj->data.expr.op) {
assert(obj->data.expr.op->type == TokenTypeOp);
free(obj->data.expr.op);
}
/* Uninit rhs recursively */
if(obj->data.expr.rhs)
ExprToken_uninit_recursive(obj->data.expr.rhs);
} else {
/* If it's not an ExprToken, the type must be NumToken */
assert(obj->type == TokenTypeNum);
}
free(obj); /* Free the token itself */
}
void Token_print(const Token* obj, FILE* file) {
switch(obj->type) {
default:
fprintf(file, "(Invalid)");
break;
case TokenTypeNull:
fprintf(file, "(Null)");
break;
case TokenTypeNum:
fprintf(file, "(Num, %Lf)", obj->data.num);
break;
case TokenTypeSep:
fprintf(file, "(Sep, '%c')", obj->data.sep.sym);
break;
case TokenTypeOp:
fprintf(file, "(Op, '%c')", obj->data.op);
break;
case TokenTypeExpr:
fprintf(file, "(Expr)");
break;
}
}
static void Token_print_as_tree_(const Token* obj, FILE* file, size_t depth) {
/* Put spaces in front for hierarchical view */
size_t i;
for(i = 0; i < depth * 4; i++)
fputc(' ', file);
Token_print(obj, file); /* Print the token itself */
fputc('\n', file); /* Print newline */
/* Print the children one layer deeper */
if(obj->type == TokenTypeExpr) {
if(obj->data.expr.lhs != NULL)
Token_print_as_tree_(obj->data.expr.lhs, file, depth + 1);
if(obj->data.expr.op != NULL)
Token_print_as_tree_(obj->data.expr.op, file, depth + 1);
if(obj->data.expr.rhs != NULL)
Token_print_as_tree_(obj->data.expr.rhs, file, depth + 1);
}
}
void Token_print_as_tree(const Token* obj, FILE* file) {
Token_print_as_tree_(obj, file, 0);
}

token.h (new file)
@@ -0,0 +1,52 @@
/* vim: set filetype=c: */
#ifndef _TOKEN_H_
#define _TOKEN_H_
#include <stdio.h>
typedef struct Token Token;
struct Token;
typedef struct TokenListItem TokenListItem;
struct TokenListItem;
typedef long double NumToken;
typedef struct SepToken SepToken;
struct SepToken {
char sym;
TokenListItem* matching;
};
typedef char OpToken;
typedef struct ExprToken ExprToken;
struct ExprToken {
Token* lhs;
Token* op;
Token* rhs;
};
struct Token {
enum {
TokenTypeNull, /* Invalid type */
TokenTypeNum,
TokenTypeSep,
TokenTypeOp,
TokenTypeExpr,
} type;
union {
NumToken num;
SepToken sep;
OpToken op;
ExprToken expr;
} data;
};
/* Recursively frees Token of type ExprToken and its children */
extern void ExprToken_uninit_recursive(Token* obj);
extern void Token_print(const Token* obj, FILE* file);
extern void Token_print_as_tree(const Token* obj, FILE* file);
#endif /* _TOKEN_H_ */

token_list.c (new file)
@@ -0,0 +1,130 @@
/* vim: set filetype=c: */
#include "token_list.h"
#include <stdlib.h> /* free */
void TokenList_init(TokenList* obj) {
obj->front = NULL;
obj->back = NULL;
obj->size = 0;
}
static void TokenList_add_initial_item_(TokenList* obj, TokenListItem* data) {
obj->front = obj->back = data;
data->prev = data->next = NULL;
obj->size++;
}
void TokenList_push_back(TokenList* obj, TokenListItem* data) {
if(obj->size == 0) {
TokenList_add_initial_item_(obj, data);
return;
}
data->prev = obj->back;
obj->back->next = data;
obj->back = data;
data->next = NULL;
obj->size++;
}
void TokenList_push_front(TokenList* obj, TokenListItem* data) {
if(obj->size == 0) {
TokenList_add_initial_item_(obj, data);
return;
}
data->next = obj->front;
obj->front->prev = data;
obj->front = data;
data->prev = NULL;
obj->size++;
}
void TokenList_insert_before(TokenList* obj, TokenListItem* itm, TokenListItem* data) {
if(itm == obj->front) {
TokenList_push_front(obj, data);
return;
}
/* See insert_after for details */
data->prev = itm->prev;
data->next = itm;
itm->prev->next = data;
itm->prev = data;
obj->size++;
}
void TokenList_insert_after(TokenList* obj, TokenListItem* itm, TokenListItem* data) {
if(itm == obj->back) {
TokenList_push_back(obj, data);
return;
}
/* Initial state
* +---+ -> +---+ -> +---+
* |itm| | | | |
* +---+ <- +---+ <- +---+
* +----+
* |data|
* +----+ */
/* Make data point to its new neighbours
* +---+ -> +---+ -> +---+
* |itm| | | | |
* +---+ <- +---+ <- +---+
* ^ ^
* | |
* | +----+ -+
* | |data|
* +- +----+ */
data->next = itm->next;
data->prev = itm;
/* Make new datas' neighbours point to it
* +---+ -+ +---+ -> +---+
* |itm| | | | | |
* +---+ | +---+ <- +---+
* ^ | | ^
* | +---+ | |
* | | +---+--+
* | | | |
* | +-> +----+ -+ |
* | |data| |
* +---- +----+ <---+ */
/* +---+ -> +----+ -> +---+ -> +---+
* |itm| |data| | | | |
* +---+ <- +----+ <- +---+ <- +---+ */
itm->next->prev = data;
itm->next = data;
obj->size++;
}
void TokenList_remove(TokenList* obj, TokenListItem* itm) {
if(obj->size == 1) {
obj->front = NULL;
obj->back = NULL;
}
else if(itm == obj->front) {
itm->next->prev = itm->prev;
obj->front = itm->next;
}
else if(itm == obj->back) {
itm->prev->next = itm->next;
obj->back = itm->prev;
} else {
itm->prev->next = itm->next;
itm->next->prev = itm->prev;
}
free(itm);
obj->size--;
}
void TokenList_uninit(TokenList* obj) {
TokenListItem* curr = obj->front;
while(curr != NULL) {
TokenListItem* next = curr->next;
free(curr);
curr = next;
}
}

token_list.h (new file)
@@ -0,0 +1,36 @@
/* vim: set filetype=c: */
#ifndef _TOKEN_LIST_H_
#define _TOKEN_LIST_H_
#include "token.h"
struct TokenListItem {
TokenListItem* next;
TokenListItem* prev;
Token val;
};
typedef struct TokenList TokenList;
struct TokenList {
TokenListItem* front;
TokenListItem* back;
size_t size;
};
extern void TokenList_init(TokenList* obj);
/* All inserted / appended elements must be heap allocated in advance */
extern void TokenList_push_back(TokenList* obj, TokenListItem* data);
extern void TokenList_push_front(TokenList* obj, TokenListItem* data);
extern void TokenList_insert_before(TokenList* obj, TokenListItem* itm, TokenListItem* data);
extern void TokenList_insert_after(TokenList* obj, TokenListItem* itm, TokenListItem* data);
extern void TokenList_remove(TokenList* obj, TokenListItem* itm);
/* Frees all TokenListItems */
extern void TokenList_uninit(TokenList* obj);
#endif /* _TOKEN_LIST_H_ */
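Usage sketch (an editor's illustration, not part of this commit): items are heap allocated by the caller and ownership passes to the list, which frees them in TokenList_uninit.

#include "token_list.h"
#include <stdlib.h>

int main(void) {
    TokenList list;
    TokenList_init(&list);
    TokenListItem* itm = malloc(sizeof(TokenListItem)); /* caller allocates */
    itm->val.type = TokenTypeNum;
    itm->val.data.num = 42.0L;
    TokenList_push_back(&list, itm);                    /* list takes ownership */
    TokenList_uninit(&list);                            /* frees every TokenListItem */
    return 0;
}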