
Commit

some changes
v420v committed Oct 19, 2024
1 parent 6c778eb commit f8e6fcb
Showing 5 changed files with 18 additions and 32 deletions.
11 changes: 0 additions & 11 deletions src/codegen/codegen.ibu
@@ -4,18 +4,7 @@
#include "linux-syscall/header.ibu"

func gen_expr(g *Gen, node *Node) i32;
func gen_lhs_rhs(g *Gen, lhs *Node, rhs *Node) i32;
func gen_addr(g *Gen, node *Node) i32;
func gen_store(ty *Type) i32;
func gen_load(g *Gen, ty *Type) i32;
func gen_condition_store(ty *Type) i32;
func func_call_args_to_stack(g *Gen, args *Vec, ty *Type) i32;
func gen_stmt(g *Gen, node *Node) i32;
func gen_node_block(g *Gen, stmts *Vec) i32;
func gen_stmt(g *Gen, node *Node) i32;
func assign_func_params_offset(g *Gen, obj *Object) i32;
func assign_local_var_offset(g *Gen, obj *Object) i32;
func has_main(object *Object) bool;

func new_gen() *Gen {
let g *Gen = alloc(typesize(Gen));
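
A side note on the deleted block above: among the removed forward declarations, `gen_stmt` was declared twice, so this cleanup also drops a duplicate. For reference, the repeated lines were (copied from the hunk above, not new code):

func gen_stmt(g *Gen, node *Node) i32;
func gen_node_block(g *Gen, stmts *Vec) i32;
func gen_stmt(g *Gen, node *Node) i32;
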
6 changes: 3 additions & 3 deletions src/parser/header.ibu
@@ -86,9 +86,9 @@ struct Type {
tok *Token,
}

func unexpected_token_error(expected *u8, tok *Token, msg *u8) i32;
func unkown_type_error(types *Vec, tok *Token, msg *u8) i32;
func unkown_member_error(members *Vec, tok *Token, msg *u8) i32;
func unexpected_token_error(expected *u8, tok *Token, msg *u8) u0;
func unkown_type_error(types *Vec, tok *Token, msg *u8) u0;
func unkown_member_error(members *Vec, tok *Token, msg *u8) u0;
#define ObjectKind i32
#define OBJ_FUNC 0
#define OBJ_VAR 1
6 changes: 3 additions & 3 deletions src/parser/parser.ibu
@@ -9,13 +9,13 @@ func parse_expr(p *Parser) *Node;
func parse_stmt(p *Parser) *Node;
func cmp_type(ty1 *Type, ty2 *Type) bool;

func unexpected_token_error(expected *u8, tok *Token, msg *u8) i32 {
func unexpected_token_error(expected *u8, tok *Token, msg *u8) u0 {
print_error_with_code(tok, msg);
eprintf("help: expected `%s` but got `%s`\n", expected, tok.lit);
exit(1);
}

func unkown_type_error(types *Vec, tok *Token, msg *u8) i32{
func unkown_type_error(types *Vec, tok *Token, msg *u8) u0 {
print_error_with_code(tok, msg);

if types.len == 0 {
@@ -42,7 +42,7 @@ func unkown_type_error(types *Vec, tok *Token, msg *u8) i32{
exit(1);
}

func unkown_member_error(members *Vec, tok *Token, msg *u8) i32 {
func unkown_member_error(members *Vec, tok *Token, msg *u8) u0 {
print_error_with_code(tok, msg);

if members.len == 0 {
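
Taken together with the header update above, the pattern of this commit for the parser's error helpers is a plain return-type cleanup: each of these functions unconditionally calls `exit(1)`, so the `i32` return value was never meaningful, and the declarations and definitions now use `u0`, which, judging from existing declarations such as `copy_token` in src/tokenizer/header.ibu, appears to be the language's void-like return type. Restated from the diff as a before/after of one of the helpers (no code here beyond what the hunks show):

// before
func unexpected_token_error(expected *u8, tok *Token, msg *u8) i32 {
    print_error_with_code(tok, msg);
    eprintf("help: expected `%s` but got `%s`\n", expected, tok.lit);
    exit(1);
}

// after
func unexpected_token_error(expected *u8, tok *Token, msg *u8) u0 {
    print_error_with_code(tok, msg);
    eprintf("help: expected `%s` but got `%s`\n", expected, tok.lit);
    exit(1);
}
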
4 changes: 2 additions & 2 deletions src/tokenizer/header.ibu
@@ -31,8 +31,8 @@ struct Tokenizer {
}

func copy_token(source *Token, dest *Token) u0;
func print_error_with_code(tok *Token, msg *u8) i32;
func print_error(tok *Token, msg *u8) i32;
func print_error_with_code(tok *Token, msg *u8) u0;
func print_error(tok *Token, msg *u8) u0;
func new_tokenizer(file_name *u8, program *u8) *Tokenizer;
func tokenize(t *Tokenizer) *Token;

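This mirrors the parser header change: `copy_token` in the same header already returned `u0`, and after this commit the two error printers follow the same convention, so the declaration block reads (restated from the hunk above):

func copy_token(source *Token, dest *Token) u0;
func print_error_with_code(tok *Token, msg *u8) u0;
func print_error(tok *Token, msg *u8) u0;
func new_tokenizer(file_name *u8, program *u8) *Tokenizer;
func tokenize(t *Tokenizer) *Token;
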
23 changes: 10 additions & 13 deletions src/tokenizer/tokenizer.ibu
@@ -21,7 +21,7 @@ func frepeat_str(fd i32, str *u8, n i32) i32 {
return 0;
}

func print_error_with_code(tok *Token, msg *u8) i32 {
func print_error_with_code(tok *Token, msg *u8) u0 {
eprintf("\033[1m\033[31m--> %s:%d:%d: error:\033[0m\n", tok.file_name, tok.line, tok.col);

eprintf(" | \n");
@@ -50,7 +50,7 @@ func print_error_with_code(tok *Token, msg *u8) i32 {
return 0;
}

func print_error(tok *Token, msg *u8) i32 {
func print_error(tok *Token, msg *u8) u0 {
print_error_with_code(tok, msg);
exit(1);
}
@@ -89,8 +89,8 @@ func starts_with(a *u8, b *u8) bool {
return true;
}

func tokenizer_next(t *Tokenizer) i32 {
if *t.program == '\n' { // new line
func tokenizer_next(t *Tokenizer) u0 {
if *t.program == '\n' {
t.line++;
t.col = 0;
t.program++;
@@ -99,16 +99,14 @@ func tokenizer_next(t *Tokenizer) i32 {
t.col++;
t.program++;
}
return 0;
}

func tokenizer_next_n(t *Tokenizer, n i32) i32 {
func tokenizer_next_n(t *Tokenizer, n i32) u0 {
let idx i32 = 0;
while idx < n {
tokenizer_next(t);
idx++;
}
return 0;
}

func new_token(t *Tokenizer, kind TokenKind) *Token {
@@ -142,13 +140,12 @@ func tokenize_punct(t *Tokenizer) *Token {
return nil;
}

func skip_single_line_comment(t *Tokenizer) i32 {
func skip_single_line_comment(t *Tokenizer) u0 {
if starts_with(t.program, "//") {
while (*t.program != '\n') && (*t.program != '\0') {
tokenizer_next(t);
}
}
return 0;
}

func string_end(t *Tokenizer, str *u8) *u8 {
@@ -179,8 +176,8 @@ func from_hex(c u8) u8 {
return c - 'A' + 10;
}

func read_escaped_char(t *Tokenizer) i32 {
let c i32;
func read_escaped_char(t *Tokenizer) u8 {
let c u8;
tokenizer_next(t);
if '0' <= *t.program <= '7' {
c = *t.program - '0';
@@ -252,9 +249,9 @@ func tokenize_number(t *Tokenizer) *Token {
func tokenize_char(t *Tokenizer) *Token {
let tok *Token = new_token(t, TK_NUM);
let char_start *u8 = t.program;
tokenizer_next(t); // skip start '\''
tokenizer_next(t);

let c i32;
let c u8;
if *t.program == '\\' {
c = read_escaped_char(t);
} else {
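
Beyond the `i32` to `u0` conversions, the last two hunks narrow the character decoding to a byte: `read_escaped_char` now returns `u8`, and both it and `tokenize_char` keep the decoded character in a `u8` local, which lines up with `from_hex(c u8) u8` and with `t.program` being a `*u8`. Restated from the diff as an abridged sketch; the ellipsis comments stand in for code that is outside these hunks:

func read_escaped_char(t *Tokenizer) u8 {    // was: i32
    let c u8;                                // was: let c i32;
    tokenizer_next(t);
    // ... escape decoding continues as before (outside this diff)
}

and the call site in tokenize_char:

    let c u8;                                // was: let c i32;
    if *t.program == '\\' {
        c = read_escaped_char(t);            // now yields a u8, matching c
    }
    // ... (rest of tokenize_char is outside this diff)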
