Create tokenization tables

2025-05-01 23:37:40 +02:00
parent f3a8b2626a
commit 768821c689
3 changed files with 381 additions and 178 deletions


@@ -1896,11 +1896,6 @@ _main:
sw s0, 0(sp)
addi s0, sp, 8
# Read the source from the standard input.
la a0, source_code
li a1, SOURCE_BUFFER_SIZE # Buffer size.
call _read_file
li s2, 1
# Epilogue.
@@ -1912,7 +1907,13 @@ _main:
# Entry point.
.type _start, @function
_start:
call _tokenizer_initialize
# Read the source from the standard input.
la a0, source_code
li a1, SOURCE_BUFFER_SIZE # Buffer size.
call _read_file
mv a0, s1
call _tokenize
call _main
call _compile
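
Taken together, the two hunks appear to move the source-reading block out of _main and into _start, ahead of the new tokenization calls. Below is a minimal, commented sketch of the resulting _start sequence; the comments are interpretation, _tokenizer_initialize and _tokenize are defined in the other changed files not shown here, and the value in s1 passed to _tokenize is assumed to be set up in context lines omitted from the hunk.

    .type _start, @function
    _start:
        # Prepare the tokenizer state (the tables this commit creates);
        # its body lives in one of the other changed files.
        call _tokenizer_initialize
        # Read the source from the standard input into the fixed buffer.
        la a0, source_code          # a0 = destination buffer.
        li a1, SOURCE_BUFFER_SIZE   # a1 = buffer size.
        call _read_file
        # Tokenize the source; s1 is assumed to carry the argument
        # _tokenize expects (set in context lines not shown here).
        mv a0, s1
        call _tokenize
        # Hand control to the compiler proper.
        call _main
        call _compile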