Allow expressions in designators

commit 077de53c74
parent 5e9b4259ca
Date: 2025-02-07 00:56:54 +01:00
5 changed files with 51 additions and 40 deletions


@@ -1,20 +1,20 @@
const
- SEEK_SET = 0, SEEK_CUR = 1, SEEK_END = 2,
+ SEEK_SET = 0; SEEK_CUR = 1; SEEK_END = 2;
- TOKEN_IDENTIFIER = 1, TOKEN_IF = 2, TOKEN_THEN = 3, TOKEN_ELSE = 4, TOKEN_ELSIF = 5,
- TOKEN_WHILE = 6, TOKEN_DO = 7, TOKEN_PROC = 8, TOKEN_BEGIN = 9, TOKEN_END = 10,
- TOKEN_EXTERN = 11, TOKEN_CONST = 12, TOKEN_VAR = 13, TOKEN_ARRAY = 14, TOKEN_OF = 15,
- TOKEN_TYPE = 16, TOKEN_RECORD = 17, TOKEN_UNION = 18, TOKEN_POINTER = 19, TOKEN_TO = 20,
- TOKEN_BOOLEAN = 21, TOKEN_NIL = 22, TOKEN_AND = 23, TOKEN_OR = 24, TOKEN_NOT = 25,
- TOKEN_RETURN = 26, TOKEN_CAST = 27, TOKEN_AS = 28, TOKEN_SIZEOF = 29,
- TOKEN_LEFT_PAREN = 30, TOKEN_RIGHT_PAREN = 31, TOKEN_LEFT_SQUARE = 32,
- TOKEN_RIGHT_SQUARE = 33, TOKEN_GREATER_EQUAL = 34, TOKEN_LESS_EQUAL = 35,
- TOKEN_GREATER_THAN = 36, TOKEN_LESS_THAN = 37, TOKEN_NOT_EQUAL = 38, TOKEN_EQUAL = 39,
- TOKEN_SEMICOLON = 40, TOKEN_DOT = 41, TOKEN_COMMA = 42,
- TOKEN_PLUS = 43, TOKEN_MINUS = 44, TOKEN_MULTIPLICATION = 45, TOKEN_DIVISION = 46,
- TOKEN_REMAINDER = 47, TOKEN_ASSIGNMENT = 48, TOKEN_COLON = 49, TOKEN_HAT = 50,
- TOKEN_AT = 51, TOKEN_COMMENT = 52, TOKEN_INTEGER = 53, TOKEN_WORD = 54,
- TOKEN_CHARACTER = 55, TOKEN_STRING = 56;
+ TOKEN_IDENTIFIER = 1; TOKEN_IF = 2; TOKEN_THEN = 3; TOKEN_ELSE = 4; TOKEN_ELSIF = 5;
+ TOKEN_WHILE = 6; TOKEN_DO = 7; TOKEN_PROC = 8; TOKEN_BEGIN = 9; TOKEN_END = 10;
+ TOKEN_EXTERN = 11; TOKEN_CONST = 12; TOKEN_VAR = 13; TOKEN_ARRAY = 14; TOKEN_OF = 15;
+ TOKEN_TYPE = 16; TOKEN_RECORD = 17; TOKEN_UNION = 18; TOKEN_POINTER = 19; TOKEN_TO = 20;
+ TOKEN_BOOLEAN = 21; TOKEN_NIL = 22; TOKEN_AND = 23; TOKEN_OR = 24; TOKEN_NOT = 25;
+ TOKEN_RETURN = 26; TOKEN_CAST = 27; TOKEN_AS = 28; TOKEN_SIZEOF = 29;
+ TOKEN_LEFT_PAREN = 30; TOKEN_RIGHT_PAREN = 31; TOKEN_LEFT_SQUARE = 32;
+ TOKEN_RIGHT_SQUARE = 33; TOKEN_GREATER_EQUAL = 34; TOKEN_LESS_EQUAL = 35;
+ TOKEN_GREATER_THAN = 36; TOKEN_LESS_THAN = 37; TOKEN_NOT_EQUAL = 38; TOKEN_EQUAL = 39;
+ TOKEN_SEMICOLON = 40; TOKEN_DOT = 41; TOKEN_COMMA = 42;
+ TOKEN_PLUS = 43; TOKEN_MINUS = 44; TOKEN_MULTIPLICATION = 45; TOKEN_DIVISION = 46;
+ TOKEN_REMAINDER = 47; TOKEN_ASSIGNMENT = 48; TOKEN_COLON = 49; TOKEN_HAT = 50;
+ TOKEN_AT = 51; TOKEN_COMMENT = 52; TOKEN_INTEGER = 53; TOKEN_WORD = 54;
+ TOKEN_CHARACTER = 55; TOKEN_STRING = 56;
type
Position = record
@@ -73,7 +73,6 @@ proc malloc(size: Word): pointer to Byte; extern;
proc free(ptr: pointer to Byte); extern;
proc calloc(nmemb: Word, size: Word): pointer to Byte; extern;
proc realloc(ptr: pointer to Byte, size: Word): pointer to Byte; extern;
- proc reallocarray(ptr: pointer to Byte, n: Word, size: Word): pointer to Byte; extern;
proc memset(ptr: pointer to Char, c: Int, n: Int): pointer to Char; extern;
@@ -91,6 +90,11 @@ proc exit(code: Int); extern;
(*
Standard procedures.
*)
+ proc reallocarray(ptr: pointer to Byte, n: Word, size: Word): pointer to Byte;
+ begin
+ return realloc(ptr, n * size)
+ end;
proc write_s(value: String);
begin
write(0, value, strlen(value))
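The reallocarray added in this hunk replaces the extern declaration removed a few lines above and simply forwards to realloc with n * size. For comparison, the C library function of the same name (originating in OpenBSD) also rejects products that would overflow; a minimal C sketch of that guard, with an illustrative helper name:

#include <errno.h>
#include <stdint.h>
#include <stdlib.h>

/* Illustrative overflow-checked variant; not the definition used in the commit. */
static void *checked_reallocarray(void *ptr, size_t n, size_t size)
{
    /* Refuse the request if n * size would wrap around SIZE_MAX. */
    if (size != 0 && n > SIZE_MAX / size) {
        errno = ENOMEM;
        return NULL;
    }
    return realloc(ptr, n * size);
}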
@@ -128,7 +132,7 @@ begin
buffer[n] := cast(cast('0' as Int) + digit as Char);
n := n - 1
end;
- while n < 10 do
+ while n < 9 do
n := n + 1;
write_c(buffer[n])
end
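The loop bound changed here controls how much of a fixed-size digit buffer, filled from the right, is written out. A minimal C sketch of the same technique; the function name, buffer size, and zero-based indexing are illustrative rather than taken from the original:

#include <stdio.h>

/* Print an unsigned value by filling a buffer from the right and
   emitting only the digits that were actually produced. */
static void print_u(unsigned long value)
{
    char buffer[20];   /* large enough for a 64-bit value */
    int n = 19;        /* next free slot, moving leftwards */

    do {
        buffer[n] = (char) ('0' + value % 10);
        value = value / 10;
        n = n - 1;
    } while (value > 0);

    while (n < 19) {   /* n is the last unused slot; emit buffer[n + 1 .. 19] */
        n = n + 1;
        putchar(buffer[n]);
    }
}

In this sketch the bound 19 is the highest valid index, so the output loop never reads past the end of the buffer.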
@@ -255,7 +259,6 @@ end;
proc lex_comment(input: pointer to Char): pointer to Char;
var
- current: pointer to Char,
next: pointer to Char;
begin
while input^ <> '\0' do
@@ -264,7 +267,7 @@ begin
if input^ = '*' and next^ = ')' then
return next + 1
end;
- input := input + 1
+ input := next
end;
return nil
end;
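lex_comment inspects the current character and the one after it, returning the position just past a closing *) or nil when the comment is unterminated; the change above reuses the already computed next pointer to advance. A minimal C sketch of the same scan, with illustrative names:

#include <stddef.h>

/* Skip a (* ... *) block comment.  Returns the position just past the
   closing delimiter, or NULL if no closing delimiter is found. */
static const char *skip_block_comment(const char *input)
{
    while (*input != '\0') {
        const char *next = input + 1;

        if (*input == '*' && *next == ')')
            return next + 1;
        input = next;   /* advance by one character, reusing the lookahead */
    }
    return NULL;
}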
@@ -286,16 +289,13 @@ end;
proc lex_string(input: pointer to Char, current_token: pointer to Token): pointer to Char;
var
token_end: pointer to Char,
- previous: pointer to Char,
constructed_string: pointer to Char,
token_length: Word,
is_valid: Bool;
begin
token_end := input;
- previous := input - 1;
- while token_end^ <> '\0' and not (previous^ <> '\\' and token_end^ = '"') do
- previous := token_end;
+ while token_end^ <> '\0' and not ((token_end - 1)^ <> '\\' and token_end^ = '"') do
token_end := token_end + 1
end;
if token_end^ <> '\"' then
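The rewritten loop in lex_string searches for the closing quote with a one-character look-behind, (token_end - 1)^, instead of carrying a separate previous pointer: a quote only terminates the literal when the character before it is not a backslash. A minimal C sketch of that scan, with illustrative names; like the original appears to, it assumes the opening quote sits immediately before input:

#include <stddef.h>

/* Find the unescaped closing quote of a string literal.  `input` points
   just past the opening '"'.  Returns the position of the closing quote,
   or NULL if the literal is unterminated. */
static const char *find_string_end(const char *input)
{
    const char *token_end = input;

    while (*token_end != '\0' && !(token_end[-1] != '\\' && *token_end == '"'))
        token_end = token_end + 1;

    return *token_end == '"' ? token_end : NULL;
}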