Handle ASCII codes > 128 in the tokenizer

Eugen Wissner 2025-05-30 19:51:18 +02:00
parent 15135f14d8
commit a93d12eb50
Signed by: belka
GPG Key ID: A27FDC1E8EE902C0
3 changed files with 278 additions and 259 deletions
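The classification table built in initialize_classification is indexed 1-based by character code (entry 1 is NUL, entry 128 is DEL), so before this change any input byte of 128 or above fell outside the initialized entries. The loop added below fills entries 129 through 256 with transitionClassOther, which is what makes bytes above the ASCII range safe to feed to the state machine. A minimal sketch of how the extended table would be consulted (the actual lookup in lexer_current is not part of this diff, and both the classify helper and the '+ 1' indexing, inferred from entry 1 being NUL, are assumptions):

(* Illustrative sketch only; classify is a hypothetical helper, not part of this commit. *)
PROCEDURE classify(ch: CHAR): TransitionClass;
BEGIN
    (* Bytes 128..255 now resolve to transitionClassOther instead of
       falling past the last initialized entry of the table. *)
    RETURN classification[ORD(ch) + 1]
END classify;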

@@ -73,13 +73,13 @@ TYPE
 lexerKindFrom,
 lexerKindPointer,
 lexerKindArray,
-lexerKindTrait,
+lexerKindArrow,
 lexerKindProgram,
 lexerKindModule,
 lexerKindImport
 );
 LexerToken = RECORD
-CASE Kind: LexerKind OF
+CASE kind: LexerKind OF
 lexerKindBoolean: booleanKind: BOOLEAN |
 lexerKindIdentifier: identifierKind: ARRAY[1..256] OF CHAR
 END

@@ -72,6 +72,8 @@ VAR
 transitions: ARRAY[1..16] OF TransitionClasses;
 PROCEDURE initialize_classification();
+VAR
+i: CARDINAL;
 BEGIN
 classification[1] := transitionClassEof; (* NUL *)
 classification[2] := transitionClassInvalid; (* SOH *)
@@ -200,255 +202,264 @@ BEGIN
 classification[125] := transitionClassSingle; (* | *)
 classification[126] := transitionClassOther; (* } *)
 classification[127] := transitionClassSingle; (* ~ *)
-classification[128] := transitionClassInvalid (* DEL *)
+classification[128] := transitionClassInvalid; (* DEL *)
+i := 129;
+WHILE i <= 256 DO
+classification[i] := transitionClassOther;
+i := i + 1
+END
 END initialize_classification;
 PROCEDURE compare_keyword(Keyword: ARRAY OF CHAR; TokenStart: PLexerBuffer; TokenEnd: PLexerBuffer): BOOLEAN;
 VAR
-Result: BOOLEAN;
+result: BOOLEAN;
-Index: CARDINAL;
+index: CARDINAL;
 BEGIN
-Index := 0;
+index := 0;
-Result := TRUE;
+result := TRUE;
-WHILE (Index < Length(Keyword)) AND (TokenStart <> TokenEnd) AND Result DO
+WHILE (index < Length(Keyword)) AND (TokenStart <> TokenEnd) AND result DO
-Result := (Keyword[Index] = TokenStart^) OR (Lower(Keyword[Index]) = TokenStart^);
+result := (Keyword[index] = TokenStart^) OR (Lower(Keyword[index]) = TokenStart^);
 INC(TokenStart);
-INC(Index)
+INC(index)
 END;
-Result := (Index = Length(Keyword)) AND (TokenStart = TokenEnd) AND Result;
+result := (index = Length(Keyword)) AND (TokenStart = TokenEnd) AND result;
-RETURN Result
+RETURN result
 END compare_keyword;
 (* Reached the end of file. *)
-PROCEDURE transition_action_eof(lexer: PLexer; AToken: PLexerToken);
+PROCEDURE transition_action_eof(lexer: PLexer; token: PLexerToken);
 BEGIN
-AToken^.Kind := lexerKindEof
+token^.kind := lexerKindEof
 END transition_action_eof;
 (* Add the character to the token currently read and advance to the next character. *)
-PROCEDURE transition_action_accumulate(lexer: PLexer; AToken: PLexerToken);
+PROCEDURE transition_action_accumulate(lexer: PLexer; token: PLexerToken);
 BEGIN
 INC(lexer^.Current)
 END transition_action_accumulate;
 (* The current character is not a part of the token. Finish the token already
 * read. Don't advance to the next character. *)
-PROCEDURE transition_action_finalize(lexer: PLexer; AToken: PLexerToken);
+PROCEDURE transition_action_finalize(lexer: PLexer; token: PLexerToken);
 BEGIN
 IF lexer^.Start^ = ':' THEN
-AToken^.Kind := lexerKindColon
+token^.kind := lexerKindColon
 END;
 IF lexer^.Start^ = '>' THEN
-AToken^.Kind := lexerKindGreaterThan
+token^.kind := lexerKindGreaterThan
 END;
 IF lexer^.Start^ = '<' THEN
-AToken^.Kind := lexerKindLessThan
+token^.kind := lexerKindLessThan
 END;
 IF lexer^.Start^ = '(' THEN
-AToken^.Kind := lexerKindLeftParen
+token^.kind := lexerKindLeftParen
 END;
 IF lexer^.Start^ = '-' THEN
-AToken^.Kind := lexerKindLeftParen
+token^.kind := lexerKindLeftParen
 END;
 IF lexer^.Start^ = '.' THEN
-AToken^.Kind := lexerKindDot
+token^.kind := lexerKindDot
 END
 END transition_action_finalize;
 (* An action for tokens containing multiple characters. *)
-PROCEDURE transition_action_composite(lexer: PLexer; AToken: PLexerToken);
+PROCEDURE transition_action_composite(lexer: PLexer; token: PLexerToken);
 BEGIN
 IF lexer^.Start^ = '<' THEN
 IF lexer^.Current^ = '>' THEN
-AToken^.Kind := lexerKindNotEqual
+token^.kind := lexerKindNotEqual
 END;
 IF lexer^.Current^ = '=' THEN
-AToken^.Kind := lexerKindLessEqual
+token^.kind := lexerKindLessEqual
 END
 END;
 IF (lexer^.Start^ = '>') AND (lexer^.Current^ = '=') THEN
-AToken^.Kind := lexerKindGreaterEqual
+token^.kind := lexerKindGreaterEqual
 END;
 IF (lexer^.Start^ = '.') AND (lexer^.Current^ = '.') THEN
-AToken^.Kind := lexerKindRange
+token^.kind := lexerKindRange
 END;
 IF (lexer^.Start^ = ':') AND (lexer^.Current^ = '=') THEN
-AToken^.Kind := lexerKindAssignment
+token^.kind := lexerKindAssignment
+END;
+IF (lexer^.Start^ = '-') AND (lexer^.Current^ = '>') THEN
+token^.kind := lexerKindArrow
 END;
 INC(lexer^.Current)
 END transition_action_composite;
 (* Skip a space. *)
-PROCEDURE transition_action_skip(lexer: PLexer; AToken: PLexerToken);
+PROCEDURE transition_action_skip(lexer: PLexer; token: PLexerToken);
 BEGIN
 INC(lexer^.Current);
 INC(lexer^.Start)
 END transition_action_skip;
 (* Delimited string action. *)
-PROCEDURE transition_action_delimited(lexer: PLexer; AToken: PLexerToken);
+PROCEDURE transition_action_delimited(lexer: PLexer; token: PLexerToken);
 BEGIN
 IF lexer^.Start^ = '(' THEN
-AToken^.Kind := lexerKindComment
+token^.kind := lexerKindComment
 END;
 IF lexer^.Start^ = '"' THEN
-AToken^.Kind := lexerKindCharacter
+token^.kind := lexerKindCharacter
 END;
 IF lexer^.Start^ = "'" THEN
-AToken^.Kind := lexerKindString
+token^.kind := lexerKindString
 END;
 INC(lexer^.Current)
 END transition_action_delimited;
 (* Finalize keyword OR identifier. *)
-PROCEDURE transition_action_key_id(lexer: PLexer; AToken: PLexerToken);
+PROCEDURE transition_action_key_id(lexer: PLexer; token: PLexerToken);
 BEGIN
-AToken^.Kind := lexerKindIdentifier;
+token^.kind := lexerKindIdentifier;
-AToken^.identifierKind[1] := lexer^.Current - lexer^.Start;
+token^.identifierKind[1] := lexer^.Current - lexer^.Start;
-MemCopy(lexer^.Start, ORD(AToken^.identifierKind[1]), ADR(AToken^.identifierKind[2]));
+MemCopy(lexer^.Start, ORD(token^.identifierKind[1]), ADR(token^.identifierKind[2]));
 IF compare_keyword('PROGRAM', lexer^.Start, lexer^.Current) THEN
-AToken^.Kind := lexerKindProgram
+token^.kind := lexerKindProgram
 END;
 IF compare_keyword('IMPORT', lexer^.Start, lexer^.Current) THEN
-AToken^.Kind := lexerKindImport
+token^.kind := lexerKindImport
 END;
 IF compare_keyword('CONST', lexer^.Start, lexer^.Current) THEN
-AToken^.Kind := lexerKindConst
+token^.kind := lexerKindConst
 END;
 IF compare_keyword('VAR', lexer^.Start, lexer^.Current) THEN
-AToken^.Kind := lexerKindVar
+token^.kind := lexerKindVar
 END;
 IF compare_keyword('IF', lexer^.Start, lexer^.Current) THEN
-AToken^.Kind := lexerKindIf
+token^.kind := lexerKindIf
 END;
 IF compare_keyword('THEN', lexer^.Start, lexer^.Current) THEN
-AToken^.Kind := lexerKindThen
+token^.kind := lexerKindThen
 END;
 IF compare_keyword('ELSIF', lexer^.Start, lexer^.Current) THEN
-AToken^.Kind := lexerKindElsif
+token^.kind := lexerKindElsif
 END;
 IF compare_keyword('ELSE', lexer^.Start, lexer^.Current) THEN
-AToken^.Kind := lexerKindElse
+token^.kind := lexerKindElse
 END;
 IF compare_keyword('WHILE', lexer^.Start, lexer^.Current) THEN
-AToken^.Kind := lexerKindWhile
+token^.kind := lexerKindWhile
 END;
 IF compare_keyword('DO', lexer^.Start, lexer^.Current) THEN
-AToken^.Kind := lexerKindDo
+token^.kind := lexerKindDo
 END;
 IF compare_keyword('proc', lexer^.Start, lexer^.Current) THEN
-AToken^.Kind := lexerKindProc
+token^.kind := lexerKindProc
 END;
 IF compare_keyword('BEGIN', lexer^.Start, lexer^.Current) THEN
-AToken^.Kind := lexerKindBegin
+token^.kind := lexerKindBegin
 END;
 IF compare_keyword('END', lexer^.Start, lexer^.Current) THEN
-AToken^.Kind := lexerKindEnd
+token^.kind := lexerKindEnd
 END;
 IF compare_keyword('TYPE', lexer^.Start, lexer^.Current) THEN
-AToken^.Kind := lexerKindType
+token^.kind := lexerKindType
 END;
 IF compare_keyword('RECORD', lexer^.Start, lexer^.Current) THEN
-AToken^.Kind := lexerKindRecord
+token^.kind := lexerKindRecord
 END;
 IF compare_keyword('UNION', lexer^.Start, lexer^.Current) THEN
-AToken^.Kind := lexerKindUnion
+token^.kind := lexerKindUnion
 END;
 IF compare_keyword('NIL', lexer^.Start, lexer^.Current) THEN
-AToken^.Kind := lexerKindNull
+token^.kind := lexerKindNull
 END;
 IF compare_keyword('AND', lexer^.Start, lexer^.Current) THEN
-AToken^.Kind := lexerKindAnd
+token^.kind := lexerKindAnd
 END;
 IF compare_keyword('OR', lexer^.Start, lexer^.Current) THEN
-AToken^.Kind := lexerKindOr
+token^.kind := lexerKindOr
 END;
 IF compare_keyword('RETURN', lexer^.Start, lexer^.Current) THEN
-AToken^.Kind := lexerKindReturn
+token^.kind := lexerKindReturn
 END;
 IF compare_keyword('DEFINITION', lexer^.Start, lexer^.Current) THEN
-AToken^.Kind := lexerKindDefinition
+token^.kind := lexerKindDefinition
 END;
 IF compare_keyword('TO', lexer^.Start, lexer^.Current) THEN
-AToken^.Kind := lexerKindTo
+token^.kind := lexerKindTo
 END;
 IF compare_keyword('CASE', lexer^.Start, lexer^.Current) THEN
-AToken^.Kind := lexerKindCase
+token^.kind := lexerKindCase
 END;
 IF compare_keyword('OF', lexer^.Start, lexer^.Current) THEN
-AToken^.Kind := lexerKindOf
+token^.kind := lexerKindOf
 END;
 IF compare_keyword('FROM', lexer^.Start, lexer^.Current) THEN
-AToken^.Kind := lexerKindFrom
+token^.kind := lexerKindFrom
 END;
 IF compare_keyword('MODULE', lexer^.Start, lexer^.Current) THEN
-AToken^.Kind := lexerKindModule
+token^.kind := lexerKindModule
 END;
 IF compare_keyword('IMPLEMENTATION', lexer^.Start, lexer^.Current) THEN
-AToken^.Kind := lexerKindImplementation
+token^.kind := lexerKindImplementation
 END;
 IF compare_keyword('POINTER', lexer^.Start, lexer^.Current) THEN
-AToken^.Kind := lexerKindPointer
+token^.kind := lexerKindPointer
 END;
 IF compare_keyword('ARRAY', lexer^.Start, lexer^.Current) THEN
-AToken^.Kind := lexerKindArray
+token^.kind := lexerKindArray
 END;
 IF compare_keyword('TRUE', lexer^.Start, lexer^.Current) THEN
-AToken^.Kind := lexerKindBoolean;
+token^.kind := lexerKindBoolean;
-AToken^.booleanKind := TRUE
+token^.booleanKind := TRUE
 END;
 IF compare_keyword('FALSE', lexer^.Start, lexer^.Current) THEN
-AToken^.Kind := lexerKindBoolean;
+token^.kind := lexerKindBoolean;
-AToken^.booleanKind := FALSE
+token^.booleanKind := FALSE
 END
 END transition_action_key_id;
 (* Action for tokens containing only one character. The character cannot be
 * followed by other characters forming a composite token. *)
-PROCEDURE transition_action_single(lexer: PLexer; AToken: PLexerToken);
+PROCEDURE transition_action_single(lexer: PLexer; token: PLexerToken);
 BEGIN
 IF lexer^.Current^ = '&' THEN
-AToken^.Kind := lexerKindAnd
+token^.kind := lexerKindAnd
 END;
 IF lexer^.Current^ = ';' THEN
-AToken^.Kind := lexerKindSemicolon
+token^.kind := lexerKindSemicolon
 END;
 IF lexer^.Current^ = ',' THEN
-AToken^.Kind := lexerKindComma
+token^.kind := lexerKindComma
 END;
 IF lexer^.Current^ = ',' THEN
-AToken^.Kind := lexerKindComma
+token^.kind := lexerKindComma
 END;
 IF lexer^.Current^ = ')' THEN
-AToken^.Kind := lexerKindRightParen
+token^.kind := lexerKindRightParen
 END;
 IF lexer^.Current^ = '[' THEN
-AToken^.Kind := lexerKindLeftSquare
+token^.kind := lexerKindLeftSquare
 END;
 IF lexer^.Current^ = ']' THEN
-AToken^.Kind := lexerKindRightSquare
+token^.kind := lexerKindRightSquare
 END;
 IF lexer^.Current^ = '^' THEN
-AToken^.Kind := lexerKindHat
+token^.kind := lexerKindHat
 END;
 IF lexer^.Current^ = '=' THEN
-AToken^.Kind := lexerKindEqual
+token^.kind := lexerKindEqual
 END;
 IF lexer^.Current^ = '+' THEN
-AToken^.Kind := lexerKindPlus
+token^.kind := lexerKindPlus
 END;
 IF lexer^.Current^ = '/' THEN
-AToken^.Kind := lexerKindDivision
+token^.kind := lexerKindDivision
 END;
 IF lexer^.Current^ = '%' THEN
-AToken^.Kind := lexerKindRemainder
+token^.kind := lexerKindRemainder
 END;
 IF lexer^.Current^ = '@' THEN
-AToken^.Kind := lexerKindAt
+token^.kind := lexerKindAt
 END;
 IF lexer^.Current^ = '|' THEN
-AToken^.Kind := lexerKindPipe
+token^.kind := lexerKindPipe
 END;
 INC(lexer^.Current)
 END transition_action_single;
 (* Handle an integer literal. *)
-PROCEDURE transition_action_integer(lexer: PLexer; AToken: PLexerToken);
+PROCEDURE transition_action_integer(lexer: PLexer; token: PLexerToken);
 BEGIN
-AToken^.Kind := lexerKindInteger
+token^.kind := lexerKindInteger
 END transition_action_integer;
 PROCEDURE set_default_transition(CurrentState: TransitionState; DefaultAction: TransitionAction; NextState: TransitionState);
 VAR
@@ -747,7 +758,7 @@ VAR
 CurrentClass: TransitionClass;
 CurrentState: TransitionState;
 CurrentTransition: Transition;
-Result: LexerToken;
+result: LexerToken;
 BEGIN
 lexer^.Current := lexer^.Start;
 CurrentState := transitionStateStart;
@@ -757,15 +768,15 @@ BEGIN
 CurrentTransition := transitions[ORD(CurrentState) + 1][ORD(CurrentClass) + 1];
 IF CurrentTransition.Action <> NIL THEN
-CurrentTransition.Action(lexer, ADR(Result))
+CurrentTransition.Action(lexer, ADR(result))
 END;
 CurrentState := CurrentTransition.NextState
 END;
-RETURN Result
+RETURN result
 END lexer_current;
 PROCEDURE lexer_lex(lexer: PLexer): LexerToken;
 VAR
-Result: LexerToken;
+result: LexerToken;
 BEGIN
 IF lexer^.Length = 0 THEN
 lexer^.Length := ReadNBytes(lexer^.Input, CHUNK_SIZE, lexer^.Buffer);
@@ -773,8 +784,8 @@ BEGIN
 END;
 lexer^.Start := lexer^.Current;
-Result := lexer_current(lexer);
+result := lexer_current(lexer);
-RETURN Result
+RETURN result
 END lexer_lex;
 PROCEDURE lexer_destroy(lexer: PLexer);
 BEGIN

@@ -19,7 +19,7 @@ VAR
 BEGIN
 result := lexer_lex(lexer);
-WHILE result.Kind = lexerKindComment DO
+WHILE result.kind = lexerKindComment DO
 result := lexer_lex(lexer)
 END;
@@ -48,7 +48,7 @@ BEGIN
 written_bytes := WriteNBytes(StdOut, ADDRESS(lexer^.Current - lexer^.Start), lexer^.Start);
 token := transpiler_lex(lexer);
-WHILE token.Kind <> lexerKindSemicolon DO
+WHILE token.kind <> lexerKindSemicolon DO
 WriteString(', ');
 token := transpiler_lex(lexer);
 written_bytes := WriteNBytes(StdOut, ADDRESS(lexer^.Current - lexer^.Start), lexer^.Start);
@@ -63,7 +63,7 @@ VAR
 BEGIN
 token := lexer_current(lexer);
-WHILE token.Kind = lexerKindFrom DO
+WHILE token.kind = lexerKindFrom DO
 transpile_import(context, lexer);
 token := lexer_current(lexer)
 END;
@@ -71,69 +71,69 @@ BEGIN
 END transpile_import_part;
 PROCEDURE transpile_constant(context: PTranspilerContext; lexer: PLexer);
 VAR
-Token: LexerToken;
+token: LexerToken;
 written_bytes: CARDINAL;
 BEGIN
 WriteString(' ');
-Token := lexer_current(lexer);
+token := lexer_current(lexer);
 written_bytes := WriteNBytes(StdOut, ADDRESS(lexer^.Current - lexer^.Start), lexer^.Start);
-Token := transpiler_lex(lexer);
+token := transpiler_lex(lexer);
 WriteString(' = ');
-Token := transpiler_lex(lexer);
+token := transpiler_lex(lexer);
 written_bytes := WriteNBytes(StdOut, ADDRESS(lexer^.Current - lexer^.Start), lexer^.Start);
-Token := transpiler_lex(lexer);
+token := transpiler_lex(lexer);
 write_semicolon()
 END transpile_constant;
 PROCEDURE transpile_constant_part(context: PTranspilerContext; lexer: PLexer): BOOLEAN;
 VAR
-Token: LexerToken;
+token: LexerToken;
 result: BOOLEAN;
 BEGIN
-Token := lexer_current(lexer);
+token := lexer_current(lexer);
-result := Token.Kind = lexerKindConst;
+result := token.kind = lexerKindConst;
 IF result THEN
 WriteString('CONST');
 WriteLn();
-Token := transpiler_lex(lexer);
+token := transpiler_lex(lexer);
-WHILE Token.Kind = lexerKindIdentifier DO
+WHILE token.kind = lexerKindIdentifier DO
 transpile_constant(context, lexer);
-Token := transpiler_lex(lexer)
+token := transpiler_lex(lexer)
 END
 END;
 RETURN result
 END transpile_constant_part;
 PROCEDURE transpile_module(context: PTranspilerContext; lexer: PLexer);
 VAR
-Token: LexerToken;
+token: LexerToken;
 written_bytes: CARDINAL;
 BEGIN
-Token := transpiler_lex(lexer);
+token := transpiler_lex(lexer);
-IF Token.Kind = lexerKindDefinition THEN
+IF token.kind = lexerKindDefinition THEN
 WriteString('DEFINITION ');
-Token := transpiler_lex(lexer)
+token := transpiler_lex(lexer)
 END;
-IF Token.Kind = lexerKindImplementation THEN
+IF token.kind = lexerKindImplementation THEN
 WriteString('IMPLEMENTATION ');
-Token := transpiler_lex(lexer)
+token := transpiler_lex(lexer)
 END;
 WriteString('MODULE ');
 (* Write the module name and end the line with a semicolon and newline. *)
-Token := transpiler_lex(lexer);
+token := transpiler_lex(lexer);
 written_bytes := WriteNBytes(StdOut, ADDRESS(lexer^.Current - lexer^.Start), lexer^.Start);
-Token := transpiler_lex(lexer);
+token := transpiler_lex(lexer);
 write_semicolon();
 WriteLn();
 (* Write the module body. *)
-Token := transpiler_lex(lexer);
+token := transpiler_lex(lexer);
 transpile_import_part(context, lexer);
 IF transpile_constant_part(context, lexer) THEN
 WriteLn()
@@ -147,32 +147,32 @@ BEGIN
 WriteString('END ');
-Token := transpiler_lex(lexer);
+token := transpiler_lex(lexer);
 written_bytes := WriteNBytes(StdOut, ADDRESS(lexer^.Current - lexer^.Start), lexer^.Start);
-Token := transpiler_lex(lexer);
+token := transpiler_lex(lexer);
 Write('.');
-Token := transpiler_lex(lexer);
+token := transpiler_lex(lexer);
 WriteLn()
 END transpile_module;
 PROCEDURE transpile_type_fields(context: PTranspilerContext; lexer: PLexer);
 VAR
-Token: LexerToken;
+token: LexerToken;
 written_bytes: CARDINAL;
 BEGIN
-Token := transpiler_lex(lexer);
+token := transpiler_lex(lexer);
-WHILE Token.Kind <> lexerKindEnd DO
+WHILE token.kind <> lexerKindEnd DO
 WriteString(' ');
 written_bytes := WriteNBytes(StdOut, ADDRESS(lexer^.Current - lexer^.Start), lexer^.Start);
-Token := transpiler_lex(lexer);
+token := transpiler_lex(lexer);
 WriteString(': ');
 transpile_type_expression(context, lexer);
-Token := transpiler_lex(lexer);
+token := transpiler_lex(lexer);
-IF Token.Kind = lexerKindSemicolon THEN
+IF token.kind = lexerKindSemicolon THEN
-Token := transpiler_lex(lexer);
+token := transpiler_lex(lexer);
 Write(';')
 END;
 WriteLn()
@@ -180,7 +180,7 @@ BEGIN
 END transpile_type_fields;
 PROCEDURE transpile_record_type(context: PTranspilerContext; lexer: PLexer);
 VAR
-Token: LexerToken;
+token: LexerToken;
 BEGIN
 WriteString('RECORD');
 WriteLn();
@@ -189,32 +189,32 @@ BEGIN
 END transpile_record_type;
 PROCEDURE transpile_pointer_type(context: PTranspilerContext; lexer: PLexer);
 VAR
-Token: LexerToken;
+token: LexerToken;
 written_bytes: CARDINAL;
 BEGIN
-Token := lexer_current(lexer);
+token := lexer_current(lexer);
 WriteString('POINTER TO ');
-IF Token.Kind = lexerKindPointer THEN
+IF token.kind = lexerKindPointer THEN
-Token := transpiler_lex(lexer)
+token := transpiler_lex(lexer)
 END;
 transpile_type_expression(context, lexer)
 END transpile_pointer_type;
 PROCEDURE transpile_array_type(context: PTranspilerContext; lexer: PLexer);
 VAR
-Token: LexerToken;
+token: LexerToken;
 written_bytes: CARDINAL;
 BEGIN
 WriteString('ARRAY');
-Token := lexer_current(lexer);
+token := lexer_current(lexer);
-IF Token.Kind = lexerKindArray THEN
+IF token.kind = lexerKindArray THEN
-Token := transpiler_lex(lexer)
+token := transpiler_lex(lexer)
 END;
-IF Token.Kind <> lexerKindOf THEN
+IF token.kind <> lexerKindOf THEN
 WriteString('[1..');
-Token := transpiler_lex(lexer);
+token := transpiler_lex(lexer);
 written_bytes := WriteNBytes(StdOut, ADDRESS(lexer^.Current - lexer^.Start), lexer^.Start);
-Token := transpiler_lex(lexer);
+token := transpiler_lex(lexer);
 Write(']')
 END;
 WriteString(' OF ');
@@ -222,50 +222,50 @@ BEGIN
 END transpile_array_type;
 PROCEDURE transpile_enumeration_type(context: PTranspilerContext; lexer: PLexer);
 VAR
-Token: LexerToken;
+token: LexerToken;
 written_bytes: CARDINAL;
 BEGIN
 WriteString('(');
 WriteLn();
 WriteString(' ');
-Token := transpiler_lex(lexer);
+token := transpiler_lex(lexer);
 written_bytes := WriteNBytes(StdOut, ADDRESS(lexer^.Current - lexer^.Start), lexer^.Start);
-Token := transpiler_lex(lexer);
+token := transpiler_lex(lexer);
-WHILE Token.Kind = lexerKindComma DO
+WHILE token.kind = lexerKindComma DO
 Write(',');
 WriteLn();
 WriteString(' ');
-Token := transpiler_lex(lexer);
+token := transpiler_lex(lexer);
 written_bytes := WriteNBytes(StdOut, ADDRESS(lexer^.Current - lexer^.Start), lexer^.Start);
-Token := transpiler_lex(lexer)
+token := transpiler_lex(lexer)
 END;
 WriteLn();
 WriteString(' )')
 END transpile_enumeration_type;
 PROCEDURE transpile_union_type(context: PTranspilerContext; lexer: PLexer);
 VAR
-Token: LexerToken;
+token: LexerToken;
 END transpile_union_type;
 PROCEDURE transpile_procedure_type(context: PTranspilerContext; lexer: PLexer);
 VAR
-Token: LexerToken;
+token: LexerToken;
 written_bytes: CARDINAL;
 BEGIN
-Token := transpiler_lex(lexer);
+token := transpiler_lex(lexer);
 WriteString('PROCEDURE(');
-Token := transpiler_lex(lexer);
+token := transpiler_lex(lexer);
-WHILE Token.Kind <> lexerKindRightParen DO
+WHILE token.kind <> lexerKindRightParen DO
 written_bytes := WriteNBytes(StdOut, ADDRESS(lexer^.Current - lexer^.Start), lexer^.Start);
-Token := transpiler_lex(lexer);
+token := transpiler_lex(lexer);
-IF Token.Kind = lexerKindComma THEN
+IF token.kind = lexerKindComma THEN
-Token := transpiler_lex(lexer);
+token := transpiler_lex(lexer);
 WriteString(', ')
 END
 END;
@@ -273,42 +273,42 @@ BEGIN
 END transpile_procedure_type;
 PROCEDURE transpile_type_expression(context: PTranspilerContext; lexer: PLexer);
 VAR
-Token: LexerToken;
+token: LexerToken;
 written_bytes: CARDINAL;
 BEGIN
-Token := transpiler_lex(lexer);
+token := transpiler_lex(lexer);
-IF Token.Kind = lexerKindRecord THEN
+IF token.kind = lexerKindRecord THEN
 transpile_record_type(context, lexer)
 END;
-IF Token.Kind = lexerKindLeftParen THEN
+IF token.kind = lexerKindLeftParen THEN
 transpile_enumeration_type(context, lexer)
 END;
-IF (Token.Kind = lexerKindArray) OR (Token.Kind = lexerKindLeftSquare) THEN
+IF (token.kind = lexerKindArray) OR (token.kind = lexerKindLeftSquare) THEN
 transpile_array_type(context, lexer)
 END;
-IF Token.Kind = lexerKindHat THEN
+IF token.kind = lexerKindHat THEN
 transpile_pointer_type(context, lexer)
 END;
-IF Token.Kind = lexerKindProc THEN
+IF token.kind = lexerKindProc THEN
 transpile_procedure_type(context, lexer)
 END;
-IF Token.Kind = lexerKindIdentifier THEN
+IF token.kind = lexerKindIdentifier THEN
 written_bytes := WriteNBytes(StdOut, ADDRESS(lexer^.Current - lexer^.Start), lexer^.Start)
 END
 END transpile_type_expression;
 PROCEDURE transpile_type_declaration(context: PTranspilerContext; lexer: PLexer);
 VAR
-Token: LexerToken;
+token: LexerToken;
 written_bytes: CARDINAL;
 BEGIN
 WriteString(' ');
-Token := lexer_current(lexer);
+token := lexer_current(lexer);
 written_bytes := WriteNBytes(StdOut, ADDRESS(lexer^.Current - lexer^.Start), lexer^.Start);
-Token := transpiler_lex(lexer);
+token := transpiler_lex(lexer);
 WriteString(' = ');
 transpile_type_expression(context, lexer);
-Token := transpiler_lex(lexer);
+token := transpiler_lex(lexer);
 write_semicolon();
 END transpile_type_declaration;
 PROCEDURE transpile_type_part(context: PTranspilerContext; lexer: PLexer);
@@ -317,12 +317,12 @@ VAR
 BEGIN
 token := lexer_current(lexer);
-IF token.Kind = lexerKindType THEN
+IF token.kind = lexerKindType THEN
 WriteString('TYPE');
 WriteLn();
 token := transpiler_lex(lexer);
-WHILE token.Kind = lexerKindIdentifier DO
+WHILE token.kind = lexerKindIdentifier DO
 transpile_type_declaration(context, lexer);
 token := transpiler_lex(lexer)
 END;
@@ -331,35 +331,35 @@ BEGIN
 END transpile_type_part;
 PROCEDURE transpile_variable_declaration(context: PTranspilerContext; lexer: PLexer);
 VAR
-Token: LexerToken;
+token: LexerToken;
 written_bytes: CARDINAL;
 BEGIN
 WriteString(' ');
-Token := lexer_current(lexer);
+token := lexer_current(lexer);
 written_bytes := WriteNBytes(StdOut, ADDRESS(lexer^.Current - lexer^.Start), lexer^.Start);
-Token := transpiler_lex(lexer);
+token := transpiler_lex(lexer);
 WriteString(': ');
 transpile_type_expression(context, lexer);
-Token := transpiler_lex(lexer);
+token := transpiler_lex(lexer);
 write_semicolon()
 END transpile_variable_declaration;
 PROCEDURE transpile_variable_part(context: PTranspilerContext; lexer: PLexer): BOOLEAN;
 VAR
-Token: LexerToken;
+token: LexerToken;
 result: BOOLEAN;
 BEGIN
-Token := lexer_current(lexer);
+token := lexer_current(lexer);
-result := Token.Kind = lexerKindVar;
+result := token.kind = lexerKindVar;
 IF result THEN
 WriteString('VAR');
 WriteLn();
-Token := transpiler_lex(lexer);
+token := transpiler_lex(lexer);
-WHILE Token.Kind = lexerKindIdentifier DO
+WHILE token.kind = lexerKindIdentifier DO
 transpile_variable_declaration(context, lexer);
-Token := transpiler_lex(lexer)
+token := transpiler_lex(lexer)
 END
 END;
 RETURN result
@@ -379,7 +379,7 @@ BEGIN
 Write('(');
 token := transpiler_lex(lexer);
-WHILE token.Kind <> lexerKindRightParen DO
+WHILE token.kind <> lexerKindRightParen DO
 written_bytes := WriteNBytes(StdOut, ADDRESS(lexer^.Current - lexer^.Start), lexer^.Start);
 token := transpiler_lex(lexer);
@@ -388,7 +388,7 @@ BEGIN
 transpile_type_expression(context, lexer);
 token := transpiler_lex(lexer);
-IF (token.Kind = lexerKindSemicolon) OR (token.Kind = lexerKindComma) THEN
+IF (token.kind = lexerKindSemicolon) OR (token.kind = lexerKindComma) THEN
 WriteString('; ');
 token := transpiler_lex(lexer)
 END
@@ -397,7 +397,7 @@ BEGIN
 token := transpiler_lex(lexer);
 (* Check for the return type and write it. *)
-IF token.Kind = lexerKindColon THEN
+IF token.kind = lexerKindArrow THEN
 WriteString(': ');
 token := transpiler_lex(lexer);
 written_bytes := WriteNBytes(StdOut, ADDRESS(lexer^.Current - lexer^.Start), lexer^.Start);
@@ -408,28 +408,28 @@ BEGIN
 RETURN result
 END transpile_procedure_heading;
-PROCEDURE transpile_expression(context: PTranspilerContext; lexer: PLexer; TrailingToken: LexerKind);
+PROCEDURE transpile_expression(context: PTranspilerContext; lexer: PLexer; trailing_token: LexerKind);
 VAR
 token: LexerToken;
 written_bytes: CARDINAL;
 BEGIN
 token := transpiler_lex(lexer);
-WHILE (token.Kind <> TrailingToken) AND (token.Kind <> lexerKindEnd) DO
+WHILE (token.kind <> trailing_token) AND (token.kind <> lexerKindEnd) DO
 written_bytes := 0;
-IF token.Kind = lexerKindNull THEN
+IF token.kind = lexerKindNull THEN
 WriteString('NIL ');
 written_bytes := 1
 END;
-IF token.Kind = lexerKindOr THEN
+IF token.kind = lexerKindOr THEN
 WriteString('OR ');
 written_bytes := 1
 END;
-IF token.Kind = lexerKindAnd THEN
+IF token.kind = lexerKindAnd THEN
 WriteString('AND ');
 written_bytes := 1
 END;
-IF token.Kind = lexerKindNot THEN
+IF token.kind = lexerKindNot THEN
 WriteString('NOT ');
 written_bytes := 1
 END;
@@ -456,7 +456,7 @@ BEGIN
 END transpile_if_statement;
 PROCEDURE transpile_while_statement(context: PTranspilerContext; lexer: PLexer);
 VAR
-Token: LexerToken;
+token: LexerToken;
 written_bytes: CARDINAL;
 BEGIN
 WriteString(' WHILE ');
@@ -466,7 +466,7 @@ BEGIN
 WriteLn();
 transpile_statements(context, lexer);
 WriteString(' END');
-Token := transpiler_lex(lexer)
+token := transpiler_lex(lexer)
 END transpile_while_statement;
 PROCEDURE transpile_assignment_statement(context: PTranspilerContext; lexer: PLexer);
 BEGIN
@@ -475,26 +475,70 @@ BEGIN
 END transpile_assignment_statement;
 PROCEDURE transpile_call_statement(context: PTranspilerContext; lexer: PLexer);
 VAR
-Token: LexerToken;
+token: LexerToken;
 written_bytes: CARDINAL;
 BEGIN
 WriteString('(');
-Token := transpiler_lex(lexer);
+token := transpiler_lex(lexer);
-WHILE (Token.Kind <> lexerKindSemicolon) AND (Token.Kind <> lexerKindEnd) DO
+WHILE (token.kind <> lexerKindSemicolon) AND (token.kind <> lexerKindEnd) DO
 written_bytes := WriteNBytes(StdOut, ADDRESS(lexer^.Current - lexer^.Start), lexer^.Start);
-Token := transpiler_lex(lexer)
+token := transpiler_lex(lexer)
 END
 END transpile_call_statement;
-PROCEDURE transpile_return_statement(context: PTranspilerContext; lexer: PLexer);
+PROCEDURE transpile_designator_expression(context: PTranspilerContext; lexer: PLexer);
 VAR
-Token: LexerToken;
+token: LexerToken;
+written_bytes: CARDINAL;
+BEGIN
+WriteString(' ');
+written_bytes := WriteNBytes(StdOut, ADDRESS(lexer^.Current - lexer^.Start), lexer^.Start);
+token := transpiler_lex(lexer);
+WHILE token.kind = lexerKindLeftSquare DO
+Write('[');
+token := transpiler_lex(lexer);
+WHILE token.kind <> lexerKindRightSquare DO
+written_bytes := WriteNBytes(StdOut, ADDRESS(lexer^.Current - lexer^.Start), lexer^.Start);
+token := transpiler_lex(lexer)
+END;
+Write(']');
+token := transpiler_lex(lexer)
+END;
+IF token.kind = lexerKindHat THEN
+Write('^');
+token := transpiler_lex(lexer)
+END;
+IF token.kind = lexerKindDot THEN
+Write('.');
+token := transpiler_lex(lexer);
+written_bytes := WriteNBytes(StdOut, ADDRESS(lexer^.Current - lexer^.Start), lexer^.Start);
+token := transpiler_lex(lexer)
+END;
+IF token.kind = lexerKindHat THEN
+Write('^');
+token := transpiler_lex(lexer)
+END;
+WHILE token.kind = lexerKindLeftSquare DO
+Write('[');
+token := transpiler_lex(lexer);
+WHILE token.kind <> lexerKindRightSquare DO
+written_bytes := WriteNBytes(StdOut, ADDRESS(lexer^.Current - lexer^.Start), lexer^.Start);
+token := transpiler_lex(lexer)
+END;
+Write(']');
+token := transpiler_lex(lexer)
+END
+END transpile_designator_expression;
+PROCEDURE transpile_return_statement(context: PTranspilerContext; lexer: PLexer);
+VAR
+token: LexerToken;
 written_bytes: CARDINAL;
 BEGIN
 WriteString(' RETURN ');
-Token := transpiler_lex(lexer);
+token := transpiler_lex(lexer);
 written_bytes := WriteNBytes(StdOut, ADDRESS(lexer^.Current - lexer^.Start), lexer^.Start);
-Token := transpiler_lex(lexer)
+token := transpiler_lex(lexer)
 END transpile_return_statement;
 PROCEDURE transpile_statement(context: PTranspilerContext; lexer: PLexer);
 VAR
@@ -503,74 +547,38 @@ VAR
 BEGIN
 token := transpiler_lex(lexer);
-IF token.Kind = lexerKindIf THEN
+IF token.kind = lexerKindIf THEN
 transpile_if_statement(context, lexer)
 END;
-IF token.Kind = lexerKindWhile THEN
+IF token.kind = lexerKindWhile THEN
 transpile_while_statement(context, lexer)
 END;
-IF token.Kind = lexerKindReturn THEN
+IF token.kind = lexerKindReturn THEN
 transpile_return_statement(context, lexer)
 END;
-IF token.Kind = lexerKindIdentifier THEN
+IF token.kind = lexerKindIdentifier THEN
-WriteString(' ');
+transpile_designator_expression(context, lexer);
-written_bytes := WriteNBytes(StdOut, ADDRESS(lexer^.Current - lexer^.Start), lexer^.Start);
+token := lexer_current(lexer);
-token := transpiler_lex(lexer);
-WHILE token.Kind = lexerKindLeftSquare DO
+IF token.kind = lexerKindAssignment THEN
-Write('[');
-token := transpiler_lex(lexer);
-WHILE token.Kind <> lexerKindRightSquare DO
-written_bytes := WriteNBytes(StdOut, ADDRESS(lexer^.Current - lexer^.Start), lexer^.Start);
-token := transpiler_lex(lexer)
-END;
-Write(']');
-token := transpiler_lex(lexer);
-END;
-IF token.Kind = lexerKindHat THEN
-Write('^');
-token := transpiler_lex(lexer)
-END;
-IF token.Kind = lexerKindDot THEN
-Write('.');
-token := transpiler_lex(lexer);
-written_bytes := WriteNBytes(StdOut, ADDRESS(lexer^.Current - lexer^.Start), lexer^.Start);
-token := transpiler_lex(lexer);
-END;
-IF token.Kind = lexerKindHat THEN
-Write('^');
-token := transpiler_lex(lexer)
-END;
-WHILE token.Kind = lexerKindLeftSquare DO
-Write('[');
-token := transpiler_lex(lexer);
-WHILE token.Kind <> lexerKindRightSquare DO
-written_bytes := WriteNBytes(StdOut, ADDRESS(lexer^.Current - lexer^.Start), lexer^.Start);
-token := transpiler_lex(lexer)
-END;
-Write(']');
-token := transpiler_lex(lexer);
-END;
-IF token.Kind = lexerKindAssignment THEN
 transpile_assignment_statement(context, lexer)
 END;
-IF token.Kind = lexerKindLeftParen THEN
+IF token.kind = lexerKindLeftParen THEN
 transpile_call_statement(context, lexer)
 END
 END
 END transpile_statement;
 PROCEDURE transpile_statements(context: PTranspilerContext; lexer: PLexer);
 VAR
-Token: LexerToken;
+token: LexerToken;
 BEGIN
-Token := lexer_current(lexer);
+token := lexer_current(lexer);
-WHILE Token.Kind <> lexerKindEnd DO
+WHILE token.kind <> lexerKindEnd DO
 transpile_statement(context, lexer);
-Token := lexer_current(lexer);
+token := lexer_current(lexer);
-IF Token.Kind = lexerKindSemicolon THEN
+IF token.kind = lexerKindSemicolon THEN
 Write(';')
 END;
 WriteLn()
@@ -578,10 +586,10 @@ BEGIN
 END transpile_statements;
 PROCEDURE transpile_statement_part(context: PTranspilerContext; lexer: PLexer);
 VAR
-Token: LexerToken;
+token: LexerToken;
 BEGIN
-Token := lexer_current(lexer);
+token := lexer_current(lexer);
-IF Token.Kind = lexerKindBegin THEN
+IF token.kind = lexerKindBegin THEN
 WriteString('BEGIN');
 WriteLn();
 transpile_statements(context, lexer)
@@ -589,40 +597,40 @@ BEGIN
 END transpile_statement_part;
 PROCEDURE transpile_procedure_declaration(context: PTranspilerContext; lexer: PLexer);
 VAR
-Token: LexerToken;
+token: LexerToken;
 seen_part: BOOLEAN;
 written_bytes: CARDINAL;
 BEGIN
-Token := transpile_procedure_heading(context, lexer);
+token := transpile_procedure_heading(context, lexer);
 seen_part := transpile_constant_part(context, lexer);
 seen_part := transpile_variable_part(context, lexer);
 transpile_statement_part(context, lexer);
 WriteString('END ');
-written_bytes := WriteNBytes(StdOut, ORD(Token.identifierKind[1]), ADR(Token.identifierKind[2]));
+written_bytes := WriteNBytes(StdOut, ORD(token.identifierKind[1]), ADR(token.identifierKind[2]));
-Token := transpiler_lex(lexer);
+token := transpiler_lex(lexer);
 write_semicolon();
-Token := transpiler_lex(lexer)
+token := transpiler_lex(lexer)
 END transpile_procedure_declaration;
 PROCEDURE transpile_procedure_part(context: PTranspilerContext; lexer: PLexer);
 VAR
-Token: LexerToken;
+token: LexerToken;
 BEGIN
-Token := lexer_current(lexer);
+token := lexer_current(lexer);
-WHILE Token.Kind = lexerKindProc DO
+WHILE token.kind = lexerKindProc DO
 transpile_procedure_declaration(context, lexer);
-Token := lexer_current(lexer);
+token := lexer_current(lexer);
 WriteLn()
 END
 END transpile_procedure_part;
 PROCEDURE transpile(lexer: PLexer);
 VAR
-Token: LexerToken;
+token: LexerToken;
 written_bytes: CARDINAL;
-Context: TranspilerContext;
+context: TranspilerContext;
 BEGIN
-transpile_module(ADR(Context), lexer)
+transpile_module(ADR(context), lexer)
 END transpile;
 END Transpiler.
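
A note on the new lexerKindArrow seen above: transition_action_composite now builds it from '-' followed by '>', and transpile_procedure_heading matches it (where it previously matched lexerKindColon) before emitting ': ' and the return type. Under that reading, a source heading roughly like the first line below would transpile to the Modula-2 heading on the second line; the surface syntax of the input is a guess, since only the 'proc' keyword, the parentheses and the '->' marker are confirmed by this diff:

proc is_eof() -> BOOLEAN;        (* hypothetical input accepted by the lexer *)
PROCEDURE is_eof(): BOOLEAN;     (* roughly what transpile_procedure_heading emits *)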