From 23885e5b951db7c52aab10ea6a671bafdde10035 Mon Sep 17 00:00:00 2001 From: Eugen Wissner Date: Sat, 31 May 2025 11:27:23 +0200 Subject: [PATCH] Add A command line parsing procedure --- source/CommandLine.def | 3 + source/CommandLine.mod | 3 + source/CommandLineInterface.def | 15 + source/CommandLineInterface.mod | 74 +++ source/Common.def | 8 + source/Common.mod | 3 + source/Compiler.mod | 42 +- source/Lexer.def | 5 +- source/Lexer.mod | 44 +- source/Parser.def | 59 +++ source/Parser.mod | 3 + source/Transpiler.def | 14 +- source/Transpiler.mod | 883 ++++++++++++++++++-------------- 13 files changed, 760 insertions(+), 396 deletions(-) create mode 100644 source/CommandLine.def create mode 100644 source/CommandLine.mod create mode 100644 source/CommandLineInterface.def create mode 100644 source/CommandLineInterface.mod create mode 100644 source/Common.def create mode 100644 source/Common.mod create mode 100644 source/Parser.def create mode 100644 source/Parser.mod diff --git a/source/CommandLine.def b/source/CommandLine.def new file mode 100644 index 0000000..37fd7fa --- /dev/null +++ b/source/CommandLine.def @@ -0,0 +1,3 @@ +DEFINITION MODULE CommandLine; + +END CommandLine. diff --git a/source/CommandLine.mod b/source/CommandLine.mod new file mode 100644 index 0000000..215f7c3 --- /dev/null +++ b/source/CommandLine.mod @@ -0,0 +1,3 @@ +MODULE CommandLine; + +END CommandLine. diff --git a/source/CommandLineInterface.def b/source/CommandLineInterface.def new file mode 100644 index 0000000..85be324 --- /dev/null +++ b/source/CommandLineInterface.def @@ -0,0 +1,15 @@ +DEFINITION MODULE CommandLineInterface; + +FROM Common IMPORT ShortString; + +TYPE + CommandLine = RECORD + input: ShortString; + lex: BOOLEAN; + parse: BOOLEAN + END; + PCommandLine = POINTER TO CommandLine; + +PROCEDURE parse_command_line(): PCommandLine; + +END CommandLineInterface. diff --git a/source/CommandLineInterface.mod b/source/CommandLineInterface.mod new file mode 100644 index 0000000..2b33668 --- /dev/null +++ b/source/CommandLineInterface.mod @@ -0,0 +1,74 @@ +IMPLEMENTATION MODULE CommandLineInterface; + +FROM SYSTEM IMPORT ADR, TSIZE; + +FROM Args IMPORT GetArg, Narg; +FROM FIO IMPORT WriteString, WriteChar, WriteLine, StdErr; +FROM Storage IMPORT ALLOCATE; +FROM Strings IMPORT CompareStr, Length; +FROM MemUtils IMPORT MemZero; + +FROM Common IMPORT ShortString; + +PROCEDURE parse_command_line(): PCommandLine; +VAR + parameter: ShortString; + i: CARDINAL; + result: PCommandLine; + parsed: BOOLEAN; +BEGIN + i := 1; + ALLOCATE(result, TSIZE(CommandLine)); + result^.lex := FALSE; + result^.parse := FALSE; + MemZero(ADR(result^.input), 256); + + WHILE (i < Narg()) AND (result <> NIL) DO + parsed := GetArg(parameter, i); + parsed := FALSE; + + IF CompareStr(parameter, '--lex') = 0 THEN + parsed := TRUE; + result^.lex := TRUE + END; + IF CompareStr(parameter, '--parse') = 0 THEN + parsed := TRUE; + result^.parse := TRUE + END; + IF parameter[0] <> '-' THEN + parsed := TRUE; + + IF Length(result^.input) > 0 THEN + WriteString(StdErr, 'Fatal error: only one source file can be compiled at once. 
First given "'); + WriteString(StdErr, result^.input); + WriteString(StdErr, '", then "'); + WriteString(StdErr, parameter); + WriteString(StdErr, '".'); + WriteLine(StdErr); + result := NIL + END; + IF result <> NIL THEN + result^.input := parameter + END + END; + IF parsed = FALSE THEN + WriteString(StdErr, 'Fatal error: unknown command line options: '); + + WriteString(StdErr, parameter); + WriteChar(StdErr, '.'); + WriteLine(StdErr); + + result := NIL + END; + + i := i + 1 + END; + IF (result <> NIL) AND (Length(result^.input) = 0) THEN + WriteString(StdErr, 'Fatal error: no input files.'); + WriteLine(StdErr); + result := NIL + END; + + RETURN result +END parse_command_line; +END CommandLineInterface. diff --git a/source/Common.def b/source/Common.def new file mode 100644 index 0000000..c6f661d --- /dev/null +++ b/source/Common.def @@ -0,0 +1,8 @@ +DEFINITION MODULE Common; + +TYPE + ShortString = ARRAY[0..255] OF CHAR; + Identifier = ARRAY[1..256] OF CHAR; + PIdentifier = POINTER TO Identifier; + +END Common. diff --git a/source/Common.mod b/source/Common.mod new file mode 100644 index 0000000..135d3a9 --- /dev/null +++ b/source/Common.mod @@ -0,0 +1,3 @@ +IMPLEMENTATION MODULE Common; + +END Common. diff --git a/source/Compiler.mod b/source/Compiler.mod index c6b3e09..ed197f6 100644 --- a/source/Compiler.mod +++ b/source/Compiler.mod @@ -1,18 +1,50 @@ MODULE Compiler; -FROM FIO IMPORT StdIn; +FROM FIO IMPORT Close, IsNoError, File, OpenToRead, StdErr, StdOut, WriteLine, WriteString; FROM SYSTEM IMPORT ADR; +FROM M2RTS IMPORT HALT, ExitOnHalt; FROM Lexer IMPORT Lexer, lexer_destroy, lexer_initialize; FROM Transpiler IMPORT transpile; +FROM CommandLineInterface IMPORT PCommandLine, parse_command_line; +VAR + command_line: PCommandLine; + +PROCEDURE compile_from_stream(); VAR lexer: Lexer; - + source_input: File; BEGIN - lexer_initialize(ADR(lexer), StdIn); + source_input := OpenToRead(command_line^.input); - transpile(ADR(lexer)); + IF IsNoError(source_input) = FALSE THEN + WriteString(StdErr, 'Fatal error: failed to read the input file "'); + WriteString(StdErr, command_line^.input); + WriteString(StdErr, '".'); + WriteLine(StdErr); - lexer_destroy(ADR(lexer)) + ExitOnHalt(2) + END; + IF IsNoError(source_input) THEN + lexer_initialize(ADR(lexer), source_input); + + transpile(ADR(lexer), StdOut); + + lexer_destroy(ADR(lexer)); + + Close(source_input) + END +END compile_from_stream; +BEGIN + ExitOnHalt(0); + command_line := parse_command_line(); + + IF command_line <> NIL THEN + compile_from_stream() + END; + IF command_line = NIL THEN + ExitOnHalt(1) + END; + HALT() END Compiler. 
diff --git a/source/Lexer.def b/source/Lexer.def index 0df33cd..7dcf06b 100644 --- a/source/Lexer.def +++ b/source/Lexer.def @@ -2,6 +2,8 @@ DEFINITION MODULE Lexer; FROM FIO IMPORT File; +FROM Common IMPORT Identifier; + TYPE PLexerBuffer = POINTER TO CHAR; Lexer = RECORD @@ -81,7 +83,8 @@ TYPE LexerToken = RECORD CASE kind: LexerKind OF lexerKindBoolean: booleanKind: BOOLEAN | - lexerKindIdentifier: identifierKind: ARRAY[1..256] OF CHAR + lexerKindIdentifier: identifierKind: Identifier | + lexerKindInteger: integerKind: INTEGER END END; PLexerToken = POINTER TO LexerToken; diff --git a/source/Lexer.mod b/source/Lexer.mod index 78eda6b..99fcc4e 100644 --- a/source/Lexer.mod +++ b/source/Lexer.mod @@ -1,8 +1,10 @@ IMPLEMENTATION MODULE Lexer; -FROM FIO IMPORT ReadNBytes; -FROM SYSTEM IMPORT ADR; +FROM FIO IMPORT ReadNBytes, StdErr; +FROM SYSTEM IMPORT ADR, TSIZE; +FROM DynamicStrings IMPORT String, InitStringCharStar, KillString; +FROM StringConvert IMPORT StringToInteger; FROM Storage IMPORT DEALLOCATE, ALLOCATE; FROM Strings IMPORT Length; FROM MemUtils IMPORT MemCopy, MemZero; @@ -210,7 +212,7 @@ BEGIN i := i + 1 END END initialize_classification; -PROCEDURE compare_keyword(Keyword: ARRAY OF CHAR; TokenStart: PLexerBuffer; TokenEnd: PLexerBuffer): BOOLEAN; +PROCEDURE compare_keyword(Keyword: ARRAY OF CHAR; TokenStart: PLexerBuffer; TokenEnd: PLexerBuffer): BOOLEAN; VAR result: BOOLEAN; index: CARDINAL; @@ -227,18 +229,18 @@ BEGIN RETURN result END compare_keyword; (* Reached the end of file. *) -PROCEDURE transition_action_eof(lexer: PLexer; token: PLexerToken); +PROCEDURE transition_action_eof(lexer: PLexer; token: PLexerToken); BEGIN token^.kind := lexerKindEof END transition_action_eof; (* Add the character to the token currently read and advance to the next character. *) -PROCEDURE transition_action_accumulate(lexer: PLexer; token: PLexerToken); +PROCEDURE transition_action_accumulate(lexer: PLexer; token: PLexerToken); BEGIN INC(lexer^.Current) END transition_action_accumulate; (* The current character is not a part of the token. Finish the token already * read. Don't advance to the next character. *) -PROCEDURE transition_action_finalize(lexer: PLexer; token: PLexerToken); +PROCEDURE transition_action_finalize(lexer: PLexer; token: PLexerToken); BEGIN IF lexer^.Start^ = ':' THEN token^.kind := lexerKindColon @@ -260,7 +262,7 @@ BEGIN END END transition_action_finalize; (* An action for tokens containing multiple characters. *) -PROCEDURE transition_action_composite(lexer: PLexer; token: PLexerToken); +PROCEDURE transition_action_composite(lexer: PLexer; token: PLexerToken); BEGIN IF lexer^.Start^ = '<' THEN IF lexer^.Current^ = '>' THEN @@ -285,13 +287,13 @@ BEGIN INC(lexer^.Current) END transition_action_composite; (* Skip a space. *) -PROCEDURE transition_action_skip(lexer: PLexer; token: PLexerToken); +PROCEDURE transition_action_skip(lexer: PLexer; token: PLexerToken); BEGIN INC(lexer^.Current); INC(lexer^.Start) END transition_action_skip; (* Delimited string action. *) -PROCEDURE transition_action_delimited(lexer: PLexer; token: PLexerToken); +PROCEDURE transition_action_delimited(lexer: PLexer; token: PLexerToken); BEGIN IF lexer^.Start^ = '(' THEN token^.kind := lexerKindComment @@ -305,7 +307,7 @@ BEGIN INC(lexer^.Current) END transition_action_delimited; (* Finalize keyword OR identifier. 
*) -PROCEDURE transition_action_key_id(lexer: PLexer; token: PLexerToken); +PROCEDURE transition_action_key_id(lexer: PLexer; token: PLexerToken); BEGIN token^.kind := lexerKindIdentifier; @@ -410,7 +412,7 @@ BEGIN END transition_action_key_id; (* Action for tokens containing only one character. The character cannot be * followed by other characters forming a composite token. *) -PROCEDURE transition_action_single(lexer: PLexer; token: PLexerToken); +PROCEDURE transition_action_single(lexer: PLexer; token: PLexerToken); BEGIN IF lexer^.Current^ = '&' THEN token^.kind := lexerKindAnd @@ -457,11 +459,23 @@ BEGIN INC(lexer^.Current) END transition_action_single; (* Handle an integer literal. *) -PROCEDURE transition_action_integer(lexer: PLexer; token: PLexerToken); +PROCEDURE transition_action_integer(lexer: PLexer; token: PLexerToken); +VAR + buffer: String; + integer_length: CARDINAL; + found: BOOLEAN; BEGIN - token^.kind := lexerKindInteger + token^.kind := lexerKindInteger; + + integer_length := lexer^.Current - lexer^.Start; + MemZero(ADR(token^.identifierKind), TSIZE(Identifier)); + MemCopy(lexer^.Start, integer_length, ADR(token^.identifierKind[1])); + + buffer := InitStringCharStar(ADR(token^.identifierKind[1])); + token^.integerKind := StringToInteger(buffer, 10, found); + buffer := KillString(buffer) END transition_action_integer; -PROCEDURE set_default_transition(CurrentState: TransitionState; DefaultAction: TransitionAction; NextState: TransitionState); +PROCEDURE set_default_transition(CurrentState: TransitionState; DefaultAction: TransitionAction; NextState: TransitionState); VAR DefaultTransition: Transition; BEGIN @@ -744,7 +758,7 @@ BEGIN transitions[ORD(transitionStateDecimalSuffix) + 1][ORD(transitionClassX) + 1].Action := NIL; transitions[ORD(transitionStateDecimalSuffix) + 1][ORD(transitionClassX) + 1].NextState := transitionStateEnd END initialize_transitions; -PROCEDURE lexer_initialize(lexer: PLexer; Input: File); +PROCEDURE lexer_initialize(lexer: PLexer; Input: File); BEGIN lexer^.Input := Input; lexer^.Length := 0; diff --git a/source/Parser.def b/source/Parser.def new file mode 100644 index 0000000..f968125 --- /dev/null +++ b/source/Parser.def @@ -0,0 +1,59 @@ +DEFINITION MODULE Parser; + +FROM Common IMPORT Identifier, PIdentifier; + +TYPE + AstConstantDeclaration = RECORD + END; + PAstConstantDeclaration = POINTER TO AstConstantDeclaration; + PPAstConstantDeclaration = POINTER TO PAstConstantDeclaration; + + AstFieldDeclaration = RECORD + field_name: Identifier; + field_type: PAstTypeExpression + END; + PAstFieldDeclaration = POINTER TO AstFieldDeclaration; + + AstTypeExpressionKind = ( + astTypeExpressionKindNamed, + astTypeExpressionKindRecord, + astTypeExpressionKindEnumeration, + astTypeExpressionKindArray, + astTypeExpressionKindPointer, + astTypeExpressionKindProcedure + ); + AstTypeExpression = RECORD + CASE kind: AstTypeExpressionKind OF + astTypeExpressionKindNamed: name: Identifier | + astTypeExpressionKindEnumeration: cases: PIdentifier | + astTypeExpressionKindPointer: target: PAstTypeExpression | + astTypeExpressionKindRecord: fields: PAstFieldDeclaration | + astTypeExpressionKindArray: + base: PAstTypeExpression; + length: CARDINAL | + astTypeExpressionKindProcedure: parameters: PPAstTypeExpression + END + END; + PAstTypeExpression = POINTER TO AstTypeExpression; + PPAstTypeExpression = POINTER TO PAstTypeExpression; + + AstTypeDeclaration = RECORD + identifier: Identifier; + type_expression: PAstTypeExpression + END; + PAstTypeDeclaration = 
POINTER TO AstTypeDeclaration; + PPAstTypeDeclaration = POINTER TO PAstTypeDeclaration; + + AstVariableDeclaration = RECORD + END; + PAstVariableDeclaration = POINTER TO AstVariableDeclaration; + PPAstVariableDeclaration = POINTER TO PAstVariableDeclaration; + + AstModule = RECORD + constants: PPAstConstantDeclaration; + types: PPAstTypeDeclaration; + variables: PPAstVariableDeclaration + END; + PAstModule = POINTER TO AstModule; + +END Parser. diff --git a/source/Parser.mod b/source/Parser.mod new file mode 100644 index 0000000..e40cc4a --- /dev/null +++ b/source/Parser.mod @@ -0,0 +1,3 @@ +IMPLEMENTATION MODULE Parser; + +END Parser. diff --git a/source/Transpiler.def b/source/Transpiler.def index c3cd8a7..555a960 100644 --- a/source/Transpiler.def +++ b/source/Transpiler.def @@ -1,7 +1,17 @@ DEFINITION MODULE Transpiler; -FROM Lexer IMPORT PLexer; +FROM FIO IMPORT File; -PROCEDURE transpile(ALexer: PLexer); +FROM Lexer IMPORT PLexer, Lexer; + +TYPE + TranspilerContext = RECORD + indentation: CARDINAL; + output: File; + lexer: PLexer + END; + PTranspilerContext = POINTER TO TranspilerContext; + +PROCEDURE transpile(lexer: PLexer; output: File); END Transpiler. diff --git a/source/Transpiler.mod b/source/Transpiler.mod index 63ce824..2f3dfea 100644 --- a/source/Transpiler.mod +++ b/source/Transpiler.mod @@ -1,16 +1,19 @@ IMPLEMENTATION MODULE Transpiler; -FROM FIO IMPORT WriteNBytes, StdOut; -FROM SYSTEM IMPORT ADR, ADDRESS; +FROM FIO IMPORT WriteNBytes, WriteLine, WriteChar, WriteString; +FROM SYSTEM IMPORT ADR, ADDRESS, TSIZE; -FROM Terminal IMPORT Write, WriteLn, WriteString; +FROM NumberIO IMPORT IntToStr; +FROM Storage IMPORT ALLOCATE, REALLOCATE; +FROM MemUtils IMPORT MemCopy, MemZero; + +FROM Common IMPORT Identifier, PIdentifier; FROM Lexer IMPORT Lexer, LexerToken, lexer_current, lexer_lex, LexerKind; - -TYPE - PTranspilerContext = POINTER TO TranspilerContext; - TranspilerContext = RECORD - Indentation: CARDINAL - END; +FROM Parser IMPORT AstModule, PAstModule, AstTypeExpressionKind, + AstConstantDeclaration, PPAstConstantDeclaration, + AstTypeDeclaration, PAstTypeDeclaration, PPAstTypeDeclaration, + AstVariableDeclaration, PPAstVariableDeclaration, + PAstTypeExpression, AstTypeExpression, PPAstTypeExpression, AstFieldDeclaration, PAstFieldDeclaration; (* Calls lexer_lex() but skips the comments. *) PROCEDURE transpiler_lex(lexer: PLexer): LexerToken; @@ -26,611 +29,745 @@ BEGIN RETURN result END transpiler_lex; (* Write a semicolon followed by a newline. 
*) -PROCEDURE write_semicolon(); +PROCEDURE write_semicolon(output: File); BEGIN - WriteString(';'); - WriteLn() + WriteChar(output, ';'); + WriteLine(output) END write_semicolon; -PROCEDURE transpile_import(context: PTranspilerContext; lexer: PLexer); +PROCEDURE write_current(lexer: PLexer; output: File); VAR - token: LexerToken; written_bytes: CARDINAL; BEGIN - WriteString('FROM '); - token := transpiler_lex(lexer); - - written_bytes := WriteNBytes(StdOut, ADDRESS(lexer^.Current - lexer^.Start), lexer^.Start); - - token := transpiler_lex(lexer); - WriteString(' IMPORT '); - - token := transpiler_lex(lexer); - written_bytes := WriteNBytes(StdOut, ADDRESS(lexer^.Current - lexer^.Start), lexer^.Start); - - token := transpiler_lex(lexer); - WHILE token.kind <> lexerKindSemicolon DO - WriteString(', '); - token := transpiler_lex(lexer); - written_bytes := WriteNBytes(StdOut, ADDRESS(lexer^.Current - lexer^.Start), lexer^.Start); - token := transpiler_lex(lexer) - END; - write_semicolon(); - token := transpiler_lex(lexer) -END transpile_import; -PROCEDURE transpile_import_part(context: PTranspilerContext; lexer: PLexer); + written_bytes := WriteNBytes(output, ADDRESS(lexer^.Current - lexer^.Start), lexer^.Start) +END write_current; +PROCEDURE transpile_import(context: PTranspilerContext); VAR token: LexerToken; BEGIN - token := lexer_current(lexer); + WriteString(context^.output, 'FROM '); + token := transpiler_lex(context^.lexer); + + write_current(context^.lexer, context^.output); + token := transpiler_lex(context^.lexer); + WriteString(context^.output, ' IMPORT '); + + token := transpiler_lex(context^.lexer); + write_current(context^.lexer, context^.output); + + token := transpiler_lex(context^.lexer); + WHILE token.kind <> lexerKindSemicolon DO + WriteString(context^.output, ', '); + token := transpiler_lex(context^.lexer); + write_current(context^.lexer, context^.output); + token := transpiler_lex(context^.lexer) + END; + write_semicolon(context^.output); + token := transpiler_lex(context^.lexer) +END transpile_import; +PROCEDURE transpile_import_part(context: PTranspilerContext); +VAR + token: LexerToken; +BEGIN + token := lexer_current(context^.lexer); WHILE token.kind = lexerKindFrom DO - transpile_import(context, lexer); - token := lexer_current(lexer) + transpile_import(context); + token := lexer_current(context^.lexer) END; - WriteLn() + WriteLine(context^.output) END transpile_import_part; -PROCEDURE transpile_constant(context: PTranspilerContext; lexer: PLexer); +PROCEDURE transpile_constant(context: PTranspilerContext); VAR token: LexerToken; - written_bytes: CARDINAL; BEGIN - WriteString(' '); - token := lexer_current(lexer); - written_bytes := WriteNBytes(StdOut, ADDRESS(lexer^.Current - lexer^.Start), lexer^.Start); + WriteString(context^.output, ' '); + token := lexer_current(context^.lexer); + write_current(context^.lexer, context^.output); - token := transpiler_lex(lexer); - WriteString(' = '); + token := transpiler_lex(context^.lexer); + WriteString(context^.output, ' = '); - token := transpiler_lex(lexer); - written_bytes := WriteNBytes(StdOut, ADDRESS(lexer^.Current - lexer^.Start), lexer^.Start); + token := transpiler_lex(context^.lexer); + write_current(context^.lexer, context^.output); - token := transpiler_lex(lexer); - write_semicolon() + token := transpiler_lex(context^.lexer); + write_semicolon(context^.output) END transpile_constant; -PROCEDURE transpile_constant_part(context: PTranspilerContext; lexer: PLexer): BOOLEAN; +PROCEDURE 
transpile_constant_part(context: PTranspilerContext): PPAstConstantDeclaration; VAR token: LexerToken; - result: BOOLEAN; BEGIN - token := lexer_current(lexer); - result := token.kind = lexerKindConst; + token := lexer_current(context^.lexer); - IF result THEN - WriteString('CONST'); - WriteLn(); - token := transpiler_lex(lexer); + IF token.kind = lexerKindConst THEN + WriteString(context^.output, 'CONST'); + WriteLine(context^.output); + token := transpiler_lex(context^.lexer); WHILE token.kind = lexerKindIdentifier DO - transpile_constant(context, lexer); - token := transpiler_lex(lexer) + transpile_constant(context); + token := transpiler_lex(context^.lexer) END END; - RETURN result + RETURN NIL END transpile_constant_part; -PROCEDURE transpile_module(context: PTranspilerContext; lexer: PLexer); +PROCEDURE transpile_module(context: PTranspilerContext): PAstModule; VAR token: LexerToken; - written_bytes: CARDINAL; + result: PAstModule; BEGIN - token := transpiler_lex(lexer); + ALLOCATE(result, TSIZE(AstModule)); + token := transpiler_lex(context^.lexer); IF token.kind = lexerKindDefinition THEN - WriteString('DEFINITION '); - token := transpiler_lex(lexer) + WriteString(context^.output, 'DEFINITION '); + token := transpiler_lex(context^.lexer) END; IF token.kind = lexerKindImplementation THEN - WriteString('IMPLEMENTATION '); - token := transpiler_lex(lexer) + WriteString(context^.output, 'IMPLEMENTATION '); + token := transpiler_lex(context^.lexer) END; - WriteString('MODULE '); + WriteString(context^.output, 'MODULE '); (* Write the module name and end the line with a semicolon and newline. *) - token := transpiler_lex(lexer); - written_bytes := WriteNBytes(StdOut, ADDRESS(lexer^.Current - lexer^.Start), lexer^.Start); + token := transpiler_lex(context^.lexer); + write_current(context^.lexer, context^.output); - token := transpiler_lex(lexer); - write_semicolon(); - WriteLn(); + token := transpiler_lex(context^.lexer); + write_semicolon(context^.output); + WriteLine(context^.output); (* Write the module body. 
*) - token := transpiler_lex(lexer); - transpile_import_part(context, lexer); - IF transpile_constant_part(context, lexer) THEN - WriteLn() - END; - transpile_type_part(context, lexer); - IF transpile_variable_part(context, lexer) THEN - WriteLn() - END; - transpile_procedure_part(context, lexer); - transpile_statement_part(context, lexer); + token := transpiler_lex(context^.lexer); + transpile_import_part(context); - WriteString('END '); + result^.constants := transpile_constant_part(context); + result^.types := transpile_type_part(context); + result^.variables := transpile_variable_part(context); - token := transpiler_lex(lexer); - written_bytes := WriteNBytes(StdOut, ADDRESS(lexer^.Current - lexer^.Start), lexer^.Start); + transpile_procedure_part(context); + transpile_statement_part(context); - token := transpiler_lex(lexer); - Write('.'); + WriteString(context^.output, 'END '); - token := transpiler_lex(lexer); - WriteLn() + token := transpiler_lex(context^.lexer); + write_current(context^.lexer, context^.output); + + token := transpiler_lex(context^.lexer); + WriteChar(context^.output, '.'); + + token := transpiler_lex(context^.lexer); + WriteLine(context^.output); + + RETURN result END transpile_module; -PROCEDURE transpile_type_fields(context: PTranspilerContext; lexer: PLexer); +PROCEDURE transpile_type_fields(context: PTranspilerContext): PAstFieldDeclaration; VAR token: LexerToken; - written_bytes: CARDINAL; + field_declarations: PAstFieldDeclaration; + field_count: CARDINAL; + current_field: PAstFieldDeclaration; BEGIN - token := transpiler_lex(lexer); + ALLOCATE(field_declarations, TSIZE(AstFieldDeclaration)); + token := transpiler_lex(context^.lexer); + field_count := 0; WHILE token.kind <> lexerKindEnd DO - WriteString(' '); - written_bytes := WriteNBytes(StdOut, ADDRESS(lexer^.Current - lexer^.Start), lexer^.Start); - token := transpiler_lex(lexer); - WriteString(': '); - transpile_type_expression(context, lexer); - token := transpiler_lex(lexer); + INC(field_count); + REALLOCATE(field_declarations, TSIZE(AstFieldDeclaration) * (field_count + 1)); + current_field := field_declarations; + INC(current_field , TSIZE(AstFieldDeclaration) * (field_count - 1)); + + WriteString(context^.output, ' '); + write_current(context^.lexer, context^.output); + token := transpiler_lex(context^.lexer); + + current_field^.field_name := token.identifierKind; + + WriteString(context^.output, ': '); + token := transpiler_lex(context^.lexer); + current_field^.field_type := transpile_type_expression(context); + token := transpiler_lex(context^.lexer); IF token.kind = lexerKindSemicolon THEN - token := transpiler_lex(lexer); - Write(';') + token := transpiler_lex(context^.lexer); + WriteChar(context^.output, ';') END; - WriteLn() - END -END transpile_type_fields; -PROCEDURE transpile_record_type(context: PTranspilerContext; lexer: PLexer); -VAR - token: LexerToken; -BEGIN - WriteString('RECORD'); - WriteLn(); - transpile_type_fields(context, lexer); - WriteString(' END') -END transpile_record_type; -PROCEDURE transpile_pointer_type(context: PTranspilerContext; lexer: PLexer); -VAR - token: LexerToken; - written_bytes: CARDINAL; -BEGIN - token := lexer_current(lexer); - WriteString('POINTER TO '); - IF token.kind = lexerKindPointer THEN - token := transpiler_lex(lexer) + WriteLine(context^.output) END; - transpile_type_expression(context, lexer) -END transpile_pointer_type; -PROCEDURE transpile_array_type(context: PTranspilerContext; lexer: PLexer); + INC(current_field, TSIZE(AstFieldDeclaration)); 
+ MemZero(current_field, TSIZE(AstFieldDeclaration)); + RETURN field_declarations +END transpile_type_fields; +PROCEDURE transpile_record_type(context: PTranspilerContext): PAstTypeExpression; +VAR + result: PAstTypeExpression; +BEGIN + ALLOCATE(result, TSIZE(AstTypeExpression)); + result^.kind := astTypeExpressionKindRecord; + + WriteString(context^.output, 'RECORD'); + WriteLine(context^.output); + result^.fields := transpile_type_fields(context); + WriteString(context^.output, ' END'); + + RETURN result +END transpile_record_type; +PROCEDURE transpile_pointer_type(context: PTranspilerContext): PAstTypeExpression; VAR token: LexerToken; - written_bytes: CARDINAL; + result: PAstTypeExpression; BEGIN - WriteString('ARRAY'); - token := lexer_current(lexer); + ALLOCATE(result, TSIZE(AstTypeExpression)); + result^.kind := astTypeExpressionKindPointer; + + token := lexer_current(context^.lexer); + WriteString(context^.output, 'POINTER TO '); + IF token.kind = lexerKindPointer THEN + token := transpiler_lex(context^.lexer) + END; + token := lexer_current(context^.lexer); + result^.target := transpile_type_expression(context); + + RETURN result +END transpile_pointer_type; +PROCEDURE transpile_array_type(context: PTranspilerContext): PAstTypeExpression; +VAR + token: LexerToken; + buffer: ARRAY[1..20] OF CHAR; + result: PAstTypeExpression; +BEGIN + ALLOCATE(result, TSIZE(AstTypeExpression)); + result^.kind := astTypeExpressionKindArray; + + WriteString(context^.output, 'ARRAY'); + token := lexer_current(context^.lexer); IF token.kind = lexerKindArray THEN - token := transpiler_lex(lexer) + token := transpiler_lex(context^.lexer) END; IF token.kind <> lexerKindOf THEN - WriteString('[1..'); - token := transpiler_lex(lexer); - written_bytes := WriteNBytes(StdOut, ADDRESS(lexer^.Current - lexer^.Start), lexer^.Start); - token := transpiler_lex(lexer); - Write(']') + WriteString(context^.output, '[1..'); + token := transpiler_lex(context^.lexer); + + result^.length := token.integerKind; + IntToStr(result^.length, 0, buffer); + WriteString(context^.output, buffer); + + token := transpiler_lex(context^.lexer); + WriteChar(context^.output, ']') END; - WriteString(' OF '); - transpile_type_expression(context, lexer) + WriteString(context^.output, ' OF '); + + token := transpiler_lex(context^.lexer); + result^.base := transpile_type_expression(context); + + RETURN result END transpile_array_type; -PROCEDURE transpile_enumeration_type(context: PTranspilerContext; lexer: PLexer); +PROCEDURE transpile_enumeration_type(context: PTranspilerContext): PAstTypeExpression; VAR token: LexerToken; + result: PAstTypeExpression; + current_case: PIdentifier; + case_count: CARDINAL; written_bytes: CARDINAL; BEGIN - WriteString('('); - WriteLn(); - WriteString(' '); + ALLOCATE(result, TSIZE(AstTypeExpression)); + result^.kind := astTypeExpressionKindEnumeration; - token := transpiler_lex(lexer); - written_bytes := WriteNBytes(StdOut, ADDRESS(lexer^.Current - lexer^.Start), lexer^.Start); + case_count := 1; + ALLOCATE(result^.cases, TSIZE(Identifier) * 2); + token := transpiler_lex(context^.lexer); + current_case := result^.cases; + current_case^ := token.identifierKind; - token := transpiler_lex(lexer); + token := transpiler_lex(context^.lexer); WHILE token.kind = lexerKindComma DO - Write(','); - WriteLn(); - WriteString(' '); - token := transpiler_lex(lexer); - written_bytes := WriteNBytes(StdOut, ADDRESS(lexer^.Current - lexer^.Start), lexer^.Start); + token := transpiler_lex(context^.lexer); - token := 
transpiler_lex(lexer) + INC(case_count); + REALLOCATE(result^.cases, TSIZE(Identifier) * (case_count + 1)); + current_case := result^.cases; + INC(current_case, TSIZE(Identifier) * (case_count - 1)); + current_case^ := token.identifierKind; + + token := transpiler_lex(context^.lexer) END; - WriteLn(); - WriteString(' )') + INC(current_case, TSIZE(Identifier)); + MemZero(current_case, TSIZE(Identifier)); + + (* Write the cases using the generated identifier list before. *) + current_case := result^.cases; + + WriteString(context^.output, '('); + WriteLine(context^.output); + WriteString(context^.output, ' '); + written_bytes := WriteNBytes(context^.output, ORD(current_case^[1]), ADR(current_case^[2])); + INC(current_case, TSIZE(Identifier)); + + WHILE ORD(current_case^[1]) <> 0 DO + WriteChar(context^.output, ','); + WriteLine(context^.output); + WriteString(context^.output, ' '); + written_bytes := WriteNBytes(context^.output, ORD(current_case^[1]), ADR(current_case^[2])); + + INC(current_case, TSIZE(Identifier)) + END; + WriteLine(context^.output); + WriteString(context^.output, ' )'); + + RETURN result END transpile_enumeration_type; -PROCEDURE transpile_union_type(context: PTranspilerContext; lexer: PLexer); -VAR - token: LexerToken; -END transpile_union_type; -PROCEDURE transpile_procedure_type(context: PTranspilerContext; lexer: PLexer); +PROCEDURE transpile_named_type(context: PTranspilerContext): PAstTypeExpression; VAR token: LexerToken; + result: PAstTypeExpression; written_bytes: CARDINAL; BEGIN - token := transpiler_lex(lexer); - WriteString('PROCEDURE('); + token := lexer_current(context^.lexer); + ALLOCATE(result, TSIZE(AstTypeExpression)); - token := transpiler_lex(lexer); + result^.kind := astTypeExpressionKindNamed; + result^.name := token.identifierKind; + + written_bytes := WriteNBytes(context^.output, ORD(result^.name[1]), ADR(result^.name[2])); + + RETURN result +END transpile_named_type; +PROCEDURE transpile_procedure_type(context: PTranspilerContext): PAstTypeExpression; +VAR + token: LexerToken; + result: PAstTypeExpression; + current_parameter: PPAstTypeExpression; + parameter_count: CARDINAL; +BEGIN + parameter_count := 0; + ALLOCATE(result, TSIZE(AstTypeExpression)); + result^.kind := astTypeExpressionKindProcedure; + + ALLOCATE(result^.parameters, 1); + + token := transpiler_lex(context^.lexer); + WriteString(context^.output, 'PROCEDURE('); + + token := transpiler_lex(context^.lexer); WHILE token.kind <> lexerKindRightParen DO - written_bytes := WriteNBytes(StdOut, ADDRESS(lexer^.Current - lexer^.Start), lexer^.Start); + INC(parameter_count); + REALLOCATE(result^.parameters, TSIZE(PAstTypeExpression) * (parameter_count + 1)); + current_parameter := result^.parameters; + INC(current_parameter, TSIZE(PAstTypeExpression) * (parameter_count - 1)); - token := transpiler_lex(lexer); + current_parameter^ := transpile_type_expression(context); + + token := transpiler_lex(context^.lexer); IF token.kind = lexerKindComma THEN - token := transpiler_lex(lexer); - WriteString(', ') + token := transpiler_lex(context^.lexer); + WriteString(context^.output, ', ') END END; - Write(')') + current_parameter := result^.parameters; + INC(current_parameter, TSIZE(PAstTypeExpression) * parameter_count); + current_parameter^ := NIL; + WriteChar(context^.output, ')'); + + RETURN result END transpile_procedure_type; -PROCEDURE transpile_type_expression(context: PTranspilerContext; lexer: PLexer); +PROCEDURE transpile_type_expression(context: PTranspilerContext): PAstTypeExpression; VAR 
token: LexerToken; - written_bytes: CARDINAL; + result: PAstTypeExpression; BEGIN - token := transpiler_lex(lexer); + result := NIL; + token := lexer_current(context^.lexer); + IF token.kind = lexerKindRecord THEN - transpile_record_type(context, lexer) + result := transpile_record_type(context) END; IF token.kind = lexerKindLeftParen THEN - transpile_enumeration_type(context, lexer) + result := transpile_enumeration_type(context) END; IF (token.kind = lexerKindArray) OR (token.kind = lexerKindLeftSquare) THEN - transpile_array_type(context, lexer) + result := transpile_array_type(context) END; IF token.kind = lexerKindHat THEN - transpile_pointer_type(context, lexer) + result := transpile_pointer_type(context) END; IF token.kind = lexerKindProc THEN - transpile_procedure_type(context, lexer) + result := transpile_procedure_type(context) END; IF token.kind = lexerKindIdentifier THEN - written_bytes := WriteNBytes(StdOut, ADDRESS(lexer^.Current - lexer^.Start), lexer^.Start) - END -END transpile_type_expression; -PROCEDURE transpile_type_declaration(context: PTranspilerContext; lexer: PLexer); -VAR - token: LexerToken; - written_bytes: CARDINAL; -BEGIN - WriteString(' '); - token := lexer_current(lexer); - written_bytes := WriteNBytes(StdOut, ADDRESS(lexer^.Current - lexer^.Start), lexer^.Start); - - token := transpiler_lex(lexer); - WriteString(' = '); - transpile_type_expression(context, lexer); - token := transpiler_lex(lexer); - write_semicolon(); -END transpile_type_declaration; -PROCEDURE transpile_type_part(context: PTranspilerContext; lexer: PLexer); -VAR - token: LexerToken; -BEGIN - token := lexer_current(lexer); - - IF token.kind = lexerKindType THEN - WriteString('TYPE'); - WriteLn(); - token := transpiler_lex(lexer); - - WHILE token.kind = lexerKindIdentifier DO - transpile_type_declaration(context, lexer); - token := transpiler_lex(lexer) - END; - WriteLn() - END -END transpile_type_part; -PROCEDURE transpile_variable_declaration(context: PTranspilerContext; lexer: PLexer); -VAR - token: LexerToken; - written_bytes: CARDINAL; -BEGIN - WriteString(' '); - token := lexer_current(lexer); - written_bytes := WriteNBytes(StdOut, ADDRESS(lexer^.Current - lexer^.Start), lexer^.Start); - - token := transpiler_lex(lexer); - WriteString(': '); - transpile_type_expression(context, lexer); - token := transpiler_lex(lexer); - write_semicolon() -END transpile_variable_declaration; -PROCEDURE transpile_variable_part(context: PTranspilerContext; lexer: PLexer): BOOLEAN; -VAR - token: LexerToken; - result: BOOLEAN; -BEGIN - token := lexer_current(lexer); - result := token.kind = lexerKindVar; - - IF result THEN - WriteString('VAR'); - WriteLn(); - token := transpiler_lex(lexer); - - WHILE token.kind = lexerKindIdentifier DO - transpile_variable_declaration(context, lexer); - token := transpiler_lex(lexer) - END + result := transpile_named_type(context) END; RETURN result +END transpile_type_expression; +PROCEDURE transpile_type_declaration(context: PTranspilerContext): PAstTypeDeclaration; +VAR + token: LexerToken; + result: PAstTypeDeclaration; + written_bytes: CARDINAL; +BEGIN + WriteString(context^.output, ' '); + token := lexer_current(context^.lexer); + + ALLOCATE(result, TSIZE(AstTypeDeclaration)); + result^.identifier := token.identifierKind; + + written_bytes := WriteNBytes(context^.output, ORD(token.identifierKind[1]), ADR(token.identifierKind[2])); + + token := transpiler_lex(context^.lexer); + WriteString(context^.output, ' = '); + token := transpiler_lex(context^.lexer); + 
result^.type_expression := transpile_type_expression(context); + token := transpiler_lex(context^.lexer); + write_semicolon(context^.output); + + RETURN result +END transpile_type_declaration; +PROCEDURE transpile_type_part(context: PTranspilerContext): PPAstTypeDeclaration; +VAR + token: LexerToken; + result: PPAstTypeDeclaration; + current_declaration: PPAstTypeDeclaration; + declaration_count: CARDINAL; +BEGIN + token := lexer_current(context^.lexer); + + ALLOCATE(result, TSIZE(PAstTypeDeclaration)); + current_declaration := result; + declaration_count := 0; + + IF token.kind = lexerKindType THEN + WriteString(context^.output, 'TYPE'); + WriteLine(context^.output); + token := transpiler_lex(context^.lexer); + + WHILE token.kind = lexerKindIdentifier DO + INC(declaration_count); + + REALLOCATE(result, TSIZE(PAstTypeDeclaration) * (declaration_count + 1)); + current_declaration := result; + INC(current_declaration, TSIZE(PAstTypeDeclaration) * (declaration_count - 1)); + + current_declaration^ := transpile_type_declaration(context); + token := transpiler_lex(context^.lexer) + END; + WriteLine(context^.output) + END; + IF declaration_count <> 0 THEN + INC(current_declaration, TSIZE(PAstTypeDeclaration)) + END; + current_declaration^ := NIL; + RETURN result +END transpile_type_part; +PROCEDURE transpile_variable_declaration(context: PTranspilerContext); +VAR + token: LexerToken; + type_expression: PAstTypeExpression; +BEGIN + WriteString(context^.output, ' '); + token := lexer_current(context^.lexer); + write_current(context^.lexer, context^.output); + + token := transpiler_lex(context^.lexer); + WriteString(context^.output, ': '); + token := transpiler_lex(context^.lexer); + type_expression := transpile_type_expression(context); + token := transpiler_lex(context^.lexer); + write_semicolon(context^.output) +END transpile_variable_declaration; +PROCEDURE transpile_variable_part(context: PTranspilerContext): PPAstVariableDeclaration; +VAR + token: LexerToken; +BEGIN + token := lexer_current(context^.lexer); + + IF token.kind = lexerKindVar THEN + WriteString(context^.output, 'VAR'); + WriteLine(context^.output); + token := transpiler_lex(context^.lexer); + + WHILE token.kind = lexerKindIdentifier DO + transpile_variable_declaration(context); + token := transpiler_lex(context^.lexer) + END + END; + RETURN NIL END transpile_variable_part; -PROCEDURE transpile_procedure_heading(context: PTranspilerContext; lexer: PLexer): LexerToken; +PROCEDURE transpile_procedure_heading(context: PTranspilerContext): LexerToken; VAR token: LexerToken; result: LexerToken; - written_bytes: CARDINAL; + type_expression: PAstTypeExpression; BEGIN - WriteString('PROCEDURE '); + WriteString(context^.output, 'PROCEDURE '); - result := transpiler_lex(lexer); - written_bytes := WriteNBytes(StdOut, ADDRESS(lexer^.Current - lexer^.Start), lexer^.Start); + result := transpiler_lex(context^.lexer); + write_current(context^.lexer, context^.output); - token := transpiler_lex(lexer); - Write('('); + token := transpiler_lex(context^.lexer); + WriteChar(context^.output, '('); - token := transpiler_lex(lexer); + token := transpiler_lex(context^.lexer); WHILE token.kind <> lexerKindRightParen DO - written_bytes := WriteNBytes(StdOut, ADDRESS(lexer^.Current - lexer^.Start), lexer^.Start); + write_current(context^.lexer, context^.output); - token := transpiler_lex(lexer); - WriteString(': '); + token := transpiler_lex(context^.lexer); + WriteString(context^.output, ': '); + token := transpiler_lex(context^.lexer); - 
transpile_type_expression(context, lexer); + type_expression := transpile_type_expression(context); - token := transpiler_lex(lexer); + token := transpiler_lex(context^.lexer); IF (token.kind = lexerKindSemicolon) OR (token.kind = lexerKindComma) THEN - WriteString('; '); - token := transpiler_lex(lexer) + WriteString(context^.output, '; '); + token := transpiler_lex(context^.lexer) END END; - WriteString(')'); - token := transpiler_lex(lexer); + WriteString(context^.output, ')'); + token := transpiler_lex(context^.lexer); (* Check for the return type and write it. *) IF token.kind = lexerKindArrow THEN - WriteString(': '); - token := transpiler_lex(lexer); - written_bytes := WriteNBytes(StdOut, ADDRESS(lexer^.Current - lexer^.Start), lexer^.Start); - token := transpiler_lex(lexer) + WriteString(context^.output, ': '); + token := transpiler_lex(context^.lexer); + write_current(context^.lexer, context^.output); + token := transpiler_lex(context^.lexer) END; - token := transpiler_lex(lexer); - write_semicolon(); + token := transpiler_lex(context^.lexer); + write_semicolon(context^.output); RETURN result END transpile_procedure_heading; -PROCEDURE transpile_expression(context: PTranspilerContext; lexer: PLexer; trailing_token: LexerKind); +PROCEDURE transpile_expression(context: PTranspilerContext; trailing_token: LexerKind); VAR token: LexerToken; written_bytes: CARDINAL; BEGIN - token := transpiler_lex(lexer); + token := transpiler_lex(context^.lexer); WHILE (token.kind <> trailing_token) AND (token.kind <> lexerKindEnd) DO written_bytes := 0; IF token.kind = lexerKindNull THEN - WriteString('NIL '); + WriteString(context^.output, 'NIL '); + written_bytes := 1 + END; + IF (token.kind = lexerKindBoolean) AND token.booleanKind THEN + WriteString(context^.output, 'TRUE '); + written_bytes := 1 + END; + IF (token.kind = lexerKindBoolean) AND (~token.booleanKind) THEN + WriteString(context^.output, 'FALSE '); written_bytes := 1 END; IF token.kind = lexerKindOr THEN - WriteString('OR '); + WriteString(context^.output, 'OR '); written_bytes := 1 END; IF token.kind = lexerKindAnd THEN - WriteString('AND '); + WriteString(context^.output, 'AND '); written_bytes := 1 END; IF token.kind = lexerKindNot THEN - WriteString('NOT '); + WriteString(context^.output, 'NOT '); written_bytes := 1 END; IF written_bytes = 0 THEN - written_bytes := WriteNBytes(StdOut, ADDRESS(lexer^.Current - lexer^.Start), lexer^.Start); - Write(' ') + write_current(context^.lexer, context^.output); + WriteChar(context^.output, ' ') END; - token := transpiler_lex(lexer) + token := transpiler_lex(context^.lexer) END END transpile_expression; -PROCEDURE transpile_if_statement(context: PTranspilerContext; lexer: PLexer); +PROCEDURE transpile_if_statement(context: PTranspilerContext); VAR token: LexerToken; - written_bytes: CARDINAL; BEGIN - WriteString(' IF '); - transpile_expression(context, lexer, lexerKindThen); + WriteString(context^.output, ' IF '); + transpile_expression(context, lexerKindThen); - WriteString('THEN'); - WriteLn(); - transpile_statements(context, lexer); - WriteString(' END'); - token := transpiler_lex(lexer) + WriteString(context^.output, 'THEN'); + WriteLine(context^.output); + transpile_statements(context); + WriteString(context^.output, ' END'); + token := transpiler_lex(context^.lexer) END transpile_if_statement; -PROCEDURE transpile_while_statement(context: PTranspilerContext; lexer: PLexer); +PROCEDURE transpile_while_statement(context: PTranspilerContext); VAR token: LexerToken; - written_bytes: 
CARDINAL; BEGIN - WriteString(' WHILE '); - transpile_expression(context, lexer, lexerKindDo); + WriteString(context^.output, ' WHILE '); + transpile_expression(context, lexerKindDo); - WriteString('DO'); - WriteLn(); - transpile_statements(context, lexer); - WriteString(' END'); - token := transpiler_lex(lexer) + WriteString(context^.output, 'DO'); + WriteLine(context^.output); + transpile_statements(context); + WriteString(context^.output, ' END'); + token := transpiler_lex(context^.lexer) END transpile_while_statement; -PROCEDURE transpile_assignment_statement(context: PTranspilerContext; lexer: PLexer); +PROCEDURE transpile_assignment_statement(context: PTranspilerContext); BEGIN - WriteString(' := '); - transpile_expression(context, lexer, lexerKindSemicolon); + WriteString(context^.output, ' := '); + transpile_expression(context, lexerKindSemicolon); END transpile_assignment_statement; -PROCEDURE transpile_call_statement(context: PTranspilerContext; lexer: PLexer); +PROCEDURE transpile_call_statement(context: PTranspilerContext); VAR token: LexerToken; - written_bytes: CARDINAL; BEGIN - WriteString('('); - token := transpiler_lex(lexer); + WriteString(context^.output, '('); + token := transpiler_lex(context^.lexer); WHILE (token.kind <> lexerKindSemicolon) AND (token.kind <> lexerKindEnd) DO - written_bytes := WriteNBytes(StdOut, ADDRESS(lexer^.Current - lexer^.Start), lexer^.Start); - token := transpiler_lex(lexer) + write_current(context^.lexer, context^.output); + token := transpiler_lex(context^.lexer) END END transpile_call_statement; -PROCEDURE transpile_designator_expression(context: PTranspilerContext; lexer: PLexer); +PROCEDURE transpile_designator_expression(context: PTranspilerContext); VAR token: LexerToken; - written_bytes: CARDINAL; BEGIN - WriteString(' '); - written_bytes := WriteNBytes(StdOut, ADDRESS(lexer^.Current - lexer^.Start), lexer^.Start); - token := transpiler_lex(lexer); + WriteString(context^.output, ' '); + write_current(context^.lexer, context^.output); + token := transpiler_lex(context^.lexer); WHILE token.kind = lexerKindLeftSquare DO - Write('['); - token := transpiler_lex(lexer); + WriteChar(context^.output, '['); + token := transpiler_lex(context^.lexer); WHILE token.kind <> lexerKindRightSquare DO - written_bytes := WriteNBytes(StdOut, ADDRESS(lexer^.Current - lexer^.Start), lexer^.Start); - token := transpiler_lex(lexer) + write_current(context^.lexer, context^.output); + token := transpiler_lex(context^.lexer) END; - Write(']'); - token := transpiler_lex(lexer) + WriteChar(context^.output, ']'); + token := transpiler_lex(context^.lexer) END; IF token.kind = lexerKindHat THEN - Write('^'); - token := transpiler_lex(lexer) + WriteChar(context^.output, '^'); + token := transpiler_lex(context^.lexer) END; IF token.kind = lexerKindDot THEN - Write('.'); - token := transpiler_lex(lexer); - written_bytes := WriteNBytes(StdOut, ADDRESS(lexer^.Current - lexer^.Start), lexer^.Start); - token := transpiler_lex(lexer) + WriteChar(context^.output, '.'); + token := transpiler_lex(context^.lexer); + write_current(context^.lexer, context^.output); + token := transpiler_lex(context^.lexer) END; IF token.kind = lexerKindHat THEN - Write('^'); - token := transpiler_lex(lexer) + WriteChar(context^.output, '^'); + token := transpiler_lex(context^.lexer) END; WHILE token.kind = lexerKindLeftSquare DO - Write('['); - token := transpiler_lex(lexer); + WriteChar(context^.output, '['); + token := transpiler_lex(context^.lexer); WHILE token.kind <> lexerKindRightSquare DO 
- written_bytes := WriteNBytes(StdOut, ADDRESS(lexer^.Current - lexer^.Start), lexer^.Start); - token := transpiler_lex(lexer) + write_current(context^.lexer, context^.output); + token := transpiler_lex(context^.lexer) END; - Write(']'); - token := transpiler_lex(lexer) + WriteChar(context^.output, ']'); + token := transpiler_lex(context^.lexer) END END transpile_designator_expression; -PROCEDURE transpile_return_statement(context: PTranspilerContext; lexer: PLexer); +PROCEDURE transpile_return_statement(context: PTranspilerContext); VAR token: LexerToken; - written_bytes: CARDINAL; BEGIN - WriteString(' RETURN '); - token := transpiler_lex(lexer); - written_bytes := WriteNBytes(StdOut, ADDRESS(lexer^.Current - lexer^.Start), lexer^.Start); - token := transpiler_lex(lexer) + WriteString(context^.output, ' RETURN '); + transpile_expression(context, lexerKindSemicolon) END transpile_return_statement; -PROCEDURE transpile_statement(context: PTranspilerContext; lexer: PLexer); +PROCEDURE transpile_statement(context: PTranspilerContext); VAR token: LexerToken; - written_bytes: CARDINAL; BEGIN - token := transpiler_lex(lexer); + token := transpiler_lex(context^.lexer); IF token.kind = lexerKindIf THEN - transpile_if_statement(context, lexer) + transpile_if_statement(context) END; IF token.kind = lexerKindWhile THEN - transpile_while_statement(context, lexer) + transpile_while_statement(context) END; IF token.kind = lexerKindReturn THEN - transpile_return_statement(context, lexer) + transpile_return_statement(context) END; IF token.kind = lexerKindIdentifier THEN - transpile_designator_expression(context, lexer); - token := lexer_current(lexer); + transpile_designator_expression(context); + token := lexer_current(context^.lexer); IF token.kind = lexerKindAssignment THEN - transpile_assignment_statement(context, lexer) + transpile_assignment_statement(context) END; IF token.kind = lexerKindLeftParen THEN - transpile_call_statement(context, lexer) + transpile_call_statement(context) END END END transpile_statement; -PROCEDURE transpile_statements(context: PTranspilerContext; lexer: PLexer); +PROCEDURE transpile_statements(context: PTranspilerContext); VAR token: LexerToken; BEGIN - token := lexer_current(lexer); + token := lexer_current(context^.lexer); WHILE token.kind <> lexerKindEnd DO - transpile_statement(context, lexer); - token := lexer_current(lexer); + transpile_statement(context); + token := lexer_current(context^.lexer); IF token.kind = lexerKindSemicolon THEN - Write(';') + WriteChar(context^.output, ';') END; - WriteLn() + WriteLine(context^.output) END END transpile_statements; -PROCEDURE transpile_statement_part(context: PTranspilerContext; lexer: PLexer); +PROCEDURE transpile_statement_part(context: PTranspilerContext); VAR token: LexerToken; BEGIN - token := lexer_current(lexer); + token := lexer_current(context^.lexer); IF token.kind = lexerKindBegin THEN - WriteString('BEGIN'); - WriteLn(); - transpile_statements(context, lexer) + WriteString(context^.output, 'BEGIN'); + WriteLine(context^.output); + transpile_statements(context) END END transpile_statement_part; -PROCEDURE transpile_procedure_declaration(context: PTranspilerContext; lexer: PLexer); +PROCEDURE transpile_procedure_declaration(context: PTranspilerContext); VAR token: LexerToken; - seen_part: BOOLEAN; + seen_variables: PPAstVariableDeclaration; written_bytes: CARDINAL; + seen_constants: PPAstConstantDeclaration; BEGIN - token := transpile_procedure_heading(context, lexer); - seen_part := 
transpile_constant_part(context, lexer); - seen_part := transpile_variable_part(context, lexer); - transpile_statement_part(context, lexer); + token := transpile_procedure_heading(context); + seen_constants := transpile_constant_part(context); + seen_variables := transpile_variable_part(context); + transpile_statement_part(context); - WriteString('END '); - written_bytes := WriteNBytes(StdOut, ORD(token.identifierKind[1]), ADR(token.identifierKind[2])); + WriteString(context^.output, 'END '); + written_bytes := WriteNBytes(context^.output, ORD(token.identifierKind[1]), ADR(token.identifierKind[2])); - token := transpiler_lex(lexer); - write_semicolon(); - token := transpiler_lex(lexer) + token := transpiler_lex(context^.lexer); + write_semicolon(context^.output); + token := transpiler_lex(context^.lexer) END transpile_procedure_declaration; -PROCEDURE transpile_procedure_part(context: PTranspilerContext; lexer: PLexer); +PROCEDURE transpile_procedure_part(context: PTranspilerContext); VAR token: LexerToken; BEGIN - token := lexer_current(lexer); + token := lexer_current(context^.lexer); WHILE token.kind = lexerKindProc DO - transpile_procedure_declaration(context, lexer); - token := lexer_current(lexer); - WriteLn() + transpile_procedure_declaration(context); + token := lexer_current(context^.lexer); + WriteLine(context^.output) END END transpile_procedure_part; -PROCEDURE transpile(lexer: PLexer); +PROCEDURE transpile(lexer: PLexer; output: File); VAR token: LexerToken; - written_bytes: CARDINAL; context: TranspilerContext; + ast_module: PAstModule; BEGIN - transpile_module(ADR(context), lexer) + context.indentation := 0; + context.output := output; + context.lexer := lexer; + + ast_module := transpile_module(ADR(context)) END transpile; END Transpiler.