diff --git a/Rakefile b/Rakefile
index 774a3e7..ba6063a 100644
--- a/Rakefile
+++ b/Rakefile
@@ -44,11 +44,12 @@ end
       .partition { |f| f.end_with? '.elna' }
 
     File.open t.name, 'w' do |output|
-      puts
-      puts(compiler * ' ')
+      compiler_command = compiler + sources
 
-      Open3.popen2(*compiler) do |cl_in, cl_out|
-        cl_in.write File.read(*sources)
+      puts
+      puts(compiler_command * ' ')
+
+      Open3.popen2(*compiler_command) do |cl_in, cl_out|
         cl_in.close
 
         IO.copy_stream cl_out, output
@@ -87,11 +88,11 @@ task default: 'source/Compiler.elna'
 task :default do |t|
   exe, previous_output, source = t.prerequisites
 
-  cat_arguments = ['cat', source]
+  exe_arguments = [exe, source]
   diff_arguments = ['diff', '-Nur', '--text', previous_output, '-']
 
-  puts [cat_arguments * ' ', exe, diff_arguments * ' '].join(' | ')
-  Open3.pipeline(cat_arguments, exe, diff_arguments)
+  puts [exe, diff_arguments * ' '].join(' | ')
+  Open3.pipeline exe_arguments, diff_arguments
 end
 
 task :backport do
@@ -104,7 +105,7 @@ task :backport do
     source
       .gsub(/^(var|type|const|begin)/) { |match| match.upcase }
      .gsub(/^[[:alnum:]]* ?module/) { |match| match.upcase }
-      .gsub(/\b(record|nil|or)\b/) { |match| match.upcase }
+      .gsub(/\b(record|nil|or|false|true)\b/) { |match| match.upcase }
       .gsub(/proc\(/, 'PROCEDURE(')
       .gsub(/ & /, ' AND ')
       .gsub(/ -> /, ': ')
diff --git a/source/CommandLineInterface.def b/source/CommandLineInterface.def
new file mode 100644
index 0000000..85be324
--- /dev/null
+++ b/source/CommandLineInterface.def
@@ -0,0 +1,15 @@
+DEFINITION MODULE CommandLineInterface;
+
+FROM Common IMPORT ShortString;
+
+TYPE
+  CommandLine = RECORD
+    input: ShortString;
+    lex: BOOLEAN;
+    parse: BOOLEAN
+  END;
+  PCommandLine = POINTER TO CommandLine;
+
+PROCEDURE parse_command_line(): PCommandLine;
+
+END CommandLineInterface.
diff --git a/source/CommandLineInterface.elna b/source/CommandLineInterface.elna
new file mode 100644
index 0000000..eb16b4f
--- /dev/null
+++ b/source/CommandLineInterface.elna
@@ -0,0 +1,75 @@
+implementation module CommandLineInterface;
+
+from SYSTEM import ADR, TSIZE;
+
+from Args import GetArg, Narg;
+from FIO import WriteString, WriteChar, WriteLine, StdErr;
+from Storage import ALLOCATE;
+from Strings import CompareStr, Length;
+from MemUtils import MemZero;
+
+from Common import ShortString;
+
+proc parse_command_line() -> PCommandLine;
+var
+  parameter: ShortString;
+  i: CARDINAL;
+  result: PCommandLine;
+  parsed: BOOLEAN;
+begin
+  i := 1;
+  ALLOCATE(result, TSIZE(CommandLine));
+  result^.lex := false;
+  result^.parse := false;
+  MemZero(ADR(result^.input), 256);
+
+  while (i < Narg()) & (result <> nil) do
+    parsed := GetArg(parameter, i);
+    parsed := false;
+
+    if CompareStr(parameter, '--lex') = 0 then
+      parsed := true;
+      result^.lex := true
+    end;
+    if CompareStr(parameter, '--parse') = 0 then
+      parsed := true;
+      result^.parse := true
+    end;
+    if parameter[0] <> '-' then
+      parsed := true;
+
+      if Length(result^.input) > 0 then
+        WriteString(StdErr, 'Fatal error: only one source file can be compiled at once. First given "');
+        WriteString(StdErr, result^.input);
+        WriteString(StdErr, '", then "');
+        WriteString(StdErr, parameter);
+        WriteString(StdErr, '".');
+        WriteLine(StdErr);
+        result := nil
+      end;
+      if result <> nil then
+        result^.input := parameter
+      end
+    end;
+    if parsed = false then
+      WriteString(StdErr, 'Fatal error: unknown command line options: ');
+
+      WriteString(StdErr, parameter);
+      WriteChar(StdErr, '.');
+      WriteLine(StdErr);
+
+      result := nil
+    end;
+
+    i := i + 1
+  end;
+  if (result <> nil) & (Length(result^.input) = 0) then
+    WriteString(StdErr, 'Fatal error: no input files.');
+    WriteLine(StdErr);
+    result := nil
+  end;
+
+  return result
+end;
+
+end CommandLineInterface.
diff --git a/source/Common.def b/source/Common.def
new file mode 100644
index 0000000..c6f661d
--- /dev/null
+++ b/source/Common.def
@@ -0,0 +1,8 @@
+DEFINITION MODULE Common;
+
+TYPE
+  ShortString = ARRAY[0..255] OF CHAR;
+  Identifier = ARRAY[1..256] OF CHAR;
+  PIdentifier = POINTER TO Identifier;
+
+END Common.
diff --git a/source/Common.elna b/source/Common.elna
new file mode 100644
index 0000000..7b8d623
--- /dev/null
+++ b/source/Common.elna
@@ -0,0 +1,3 @@
+implementation module Common;
+
+end Common.
diff --git a/source/Compiler.elna b/source/Compiler.elna
index 4e6ea19..b5f85a5 100644
--- a/source/Compiler.elna
+++ b/source/Compiler.elna
@@ -1,18 +1,51 @@
 module Compiler;
 
-from FIO import StdIn;
+from FIO import Close, IsNoError, File, OpenToRead, StdErr, StdOut, WriteLine, WriteString;
 from SYSTEM import ADR;
+from M2RTS import HALT, ExitOnHalt;
 
 from Lexer import Lexer, lexer_destroy, lexer_initialize;
 from Transpiler import transpile;
+from CommandLineInterface import PCommandLine, parse_command_line;
 
+var
+  command_line: PCommandLine;
+
+proc compile_from_stream();
 var
   lexer: Lexer;
+  source_input: File;
+begin
+  source_input := OpenToRead(command_line^.input);
+
+  if IsNoError(source_input) = false then
+    WriteString(StdErr, 'Fatal error: failed to read the input file "');
+    WriteString(StdErr, command_line^.input);
+    WriteString(StdErr, '".');
+    WriteLine(StdErr);
+
+    ExitOnHalt(2)
+  end;
+  if IsNoError(source_input) then
+    lexer_initialize(ADR(lexer), source_input);
+
+    transpile(ADR(lexer), StdOut);
+
+    lexer_destroy(ADR(lexer));
+
+    Close(source_input)
+  end
+end;
 
 begin
-  lexer_initialize(ADR(lexer), StdIn);
+  ExitOnHalt(0);
+  command_line := parse_command_line();
 
-  transpile(ADR(lexer));
-
-  lexer_destroy(ADR(lexer))
+  if command_line <> nil then
+    compile_from_stream()
+  end;
+  if command_line = nil then
+    ExitOnHalt(1)
+  end;
+  HALT()
 end Compiler.
diff --git a/source/Lexer.def b/source/Lexer.def
index 0df33cd..68643f8 100644
--- a/source/Lexer.def
+++ b/source/Lexer.def
@@ -2,6 +2,8 @@ DEFINITION MODULE Lexer;
 
 FROM FIO IMPORT File;
 
+FROM Common IMPORT Identifier;
+
 TYPE
   PLexerBuffer = POINTER TO CHAR;
   Lexer = RECORD
@@ -81,7 +83,7 @@ TYPE
   LexerToken = RECORD
     CASE kind: LexerKind OF
       lexerKindBoolean: booleanKind: BOOLEAN |
-      lexerKindIdentifier: identifierKind: ARRAY[1..256] OF CHAR
+      lexerKindIdentifier: identifierKind: Identifier
     END
   END;
   PLexerToken = POINTER TO LexerToken;
diff --git a/source/Lexer.elna b/source/Lexer.elna
index 902e93a..19cfd34 100644
--- a/source/Lexer.elna
+++ b/source/Lexer.elna
@@ -217,7 +217,7 @@ var
   index: CARDINAL;
 begin
   index := 0;
-  result := TRUE;
+  result := true;
 
   while (index < Length(Keyword)) & (TokenStart <> TokenEnd) & result DO
     result := (Keyword[index] = TokenStart^) or (Lower(Keyword[index]) = TokenStart^);
@@ -409,11 +409,11 @@ begin
    end;
    if compare_keyword('TRUE', lexer^.Start, lexer^.Current) then
      token^.kind := lexerKindBoolean;
-      token^.booleanKind := TRUE
+      token^.booleanKind := true
    end;
    if compare_keyword('FALSE', lexer^.Start, lexer^.Current) then
      token^.kind := lexerKindBoolean;
-      token^.booleanKind := FALSE
+      token^.booleanKind := false
    end
  end;
 
diff --git a/source/Parser.def b/source/Parser.def
new file mode 100644
index 0000000..d4fe66e
--- /dev/null
+++ b/source/Parser.def
@@ -0,0 +1,46 @@
+DEFINITION MODULE Parser;
+
+FROM Common IMPORT Identifier, PIdentifier;
+
+TYPE
+  AstConstantDeclaration = RECORD
+  END;
+  PAstConstantDeclaration = POINTER TO AstConstantDeclaration;
+  PPAstConstantDeclaration = POINTER TO PAstConstantDeclaration;
+
+  AstTypeExpressionKind = (
+    astTypeExpressionKindNamed,
+    astTypeExpressionKindRecord,
+    astTypeExpressionKindEnumeration,
+    astTypeExpressionKindArray,
+    astTypeExpressionKindPointer,
+    astTypeExpressionKindProcedure
+  );
+  AstTypeExpression = RECORD
+    CASE kind: AstTypeExpressionKind OF
+      astTypeExpressionKindNamed: name: Identifier |
+      astTypeExpressionKindEnumeration: cases: PIdentifier
+    END
+  END;
+  PAstTypeExpression = POINTER TO AstTypeExpression;
+
+  AstTypeDeclaration = RECORD
+    identifier: Identifier;
+    type_expression: PAstTypeExpression
+  END;
+  PAstTypeDeclaration = POINTER TO AstTypeDeclaration;
+  PPAstTypeDeclaration = POINTER TO PAstTypeDeclaration;
+
+  AstVariableDeclaration = RECORD
+  END;
+  PAstVariableDeclaration = POINTER TO AstVariableDeclaration;
+  PPAstVariableDeclaration = POINTER TO PAstVariableDeclaration;
+
+  AstModule = RECORD
+    constants: PPAstConstantDeclaration;
+    types: PPAstTypeDeclaration;
+    variables: PPAstVariableDeclaration
+  END;
+  PAstModule = POINTER TO AstModule;
+
+END Parser.
diff --git a/source/Parser.elna b/source/Parser.elna
new file mode 100644
index 0000000..d4d55ed
--- /dev/null
+++ b/source/Parser.elna
@@ -0,0 +1,3 @@
+implementation module Parser;
+
+end Parser.
diff --git a/source/Transpiler.def b/source/Transpiler.def
index c3cd8a7..555a960 100644
--- a/source/Transpiler.def
+++ b/source/Transpiler.def
@@ -1,7 +1,17 @@
 DEFINITION MODULE Transpiler;
 
-FROM Lexer IMPORT PLexer;
+FROM FIO IMPORT File;
 
-PROCEDURE transpile(ALexer: PLexer);
+FROM Lexer IMPORT PLexer, Lexer;
+
+TYPE
+  TranspilerContext = RECORD
+    indentation: CARDINAL;
+    output: File;
+    lexer: PLexer
+  END;
+  PTranspilerContext = POINTER TO TranspilerContext;
+
+PROCEDURE transpile(lexer: PLexer; output: File);
 
 END Transpiler.
diff --git a/source/Transpiler.elna b/source/Transpiler.elna index 9820193..373a18a 100644 --- a/source/Transpiler.elna +++ b/source/Transpiler.elna @@ -1,16 +1,18 @@ implementation module Transpiler; -from FIO import WriteNBytes, StdOut; -from SYSTEM import ADR, ADDRESS; +from FIO import WriteNBytes, WriteLine, WriteChar, WriteString; +from SYSTEM import ADR, ADDRESS, TSIZE; -from Terminal import Write, WriteLn, WriteString; +from Storage import ALLOCATE, REALLOCATE; +from MemUtils import MemCopy, MemZero; + +from Common import Identifier, PIdentifier; from Lexer import Lexer, LexerToken, lexer_current, lexer_lex, LexerKind; - -type - PTranspilerContext = ^TranspilerContext; - TranspilerContext = record - Indentation: CARDINAL - end; +from Parser import AstModule, PAstModule, AstTypeExpressionKind, + AstConstantDeclaration, PPAstConstantDeclaration, + AstTypeDeclaration, PAstTypeDeclaration, PPAstTypeDeclaration, + AstVariableDeclaration, PPAstVariableDeclaration, + PAstTypeExpression, AstTypeExpression; (* Calls lexer_lex() but skips the comments. *) proc transpiler_lex(lexer: PLexer) -> LexerToken; @@ -27,643 +29,740 @@ begin end; (* Write a semicolon followed by a newline. *) -proc write_semicolon(); +proc write_semicolon(output: File); begin - WriteString(';'); - WriteLn() + WriteChar(output, ';'); + WriteLine(output) end; -proc transpile_import(context: PTranspilerContext, lexer: PLexer); +proc write_current(lexer: PLexer, output: File); var - token: LexerToken; written_bytes: CARDINAL; begin - WriteString('FROM '); - token := transpiler_lex(lexer); - - written_bytes := WriteNBytes(StdOut, ADDRESS(lexer^.Current - lexer^.Start), lexer^.Start); - - token := transpiler_lex(lexer); - WriteString(' IMPORT '); - - token := transpiler_lex(lexer); - written_bytes := WriteNBytes(StdOut, ADDRESS(lexer^.Current - lexer^.Start), lexer^.Start); - - token := transpiler_lex(lexer); - while token.kind <> lexerKindSemicolon do - WriteString(', '); - token := transpiler_lex(lexer); - written_bytes := WriteNBytes(StdOut, ADDRESS(lexer^.Current - lexer^.Start), lexer^.Start); - token := transpiler_lex(lexer) - end; - write_semicolon(); - token := transpiler_lex(lexer) + written_bytes := WriteNBytes(output, ADDRESS(lexer^.Current - lexer^.Start), lexer^.Start) end; -proc transpile_import_part(context: PTranspilerContext, lexer: PLexer); +proc transpile_import(context: PTranspilerContext); var token: LexerToken; begin - token := lexer_current(lexer); + WriteString(context^.output, 'FROM '); + token := transpiler_lex(context^.lexer); + + write_current(context^.lexer, context^.output); + token := transpiler_lex(context^.lexer); + WriteString(context^.output, ' IMPORT '); + + token := transpiler_lex(context^.lexer); + write_current(context^.lexer, context^.output); + + token := transpiler_lex(context^.lexer); + while token.kind <> lexerKindSemicolon do + WriteString(context^.output, ', '); + token := transpiler_lex(context^.lexer); + write_current(context^.lexer, context^.output); + token := transpiler_lex(context^.lexer) + end; + write_semicolon(context^.output); + token := transpiler_lex(context^.lexer) +end; + +proc transpile_import_part(context: PTranspilerContext); +var + token: LexerToken; +begin + token := lexer_current(context^.lexer); while token.kind = lexerKindFrom do - transpile_import(context, lexer); - token := lexer_current(lexer) + transpile_import(context); + token := lexer_current(context^.lexer) end; - WriteLn() + WriteLine(context^.output) end; -proc transpile_constant(context: 
PTranspilerContext, lexer: PLexer); +proc transpile_constant(context: PTranspilerContext); var token: LexerToken; - written_bytes: CARDINAL; begin - WriteString(' '); - token := lexer_current(lexer); - written_bytes := WriteNBytes(StdOut, ADDRESS(lexer^.Current - lexer^.Start), lexer^.Start); + WriteString(context^.output, ' '); + token := lexer_current(context^.lexer); + write_current(context^.lexer, context^.output); - token := transpiler_lex(lexer); - WriteString(' = '); + token := transpiler_lex(context^.lexer); + WriteString(context^.output, ' = '); - token := transpiler_lex(lexer); - written_bytes := WriteNBytes(StdOut, ADDRESS(lexer^.Current - lexer^.Start), lexer^.Start); + token := transpiler_lex(context^.lexer); + write_current(context^.lexer, context^.output); - token := transpiler_lex(lexer); - write_semicolon() + token := transpiler_lex(context^.lexer); + write_semicolon(context^.output) end; -proc transpile_constant_part(context: PTranspilerContext, lexer: PLexer) -> BOOLEAN; +proc transpile_constant_part(context: PTranspilerContext) -> PPAstConstantDeclaration; var token: LexerToken; - result: BOOLEAN; begin - token := lexer_current(lexer); - result := token.kind = lexerKindConst; + token := lexer_current(context^.lexer); - if result then - WriteString('CONST'); - WriteLn(); - token := transpiler_lex(lexer); + if token.kind = lexerKindConst then + WriteString(context^.output, 'CONST'); + WriteLine(context^.output); + token := transpiler_lex(context^.lexer); while token.kind = lexerKindIdentifier do - transpile_constant(context, lexer); - token := transpiler_lex(lexer) + transpile_constant(context); + token := transpiler_lex(context^.lexer) end end; - return result + return nil end; -proc transpile_module(context: PTranspilerContext, lexer: PLexer); +proc transpile_module(context: PTranspilerContext) -> PAstModule; var token: LexerToken; - written_bytes: CARDINAL; + result: PAstModule; begin - token := transpiler_lex(lexer); + ALLOCATE(result, TSIZE(AstModule)); + token := transpiler_lex(context^.lexer); if token.kind = lexerKindDefinition then - WriteString('DEFINITION '); - token := transpiler_lex(lexer) + WriteString(context^.output, 'DEFINITION '); + token := transpiler_lex(context^.lexer) end; if token.kind = lexerKindImplementation then - WriteString('IMPLEMENTATION '); - token := transpiler_lex(lexer) + WriteString(context^.output, 'IMPLEMENTATION '); + token := transpiler_lex(context^.lexer) end; - WriteString('MODULE '); + WriteString(context^.output, 'MODULE '); (* Write the module name and end the line with a semicolon and newline. *) - token := transpiler_lex(lexer); - written_bytes := WriteNBytes(StdOut, ADDRESS(lexer^.Current - lexer^.Start), lexer^.Start); + token := transpiler_lex(context^.lexer); + write_current(context^.lexer, context^.output); - token := transpiler_lex(lexer); - write_semicolon(); - WriteLn(); + token := transpiler_lex(context^.lexer); + write_semicolon(context^.output); + WriteLine(context^.output); (* Write the module body. 
*) - token := transpiler_lex(lexer); - transpile_import_part(context, lexer); - if transpile_constant_part(context, lexer) then - WriteLn() - end; - transpile_type_part(context, lexer); - if transpile_variable_part(context, lexer) then - WriteLn() - end; - transpile_procedure_part(context, lexer); - transpile_statement_part(context, lexer); + token := transpiler_lex(context^.lexer); + transpile_import_part(context); - WriteString('END '); + result^.constants := transpile_constant_part(context); + result^.types := transpile_type_part(context); + result^.variables := transpile_variable_part(context); - token := transpiler_lex(lexer); - written_bytes := WriteNBytes(StdOut, ADDRESS(lexer^.Current - lexer^.Start), lexer^.Start); + transpile_procedure_part(context); + transpile_statement_part(context); - token := transpiler_lex(lexer); - Write('.'); + WriteString(context^.output, 'END '); - token := transpiler_lex(lexer); - WriteLn() + token := transpiler_lex(context^.lexer); + write_current(context^.lexer, context^.output); + + token := transpiler_lex(context^.lexer); + WriteChar(context^.output, '.'); + + token := transpiler_lex(context^.lexer); + WriteLine(context^.output); + + return result end; -proc transpile_type_fields(context: PTranspilerContext, lexer: PLexer); +proc transpile_type_fields(context: PTranspilerContext); var token: LexerToken; - written_bytes: CARDINAL; + type_expression: PAstTypeExpression; begin - token := transpiler_lex(lexer); + token := transpiler_lex(context^.lexer); while token.kind <> lexerKindEnd do - WriteString(' '); - written_bytes := WriteNBytes(StdOut, ADDRESS(lexer^.Current - lexer^.Start), lexer^.Start); - token := transpiler_lex(lexer); - WriteString(': '); - transpile_type_expression(context, lexer); - token := transpiler_lex(lexer); + WriteString(context^.output, ' '); + write_current(context^.lexer, context^.output); + token := transpiler_lex(context^.lexer); + WriteString(context^.output, ': '); + type_expression := transpile_type_expression(context); + token := transpiler_lex(context^.lexer); if token.kind = lexerKindSemicolon then - token := transpiler_lex(lexer); - Write(';') + token := transpiler_lex(context^.lexer); + WriteChar(context^.output, ';') end; - WriteLn() + WriteLine(context^.output) end end; -proc transpile_record_type(context: PTranspilerContext, lexer: PLexer); +proc transpile_record_type(context: PTranspilerContext) -> PAstTypeExpression; var - token: LexerToken; + result: PAstTypeExpression; begin - WriteString('RECORD'); - WriteLn(); - transpile_type_fields(context, lexer); - WriteString(' END') + ALLOCATE(result, TSIZE(AstTypeExpression)); + result^.kind := astTypeExpressionKindRecord; + + WriteString(context^.output, 'RECORD'); + WriteLine(context^.output); + transpile_type_fields(context); + WriteString(context^.output, ' END'); + + return result end; -proc transpile_pointer_type(context: PTranspilerContext, lexer: PLexer); +proc transpile_pointer_type(context: PTranspilerContext) -> PAstTypeExpression; var token: LexerToken; - written_bytes: CARDINAL; + type_expression: PAstTypeExpression; + result: PAstTypeExpression; begin - token := lexer_current(lexer); - WriteString('POINTER TO '); + ALLOCATE(result, TSIZE(AstTypeExpression)); + result^.kind := astTypeExpressionKindPointer; + + token := lexer_current(context^.lexer); + WriteString(context^.output, 'POINTER TO '); if token.kind = lexerKindPointer then - token := transpiler_lex(lexer) + token := transpiler_lex(context^.lexer) end; - transpile_type_expression(context, 
lexer) + type_expression := transpile_type_expression(context); + + return result end; -proc transpile_array_type(context: PTranspilerContext, lexer: PLexer); +proc transpile_array_type(context: PTranspilerContext) -> PAstTypeExpression; var token: LexerToken; - written_bytes: CARDINAL; + type_expression: PAstTypeExpression; + result: PAstTypeExpression; begin - WriteString('ARRAY'); - token := lexer_current(lexer); + ALLOCATE(result, TSIZE(AstTypeExpression)); + result^.kind := astTypeExpressionKindArray; + + WriteString(context^.output, 'ARRAY'); + token := lexer_current(context^.lexer); if token.kind = lexerKindArray then - token := transpiler_lex(lexer) + token := transpiler_lex(context^.lexer) end; if token.kind <> lexerKindOf then - WriteString('[1..'); - token := transpiler_lex(lexer); - written_bytes := WriteNBytes(StdOut, ADDRESS(lexer^.Current - lexer^.Start), lexer^.Start); - token := transpiler_lex(lexer); - Write(']') + WriteString(context^.output, '[1..'); + token := transpiler_lex(context^.lexer); + write_current(context^.lexer, context^.output); + token := transpiler_lex(context^.lexer); + WriteChar(context^.output, ']') end; - WriteString(' OF '); - transpile_type_expression(context, lexer) + WriteString(context^.output, ' OF '); + type_expression := transpile_type_expression(context); + + return result end; -proc transpile_enumeration_type(context: PTranspilerContext, lexer: PLexer); +proc transpile_enumeration_type(context: PTranspilerContext) -> PAstTypeExpression; var token: LexerToken; + result: PAstTypeExpression; + current_case: PIdentifier; + case_count: CARDINAL; written_bytes: CARDINAL; begin - WriteString('('); - WriteLn(); - WriteString(' '); + ALLOCATE(result, TSIZE(AstTypeExpression)); + result^.kind := astTypeExpressionKindEnumeration; - token := transpiler_lex(lexer); - written_bytes := WriteNBytes(StdOut, ADDRESS(lexer^.Current - lexer^.Start), lexer^.Start); + case_count := 1; + ALLOCATE(result^.cases, TSIZE(Identifier) * 2); + token := transpiler_lex(context^.lexer); + current_case := result^.cases; + current_case^ := token.identifierKind; - token := transpiler_lex(lexer); + token := transpiler_lex(context^.lexer); while token.kind = lexerKindComma do - Write(','); - WriteLn(); - WriteString(' '); - token := transpiler_lex(lexer); - written_bytes := WriteNBytes(StdOut, ADDRESS(lexer^.Current - lexer^.Start), lexer^.Start); + token := transpiler_lex(context^.lexer); - token := transpiler_lex(lexer) + INC(case_count); + REALLOCATE(result^.cases, TSIZE(Identifier) * (case_count + 1)); + current_case := result^.cases; + INC(current_case, TSIZE(Identifier) * (case_count - 1)); + current_case^ := token.identifierKind; + + token := transpiler_lex(context^.lexer) end; - WriteLn(); - WriteString(' )') + INC(current_case, TSIZE(Identifier)); + MemZero(current_case, TSIZE(Identifier)); + + (* Write the cases using the generated identifier list before. 
*) + current_case := result^.cases; + + WriteString(context^.output, '('); + WriteLine(context^.output); + WriteString(context^.output, ' '); + written_bytes := WriteNBytes(context^.output, ORD(current_case^[1]), ADR(current_case^[2])); + INC(current_case, TSIZE(Identifier)); + + while ORD(current_case^[1]) <> 0 do + WriteChar(context^.output, ','); + WriteLine(context^.output); + WriteString(context^.output, ' '); + written_bytes := WriteNBytes(context^.output, ORD(current_case^[1]), ADR(current_case^[2])); + + INC(current_case, TSIZE(Identifier)) + end; + WriteLine(context^.output); + WriteString(context^.output, ' )'); + + return result end; -proc transpile_union_type(context: PTranspilerContext, lexer: PLexer); -var - token: LexerToken; -end; - -proc transpile_procedure_type(context: PTranspilerContext, lexer: PLexer); +proc transpile_named_type(context: PTranspilerContext) -> PAstTypeExpression; var token: LexerToken; + result: PAstTypeExpression; written_bytes: CARDINAL; begin - token := transpiler_lex(lexer); - WriteString('PROCEDURE('); + token := lexer_current(context^.lexer); + ALLOCATE(result, TSIZE(AstTypeExpression)); - token := transpiler_lex(lexer); + result^.kind := astTypeExpressionKindNamed; + result^.name := token.identifierKind; + + written_bytes := WriteNBytes(context^.output, ORD(result^.name[1]), ADR(result^.name[2])); + + return result +end; + +proc transpile_procedure_type(context: PTranspilerContext) -> PAstTypeExpression; +var + token: LexerToken; + result: PAstTypeExpression; +begin + ALLOCATE(result, TSIZE(AstTypeExpression)); + result^.kind := astTypeExpressionKindProcedure; + + token := transpiler_lex(context^.lexer); + WriteString(context^.output, 'PROCEDURE('); + + token := transpiler_lex(context^.lexer); while token.kind <> lexerKindRightParen do - written_bytes := WriteNBytes(StdOut, ADDRESS(lexer^.Current - lexer^.Start), lexer^.Start); + write_current(context^.lexer, context^.output); - token := transpiler_lex(lexer); + token := transpiler_lex(context^.lexer); if token.kind = lexerKindComma then - token := transpiler_lex(lexer); - WriteString(', ') + token := transpiler_lex(context^.lexer); + WriteString(context^.output, ', ') end end; - Write(')') + WriteChar(context^.output, ')'); + + return nil end; -proc transpile_type_expression(context: PTranspilerContext, lexer: PLexer); +proc transpile_type_expression(context: PTranspilerContext) -> PAstTypeExpression; var token: LexerToken; - written_bytes: CARDINAL; + result: PAstTypeExpression; begin - token := transpiler_lex(lexer); + result := nil; + token := transpiler_lex(context^.lexer); + if token.kind = lexerKindRecord then - transpile_record_type(context, lexer) + result := transpile_record_type(context) end; if token.kind = lexerKindLeftParen then - transpile_enumeration_type(context, lexer) + result := transpile_enumeration_type(context) end; if (token.kind = lexerKindArray) or (token.kind = lexerKindLeftSquare) then - transpile_array_type(context, lexer) + result := transpile_array_type(context) end; if token.kind = lexerKindHat then - transpile_pointer_type(context, lexer) + result := transpile_pointer_type(context) end; if token.kind = lexerKindProc then - transpile_procedure_type(context, lexer) + result := transpile_procedure_type(context) end; if token.kind = lexerKindIdentifier then - written_bytes := WriteNBytes(StdOut, ADDRESS(lexer^.Current - lexer^.Start), lexer^.Start) - end -end; - -proc transpile_type_declaration(context: PTranspilerContext, lexer: PLexer); -var - token: LexerToken; - 
written_bytes: CARDINAL; -begin - WriteString(' '); - token := lexer_current(lexer); - written_bytes := WriteNBytes(StdOut, ADDRESS(lexer^.Current - lexer^.Start), lexer^.Start); - - token := transpiler_lex(lexer); - WriteString(' = '); - transpile_type_expression(context, lexer); - token := transpiler_lex(lexer); - write_semicolon(); -end; - -proc transpile_type_part(context: PTranspilerContext, lexer: PLexer); -var - token: LexerToken; -begin - token := lexer_current(lexer); - - if token.kind = lexerKindType then - WriteString('TYPE'); - WriteLn(); - token := transpiler_lex(lexer); - - while token.kind = lexerKindIdentifier do - transpile_type_declaration(context, lexer); - token := transpiler_lex(lexer) - end; - WriteLn() - end -end; - -proc transpile_variable_declaration(context: PTranspilerContext, lexer: PLexer); -var - token: LexerToken; - written_bytes: CARDINAL; -begin - WriteString(' '); - token := lexer_current(lexer); - written_bytes := WriteNBytes(StdOut, ADDRESS(lexer^.Current - lexer^.Start), lexer^.Start); - - token := transpiler_lex(lexer); - WriteString(': '); - transpile_type_expression(context, lexer); - token := transpiler_lex(lexer); - write_semicolon() -end; - -proc transpile_variable_part(context: PTranspilerContext, lexer: PLexer) -> BOOLEAN; -var - token: LexerToken; - result: BOOLEAN; -begin - token := lexer_current(lexer); - result := token.kind = lexerKindVar; - - if result then - WriteString('VAR'); - WriteLn(); - token := transpiler_lex(lexer); - - while token.kind = lexerKindIdentifier do - transpile_variable_declaration(context, lexer); - token := transpiler_lex(lexer) - end + result := transpile_named_type(context) end; return result end; -proc transpile_procedure_heading(context: PTranspilerContext, lexer: PLexer) -> LexerToken; +proc transpile_type_declaration(context: PTranspilerContext) -> PAstTypeDeclaration; +var + token: LexerToken; + result: PAstTypeDeclaration; + written_bytes: CARDINAL; +begin + WriteString(context^.output, ' '); + token := lexer_current(context^.lexer); + + ALLOCATE(result, TSIZE(AstTypeDeclaration)); + result^.identifier := token.identifierKind; + + written_bytes := WriteNBytes(context^.output, ORD(token.identifierKind[1]), ADR(token.identifierKind[2])); + + token := transpiler_lex(context^.lexer); + WriteString(context^.output, ' = '); + result^.type_expression := transpile_type_expression(context); + token := transpiler_lex(context^.lexer); + write_semicolon(context^.output); + + return result +end; + +proc transpile_type_part(context: PTranspilerContext) -> PPAstTypeDeclaration; +var + token: LexerToken; + result: PPAstTypeDeclaration; + current_declaration: PPAstTypeDeclaration; + declaration_count: CARDINAL; +begin + token := lexer_current(context^.lexer); + + ALLOCATE(result, TSIZE(PAstTypeDeclaration)); + current_declaration := result; + declaration_count := 0; + + if token.kind = lexerKindType then + WriteString(context^.output, 'TYPE'); + WriteLine(context^.output); + token := transpiler_lex(context^.lexer); + + while token.kind = lexerKindIdentifier do + INC(declaration_count); + + REALLOCATE(result, TSIZE(PAstTypeDeclaration) * (declaration_count + 1)); + current_declaration := result; + INC(current_declaration, TSIZE(PAstTypeDeclaration) * (declaration_count - 1)); + + current_declaration^ := transpile_type_declaration(context); + token := transpiler_lex(context^.lexer) + end; + WriteLine(context^.output) + end; + if declaration_count <> 0 then + INC(current_declaration, TSIZE(PAstTypeDeclaration)) + end; + 
current_declaration^ := nil; + return result +end; + +proc transpile_variable_declaration(context: PTranspilerContext); +var + token: LexerToken; + type_expression: PAstTypeExpression; +begin + WriteString(context^.output, ' '); + token := lexer_current(context^.lexer); + write_current(context^.lexer, context^.output); + + token := transpiler_lex(context^.lexer); + WriteString(context^.output, ': '); + type_expression := transpile_type_expression(context); + token := transpiler_lex(context^.lexer); + write_semicolon(context^.output) +end; + +proc transpile_variable_part(context: PTranspilerContext) -> PPAstVariableDeclaration; +var + token: LexerToken; +begin + token := lexer_current(context^.lexer); + + if token.kind = lexerKindVar then + WriteString(context^.output, 'VAR'); + WriteLine(context^.output); + token := transpiler_lex(context^.lexer); + + while token.kind = lexerKindIdentifier do + transpile_variable_declaration(context); + token := transpiler_lex(context^.lexer) + end + end; + return nil +end; + +proc transpile_procedure_heading(context: PTranspilerContext) -> LexerToken; var token: LexerToken; result: LexerToken; - written_bytes: CARDINAL; + type_expression: PAstTypeExpression; begin - WriteString('PROCEDURE '); + WriteString(context^.output, 'PROCEDURE '); - result := transpiler_lex(lexer); - written_bytes := WriteNBytes(StdOut, ADDRESS(lexer^.Current - lexer^.Start), lexer^.Start); + result := transpiler_lex(context^.lexer); + write_current(context^.lexer, context^.output); - token := transpiler_lex(lexer); - Write('('); + token := transpiler_lex(context^.lexer); + WriteChar(context^.output, '('); - token := transpiler_lex(lexer); + token := transpiler_lex(context^.lexer); while token.kind <> lexerKindRightParen do - written_bytes := WriteNBytes(StdOut, ADDRESS(lexer^.Current - lexer^.Start), lexer^.Start); + write_current(context^.lexer, context^.output); - token := transpiler_lex(lexer); - WriteString(': '); + token := transpiler_lex(context^.lexer); + WriteString(context^.output, ': '); - transpile_type_expression(context, lexer); + type_expression := transpile_type_expression(context); - token := transpiler_lex(lexer); + token := transpiler_lex(context^.lexer); if (token.kind = lexerKindSemicolon) or (token.kind = lexerKindComma) then - WriteString('; '); - token := transpiler_lex(lexer) + WriteString(context^.output, '; '); + token := transpiler_lex(context^.lexer) end end; - WriteString(')'); - token := transpiler_lex(lexer); + WriteString(context^.output, ')'); + token := transpiler_lex(context^.lexer); (* Check for the return type and write it. 
*) if token.kind = lexerKindArrow then - WriteString(': '); - token := transpiler_lex(lexer); - written_bytes := WriteNBytes(StdOut, ADDRESS(lexer^.Current - lexer^.Start), lexer^.Start); - token := transpiler_lex(lexer) + WriteString(context^.output, ': '); + token := transpiler_lex(context^.lexer); + write_current(context^.lexer, context^.output); + token := transpiler_lex(context^.lexer) end; - token := transpiler_lex(lexer); - write_semicolon(); + token := transpiler_lex(context^.lexer); + write_semicolon(context^.output); return result end; -proc transpile_expression(context: PTranspilerContext, lexer: PLexer, trailing_token: LexerKind); +proc transpile_expression(context: PTranspilerContext, trailing_token: LexerKind); var token: LexerToken; written_bytes: CARDINAL; begin - token := transpiler_lex(lexer); + token := transpiler_lex(context^.lexer); while (token.kind <> trailing_token) & (token.kind <> lexerKindEnd) do written_bytes := 0; if token.kind = lexerKindNull then - WriteString('NIL '); + WriteString(context^.output, 'NIL '); + written_bytes := 1 + end; + if (token.kind = lexerKindBoolean) & token.booleanKind then + WriteString(context^.output, 'TRUE '); + written_bytes := 1 + end; + if (token.kind = lexerKindBoolean) & (~token.booleanKind) then + WriteString(context^.output, 'FALSE '); written_bytes := 1 end; if token.kind = lexerKindOr then - WriteString('OR '); + WriteString(context^.output, 'OR '); written_bytes := 1 end; if token.kind = lexerKindAnd then - WriteString('AND '); + WriteString(context^.output, 'AND '); written_bytes := 1 end; if token.kind = lexerKindNot then - WriteString('NOT '); + WriteString(context^.output, 'NOT '); written_bytes := 1 end; if written_bytes = 0 then - written_bytes := WriteNBytes(StdOut, ADDRESS(lexer^.Current - lexer^.Start), lexer^.Start); - Write(' ') + write_current(context^.lexer, context^.output); + WriteChar(context^.output, ' ') end; - token := transpiler_lex(lexer) + token := transpiler_lex(context^.lexer) end end; -proc transpile_if_statement(context: PTranspilerContext, lexer: PLexer); +proc transpile_if_statement(context: PTranspilerContext); var token: LexerToken; - written_bytes: CARDINAL; begin - WriteString(' IF '); - transpile_expression(context, lexer, lexerKindThen); + WriteString(context^.output, ' IF '); + transpile_expression(context, lexerKindThen); - WriteString('THEN'); - WriteLn(); - transpile_statements(context, lexer); - WriteString(' END'); - token := transpiler_lex(lexer) + WriteString(context^.output, 'THEN'); + WriteLine(context^.output); + transpile_statements(context); + WriteString(context^.output, ' END'); + token := transpiler_lex(context^.lexer) end; -proc transpile_while_statement(context: PTranspilerContext, lexer: PLexer); +proc transpile_while_statement(context: PTranspilerContext); var token: LexerToken; - written_bytes: CARDINAL; begin - WriteString(' WHILE '); - transpile_expression(context, lexer, lexerKindDo); + WriteString(context^.output, ' WHILE '); + transpile_expression(context, lexerKindDo); - WriteString('DO'); - WriteLn(); - transpile_statements(context, lexer); - WriteString(' END'); - token := transpiler_lex(lexer) + WriteString(context^.output, 'DO'); + WriteLine(context^.output); + transpile_statements(context); + WriteString(context^.output, ' END'); + token := transpiler_lex(context^.lexer) end; -proc transpile_assignment_statement(context: PTranspilerContext, lexer: PLexer); +proc transpile_assignment_statement(context: PTranspilerContext); begin - WriteString(' := '); - 
transpile_expression(context, lexer, lexerKindSemicolon); + WriteString(context^.output, ' := '); + transpile_expression(context, lexerKindSemicolon); end; -proc transpile_call_statement(context: PTranspilerContext, lexer: PLexer); +proc transpile_call_statement(context: PTranspilerContext); var token: LexerToken; - written_bytes: CARDINAL; begin - WriteString('('); - token := transpiler_lex(lexer); + WriteString(context^.output, '('); + token := transpiler_lex(context^.lexer); while (token.kind <> lexerKindSemicolon) & (token.kind <> lexerKindEnd) do - written_bytes := WriteNBytes(StdOut, ADDRESS(lexer^.Current - lexer^.Start), lexer^.Start); - token := transpiler_lex(lexer) + write_current(context^.lexer, context^.output); + token := transpiler_lex(context^.lexer) end end; -proc transpile_designator_expression(context: PTranspilerContext, lexer: PLexer); +proc transpile_designator_expression(context: PTranspilerContext); var token: LexerToken; - written_bytes: CARDINAL; begin - WriteString(' '); - written_bytes := WriteNBytes(StdOut, ADDRESS(lexer^.Current - lexer^.Start), lexer^.Start); - token := transpiler_lex(lexer); + WriteString(context^.output, ' '); + write_current(context^.lexer, context^.output); + token := transpiler_lex(context^.lexer); while token.kind = lexerKindLeftSquare do - Write('['); - token := transpiler_lex(lexer); + WriteChar(context^.output, '['); + token := transpiler_lex(context^.lexer); while token.kind <> lexerKindRightSquare do - written_bytes := WriteNBytes(StdOut, ADDRESS(lexer^.Current - lexer^.Start), lexer^.Start); - token := transpiler_lex(lexer) + write_current(context^.lexer, context^.output); + token := transpiler_lex(context^.lexer) end; - Write(']'); - token := transpiler_lex(lexer) + WriteChar(context^.output, ']'); + token := transpiler_lex(context^.lexer) end; if token.kind = lexerKindHat then - Write('^'); - token := transpiler_lex(lexer) + WriteChar(context^.output, '^'); + token := transpiler_lex(context^.lexer) end; if token.kind = lexerKindDot then - Write('.'); - token := transpiler_lex(lexer); - written_bytes := WriteNBytes(StdOut, ADDRESS(lexer^.Current - lexer^.Start), lexer^.Start); - token := transpiler_lex(lexer) + WriteChar(context^.output, '.'); + token := transpiler_lex(context^.lexer); + write_current(context^.lexer, context^.output); + token := transpiler_lex(context^.lexer) end; if token.kind = lexerKindHat then - Write('^'); - token := transpiler_lex(lexer) + WriteChar(context^.output, '^'); + token := transpiler_lex(context^.lexer) end; while token.kind = lexerKindLeftSquare do - Write('['); - token := transpiler_lex(lexer); + WriteChar(context^.output, '['); + token := transpiler_lex(context^.lexer); while token.kind <> lexerKindRightSquare do - written_bytes := WriteNBytes(StdOut, ADDRESS(lexer^.Current - lexer^.Start), lexer^.Start); - token := transpiler_lex(lexer) + write_current(context^.lexer, context^.output); + token := transpiler_lex(context^.lexer) end; - Write(']'); - token := transpiler_lex(lexer) + WriteChar(context^.output, ']'); + token := transpiler_lex(context^.lexer) end end; -proc transpile_return_statement(context: PTranspilerContext, lexer: PLexer); +proc transpile_return_statement(context: PTranspilerContext); var token: LexerToken; - written_bytes: CARDINAL; begin - WriteString(' RETURN '); - token := transpiler_lex(lexer); - written_bytes := WriteNBytes(StdOut, ADDRESS(lexer^.Current - lexer^.Start), lexer^.Start); - token := transpiler_lex(lexer) + WriteString(context^.output, ' RETURN '); + 
transpile_expression(context, lexerKindSemicolon) end; -proc transpile_statement(context: PTranspilerContext, lexer: PLexer); +proc transpile_statement(context: PTranspilerContext); var token: LexerToken; - written_bytes: CARDINAL; begin - token := transpiler_lex(lexer); + token := transpiler_lex(context^.lexer); if token.kind = lexerKindIf then - transpile_if_statement(context, lexer) + transpile_if_statement(context) end; if token.kind = lexerKindWhile then - transpile_while_statement(context, lexer) + transpile_while_statement(context) end; if token.kind = lexerKindReturn then - transpile_return_statement(context, lexer) + transpile_return_statement(context) end; if token.kind = lexerKindIdentifier then - transpile_designator_expression(context, lexer); - token := lexer_current(lexer); + transpile_designator_expression(context); + token := lexer_current(context^.lexer); if token.kind = lexerKindAssignment then - transpile_assignment_statement(context, lexer) + transpile_assignment_statement(context) end; if token.kind = lexerKindLeftParen then - transpile_call_statement(context, lexer) + transpile_call_statement(context) end end end; -proc transpile_statements(context: PTranspilerContext, lexer: PLexer); +proc transpile_statements(context: PTranspilerContext); var token: LexerToken; begin - token := lexer_current(lexer); + token := lexer_current(context^.lexer); while token.kind <> lexerKindEnd do - transpile_statement(context, lexer); - token := lexer_current(lexer); + transpile_statement(context); + token := lexer_current(context^.lexer); if token.kind = lexerKindSemicolon then - Write(';') + WriteChar(context^.output, ';') end; - WriteLn() + WriteLine(context^.output) end end; -proc transpile_statement_part(context: PTranspilerContext, lexer: PLexer); +proc transpile_statement_part(context: PTranspilerContext); var token: LexerToken; begin - token := lexer_current(lexer); + token := lexer_current(context^.lexer); if token.kind = lexerKindBegin then - WriteString('BEGIN'); - WriteLn(); - transpile_statements(context, lexer) + WriteString(context^.output, 'BEGIN'); + WriteLine(context^.output); + transpile_statements(context) end end; -proc transpile_procedure_declaration(context: PTranspilerContext, lexer: PLexer); +proc transpile_procedure_declaration(context: PTranspilerContext); var token: LexerToken; - seen_part: BOOLEAN; + seen_variables: PPAstVariableDeclaration; written_bytes: CARDINAL; + seen_constants: PPAstConstantDeclaration; begin - token := transpile_procedure_heading(context, lexer); - seen_part := transpile_constant_part(context, lexer); - seen_part := transpile_variable_part(context, lexer); - transpile_statement_part(context, lexer); + token := transpile_procedure_heading(context); + seen_constants := transpile_constant_part(context); + seen_variables := transpile_variable_part(context); + transpile_statement_part(context); - WriteString('END '); - written_bytes := WriteNBytes(StdOut, ORD(token.identifierKind[1]), ADR(token.identifierKind[2])); + WriteString(context^.output, 'END '); + written_bytes := WriteNBytes(context^.output, ORD(token.identifierKind[1]), ADR(token.identifierKind[2])); - token := transpiler_lex(lexer); - write_semicolon(); - token := transpiler_lex(lexer) + token := transpiler_lex(context^.lexer); + write_semicolon(context^.output); + token := transpiler_lex(context^.lexer) end; -proc transpile_procedure_part(context: PTranspilerContext, lexer: PLexer); +proc transpile_procedure_part(context: PTranspilerContext); var token: LexerToken; begin - 
token := lexer_current(lexer); + token := lexer_current(context^.lexer); while token.kind = lexerKindProc do - transpile_procedure_declaration(context, lexer); - token := lexer_current(lexer); - WriteLn() + transpile_procedure_declaration(context); + token := lexer_current(context^.lexer); + WriteLine(context^.output) end end; -proc transpile(lexer: PLexer); +proc transpile(lexer: PLexer, output: File); var token: LexerToken; - written_bytes: CARDINAL; context: TranspilerContext; + ast_module: PAstModule; begin - transpile_module(ADR(context), lexer) + context.indentation := 0; + context.output := output; + context.lexer := lexer; + + ast_module := transpile_module(ADR(context)) end; end Transpiler.