Add a command line parsing procedure

Eugen Wissner 2025-05-31 11:28:24 +02:00
parent 6e415e474f
commit edb8ce37d9
Signed by: belka
GPG Key ID: A27FDC1E8EE902C0
11 changed files with 386 additions and 247 deletions

View File

@@ -44,11 +44,12 @@ end
.partition { |f| f.end_with? '.elna' }
File.open t.name, 'w' do |output|
puts
puts(compiler * ' ')
compiler_command = compiler + sources
Open3.popen2(*compiler) do |cl_in, cl_out|
cl_in.write File.read(*sources)
puts
puts(compiler_command * ' ')
Open3.popen2(*compiler_command) do |cl_in, cl_out|
cl_in.close
IO.copy_stream cl_out, output
@@ -87,11 +88,11 @@ task default: 'source/Compiler.elna'
task :default do |t|
exe, previous_output, source = t.prerequisites
cat_arguments = ['cat', source]
exe_arguments = [exe, source]
diff_arguments = ['diff', '-Nur', '--text', previous_output, '-']
puts [cat_arguments * ' ', exe, diff_arguments * ' '].join(' | ')
Open3.pipeline(cat_arguments, exe, diff_arguments)
puts [exe, diff_arguments * ' '].join(' | ')
Open3.pipeline exe_arguments, diff_arguments
end
task :backport do
@@ -104,7 +105,7 @@ task :backport do
source
.gsub(/^(var|type|const|begin)/) { |match| match.upcase }
.gsub(/^[[:alnum:]]* ?module/) { |match| match.upcase }
.gsub(/\b(record|nil|or)\b/) { |match| match.upcase }
.gsub(/\b(record|nil|or|false|true)\b/) { |match| match.upcase }
.gsub(/proc\(/, 'PROCEDURE(')
.gsub(/ & /, ' AND ')
.gsub(/ -> /, ': ')
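(* Illustration of the extended keyword regex above: the backport task now also uppercases the standalone words false and true when converting Elna back to Modula-2. The snippet itself is hypothetical; only the substitution rule comes from this commit. *)
(* Elna source before backporting: *)
result := false;
token^.booleanKind := true
(* Modula-2 output of the backport task: *)
result := FALSE;
token^.booleanKind := TRUE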

View File

@@ -0,0 +1,15 @@
DEFINITION MODULE CommandLineInterface;
FROM Common IMPORT ShortString;
TYPE
CommandLine = RECORD
input: ShortString;
lex: BOOLEAN;
parse: BOOLEAN
END;
PCommandLine = POINTER TO CommandLine;
PROCEDURE parse_command_line(): PCommandLine;
END CommandLineInterface.
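(* A minimal usage sketch of this new interface, mirroring the call added to Compiler.elna further down. The module name Example and the output statements are only illustrative. *)
module Example;
from FIO import StdOut, WriteLine, WriteString;
from CommandLineInterface import PCommandLine, parse_command_line;
var
  command_line: PCommandLine;
begin
  (* parse_command_line() reports usage errors to StdErr and returns nil on failure. *)
  command_line := parse_command_line();
  if command_line <> nil then
    (* input holds the single source file; lex and parse mirror the --lex and --parse flags. *)
    WriteString(StdOut, command_line^.input);
    WriteLine(StdOut)
  end
end Example.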

View File

@@ -0,0 +1,75 @@
implementation module CommandLineInterface;
from SYSTEM import ADR, TSIZE;
from Args import GetArg, Narg;
from FIO import WriteString, WriteChar, WriteLine, StdErr;
from Storage import ALLOCATE;
from Strings import CompareStr, Length;
from MemUtils import MemZero;
from Common import ShortString;
proc parse_command_line() -> PCommandLine;
var
parameter: ShortString;
i: CARDINAL;
result: PCommandLine;
parsed: BOOLEAN;
begin
i := 1;
ALLOCATE(result, TSIZE(CommandLine));
result^.lex := false;
result^.parse := false;
MemZero(ADR(result^.input), 256);
while (i < Narg()) & (result <> nil) do
parsed := GetArg(parameter, i);
parsed := false;
if CompareStr(parameter, '--lex') = 0 then
parsed := true;
result^.lex := true
end;
if CompareStr(parameter, '--parse') = 0 then
parsed := true;
result^.parse := true
end;
if parameter[0] <> '-' then
parsed := true;
if Length(result^.input) > 0 then
WriteString(StdErr, 'Fatal error: only one source file can be compiled at once. First given "');
WriteString(StdErr, result^.input);
WriteString(StdErr, '", then "');
WriteString(StdErr, parameter);
WriteString(StdErr, '".');
WriteLine(StdErr);
result := nil
end;
if result <> nil then
result^.input := parameter
end
end;
if parsed = false then
WriteString(StdErr, 'Fatal error: unknown command line options: ');
WriteString(StdErr, parameter);
WriteChar(StdErr, '.');
WriteLine(StdErr);
result := nil
end;
i := i + 1
end;
if (result <> nil) & (Length(result^.input) = 0) then
WriteString(StdErr, 'Fatal error: no input files.');
WriteLine(StdErr);
result := nil
end;
return result
end;
end CommandLineInterface.

source/Common.def (new file, 6 additions)

@@ -0,0 +1,6 @@
DEFINITION MODULE Common;
TYPE
ShortString = ARRAY[0..255] OF CHAR;
END Common.

source/Common.elna (new file, 3 additions)

@@ -0,0 +1,3 @@
implementation module Common;
end Common.

View File

@@ -1,18 +1,51 @@
module Compiler;
from FIO import StdIn;
from FIO import Close, IsNoError, File, OpenToRead, StdErr, WriteLine, WriteString;
from SYSTEM import ADR;
from M2RTS import HALT, ExitOnHalt;
from Lexer import Lexer, lexer_destroy, lexer_initialize;
from Transpiler import transpile;
from CommandLineInterface import PCommandLine, parse_command_line;
var
command_line: PCommandLine;
proc compile_from_stream();
var
lexer: Lexer;
source_input: File;
begin
source_input := OpenToRead(command_line^.input);
if IsNoError(source_input) = false then
WriteString(StdErr, 'Fatal error: failed to read the input file "');
WriteString(StdErr, command_line^.input);
WriteString(StdErr, '".');
WriteLine(StdErr);
ExitOnHalt(2)
end;
if IsNoError(source_input) then
lexer_initialize(ADR(lexer), source_input);
transpile(ADR(lexer));
lexer_destroy(ADR(lexer));
Close(source_input)
end
end;
begin
lexer_initialize(ADR(lexer), StdIn);
ExitOnHalt(0);
command_line := parse_command_line();
transpile(ADR(lexer));
lexer_destroy(ADR(lexer))
if command_line <> nil then
compile_from_stream()
end;
if command_line = nil then
ExitOnHalt(1)
end;
HALT()
end Compiler.

View File

@@ -217,7 +217,7 @@ var
index: CARDINAL;
begin
index := 0;
result := TRUE;
result := true;
while (index < Length(Keyword)) & (TokenStart <> TokenEnd) & result DO
result := (Keyword[index] = TokenStart^) or (Lower(Keyword[index]) = TokenStart^);
@@ -409,11 +409,11 @@ begin
end;
if compare_keyword('TRUE', lexer^.Start, lexer^.Current) then
token^.kind := lexerKindBoolean;
token^.booleanKind := TRUE
token^.booleanKind := true
end;
if compare_keyword('FALSE', lexer^.Start, lexer^.Current) then
token^.kind := lexerKindBoolean;
token^.booleanKind := FALSE
token^.booleanKind := false
end
end;

source/Parser.def (new file, 3 additions)

@@ -0,0 +1,3 @@
DEFINITION MODULE Parser;
END Parser.

source/Parser.elna (new file, 3 additions)

@@ -0,0 +1,3 @@
module Parser;
end Parser.

View File

@@ -1,6 +1,16 @@
DEFINITION MODULE Transpiler;
FROM Lexer IMPORT PLexer;
FROM FIO IMPORT File;
FROM Lexer IMPORT PLexer, Lexer;
TYPE
TranspilerContext = RECORD
indentation: CARDINAL;
output: File;
lexer: PLexer
END;
PTranspilerContext = POINTER TO TranspilerContext;
PROCEDURE transpile(ALexer: PLexer);

View File

@@ -1,17 +1,11 @@
implementation module Transpiler;
from FIO import WriteNBytes, StdOut;
from FIO import WriteNBytes, StdOut, WriteLine, WriteChar;
from SYSTEM import ADR, ADDRESS;
from Terminal import Write, WriteLn, WriteString;
from Lexer import Lexer, LexerToken, lexer_current, lexer_lex, LexerKind;
type
PTranspilerContext = ^TranspilerContext;
TranspilerContext = record
Indentation: CARDINAL
end;
(* Calls lexer_lex() but skips the comments. *)
proc transpiler_lex(lexer: PLexer) -> LexerToken;
var
@@ -27,413 +21,403 @@ begin
end;
(* Write a semicolon followed by a newline. *)
proc write_semicolon();
proc write_semicolon(output: File);
begin
WriteString(';');
WriteLn()
WriteChar(output, ';');
WriteLine(output)
end;
proc transpile_import(context: PTranspilerContext, lexer: PLexer);
proc write_current(lexer: PLexer);
var
written_bytes: CARDINAL;
begin
written_bytes := WriteNBytes(StdOut, ADDRESS(lexer^.Current - lexer^.Start), lexer^.Start)
end;
proc transpile_import(context: PTranspilerContext);
var
token: LexerToken;
written_bytes: CARDINAL;
begin
WriteString('FROM ');
token := transpiler_lex(lexer);
token := transpiler_lex(context^.lexer);
written_bytes := WriteNBytes(StdOut, ADDRESS(lexer^.Current - lexer^.Start), lexer^.Start);
token := transpiler_lex(lexer);
write_current(context^.lexer);
token := transpiler_lex(context^.lexer);
WriteString(' IMPORT ');
token := transpiler_lex(lexer);
written_bytes := WriteNBytes(StdOut, ADDRESS(lexer^.Current - lexer^.Start), lexer^.Start);
token := transpiler_lex(context^.lexer);
write_current(context^.lexer);
token := transpiler_lex(lexer);
token := transpiler_lex(context^.lexer);
while token.kind <> lexerKindSemicolon do
WriteString(', ');
token := transpiler_lex(lexer);
written_bytes := WriteNBytes(StdOut, ADDRESS(lexer^.Current - lexer^.Start), lexer^.Start);
token := transpiler_lex(lexer)
token := transpiler_lex(context^.lexer);
write_current(context^.lexer);
token := transpiler_lex(context^.lexer)
end;
write_semicolon();
token := transpiler_lex(lexer)
write_semicolon(context^.output);
token := transpiler_lex(context^.lexer)
end;
proc transpile_import_part(context: PTranspilerContext, lexer: PLexer);
proc transpile_import_part(context: PTranspilerContext);
var
token: LexerToken;
begin
token := lexer_current(lexer);
token := lexer_current(context^.lexer);
while token.kind = lexerKindFrom do
transpile_import(context, lexer);
token := lexer_current(lexer)
transpile_import(context);
token := lexer_current(context^.lexer)
end;
WriteLn()
end;
proc transpile_constant(context: PTranspilerContext, lexer: PLexer);
proc transpile_constant(context: PTranspilerContext);
var
token: LexerToken;
written_bytes: CARDINAL;
begin
WriteString(' ');
token := lexer_current(lexer);
written_bytes := WriteNBytes(StdOut, ADDRESS(lexer^.Current - lexer^.Start), lexer^.Start);
token := lexer_current(context^.lexer);
write_current(context^.lexer);
token := transpiler_lex(lexer);
token := transpiler_lex(context^.lexer);
WriteString(' = ');
token := transpiler_lex(lexer);
written_bytes := WriteNBytes(StdOut, ADDRESS(lexer^.Current - lexer^.Start), lexer^.Start);
token := transpiler_lex(context^.lexer);
write_current(context^.lexer);
token := transpiler_lex(lexer);
write_semicolon()
token := transpiler_lex(context^.lexer);
write_semicolon(context^.output)
end;
proc transpile_constant_part(context: PTranspilerContext, lexer: PLexer) -> BOOLEAN;
proc transpile_constant_part(context: PTranspilerContext) -> BOOLEAN;
var
token: LexerToken;
result: BOOLEAN;
begin
token := lexer_current(lexer);
token := lexer_current(context^.lexer);
result := token.kind = lexerKindConst;
if result then
WriteString('CONST');
WriteLn();
token := transpiler_lex(lexer);
token := transpiler_lex(context^.lexer);
while token.kind = lexerKindIdentifier do
transpile_constant(context, lexer);
token := transpiler_lex(lexer)
transpile_constant(context);
token := transpiler_lex(context^.lexer)
end
end;
return result
end;
proc transpile_module(context: PTranspilerContext, lexer: PLexer);
proc transpile_module(context: PTranspilerContext);
var
token: LexerToken;
written_bytes: CARDINAL;
begin
token := transpiler_lex(lexer);
token := transpiler_lex(context^.lexer);
if token.kind = lexerKindDefinition then
WriteString('DEFINITION ');
token := transpiler_lex(lexer)
token := transpiler_lex(context^.lexer)
end;
if token.kind = lexerKindImplementation then
WriteString('IMPLEMENTATION ');
token := transpiler_lex(lexer)
token := transpiler_lex(context^.lexer)
end;
WriteString('MODULE ');
(* Write the module name and end the line with a semicolon and newline. *)
token := transpiler_lex(lexer);
written_bytes := WriteNBytes(StdOut, ADDRESS(lexer^.Current - lexer^.Start), lexer^.Start);
token := transpiler_lex(context^.lexer);
write_current(context^.lexer);
token := transpiler_lex(lexer);
write_semicolon();
token := transpiler_lex(context^.lexer);
write_semicolon(context^.output);
WriteLn();
(* Write the module body. *)
token := transpiler_lex(lexer);
transpile_import_part(context, lexer);
if transpile_constant_part(context, lexer) then
token := transpiler_lex(context^.lexer);
transpile_import_part(context);
if transpile_constant_part(context) then
WriteLn()
end;
transpile_type_part(context, lexer);
if transpile_variable_part(context, lexer) then
transpile_type_part(context);
if transpile_variable_part(context) then
WriteLn()
end;
transpile_procedure_part(context, lexer);
transpile_statement_part(context, lexer);
transpile_procedure_part(context);
transpile_statement_part(context);
WriteString('END ');
token := transpiler_lex(lexer);
written_bytes := WriteNBytes(StdOut, ADDRESS(lexer^.Current - lexer^.Start), lexer^.Start);
token := transpiler_lex(context^.lexer);
write_current(context^.lexer);
token := transpiler_lex(lexer);
token := transpiler_lex(context^.lexer);
Write('.');
token := transpiler_lex(lexer);
token := transpiler_lex(context^.lexer);
WriteLn()
end;
proc transpile_type_fields(context: PTranspilerContext, lexer: PLexer);
proc transpile_type_fields(context: PTranspilerContext);
var
token: LexerToken;
written_bytes: CARDINAL;
begin
token := transpiler_lex(lexer);
token := transpiler_lex(context^.lexer);
while token.kind <> lexerKindEnd do
WriteString(' ');
written_bytes := WriteNBytes(StdOut, ADDRESS(lexer^.Current - lexer^.Start), lexer^.Start);
token := transpiler_lex(lexer);
write_current(context^.lexer);
token := transpiler_lex(context^.lexer);
WriteString(': ');
transpile_type_expression(context, lexer);
token := transpiler_lex(lexer);
transpile_type_expression(context);
token := transpiler_lex(context^.lexer);
if token.kind = lexerKindSemicolon then
token := transpiler_lex(lexer);
token := transpiler_lex(context^.lexer);
Write(';')
end;
WriteLn()
end
end;
proc transpile_record_type(context: PTranspilerContext, lexer: PLexer);
var
token: LexerToken;
proc transpile_record_type(context: PTranspilerContext);
begin
WriteString('RECORD');
WriteLn();
transpile_type_fields(context, lexer);
transpile_type_fields(context);
WriteString(' END')
end;
proc transpile_pointer_type(context: PTranspilerContext, lexer: PLexer);
proc transpile_pointer_type(context: PTranspilerContext);
var
token: LexerToken;
written_bytes: CARDINAL;
begin
token := lexer_current(lexer);
token := lexer_current(context^.lexer);
WriteString('POINTER TO ');
if token.kind = lexerKindPointer then
token := transpiler_lex(lexer)
token := transpiler_lex(context^.lexer)
end;
transpile_type_expression(context, lexer)
transpile_type_expression(context)
end;
proc transpile_array_type(context: PTranspilerContext, lexer: PLexer);
proc transpile_array_type(context: PTranspilerContext);
var
token: LexerToken;
written_bytes: CARDINAL;
begin
WriteString('ARRAY');
token := lexer_current(lexer);
token := lexer_current(context^.lexer);
if token.kind = lexerKindArray then
token := transpiler_lex(lexer)
token := transpiler_lex(context^.lexer)
end;
if token.kind <> lexerKindOf then
WriteString('[1..');
token := transpiler_lex(lexer);
written_bytes := WriteNBytes(StdOut, ADDRESS(lexer^.Current - lexer^.Start), lexer^.Start);
token := transpiler_lex(lexer);
token := transpiler_lex(context^.lexer);
write_current(context^.lexer);
token := transpiler_lex(context^.lexer);
Write(']')
end;
WriteString(' OF ');
transpile_type_expression(context, lexer)
transpile_type_expression(context)
end;
proc transpile_enumeration_type(context: PTranspilerContext, lexer: PLexer);
proc transpile_enumeration_type(context: PTranspilerContext);
var
token: LexerToken;
written_bytes: CARDINAL;
begin
WriteString('(');
WriteLn();
WriteString(' ');
token := transpiler_lex(lexer);
written_bytes := WriteNBytes(StdOut, ADDRESS(lexer^.Current - lexer^.Start), lexer^.Start);
token := transpiler_lex(context^.lexer);
write_current(context^.lexer);
token := transpiler_lex(lexer);
token := transpiler_lex(context^.lexer);
while token.kind = lexerKindComma do
Write(',');
WriteLn();
WriteString(' ');
token := transpiler_lex(lexer);
written_bytes := WriteNBytes(StdOut, ADDRESS(lexer^.Current - lexer^.Start), lexer^.Start);
token := transpiler_lex(context^.lexer);
write_current(context^.lexer);
token := transpiler_lex(lexer)
token := transpiler_lex(context^.lexer)
end;
WriteLn();
WriteString(' )')
end;
proc transpile_union_type(context: PTranspilerContext, lexer: PLexer);
var
token: LexerToken;
proc transpile_union_type(context: PTranspilerContext);
end;
proc transpile_procedure_type(context: PTranspilerContext, lexer: PLexer);
proc transpile_procedure_type(context: PTranspilerContext);
var
token: LexerToken;
written_bytes: CARDINAL;
begin
token := transpiler_lex(lexer);
token := transpiler_lex(context^.lexer);
WriteString('PROCEDURE(');
token := transpiler_lex(lexer);
token := transpiler_lex(context^.lexer);
while token.kind <> lexerKindRightParen do
written_bytes := WriteNBytes(StdOut, ADDRESS(lexer^.Current - lexer^.Start), lexer^.Start);
write_current(context^.lexer);
token := transpiler_lex(lexer);
token := transpiler_lex(context^.lexer);
if token.kind = lexerKindComma then
token := transpiler_lex(lexer);
token := transpiler_lex(context^.lexer);
WriteString(', ')
end
end;
Write(')')
end;
proc transpile_type_expression(context: PTranspilerContext, lexer: PLexer);
proc transpile_type_expression(context: PTranspilerContext);
var
token: LexerToken;
written_bytes: CARDINAL;
begin
token := transpiler_lex(lexer);
token := transpiler_lex(context^.lexer);
if token.kind = lexerKindRecord then
transpile_record_type(context, lexer)
transpile_record_type(context)
end;
if token.kind = lexerKindLeftParen then
transpile_enumeration_type(context, lexer)
transpile_enumeration_type(context)
end;
if (token.kind = lexerKindArray) or (token.kind = lexerKindLeftSquare) then
transpile_array_type(context, lexer)
transpile_array_type(context)
end;
if token.kind = lexerKindHat then
transpile_pointer_type(context, lexer)
transpile_pointer_type(context)
end;
if token.kind = lexerKindProc then
transpile_procedure_type(context, lexer)
transpile_procedure_type(context)
end;
if token.kind = lexerKindIdentifier then
written_bytes := WriteNBytes(StdOut, ADDRESS(lexer^.Current - lexer^.Start), lexer^.Start)
write_current(context^.lexer)
end
end;
proc transpile_type_declaration(context: PTranspilerContext, lexer: PLexer);
proc transpile_type_declaration(context: PTranspilerContext);
var
token: LexerToken;
written_bytes: CARDINAL;
begin
WriteString(' ');
token := lexer_current(lexer);
written_bytes := WriteNBytes(StdOut, ADDRESS(lexer^.Current - lexer^.Start), lexer^.Start);
token := lexer_current(context^.lexer);
write_current(context^.lexer);
token := transpiler_lex(lexer);
token := transpiler_lex(context^.lexer);
WriteString(' = ');
transpile_type_expression(context, lexer);
token := transpiler_lex(lexer);
write_semicolon();
transpile_type_expression(context);
token := transpiler_lex(context^.lexer);
write_semicolon(context^.output);
end;
proc transpile_type_part(context: PTranspilerContext, lexer: PLexer);
proc transpile_type_part(context: PTranspilerContext);
var
token: LexerToken;
begin
token := lexer_current(lexer);
token := lexer_current(context^.lexer);
if token.kind = lexerKindType then
WriteString('TYPE');
WriteLn();
token := transpiler_lex(lexer);
token := transpiler_lex(context^.lexer);
while token.kind = lexerKindIdentifier do
transpile_type_declaration(context, lexer);
token := transpiler_lex(lexer)
transpile_type_declaration(context);
token := transpiler_lex(context^.lexer)
end;
WriteLn()
end
end;
proc transpile_variable_declaration(context: PTranspilerContext, lexer: PLexer);
proc transpile_variable_declaration(context: PTranspilerContext);
var
token: LexerToken;
written_bytes: CARDINAL;
begin
WriteString(' ');
token := lexer_current(lexer);
written_bytes := WriteNBytes(StdOut, ADDRESS(lexer^.Current - lexer^.Start), lexer^.Start);
token := lexer_current(context^.lexer);
write_current(context^.lexer);
token := transpiler_lex(lexer);
token := transpiler_lex(context^.lexer);
WriteString(': ');
transpile_type_expression(context, lexer);
token := transpiler_lex(lexer);
write_semicolon()
transpile_type_expression(context);
token := transpiler_lex(context^.lexer);
write_semicolon(context^.output)
end;
proc transpile_variable_part(context: PTranspilerContext, lexer: PLexer) -> BOOLEAN;
proc transpile_variable_part(context: PTranspilerContext) -> BOOLEAN;
var
token: LexerToken;
result: BOOLEAN;
begin
token := lexer_current(lexer);
token := lexer_current(context^.lexer);
result := token.kind = lexerKindVar;
if result then
WriteString('VAR');
WriteLn();
token := transpiler_lex(lexer);
token := transpiler_lex(context^.lexer);
while token.kind = lexerKindIdentifier do
transpile_variable_declaration(context, lexer);
token := transpiler_lex(lexer)
transpile_variable_declaration(context);
token := transpiler_lex(context^.lexer)
end
end;
return result
end;
proc transpile_procedure_heading(context: PTranspilerContext, lexer: PLexer) -> LexerToken;
proc transpile_procedure_heading(context: PTranspilerContext) -> LexerToken;
var
token: LexerToken;
result: LexerToken;
written_bytes: CARDINAL;
begin
WriteString('PROCEDURE ');
result := transpiler_lex(lexer);
written_bytes := WriteNBytes(StdOut, ADDRESS(lexer^.Current - lexer^.Start), lexer^.Start);
result := transpiler_lex(context^.lexer);
write_current(context^.lexer);
token := transpiler_lex(lexer);
token := transpiler_lex(context^.lexer);
Write('(');
token := transpiler_lex(lexer);
token := transpiler_lex(context^.lexer);
while token.kind <> lexerKindRightParen do
written_bytes := WriteNBytes(StdOut, ADDRESS(lexer^.Current - lexer^.Start), lexer^.Start);
write_current(context^.lexer);
token := transpiler_lex(lexer);
token := transpiler_lex(context^.lexer);
WriteString(': ');
transpile_type_expression(context, lexer);
transpile_type_expression(context);
token := transpiler_lex(lexer);
token := transpiler_lex(context^.lexer);
if (token.kind = lexerKindSemicolon) or (token.kind = lexerKindComma) then
WriteString('; ');
token := transpiler_lex(lexer)
token := transpiler_lex(context^.lexer)
end
end;
WriteString(')');
token := transpiler_lex(lexer);
token := transpiler_lex(context^.lexer);
(* Check for the return type and write it. *)
if token.kind = lexerKindArrow then
WriteString(': ');
token := transpiler_lex(lexer);
written_bytes := WriteNBytes(StdOut, ADDRESS(lexer^.Current - lexer^.Start), lexer^.Start);
token := transpiler_lex(lexer)
token := transpiler_lex(context^.lexer);
write_current(context^.lexer);
token := transpiler_lex(context^.lexer)
end;
token := transpiler_lex(lexer);
write_semicolon();
token := transpiler_lex(context^.lexer);
write_semicolon(context^.output);
return result
end;
proc transpile_expression(context: PTranspilerContext, lexer: PLexer, trailing_token: LexerKind);
proc transpile_expression(context: PTranspilerContext, trailing_token: LexerKind);
var
token: LexerToken;
written_bytes: CARDINAL;
begin
token := transpiler_lex(lexer);
token := transpiler_lex(context^.lexer);
while (token.kind <> trailing_token) & (token.kind <> lexerKindEnd) do
written_bytes := 0;
@@ -441,6 +425,14 @@ begin
WriteString('NIL ');
written_bytes := 1
end;
if (token.kind = lexerKindBoolean) & token.booleanKind then
WriteString('TRUE ');
written_bytes := 1
end;
if (token.kind = lexerKindBoolean) & (~token.booleanKind) then
WriteString('FALSE ');
written_bytes := 1
end;
if token.kind = lexerKindOr then
WriteString('OR ');
written_bytes := 1
@@ -454,157 +446,151 @@ begin
written_bytes := 1
end;
if written_bytes = 0 then
written_bytes := WriteNBytes(StdOut, ADDRESS(lexer^.Current - lexer^.Start), lexer^.Start);
write_current(context^.lexer);
Write(' ')
end;
token := transpiler_lex(lexer)
token := transpiler_lex(context^.lexer)
end
end;
proc transpile_if_statement(context: PTranspilerContext, lexer: PLexer);
proc transpile_if_statement(context: PTranspilerContext);
var
token: LexerToken;
written_bytes: CARDINAL;
begin
WriteString(' IF ');
transpile_expression(context, lexer, lexerKindThen);
transpile_expression(context, lexerKindThen);
WriteString('THEN');
WriteLn();
transpile_statements(context, lexer);
transpile_statements(context);
WriteString(' END');
token := transpiler_lex(lexer)
token := transpiler_lex(context^.lexer)
end;
proc transpile_while_statement(context: PTranspilerContext, lexer: PLexer);
proc transpile_while_statement(context: PTranspilerContext);
var
token: LexerToken;
written_bytes: CARDINAL;
begin
WriteString(' WHILE ');
transpile_expression(context, lexer, lexerKindDo);
transpile_expression(context, lexerKindDo);
WriteString('DO');
WriteLn();
transpile_statements(context, lexer);
transpile_statements(context);
WriteString(' END');
token := transpiler_lex(lexer)
token := transpiler_lex(context^.lexer)
end;
proc transpile_assignment_statement(context: PTranspilerContext, lexer: PLexer);
proc transpile_assignment_statement(context: PTranspilerContext);
begin
WriteString(' := ');
transpile_expression(context, lexer, lexerKindSemicolon);
transpile_expression(context, lexerKindSemicolon);
end;
proc transpile_call_statement(context: PTranspilerContext, lexer: PLexer);
proc transpile_call_statement(context: PTranspilerContext);
var
token: LexerToken;
written_bytes: CARDINAL;
begin
WriteString('(');
token := transpiler_lex(lexer);
token := transpiler_lex(context^.lexer);
while (token.kind <> lexerKindSemicolon) & (token.kind <> lexerKindEnd) do
written_bytes := WriteNBytes(StdOut, ADDRESS(lexer^.Current - lexer^.Start), lexer^.Start);
token := transpiler_lex(lexer)
write_current(context^.lexer);
token := transpiler_lex(context^.lexer)
end
end;
proc transpile_designator_expression(context: PTranspilerContext, lexer: PLexer);
proc transpile_designator_expression(context: PTranspilerContext);
var
token: LexerToken;
written_bytes: CARDINAL;
begin
WriteString(' ');
written_bytes := WriteNBytes(StdOut, ADDRESS(lexer^.Current - lexer^.Start), lexer^.Start);
token := transpiler_lex(lexer);
write_current(context^.lexer);
token := transpiler_lex(context^.lexer);
while token.kind = lexerKindLeftSquare do
Write('[');
token := transpiler_lex(lexer);
token := transpiler_lex(context^.lexer);
while token.kind <> lexerKindRightSquare do
written_bytes := WriteNBytes(StdOut, ADDRESS(lexer^.Current - lexer^.Start), lexer^.Start);
token := transpiler_lex(lexer)
write_current(context^.lexer);
token := transpiler_lex(context^.lexer)
end;
Write(']');
token := transpiler_lex(lexer)
token := transpiler_lex(context^.lexer)
end;
if token.kind = lexerKindHat then
Write('^');
token := transpiler_lex(lexer)
token := transpiler_lex(context^.lexer)
end;
if token.kind = lexerKindDot then
Write('.');
token := transpiler_lex(lexer);
written_bytes := WriteNBytes(StdOut, ADDRESS(lexer^.Current - lexer^.Start), lexer^.Start);
token := transpiler_lex(lexer)
token := transpiler_lex(context^.lexer);
write_current(context^.lexer);
token := transpiler_lex(context^.lexer)
end;
if token.kind = lexerKindHat then
Write('^');
token := transpiler_lex(lexer)
token := transpiler_lex(context^.lexer)
end;
while token.kind = lexerKindLeftSquare do
Write('[');
token := transpiler_lex(lexer);
token := transpiler_lex(context^.lexer);
while token.kind <> lexerKindRightSquare do
written_bytes := WriteNBytes(StdOut, ADDRESS(lexer^.Current - lexer^.Start), lexer^.Start);
token := transpiler_lex(lexer)
write_current(context^.lexer);
token := transpiler_lex(context^.lexer)
end;
Write(']');
token := transpiler_lex(lexer)
token := transpiler_lex(context^.lexer)
end
end;
proc transpile_return_statement(context: PTranspilerContext, lexer: PLexer);
proc transpile_return_statement(context: PTranspilerContext);
var
token: LexerToken;
written_bytes: CARDINAL;
begin
WriteString(' RETURN ');
token := transpiler_lex(lexer);
written_bytes := WriteNBytes(StdOut, ADDRESS(lexer^.Current - lexer^.Start), lexer^.Start);
token := transpiler_lex(lexer)
token := transpiler_lex(context^.lexer);
write_current(context^.lexer);
token := transpiler_lex(context^.lexer)
end;
proc transpile_statement(context: PTranspilerContext, lexer: PLexer);
proc transpile_statement(context: PTranspilerContext);
var
token: LexerToken;
written_bytes: CARDINAL;
begin
token := transpiler_lex(lexer);
token := transpiler_lex(context^.lexer);
if token.kind = lexerKindIf then
transpile_if_statement(context, lexer)
transpile_if_statement(context)
end;
if token.kind = lexerKindWhile then
transpile_while_statement(context, lexer)
transpile_while_statement(context)
end;
if token.kind = lexerKindReturn then
transpile_return_statement(context, lexer)
transpile_return_statement(context)
end;
if token.kind = lexerKindIdentifier then
transpile_designator_expression(context, lexer);
token := lexer_current(lexer);
transpile_designator_expression(context);
token := lexer_current(context^.lexer);
if token.kind = lexerKindAssignment then
transpile_assignment_statement(context, lexer)
transpile_assignment_statement(context)
end;
if token.kind = lexerKindLeftParen then
transpile_call_statement(context, lexer)
transpile_call_statement(context)
end
end
end;
proc transpile_statements(context: PTranspilerContext, lexer: PLexer);
proc transpile_statements(context: PTranspilerContext);
var
token: LexerToken;
begin
token := lexer_current(lexer);
token := lexer_current(context^.lexer);
while token.kind <> lexerKindEnd do
transpile_statement(context, lexer);
token := lexer_current(lexer);
transpile_statement(context);
token := lexer_current(context^.lexer);
if token.kind = lexerKindSemicolon then
Write(';')
@@ -613,46 +599,46 @@ begin
end
end;
proc transpile_statement_part(context: PTranspilerContext, lexer: PLexer);
proc transpile_statement_part(context: PTranspilerContext);
var
token: LexerToken;
begin
token := lexer_current(lexer);
token := lexer_current(context^.lexer);
if token.kind = lexerKindBegin then
WriteString('BEGIN');
WriteLn();
transpile_statements(context, lexer)
transpile_statements(context)
end
end;
proc transpile_procedure_declaration(context: PTranspilerContext, lexer: PLexer);
proc transpile_procedure_declaration(context: PTranspilerContext);
var
token: LexerToken;
seen_part: BOOLEAN;
written_bytes: CARDINAL;
begin
token := transpile_procedure_heading(context, lexer);
seen_part := transpile_constant_part(context, lexer);
seen_part := transpile_variable_part(context, lexer);
transpile_statement_part(context, lexer);
token := transpile_procedure_heading(context);
seen_part := transpile_constant_part(context);
seen_part := transpile_variable_part(context);
transpile_statement_part(context);
WriteString('END ');
written_bytes := WriteNBytes(StdOut, ORD(token.identifierKind[1]), ADR(token.identifierKind[2]));
token := transpiler_lex(lexer);
write_semicolon();
token := transpiler_lex(lexer)
token := transpiler_lex(context^.lexer);
write_semicolon(context^.output);
token := transpiler_lex(context^.lexer)
end;
proc transpile_procedure_part(context: PTranspilerContext, lexer: PLexer);
proc transpile_procedure_part(context: PTranspilerContext);
var
token: LexerToken;
begin
token := lexer_current(lexer);
token := lexer_current(context^.lexer);
while token.kind = lexerKindProc do
transpile_procedure_declaration(context, lexer);
token := lexer_current(lexer);
transpile_procedure_declaration(context);
token := lexer_current(context^.lexer);
WriteLn()
end
end;
@@ -663,7 +649,11 @@ var
written_bytes: CARDINAL;
context: TranspilerContext;
begin
transpile_module(ADR(context), lexer)
context.indentation := 0;
context.output := StdOut;
context.lexer := lexer;
transpile_module(ADR(context))
end;
end Transpiler.