Add a command line parsing procedure
parent 6e415e474f
commit edb8ce37d9

Rakefile (17 changed lines)
@@ -44,11 +44,12 @@ end
.partition { |f| f.end_with? '.elna' }

File.open t.name, 'w' do |output|
-puts
-puts(compiler * ' ')
+compiler_command = compiler + sources

-Open3.popen2(*compiler) do |cl_in, cl_out|
-cl_in.write File.read(*sources)
+puts
+puts(compiler_command * ' ')
+
+Open3.popen2(*compiler_command) do |cl_in, cl_out|
cl_in.close

IO.copy_stream cl_out, output
@@ -87,11 +88,11 @@ task default: 'source/Compiler.elna'
task :default do |t|
exe, previous_output, source = t.prerequisites

-cat_arguments = ['cat', source]
+exe_arguments = [exe, source]
diff_arguments = ['diff', '-Nur', '--text', previous_output, '-']

-puts [cat_arguments * ' ', exe, diff_arguments * ' '].join(' | ')
-Open3.pipeline(cat_arguments, exe, diff_arguments)
+puts [exe, diff_arguments * ' '].join(' | ')
+Open3.pipeline exe_arguments, diff_arguments
end

task :backport do
@@ -104,7 +105,7 @@ task :backport do
source
.gsub(/^(var|type|const|begin)/) { |match| match.upcase }
.gsub(/^[[:alnum:]]* ?module/) { |match| match.upcase }
-.gsub(/\b(record|nil|or)\b/) { |match| match.upcase }
+.gsub(/\b(record|nil|or|false|true)\b/) { |match| match.upcase }
.gsub(/proc\(/, 'PROCEDURE(')
.gsub(/ & /, ' AND ')
.gsub(/ -> /, ': ')
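For reference, a minimal Ruby sketch (not part of this commit) of what the reworked :default task now does: the built compiler is run with the source file as an argument and its output is piped straight into diff, instead of cat-ing the source into the compiler's stdin. The executable and file paths below are placeholders, not the Rakefile's actual values.

require 'open3'

exe = './build/Compiler'           # placeholder path to the built transpiler
source = 'source/Lexer.elna'       # placeholder input file
previous_output = 'boot/Lexer.mod' # placeholder reference output

exe_arguments = [exe, source]
diff_arguments = ['diff', '-Nur', '--text', previous_output, '-']

# Open3.pipeline wires the commands together like a shell pipe:
# the compiler's stdout becomes diff's stdin.
statuses = Open3.pipeline(exe_arguments, diff_arguments)
puts statuses.map(&:exitstatus).inspect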
source/CommandLineInterface.def (new file, 15 lines)
@@ -0,0 +1,15 @@
DEFINITION MODULE CommandLineInterface;

FROM Common IMPORT ShortString;

TYPE
CommandLine = RECORD
input: ShortString;
lex: BOOLEAN;
parse: BOOLEAN
END;
PCommandLine = POINTER TO CommandLine;

PROCEDURE parse_command_line(): PCommandLine;

END CommandLineInterface.
source/CommandLineInterface.elna (new file, 75 lines)
@@ -0,0 +1,75 @@
implementation module CommandLineInterface;

from SYSTEM import ADR, TSIZE;

from Args import GetArg, Narg;
from FIO import WriteString, WriteChar, WriteLine, StdErr;
from Storage import ALLOCATE;
from Strings import CompareStr, Length;
from MemUtils import MemZero;

from Common import ShortString;

proc parse_command_line() -> PCommandLine;
var
parameter: ShortString;
i: CARDINAL;
result: PCommandLine;
parsed: BOOLEAN;
begin
i := 1;
ALLOCATE(result, TSIZE(CommandLine));
result^.lex := false;
result^.parse := false;
MemZero(ADR(result^.input), 256);

while (i < Narg()) & (result <> nil) do
parsed := GetArg(parameter, i);
parsed := false;

if CompareStr(parameter, '--lex') = 0 then
parsed := true;
result^.lex := true
end;
if CompareStr(parameter, '--parse') = 0 then
parsed := true;
result^.parse := true
end;
if parameter[0] <> '-' then
parsed := true;

if Length(result^.input) > 0 then
WriteString(StdErr, 'Fatal error: only one source file can be compiled at once. First given "');
WriteString(StdErr, result^.input);
WriteString(StdErr, '", then "');
WriteString(StdErr, parameter);
WriteString(StdErr, '".');
WriteLine(StdErr);
result := nil
end;
if result <> nil then
result^.input := parameter
end
end;
if parsed = false then
WriteString(StdErr, 'Fatal error: unknown command line options: ');

WriteString(StdErr, parameter);
WriteChar(StdErr, '.');
WriteLine(StdErr);

result := nil
end;

i := i + 1
end;
if (result <> nil) & (Length(result^.input) = 0) then
WriteString(StdErr, 'Fatal error: no input files.');
WriteLine(StdErr);
result := nil
end;

return result
end;

end CommandLineInterface.
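The new procedure accepts at most one input file plus the optional --lex and --parse switches; a second input file, an unknown option, or a missing input is reported as a fatal error and NIL is returned. A rough Ruby restatement of those rules, for reference only (the authoritative version is the Elna code above):

def parse_command_line(argv)
  options = { input: nil, lex: false, parse: false }

  argv.each do |parameter|
    case parameter
    when '--lex'   then options[:lex] = true
    when '--parse' then options[:parse] = true
    when /\A[^-]/  # anything not starting with '-' is treated as the input file
      if options[:input]
        warn %(Fatal error: only one source file can be compiled at once. First given "#{options[:input]}", then "#{parameter}".)
        return nil
      end
      options[:input] = parameter
    else
      warn "Fatal error: unknown command line options: #{parameter}."
      return nil
    end
  end

  if options[:input].nil?
    warn 'Fatal error: no input files.'
    return nil
  end
  options
end

# Example: parse_command_line(['--lex', 'source/Lexer.elna'])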
source/Common.def (new file, 6 lines)
@@ -0,0 +1,6 @@
DEFINITION MODULE Common;

TYPE
ShortString = ARRAY[0..255] OF CHAR;

END Common.
source/Common.elna (new file, 3 lines)
@@ -0,0 +1,3 @@
implementation module Common;

end Common.
source/Compiler.elna
@@ -1,18 +1,51 @@
module Compiler;

-from FIO import StdIn;
+from FIO import Close, IsNoError, File, OpenToRead, StdErr, WriteLine, WriteString;
from SYSTEM import ADR;
+from M2RTS import HALT, ExitOnHalt;

from Lexer import Lexer, lexer_destroy, lexer_initialize;
from Transpiler import transpile;
+from CommandLineInterface import PCommandLine, parse_command_line;

+var
+command_line: PCommandLine;
+
+proc compile_from_stream();
var
lexer: Lexer;
+source_input: File;
+begin
+source_input := OpenToRead(command_line^.input);

+if IsNoError(source_input) = false then
+WriteString(StdErr, 'Fatal error: failed to read the input file "');
+WriteString(StdErr, command_line^.input);
+WriteString(StdErr, '".');
+WriteLine(StdErr);
+
+ExitOnHalt(2)
+end;
+if IsNoError(source_input) then
+lexer_initialize(ADR(lexer), source_input);
+
+transpile(ADR(lexer));
+
+lexer_destroy(ADR(lexer));
+
+Close(source_input)
+end
+end;
+
begin
-lexer_initialize(ADR(lexer), StdIn);
-
-transpile(ADR(lexer));
-
-lexer_destroy(ADR(lexer))
+ExitOnHalt(0);
+command_line := parse_command_line();
+
+if command_line <> nil then
+compile_from_stream()
+end;
+if command_line = nil then
+ExitOnHalt(1)
+end;
+HALT()
end Compiler.
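Assuming the usual GNU Modula-2 M2RTS semantics of ExitOnHalt/HALT, the program now exits with 0 on success, 1 when the command line cannot be parsed, and 2 when the input file cannot be opened. A hypothetical Ruby check of that behaviour (binary path and source name are placeholders):

system('./build/Compiler', '--lex', 'source/Lexer.elna')
case $?.exitstatus
when 0 then puts 'transpiled'
when 1 then puts 'bad command line'
when 2 then puts 'input file could not be read'
end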
source/Lexer.elna
@@ -217,7 +217,7 @@ var
index: CARDINAL;
begin
index := 0;
-result := TRUE;
+result := true;

while (index < Length(Keyword)) & (TokenStart <> TokenEnd) & result DO
result := (Keyword[index] = TokenStart^) or (Lower(Keyword[index]) = TokenStart^);
@@ -409,11 +409,11 @@ begin
end;
if compare_keyword('TRUE', lexer^.Start, lexer^.Current) then
token^.kind := lexerKindBoolean;
-token^.booleanKind := TRUE
+token^.booleanKind := true
end;
if compare_keyword('FALSE', lexer^.Start, lexer^.Current) then
token^.kind := lexerKindBoolean;
-token^.booleanKind := FALSE
+token^.booleanKind := false
end
end;
source/Parser.def (new file, 3 lines)
@@ -0,0 +1,3 @@
DEFINITION MODULE Parser;

END Parser.
source/Parser.elna (new file, 3 lines)
@@ -0,0 +1,3 @@
module Parser;

end Parser.
source/Transpiler.def
@@ -1,6 +1,16 @@
DEFINITION MODULE Transpiler;

-FROM Lexer IMPORT PLexer;
+FROM FIO IMPORT File;

+FROM Lexer IMPORT PLexer, Lexer;
+
+TYPE
+TranspilerContext = RECORD
+indentation: CARDINAL;
+output: File;
+lexer: PLexer
+END;
+PTranspilerContext = POINTER TO TranspilerContext;
+
PROCEDURE transpile(ALexer: PLexer);
source/Transpiler.elna
@@ -1,17 +1,11 @@
implementation module Transpiler;

-from FIO import WriteNBytes, StdOut;
+from FIO import WriteNBytes, StdOut, WriteLine, WriteChar;
from SYSTEM import ADR, ADDRESS;

from Terminal import Write, WriteLn, WriteString;
from Lexer import Lexer, LexerToken, lexer_current, lexer_lex, LexerKind;

-type
-PTranspilerContext = ^TranspilerContext;
-TranspilerContext = record
-Indentation: CARDINAL
-end;
-
(* Calls lexer_lex() but skips the comments. *)
proc transpiler_lex(lexer: PLexer) -> LexerToken;
var
|
|||||||
end;
|
end;
|
||||||
|
|
||||||
(* Write a semicolon followed by a newline. *)
|
(* Write a semicolon followed by a newline. *)
|
||||||
proc write_semicolon();
|
proc write_semicolon(output: File);
|
||||||
begin
|
begin
|
||||||
WriteString(';');
|
WriteChar(output, ';');
|
||||||
WriteLn()
|
WriteLine(output)
|
||||||
end;
|
end;
|
||||||
|
|
||||||
proc transpile_import(context: PTranspilerContext, lexer: PLexer);
|
proc write_current(lexer: PLexer);
|
||||||
|
var
|
||||||
|
written_bytes: CARDINAL;
|
||||||
|
begin
|
||||||
|
written_bytes := WriteNBytes(StdOut, ADDRESS(lexer^.Current - lexer^.Start), lexer^.Start)
|
||||||
|
end;
|
||||||
|
|
||||||
|
proc transpile_import(context: PTranspilerContext);
|
||||||
var
|
var
|
||||||
token: LexerToken;
|
token: LexerToken;
|
||||||
written_bytes: CARDINAL;
|
|
||||||
begin
|
begin
|
||||||
WriteString('FROM ');
|
WriteString('FROM ');
|
||||||
token := transpiler_lex(lexer);
|
token := transpiler_lex(context^.lexer);
|
||||||
|
|
||||||
written_bytes := WriteNBytes(StdOut, ADDRESS(lexer^.Current - lexer^.Start), lexer^.Start);
|
write_current(context^.lexer);
|
||||||
|
token := transpiler_lex(context^.lexer);
|
||||||
token := transpiler_lex(lexer);
|
|
||||||
WriteString(' IMPORT ');
|
WriteString(' IMPORT ');
|
||||||
|
|
||||||
token := transpiler_lex(lexer);
|
token := transpiler_lex(context^.lexer);
|
||||||
written_bytes := WriteNBytes(StdOut, ADDRESS(lexer^.Current - lexer^.Start), lexer^.Start);
|
write_current(context^.lexer);
|
||||||
|
|
||||||
token := transpiler_lex(lexer);
|
token := transpiler_lex(context^.lexer);
|
||||||
while token.kind <> lexerKindSemicolon do
|
while token.kind <> lexerKindSemicolon do
|
||||||
WriteString(', ');
|
WriteString(', ');
|
||||||
token := transpiler_lex(lexer);
|
token := transpiler_lex(context^.lexer);
|
||||||
written_bytes := WriteNBytes(StdOut, ADDRESS(lexer^.Current - lexer^.Start), lexer^.Start);
|
write_current(context^.lexer);
|
||||||
token := transpiler_lex(lexer)
|
token := transpiler_lex(context^.lexer)
|
||||||
end;
|
end;
|
||||||
write_semicolon();
|
write_semicolon(context^.output);
|
||||||
token := transpiler_lex(lexer)
|
token := transpiler_lex(context^.lexer)
|
||||||
end;
|
end;
|
||||||
|
|
||||||
proc transpile_import_part(context: PTranspilerContext, lexer: PLexer);
|
proc transpile_import_part(context: PTranspilerContext);
|
||||||
var
|
var
|
||||||
token: LexerToken;
|
token: LexerToken;
|
||||||
begin
|
begin
|
||||||
token := lexer_current(lexer);
|
token := lexer_current(context^.lexer);
|
||||||
|
|
||||||
while token.kind = lexerKindFrom do
|
while token.kind = lexerKindFrom do
|
||||||
transpile_import(context, lexer);
|
transpile_import(context);
|
||||||
token := lexer_current(lexer)
|
token := lexer_current(context^.lexer)
|
||||||
end;
|
end;
|
||||||
WriteLn()
|
WriteLn()
|
||||||
end;
|
end;
|
||||||
|
|
||||||
proc transpile_constant(context: PTranspilerContext, lexer: PLexer);
|
proc transpile_constant(context: PTranspilerContext);
|
||||||
var
|
var
|
||||||
token: LexerToken;
|
token: LexerToken;
|
||||||
written_bytes: CARDINAL;
|
|
||||||
begin
|
begin
|
||||||
WriteString(' ');
|
WriteString(' ');
|
||||||
token := lexer_current(lexer);
|
token := lexer_current(context^.lexer);
|
||||||
written_bytes := WriteNBytes(StdOut, ADDRESS(lexer^.Current - lexer^.Start), lexer^.Start);
|
write_current(context^.lexer);
|
||||||
|
|
||||||
token := transpiler_lex(lexer);
|
token := transpiler_lex(context^.lexer);
|
||||||
WriteString(' = ');
|
WriteString(' = ');
|
||||||
|
|
||||||
token := transpiler_lex(lexer);
|
token := transpiler_lex(context^.lexer);
|
||||||
written_bytes := WriteNBytes(StdOut, ADDRESS(lexer^.Current - lexer^.Start), lexer^.Start);
|
write_current(context^.lexer);
|
||||||
|
|
||||||
token := transpiler_lex(lexer);
|
token := transpiler_lex(context^.lexer);
|
||||||
write_semicolon()
|
write_semicolon(context^.output)
|
||||||
end;
|
end;
|
||||||
|
|
||||||
proc transpile_constant_part(context: PTranspilerContext, lexer: PLexer) -> BOOLEAN;
|
proc transpile_constant_part(context: PTranspilerContext) -> BOOLEAN;
|
||||||
var
|
var
|
||||||
token: LexerToken;
|
token: LexerToken;
|
||||||
result: BOOLEAN;
|
result: BOOLEAN;
|
||||||
begin
|
begin
|
||||||
token := lexer_current(lexer);
|
token := lexer_current(context^.lexer);
|
||||||
result := token.kind = lexerKindConst;
|
result := token.kind = lexerKindConst;
|
||||||
|
|
||||||
if result then
|
if result then
|
||||||
WriteString('CONST');
|
WriteString('CONST');
|
||||||
WriteLn();
|
WriteLn();
|
||||||
token := transpiler_lex(lexer);
|
token := transpiler_lex(context^.lexer);
|
||||||
|
|
||||||
while token.kind = lexerKindIdentifier do
|
while token.kind = lexerKindIdentifier do
|
||||||
transpile_constant(context, lexer);
|
transpile_constant(context);
|
||||||
token := transpiler_lex(lexer)
|
token := transpiler_lex(context^.lexer)
|
||||||
end
|
end
|
||||||
end;
|
end;
|
||||||
return result
|
return result
|
||||||
end;
|
end;
|
||||||
|
|
||||||
proc transpile_module(context: PTranspilerContext, lexer: PLexer);
|
proc transpile_module(context: PTranspilerContext);
|
||||||
var
|
var
|
||||||
token: LexerToken;
|
token: LexerToken;
|
||||||
written_bytes: CARDINAL;
|
|
||||||
begin
|
begin
|
||||||
token := transpiler_lex(lexer);
|
token := transpiler_lex(context^.lexer);
|
||||||
|
|
||||||
if token.kind = lexerKindDefinition then
|
if token.kind = lexerKindDefinition then
|
||||||
WriteString('DEFINITION ');
|
WriteString('DEFINITION ');
|
||||||
token := transpiler_lex(lexer)
|
token := transpiler_lex(context^.lexer)
|
||||||
end;
|
end;
|
||||||
if token.kind = lexerKindImplementation then
|
if token.kind = lexerKindImplementation then
|
||||||
WriteString('IMPLEMENTATION ');
|
WriteString('IMPLEMENTATION ');
|
||||||
token := transpiler_lex(lexer)
|
token := transpiler_lex(context^.lexer)
|
||||||
end;
|
end;
|
||||||
WriteString('MODULE ');
|
WriteString('MODULE ');
|
||||||
|
|
||||||
(* Write the module name and end the line with a semicolon and newline. *)
|
(* Write the module name and end the line with a semicolon and newline. *)
|
||||||
token := transpiler_lex(lexer);
|
token := transpiler_lex(context^.lexer);
|
||||||
written_bytes := WriteNBytes(StdOut, ADDRESS(lexer^.Current - lexer^.Start), lexer^.Start);
|
write_current(context^.lexer);
|
||||||
|
|
||||||
token := transpiler_lex(lexer);
|
token := transpiler_lex(context^.lexer);
|
||||||
write_semicolon();
|
write_semicolon(context^.output);
|
||||||
WriteLn();
|
WriteLn();
|
||||||
|
|
||||||
(* Write the module body. *)
|
(* Write the module body. *)
|
||||||
token := transpiler_lex(lexer);
|
token := transpiler_lex(context^.lexer);
|
||||||
transpile_import_part(context, lexer);
|
transpile_import_part(context);
|
||||||
if transpile_constant_part(context, lexer) then
|
if transpile_constant_part(context) then
|
||||||
WriteLn()
|
WriteLn()
|
||||||
end;
|
end;
|
||||||
transpile_type_part(context, lexer);
|
transpile_type_part(context);
|
||||||
if transpile_variable_part(context, lexer) then
|
if transpile_variable_part(context) then
|
||||||
WriteLn()
|
WriteLn()
|
||||||
end;
|
end;
|
||||||
transpile_procedure_part(context, lexer);
|
transpile_procedure_part(context);
|
||||||
transpile_statement_part(context, lexer);
|
transpile_statement_part(context);
|
||||||
|
|
||||||
WriteString('END ');
|
WriteString('END ');
|
||||||
|
|
||||||
token := transpiler_lex(lexer);
|
token := transpiler_lex(context^.lexer);
|
||||||
written_bytes := WriteNBytes(StdOut, ADDRESS(lexer^.Current - lexer^.Start), lexer^.Start);
|
write_current(context^.lexer);
|
||||||
|
|
||||||
token := transpiler_lex(lexer);
|
token := transpiler_lex(context^.lexer);
|
||||||
Write('.');
|
Write('.');
|
||||||
|
|
||||||
token := transpiler_lex(lexer);
|
token := transpiler_lex(context^.lexer);
|
||||||
WriteLn()
|
WriteLn()
|
||||||
end;
|
end;
|
||||||
|
|
||||||
proc transpile_type_fields(context: PTranspilerContext, lexer: PLexer);
|
proc transpile_type_fields(context: PTranspilerContext);
|
||||||
var
|
var
|
||||||
token: LexerToken;
|
token: LexerToken;
|
||||||
written_bytes: CARDINAL;
|
|
||||||
begin
|
begin
|
||||||
token := transpiler_lex(lexer);
|
token := transpiler_lex(context^.lexer);
|
||||||
|
|
||||||
while token.kind <> lexerKindEnd do
|
while token.kind <> lexerKindEnd do
|
||||||
WriteString(' ');
|
WriteString(' ');
|
||||||
written_bytes := WriteNBytes(StdOut, ADDRESS(lexer^.Current - lexer^.Start), lexer^.Start);
|
write_current(context^.lexer);
|
||||||
token := transpiler_lex(lexer);
|
token := transpiler_lex(context^.lexer);
|
||||||
WriteString(': ');
|
WriteString(': ');
|
||||||
transpile_type_expression(context, lexer);
|
transpile_type_expression(context);
|
||||||
token := transpiler_lex(lexer);
|
token := transpiler_lex(context^.lexer);
|
||||||
|
|
||||||
if token.kind = lexerKindSemicolon then
|
if token.kind = lexerKindSemicolon then
|
||||||
token := transpiler_lex(lexer);
|
token := transpiler_lex(context^.lexer);
|
||||||
Write(';')
|
Write(';')
|
||||||
end;
|
end;
|
||||||
WriteLn()
|
WriteLn()
|
||||||
end
|
end
|
||||||
end;
|
end;
|
||||||
|
|
||||||
proc transpile_record_type(context: PTranspilerContext, lexer: PLexer);
|
proc transpile_record_type(context: PTranspilerContext);
|
||||||
var
|
|
||||||
token: LexerToken;
|
|
||||||
begin
|
begin
|
||||||
WriteString('RECORD');
|
WriteString('RECORD');
|
||||||
WriteLn();
|
WriteLn();
|
||||||
transpile_type_fields(context, lexer);
|
transpile_type_fields(context);
|
||||||
WriteString(' END')
|
WriteString(' END')
|
||||||
end;
|
end;
|
||||||
|
|
||||||
proc transpile_pointer_type(context: PTranspilerContext, lexer: PLexer);
|
proc transpile_pointer_type(context: PTranspilerContext);
|
||||||
var
|
var
|
||||||
token: LexerToken;
|
token: LexerToken;
|
||||||
written_bytes: CARDINAL;
|
|
||||||
begin
|
begin
|
||||||
token := lexer_current(lexer);
|
token := lexer_current(context^.lexer);
|
||||||
WriteString('POINTER TO ');
|
WriteString('POINTER TO ');
|
||||||
if token.kind = lexerKindPointer then
|
if token.kind = lexerKindPointer then
|
||||||
token := transpiler_lex(lexer)
|
token := transpiler_lex(context^.lexer)
|
||||||
end;
|
end;
|
||||||
transpile_type_expression(context, lexer)
|
transpile_type_expression(context)
|
||||||
end;
|
end;
|
||||||
|
|
||||||
proc transpile_array_type(context: PTranspilerContext, lexer: PLexer);
|
proc transpile_array_type(context: PTranspilerContext);
|
||||||
var
|
var
|
||||||
token: LexerToken;
|
token: LexerToken;
|
||||||
written_bytes: CARDINAL;
|
|
||||||
begin
|
begin
|
||||||
WriteString('ARRAY');
|
WriteString('ARRAY');
|
||||||
token := lexer_current(lexer);
|
token := lexer_current(context^.lexer);
|
||||||
|
|
||||||
if token.kind = lexerKindArray then
|
if token.kind = lexerKindArray then
|
||||||
token := transpiler_lex(lexer)
|
token := transpiler_lex(context^.lexer)
|
||||||
end;
|
end;
|
||||||
if token.kind <> lexerKindOf then
|
if token.kind <> lexerKindOf then
|
||||||
WriteString('[1..');
|
WriteString('[1..');
|
||||||
token := transpiler_lex(lexer);
|
token := transpiler_lex(context^.lexer);
|
||||||
written_bytes := WriteNBytes(StdOut, ADDRESS(lexer^.Current - lexer^.Start), lexer^.Start);
|
write_current(context^.lexer);
|
||||||
token := transpiler_lex(lexer);
|
token := transpiler_lex(context^.lexer);
|
||||||
Write(']')
|
Write(']')
|
||||||
end;
|
end;
|
||||||
WriteString(' OF ');
|
WriteString(' OF ');
|
||||||
transpile_type_expression(context, lexer)
|
transpile_type_expression(context)
|
||||||
end;
|
end;
|
||||||
|
|
||||||
proc transpile_enumeration_type(context: PTranspilerContext, lexer: PLexer);
|
proc transpile_enumeration_type(context: PTranspilerContext);
|
||||||
var
|
var
|
||||||
token: LexerToken;
|
token: LexerToken;
|
||||||
written_bytes: CARDINAL;
|
|
||||||
begin
|
begin
|
||||||
WriteString('(');
|
WriteString('(');
|
||||||
WriteLn();
|
WriteLn();
|
||||||
WriteString(' ');
|
WriteString(' ');
|
||||||
|
|
||||||
token := transpiler_lex(lexer);
|
token := transpiler_lex(context^.lexer);
|
||||||
written_bytes := WriteNBytes(StdOut, ADDRESS(lexer^.Current - lexer^.Start), lexer^.Start);
|
write_current(context^.lexer);
|
||||||
|
|
||||||
token := transpiler_lex(lexer);
|
token := transpiler_lex(context^.lexer);
|
||||||
|
|
||||||
while token.kind = lexerKindComma do
|
while token.kind = lexerKindComma do
|
||||||
Write(',');
|
Write(',');
|
||||||
WriteLn();
|
WriteLn();
|
||||||
WriteString(' ');
|
WriteString(' ');
|
||||||
token := transpiler_lex(lexer);
|
token := transpiler_lex(context^.lexer);
|
||||||
written_bytes := WriteNBytes(StdOut, ADDRESS(lexer^.Current - lexer^.Start), lexer^.Start);
|
write_current(context^.lexer);
|
||||||
|
|
||||||
token := transpiler_lex(lexer)
|
token := transpiler_lex(context^.lexer)
|
||||||
end;
|
end;
|
||||||
WriteLn();
|
WriteLn();
|
||||||
WriteString(' )')
|
WriteString(' )')
|
||||||
end;
|
end;
|
||||||
|
|
||||||
proc transpile_union_type(context: PTranspilerContext, lexer: PLexer);
|
proc transpile_union_type(context: PTranspilerContext);
|
||||||
var
|
|
||||||
token: LexerToken;
|
|
||||||
end;
|
end;
|
||||||
|
|
||||||
proc transpile_procedure_type(context: PTranspilerContext, lexer: PLexer);
|
proc transpile_procedure_type(context: PTranspilerContext);
|
||||||
var
|
var
|
||||||
token: LexerToken;
|
token: LexerToken;
|
||||||
written_bytes: CARDINAL;
|
|
||||||
begin
|
begin
|
||||||
token := transpiler_lex(lexer);
|
token := transpiler_lex(context^.lexer);
|
||||||
WriteString('PROCEDURE(');
|
WriteString('PROCEDURE(');
|
||||||
|
|
||||||
token := transpiler_lex(lexer);
|
token := transpiler_lex(context^.lexer);
|
||||||
|
|
||||||
while token.kind <> lexerKindRightParen do
|
while token.kind <> lexerKindRightParen do
|
||||||
written_bytes := WriteNBytes(StdOut, ADDRESS(lexer^.Current - lexer^.Start), lexer^.Start);
|
write_current(context^.lexer);
|
||||||
|
|
||||||
token := transpiler_lex(lexer);
|
token := transpiler_lex(context^.lexer);
|
||||||
if token.kind = lexerKindComma then
|
if token.kind = lexerKindComma then
|
||||||
token := transpiler_lex(lexer);
|
token := transpiler_lex(context^.lexer);
|
||||||
WriteString(', ')
|
WriteString(', ')
|
||||||
end
|
end
|
||||||
end;
|
end;
|
||||||
Write(')')
|
Write(')')
|
||||||
end;
|
end;
|
||||||
|
|
||||||
proc transpile_type_expression(context: PTranspilerContext, lexer: PLexer);
|
proc transpile_type_expression(context: PTranspilerContext);
|
||||||
var
|
var
|
||||||
token: LexerToken;
|
token: LexerToken;
|
||||||
written_bytes: CARDINAL;
|
|
||||||
begin
|
begin
|
||||||
token := transpiler_lex(lexer);
|
token := transpiler_lex(context^.lexer);
|
||||||
if token.kind = lexerKindRecord then
|
if token.kind = lexerKindRecord then
|
||||||
transpile_record_type(context, lexer)
|
transpile_record_type(context)
|
||||||
end;
|
end;
|
||||||
if token.kind = lexerKindLeftParen then
|
if token.kind = lexerKindLeftParen then
|
||||||
transpile_enumeration_type(context, lexer)
|
transpile_enumeration_type(context)
|
||||||
end;
|
end;
|
||||||
if (token.kind = lexerKindArray) or (token.kind = lexerKindLeftSquare) then
|
if (token.kind = lexerKindArray) or (token.kind = lexerKindLeftSquare) then
|
||||||
transpile_array_type(context, lexer)
|
transpile_array_type(context)
|
||||||
end;
|
end;
|
||||||
if token.kind = lexerKindHat then
|
if token.kind = lexerKindHat then
|
||||||
transpile_pointer_type(context, lexer)
|
transpile_pointer_type(context)
|
||||||
end;
|
end;
|
||||||
if token.kind = lexerKindProc then
|
if token.kind = lexerKindProc then
|
||||||
transpile_procedure_type(context, lexer)
|
transpile_procedure_type(context)
|
||||||
end;
|
end;
|
||||||
if token.kind = lexerKindIdentifier then
|
if token.kind = lexerKindIdentifier then
|
||||||
written_bytes := WriteNBytes(StdOut, ADDRESS(lexer^.Current - lexer^.Start), lexer^.Start)
|
write_current(context^.lexer)
|
||||||
end
|
end
|
||||||
end;
|
end;
|
||||||
|
|
||||||
proc transpile_type_declaration(context: PTranspilerContext, lexer: PLexer);
|
proc transpile_type_declaration(context: PTranspilerContext);
|
||||||
var
|
var
|
||||||
token: LexerToken;
|
token: LexerToken;
|
||||||
written_bytes: CARDINAL;
|
|
||||||
begin
|
begin
|
||||||
WriteString(' ');
|
WriteString(' ');
|
||||||
token := lexer_current(lexer);
|
token := lexer_current(context^.lexer);
|
||||||
written_bytes := WriteNBytes(StdOut, ADDRESS(lexer^.Current - lexer^.Start), lexer^.Start);
|
write_current(context^.lexer);
|
||||||
|
|
||||||
token := transpiler_lex(lexer);
|
token := transpiler_lex(context^.lexer);
|
||||||
WriteString(' = ');
|
WriteString(' = ');
|
||||||
transpile_type_expression(context, lexer);
|
transpile_type_expression(context);
|
||||||
token := transpiler_lex(lexer);
|
token := transpiler_lex(context^.lexer);
|
||||||
write_semicolon();
|
write_semicolon(context^.output);
|
||||||
end;
|
end;
|
||||||
|
|
||||||
proc transpile_type_part(context: PTranspilerContext, lexer: PLexer);
|
proc transpile_type_part(context: PTranspilerContext);
|
||||||
var
|
var
|
||||||
token: LexerToken;
|
token: LexerToken;
|
||||||
begin
|
begin
|
||||||
token := lexer_current(lexer);
|
token := lexer_current(context^.lexer);
|
||||||
|
|
||||||
if token.kind = lexerKindType then
|
if token.kind = lexerKindType then
|
||||||
WriteString('TYPE');
|
WriteString('TYPE');
|
||||||
WriteLn();
|
WriteLn();
|
||||||
token := transpiler_lex(lexer);
|
token := transpiler_lex(context^.lexer);
|
||||||
|
|
||||||
while token.kind = lexerKindIdentifier do
|
while token.kind = lexerKindIdentifier do
|
||||||
transpile_type_declaration(context, lexer);
|
transpile_type_declaration(context);
|
||||||
token := transpiler_lex(lexer)
|
token := transpiler_lex(context^.lexer)
|
||||||
end;
|
end;
|
||||||
WriteLn()
|
WriteLn()
|
||||||
end
|
end
|
||||||
end;
|
end;
|
||||||
|
|
||||||
proc transpile_variable_declaration(context: PTranspilerContext, lexer: PLexer);
|
proc transpile_variable_declaration(context: PTranspilerContext);
|
||||||
var
|
var
|
||||||
token: LexerToken;
|
token: LexerToken;
|
||||||
written_bytes: CARDINAL;
|
|
||||||
begin
|
begin
|
||||||
WriteString(' ');
|
WriteString(' ');
|
||||||
token := lexer_current(lexer);
|
token := lexer_current(context^.lexer);
|
||||||
written_bytes := WriteNBytes(StdOut, ADDRESS(lexer^.Current - lexer^.Start), lexer^.Start);
|
write_current(context^.lexer);
|
||||||
|
|
||||||
token := transpiler_lex(lexer);
|
token := transpiler_lex(context^.lexer);
|
||||||
WriteString(': ');
|
WriteString(': ');
|
||||||
transpile_type_expression(context, lexer);
|
transpile_type_expression(context);
|
||||||
token := transpiler_lex(lexer);
|
token := transpiler_lex(context^.lexer);
|
||||||
write_semicolon()
|
write_semicolon(context^.output)
|
||||||
end;
|
end;
|
||||||
|
|
||||||
proc transpile_variable_part(context: PTranspilerContext, lexer: PLexer) -> BOOLEAN;
|
proc transpile_variable_part(context: PTranspilerContext) -> BOOLEAN;
|
||||||
var
|
var
|
||||||
token: LexerToken;
|
token: LexerToken;
|
||||||
result: BOOLEAN;
|
result: BOOLEAN;
|
||||||
begin
|
begin
|
||||||
token := lexer_current(lexer);
|
token := lexer_current(context^.lexer);
|
||||||
result := token.kind = lexerKindVar;
|
result := token.kind = lexerKindVar;
|
||||||
|
|
||||||
if result then
|
if result then
|
||||||
WriteString('VAR');
|
WriteString('VAR');
|
||||||
WriteLn();
|
WriteLn();
|
||||||
token := transpiler_lex(lexer);
|
token := transpiler_lex(context^.lexer);
|
||||||
|
|
||||||
while token.kind = lexerKindIdentifier do
|
while token.kind = lexerKindIdentifier do
|
||||||
transpile_variable_declaration(context, lexer);
|
transpile_variable_declaration(context);
|
||||||
token := transpiler_lex(lexer)
|
token := transpiler_lex(context^.lexer)
|
||||||
end
|
end
|
||||||
end;
|
end;
|
||||||
return result
|
return result
|
||||||
end;
|
end;
|
||||||
|
|
||||||
proc transpile_procedure_heading(context: PTranspilerContext, lexer: PLexer) -> LexerToken;
|
proc transpile_procedure_heading(context: PTranspilerContext) -> LexerToken;
|
||||||
var
|
var
|
||||||
token: LexerToken;
|
token: LexerToken;
|
||||||
result: LexerToken;
|
result: LexerToken;
|
||||||
written_bytes: CARDINAL;
|
|
||||||
begin
|
begin
|
||||||
WriteString('PROCEDURE ');
|
WriteString('PROCEDURE ');
|
||||||
|
|
||||||
result := transpiler_lex(lexer);
|
result := transpiler_lex(context^.lexer);
|
||||||
written_bytes := WriteNBytes(StdOut, ADDRESS(lexer^.Current - lexer^.Start), lexer^.Start);
|
write_current(context^.lexer);
|
||||||
|
|
||||||
token := transpiler_lex(lexer);
|
token := transpiler_lex(context^.lexer);
|
||||||
Write('(');
|
Write('(');
|
||||||
|
|
||||||
token := transpiler_lex(lexer);
|
token := transpiler_lex(context^.lexer);
|
||||||
while token.kind <> lexerKindRightParen do
|
while token.kind <> lexerKindRightParen do
|
||||||
written_bytes := WriteNBytes(StdOut, ADDRESS(lexer^.Current - lexer^.Start), lexer^.Start);
|
write_current(context^.lexer);
|
||||||
|
|
||||||
token := transpiler_lex(lexer);
|
token := transpiler_lex(context^.lexer);
|
||||||
WriteString(': ');
|
WriteString(': ');
|
||||||
|
|
||||||
transpile_type_expression(context, lexer);
|
transpile_type_expression(context);
|
||||||
|
|
||||||
token := transpiler_lex(lexer);
|
token := transpiler_lex(context^.lexer);
|
||||||
if (token.kind = lexerKindSemicolon) or (token.kind = lexerKindComma) then
|
if (token.kind = lexerKindSemicolon) or (token.kind = lexerKindComma) then
|
||||||
WriteString('; ');
|
WriteString('; ');
|
||||||
token := transpiler_lex(lexer)
|
token := transpiler_lex(context^.lexer)
|
||||||
end
|
end
|
||||||
end;
|
end;
|
||||||
WriteString(')');
|
WriteString(')');
|
||||||
token := transpiler_lex(lexer);
|
token := transpiler_lex(context^.lexer);
|
||||||
|
|
||||||
(* Check for the return type and write it. *)
|
(* Check for the return type and write it. *)
|
||||||
if token.kind = lexerKindArrow then
|
if token.kind = lexerKindArrow then
|
||||||
WriteString(': ');
|
WriteString(': ');
|
||||||
token := transpiler_lex(lexer);
|
token := transpiler_lex(context^.lexer);
|
||||||
written_bytes := WriteNBytes(StdOut, ADDRESS(lexer^.Current - lexer^.Start), lexer^.Start);
|
write_current(context^.lexer);
|
||||||
token := transpiler_lex(lexer)
|
token := transpiler_lex(context^.lexer)
|
||||||
end;
|
end;
|
||||||
token := transpiler_lex(lexer);
|
token := transpiler_lex(context^.lexer);
|
||||||
write_semicolon();
|
write_semicolon(context^.output);
|
||||||
|
|
||||||
return result
|
return result
|
||||||
end;
|
end;
|
||||||
|
|
||||||
proc transpile_expression(context: PTranspilerContext, lexer: PLexer, trailing_token: LexerKind);
|
proc transpile_expression(context: PTranspilerContext, trailing_token: LexerKind);
|
||||||
var
|
var
|
||||||
token: LexerToken;
|
token: LexerToken;
|
||||||
written_bytes: CARDINAL;
|
written_bytes: CARDINAL;
|
||||||
begin
|
begin
|
||||||
token := transpiler_lex(lexer);
|
token := transpiler_lex(context^.lexer);
|
||||||
|
|
||||||
while (token.kind <> trailing_token) & (token.kind <> lexerKindEnd) do
|
while (token.kind <> trailing_token) & (token.kind <> lexerKindEnd) do
|
||||||
written_bytes := 0;
|
written_bytes := 0;
|
||||||
@@ -441,6 +425,14 @@ begin
WriteString('NIL ');
written_bytes := 1
end;
+if (token.kind = lexerKindBoolean) & token.booleanKind then
+WriteString('TRUE ');
+written_bytes := 1
+end;
+if (token.kind = lexerKindBoolean) & (~token.booleanKind) then
+WriteString('FALSE ');
+written_bytes := 1
+end;
if token.kind = lexerKindOr then
WriteString('OR ');
written_bytes := 1
@@ -454,157 +446,151 @@ begin
written_bytes := 1
end;
if written_bytes = 0 then
-written_bytes := WriteNBytes(StdOut, ADDRESS(lexer^.Current - lexer^.Start), lexer^.Start);
+write_current(context^.lexer);
Write(' ')
end;
-token := transpiler_lex(lexer)
+token := transpiler_lex(context^.lexer)
end
end;

-proc transpile_if_statement(context: PTranspilerContext, lexer: PLexer);
+proc transpile_if_statement(context: PTranspilerContext);
var
token: LexerToken;
-written_bytes: CARDINAL;
begin
WriteString(' IF ');
-transpile_expression(context, lexer, lexerKindThen);
+transpile_expression(context, lexerKindThen);

WriteString('THEN');
WriteLn();
-transpile_statements(context, lexer);
+transpile_statements(context);
WriteString(' END');
-token := transpiler_lex(lexer)
+token := transpiler_lex(context^.lexer)
end;

-proc transpile_while_statement(context: PTranspilerContext, lexer: PLexer);
+proc transpile_while_statement(context: PTranspilerContext);
var
token: LexerToken;
-written_bytes: CARDINAL;
begin
WriteString(' WHILE ');
-transpile_expression(context, lexer, lexerKindDo);
+transpile_expression(context, lexerKindDo);

WriteString('DO');
WriteLn();
-transpile_statements(context, lexer);
+transpile_statements(context);
WriteString(' END');
-token := transpiler_lex(lexer)
+token := transpiler_lex(context^.lexer)
end;

-proc transpile_assignment_statement(context: PTranspilerContext, lexer: PLexer);
+proc transpile_assignment_statement(context: PTranspilerContext);
begin
WriteString(' := ');
-transpile_expression(context, lexer, lexerKindSemicolon);
+transpile_expression(context, lexerKindSemicolon);
end;

-proc transpile_call_statement(context: PTranspilerContext, lexer: PLexer);
+proc transpile_call_statement(context: PTranspilerContext);
var
token: LexerToken;
-written_bytes: CARDINAL;
begin
WriteString('(');
-token := transpiler_lex(lexer);
+token := transpiler_lex(context^.lexer);

while (token.kind <> lexerKindSemicolon) & (token.kind <> lexerKindEnd) do
-written_bytes := WriteNBytes(StdOut, ADDRESS(lexer^.Current - lexer^.Start), lexer^.Start);
-token := transpiler_lex(lexer)
+write_current(context^.lexer);
+token := transpiler_lex(context^.lexer)
end
end;

-proc transpile_designator_expression(context: PTranspilerContext, lexer: PLexer);
+proc transpile_designator_expression(context: PTranspilerContext);
var
token: LexerToken;
-written_bytes: CARDINAL;
begin
WriteString(' ');
-written_bytes := WriteNBytes(StdOut, ADDRESS(lexer^.Current - lexer^.Start), lexer^.Start);
-token := transpiler_lex(lexer);
+write_current(context^.lexer);
+token := transpiler_lex(context^.lexer);

while token.kind = lexerKindLeftSquare do
Write('[');
-token := transpiler_lex(lexer);
+token := transpiler_lex(context^.lexer);
while token.kind <> lexerKindRightSquare do
-written_bytes := WriteNBytes(StdOut, ADDRESS(lexer^.Current - lexer^.Start), lexer^.Start);
-token := transpiler_lex(lexer)
+write_current(context^.lexer);
+token := transpiler_lex(context^.lexer)
end;
Write(']');
-token := transpiler_lex(lexer)
+token := transpiler_lex(context^.lexer)
end;
if token.kind = lexerKindHat then
Write('^');
-token := transpiler_lex(lexer)
+token := transpiler_lex(context^.lexer)
end;
if token.kind = lexerKindDot then
Write('.');
-token := transpiler_lex(lexer);
-written_bytes := WriteNBytes(StdOut, ADDRESS(lexer^.Current - lexer^.Start), lexer^.Start);
-token := transpiler_lex(lexer)
+token := transpiler_lex(context^.lexer);
+write_current(context^.lexer);
+token := transpiler_lex(context^.lexer)
end;
if token.kind = lexerKindHat then
Write('^');
-token := transpiler_lex(lexer)
+token := transpiler_lex(context^.lexer)
end;
while token.kind = lexerKindLeftSquare do
Write('[');
-token := transpiler_lex(lexer);
+token := transpiler_lex(context^.lexer);
while token.kind <> lexerKindRightSquare do
-written_bytes := WriteNBytes(StdOut, ADDRESS(lexer^.Current - lexer^.Start), lexer^.Start);
-token := transpiler_lex(lexer)
+write_current(context^.lexer);
+token := transpiler_lex(context^.lexer)
end;
Write(']');
-token := transpiler_lex(lexer)
+token := transpiler_lex(context^.lexer)
end
end;

-proc transpile_return_statement(context: PTranspilerContext, lexer: PLexer);
+proc transpile_return_statement(context: PTranspilerContext);
var
token: LexerToken;
-written_bytes: CARDINAL;
begin
WriteString(' RETURN ');
-token := transpiler_lex(lexer);
-written_bytes := WriteNBytes(StdOut, ADDRESS(lexer^.Current - lexer^.Start), lexer^.Start);
-token := transpiler_lex(lexer)
+token := transpiler_lex(context^.lexer);
+write_current(context^.lexer);
+token := transpiler_lex(context^.lexer)
end;

-proc transpile_statement(context: PTranspilerContext, lexer: PLexer);
+proc transpile_statement(context: PTranspilerContext);
var
token: LexerToken;
-written_bytes: CARDINAL;
begin
-token := transpiler_lex(lexer);
+token := transpiler_lex(context^.lexer);

if token.kind = lexerKindIf then
-transpile_if_statement(context, lexer)
+transpile_if_statement(context)
end;
if token.kind = lexerKindWhile then
-transpile_while_statement(context, lexer)
+transpile_while_statement(context)
end;
if token.kind = lexerKindReturn then
-transpile_return_statement(context, lexer)
+transpile_return_statement(context)
end;
if token.kind = lexerKindIdentifier then
-transpile_designator_expression(context, lexer);
-token := lexer_current(lexer);
+transpile_designator_expression(context);
+token := lexer_current(context^.lexer);

if token.kind = lexerKindAssignment then
-transpile_assignment_statement(context, lexer)
+transpile_assignment_statement(context)
end;
if token.kind = lexerKindLeftParen then
-transpile_call_statement(context, lexer)
+transpile_call_statement(context)
end
end
end;

-proc transpile_statements(context: PTranspilerContext, lexer: PLexer);
+proc transpile_statements(context: PTranspilerContext);
var
token: LexerToken;
begin
-token := lexer_current(lexer);
+token := lexer_current(context^.lexer);

while token.kind <> lexerKindEnd do
-transpile_statement(context, lexer);
-token := lexer_current(lexer);
+transpile_statement(context);
+token := lexer_current(context^.lexer);

if token.kind = lexerKindSemicolon then
Write(';')
@@ -613,46 +599,46 @@ begin
end
end;

-proc transpile_statement_part(context: PTranspilerContext, lexer: PLexer);
+proc transpile_statement_part(context: PTranspilerContext);
var
token: LexerToken;
begin
-token := lexer_current(lexer);
+token := lexer_current(context^.lexer);
if token.kind = lexerKindBegin then
WriteString('BEGIN');
WriteLn();
-transpile_statements(context, lexer)
+transpile_statements(context)
end
end;

-proc transpile_procedure_declaration(context: PTranspilerContext, lexer: PLexer);
+proc transpile_procedure_declaration(context: PTranspilerContext);
var
token: LexerToken;
seen_part: BOOLEAN;
written_bytes: CARDINAL;
begin
-token := transpile_procedure_heading(context, lexer);
-seen_part := transpile_constant_part(context, lexer);
-seen_part := transpile_variable_part(context, lexer);
-transpile_statement_part(context, lexer);
+token := transpile_procedure_heading(context);
+seen_part := transpile_constant_part(context);
+seen_part := transpile_variable_part(context);
+transpile_statement_part(context);

WriteString('END ');
written_bytes := WriteNBytes(StdOut, ORD(token.identifierKind[1]), ADR(token.identifierKind[2]));

-token := transpiler_lex(lexer);
-write_semicolon();
-token := transpiler_lex(lexer)
+token := transpiler_lex(context^.lexer);
+write_semicolon(context^.output);
+token := transpiler_lex(context^.lexer)
end;

-proc transpile_procedure_part(context: PTranspilerContext, lexer: PLexer);
+proc transpile_procedure_part(context: PTranspilerContext);
var
token: LexerToken;
begin
-token := lexer_current(lexer);
+token := lexer_current(context^.lexer);

while token.kind = lexerKindProc do
-transpile_procedure_declaration(context, lexer);
-token := lexer_current(lexer);
+transpile_procedure_declaration(context);
+token := lexer_current(context^.lexer);
WriteLn()
end
end;
@@ -663,7 +649,11 @@ var
written_bytes: CARDINAL;
context: TranspilerContext;
begin
-transpile_module(ADR(context), lexer)
+context.indentation := 0;
+context.output := StdOut;
+context.lexer := lexer;
+
+transpile_module(ADR(context))
end;

end Transpiler.
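The common thread of the Transpiler changes is that every transpile_* procedure now receives a single TranspilerContext (indentation, output file, lexer) instead of a separate lexer parameter, and the semicolon and identifier writers target the context's output rather than an implicit StdOut. An illustrative Ruby analogue of that pattern, not the project's code:

TranspilerContext = Struct.new(:indentation, :output, :lexer)

def write_semicolon(output)
  output.print ';'
  output.puts
end

context = TranspilerContext.new(0, $stdout, :lexer_placeholder)
write_semicolon(context.output)   # writes ";\n" to the context's output stream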