Don't require the module name after end

This commit is contained in:
Eugen Wissner 2025-06-03 12:14:59 +02:00
parent 1983ef8e71
commit d3c0c36fd8
Signed by: belka
GPG Key ID: A27FDC1E8EE902C0
10 changed files with 332 additions and 182 deletions

View File

@ -109,6 +109,7 @@ task :backport do
.gsub(/proc\(/, 'PROCEDURE(') .gsub(/proc\(/, 'PROCEDURE(')
.gsub(/ & /, ' AND ') .gsub(/ & /, ' AND ')
.gsub(/ -> /, ': ') .gsub(/ -> /, ': ')
.gsub(/end\./, "END #{source_path.basename.sub_ext('')}.")
.gsub(/([[:space:]]*)end(;?)$/, '\1END\2') .gsub(/([[:space:]]*)end(;?)$/, '\1END\2')
.gsub(/^([[:space:]]*)(while|return|if)\b/) { |match| match.upcase } .gsub(/^([[:space:]]*)(while|return|if)\b/) { |match| match.upcase }
.gsub(/^from ([[:alnum:]]+) import/, 'FROM \1 IMPORT') .gsub(/^from ([[:alnum:]]+) import/, 'FROM \1 IMPORT')

View File

@ -35,7 +35,7 @@ begin
parsed := true; parsed := true;
result^.parse := true result^.parse := true
end; end;
if parameter[0] <> '-' then if parameter[1] <> '-' then
parsed := true; parsed := true;
if Length(result^.input) > 0 then if Length(result^.input) > 0 then
@ -72,4 +72,4 @@ begin
return result return result
end; end;
end CommandLineInterface. end.

View File

@ -1,7 +1,7 @@
DEFINITION MODULE Common; DEFINITION MODULE Common;
TYPE TYPE
ShortString = ARRAY[0..255] OF CHAR; ShortString = ARRAY[1..256] OF CHAR;
Identifier = ARRAY[1..256] OF CHAR; Identifier = ARRAY[1..256] OF CHAR;
PIdentifier = POINTER TO Identifier; PIdentifier = POINTER TO Identifier;

View File

@ -1,3 +1,3 @@
implementation module Common; implementation module Common;
end Common. end.

View File

@ -29,7 +29,7 @@ begin
if IsNoError(source_input) then if IsNoError(source_input) then
lexer_initialize(ADR(lexer), source_input); lexer_initialize(ADR(lexer), source_input);
transpile(ADR(lexer), StdOut); transpile(ADR(lexer), StdOut, command_line^.input);
lexer_destroy(ADR(lexer)); lexer_destroy(ADR(lexer));
@ -48,4 +48,4 @@ begin
ExitOnHalt(1) ExitOnHalt(1)
end; end;
HALT() HALT()
end Compiler. end.

View File

@ -825,4 +825,4 @@ end;
begin begin
initialize_classification(); initialize_classification();
initialize_transitions() initialize_transitions()
end Lexer. end.

View File

@ -1,6 +1,7 @@
DEFINITION MODULE Parser; DEFINITION MODULE Parser;
FROM Common IMPORT Identifier, PIdentifier; FROM Common IMPORT Identifier, PIdentifier;
FROM Lexer IMPORT PLexer;
TYPE TYPE
AstConstantDeclaration = RECORD AstConstantDeclaration = RECORD
@ -56,4 +57,6 @@ TYPE
END; END;
PAstModule = POINTER TO AstModule; PAstModule = POINTER TO AstModule;
PROCEDURE parse_type_expression(lexer: PLexer): PAstTypeExpression;
END Parser. END Parser.

View File

@ -1,3 +1,232 @@
implementation module Parser; implementation module Parser;
end Parser. from SYSTEM import TSIZE;
from MemUtils import MemZero;
from Storage import ALLOCATE, REALLOCATE;
from Lexer import LexerKind, LexerToken, lexer_current, lexer_lex;
(* Calls lexer_lex() but skips the comments. *)
(* Returns the first token that is not a comment, so the parser never has
   to handle lexerKindComment itself. *)
proc transpiler_lex(lexer: PLexer) -> LexerToken;
var
result: LexerToken;
begin
result := lexer_lex(lexer);
(* Discard consecutive comment tokens until something meaningful arrives. *)
while result.kind = lexerKindComment do
result := lexer_lex(lexer)
end;
return result
end;
(* Parses record field declarations until the closing "end" token.
   Returns a heap-allocated array of AstFieldDeclaration terminated by a
   zeroed entry: callers iterate until they hit a field with an empty name.
   Each iteration grows the array by one entry and keeps one extra slot for
   the terminator. *)
proc parse_type_fields(lexer: PLexer) -> PAstFieldDeclaration;
var
token: LexerToken;
field_declarations: PAstFieldDeclaration;
field_count: CARDINAL;
current_field: PAstFieldDeclaration;
begin
(* Initial allocation holds just the terminating entry. *)
ALLOCATE(field_declarations, TSIZE(AstFieldDeclaration));
token := transpiler_lex(lexer);
field_count := 0;
while token.kind <> lexerKindEnd do
INC(field_count);
(* field_count entries plus one slot reserved for the terminator. *)
REALLOCATE(field_declarations, TSIZE(AstFieldDeclaration) * (field_count + 1));
current_field := field_declarations;
INC(current_field, TSIZE(AstFieldDeclaration) * (field_count - 1));
token := transpiler_lex(lexer);
current_field^.field_name := token.identifierKind;
token := transpiler_lex(lexer);
current_field^.field_type := parse_type_expression(lexer);
token := transpiler_lex(lexer);
if token.kind = lexerKindSemicolon then
token := transpiler_lex(lexer)
end
end;
(* Zero the terminating entry. The address is recomputed from the array
   base instead of advancing current_field: current_field is uninitialized
   when the record has no fields, and advancing it would also write one
   entry past the single allocated slot in that case. *)
current_field := field_declarations;
INC(current_field, TSIZE(AstFieldDeclaration) * field_count);
MemZero(current_field, TSIZE(AstFieldDeclaration));
return field_declarations
end;
(* Parses a record type expression. Assumes the "record" keyword was already
   consumed by the caller; delegates the field list (up to the matching
   "end") to parse_type_fields. *)
proc parse_record_type(lexer: PLexer) -> PAstTypeExpression;
var
result: PAstTypeExpression;
begin
ALLOCATE(result, TSIZE(AstTypeExpression));
result^.kind := astTypeExpressionKindRecord;
result^.fields := parse_type_fields(lexer);
return result
end;
(* Parses a pointer type expression ("^" target). Skips the pointer token if
   it is still the current one, then parses the pointee type recursively. *)
proc parse_pointer_type(lexer: PLexer) -> PAstTypeExpression;
var
token: LexerToken;
result: PAstTypeExpression;
begin
ALLOCATE(result, TSIZE(AstTypeExpression));
result^.kind := astTypeExpressionKindPointer;
token := lexer_current(lexer);
if token.kind = lexerKindPointer then
token := transpiler_lex(lexer)
end;
(* NOTE(review): this re-read of the current token is never used afterwards —
   presumably left over from an earlier revision; confirm before removing. *)
token := lexer_current(lexer);
result^.target := parse_type_expression(lexer);
return result
end;
(* Parses an array type expression. A length of 0 is stored when no explicit
   length is given (i.e. the "of" keyword follows immediately); otherwise the
   integer token before "of" becomes the length. The element type is parsed
   recursively. Unused local "buffer" removed. *)
proc parse_array_type(lexer: PLexer) -> PAstTypeExpression;
var
token: LexerToken;
result: PAstTypeExpression;
begin
ALLOCATE(result, TSIZE(AstTypeExpression));
result^.kind := astTypeExpressionKindArray;
(* 0 means "no explicit length". *)
result^.length := 0;
token := lexer_current(lexer);
if token.kind = lexerKindArray then
token := transpiler_lex(lexer)
end;
if token.kind <> lexerKindOf then
token := transpiler_lex(lexer);
result^.length := token.integerKind;
token := transpiler_lex(lexer);
end;
(* Skip "of" and parse the element type. *)
token := transpiler_lex(lexer);
result^.base := parse_type_expression(lexer);
return result
end;
(* Parses an enumeration type: a comma-separated identifier list. The cases
   are stored as a heap-allocated array of Identifier terminated by a zeroed
   identifier (written with MemZero); the initial allocation holds the first
   case plus the terminator, and REALLOCATE keeps one spare slot as the list
   grows. *)
proc parse_enumeration_type(lexer: PLexer) -> PAstTypeExpression;
var
token: LexerToken;
result: PAstTypeExpression;
current_case: PIdentifier;
case_count: CARDINAL;
begin
ALLOCATE(result, TSIZE(AstTypeExpression));
result^.kind := astTypeExpressionKindEnumeration;
case_count := 1;
(* Room for the first case and the terminating entry. *)
ALLOCATE(result^.cases, TSIZE(Identifier) * 2);
token := transpiler_lex(lexer);
current_case := result^.cases;
current_case^ := token.identifierKind;
token := transpiler_lex(lexer);
while token.kind = lexerKindComma do
token := transpiler_lex(lexer);
INC(case_count);
(* case_count entries plus one slot for the terminator. *)
REALLOCATE(result^.cases, TSIZE(Identifier) * (case_count + 1));
current_case := result^.cases;
INC(current_case, TSIZE(Identifier) * (case_count - 1));
current_case^ := token.identifierKind;
token := transpiler_lex(lexer)
end;
(* Zero the entry after the last case to terminate the list. *)
INC(current_case, TSIZE(Identifier));
MemZero(current_case, TSIZE(Identifier));
return result
end;
(* Parses a type reference by name: the current identifier token becomes the
   name of the named type expression. Unused local "written_bytes" removed —
   it belonged to the transpiler-side twin of this routine, not the parser. *)
proc parse_named_type(lexer: PLexer) -> PAstTypeExpression;
var
token: LexerToken;
result: PAstTypeExpression;
begin
token := lexer_current(lexer);
ALLOCATE(result, TSIZE(AstTypeExpression));
result^.kind := astTypeExpressionKindNamed;
result^.name := token.identifierKind;
return result
end;
(* Parses a procedure type: "proc(" T1, T2, ... ")". The parameter types are
   stored as a heap-allocated, nil-terminated array of PAstTypeExpression.
   Fix: the initial ALLOCATE requested 1 byte, but the loop-free path (zero
   parameters) stores a pointer-sized nil sentinel into that allocation —
   allocate room for one pointer instead. *)
proc parse_procedure_type(lexer: PLexer) -> PAstTypeExpression;
var
token: LexerToken;
result: PAstTypeExpression;
current_parameter: PPAstTypeExpression;
parameter_count: CARDINAL;
begin
parameter_count := 0;
ALLOCATE(result, TSIZE(AstTypeExpression));
result^.kind := astTypeExpressionKindProcedure;
(* Room for the terminating nil pointer even with zero parameters. *)
ALLOCATE(result^.parameters, TSIZE(PAstTypeExpression));
(* Skip the "proc" keyword and the opening parenthesis. *)
token := transpiler_lex(lexer);
token := transpiler_lex(lexer);
while token.kind <> lexerKindRightParen do
INC(parameter_count);
(* parameter_count pointers plus one slot for the nil terminator. *)
REALLOCATE(result^.parameters, TSIZE(PAstTypeExpression) * (parameter_count + 1));
current_parameter := result^.parameters;
INC(current_parameter, TSIZE(PAstTypeExpression) * (parameter_count - 1));
current_parameter^ := parse_type_expression(lexer);
token := transpiler_lex(lexer);
if token.kind = lexerKindComma then
token := transpiler_lex(lexer)
end
end;
(* Terminate the parameter list with a nil pointer. *)
current_parameter := result^.parameters;
INC(current_parameter, TSIZE(PAstTypeExpression) * parameter_count);
current_parameter^ := nil;
return result
end;
(* Dispatches on the current token to the matching type-expression parser:
   record, enumeration "(", array "["/"array", pointer "^", "proc", or a
   named type identifier. Returns nil when the token starts no known type
   expression. Each branch re-tests the same saved token, so at most one
   branch runs per call. *)
proc parse_type_expression(lexer: PLexer) -> PAstTypeExpression;
var
token: LexerToken;
result: PAstTypeExpression;
begin
result := nil;
token := lexer_current(lexer);
if token.kind = lexerKindRecord then
result := parse_record_type(lexer)
end;
if token.kind = lexerKindLeftParen then
result := parse_enumeration_type(lexer)
end;
if (token.kind = lexerKindArray) or (token.kind = lexerKindLeftSquare) then
result := parse_array_type(lexer)
end;
if token.kind = lexerKindHat then
result := parse_pointer_type(lexer)
end;
if token.kind = lexerKindProc then
result := parse_procedure_type(lexer)
end;
if token.kind = lexerKindIdentifier then
result := parse_named_type(lexer)
end;
return result
end;
end.

View File

@ -2,16 +2,17 @@ DEFINITION MODULE Transpiler;
FROM FIO IMPORT File; FROM FIO IMPORT File;
FROM Common IMPORT ShortString;
FROM Lexer IMPORT PLexer, Lexer; FROM Lexer IMPORT PLexer, Lexer;
TYPE TYPE
TranspilerContext = RECORD TranspilerContext = RECORD
indentation: CARDINAL; input_name: ShortString;
output: File; output: File;
lexer: PLexer lexer: PLexer
END; END;
PTranspilerContext = POINTER TO TranspilerContext; PTranspilerContext = POINTER TO TranspilerContext;
PROCEDURE transpile(lexer: PLexer; output: File); PROCEDURE transpile(lexer: PLexer; output: File; input_name: ShortString);
END Transpiler. END Transpiler.

View File

@ -7,13 +7,14 @@ from NumberIO import IntToStr;
from Storage import ALLOCATE, REALLOCATE; from Storage import ALLOCATE, REALLOCATE;
from MemUtils import MemCopy, MemZero; from MemUtils import MemCopy, MemZero;
from Common import Identifier, PIdentifier; from Common import Identifier, PIdentifier, ShortString;
from Lexer import Lexer, LexerToken, lexer_current, lexer_lex, LexerKind; from Lexer import Lexer, LexerToken, lexer_current, lexer_lex, LexerKind;
from Parser import AstModule, PAstModule, AstTypeExpressionKind, from Parser import AstModule, PAstModule, AstTypeExpressionKind,
AstConstantDeclaration, PPAstConstantDeclaration, AstConstantDeclaration, PPAstConstantDeclaration,
AstTypeDeclaration, PAstTypeDeclaration, PPAstTypeDeclaration, AstTypeDeclaration, PAstTypeDeclaration, PPAstTypeDeclaration,
AstVariableDeclaration, PPAstVariableDeclaration, AstVariableDeclaration, PPAstVariableDeclaration,
PAstTypeExpression, AstTypeExpression, PPAstTypeExpression, AstFieldDeclaration, PAstFieldDeclaration; PAstTypeExpression, AstTypeExpression, PPAstTypeExpression, AstFieldDeclaration, PAstFieldDeclaration,
parse_type_expression;
(* Calls lexer_lex() but skips the comments. *) (* Calls lexer_lex() but skips the comments. *)
proc transpiler_lex(lexer: PLexer) -> LexerToken; proc transpiler_lex(lexer: PLexer) -> LexerToken;
@ -138,7 +139,7 @@ begin
(* Write the module name and end the line with a semicolon and newline. *) (* Write the module name and end the line with a semicolon and newline. *)
token := transpiler_lex(context^.lexer); token := transpiler_lex(context^.lexer);
write_current(context^.lexer, context^.output); transpile_module_name(context);
token := transpiler_lex(context^.lexer); token := transpiler_lex(context^.lexer);
write_semicolon(context^.output); write_semicolon(context^.output);
@ -156,9 +157,7 @@ begin
transpile_statement_part(context); transpile_statement_part(context);
WriteString(context^.output, 'END '); WriteString(context^.output, 'END ');
transpile_module_name(context);
token := transpiler_lex(context^.lexer);
write_current(context^.lexer, context^.output);
token := transpiler_lex(context^.lexer); token := transpiler_lex(context^.lexer);
WriteChar(context^.output, '.'); WriteChar(context^.output, '.');
@ -169,148 +168,71 @@ begin
return result return result
end; end;
proc transpile_type_fields(context: PTranspilerContext) -> PAstFieldDeclaration; proc transpile_type_fields(context: PTranspilerContext, fields: PAstFieldDeclaration);
var var
token: LexerToken; written_bytes: CARDINAL;
field_declarations: PAstFieldDeclaration;
field_count: CARDINAL;
current_field: PAstFieldDeclaration; current_field: PAstFieldDeclaration;
begin begin
ALLOCATE(field_declarations, TSIZE(AstFieldDeclaration)); current_field := fields;
token := transpiler_lex(context^.lexer);
field_count := 0;
while token.kind <> lexerKindEnd do
INC(field_count);
REALLOCATE(field_declarations, TSIZE(AstFieldDeclaration) * (field_count + 1));
current_field := field_declarations;
INC(current_field , TSIZE(AstFieldDeclaration) * (field_count - 1));
while ORD(current_field^.field_name[1]) <> 0 do
WriteString(context^.output, ' '); WriteString(context^.output, ' ');
write_current(context^.lexer, context^.output); written_bytes := WriteNBytes(context^.output, ORD(current_field^.field_name[1]), ADR(current_field^.field_name[2]));
token := transpiler_lex(context^.lexer);
current_field^.field_name := token.identifierKind;
WriteString(context^.output, ': '); WriteString(context^.output, ': ');
token := transpiler_lex(context^.lexer); transpile_type_expression(context, current_field^.field_type);
current_field^.field_type := transpile_type_expression(context);
token := transpiler_lex(context^.lexer);
if token.kind = lexerKindSemicolon then INC(current_field , TSIZE(AstFieldDeclaration));
token := transpiler_lex(context^.lexer);
if ORD(current_field^.field_name[1]) <> 0 then
WriteChar(context^.output, ';') WriteChar(context^.output, ';')
end; end;
WriteLine(context^.output) WriteLine(context^.output)
end; end
INC(current_field, TSIZE(AstFieldDeclaration));
MemZero(current_field, TSIZE(AstFieldDeclaration));
return field_declarations
end; end;
proc transpile_record_type(context: PTranspilerContext) -> PAstTypeExpression; proc transpile_record_type(context: PTranspilerContext, type_expression: PAstTypeExpression);
var
result: PAstTypeExpression;
begin begin
ALLOCATE(result, TSIZE(AstTypeExpression));
result^.kind := astTypeExpressionKindRecord;
WriteString(context^.output, 'RECORD'); WriteString(context^.output, 'RECORD');
WriteLine(context^.output); WriteLine(context^.output);
result^.fields := transpile_type_fields(context); transpile_type_fields(context, type_expression^.fields);
WriteString(context^.output, ' END'); WriteString(context^.output, ' END')
return result
end; end;
proc transpile_pointer_type(context: PTranspilerContext) -> PAstTypeExpression; proc transpile_pointer_type(context: PTranspilerContext, type_expression: PAstTypeExpression);
var var
token: LexerToken; token: LexerToken;
result: PAstTypeExpression;
begin begin
ALLOCATE(result, TSIZE(AstTypeExpression));
result^.kind := astTypeExpressionKindPointer;
token := lexer_current(context^.lexer);
WriteString(context^.output, 'POINTER TO '); WriteString(context^.output, 'POINTER TO ');
if token.kind = lexerKindPointer then
token := transpiler_lex(context^.lexer)
end;
token := lexer_current(context^.lexer);
result^.target := transpile_type_expression(context);
return result transpile_type_expression(context, type_expression^.target)
end; end;
proc transpile_array_type(context: PTranspilerContext) -> PAstTypeExpression; proc transpile_array_type(context: PTranspilerContext, type_expression: PAstTypeExpression);
var var
token: LexerToken;
buffer: [20]CHAR; buffer: [20]CHAR;
result: PAstTypeExpression;
begin begin
ALLOCATE(result, TSIZE(AstTypeExpression));
result^.kind := astTypeExpressionKindArray;
WriteString(context^.output, 'ARRAY'); WriteString(context^.output, 'ARRAY');
token := lexer_current(context^.lexer);
if token.kind = lexerKindArray then if type_expression^.length <> 0 then
token := transpiler_lex(context^.lexer)
end;
if token.kind <> lexerKindOf then
WriteString(context^.output, '[1..'); WriteString(context^.output, '[1..');
token := transpiler_lex(context^.lexer);
result^.length := token.integerKind; IntToStr(type_expression^.length, 0, buffer);
IntToStr(result^.length, 0, buffer);
WriteString(context^.output, buffer); WriteString(context^.output, buffer);
token := transpiler_lex(context^.lexer);
WriteChar(context^.output, ']') WriteChar(context^.output, ']')
end; end;
WriteString(context^.output, ' OF '); WriteString(context^.output, ' OF ');
token := transpiler_lex(context^.lexer); transpile_type_expression(context, type_expression^.base)
result^.base := transpile_type_expression(context);
return result
end; end;
proc transpile_enumeration_type(context: PTranspilerContext) -> PAstTypeExpression; proc transpile_enumeration_type(context: PTranspilerContext, type_expression: PAstTypeExpression);
var var
token: LexerToken;
result: PAstTypeExpression;
current_case: PIdentifier; current_case: PIdentifier;
case_count: CARDINAL;
written_bytes: CARDINAL; written_bytes: CARDINAL;
begin begin
ALLOCATE(result, TSIZE(AstTypeExpression)); current_case := type_expression^.cases;
result^.kind := astTypeExpressionKindEnumeration;
case_count := 1;
ALLOCATE(result^.cases, TSIZE(Identifier) * 2);
token := transpiler_lex(context^.lexer);
current_case := result^.cases;
current_case^ := token.identifierKind;
token := transpiler_lex(context^.lexer);
while token.kind = lexerKindComma do
token := transpiler_lex(context^.lexer);
INC(case_count);
REALLOCATE(result^.cases, TSIZE(Identifier) * (case_count + 1));
current_case := result^.cases;
INC(current_case, TSIZE(Identifier) * (case_count - 1));
current_case^ := token.identifierKind;
token := transpiler_lex(context^.lexer)
end;
INC(current_case, TSIZE(Identifier));
MemZero(current_case, TSIZE(Identifier));
(* Write the cases using the generated identifier list before. *)
current_case := result^.cases;
WriteString(context^.output, '('); WriteString(context^.output, '(');
WriteLine(context^.output); WriteLine(context^.output);
@ -327,95 +249,57 @@ begin
INC(current_case, TSIZE(Identifier)) INC(current_case, TSIZE(Identifier))
end; end;
WriteLine(context^.output); WriteLine(context^.output);
WriteString(context^.output, ' )'); WriteString(context^.output, ' )')
return result
end; end;
proc transpile_named_type(context: PTranspilerContext) -> PAstTypeExpression; proc transpile_named_type(context: PTranspilerContext, type_expression: PAstTypeExpression);
var var
token: LexerToken;
result: PAstTypeExpression;
written_bytes: CARDINAL; written_bytes: CARDINAL;
begin begin
token := lexer_current(context^.lexer); written_bytes := WriteNBytes(context^.output, ORD(type_expression^.name[1]), ADR(type_expression^.name[2]))
ALLOCATE(result, TSIZE(AstTypeExpression));
result^.kind := astTypeExpressionKindNamed;
result^.name := token.identifierKind;
written_bytes := WriteNBytes(context^.output, ORD(result^.name[1]), ADR(result^.name[2]));
return result
end; end;
proc transpile_procedure_type(context: PTranspilerContext) -> PAstTypeExpression; proc transpile_procedure_type(context: PTranspilerContext, type_expression: PAstTypeExpression);
var var
token: LexerToken;
result: PAstTypeExpression; result: PAstTypeExpression;
current_parameter: PPAstTypeExpression; current_parameter: PPAstTypeExpression;
parameter_count: CARDINAL; parameter_count: CARDINAL;
begin begin
parameter_count := 0;
ALLOCATE(result, TSIZE(AstTypeExpression));
result^.kind := astTypeExpressionKindProcedure;
ALLOCATE(result^.parameters, 1);
token := transpiler_lex(context^.lexer);
WriteString(context^.output, 'PROCEDURE('); WriteString(context^.output, 'PROCEDURE(');
current_parameter := type_expression^.parameters;
token := transpiler_lex(context^.lexer); while current_parameter^ <> nil do
transpile_type_expression(context, current_parameter^);
while token.kind <> lexerKindRightParen do INC(current_parameter, TSIZE(PAstTypeExpression));
INC(parameter_count);
REALLOCATE(result^.parameters, TSIZE(PAstTypeExpression) * (parameter_count + 1));
current_parameter := result^.parameters;
INC(current_parameter, TSIZE(PAstTypeExpression) * (parameter_count - 1));
current_parameter^ := transpile_type_expression(context); if current_parameter^ <> nil then
token := transpiler_lex(context^.lexer);
if token.kind = lexerKindComma then
token := transpiler_lex(context^.lexer);
WriteString(context^.output, ', ') WriteString(context^.output, ', ')
end end
end; end;
current_parameter := result^.parameters; WriteChar(context^.output, ')')
INC(current_parameter, TSIZE(PAstTypeExpression) * parameter_count);
current_parameter^ := nil;
WriteChar(context^.output, ')');
return result
end; end;
proc transpile_type_expression(context: PTranspilerContext) -> PAstTypeExpression; proc transpile_type_expression(context: PTranspilerContext, type_expression: PAstTypeExpression);
var
token: LexerToken;
result: PAstTypeExpression;
begin begin
result := nil; if type_expression^.kind = astTypeExpressionKindRecord then
token := lexer_current(context^.lexer); transpile_record_type(context, type_expression)
if token.kind = lexerKindRecord then
result := transpile_record_type(context)
end; end;
if token.kind = lexerKindLeftParen then if type_expression^.kind = astTypeExpressionKindEnumeration then
result := transpile_enumeration_type(context) transpile_enumeration_type(context, type_expression)
end; end;
if (token.kind = lexerKindArray) or (token.kind = lexerKindLeftSquare) then if type_expression^.kind = astTypeExpressionKindArray then
result := transpile_array_type(context) transpile_array_type(context, type_expression)
end; end;
if token.kind = lexerKindHat then if type_expression^.kind = astTypeExpressionKindPointer then
result := transpile_pointer_type(context) transpile_pointer_type(context, type_expression)
end; end;
if token.kind = lexerKindProc then if type_expression^.kind = astTypeExpressionKindProcedure then
result := transpile_procedure_type(context) transpile_procedure_type(context, type_expression)
end; end;
if token.kind = lexerKindIdentifier then if type_expression^.kind = astTypeExpressionKindNamed then
result := transpile_named_type(context) transpile_named_type(context, type_expression)
end; end
return result
end; end;
proc transpile_type_declaration(context: PTranspilerContext) -> PAstTypeDeclaration; proc transpile_type_declaration(context: PTranspilerContext) -> PAstTypeDeclaration;
@ -435,7 +319,10 @@ begin
token := transpiler_lex(context^.lexer); token := transpiler_lex(context^.lexer);
WriteString(context^.output, ' = '); WriteString(context^.output, ' = ');
token := transpiler_lex(context^.lexer); token := transpiler_lex(context^.lexer);
result^.type_expression := transpile_type_expression(context);
result^.type_expression := parse_type_expression(context^.lexer);
transpile_type_expression(context, result^.type_expression);
token := transpiler_lex(context^.lexer); token := transpiler_lex(context^.lexer);
write_semicolon(context^.output); write_semicolon(context^.output);
@ -491,7 +378,8 @@ begin
token := transpiler_lex(context^.lexer); token := transpiler_lex(context^.lexer);
WriteString(context^.output, ': '); WriteString(context^.output, ': ');
token := transpiler_lex(context^.lexer); token := transpiler_lex(context^.lexer);
type_expression := transpile_type_expression(context); type_expression := parse_type_expression(context^.lexer);
transpile_type_expression(context, type_expression);
token := transpiler_lex(context^.lexer); token := transpiler_lex(context^.lexer);
write_semicolon(context^.output) write_semicolon(context^.output)
end; end;
@ -537,7 +425,8 @@ begin
WriteString(context^.output, ': '); WriteString(context^.output, ': ');
token := transpiler_lex(context^.lexer); token := transpiler_lex(context^.lexer);
type_expression := transpile_type_expression(context); type_expression := parse_type_expression(context^.lexer);
transpile_type_expression(context, type_expression);
token := transpiler_lex(context^.lexer); token := transpiler_lex(context^.lexer);
if (token.kind = lexerKindSemicolon) or (token.kind = lexerKindComma) then if (token.kind = lexerKindSemicolon) or (token.kind = lexerKindComma) then
@ -791,17 +680,44 @@ begin
end end
end; end;
proc transpile(lexer: PLexer, output: File); proc transpile_module_name(context: PTranspilerContext);
var
counter: CARDINAL;
last_slash: CARDINAL;
begin
counter := 1;
last_slash := 0;
while (context^.input_name[counter] <> '.') & (ORD(context^.input_name[counter]) <> 0) do
if context^.input_name[counter] = '/' then
last_slash := counter
end;
INC(counter)
end;
if last_slash = 0 then
counter := 1;
end;
if last_slash <> 0 then
counter := last_slash + 1;
end;
while (context^.input_name[counter] <> '.') & (ORD(context^.input_name[counter]) <> 0) do
WriteChar(context^.output, context^.input_name[counter]);
INC(counter)
end;
end;
proc transpile(lexer: PLexer, output: File, input_name: ShortString);
var var
token: LexerToken; token: LexerToken;
context: TranspilerContext; context: TranspilerContext;
ast_module: PAstModule; ast_module: PAstModule;
begin begin
context.indentation := 0; context.input_name := input_name;
context.output := output; context.output := output;
context.lexer := lexer; context.lexer := lexer;
ast_module := transpile_module(ADR(context)) ast_module := transpile_module(ADR(context))
end; end;
end Transpiler. end.