From d3c0c36fd8a3fc296e1fe6397d53d6e23b2d6e79 Mon Sep 17 00:00:00 2001 From: Eugen Wissner Date: Tue, 3 Jun 2025 12:14:59 +0200 Subject: [PATCH] Don't require the module name after end --- Rakefile | 1 + source/CommandLineInterface.elna | 4 +- source/Common.def | 2 +- source/Common.elna | 2 +- source/Compiler.elna | 4 +- source/Lexer.elna | 2 +- source/Parser.def | 3 + source/Parser.elna | 231 ++++++++++++++++++++++++++- source/Transpiler.def | 5 +- source/Transpiler.elna | 260 +++++++++++-------------------- 10 files changed, 332 insertions(+), 182 deletions(-) diff --git a/Rakefile b/Rakefile index ba6063a..df61126 100644 --- a/Rakefile +++ b/Rakefile @@ -109,6 +109,7 @@ task :backport do .gsub(/proc\(/, 'PROCEDURE(') .gsub(/ & /, ' AND ') .gsub(/ -> /, ': ') + .gsub(/end\./, "END #{source_path.basename.sub_ext('')}.") .gsub(/([[:space:]]*)end(;?)$/, '\1END\2') .gsub(/^([[:space:]]*)(while|return|if)\b/) { |match| match.upcase } .gsub(/^from ([[:alnum:]]+) import/, 'FROM \1 IMPORT') diff --git a/source/CommandLineInterface.elna b/source/CommandLineInterface.elna index eb16b4f..caff109 100644 --- a/source/CommandLineInterface.elna +++ b/source/CommandLineInterface.elna @@ -35,7 +35,7 @@ begin parsed := true; result^.parse := true end; - if parameter[0] <> '-' then + if parameter[1] <> '-' then parsed := true; if Length(result^.input) > 0 then @@ -72,4 +72,4 @@ begin return result end; -end CommandLineInterface. +end. diff --git a/source/Common.def b/source/Common.def index c6f661d..996a971 100644 --- a/source/Common.def +++ b/source/Common.def @@ -1,7 +1,7 @@ DEFINITION MODULE Common; TYPE - ShortString = ARRAY[0..255] OF CHAR; + ShortString = ARRAY[1..256] OF CHAR; Identifier = ARRAY[1..256] OF CHAR; PIdentifier = POINTER TO Identifier; diff --git a/source/Common.elna b/source/Common.elna index 7b8d623..6f34c41 100644 --- a/source/Common.elna +++ b/source/Common.elna @@ -1,3 +1,3 @@ implementation module Common; -end Common. +end. diff --git a/source/Compiler.elna b/source/Compiler.elna index b5f85a5..494e4b4 100644 --- a/source/Compiler.elna +++ b/source/Compiler.elna @@ -29,7 +29,7 @@ begin if IsNoError(source_input) then lexer_initialize(ADR(lexer), source_input); - transpile(ADR(lexer), StdOut); + transpile(ADR(lexer), StdOut, command_line^.input); lexer_destroy(ADR(lexer)); @@ -48,4 +48,4 @@ begin ExitOnHalt(1) end; HALT() -end Compiler. +end. diff --git a/source/Lexer.elna b/source/Lexer.elna index 9a8ecb0..2707968 100644 --- a/source/Lexer.elna +++ b/source/Lexer.elna @@ -825,4 +825,4 @@ end; begin initialize_classification(); initialize_transitions() -end Lexer. +end. diff --git a/source/Parser.def b/source/Parser.def index f968125..bda8cc0 100644 --- a/source/Parser.def +++ b/source/Parser.def @@ -1,6 +1,7 @@ DEFINITION MODULE Parser; FROM Common IMPORT Identifier, PIdentifier; +FROM Lexer IMPORT PLexer; TYPE AstConstantDeclaration = RECORD @@ -56,4 +57,6 @@ TYPE END; PAstModule = POINTER TO AstModule; +PROCEDURE parse_type_expression(lexer: PLexer): PAstTypeExpression; + END Parser. diff --git a/source/Parser.elna b/source/Parser.elna index d4d55ed..7dc0316 100644 --- a/source/Parser.elna +++ b/source/Parser.elna @@ -1,3 +1,232 @@ implementation module Parser; -end Parser. +from SYSTEM import TSIZE; + +from MemUtils import MemZero; +from Storage import ALLOCATE, REALLOCATE; + +from Lexer import LexerKind, LexerToken, lexer_current, lexer_lex; + +(* Calls lexer_lex() but skips the comments. 
*) +proc transpiler_lex(lexer: PLexer) -> LexerToken; +var + result: LexerToken; +begin + result := lexer_lex(lexer); + + while result.kind = lexerKindComment do + result := lexer_lex(lexer) + end; + + return result +end; + +proc parse_type_fields(lexer: PLexer) -> PAstFieldDeclaration; +var + token: LexerToken; + field_declarations: PAstFieldDeclaration; + field_count: CARDINAL; + current_field: PAstFieldDeclaration; +begin + ALLOCATE(field_declarations, TSIZE(AstFieldDeclaration)); + token := transpiler_lex(lexer); + field_count := 0; + + while token.kind <> lexerKindEnd do + INC(field_count); + REALLOCATE(field_declarations, TSIZE(AstFieldDeclaration) * (field_count + 1)); + current_field := field_declarations; + INC(current_field , TSIZE(AstFieldDeclaration) * (field_count - 1)); + + token := transpiler_lex(lexer); + + current_field^.field_name := token.identifierKind; + + token := transpiler_lex(lexer); + current_field^.field_type := parse_type_expression(lexer); + token := transpiler_lex(lexer); + + if token.kind = lexerKindSemicolon then + token := transpiler_lex(lexer) + end + end; + INC(current_field, TSIZE(AstFieldDeclaration)); + MemZero(current_field, TSIZE(AstFieldDeclaration)); + + return field_declarations +end; + +proc parse_record_type(lexer: PLexer) -> PAstTypeExpression; +var + result: PAstTypeExpression; +begin + ALLOCATE(result, TSIZE(AstTypeExpression)); + result^.kind := astTypeExpressionKindRecord; + result^.fields := parse_type_fields(lexer); + + return result +end; + +proc parse_pointer_type(lexer: PLexer) -> PAstTypeExpression; +var + token: LexerToken; + result: PAstTypeExpression; +begin + ALLOCATE(result, TSIZE(AstTypeExpression)); + result^.kind := astTypeExpressionKindPointer; + + token := lexer_current(lexer); + + if token.kind = lexerKindPointer then + token := transpiler_lex(lexer) + end; + token := lexer_current(lexer); + result^.target := parse_type_expression(lexer); + + return result +end; + +proc parse_array_type(lexer: PLexer) -> PAstTypeExpression; +var + token: LexerToken; + buffer: [20]CHAR; + result: PAstTypeExpression; +begin + ALLOCATE(result, TSIZE(AstTypeExpression)); + result^.kind := astTypeExpressionKindArray; + result^.length := 0; + + token := lexer_current(lexer); + + if token.kind = lexerKindArray then + token := transpiler_lex(lexer) + end; + if token.kind <> lexerKindOf then + token := transpiler_lex(lexer); + + result^.length := token.integerKind; + + token := transpiler_lex(lexer); + end; + token := transpiler_lex(lexer); + result^.base := parse_type_expression(lexer); + + return result +end; + +proc parse_enumeration_type(lexer: PLexer) -> PAstTypeExpression; +var + token: LexerToken; + result: PAstTypeExpression; + current_case: PIdentifier; + case_count: CARDINAL; +begin + ALLOCATE(result, TSIZE(AstTypeExpression)); + result^.kind := astTypeExpressionKindEnumeration; + + case_count := 1; + ALLOCATE(result^.cases, TSIZE(Identifier) * 2); + token := transpiler_lex(lexer); + current_case := result^.cases; + current_case^ := token.identifierKind; + + token := transpiler_lex(lexer); + + while token.kind = lexerKindComma do + token := transpiler_lex(lexer); + + INC(case_count); + REALLOCATE(result^.cases, TSIZE(Identifier) * (case_count + 1)); + current_case := result^.cases; + INC(current_case, TSIZE(Identifier) * (case_count - 1)); + current_case^ := token.identifierKind; + + token := transpiler_lex(lexer) + end; + INC(current_case, TSIZE(Identifier)); + MemZero(current_case, TSIZE(Identifier)); + + return result +end; + +proc 
parse_named_type(lexer: PLexer) -> PAstTypeExpression; +var + token: LexerToken; + result: PAstTypeExpression; + written_bytes: CARDINAL; +begin + token := lexer_current(lexer); + ALLOCATE(result, TSIZE(AstTypeExpression)); + + result^.kind := astTypeExpressionKindNamed; + result^.name := token.identifierKind; + + return result +end; + +proc parse_procedure_type(lexer: PLexer) -> PAstTypeExpression; +var + token: LexerToken; + result: PAstTypeExpression; + current_parameter: PPAstTypeExpression; + parameter_count: CARDINAL; +begin + parameter_count := 0; + ALLOCATE(result, TSIZE(AstTypeExpression)); + result^.kind := astTypeExpressionKindProcedure; + + ALLOCATE(result^.parameters, 1); + + token := transpiler_lex(lexer); + token := transpiler_lex(lexer); + + while token.kind <> lexerKindRightParen do + INC(parameter_count); + REALLOCATE(result^.parameters, TSIZE(PAstTypeExpression) * (parameter_count + 1)); + current_parameter := result^.parameters; + INC(current_parameter, TSIZE(PAstTypeExpression) * (parameter_count - 1)); + + current_parameter^ := parse_type_expression(lexer); + + token := transpiler_lex(lexer); + if token.kind = lexerKindComma then + token := transpiler_lex(lexer) + end + end; + current_parameter := result^.parameters; + INC(current_parameter, TSIZE(PAstTypeExpression) * parameter_count); + current_parameter^ := nil; + + return result +end; + +proc parse_type_expression(lexer: PLexer) -> PAstTypeExpression; +var + token: LexerToken; + result: PAstTypeExpression; +begin + result := nil; + token := lexer_current(lexer); + + if token.kind = lexerKindRecord then + result := parse_record_type(lexer) + end; + if token.kind = lexerKindLeftParen then + result := parse_enumeration_type(lexer) + end; + if (token.kind = lexerKindArray) or (token.kind = lexerKindLeftSquare) then + result := parse_array_type(lexer) + end; + if token.kind = lexerKindHat then + result := parse_pointer_type(lexer) + end; + if token.kind = lexerKindProc then + result := parse_procedure_type(lexer) + end; + if token.kind = lexerKindIdentifier then + result := parse_named_type(lexer) + end; + return result +end; + +end. diff --git a/source/Transpiler.def b/source/Transpiler.def index 555a960..22f4d69 100644 --- a/source/Transpiler.def +++ b/source/Transpiler.def @@ -2,16 +2,17 @@ DEFINITION MODULE Transpiler; FROM FIO IMPORT File; +FROM Common IMPORT ShortString; FROM Lexer IMPORT PLexer, Lexer; TYPE TranspilerContext = RECORD - indentation: CARDINAL; + input_name: ShortString; output: File; lexer: PLexer END; PTranspilerContext = POINTER TO TranspilerContext; -PROCEDURE transpile(lexer: PLexer; output: File); +PROCEDURE transpile(lexer: PLexer; output: File; input_name: ShortString); END Transpiler. 
diff --git a/source/Transpiler.elna b/source/Transpiler.elna index 1375db9..6cfa1fe 100644 --- a/source/Transpiler.elna +++ b/source/Transpiler.elna @@ -7,13 +7,14 @@ from NumberIO import IntToStr; from Storage import ALLOCATE, REALLOCATE; from MemUtils import MemCopy, MemZero; -from Common import Identifier, PIdentifier; +from Common import Identifier, PIdentifier, ShortString; from Lexer import Lexer, LexerToken, lexer_current, lexer_lex, LexerKind; from Parser import AstModule, PAstModule, AstTypeExpressionKind, AstConstantDeclaration, PPAstConstantDeclaration, AstTypeDeclaration, PAstTypeDeclaration, PPAstTypeDeclaration, AstVariableDeclaration, PPAstVariableDeclaration, - PAstTypeExpression, AstTypeExpression, PPAstTypeExpression, AstFieldDeclaration, PAstFieldDeclaration; + PAstTypeExpression, AstTypeExpression, PPAstTypeExpression, AstFieldDeclaration, PAstFieldDeclaration, + parse_type_expression; (* Calls lexer_lex() but skips the comments. *) proc transpiler_lex(lexer: PLexer) -> LexerToken; @@ -138,7 +139,7 @@ begin (* Write the module name and end the line with a semicolon and newline. *) token := transpiler_lex(context^.lexer); - write_current(context^.lexer, context^.output); + transpile_module_name(context); token := transpiler_lex(context^.lexer); write_semicolon(context^.output); @@ -156,9 +157,7 @@ begin transpile_statement_part(context); WriteString(context^.output, 'END '); - - token := transpiler_lex(context^.lexer); - write_current(context^.lexer, context^.output); + transpile_module_name(context); token := transpiler_lex(context^.lexer); WriteChar(context^.output, '.'); @@ -169,148 +168,71 @@ begin return result end; -proc transpile_type_fields(context: PTranspilerContext) -> PAstFieldDeclaration; +proc transpile_type_fields(context: PTranspilerContext, fields: PAstFieldDeclaration); var - token: LexerToken; - field_declarations: PAstFieldDeclaration; - field_count: CARDINAL; + written_bytes: CARDINAL; current_field: PAstFieldDeclaration; begin - ALLOCATE(field_declarations, TSIZE(AstFieldDeclaration)); - token := transpiler_lex(context^.lexer); - field_count := 0; - - while token.kind <> lexerKindEnd do - INC(field_count); - REALLOCATE(field_declarations, TSIZE(AstFieldDeclaration) * (field_count + 1)); - current_field := field_declarations; - INC(current_field , TSIZE(AstFieldDeclaration) * (field_count - 1)); + current_field := fields; + while ORD(current_field^.field_name[1]) <> 0 do WriteString(context^.output, ' '); - write_current(context^.lexer, context^.output); - token := transpiler_lex(context^.lexer); - - current_field^.field_name := token.identifierKind; + written_bytes := WriteNBytes(context^.output, ORD(current_field^.field_name[1]), ADR(current_field^.field_name[2])); WriteString(context^.output, ': '); - token := transpiler_lex(context^.lexer); - current_field^.field_type := transpile_type_expression(context); - token := transpiler_lex(context^.lexer); + transpile_type_expression(context, current_field^.field_type); - if token.kind = lexerKindSemicolon then - token := transpiler_lex(context^.lexer); + INC(current_field , TSIZE(AstFieldDeclaration)); + + if ORD(current_field^.field_name[1]) <> 0 then WriteChar(context^.output, ';') end; WriteLine(context^.output) - end; - INC(current_field, TSIZE(AstFieldDeclaration)); - MemZero(current_field, TSIZE(AstFieldDeclaration)); - return field_declarations + end end; -proc transpile_record_type(context: PTranspilerContext) -> PAstTypeExpression; -var - result: PAstTypeExpression; +proc 
transpile_record_type(context: PTranspilerContext, type_expression: PAstTypeExpression); begin - ALLOCATE(result, TSIZE(AstTypeExpression)); - result^.kind := astTypeExpressionKindRecord; - WriteString(context^.output, 'RECORD'); WriteLine(context^.output); - result^.fields := transpile_type_fields(context); - WriteString(context^.output, ' END'); - - return result + transpile_type_fields(context, type_expression^.fields); + WriteString(context^.output, ' END') end; -proc transpile_pointer_type(context: PTranspilerContext) -> PAstTypeExpression; +proc transpile_pointer_type(context: PTranspilerContext, type_expression: PAstTypeExpression); var token: LexerToken; - result: PAstTypeExpression; begin - ALLOCATE(result, TSIZE(AstTypeExpression)); - result^.kind := astTypeExpressionKindPointer; - - token := lexer_current(context^.lexer); WriteString(context^.output, 'POINTER TO '); - if token.kind = lexerKindPointer then - token := transpiler_lex(context^.lexer) - end; - token := lexer_current(context^.lexer); - result^.target := transpile_type_expression(context); - return result + transpile_type_expression(context, type_expression^.target) end; -proc transpile_array_type(context: PTranspilerContext) -> PAstTypeExpression; +proc transpile_array_type(context: PTranspilerContext, type_expression: PAstTypeExpression); var - token: LexerToken; buffer: [20]CHAR; - result: PAstTypeExpression; begin - ALLOCATE(result, TSIZE(AstTypeExpression)); - result^.kind := astTypeExpressionKindArray; - WriteString(context^.output, 'ARRAY'); - token := lexer_current(context^.lexer); - if token.kind = lexerKindArray then - token := transpiler_lex(context^.lexer) - end; - if token.kind <> lexerKindOf then + if type_expression^.length <> 0 then WriteString(context^.output, '[1..'); - token := transpiler_lex(context^.lexer); - result^.length := token.integerKind; - IntToStr(result^.length, 0, buffer); + IntToStr(type_expression^.length, 0, buffer); WriteString(context^.output, buffer); - token := transpiler_lex(context^.lexer); WriteChar(context^.output, ']') end; WriteString(context^.output, ' OF '); - token := transpiler_lex(context^.lexer); - result^.base := transpile_type_expression(context); - - return result + transpile_type_expression(context, type_expression^.base) end; -proc transpile_enumeration_type(context: PTranspilerContext) -> PAstTypeExpression; +proc transpile_enumeration_type(context: PTranspilerContext, type_expression: PAstTypeExpression); var - token: LexerToken; - result: PAstTypeExpression; current_case: PIdentifier; - case_count: CARDINAL; written_bytes: CARDINAL; begin - ALLOCATE(result, TSIZE(AstTypeExpression)); - result^.kind := astTypeExpressionKindEnumeration; - - case_count := 1; - ALLOCATE(result^.cases, TSIZE(Identifier) * 2); - token := transpiler_lex(context^.lexer); - current_case := result^.cases; - current_case^ := token.identifierKind; - - token := transpiler_lex(context^.lexer); - - while token.kind = lexerKindComma do - token := transpiler_lex(context^.lexer); - - INC(case_count); - REALLOCATE(result^.cases, TSIZE(Identifier) * (case_count + 1)); - current_case := result^.cases; - INC(current_case, TSIZE(Identifier) * (case_count - 1)); - current_case^ := token.identifierKind; - - token := transpiler_lex(context^.lexer) - end; - INC(current_case, TSIZE(Identifier)); - MemZero(current_case, TSIZE(Identifier)); - - (* Write the cases using the generated identifier list before. 
*) - current_case := result^.cases; + current_case := type_expression^.cases; WriteString(context^.output, '('); WriteLine(context^.output); @@ -327,95 +249,57 @@ begin INC(current_case, TSIZE(Identifier)) end; WriteLine(context^.output); - WriteString(context^.output, ' )'); - - return result + WriteString(context^.output, ' )') end; -proc transpile_named_type(context: PTranspilerContext) -> PAstTypeExpression; +proc transpile_named_type(context: PTranspilerContext, type_expression: PAstTypeExpression); var - token: LexerToken; - result: PAstTypeExpression; written_bytes: CARDINAL; begin - token := lexer_current(context^.lexer); - ALLOCATE(result, TSIZE(AstTypeExpression)); - - result^.kind := astTypeExpressionKindNamed; - result^.name := token.identifierKind; - - written_bytes := WriteNBytes(context^.output, ORD(result^.name[1]), ADR(result^.name[2])); - - return result + written_bytes := WriteNBytes(context^.output, ORD(type_expression^.name[1]), ADR(type_expression^.name[2])) end; -proc transpile_procedure_type(context: PTranspilerContext) -> PAstTypeExpression; +proc transpile_procedure_type(context: PTranspilerContext, type_expression: PAstTypeExpression); var - token: LexerToken; result: PAstTypeExpression; current_parameter: PPAstTypeExpression; parameter_count: CARDINAL; begin - parameter_count := 0; - ALLOCATE(result, TSIZE(AstTypeExpression)); - result^.kind := astTypeExpressionKindProcedure; - - ALLOCATE(result^.parameters, 1); - - token := transpiler_lex(context^.lexer); WriteString(context^.output, 'PROCEDURE('); + current_parameter := type_expression^.parameters; - token := transpiler_lex(context^.lexer); + while current_parameter^ <> nil do + transpile_type_expression(context, current_parameter^); - while token.kind <> lexerKindRightParen do - INC(parameter_count); - REALLOCATE(result^.parameters, TSIZE(PAstTypeExpression) * (parameter_count + 1)); - current_parameter := result^.parameters; - INC(current_parameter, TSIZE(PAstTypeExpression) * (parameter_count - 1)); + INC(current_parameter, TSIZE(PAstTypeExpression)); - current_parameter^ := transpile_type_expression(context); - - token := transpiler_lex(context^.lexer); - if token.kind = lexerKindComma then - token := transpiler_lex(context^.lexer); + if current_parameter^ <> nil then WriteString(context^.output, ', ') end end; - current_parameter := result^.parameters; - INC(current_parameter, TSIZE(PAstTypeExpression) * parameter_count); - current_parameter^ := nil; - WriteChar(context^.output, ')'); - - return result + WriteChar(context^.output, ')') end; -proc transpile_type_expression(context: PTranspilerContext) -> PAstTypeExpression; -var - token: LexerToken; - result: PAstTypeExpression; +proc transpile_type_expression(context: PTranspilerContext, type_expression: PAstTypeExpression); begin - result := nil; - token := lexer_current(context^.lexer); - - if token.kind = lexerKindRecord then - result := transpile_record_type(context) + if type_expression^.kind = astTypeExpressionKindRecord then + transpile_record_type(context, type_expression) end; - if token.kind = lexerKindLeftParen then - result := transpile_enumeration_type(context) + if type_expression^.kind = astTypeExpressionKindEnumeration then + transpile_enumeration_type(context, type_expression) end; - if (token.kind = lexerKindArray) or (token.kind = lexerKindLeftSquare) then - result := transpile_array_type(context) + if type_expression^.kind = astTypeExpressionKindArray then + transpile_array_type(context, type_expression) end; - if token.kind = 
lexerKindHat then - result := transpile_pointer_type(context) + if type_expression^.kind = astTypeExpressionKindPointer then + transpile_pointer_type(context, type_expression) end; - if token.kind = lexerKindProc then - result := transpile_procedure_type(context) + if type_expression^.kind = astTypeExpressionKindProcedure then + transpile_procedure_type(context, type_expression) end; - if token.kind = lexerKindIdentifier then - result := transpile_named_type(context) - end; - return result + if type_expression^.kind = astTypeExpressionKindNamed then + transpile_named_type(context, type_expression) + end end; proc transpile_type_declaration(context: PTranspilerContext) -> PAstTypeDeclaration; @@ -435,7 +319,10 @@ begin token := transpiler_lex(context^.lexer); WriteString(context^.output, ' = '); token := transpiler_lex(context^.lexer); - result^.type_expression := transpile_type_expression(context); + + result^.type_expression := parse_type_expression(context^.lexer); + transpile_type_expression(context, result^.type_expression); + token := transpiler_lex(context^.lexer); write_semicolon(context^.output); @@ -491,7 +378,8 @@ begin token := transpiler_lex(context^.lexer); WriteString(context^.output, ': '); token := transpiler_lex(context^.lexer); - type_expression := transpile_type_expression(context); + type_expression := parse_type_expression(context^.lexer); + transpile_type_expression(context, type_expression); token := transpiler_lex(context^.lexer); write_semicolon(context^.output) end; @@ -537,7 +425,8 @@ begin WriteString(context^.output, ': '); token := transpiler_lex(context^.lexer); - type_expression := transpile_type_expression(context); + type_expression := parse_type_expression(context^.lexer); + transpile_type_expression(context, type_expression); token := transpiler_lex(context^.lexer); if (token.kind = lexerKindSemicolon) or (token.kind = lexerKindComma) then @@ -791,17 +680,44 @@ begin end end; -proc transpile(lexer: PLexer, output: File); +proc transpile_module_name(context: PTranspilerContext); +var + counter: CARDINAL; + last_slash: CARDINAL; +begin + counter := 1; + last_slash := 0; + + while (context^.input_name[counter] <> '.') & (ORD(context^.input_name[counter]) <> 0) do + if context^.input_name[counter] = '/' then + last_slash := counter + end; + INC(counter) + end; + + if last_slash = 0 then + counter := 1; + end; + if last_slash <> 0 then + counter := last_slash + 1; + end; + while (context^.input_name[counter] <> '.') & (ORD(context^.input_name[counter]) <> 0) do + WriteChar(context^.output, context^.input_name[counter]); + INC(counter) + end; +end; + +proc transpile(lexer: PLexer, output: File, input_name: ShortString); var token: LexerToken; context: TranspilerContext; ast_module: PAstModule; begin - context.indentation := 0; + context.input_name := input_name; context.output := output; context.lexer := lexer; ast_module := transpile_module(ADR(context)) end; -end Transpiler. +end.
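
The backported Modula-2 sources still require the module name after END, so both the added Rakefile rule and the new transpile_module_name procedure recover that name from the source file path instead of reading it from the Elna source. Below is a minimal stand-alone sketch of that derivation in the Rakefile's Ruby; it is only an illustration of the idea, and the path source/Lexer.elna is a hypothetical example input, not taken from this patch.

    require 'pathname'

    source_path = Pathname.new('source/Lexer.elna')    # hypothetical input file

    # What transpile_module_name does on the Elna side: drop the directory
    # part and the extension, leaving only the module name.
    module_name = source_path.basename.sub_ext('')     # Pathname for "Lexer"

    # What the added Rakefile rule does: rewrite the bare "end." terminator
    # back into Modula-2's "END ModuleName." form during the backport.
    puts 'end.'.gsub(/end\./, "END #{module_name}.")   # prints "END Lexer."

Deriving the name from the file path is what allows the Elna sources to close with a plain "end." while the generated Modula-2 keeps its mandatory "END ModuleName." trailer.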