module;
|
|
|
|
from SYSTEM import TSIZE;
|
|
|
|
from MemUtils import MemZero;
|
|
from Storage import ALLOCATE, REALLOCATE;
|
|
|
|
from Lexer import LexerKind, LexerToken, lexer_current, lexer_lex;
|
|
|
|
(* Calls lexer_lex() but skips the comments, so that the parser only
   ever sees meaningful tokens.  Returns the first non-comment token. *)
proc transpiler_lex(lexer: PLexer) -> LexerToken;
var
  result: LexerToken;
begin
  result := lexer_lex(lexer);

  (* Comment tokens are transparent to the parser: keep lexing until
     something else comes up. *)
  while result.kind = lexerKindComment do
    result := lexer_lex(lexer)
  end;

  return result
end;
|
|
|
|
(* Parses the field list of a record type up to and including the
   closing "end".  Returns a heap-allocated array of field
   declarations terminated by a zeroed sentinel entry. *)
proc parse_type_fields(lexer: PLexer) -> PAstFieldDeclaration;
var
  token: LexerToken;
  field_declarations: PAstFieldDeclaration;
  field_count: CARDINAL;
  current_field: PAstFieldDeclaration;
begin
  ALLOCATE(field_declarations, TSIZE(AstFieldDeclaration));
  (* Point at the first (sentinel) slot up front so that an empty
     field list does not leave current_field uninitialized when the
     sentinel is written below. *)
  current_field := field_declarations;
  token := transpiler_lex(lexer);
  field_count := 0;

  while token.kind <> lexerKindEnd do
    INC(field_count);
    (* Always keep one extra slot for the terminating sentinel. *)
    REALLOCATE(field_declarations, TSIZE(AstFieldDeclaration) * (field_count + 1));
    current_field := field_declarations;
    INC(current_field, TSIZE(AstFieldDeclaration) * (field_count - 1));

    token := transpiler_lex(lexer);

    current_field^.field_name := token.identifierKind;

    token := transpiler_lex(lexer);
    current_field^.field_type := parse_type_expression(lexer);
    token := transpiler_lex(lexer);

    (* The ";" between fields is optional before "end". *)
    if token.kind = lexerKindSemicolon then
      token := transpiler_lex(lexer)
    end
  end;
  (* current_field addresses the last parsed field (or the first slot
     when no field was parsed); zero the slot after the last field as
     the end-of-array marker. *)
  if field_count <> 0 then
    INC(current_field, TSIZE(AstFieldDeclaration))
  end;
  MemZero(current_field, TSIZE(AstFieldDeclaration));

  return field_declarations
end;
|
|
|
|
(* Parses a "record ... end" type into an AST type expression.
   Assumes the "record" keyword is the current token; the field list
   itself is handled by parse_type_fields(). *)
proc parse_record_type(lexer: PLexer) -> PAstTypeExpression;
var
  result: PAstTypeExpression;
begin
  ALLOCATE(result, TSIZE(AstTypeExpression));
  result^.kind := astTypeExpressionKindRecord;
  result^.fields := parse_type_fields(lexer);

  return result
end;
|
|
|
|
(* Parses a pointer type into an AST type expression.  Skips the
   pointer marker when present and parses the target type. *)
proc parse_pointer_type(lexer: PLexer) -> PAstTypeExpression;
var
  token: LexerToken;
  result: PAstTypeExpression;
begin
  ALLOCATE(result, TSIZE(AstTypeExpression));
  result^.kind := astTypeExpressionKindPointer;

  token := lexer_current(lexer);

  (* Advance past the pointer marker when we are standing on it. *)
  if token.kind = lexerKindPointer then
    token := transpiler_lex(lexer)
  end;
  (* parse_type_expression() reads the current token itself, so the
     redundant lexer_current() re-read that used to sit here was a
     dead store and has been removed. *)
  result^.target := parse_type_expression(lexer);

  return result
end;
|
|
|
|
(* Parses an array type into an AST type expression.  An explicit
   element count, when present before "of", is stored in the length
   field; length stays 0 otherwise.  The unused local buffer that was
   declared here has been removed. *)
proc parse_array_type(lexer: PLexer) -> PAstTypeExpression;
var
  token: LexerToken;
  result: PAstTypeExpression;
begin
  ALLOCATE(result, TSIZE(AstTypeExpression));
  result^.kind := astTypeExpressionKindArray;
  (* 0 means: no explicit length was given. *)
  result^.length := 0;

  token := lexer_current(lexer);

  (* Advance past the "array" keyword when we are standing on it. *)
  if token.kind = lexerKindArray then
    token := transpiler_lex(lexer)
  end;
  (* Anything but "of" here introduces an explicit length literal. *)
  if token.kind <> lexerKindOf then
    token := transpiler_lex(lexer);

    result^.length := token.integerKind;

    token := transpiler_lex(lexer)
  end;
  token := transpiler_lex(lexer);
  result^.base := parse_type_expression(lexer);

  return result
end;
|
|
|
|
(* Parses an enumeration type such as "(a, b, c)" into an AST type
   expression.  The cases are collected into a heap array terminated
   by a zeroed identifier; at least one case is assumed to exist. *)
proc parse_enumeration_type(lexer: PLexer) -> PAstTypeExpression;
var
  token: LexerToken;
  result: PAstTypeExpression;
  current_case: PIdentifier;
  case_count: CARDINAL;
begin
  ALLOCATE(result, TSIZE(AstTypeExpression));
  result^.kind := astTypeExpressionKindEnumeration;

  (* Room for the first case plus the terminating sentinel. *)
  case_count := 1;
  ALLOCATE(result^.cases, TSIZE(Identifier) * 2);
  token := transpiler_lex(lexer);
  current_case := result^.cases;
  current_case^ := token.identifierKind;

  token := transpiler_lex(lexer);

  (* Every further case is introduced by a comma. *)
  while token.kind = lexerKindComma do
    token := transpiler_lex(lexer);

    INC(case_count);
    (* Always keep one extra slot for the terminating sentinel. *)
    REALLOCATE(result^.cases, TSIZE(Identifier) * (case_count + 1));
    current_case := result^.cases;
    INC(current_case, TSIZE(Identifier) * (case_count - 1));
    current_case^ := token.identifierKind;

    token := transpiler_lex(lexer)
  end;
  (* Zero the slot after the last case as the end-of-array marker. *)
  INC(current_case, TSIZE(Identifier));
  MemZero(current_case, TSIZE(Identifier));

  return result
end;
|
|
|
|
(* Parses a reference to a named type (a plain identifier) into an
   AST type expression.  The unused local written_bytes has been
   removed. *)
proc parse_named_type(lexer: PLexer) -> PAstTypeExpression;
var
  token: LexerToken;
  result: PAstTypeExpression;
begin
  token := lexer_current(lexer);
  ALLOCATE(result, TSIZE(AstTypeExpression));

  result^.kind := astTypeExpressionKindNamed;
  result^.name := token.identifierKind;

  return result
end;
|
|
|
|
(* Parses a procedure type with a parenthesized parameter type list
   into an AST type expression.  The parameter types are collected
   into a heap array terminated by a nil pointer. *)
proc parse_procedure_type(lexer: PLexer) -> PAstTypeExpression;
var
  token: LexerToken;
  result: PAstTypeExpression;
  current_parameter: PPAstTypeExpression;
  parameter_count: CARDINAL;
begin
  parameter_count := 0;
  ALLOCATE(result, TSIZE(AstTypeExpression));
  result^.kind := astTypeExpressionKindProcedure;

  (* Reserve room for the nil sentinel written at the end.  The
     previous single-byte allocation overflowed on an empty parameter
     list, since the sentinel store writes a whole pointer. *)
  ALLOCATE(result^.parameters, TSIZE(PAstTypeExpression));

  (* Skip the "proc" keyword and the opening parenthesis. *)
  token := transpiler_lex(lexer);
  token := transpiler_lex(lexer);

  while token.kind <> lexerKindRightParen do
    INC(parameter_count);
    (* Always keep one extra slot for the nil terminator. *)
    REALLOCATE(result^.parameters, TSIZE(PAstTypeExpression) * (parameter_count + 1));
    current_parameter := result^.parameters;
    INC(current_parameter, TSIZE(PAstTypeExpression) * (parameter_count - 1));

    current_parameter^ := parse_type_expression(lexer);

    token := transpiler_lex(lexer);
    (* Parameters are separated by commas. *)
    if token.kind = lexerKindComma then
      token := transpiler_lex(lexer)
    end
  end;
  (* Write the nil terminator after the last parameter. *)
  current_parameter := result^.parameters;
  INC(current_parameter, TSIZE(PAstTypeExpression) * parameter_count);
  current_parameter^ := nil;

  return result
end;
|
|
|
|
(* Dispatches on the current token to the matching type parser:
   record, enumeration, array, pointer, procedure or named type.
   Returns nil when the current token starts no known type. *)
proc parse_type_expression(lexer: PLexer) -> PAstTypeExpression;
var
  token: LexerToken;
  result: PAstTypeExpression;
begin
  result := nil;
  token := lexer_current(lexer);

  (* The branches below are mutually exclusive because each compares
     against the token kind saved before any sub-parser runs. *)
  if token.kind = lexerKindRecord then
    result := parse_record_type(lexer)
  end;
  if token.kind = lexerKindLeftParen then
    result := parse_enumeration_type(lexer)
  end;
  if (token.kind = lexerKindArray) or (token.kind = lexerKindLeftSquare) then
    result := parse_array_type(lexer)
  end;
  if token.kind = lexerKindHat then
    result := parse_pointer_type(lexer)
  end;
  if token.kind = lexerKindProc then
    result := parse_procedure_type(lexer)
  end;
  if token.kind = lexerKindIdentifier then
    result := parse_named_type(lexer)
  end;
  return result
end;
|
|
|
|
(* Parses a single type declaration.  Assumes the declared identifier
   is the current token. *)
proc parse_type_declaration(lexer: PLexer) -> PAstTypeDeclaration;
var
  token: LexerToken;
  result: PAstTypeDeclaration;
begin
  token := lexer_current(lexer);

  ALLOCATE(result, TSIZE(AstTypeDeclaration));
  result^.identifier := token.identifierKind;

  (* Advance two tokens: past the name and the separator that follows
     it, so the type expression parser stands on its first token. *)
  token := transpiler_lex(lexer);
  token := transpiler_lex(lexer);

  result^.type_expression := parse_type_expression(lexer);
  (* Consume the token ending the declaration. *)
  token := transpiler_lex(lexer);

  return result
end;
|
|
|
|
(* Parses the optional "type" declaration section and returns a
   heap array of type declarations terminated by a nil pointer. *)
proc parse_type_part(lexer: PLexer) -> PPAstTypeDeclaration;
var
  token: LexerToken;
  result: PPAstTypeDeclaration;
  current_declaration: PPAstTypeDeclaration;
  declaration_count: CARDINAL;
begin
  token := lexer_current(lexer);

  (* Room for the nil sentinel even when the section is absent. *)
  ALLOCATE(result, TSIZE(PAstTypeDeclaration));
  current_declaration := result;
  declaration_count := 0;

  if token.kind = lexerKindType then
    token := transpiler_lex(lexer);

    (* Each declaration starts with the declared identifier. *)
    while token.kind = lexerKindIdentifier do
      INC(declaration_count);

      (* Always keep one extra slot for the nil terminator. *)
      REALLOCATE(result, TSIZE(PAstTypeDeclaration) * (declaration_count + 1));
      current_declaration := result;
      INC(current_declaration, TSIZE(PAstTypeDeclaration) * (declaration_count - 1));

      current_declaration^ := parse_type_declaration(lexer);
      token := transpiler_lex(lexer)
    end
  end;
  (* current_declaration addresses the last stored declaration; move
     it to the sentinel slot unless the section was empty. *)
  if declaration_count <> 0 then
    INC(current_declaration, TSIZE(PAstTypeDeclaration))
  end;
  current_declaration^ := nil;

  return result
end;
|
|
|
|
(* Parses a single variable declaration.  Assumes the variable name
   is the current token. *)
proc parse_variable_declaration(lexer: PLexer) -> PAstVariableDeclaration;
var
  token: LexerToken;
  result: PAstVariableDeclaration;
begin
  ALLOCATE(result, TSIZE(AstVariableDeclaration));

  token := lexer_current(lexer);
  result^.variable_name := token.identifierKind;

  (* Advance two tokens: past the name and the separator that follows
     it, so the type expression parser stands on its first token. *)
  token := transpiler_lex(lexer);

  token := transpiler_lex(lexer);
  result^.variable_type := parse_type_expression(lexer);

  (* Consume the token ending the declaration. *)
  token := transpiler_lex(lexer);
  return result
end;
|
|
|
|
(* Parses the optional "var" declaration section and returns a heap
   array of variable declarations terminated by a nil pointer. *)
proc parse_variable_part(lexer: PLexer) -> PPAstVariableDeclaration;
var
  token: LexerToken;
  result: PPAstVariableDeclaration;
  current_declaration: PPAstVariableDeclaration;
  declaration_count: CARDINAL;
begin
  token := lexer_current(lexer);

  (* Room for the nil sentinel even when the section is absent. *)
  ALLOCATE(result, TSIZE(PAstVariableDeclaration));
  current_declaration := result;
  declaration_count := 0;

  if token.kind = lexerKindVar then
    token := transpiler_lex(lexer);

    (* Each declaration starts with the declared identifier. *)
    while token.kind = lexerKindIdentifier do
      INC(declaration_count);

      (* Always keep one extra slot for the nil terminator. *)
      REALLOCATE(result, TSIZE(PAstVariableDeclaration) * (declaration_count + 1));
      current_declaration := result;
      INC(current_declaration, TSIZE(PAstVariableDeclaration) * (declaration_count - 1));

      current_declaration^ := parse_variable_declaration(lexer);
      token := transpiler_lex(lexer)
    end
  end;
  (* current_declaration addresses the last stored declaration; move
     it to the sentinel slot unless the section was empty. *)
  if declaration_count <> 0 then
    INC(current_declaration, TSIZE(PAstVariableDeclaration))
  end;
  current_declaration^ := nil;

  return result
end;
|
|
|
|
(* Parses a single constant declaration with an integer value.
   Assumes the constant name is the current token. *)
proc parse_constant_declaration(lexer: PLexer) -> PAstConstantDeclaration;
var
  token: LexerToken;
  result: PAstConstantDeclaration;
begin
  ALLOCATE(result, TSIZE(AstConstantDeclaration));

  token := lexer_current(lexer);
  result^.constant_name := token.identifierKind;

  (* Skip the separator between the name and the value. *)
  token := transpiler_lex(lexer);

  token := transpiler_lex(lexer);
  result^.constant_value := token.integerKind;

  (* Consume the token ending the declaration. *)
  token := transpiler_lex(lexer);

  return result
end;
|
|
|
|
(* Parses the optional "const" declaration section and returns a heap
   array of constant declarations terminated by a nil pointer. *)
proc parse_constant_part(lexer: PLexer) -> PPAstConstantDeclaration;
var
  token: LexerToken;
  result: PPAstConstantDeclaration;
  current_declaration: PPAstConstantDeclaration;
  declaration_count: CARDINAL;
begin
  token := lexer_current(lexer);

  (* Room for the nil sentinel even when the section is absent. *)
  ALLOCATE(result, TSIZE(PAstConstantDeclaration));
  current_declaration := result;
  declaration_count := 0;

  if token.kind = lexerKindConst then
    token := transpiler_lex(lexer);

    (* Each declaration starts with the declared identifier. *)
    while token.kind = lexerKindIdentifier do
      INC(declaration_count);

      (* Always keep one extra slot for the nil terminator. *)
      REALLOCATE(result, TSIZE(PAstConstantDeclaration) * (declaration_count + 1));
      current_declaration := result;
      INC(current_declaration, TSIZE(PAstConstantDeclaration) * (declaration_count - 1));

      current_declaration^ := parse_constant_declaration(lexer);
      token := transpiler_lex(lexer)
    end
  end;
  (* current_declaration addresses the last stored declaration; move
     it to the sentinel slot unless the section was empty. *)
  if declaration_count <> 0 then
    INC(current_declaration, TSIZE(PAstConstantDeclaration))
  end;
  current_declaration^ := nil;

  return result
end;
|
|
|
|
(* Parses one "from Package import a, b, c;" statement.  Assumes the
   "from" keyword is the current token.  The imported symbols are
   returned as a heap array terminated by a zeroed identifier; at
   least one symbol is assumed to be present. *)
proc parse_import_statement(lexer: PLexer) -> PAstImportStatement;
var
  result: PAstImportStatement;
  token: LexerToken;
  symbol_count: CARDINAL;
  current_symbol: PIdentifier;
begin
  ALLOCATE(result, TSIZE(AstImportStatement));
  symbol_count := 1;

  token := transpiler_lex(lexer);
  result^.package := token.identifierKind;

  (* Skip the "import" keyword and make room for the first symbol
     plus the terminating sentinel. *)
  token := transpiler_lex(lexer);
  ALLOCATE(result^.symbols, TSIZE(Identifier) * 2);

  current_symbol := result^.symbols;

  token := transpiler_lex(lexer);
  current_symbol^ := token.identifierKind;

  token := transpiler_lex(lexer);
  (* Further symbols are read until the closing ";". *)
  while token.kind <> lexerKindSemicolon do
    token := transpiler_lex(lexer);
    INC(symbol_count);

    (* Always keep one extra slot for the terminating sentinel. *)
    REALLOCATE(result^.symbols, TSIZE(Identifier) * (symbol_count + 1));
    current_symbol := result^.symbols;
    INC(current_symbol, TSIZE(Identifier) * (symbol_count - 1));

    current_symbol^ := token.identifierKind;
    token := transpiler_lex(lexer)
  end;
  (* Zero the slot after the last symbol as the end-of-array marker. *)
  INC(current_symbol, TSIZE(Identifier));
  MemZero(current_symbol, TSIZE(Identifier));

  (* Advance past the ";" to the next statement. *)
  token := transpiler_lex(lexer);

  return result
end;
|
|
|
|
(* Parses a sequence of import statements and returns them as a heap
   array terminated by a nil pointer. *)
proc parse_import_part(lexer: PLexer) -> PPAstImportStatement;
var
  token: LexerToken;
  import_statement: PPAstImportStatement;
  result: PPAstImportStatement;
  import_count: CARDINAL;
begin
  token := lexer_current(lexer);
  (* Room for the nil sentinel even when nothing is imported. *)
  ALLOCATE(result, TSIZE(PAstImportStatement));
  import_statement := result;
  import_count := 0;

  (* Each import statement begins with "from". *)
  while token.kind = lexerKindFrom do
    INC(import_count);

    (* Always keep one extra slot for the nil terminator. *)
    REALLOCATE(result, TSIZE(PAstImportStatement) * (import_count + 1));
    import_statement := result;
    INC(import_statement, TSIZE(PAstImportStatement) * (import_count - 1));

    import_statement^ := parse_import_statement(lexer);
    token := lexer_current(lexer)
  end;
  (* import_statement addresses the last stored statement; move it to
     the sentinel slot unless nothing was imported. *)
  if import_count > 0 then
    INC(import_statement, TSIZE(PAstImportStatement))
  end;
  import_statement^ := nil;

  return result
end;
|
|
|
|
end.
|