module;

from SYSTEM import TSIZE;

from MemUtils import MemZero;
from Storage import ALLOCATE, REALLOCATE;

from Lexer import LexerKind, LexerToken, lexer_current, lexer_lex;

(* Calls lexer_lex() but skips the comments. *)
proc transpiler_lex(lexer: PLexer) -> LexerToken;
var
  result: LexerToken;
begin
  result := lexer_lex(lexer);

  while result.kind = lexerKindComment do
    result := lexer_lex(lexer)
  end;

  return result
end;

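(* Parses record fields until the "end" keyword; the field list is terminated with a zeroed sentinel entry. *)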
proc parse_type_fields(lexer: PLexer) -> PAstFieldDeclaration;
var
  token: LexerToken;
  field_declarations: PAstFieldDeclaration;
  field_count: CARDINAL;
  current_field: PAstFieldDeclaration;
begin
  ALLOCATE(field_declarations, TSIZE(AstFieldDeclaration));
  token := transpiler_lex(lexer);
  field_count := 0;

  while token.kind <> lexerKindEnd do
    INC(field_count);
    REALLOCATE(field_declarations, TSIZE(AstFieldDeclaration) * (field_count + 1));
    current_field := field_declarations;
    INC(current_field, TSIZE(AstFieldDeclaration) * (field_count - 1));

    token := transpiler_lex(lexer);

    current_field^.field_name := token.identifierKind;

    token := transpiler_lex(lexer);
    current_field^.field_type := parse_type_expression(lexer);
    token := transpiler_lex(lexer);

    if token.kind = lexerKindSemicolon then
      token := transpiler_lex(lexer)
    end
  end;
  INC(current_field, TSIZE(AstFieldDeclaration));
  MemZero(current_field, TSIZE(AstFieldDeclaration));

  return field_declarations
end;

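(* Parses a record type expression; the fields are read with parse_type_fields(). *)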
proc parse_record_type(lexer: PLexer) -> PAstTypeExpression;
var
  result: PAstTypeExpression;
begin
  NEW(result);
  result^.kind := astTypeExpressionKindRecord;
  result^.fields := parse_type_fields(lexer);

  return result
end;

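(* Parses a pointer type expression and the type it points to. *)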
proc parse_pointer_type(lexer: PLexer) -> PAstTypeExpression;
var
  token: LexerToken;
  result: PAstTypeExpression;
begin
  NEW(result);
  result^.kind := astTypeExpressionKindPointer;

  token := lexer_current(lexer);

  if token.kind = lexerKindPointer then
    token := transpiler_lex(lexer)
  end;
  token := lexer_current(lexer);
  result^.target := parse_type_expression(lexer);

  return result
end;

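(* Parses an array type expression with an optional length and the base type. *)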
proc parse_array_type(lexer: PLexer) -> PAstTypeExpression;
var
  token: LexerToken;
  buffer: [20]CHAR;
  result: PAstTypeExpression;
begin
  NEW(result);
  result^.kind := astTypeExpressionKindArray;
  result^.length := 0;

  token := lexer_current(lexer);

  if token.kind = lexerKindArray then
    token := transpiler_lex(lexer)
  end;
  if token.kind <> lexerKindOf then
    token := transpiler_lex(lexer);

    result^.length := token.integerKind;

    token := transpiler_lex(lexer);
  end;
  token := transpiler_lex(lexer);
  result^.base := parse_type_expression(lexer);

  return result
end;

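(* Parses an enumeration type as a comma separated list of identifiers; the case list is terminated with a zeroed sentinel. *)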
proc parse_enumeration_type(lexer: PLexer) -> PAstTypeExpression;
var
  token: LexerToken;
  result: PAstTypeExpression;
  current_case: PIdentifier;
  case_count: CARDINAL;
begin
  NEW(result);
  result^.kind := astTypeExpressionKindEnumeration;

  case_count := 1;
  ALLOCATE(result^.cases, TSIZE(Identifier) * 2);
  token := transpiler_lex(lexer);
  current_case := result^.cases;
  current_case^ := token.identifierKind;

  token := transpiler_lex(lexer);

  while token.kind = lexerKindComma do
    token := transpiler_lex(lexer);

    INC(case_count);
    REALLOCATE(result^.cases, TSIZE(Identifier) * (case_count + 1));
    current_case := result^.cases;
    INC(current_case, TSIZE(Identifier) * (case_count - 1));
    current_case^ := token.identifierKind;

    token := transpiler_lex(lexer)
  end;
  INC(current_case, TSIZE(Identifier));
  MemZero(current_case, TSIZE(Identifier));

  return result
end;

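(* Parses a type expression that just names another type. *)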
proc parse_named_type(lexer: PLexer) -> PAstTypeExpression;
var
  token: LexerToken;
  result: PAstTypeExpression;
begin
  token := lexer_current(lexer);
  NEW(result);

  result^.kind := astTypeExpressionKindNamed;
  result^.name := token.identifierKind;

  return result
end;

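(* Parses a procedure type with its parenthesized parameter type list; the list is terminated with nil. *)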
proc parse_procedure_type(lexer: PLexer) -> PAstTypeExpression;
var
  token: LexerToken;
  result: PAstTypeExpression;
  current_parameter: PPAstTypeExpression;
  parameter_count: CARDINAL;
begin
  parameter_count := 0;
  NEW(result);
  result^.kind := astTypeExpressionKindProcedure;

  ALLOCATE(result^.parameters, 1);

  token := transpiler_lex(lexer);
  token := transpiler_lex(lexer);

  while token.kind <> lexerKindRightParen do
    INC(parameter_count);
    REALLOCATE(result^.parameters, TSIZE(PAstTypeExpression) * (parameter_count + 1));
    current_parameter := result^.parameters;
    INC(current_parameter, TSIZE(PAstTypeExpression) * (parameter_count - 1));

    current_parameter^ := parse_type_expression(lexer);

    token := transpiler_lex(lexer);
    if token.kind = lexerKindComma then
      token := transpiler_lex(lexer)
    end
  end;
  current_parameter := result^.parameters;
  INC(current_parameter, TSIZE(PAstTypeExpression) * parameter_count);
  current_parameter^ := nil;

  return result
end;

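(* Parses a type expression by dispatching on the current token to the specific type parsers. *)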
proc parse_type_expression(lexer: PLexer) -> PAstTypeExpression;
var
  token: LexerToken;
  result: PAstTypeExpression;
begin
  result := nil;
  token := lexer_current(lexer);

  if token.kind = lexerKindRecord then
    result := parse_record_type(lexer)
  end;
  if token.kind = lexerKindLeftParen then
    result := parse_enumeration_type(lexer)
  end;
  if (token.kind = lexerKindArray) or (token.kind = lexerKindLeftSquare) then
    result := parse_array_type(lexer)
  end;
  if token.kind = lexerKindHat then
    result := parse_pointer_type(lexer)
  end;
  if token.kind = lexerKindProc then
    result := parse_procedure_type(lexer)
  end;
  if token.kind = lexerKindIdentifier then
    result := parse_named_type(lexer)
  end;
  return result
end;

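(* Parses a single type declaration: an identifier followed by its type expression. *)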
proc parse_type_declaration(lexer: PLexer) -> PAstTypeDeclaration;
var
  token: LexerToken;
  result: PAstTypeDeclaration;
begin
  token := lexer_current(lexer);

  NEW(result);
  result^.identifier := token.identifierKind;

  token := transpiler_lex(lexer);
  token := transpiler_lex(lexer);

  result^.type_expression := parse_type_expression(lexer);
  token := transpiler_lex(lexer);

  return result
end;

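(* Parses the optional type declaration part; returns a nil terminated list of type declarations. *)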
proc parse_type_part(lexer: PLexer) -> PPAstTypeDeclaration;
var
  token: LexerToken;
  result: PPAstTypeDeclaration;
  current_declaration: PPAstTypeDeclaration;
  declaration_count: CARDINAL;
begin
  token := lexer_current(lexer);

  ALLOCATE(result, TSIZE(PAstTypeDeclaration));
  current_declaration := result;
  declaration_count := 0;

  if token.kind = lexerKindType then
    token := transpiler_lex(lexer);

    while token.kind = lexerKindIdentifier do
      INC(declaration_count);

      REALLOCATE(result, TSIZE(PAstTypeDeclaration) * (declaration_count + 1));
      current_declaration := result;
      INC(current_declaration, TSIZE(PAstTypeDeclaration) * (declaration_count - 1));

      current_declaration^ := parse_type_declaration(lexer);
      token := transpiler_lex(lexer)
    end
  end;
  if declaration_count <> 0 then
    INC(current_declaration, TSIZE(PAstTypeDeclaration))
  end;
  current_declaration^ := nil;

  return result
end;

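(* Parses a single variable declaration: the variable name followed by its type. *)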
proc parse_variable_declaration(lexer: PLexer) -> PAstVariableDeclaration;
var
  token: LexerToken;
  result: PAstVariableDeclaration;
begin
  NEW(result);

  token := lexer_current(lexer);
  result^.variable_name := token.identifierKind;

  token := transpiler_lex(lexer);

  token := transpiler_lex(lexer);
  result^.variable_type := parse_type_expression(lexer);

  token := transpiler_lex(lexer);
  return result
end;

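(* Parses the optional variable declaration part; returns a nil terminated list of variable declarations. *)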
proc parse_variable_part(lexer: PLexer) -> PPAstVariableDeclaration;
var
  token: LexerToken;
  result: PPAstVariableDeclaration;
  current_declaration: PPAstVariableDeclaration;
  declaration_count: CARDINAL;
begin
  token := lexer_current(lexer);

  ALLOCATE(result, TSIZE(PAstVariableDeclaration));
  current_declaration := result;
  declaration_count := 0;

  if token.kind = lexerKindVar then
    token := transpiler_lex(lexer);

    while token.kind = lexerKindIdentifier do
      INC(declaration_count);

      REALLOCATE(result, TSIZE(PAstVariableDeclaration) * (declaration_count + 1));
      current_declaration := result;
      INC(current_declaration, TSIZE(PAstVariableDeclaration) * (declaration_count - 1));

      current_declaration^ := parse_variable_declaration(lexer);
      token := transpiler_lex(lexer)
    end
  end;
  if declaration_count <> 0 then
    INC(current_declaration, TSIZE(PAstVariableDeclaration))
  end;
  current_declaration^ := nil;

  return result
end;

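(* Parses a single constant declaration: the constant name followed by its value. *)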
proc parse_constant_declaration(lexer: PLexer) -> PAstConstantDeclaration;
var
  token: LexerToken;
  result: PAstConstantDeclaration;
begin
  NEW(result);

  token := lexer_current(lexer);
  result^.constant_name := token.identifierKind;

  token := transpiler_lex(lexer);

  token := transpiler_lex(lexer);
  result^.constant_value := token.integerKind;

  token := transpiler_lex(lexer);

  return result
end;

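(* Parses the optional constant declaration part; returns a nil terminated list of constant declarations. *)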
proc parse_constant_part(lexer: PLexer) -> PPAstConstantDeclaration;
var
  token: LexerToken;
  result: PPAstConstantDeclaration;
  current_declaration: PPAstConstantDeclaration;
  declaration_count: CARDINAL;
begin
  token := lexer_current(lexer);

  ALLOCATE(result, TSIZE(PAstConstantDeclaration));
  current_declaration := result;
  declaration_count := 0;

  if token.kind = lexerKindConst then
    token := transpiler_lex(lexer);

    while token.kind = lexerKindIdentifier do
      INC(declaration_count);

      REALLOCATE(result, TSIZE(PAstConstantDeclaration) * (declaration_count + 1));
      current_declaration := result;
      INC(current_declaration, TSIZE(PAstConstantDeclaration) * (declaration_count - 1));

      current_declaration^ := parse_constant_declaration(lexer);
      token := transpiler_lex(lexer)
    end
  end;
  if declaration_count <> 0 then
    INC(current_declaration, TSIZE(PAstConstantDeclaration))
  end;
  current_declaration^ := nil;

  return result
end;

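(* Parses a single from-import statement; the imported symbols are terminated with a zeroed sentinel. *)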
proc parse_import_statement(lexer: PLexer) -> PAstImportStatement;
var
  result: PAstImportStatement;
  token: LexerToken;
  symbol_count: CARDINAL;
  current_symbol: PIdentifier;
begin
  NEW(result);
  symbol_count := 1;

  token := transpiler_lex(lexer);
  result^.package := token.identifierKind;

  token := transpiler_lex(lexer);
  ALLOCATE(result^.symbols, TSIZE(Identifier) * 2);

  current_symbol := result^.symbols;

  token := transpiler_lex(lexer);
  current_symbol^ := token.identifierKind;

  token := transpiler_lex(lexer);
  while token.kind <> lexerKindSemicolon do
    token := transpiler_lex(lexer);
    INC(symbol_count);

    REALLOCATE(result^.symbols, TSIZE(Identifier) * (symbol_count + 1));
    current_symbol := result^.symbols;
    INC(current_symbol, TSIZE(Identifier) * (symbol_count - 1));

    current_symbol^ := token.identifierKind;
    token := transpiler_lex(lexer)
  end;
  INC(current_symbol, TSIZE(Identifier));
  MemZero(current_symbol, TSIZE(Identifier));

  token := transpiler_lex(lexer);

  return result
end;

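(* Parses consecutive import statements; returns a nil terminated list of them. *)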
proc parse_import_part(lexer: PLexer) -> PPAstImportStatement;
var
  token: LexerToken;
  import_statement: PPAstImportStatement;
  result: PPAstImportStatement;
  import_count: CARDINAL;
begin
  token := lexer_current(lexer);
  ALLOCATE(result, TSIZE(PAstImportStatement));
  import_statement := result;
  import_count := 0;

  while token.kind = lexerKindFrom do
    INC(import_count);

    REALLOCATE(result, TSIZE(PAstImportStatement) * (import_count + 1));
    import_statement := result;
    INC(import_statement, TSIZE(PAstImportStatement) * (import_count - 1));

    import_statement^ := parse_import_statement(lexer);
    token := lexer_current(lexer)
  end;
  if import_count > 0 then
    INC(import_statement, TSIZE(PAstImportStatement))
  end;
  import_statement^ := nil;

  return result
end;

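(* Parses an integer, character or null literal. Returns nil if the current token does not start a literal. *)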
proc parse_literal(lexer: PLexer) -> PAstLiteral;
var
  literal: PAstLiteral;
  token: LexerToken;
begin
  literal := nil;
  token := lexer_current(lexer);

  if token.kind = lexerKindInteger then
    NEW(literal);

    literal^.kind := astLiteralKindInteger;
    literal^.integer := token.integerKind;
  end;
  if token.kind = lexerKindCharacter then
    NEW(literal);

    literal^.kind := astLiteralKindString;
    literal^.string := token.stringKind;
  end;
  if token.kind = lexerKindNull then
    NEW(literal);

    literal^.kind := astLiteralKindNull;
  end;
  if literal <> nil then
    token := transpiler_lex(lexer)
  end;

  return literal
end;

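(* Parses a factor: a literal, a unary minus or "~" expression, or an identifier. *)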
proc parse_factor(lexer: PLexer) -> PAstExpression;
var
  next_token: LexerToken;
  result: PAstExpression;
  literal: PAstLiteral;
begin
  result := nil;
  next_token := lexer_current(lexer);

  literal := parse_literal(lexer);

  if (result = nil) & (literal <> nil) then
    NEW(result);

    result^.kind := astExpressionKindLiteral;
    result^.literal := literal;
  end;
  if (result = nil) & (next_token.kind = lexerKindMinus) then
    NEW(result);
    next_token := transpiler_lex(lexer);

    result^.kind := astExpressionKindUnary;
    result^.unary_operator := astUnaryOperatorMinus;
    result^.unary_operand := parse_factor(lexer)
  end;
  if (result = nil) & (next_token.kind = lexerKindTilde) then
    NEW(result);
    next_token := transpiler_lex(lexer);

    result^.kind := astExpressionKindUnary;
    result^.unary_operator := astUnaryOperatorNot;
    result^.unary_operand := parse_factor(lexer)
  end;
  if (result = nil) & (next_token.kind = lexerKindIdentifier) then
    NEW(result);

    result^.kind := astExpressionKindIdentifier;
    result^.identifier := next_token.identifierKind;

    next_token := transpiler_lex(lexer)
  end;

  return result
end;

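(* Parses a designator: a factor followed by any number of dereferences, array subscripts and field accesses. *)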
proc parse_designator(lexer: PLexer) -> PAstExpression;
var
  next_token: LexerToken;
  inner_expression: PAstExpression;
  designator: PAstExpression;
  handled: BOOLEAN;
begin
  designator := parse_factor(lexer);
  handled := designator <> nil;
  next_token := lexer_current(lexer);

  while handled do
    inner_expression := designator;
    handled := false;

    if ~handled & (next_token.kind = lexerKindHat) then
      NEW(designator);

      designator^.kind := astExpressionKindDereference;
      designator^.reference := inner_expression;

      next_token := transpiler_lex(lexer);
      handled := true
    end;
    if ~handled & (next_token.kind = lexerKindLeftSquare) then
      NEW(designator);
      next_token := transpiler_lex(lexer);

      designator^.kind := astExpressionKindArrayAccess;
      designator^.array := inner_expression;
      designator^.index := parse_designator(lexer);

      next_token := transpiler_lex(lexer);
      handled := true
    end;
    if ~handled & (next_token.kind = lexerKindDot) then
      NEW(designator);
      next_token := transpiler_lex(lexer);

      designator^.kind := astExpressionKindFieldAccess;
      designator^.aggregate := inner_expression;
      designator^.field := next_token.identifierKind;

      next_token := transpiler_lex(lexer);
      handled := true
    end
  end;

  return designator
end;

end.