module;

from FIO import ReadNBytes;
from SYSTEM import TSIZE, ADR;

from MemUtils import MemZero;
from Storage import ALLOCATE, REALLOCATE;

from Lexer import Lexer, LexerKind, LexerToken, lexer_current, lexer_lex;

(* Calls lexer_lex() but skips the comments. *)
proc parser_lex(lexer: PLexer) -> LexerToken;
var
  result: LexerToken;
begin
  result := lexer_lex(lexer);

  while result.kind = lexerKindComment do
    result := lexer_lex(lexer)
  end;

  return result
end;

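(* Parses the field list of a record type up to the terminating end. The array of
   field declarations grows one entry at a time and ends with a zeroed sentinel element. *)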
proc parse_type_fields(parser: PParser) -> PAstFieldDeclaration;
var
  token: LexerToken;
  field_declarations: PAstFieldDeclaration;
  field_count: CARDINAL;
  current_field: PAstFieldDeclaration;
begin
  ALLOCATE(field_declarations, TSIZE(AstFieldDeclaration));
  token := parser_lex(parser^.lexer);
  field_count := 0;

  while token.kind <> lexerKindEnd do
    INC(field_count);
    INC(field_count);
    REALLOCATE(field_declarations, TSIZE(AstFieldDeclaration) * field_count);
    DEC(field_count);
    current_field := field_declarations;
    INC(current_field, TSIZE(AstFieldDeclaration) * (field_count - 1));

    token := parser_lex(parser^.lexer);

    current_field^.field_name := token.identifierKind;

    token := parser_lex(parser^.lexer);
    current_field^.field_type := parse_type_expression(parser);
    token := parser_lex(parser^.lexer);

    if token.kind = lexerKindSemicolon then
      token := parser_lex(parser^.lexer)
    end
  end;
  INC(current_field, TSIZE(AstFieldDeclaration));
  MemZero(current_field, TSIZE(AstFieldDeclaration));

  return field_declarations
end;

proc parse_record_type(parser: PParser) -> PAstTypeExpression;
var
  result: PAstTypeExpression;
begin
  NEW(result);
  result^.kind := astTypeExpressionKindRecord;
  result^.fields := parse_type_fields(parser);

  return result
end;

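(* Parses a pointer type expression and its target type. *)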
proc parse_pointer_type(parser: PParser) -> PAstTypeExpression;
var
  token: LexerToken;
  result: PAstTypeExpression;
begin
  NEW(result);
  result^.kind := astTypeExpressionKindPointer;

  token := lexer_current(parser^.lexer);

  if token.kind = lexerKindPointer then
    token := parser_lex(parser^.lexer)
  end;
  token := lexer_current(parser^.lexer);
  result^.target := parse_type_expression(parser);

  return result
end;

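(* Parses an array type expression, including an optional length. *)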
proc parse_array_type(parser: PParser) -> PAstTypeExpression;
var
  token: LexerToken;
  buffer: [20]CHAR;
  result: PAstTypeExpression;
begin
  NEW(result);
  result^.kind := astTypeExpressionKindArray;
  result^.length := 0;

  token := lexer_current(parser^.lexer);

  if token.kind = lexerKindArray then
    token := parser_lex(parser^.lexer)
  end;
  if token.kind <> lexerKindOf then
    token := parser_lex(parser^.lexer);

    result^.length := token.integerKind;

    token := parser_lex(parser^.lexer)
  end;
  token := parser_lex(parser^.lexer);
  result^.base := parse_type_expression(parser);

  return result
end;

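(* Parses an enumeration type. The case identifiers are collected into an array
   terminated by a zeroed sentinel element. *)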
proc parse_enumeration_type(parser: PParser) -> PAstTypeExpression;
var
  token: LexerToken;
  result: PAstTypeExpression;
  current_case: PIdentifier;
  case_count: CARDINAL;
begin
  NEW(result);
  result^.kind := astTypeExpressionKindEnumeration;

  case_count := 1;
  ALLOCATE(result^.cases, TSIZE(Identifier) * 2);
  token := parser_lex(parser^.lexer);
  current_case := result^.cases;
  current_case^ := token.identifierKind;

  token := parser_lex(parser^.lexer);

  while token.kind = lexerKindComma do
    token := parser_lex(parser^.lexer);

    INC(case_count);
    INC(case_count);
    REALLOCATE(result^.cases, TSIZE(Identifier) * case_count);
    DEC(case_count);
    current_case := result^.cases;
    INC(current_case, TSIZE(Identifier) * (case_count - 1));
    current_case^ := token.identifierKind;

    token := parser_lex(parser^.lexer)
  end;
  INC(current_case, TSIZE(Identifier));
  MemZero(current_case, TSIZE(Identifier));

  return result
end;

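(* Parses a type reference given by a single identifier. *)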
proc parse_named_type(parser: PParser) -> PAstTypeExpression;
var
  token: LexerToken;
  result: PAstTypeExpression;
begin
  token := lexer_current(parser^.lexer);
  NEW(result);

  result^.kind := astTypeExpressionKindNamed;
  result^.name := token.identifierKind;

  return result
end;

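(* Parses a procedure type with its parameter type list. The parameter array is
   terminated by a nil entry. *)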
proc parse_procedure_type(parser: PParser) -> PAstTypeExpression;
var
  token: LexerToken;
  result: PAstTypeExpression;
  current_parameter: PPAstTypeExpression;
  parameter_count: CARDINAL;
begin
  parameter_count := 0;
  NEW(result);
  result^.kind := astTypeExpressionKindProcedure;

  ALLOCATE(result^.parameters, TSIZE(PAstTypeExpression));

  token := parser_lex(parser^.lexer);
  token := parser_lex(parser^.lexer);

  while token.kind <> lexerKindRightParen do
    INC(parameter_count);
    INC(parameter_count);
    REALLOCATE(result^.parameters, TSIZE(PAstTypeExpression) * parameter_count);
    DEC(parameter_count);
    current_parameter := result^.parameters;
    INC(current_parameter, TSIZE(PAstTypeExpression) * (parameter_count - 1));

    current_parameter^ := parse_type_expression(parser);

    token := parser_lex(parser^.lexer);
    if token.kind = lexerKindComma then
      token := parser_lex(parser^.lexer)
    end
  end;
  current_parameter := result^.parameters;
  INC(current_parameter, TSIZE(PAstTypeExpression) * parameter_count);
  current_parameter^ := nil;

  return result
end;

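(* Dispatches on the current token to the specific type parser: record, enumeration,
   array, pointer, procedure or named type. Returns nil if no type expression is recognized. *)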
proc parse_type_expression(parser: PParser) -> PAstTypeExpression;
var
  token: LexerToken;
  result: PAstTypeExpression;
begin
  result := nil;
  token := lexer_current(parser^.lexer);

  if token.kind = lexerKindRecord then
    result := parse_record_type(parser)
  end;
  if token.kind = lexerKindLeftParen then
    result := parse_enumeration_type(parser)
  end;
  if (token.kind = lexerKindArray) or (token.kind = lexerKindLeftSquare) then
    result := parse_array_type(parser)
  end;
  if token.kind = lexerKindHat then
    result := parse_pointer_type(parser)
  end;
  if token.kind = lexerKindProc then
    result := parse_procedure_type(parser)
  end;
  if token.kind = lexerKindIdentifier then
    result := parse_named_type(parser)
  end;
  return result
end;

proc parse_type_declaration(parser: PParser) -> PAstTypedDeclaration;
var
  token: LexerToken;
  result: PAstTypedDeclaration;
begin
  token := lexer_current(parser^.lexer);

  NEW(result);
  result^.identifier := token.identifierKind;

  token := parser_lex(parser^.lexer);
  token := parser_lex(parser^.lexer);

  result^.type_expression := parse_type_expression(parser);
  token := parser_lex(parser^.lexer);

  return result
end;

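(* Parses the optional type declaration part and returns a nil-terminated array of
   type declarations. *)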
proc parse_type_part(parser: PParser) -> PPAstTypedDeclaration;
var
  token: LexerToken;
  result: PPAstTypedDeclaration;
  current_declaration: PPAstTypedDeclaration;
  declaration_count: CARDINAL;
begin
  token := lexer_current(parser^.lexer);

  ALLOCATE(result, TSIZE(PAstTypedDeclaration));
  current_declaration := result;
  declaration_count := 0;

  if token.kind = lexerKindType then
    token := parser_lex(parser^.lexer);

    while token.kind = lexerKindIdentifier do
      INC(declaration_count);

      REALLOCATE(result, TSIZE(PAstTypedDeclaration) * (declaration_count + 1));
      current_declaration := result;
      INC(current_declaration, TSIZE(PAstTypedDeclaration) * (declaration_count - 1));

      current_declaration^ := parse_type_declaration(parser);
      token := parser_lex(parser^.lexer)
    end
  end;
  if declaration_count <> 0 then
    INC(current_declaration, TSIZE(PAstTypedDeclaration))
  end;
  current_declaration^ := nil;

  return result
end;

proc parse_variable_declaration(parser: PParser) -> PAstVariableDeclaration;
var
  token: LexerToken;
  result: PAstVariableDeclaration;
begin
  NEW(result);

  token := lexer_current(parser^.lexer);
  result^.variable_name := token.identifierKind;

  token := parser_lex(parser^.lexer);

  token := parser_lex(parser^.lexer);
  result^.variable_type := parse_type_expression(parser);

  token := parser_lex(parser^.lexer);
  return result
end;

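(* Parses the optional var declaration part and returns a nil-terminated array of
   variable declarations. *)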
proc parse_variable_part(parser: PParser) -> PPAstVariableDeclaration;
var
  token: LexerToken;
  result: PPAstVariableDeclaration;
  current_declaration: PPAstVariableDeclaration;
  declaration_count: CARDINAL;
begin
  token := lexer_current(parser^.lexer);

  ALLOCATE(result, TSIZE(PAstVariableDeclaration));
  current_declaration := result;
  declaration_count := 0;

  if token.kind = lexerKindVar then
    token := parser_lex(parser^.lexer);

    while token.kind = lexerKindIdentifier do
      INC(declaration_count);

      REALLOCATE(result, TSIZE(PAstVariableDeclaration) * (declaration_count + 1));
      current_declaration := result;
      INC(current_declaration, TSIZE(PAstVariableDeclaration) * (declaration_count - 1));

      current_declaration^ := parse_variable_declaration(parser);
      token := parser_lex(parser^.lexer)
    end
  end;
  if declaration_count <> 0 then
    INC(current_declaration, TSIZE(PAstVariableDeclaration))
  end;
  current_declaration^ := nil;

  return result
end;

proc parse_constant_declaration(parser: PParser) -> PAstConstantDeclaration;
var
  token: LexerToken;
  result: PAstConstantDeclaration;
begin
  NEW(result);

  token := lexer_current(parser^.lexer);
  result^.constant_name := token.identifierKind;

  token := parser_lex(parser^.lexer);

  token := parser_lex(parser^.lexer);
  result^.constant_value := token.integerKind;

  token := parser_lex(parser^.lexer);

  return result
end;

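(* Parses the optional const declaration part and returns a nil-terminated array of
   constant declarations. *)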
proc parse_constant_part(parser: PParser) -> PPAstConstantDeclaration;
var
  token: LexerToken;
  result: PPAstConstantDeclaration;
  current_declaration: PPAstConstantDeclaration;
  declaration_count: CARDINAL;
begin
  token := lexer_current(parser^.lexer);

  ALLOCATE(result, TSIZE(PAstConstantDeclaration));
  current_declaration := result;
  declaration_count := 0;

  if token.kind = lexerKindConst then
    token := parser_lex(parser^.lexer);

    while token.kind = lexerKindIdentifier do
      INC(declaration_count);

      REALLOCATE(result, TSIZE(PAstConstantDeclaration) * (declaration_count + 1));
      current_declaration := result;
      INC(current_declaration, TSIZE(PAstConstantDeclaration) * (declaration_count - 1));

      current_declaration^ := parse_constant_declaration(parser);
      token := parser_lex(parser^.lexer)
    end
  end;
  if declaration_count <> 0 then
    INC(current_declaration, TSIZE(PAstConstantDeclaration))
  end;
  current_declaration^ := nil;

  return result
end;

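(* Parses a single import statement: the package name and the imported symbols.
   The symbol array is terminated by a zeroed sentinel element. *)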
proc parse_import_statement(parser: PParser) -> PAstImportStatement;
var
  result: PAstImportStatement;
  token: LexerToken;
  symbol_count: CARDINAL;
  current_symbol: PIdentifier;
begin
  NEW(result);
  symbol_count := 1;

  token := parser_lex(parser^.lexer);
  result^.package := token.identifierKind;

  token := parser_lex(parser^.lexer);
  ALLOCATE(result^.symbols, TSIZE(Identifier) * 2);

  current_symbol := result^.symbols;

  token := parser_lex(parser^.lexer);
  current_symbol^ := token.identifierKind;

  token := parser_lex(parser^.lexer);
  while token.kind <> lexerKindSemicolon do
    token := parser_lex(parser^.lexer);
    INC(symbol_count);

    REALLOCATE(result^.symbols, TSIZE(Identifier) * (symbol_count + 1));
    current_symbol := result^.symbols;
    INC(current_symbol, TSIZE(Identifier) * (symbol_count - 1));

    current_symbol^ := token.identifierKind;
    token := parser_lex(parser^.lexer)
  end;
  INC(current_symbol, TSIZE(Identifier));
  MemZero(current_symbol, TSIZE(Identifier));

  token := parser_lex(parser^.lexer);

  return result
end;

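(* Parses consecutive import statements and returns a nil-terminated array of them. *)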
proc parse_import_part(parser: PParser) -> PPAstImportStatement;
var
  token: LexerToken;
  import_statement: PPAstImportStatement;
  result: PPAstImportStatement;
  import_count: CARDINAL;
begin
  token := lexer_current(parser^.lexer);
  ALLOCATE(result, TSIZE(PAstImportStatement));
  import_statement := result;
  import_count := 0;

  while token.kind = lexerKindFrom do
    INC(import_count);

    REALLOCATE(result, TSIZE(PAstImportStatement) * (import_count + 1));
    import_statement := result;
    INC(import_statement, TSIZE(PAstImportStatement) * (import_count - 1));

    import_statement^ := parse_import_statement(parser);
    token := lexer_current(parser^.lexer)
  end;
  if import_count > 0 then
    INC(import_statement, TSIZE(PAstImportStatement))
  end;
  import_statement^ := nil;

  return result
end;

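(* Parses an integer, string, character, null or boolean literal. Returns nil if the
   current token is not a literal. *)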
proc parse_literal(parser: PParser) -> PAstLiteral;
var
  literal: PAstLiteral;
  token: LexerToken;
begin
  literal := nil;
  token := lexer_current(parser^.lexer);

  if token.kind = lexerKindInteger then
    NEW(literal);

    literal^.kind := astLiteralKindInteger;
    literal^.integer := token.integerKind
  end;
  if (token.kind = lexerKindCharacter) or (token.kind = lexerKindString) then
    NEW(literal);

    literal^.kind := astLiteralKindString;
    literal^.string := token.stringKind
  end;
  if token.kind = lexerKindNull then
    NEW(literal);

    literal^.kind := astLiteralKindNull
  end;
  if token.kind = lexerKindBoolean then
    NEW(literal);

    literal^.kind := astLiteralKindBoolean;
    literal^.boolean := token.booleanKind
  end;
  if literal <> nil then
    token := parser_lex(parser^.lexer)
  end;

  return literal
end;

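(* Parses a factor: a literal, a unary minus or ~ expression, a parenthesized
   expression or an identifier. Returns nil if none matches. *)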
proc parse_factor(parser: PParser) -> PAstExpression;
var
  next_token: LexerToken;
  result: PAstExpression;
  literal: PAstLiteral;
begin
  result := nil;
  next_token := lexer_current(parser^.lexer);

  literal := parse_literal(parser);

  if (result = nil) & (literal <> nil) then
    NEW(result);

    result^.kind := astExpressionKindLiteral;
    result^.literal := literal
  end;
  if (result = nil) & (next_token.kind = lexerKindMinus) then
    NEW(result);
    next_token := parser_lex(parser^.lexer);

    result^.kind := astExpressionKindUnary;
    result^.unary_operator := astUnaryOperatorMinus;
    result^.unary_operand := parse_factor(parser)
  end;
  if (result = nil) & (next_token.kind = lexerKindTilde) then
    NEW(result);
    next_token := parser_lex(parser^.lexer);

    result^.kind := astExpressionKindUnary;
    result^.unary_operator := astUnaryOperatorNot;
    result^.unary_operand := parse_factor(parser)
  end;
  if (result = nil) & (next_token.kind = lexerKindLeftParen) then
    next_token := parser_lex(parser^.lexer);
    result := parse_expression(parser);
    if result <> nil then
      next_token := parser_lex(parser^.lexer)
    end
  end;
  if (result = nil) & (next_token.kind = lexerKindIdentifier) then
    NEW(result);

    result^.kind := astExpressionKindIdentifier;
    result^.identifier := next_token.identifierKind;

    next_token := parser_lex(parser^.lexer)
  end;

  return result
end;

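(* Parses a designator: a factor followed by any number of dereferences, array
   subscripts, field accesses and call argument lists. *)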
proc parse_designator(parser: PParser) -> PAstExpression;
var
  next_token: LexerToken;
  inner_expression: PAstExpression;
  designator: PAstExpression;
  arguments: PPAstExpression;
  handled: BOOLEAN;
begin
  designator := parse_factor(parser);
  handled := designator <> nil;
  next_token := lexer_current(parser^.lexer);

  while handled do
    inner_expression := designator;
    handled := false;

    if ~handled & (next_token.kind = lexerKindHat) then
      NEW(designator);

      designator^.kind := astExpressionKindDereference;
      designator^.reference := inner_expression;

      next_token := parser_lex(parser^.lexer);
      handled := true
    end;
    if ~handled & (next_token.kind = lexerKindLeftSquare) then
      NEW(designator);
      next_token := parser_lex(parser^.lexer);

      designator^.kind := astExpressionKindArrayAccess;
      designator^.array := inner_expression;
      designator^.index := parse_expression(parser);

      next_token := parser_lex(parser^.lexer);
      handled := true
    end;
    if ~handled & (next_token.kind = lexerKindDot) then
      NEW(designator);
      next_token := parser_lex(parser^.lexer);

      designator^.kind := astExpressionKindFieldAccess;
      designator^.aggregate := inner_expression;
      designator^.field := next_token.identifierKind;

      next_token := parser_lex(parser^.lexer);
      handled := true
    end;
    if ~handled & (next_token.kind = lexerKindLeftParen) then
      NEW(designator);
      next_token := parser_lex(parser^.lexer);

      designator^.kind := astExpressionKindCall;
      designator^.callable := inner_expression;
      designator^.argument_count := 0;
      designator^.arguments := nil;

      if next_token.kind <> lexerKindRightParen then
        ALLOCATE(designator^.arguments, TSIZE(PAstExpression));
        designator^.argument_count := 1;
        designator^.arguments^ := parse_expression(parser);

        next_token := lexer_current(parser^.lexer);

        while next_token.kind = lexerKindComma do
          next_token := parser_lex(parser^.lexer);

          designator^.argument_count := designator^.argument_count + 1;
          REALLOCATE(designator^.arguments, TSIZE(PAstExpression) * designator^.argument_count);
          arguments := designator^.arguments;
          INC(arguments, TSIZE(PAstExpression) * (designator^.argument_count - 1));
          arguments^ := parse_expression(parser);

          next_token := lexer_current(parser^.lexer)
        end
      end;

      next_token := parser_lex(parser^.lexer);
      handled := true
    end
  end;

  return designator
end;

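(* Parses the right-hand side of a binary expression and combines it with the given
   left operand and operator. *)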
proc parse_binary_expression(parser: PParser, left: PAstExpression, operator: AstBinaryOperator) -> PAstExpression;
var
  next_token: LexerToken;
  result: PAstExpression;
  right: PAstExpression;
begin
  next_token := parser_lex(parser^.lexer);
  right := parse_designator(parser);
  result := nil;

  if right <> nil then
    NEW(result);
    result^.kind := astExpressionKindBinary;
    result^.binary_operator := operator;
    result^.lhs := left;
    result^.rhs := right
  end;

  return result
end;

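(* Parses an expression: a designator optionally followed by a binary operator and
   its right-hand side. *)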
proc parse_expression(parser: PParser) -> PAstExpression;
var
  next_token: LexerToken;
  left: PAstExpression;
  result: PAstExpression;
  written_bytes: CARDINAL;
begin
  left := parse_designator(parser);
  result := nil;
  next_token := lexer_current(parser^.lexer);

  if left <> nil then
    if (result = nil) & (next_token.kind = lexerKindNotEqual) then
      result := parse_binary_expression(parser, left, astBinaryOperatorNotEquals)
    end;
    if (result = nil) & (next_token.kind = lexerKindEqual) then
      result := parse_binary_expression(parser, left, astBinaryOperatorEquals)
    end;
    if (result = nil) & (next_token.kind = lexerKindGreaterThan) then
      result := parse_binary_expression(parser, left, astBinaryOperatorGreater)
    end;
    if (result = nil) & (next_token.kind = lexerKindLessThan) then
      result := parse_binary_expression(parser, left, astBinaryOperatorLess)
    end;
    if (result = nil) & (next_token.kind = lexerKindGreaterEqual) then
      result := parse_binary_expression(parser, left, astBinaryOperatorGreaterEqual)
    end;
    if (result = nil) & (next_token.kind = lexerKindLessEqual) then
      result := parse_binary_expression(parser, left, astBinaryOperatorLessEqual)
    end;
    if (result = nil) & (next_token.kind = lexerKindAnd) then
      result := parse_binary_expression(parser, left, astBinaryOperatorConjunction)
    end;
    if (result = nil) & (next_token.kind = lexerKindOr) then
      result := parse_binary_expression(parser, left, astBinaryOperatorDisjunction)
    end;
    if (result = nil) & (next_token.kind = lexerKindMinus) then
      result := parse_binary_expression(parser, left, astBinaryOperatorSubtraction)
    end;
    if (result = nil) & (next_token.kind = lexerKindPlus) then
      result := parse_binary_expression(parser, left, astBinaryOperatorSum)
    end;
    if (result = nil) & (next_token.kind = lexerKindAsterisk) then
      result := parse_binary_expression(parser, left, astBinaryOperatorMultiplication)
    end
  end;
  if (result = nil) & (left <> nil) then
    result := left
  end;

  return result
end;

proc parse_return_statement(parser: PParser) -> PAstStatement;
var
  token: LexerToken;
  result: PAstStatement;
begin
  NEW(result);
  result^.kind := astStatementKindReturn;

  token := parser_lex(parser^.lexer);
  result^.returned := parse_expression(parser);

  return result
end;

proc parse_assignment_statement(parser: PParser, assignee: PAstExpression) -> PAstStatement;
var
  token: LexerToken;
  result: PAstStatement;
begin
  NEW(result);
  result^.kind := astStatementKindAssignment;
  result^.assignee := assignee;

  token := parser_lex(parser^.lexer);
  result^.assignment := parse_expression(parser);

  return result
end;

proc parse_call_statement(parser: PParser, call: PAstExpression) -> PAstStatement;
var
  result: PAstStatement;
begin
  NEW(result);
  result^.kind := astStatementKindCall;
  result^.call := call;

  return result
end;

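(* Parses a statement sequence up to the terminating end. *)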
proc parse_compound_statement(parser: PParser) -> AstCompoundStatement;
var
  result: AstCompoundStatement;
  token: LexerToken;
  current_statement: PPAstStatement;
  old_count: CARDINAL;
begin
  result.count := 0;
  result.statements := nil;

  token := lexer_current(parser^.lexer);

  while token.kind <> lexerKindEnd do
    old_count := result.count;
    INC(result.count);

    REALLOCATE(result.statements, TSIZE(PAstStatement) * result.count);
    current_statement := result.statements;

    INC(current_statement, TSIZE(PAstStatement) * old_count);
    current_statement^ := parse_statement(parser);

    token := lexer_current(parser^.lexer)
  end;

  return result
end;

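(* Parses a single statement: if, while, return, assignment or procedure call. *)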
proc parse_statement(parser: PParser) -> PAstStatement;
var
  token: LexerToken;
  statement: PAstStatement;
  designator: PAstExpression;
begin
  statement := nil;
  token := parser_lex(parser^.lexer);

  if token.kind = lexerKindIf then
    statement := parse_if_statement(parser)
  end;
  if token.kind = lexerKindWhile then
    statement := parse_while_statement(parser)
  end;
  if token.kind = lexerKindReturn then
    statement := parse_return_statement(parser)
  end;
  if token.kind = lexerKindIdentifier then
    designator := parse_designator(parser);
    token := lexer_current(parser^.lexer);

    if token.kind = lexerKindAssignment then
      statement := parse_assignment_statement(parser, designator)
    end;
    if token.kind <> lexerKindAssignment then
      statement := parse_call_statement(parser, designator)
    end
  end;
  return statement
end;

proc parse_if_statement(parser: PParser) -> PAstStatement;
var
  token: LexerToken;
  result: PAstStatement;
begin
  NEW(result);
  result^.kind := astStatementKindIf;

  token := parser_lex(parser^.lexer);
  result^.if_condition := parse_expression(parser);
  result^.if_branch := parse_compound_statement(parser);

  token := parser_lex(parser^.lexer);
  return result
end;

proc parse_while_statement(parser: PParser) -> PAstStatement;
var
  token: LexerToken;
  result: PAstStatement;
begin
  NEW(result);
  result^.kind := astStatementKindWhile;

  token := parser_lex(parser^.lexer);
  result^.while_condition := parse_expression(parser);
  result^.while_body := parse_compound_statement(parser);

  token := parser_lex(parser^.lexer);
  return result
end;

proc parse_statement_part(parser: PParser) -> AstCompoundStatement;
var
  token: LexerToken;
  compound: AstCompoundStatement;
begin
  compound.count := 0;
  compound.statements := nil;
  token := lexer_current(parser^.lexer);

  if token.kind = lexerKindBegin then
    compound := parse_compound_statement(parser)
  end;

  return compound
end;

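(* Parses a procedure heading: the procedure name, its parameter list and an optional
   return type. *)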
proc parse_procedure_heading(parser: PParser) -> PAstProcedureDeclaration;
var
  token: LexerToken;
  declaration: PAstProcedureDeclaration;
  parameter_index: CARDINAL;
  current_parameter: PAstTypedDeclaration;
begin
  NEW(declaration);

  token := parser_lex(parser^.lexer);
  declaration^.name := token.identifierKind;

  token := parser_lex(parser^.lexer);

  declaration^.parameters := nil;
  declaration^.parameter_count := 0;

  token := parser_lex(parser^.lexer);
  while token.kind <> lexerKindRightParen do
    parameter_index := declaration^.parameter_count;
    INC(declaration^.parameter_count);
    REALLOCATE(declaration^.parameters, TSIZE(AstTypedDeclaration) * declaration^.parameter_count);

    current_parameter := declaration^.parameters;
    INC(current_parameter, TSIZE(AstTypedDeclaration) * parameter_index);

    current_parameter^.identifier := token.identifierKind;

    token := parser_lex(parser^.lexer);
    token := parser_lex(parser^.lexer);

    current_parameter^.type_expression := parse_type_expression(parser);

    token := parser_lex(parser^.lexer);
    if token.kind = lexerKindComma then
      token := parser_lex(parser^.lexer)
    end
  end;
  token := parser_lex(parser^.lexer);
  declaration^.return_type := nil;

  (* Check for the return type and write it. *)
  if token.kind = lexerKindArrow then
    token := parser_lex(parser^.lexer);
    declaration^.return_type := parse_type_expression(parser);
    token := parser_lex(parser^.lexer)
  end;
  token := parser_lex(parser^.lexer);

  return declaration
end;

proc parse_procedure_declaration(parser: PParser) -> PAstProcedureDeclaration;
var
  token: LexerToken;
  declaration: PAstProcedureDeclaration;
begin
  declaration := parse_procedure_heading(parser);

  declaration^.constants := parse_constant_part(parser);
  declaration^.variables := parse_variable_part(parser);
  declaration^.statements := parse_statement_part(parser);

  token := parser_lex(parser^.lexer);
  token := parser_lex(parser^.lexer);

  return declaration
end;

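(* Parses consecutive procedure declarations and returns a nil-terminated array of them. *)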
proc parse_procedure_part(parser: PParser) -> PPAstProcedureDeclaration;
var
  token: LexerToken;
  current_declaration: PPAstProcedureDeclaration;
  result: PPAstProcedureDeclaration;
  declaration_count: CARDINAL;
  declaration_index: CARDINAL;
begin
  token := lexer_current(parser^.lexer);
  declaration_count := 0;
  declaration_index := 0;

  ALLOCATE(result, TSIZE(PAstProcedureDeclaration));

  while token.kind = lexerKindProc do
    INC(declaration_count);
    REALLOCATE(result, TSIZE(PAstProcedureDeclaration) * (declaration_count + 1));
    current_declaration := result;
    INC(current_declaration, TSIZE(PAstProcedureDeclaration) * declaration_index);

    current_declaration^ := parse_procedure_declaration(parser);
    token := lexer_current(parser^.lexer);
    declaration_index := declaration_count
  end;
  current_declaration := result;
  INC(current_declaration, TSIZE(PAstProcedureDeclaration) * declaration_index);
  current_declaration^ := nil;

  return result
end;

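(* Parses a whole module: the imports, constants, types, variables, procedures and
   the statement part. *)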
proc parse_module(parser: PParser) -> PAstModule;
var
  token: LexerToken;
  result: PAstModule;
begin
  NEW(result);
  token := parser_lex(parser^.lexer);
  result^.main := true;

  if token.kind = lexerKindModule then
    result^.main := false
  end;
  token := parser_lex(parser^.lexer);

  (* Write the module body. *)
  token := parser_lex(parser^.lexer);

  result^.imports := parse_import_part(parser);
  result^.constants := parse_constant_part(parser);
  result^.types := parse_type_part(parser);

  result^.variables := parse_variable_part(parser);
  result^.procedures := parse_procedure_part(parser);
  result^.statements := parse_statement_part(parser);

  token := parser_lex(parser^.lexer);
  token := parser_lex(parser^.lexer);

  return result
end;

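(* Parser entry point: wraps the given lexer in a Parser and parses a module. *)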
proc parse(lexer: PLexer) -> PAstModule;
var
  parser: Parser;
begin
  parser.lexer := lexer;

  return parse_module(ADR(parser))
end;

end.