Split the parser from the code generator
@@ -35,9 +35,9 @@ begin

while token.kind <> lexerKindEnd do
INC(field_count);
INC(field_count);
REALLOCATE(field_declarations, TSIZE(AstFieldDeclaration) * field_count);
DEC(field_count);
INC(field_count);
REALLOCATE(field_declarations, TSIZE(AstFieldDeclaration) * field_count);
DEC(field_count);
current_field := field_declarations;
INC(current_field, TSIZE(AstFieldDeclaration) * (field_count - 1));
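The loop above shows the array-growth idiom this parser uses throughout: bump a counter, REALLOCATE the buffer to the new size, then step a pointer to the freshly added slot before filling it. A minimal C sketch of the same idiom, assuming a plain field_t struct in place of AstFieldDeclaration (names are illustrative, error handling omitted):

    #include <stdlib.h>

    typedef struct { const char *name; } field_t;  /* stands in for AstFieldDeclaration */

    /* Grow the buffer by one element and return a pointer to the new last slot --
       the slot that INC(current_field, TSIZE(AstFieldDeclaration) * (field_count - 1))
       steps to in the hunk above. */
    static field_t *append_field(field_t **fields, size_t *field_count)
    {
        *field_count += 1;
        *fields = realloc(*fields, sizeof(field_t) * *field_count);
        return *fields + (*field_count - 1);
    }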
@@ -109,7 +109,7 @@ begin

result^.length := token.integerKind;

token := transpiler_lex(lexer);
token := transpiler_lex(lexer)
end;
token := transpiler_lex(lexer);
result^.base := parse_type_expression(lexer);
@@ -143,8 +143,8 @@ begin
REALLOCATE(result^.cases, TSIZE(Identifier) * case_count);
DEC(case_count);
current_case := result^.cases;
INC(current_case, TSIZE(Identifier) * (case_count - 1));
current_case^ := token.identifierKind;
INC(current_case, TSIZE(Identifier) * (case_count - 1));
current_case^ := token.identifierKind;

token := transpiler_lex(lexer)
end;
@@ -190,7 +190,7 @@ begin
REALLOCATE(result^.parameters, TSIZE(PAstTypeExpression) * parameter_count);
DEC(parameter_count);
current_parameter := result^.parameters;
INC(current_parameter, TSIZE(PAstTypeExpression) * (parameter_count - 1));
INC(current_parameter, TSIZE(PAstTypeExpression) * (parameter_count - 1));

current_parameter^ := parse_type_expression(lexer);
@@ -235,10 +235,10 @@ begin
return result
end;

proc parse_type_declaration(lexer: PLexer) -> PAstTypeDeclaration;
proc parse_type_declaration(lexer: PLexer) -> PAstTypedDeclaration;
var
token: LexerToken;
result: PAstTypeDeclaration;
result: PAstTypedDeclaration;
begin
token := lexer_current(lexer);
@@ -254,16 +254,16 @@ begin
return result
end;

proc parse_type_part(lexer: PLexer) -> PPAstTypeDeclaration;
proc parse_type_part(lexer: PLexer) -> PPAstTypedDeclaration;
var
token: LexerToken;
result: PPAstTypeDeclaration;
current_declaration: PPAstTypeDeclaration;
result: PPAstTypedDeclaration;
current_declaration: PPAstTypedDeclaration;
declaration_count: CARDINAL;
begin
token := lexer_current(lexer);

ALLOCATE(result, TSIZE(PAstTypeDeclaration));
ALLOCATE(result, TSIZE(PAstTypedDeclaration));
current_declaration := result;
declaration_count := 0;
@@ -273,16 +273,16 @@ begin
while token.kind = lexerKindIdentifier do
INC(declaration_count);

REALLOCATE(result, TSIZE(PAstTypeDeclaration) * (declaration_count + 1));
REALLOCATE(result, TSIZE(PAstTypedDeclaration) * (declaration_count + 1));
current_declaration := result;
INC(current_declaration, TSIZE(PAstTypeDeclaration) * (declaration_count - 1));
INC(current_declaration, TSIZE(PAstTypedDeclaration) * (declaration_count - 1));

current_declaration^ := parse_type_declaration(lexer);
token := transpiler_lex(lexer)
end
end;
if declaration_count <> 0 then
INC(current_declaration, TSIZE(PAstTypeDeclaration))
INC(current_declaration, TSIZE(PAstTypedDeclaration))
end;
current_declaration^ := nil;
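parse_type_part reserves one extra slot (declaration_count + 1) so the list can end with a nil entry; callers walk the result until they hit that sentinel. A small C sketch of consuming such a NULL-terminated pointer list, with type_decl_t as an illustrative stand-in for the AST declaration type:

    #include <stddef.h>

    typedef struct type_decl type_decl_t;  /* opaque stand-in for PAstTypedDeclaration */

    /* Count the entries in a NULL-terminated array of pointers --
       the shape parse_type_part (and parse_procedure_part) returns. */
    static size_t count_declarations(type_decl_t **declarations)
    {
        size_t count = 0;

        while (declarations[count] != NULL) {
            ++count;
        }
        return count;
    }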
@@ -426,7 +426,7 @@ begin

REALLOCATE(result^.symbols, TSIZE(Identifier) * (symbol_count + 1));
current_symbol := result^.symbols;
INC(current_symbol, TSIZE(Identifier) * (symbol_count - 1));
INC(current_symbol, TSIZE(Identifier) * (symbol_count - 1));

current_symbol^ := token.identifierKind;
token := transpiler_lex(lexer)
@@ -481,18 +481,18 @@ begin
NEW(literal);

literal^.kind := astLiteralKindInteger;
literal^.integer := token.integerKind;
literal^.integer := token.integerKind
end;
if (token.kind = lexerKindCharacter) or (token.kind = lexerKindString) then
NEW(literal);

literal^.kind := astLiteralKindString;
literal^.string := token.stringKind;
literal^.string := token.stringKind
end;
if token.kind = lexerKindNull then
NEW(literal);

literal^.kind := astLiteralKindNull;
literal^.kind := astLiteralKindNull
end;
if token.kind = lexerKindBoolean then
NEW(literal);
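Each literal branch above follows the same shape: allocate a node, set its kind tag, and copy the matching token payload. A hedged C sketch of an equivalent tagged-union literal node; the enum and field names are illustrative rather than the transpiler's actual definitions, and allocation failure is ignored:

    #include <stdlib.h>

    typedef enum { LITERAL_INTEGER, LITERAL_STRING, LITERAL_NULL, LITERAL_BOOLEAN } literal_kind_t;

    typedef struct {
        literal_kind_t kind;
        union {
            long integer;
            const char *string;
            int boolean;
        } value;
    } literal_t;

    /* Equivalent of the astLiteralKindInteger branch: tag the node, then store the payload. */
    static literal_t *make_integer_literal(long value)
    {
        literal_t *literal = malloc(sizeof(literal_t));

        literal->kind = LITERAL_INTEGER;
        literal->value.integer = value;
        return literal;
    }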
@@ -522,7 +522,7 @@ begin
NEW(result);

result^.kind := astExpressionKindLiteral;
result^.literal := literal;
result^.literal := literal
end;
if (result = nil) & (next_token.kind = lexerKindMinus) then
NEW(result);
@@ -549,10 +549,10 @@ begin
end;
if (result = nil) & (next_token.kind = lexerKindIdentifier) then
NEW(result);

result^.kind := astExpressionKindIdentifier;
result^.identifier := next_token.identifierKind;

next_token := transpiler_lex(lexer)
end;
@@ -658,7 +658,7 @@ begin
result^.kind := astExpressionKindBinary;
result^.binary_operator := operator;
result^.lhs := left;
result^.rhs := right;
result^.rhs := right
end;

return result
@@ -757,4 +757,243 @@ begin
return result
end;

proc parse_compound_statement(lexer: PLexer) -> AstCompoundStatement;
var
    result: AstCompoundStatement;
    token: LexerToken;
    current_statement: PPAstStatement;
    old_count: CARDINAL;
begin
    result.count := 0;
    result.statements := nil;

    token := lexer_current(lexer);

    while token.kind <> lexerKindEnd do
        old_count := result.count;
        INC(result.count);

        REALLOCATE(result.statements, TSIZE(PAstStatement) * result.count);
        current_statement := result.statements;

        INC(current_statement, TSIZE(PAstStatement) * old_count);
        current_statement^ := parse_statement(lexer);

        token := lexer_current(lexer)
    end;

    return result
end;

proc parse_statement(lexer: PLexer) -> PAstStatement;
var
    token: LexerToken;
    statement: PAstStatement;
    designator: PAstExpression;
begin
    statement := nil;
    token := transpiler_lex(lexer);

    if token.kind = lexerKindIf then
        statement := parse_if_statement(lexer)
    end;
    if token.kind = lexerKindWhile then
        statement := parse_while_statement(lexer)
    end;
    if token.kind = lexerKindReturn then
        statement := parse_return_statement(lexer)
    end;
    if token.kind = lexerKindIdentifier then
        designator := parse_designator(lexer);
        token := lexer_current(lexer);

        if token.kind = lexerKindAssignment then
            statement := parse_assignment_statement(lexer, designator)
        end;
        if token.kind <> lexerKindAssignment then
            statement := parse_call_statement(lexer, designator)
        end
    end;
    return statement
end;
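parse_statement dispatches on the current token: if, while and return each get a dedicated parser, while a leading identifier is first parsed as a designator and only then resolved into an assignment or a call, depending on whether := follows. A C paraphrase of that dispatch, using a switch in place of the original if chain; the types and prototypes below are stand-ins, not the transpiler's real declarations:

    typedef enum { TOKEN_IF, TOKEN_WHILE, TOKEN_RETURN, TOKEN_IDENTIFIER, TOKEN_ASSIGNMENT, TOKEN_OTHER } token_kind_t;

    typedef struct { token_kind_t kind; } token_t;
    typedef struct lexer lexer_t;
    typedef struct statement statement_t;
    typedef struct expression expression_t;

    /* Stand-ins for the procedures introduced in this commit. */
    token_t transpiler_lex(lexer_t *lexer);
    token_t lexer_current(lexer_t *lexer);
    statement_t *parse_if_statement(lexer_t *lexer);
    statement_t *parse_while_statement(lexer_t *lexer);
    statement_t *parse_return_statement(lexer_t *lexer);
    expression_t *parse_designator(lexer_t *lexer);
    statement_t *parse_assignment_statement(lexer_t *lexer, expression_t *designator);
    statement_t *parse_call_statement(lexer_t *lexer, expression_t *designator);

    statement_t *parse_statement_sketch(lexer_t *lexer)
    {
        token_t token = transpiler_lex(lexer);
        expression_t *designator;

        switch (token.kind) {
        case TOKEN_IF:
            return parse_if_statement(lexer);
        case TOKEN_WHILE:
            return parse_while_statement(lexer);
        case TOKEN_RETURN:
            return parse_return_statement(lexer);
        case TOKEN_IDENTIFIER:
            /* An identifier begins either an assignment or a call; peek at the
               token after the designator to decide which. */
            designator = parse_designator(lexer);
            return lexer_current(lexer).kind == TOKEN_ASSIGNMENT
                ? parse_assignment_statement(lexer, designator)
                : parse_call_statement(lexer, designator);
        default:
            return NULL;  /* the original leaves the statement as nil */
        }
    }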

proc parse_if_statement(lexer: PLexer) -> PAstStatement;
var
    token: LexerToken;
    result: PAstStatement;
begin
    NEW(result);
    result^.kind := astStatementKindIf;

    token := transpiler_lex(lexer);
    result^.if_condition := parse_expression(lexer);
    result^.if_branch := parse_compound_statement(lexer);

    token := transpiler_lex(lexer);
    return result
end;

proc parse_while_statement(lexer: PLexer) -> PAstStatement;
var
    token: LexerToken;
    result: PAstStatement;
begin
    NEW(result);
    result^.kind := astStatementKindWhile;

    token := transpiler_lex(lexer);
    result^.while_condition := parse_expression(lexer);
    result^.while_body := parse_compound_statement(lexer);

    token := transpiler_lex(lexer);
    return result
end;

proc parse_statement_part(lexer: PLexer) -> AstCompoundStatement;
var
    token: LexerToken;
    compound: AstCompoundStatement;
begin
    compound.count := 0;
    compound.statements := nil;
    token := lexer_current(lexer);

    if token.kind = lexerKindBegin then
        compound := parse_compound_statement(lexer)
    end;

    return compound
end;

proc parse_procedure_heading(lexer: PLexer) -> PAstProcedureDeclaration;
var
    token: LexerToken;
    declaration: PAstProcedureDeclaration;
    parameter_index: CARDINAL;
    current_parameter: PAstTypedDeclaration;
begin
    NEW(declaration);

    token := transpiler_lex(lexer);
    declaration^.name := token.identifierKind;

    token := transpiler_lex(lexer);

    declaration^.parameters := nil;
    declaration^.parameter_count := 0;

    token := transpiler_lex(lexer);
    while token.kind <> lexerKindRightParen do
        parameter_index := declaration^.parameter_count;
        INC(declaration^.parameter_count);
        REALLOCATE(declaration^.parameters, TSIZE(AstTypedDeclaration) * declaration^.parameter_count);

        current_parameter := declaration^.parameters;
        INC(current_parameter, TSIZE(AstTypedDeclaration) * parameter_index);

        current_parameter^.identifier := token.identifierKind;

        token := transpiler_lex(lexer);
        token := transpiler_lex(lexer);

        current_parameter^.type_expression := parse_type_expression(lexer);

        token := transpiler_lex(lexer);
        if token.kind = lexerKindComma then
            token := transpiler_lex(lexer)
        end
    end;
    token := transpiler_lex(lexer);
    declaration^.return_type := nil;

    (* Check for the return type and parse it. *)
    if token.kind = lexerKindArrow then
        token := transpiler_lex(lexer);
        declaration^.return_type := parse_type_expression(lexer);
        token := transpiler_lex(lexer)
    end;
    token := transpiler_lex(lexer);

    return declaration
end;
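The parameter loop above scales every INC by TSIZE, which suggests INC advances the pointer by a raw byte offset rather than by elements. C pointer arithmetic scales by the element size automatically, so the two spellings in the sketch below land on the same slot; param_t is an illustrative stand-in for AstTypedDeclaration:

    #include <stddef.h>

    typedef struct { int identifier; int type_expression; } param_t;  /* illustrative layout */

    static void same_slot(param_t *parameters, size_t parameter_index)
    {
        /* C element arithmetic: the compiler multiplies by sizeof(param_t). */
        param_t *by_element = parameters + parameter_index;

        /* Byte arithmetic, the shape of INC(current_parameter, TSIZE(AstTypedDeclaration) * parameter_index). */
        param_t *by_bytes = (param_t *) ((char *) parameters + sizeof(param_t) * parameter_index);

        (void) by_element;
        (void) by_bytes;
    }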

proc parse_procedure_declaration(lexer: PLexer) -> PAstProcedureDeclaration;
var
    token: LexerToken;
    declaration: PAstProcedureDeclaration;
begin
    declaration := parse_procedure_heading(lexer);

    declaration^.constants := parse_constant_part(lexer);
    declaration^.variables := parse_variable_part(lexer);
    declaration^.statements := parse_statement_part(lexer);

    token := transpiler_lex(lexer);
    token := transpiler_lex(lexer);

    return declaration
end;

proc parse_procedure_part(lexer: PLexer) -> PPAstProcedureDeclaration;
var
    token: LexerToken;
    current_declaration: PPAstProcedureDeclaration;
    result: PPAstProcedureDeclaration;
    declaration_count: CARDINAL;
    declaration_index: CARDINAL;
begin
    token := lexer_current(lexer);
    declaration_count := 0;
    declaration_index := 0;

    ALLOCATE(result, TSIZE(PAstProcedureDeclaration));

    while token.kind = lexerKindProc do
        INC(declaration_count);
        REALLOCATE(result, TSIZE(PAstProcedureDeclaration) * (declaration_count + 1));
        current_declaration := result;
        INC(current_declaration, TSIZE(PAstProcedureDeclaration) * declaration_index);

        current_declaration^ := parse_procedure_declaration(lexer);
        token := lexer_current(lexer);
        declaration_index := declaration_count
    end;
    current_declaration := result;
    INC(current_declaration, TSIZE(PAstProcedureDeclaration) * declaration_index);
    current_declaration^ := nil;

    return result
end;

proc parse_module(lexer: PLexer) -> PAstModule;
var
    token: LexerToken;
    result: PAstModule;
begin
    NEW(result);
    token := transpiler_lex(lexer);
    result^.main := true;

    if token.kind = lexerKindModule then
        result^.main := false
    end;
    token := transpiler_lex(lexer);

    (* Parse the module body. *)
    token := transpiler_lex(lexer);

    result^.imports := parse_import_part(lexer);
    result^.constants := parse_constant_part(lexer);
    result^.types := parse_type_part(lexer);

    result^.variables := parse_variable_part(lexer);
    result^.procedures := parse_procedure_part(lexer);
    result^.statements := parse_statement_part(lexer);

    token := transpiler_lex(lexer);
    token := transpiler_lex(lexer);

    return result
end;
end.
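parse_module assembles the module node part by part in a fixed order: imports, constants, types, variables, procedures, and finally the top-level statement block. A hedged C sketch of a record with that shape; the field types below are placeholders for the transpiler's actual AST structs, not its real definitions:

    struct ast_import;
    struct ast_constant;
    struct ast_type_declaration;
    struct ast_variable;
    struct ast_procedure;
    struct ast_statement;

    typedef struct {
        unsigned int count;
        struct ast_statement **statements;
    } compound_statement_t;

    typedef struct {
        int main;                            /* false when the source starts with "module" */
        struct ast_import **imports;
        struct ast_constant **constants;
        struct ast_type_declaration **types;
        struct ast_variable **variables;
        struct ast_procedure **procedures;   /* nil-terminated, as built by parse_procedure_part */
        compound_statement_t statements;     /* the top-level begin ... end block */
    } module_t;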