Parse and transpile unary operations
parent 3ca8491f64
commit 8d52410be9
@@ -18,7 +18,7 @@ var
parsed: BOOLEAN;
begin
i := 1;
ALLOCATE(result, TSIZE(CommandLine));
NEW(result);
result^.lex := false;
result^.parse := false;
MemZero(ADR(result^.input), 256);

@@ -2,7 +2,7 @@ DEFINITION MODULE Lexer;

FROM FIO IMPORT File;

FROM Common IMPORT Identifier;
FROM Common IMPORT Identifier, ShortString;

TYPE
PLexerBuffer = POINTER TO CHAR;

@@ -41,7 +41,7 @@ TYPE
lexerKindNull,
lexerKindAnd,
lexerKindOr,
lexerKindNot,
lexerKindTilde,
lexerKindReturn,
lexerKindDefinition,
lexerKindRange,

@@ -84,7 +84,8 @@ TYPE
CASE kind: LexerKind OF
lexerKindBoolean: booleanKind: BOOLEAN |
lexerKindIdentifier: identifierKind: Identifier |
lexerKindInteger: integerKind: INTEGER
lexerKindInteger: integerKind: INTEGER |
lexerKindString: stringKind: ShortString
END
END;
PLexerToken = POINTER TO LexerToken;

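A usage sketch for the new variant arm (not part of the diff): only the field selected by the token's kind is meaningful, so a consumer checks kind before reading stringKind. The procedure name write_string_token is hypothetical; PTranspilerContext and WriteString are borrowed from the transpiler module changed further down.

proc write_string_token(context: PTranspilerContext, token: LexerToken);
begin
    (* stringKind is only valid when the lexer tagged the token as a string. *)
    if token.kind = lexerKindString then
        WriteString(context^.output, token.stringKind)
    end
end;
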
@@ -301,6 +301,8 @@ end;

(* Delimited string action. *)
proc transition_action_delimited(lexer: PLexer, token: PLexerToken);
var
text_length: CARDINAL;
begin
if lexer^.Start^ = '(' then
token^.kind := lexerKindComment

@@ -309,6 +311,10 @@ begin
token^.kind := lexerKindCharacter
end;
if lexer^.Start^ = "'" then
text_length := lexer^.Current - lexer^.Start;
MemZero(ADR(token^.stringKind), TSIZE(ShortString));
MemCopy(lexer^.Start, text_length, ADR(token^.stringKind));

token^.kind := lexerKindString
end;
INC(lexer^.Current)

@@ -432,8 +438,8 @@ begin
if lexer^.Current^ = ',' then
token^.kind := lexerKindComma
end;
if lexer^.Current^ = ',' then
token^.kind := lexerKindComma
if lexer^.Current^ = '~' then
token^.kind := lexerKindTilde
end;
if lexer^.Current^ = ')' then
token^.kind := lexerKindRightParen

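A note tying this hunk to the transpiler hunks further down (illustration only, not part of the diff): '~' now becomes its own token kind, and the token-by-token pass rewrites it with the Modula-2 spelling.

(* Assumed token flow for the input "~parsed":
   lexer:       the '~' branch above sets token.kind := lexerKindTilde
   transpiler:  the lexerKindTilde branch writes 'NOT ', giving roughly "NOT parsed" *)
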
@@ -1,9 +1,55 @@
DEFINITION MODULE Parser;

FROM Common IMPORT Identifier, PIdentifier;
FROM Common IMPORT Identifier, PIdentifier, ShortString;
FROM Lexer IMPORT PLexer;

TYPE
AstLiteralKind = (
astLiteralKindInteger,
astLiteralKindString,
astLiteralKindNull
);
AstLiteral = RECORD
CASE kind: AstLiteralKind OF
astLiteralKindInteger: integer: INTEGER |
astLiteralKindString: string: ShortString |
astLiteralKindNull:
END
END;
PAstLiteral = POINTER TO AstLiteral;

AstUnaryOperator = (
astUnaryOperatorNot,
astUnaryOperatorMinus
);

AstExpressionKind = (
astExpressionKindLiteral,
astExpressionKindIdentifier,
astExpressionKindArrayAccess,
astExpressionKindDereference,
astExpressionKindFieldAccess,
astExpressionKindUnary
);
AstExpression = RECORD
CASE kind: AstExpressionKind OF
astExpressionKindLiteral: literal: PAstLiteral |
astExpressionKindIdentifier: identifier: Identifier |
astExpressionKindDereference: reference: PAstExpression |
astExpressionKindArrayAccess:
array: PAstExpression;
index: PAstExpression |
astExpressionKindFieldAccess:
aggregate: PAstExpression;
field: Identifier |
astExpressionKindUnary:
unary_operator: AstUnaryOperator;
unary_operand: PAstExpression
END
END;
PAstExpression = POINTER TO AstExpression;
PPAstExpression = POINTER TO PAstExpression;

AstImportStatement = RECORD
package: Identifier;
symbols: PIdentifier

@@ -74,5 +120,6 @@ PROCEDURE parse_type_part(lexer: PLexer): PPAstTypeDeclaration;
PROCEDURE parse_variable_part(lexer: PLexer): PPAstVariableDeclaration;
PROCEDURE parse_constant_part(lexer: PLexer): PPAstConstantDeclaration;
PROCEDURE parse_import_part(lexer: PLexer): PPAstImportStatement;
PROCEDURE parse_designator(lexer: PLexer): PAstExpression;

END Parser.

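An illustrative fragment, not part of the commit: a source expression such as ~done is represented as an astExpressionKindUnary node whose operand is an astExpressionKindIdentifier node. Built by hand, assuming operand and negation are local PAstExpression variables and token is the identifier token:

NEW(operand);
operand^.kind := astExpressionKindIdentifier;
operand^.identifier := token.identifierKind;

NEW(negation);
negation^.kind := astExpressionKindUnary;
negation^.unary_operator := astUnaryOperatorNot;
negation^.unary_operand := operand;
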
@@ -60,7 +60,7 @@ proc parse_record_type(lexer: PLexer) -> PAstTypeExpression;
var
result: PAstTypeExpression;
begin
ALLOCATE(result, TSIZE(AstTypeExpression));
NEW(result);
result^.kind := astTypeExpressionKindRecord;
result^.fields := parse_type_fields(lexer);

@@ -72,7 +72,7 @@ var
token: LexerToken;
result: PAstTypeExpression;
begin
ALLOCATE(result, TSIZE(AstTypeExpression));
NEW(result);
result^.kind := astTypeExpressionKindPointer;

token := lexer_current(lexer);

@@ -92,7 +92,7 @@ var
buffer: [20]CHAR;
result: PAstTypeExpression;
begin
ALLOCATE(result, TSIZE(AstTypeExpression));
NEW(result);
result^.kind := astTypeExpressionKindArray;
result^.length := 0;

@@ -121,7 +121,7 @@ var
current_case: PIdentifier;
case_count: CARDINAL;
begin
ALLOCATE(result, TSIZE(AstTypeExpression));
NEW(result);
result^.kind := astTypeExpressionKindEnumeration;

case_count := 1;

@@ -153,10 +153,9 @@ proc parse_named_type(lexer: PLexer) -> PAstTypeExpression;
var
token: LexerToken;
result: PAstTypeExpression;
written_bytes: CARDINAL;
begin
token := lexer_current(lexer);
ALLOCATE(result, TSIZE(AstTypeExpression));
NEW(result);

result^.kind := astTypeExpressionKindNamed;
result^.name := token.identifierKind;

@@ -172,7 +171,7 @@ var
parameter_count: CARDINAL;
begin
parameter_count := 0;
ALLOCATE(result, TSIZE(AstTypeExpression));
NEW(result);
result^.kind := astTypeExpressionKindProcedure;

ALLOCATE(result^.parameters, 1);

@@ -236,7 +235,7 @@ var
begin
token := lexer_current(lexer);

ALLOCATE(result, TSIZE(AstTypeDeclaration));
NEW(result);
result^.identifier := token.identifierKind;

token := transpiler_lex(lexer);

@@ -288,7 +287,7 @@ var
token: LexerToken;
result: PAstVariableDeclaration;
begin
ALLOCATE(result, TSIZE(AstVariableDeclaration));
NEW(result);

token := lexer_current(lexer);
result^.variable_name := token.identifierKind;

@@ -342,7 +341,7 @@ var
token: LexerToken;
result: PAstConstantDeclaration;
begin
ALLOCATE(result, TSIZE(AstConstantDeclaration));
NEW(result);

token := lexer_current(lexer);
result^.constant_name := token.identifierKind;

@@ -399,7 +398,7 @@ var
symbol_count: CARDINAL;
current_symbol: PIdentifier;
begin
ALLOCATE(result, TSIZE(AstImportStatement));
NEW(result);
symbol_count := 1;

token := transpiler_lex(lexer);

@@ -463,4 +462,132 @@ begin
return result
end;

proc parse_literal(lexer: PLexer) -> PAstLiteral;
var
literal: PAstLiteral;
token: LexerToken;
begin
literal := nil;
token := lexer_current(lexer);

if token.kind = lexerKindInteger then
NEW(literal);

literal^.kind := astLiteralKindInteger;
literal^.integer := token.integerKind;
end;
if token.kind = lexerKindCharacter then
NEW(literal);

literal^.kind := astLiteralKindString;
literal^.string := token.stringKind;
end;
if token.kind = lexerKindNull then
NEW(literal);

literal^.kind := astLiteralKindNull;
end;
if literal <> nil then
token := transpiler_lex(lexer)
end;

return literal
end;

proc parse_factor(lexer: PLexer) -> PAstExpression;
var
next_token: LexerToken;
result: PAstExpression;
literal: PAstLiteral;
begin
result := nil;
next_token := lexer_current(lexer);

literal := parse_literal(lexer);

if (result = nil) & (literal <> nil) then
NEW(result);

result^.kind := astExpressionKindLiteral;
result^.literal := literal;
end;
if (result = nil) & (next_token.kind = lexerKindMinus) then
NEW(result);
next_token := transpiler_lex(lexer);

result^.kind := astExpressionKindUnary;
result^.unary_operator := astUnaryOperatorMinus;
result^.unary_operand := parse_factor(lexer)
end;
if (result = nil) & (next_token.kind = lexerKindTilde) then
NEW(result);
next_token := transpiler_lex(lexer);

result^.kind := astExpressionKindUnary;
result^.unary_operator := astUnaryOperatorNot;
result^.unary_operand := parse_factor(lexer)
end;
if (result = nil) & (next_token.kind = lexerKindIdentifier) then
NEW(result);

result^.kind := astExpressionKindIdentifier;
result^.identifier := next_token.identifierKind;

next_token := transpiler_lex(lexer)
end;

return result
end;

proc parse_designator(lexer: PLexer) -> PAstExpression;
var
next_token: LexerToken;
inner_expression: PAstExpression;
designator: PAstExpression;
handled: BOOLEAN;
begin
designator := parse_factor(lexer);
handled := designator <> nil;
next_token := lexer_current(lexer);

while handled do
inner_expression := designator;
handled := false;

if ~handled & (next_token.kind = lexerKindHat) then
NEW(designator);

designator^.kind := astExpressionKindDereference;
designator^.reference := inner_expression;

next_token := transpiler_lex(lexer);
handled := true
end;
if ~handled & (next_token.kind = lexerKindLeftSquare) then
NEW(designator);
next_token := transpiler_lex(lexer);

designator^.kind := astExpressionKindArrayAccess;
designator^.array := inner_expression;
designator^.index := parse_designator(lexer);

next_token := transpiler_lex(lexer);
handled := true
end;
if ~handled & (next_token.kind = lexerKindDot) then
NEW(designator);
next_token := transpiler_lex(lexer);

designator^.kind := astExpressionKindFieldAccess;
designator^.aggregate := inner_expression;
designator^.field := next_token.identifierKind;

next_token := transpiler_lex(lexer);
handled := true
end
end;

return designator
end;

end.

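A worked example, not part of the diff, of how parse_factor and parse_designator cooperate: for the input result^.kind, parse_factor returns the identifier node, the lexerKindHat branch wraps it in a dereference, and the lexerKindDot branch wraps that in a field access.

(* Tree built by parse_designator for "result^.kind": *)
astExpressionKindFieldAccess (field = kind)
    aggregate: astExpressionKindDereference
        reference: astExpressionKindIdentifier (result)
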
@@ -1,6 +1,6 @@
module;

from FIO import WriteNBytes, WriteLine, WriteChar, WriteString;
from FIO import StdErr, WriteNBytes, WriteLine, WriteChar, WriteString;
from SYSTEM import ADR, ADDRESS, TSIZE;

from NumberIO import IntToStr;

@@ -9,12 +9,14 @@ from MemUtils import MemCopy, MemZero;

from Common import Identifier, PIdentifier, ShortString;
from Lexer import Lexer, LexerToken, lexer_current, lexer_lex, LexerKind;
from Parser import AstModule, PAstModule, AstTypeExpressionKind,
from Parser import AstTypeExpressionKind, AstExpressionKind, AstLiteralKind, AstUnaryOperator,
AstModule, PAstModule, AstExpression, PAstExpression, PAstLiteral,
PAstConstantDeclaration, PPAstConstantDeclaration,
AstTypeDeclaration, PAstTypeDeclaration, PPAstTypeDeclaration,
PAstVariableDeclaration, PPAstVariableDeclaration, PAstImportStatement, PPAstImportStatement,
PAstTypeExpression, PPAstTypeExpression, AstFieldDeclaration, PAstFieldDeclaration,
parse_type_expression, parse_variable_part, parse_type_part, parse_constant_part, parse_import_part;
parse_type_expression, parse_variable_part, parse_type_part, parse_constant_part, parse_import_part,
parse_designator;

(* Calls lexer_lex() but skips the comments. *)
proc transpiler_lex(lexer: PLexer) -> LexerToken;

@@ -117,7 +119,7 @@ var
token: LexerToken;
result: PAstModule;
begin
ALLOCATE(result, TSIZE(AstModule));
NEW(result);
token := transpiler_lex(context^.lexer);

if token.kind = lexerKindModule then

@@ -404,12 +406,12 @@ begin
return result
end;

proc transpile_expression(context: PTranspilerContext, trailing_token: LexerKind);
proc transpile_unchanged(context: PTranspilerContext, trailing_token: LexerKind);
var
token: LexerToken;
written_bytes: CARDINAL;
begin
token := transpiler_lex(context^.lexer);
token := lexer_current(context^.lexer);

while (token.kind <> trailing_token) & (token.kind <> lexerKindEnd) do
written_bytes := 0;

@@ -433,7 +435,7 @@ begin
WriteString(context^.output, 'AND ');
written_bytes := 1
end;
if token.kind = lexerKindNot then
if token.kind = lexerKindTilde then
WriteString(context^.output, 'NOT ');
written_bytes := 1
end;

@@ -445,12 +447,92 @@ begin
end
end;

proc parse_expression(lexer: PLexer) -> PAstExpression;
var
next_token: LexerToken;
result: PAstExpression;
written_bytes: CARDINAL;
begin
result := parse_designator(lexer);

written_bytes := WriteNBytes(StdErr, ADDRESS(lexer^.Current - lexer^.Start), lexer^.Start);
WriteLine(StdErr);

return result
end;

proc transpile_unary_operator(context: PTranspilerContext, operator: AstUnaryOperator);
begin
if operator = astUnaryOperatorMinus then
WriteChar(context^.output, '-')
end;
if operator = astUnaryOperatorNot then
WriteChar(context^.output, '~')
end
end;

proc transpile_expression(context: PTranspilerContext, expression: PAstExpression);
var
literal: PAstLiteral;
buffer: [20]CHAR;
written_bytes: CARDINAL;
begin
if expression^.kind = astExpressionKindLiteral then
literal := expression^.literal;

if literal^.kind = astLiteralKindInteger then
IntToStr(literal^.integer, 0, buffer);
WriteString(context^.output, buffer);
end;
if literal^.kind = astLiteralKindString then
WriteString(context^.output, literal^.string)
end
end;
if expression^.kind = astExpressionKindIdentifier then
written_bytes := WriteNBytes(context^.output, ORD(expression^.identifier[1]), ADR(expression^.identifier[2]))
end;
if expression^.kind = astExpressionKindDereference then
transpile_expression(context, expression^.reference);
WriteChar(context^.output, '^')
end;
if expression^.kind = astExpressionKindArrayAccess then
transpile_expression(context, expression^.array);
WriteChar(context^.output, '[');
transpile_expression(context, expression^.index);
WriteChar(context^.output, ']')
end;
if expression^.kind = astExpressionKindFieldAccess then
transpile_expression(context, expression^.aggregate);
WriteChar(context^.output, '.');
written_bytes := WriteNBytes(context^.output, ORD(expression^.field[1]), ADR(expression^.field[2]));
end;
if expression^.kind = astExpressionKindUnary then
transpile_unary_operator(context, expression^.unary_operator);
transpile_expression(context, expression^.unary_operand)
end
end;

proc transpile_if_statement(context: PTranspilerContext);
var
token: LexerToken;
expression: PAstExpression;
lexer: Lexer;
begin
WriteString(context^.output, ' IF ');
transpile_expression(context, lexerKindThen);

lexer := context^.lexer^;
token := transpiler_lex(ADR(lexer));
expression := parse_expression(ADR(lexer));

if expression <> nil then
context^.lexer^ := lexer;
transpile_expression(context, expression);
WriteChar(context^.output, ' ')
end;
if expression = nil then
token := transpiler_lex(context^.lexer)
end;
transpile_unchanged(context, lexerKindThen);

WriteString(context^.output, 'THEN');
WriteLine(context^.output);

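A usage sketch, again not part of the diff, assuming expression already points at the unary node parse_factor builds for ~done:

transpile_expression(context, expression);
(* The astExpressionKindUnary branch calls transpile_unary_operator, which writes '~',
   then recurses into the identifier operand, which WriteNBytes prints as "done",
   so context^.output receives "~done". *)
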
@@ -464,7 +546,8 @@ var
token: LexerToken;
begin
WriteString(context^.output, ' WHILE ');
transpile_expression(context, lexerKindDo);
token := transpiler_lex(context^.lexer);
transpile_unchanged(context, lexerKindDo);

WriteString(context^.output, 'DO');
WriteLine(context^.output);

@@ -474,9 +557,12 @@ begin
end;

proc transpile_assignment_statement(context: PTranspilerContext);
var
token: LexerToken;
begin
WriteString(context^.output, ' := ');
transpile_expression(context, lexerKindSemicolon);
token := transpiler_lex(context^.lexer);
transpile_unchanged(context, lexerKindSemicolon);
end;

proc transpile_call_statement(context: PTranspilerContext);

@@ -541,7 +627,8 @@ var
token: LexerToken;
begin
WriteString(context^.output, ' RETURN ');
transpile_expression(context, lexerKindSemicolon)
token := transpiler_lex(context^.lexer);
transpile_unchanged(context, lexerKindSemicolon)
end;

proc transpile_statement(context: PTranspilerContext);