From 6ee7a8d2a7f533310b864162a7992c9e2f7191f8 Mon Sep 17 00:00:00 2001 From: Eugen Wissner Date: Thu, 29 May 2025 14:59:56 +0200 Subject: [PATCH] Make procedure name after the end --- Rakefile | 32 ++++++--- source/Compiler.elna | 4 +- source/Lexer.def | 3 +- source/Lexer.elna | 43 ++++++------ source/Transpiler.elna | 145 ++++++++++++++++++++++------------------- 5 files changed, 130 insertions(+), 97 deletions(-) diff --git a/Rakefile b/Rakefile index 82260a3..7c75556 100644 --- a/Rakefile +++ b/Rakefile @@ -15,11 +15,11 @@ CLEAN.include 'build' rule(/build\/stage1\/.+\.o$/ => ->(file) { path = Pathname.new('boot/stage1/source') + Pathname.new(file).basename - ['build/stage1', path.sub_ext('.def'), path.sub_ext('.elna')] + ['build/stage1', path.sub_ext('.def'), path.sub_ext('.mod')] }) do |t| - sources = t.prerequisites.filter { |f| f.end_with? '.elna' } + sources = t.prerequisites.filter { |f| f.end_with? '.mod' } - sh M2C, '-fmod=.elna', '-c', '-I', 'boot/stage1/source', '-o', t.name, *sources + sh M2C, '-c', '-I', 'boot/stage1/source', '-o', t.name, *sources end file 'build/stage1/elna' => FileList['boot/stage1/source/*'].map { |file| @@ -28,10 +28,10 @@ file 'build/stage1/elna' => FileList['boot/stage1/source/*'].map { |file| sh M2C, '-o', t.name, *t.prerequisites end -file 'build/stage1/Compiler.o' => ['build/stage1', 'boot/stage1/source/Compiler.elna'] do |t| - sources = t.prerequisites.filter { |f| f.end_with? '.elna' } +file 'build/stage1/Compiler.o' => ['build/stage1', 'boot/stage1/source/Compiler.mod'] do |t| + sources = t.prerequisites.filter { |f| f.end_with? 
'.mod' } - sh M2C, '-fscaffold-main', '-fmod=.elna', '-c', '-I', 'boot/stage1/source', '-o', t.name, *sources + sh M2C, '-fscaffold-main', '-c', '-I', 'boot/stage1/source', '-o', t.name, *sources end ['source', 'self'].each do |sub| @@ -98,8 +98,10 @@ task :backport do FileList['source/*.elna'].each do |file| source_path = Pathname.new file source = File.read source_path + current_procedure = nil + target = '' - target = source + source .gsub(/^(var|type|const|begin)/) { |match| match.upcase } .gsub(/^[[:alnum:]]* ?module/) { |match| match.upcase } .gsub(/\brecord\b/) { |match| match.upcase } @@ -109,8 +111,22 @@ task :backport do .gsub(/^from ([[:alnum:]]+) import/, 'FROM \1 IMPORT') .gsub(/ \^([[:alnum:]])/, ' POINTER TO \1') .gsub(/(then|do)$/) { |match| match.upcase } + .each_line do |line| + if line.start_with? 'PROCEDURE' + current_procedure = line[10...line.index('(')] + elsif line.start_with?('END;') && !current_procedure.nil? + line = "END #{current_procedure};\n" + current_procedure = nil + elsif line.start_with?('end') + line = 'END ' + line[4..] + end + target += line + end - target_path = Pathname.new('boot/stage1/source') + source_path.basename + target_path = Pathname.new('boot/stage1/source') + source_path.basename.sub_ext('.mod') File.write target_path, target end + FileList['source/*.def'].each do |file| + cp file, File.join('boot/stage1/source', Pathname.new(file).basename) + end end diff --git a/source/Compiler.elna b/source/Compiler.elna index 23e700f..e173867 100644 --- a/source/Compiler.elna +++ b/source/Compiler.elna @@ -9,10 +9,10 @@ from Transpiler import Transpile; var ALexer: Lexer; -BEGIN +begin LexerInitialize(ADR(ALexer), StdIn); Transpile(ADR(ALexer)); LexerDestroy(ADR(ALexer)) -END Compiler. +end Compiler.
diff --git a/source/Lexer.def b/source/Lexer.def index a6ba01f..b6648f5 100644 --- a/source/Lexer.def +++ b/source/Lexer.def @@ -80,7 +80,8 @@ TYPE ); LexerToken = RECORD CASE Kind: LexerKind OF - lexerKindBoolean: booleanKind: BOOLEAN + lexerKindBoolean: booleanKind: BOOLEAN | + lexerKindIdentifier: identifierKind: ARRAY[1..256] OF CHAR END END; PLexerToken = POINTER TO LexerToken; diff --git a/source/Lexer.elna b/source/Lexer.elna index cd4f99a..d65dcba 100644 --- a/source/Lexer.elna +++ b/source/Lexer.elna @@ -5,7 +5,7 @@ from SYSTEM import ADR; from Storage import DEALLOCATE, ALLOCATE; from Strings import Length; -from MemUtils import MemZero; +from MemUtils import MemCopy, MemZero; from StrCase import Lower; const @@ -200,7 +200,7 @@ begin Classification[126] := transitionClassOther; (* } *) Classification[127] := transitionClassSingle; (* ~ *) Classification[128] := transitionClassInvalid (* DEL *) -END InitializeClassification; +end; proc CompareKeyword(Keyword: ARRAY OF CHAR; TokenStart: PLexerBuffer; TokenEnd: PLexerBuffer): BOOLEAN; var @@ -217,19 +217,19 @@ begin end; Result := (Index = Length(Keyword)) AND (TokenStart = TokenEnd) AND Result; return Result -END CompareKeyword; +end; (* Reached the end of file. *) proc TransitionActionEof(ALexer: PLexer; AToken: PLexerToken); begin AToken^.Kind := lexerKindEof -END TransitionActionEof; +end; (* Add the character to the token currently read and advance to the next character. *) proc TransitionActionAccumulate(ALexer: PLexer; AToken: PLexerToken); begin INC(ALexer^.Current) -END TransitionActionAccumulate; +end; (* The current character is not a part of the token. Finish the token already * read. Don't advance to the next character. *) @@ -253,7 +253,7 @@ begin if ALexer^.Start^ = '.' then AToken^.Kind := lexerKindDot end -END TransitionActionFinalize; +end; (* An action for tokens containing multiple characters. 
*) proc TransitionActionComposite(ALexer: PLexer; AToken: PLexerToken); @@ -276,14 +276,14 @@ begin AToken^.Kind := lexerKindAssignment end; INC(ALexer^.Current) -END TransitionActionComposite; +end; (* Skip a space. *) proc TransitionActionSkip(ALexer: PLexer; AToken: PLexerToken); begin INC(ALexer^.Current); INC(ALexer^.Start) -END TransitionActionSkip; +end; (* Delimited string action. *) proc TransitionActionDelimited(ALexer: PLexer; AToken: PLexerToken); @@ -298,13 +298,16 @@ begin AToken^.Kind := lexerKindString end; INC(ALexer^.Current) -END TransitionActionDelimited; +end; (* Finalize keyword or identifier. *) proc TransitionActionKeyId(ALexer: PLexer; AToken: PLexerToken); begin AToken^.Kind := lexerKindIdentifier; + AToken^.identifierKind[1] := ALexer^.Current - ALexer^.Start; + MemCopy(ALexer^.Start, ORD(AToken^.identifierKind[1]), ADR(AToken^.identifierKind[2])); + if CompareKeyword('PROGRAM', ALexer^.Start, ALexer^.Current) then AToken^.Kind := lexerKindProgram end; @@ -400,7 +403,7 @@ begin AToken^.Kind := lexerKindBoolean; AToken^.booleanKind := FALSE end -END TransitionActionKeyId; +end; (* Action for tokens containing only one character. The character cannot be * followed by other characters forming a composite token. *) @@ -449,13 +452,13 @@ begin AToken^.Kind := lexerKindPipe end; INC(ALexer^.Current) -END TransitionActionSingle; +end; (* Handle an integer literal. 
*) proc TransitionActionInteger(ALexer: PLexer; AToken: PLexerToken); begin AToken^.Kind := lexerKindInteger -END TransitionActionInteger; +end; proc SetDefaultTransition(CurrentState: TransitionState; DefaultAction: TransitionAction; NextState: TransitionState); var @@ -486,7 +489,7 @@ begin Transitions[ORD(CurrentState)][ORD(transitionClassGreater)] := DefaultTransition; Transitions[ORD(CurrentState)][ORD(transitionClassLess)] := DefaultTransition; Transitions[ORD(CurrentState)][ORD(transitionClassOther)] := DefaultTransition -END SetDefaultTransition; +end; (* * The transition table describes transitions from one state to another, given @@ -740,7 +743,7 @@ begin Transitions[ORD(transitionStateDecimalSuffix)][ORD(transitionClassX)].Action := NIL; Transitions[ORD(transitionStateDecimalSuffix)][ORD(transitionClassX)].NextState := transitionStateEnd -END InitializeTransitions; +end; proc LexerInitialize(ALexer: PLexer; Input: File); begin @@ -750,7 +753,7 @@ begin ALLOCATE(ALexer^.Buffer, ChunkSize); MemZero(ALexer^.Buffer, ChunkSize); ALexer^.Size := ChunkSize -END LexerInitialize; +end; proc LexerCurrent(ALexer: PLexer): LexerToken; var @@ -772,7 +775,7 @@ begin CurrentState := CurrentTransition.NextState end; return Result -END LexerCurrent; +end; proc LexerLex(ALexer: PLexer): LexerToken; var @@ -786,14 +789,14 @@ begin Result := LexerCurrent(ALexer); return Result -END LexerLex; +end; proc LexerDestroy(ALexer: PLexer); begin DEALLOCATE(ALexer^.Buffer, ALexer^.Size) -END LexerDestroy; +end; -BEGIN +begin InitializeClassification(); InitializeTransitions() -END Lexer. +end Lexer. diff --git a/source/Transpiler.elna b/source/Transpiler.elna index 080b790..faa6333 100644 --- a/source/Transpiler.elna +++ b/source/Transpiler.elna @@ -24,14 +24,14 @@ begin end; return Result -END TranspilerLex; +end; (* Write a semicolon followed by a newline. 
*) proc WriteSemicolon(); begin WriteString(';'); WriteLn() -END WriteSemicolon; +end; proc TranspileImport(AContext: PTranspilerContext; ALexer: PLexer); var @@ -58,7 +58,7 @@ begin end; WriteSemicolon(); Token := TranspilerLex(ALexer) -END TranspileImport; +end; proc TranspileImportPart(AContext: PTranspilerContext; ALexer: PLexer); var @@ -71,7 +71,7 @@ begin Token := LexerCurrent(ALexer) end; WriteLn() -END TranspileImportPart; +end; proc TranspileConstant(AContext: PTranspilerContext; ALexer: PLexer); var @@ -90,7 +90,7 @@ begin Token := TranspilerLex(ALexer); WriteSemicolon() -END TranspileConstant; +end; proc TranspileConstantPart(AContext: PTranspilerContext; ALexer: PLexer): BOOLEAN; var @@ -111,7 +111,7 @@ begin end end; return Result -END TranspileConstantPart; +end; proc TranspileModule(AContext: PTranspilerContext; ALexer: PLexer); var @@ -149,15 +149,19 @@ begin WriteLn() end; TranspileProcedurePart(AContext, ALexer); + TranspileStatementPart(AContext, ALexer); - Token := LexerCurrent(ALexer); - while Token.Kind <> lexerKindEof do - WrittenBytes := WriteNBytes(StdOut, ADDRESS(ALexer^.Current - ALexer^.Start), ALexer^.Start); - WriteLn(); + WriteString('END '); - Token := TranspilerLex(ALexer) - end -END TranspileModule; + Token := TranspilerLex(ALexer); + WrittenBytes := WriteNBytes(StdOut, ADDRESS(ALexer^.Current - ALexer^.Start), ALexer^.Start); + + Token := TranspilerLex(ALexer); + Write('.'); + + Token := TranspilerLex(ALexer); + WriteLn() +end; proc TranspileTypeFields(AContext: PTranspilerContext; ALexer: PLexer); var @@ -180,7 +184,7 @@ begin end; WriteLn() end -END TranspileTypeFields; +end; proc TranspileRecordType(AContext: PTranspilerContext; ALexer: PLexer); var @@ -190,7 +194,7 @@ begin WriteLn(); TranspileTypeFields(AContext, ALexer); WriteString(' END') -END TranspileRecordType; +end; proc TranspilePointerType(AContext: PTranspilerContext; ALexer: PLexer); var @@ -203,7 +207,7 @@ begin Token := TranspilerLex(ALexer) end; 
TranspileTypeExpression(AContext, ALexer) -END TranspilePointerType; +end; proc TranspileArrayType(AContext: PTranspilerContext; ALexer: PLexer); var @@ -227,7 +231,7 @@ begin end; WriteString(' OF '); TranspileTypeExpression(AContext, ALexer) -END TranspileArrayType; +end; proc TranspileEnumerationType(AContext: PTranspilerContext; ALexer: PLexer); var @@ -254,12 +258,12 @@ begin end; WriteLn(); WriteString(' )') -END TranspileEnumerationType; +end; proc TranspileUnionType(AContext: PTranspilerContext; ALexer: PLexer); var Token: LexerToken; -END TranspileUnionType; +end; proc TranspileProcedureType(AContext: PTranspilerContext; ALexer: PLexer); var @@ -281,7 +285,7 @@ begin end end; Write(')') -END TranspileProcedureType; +end; proc TranspileTypeExpression(AContext: PTranspilerContext; ALexer: PLexer); var @@ -307,7 +311,7 @@ begin if Token.Kind = lexerKindIdentifier then WrittenBytes := WriteNBytes(StdOut, ADDRESS(ALexer^.Current - ALexer^.Start), ALexer^.Start) end -END TranspileTypeExpression; +end; proc TranspileTypeDeclaration(AContext: PTranspilerContext; ALexer: PLexer); var @@ -323,7 +327,7 @@ begin TranspileTypeExpression(AContext, ALexer); Token := TranspilerLex(ALexer); WriteSemicolon(); -END TranspileTypeDeclaration; +end; proc TranspileTypePart(AContext: PTranspilerContext; ALexer: PLexer); var @@ -342,7 +346,7 @@ begin end; WriteLn() end -END TranspileTypePart; +end; proc TranspileVariableDeclaration(AContext: PTranspilerContext; ALexer: PLexer); var @@ -358,7 +362,7 @@ begin TranspileTypeExpression(AContext, ALexer); Token := TranspilerLex(ALexer); WriteSemicolon() -END TranspileVariableDeclaration; +end; proc TranspileVariablePart(AContext: PTranspilerContext; ALexer: PLexer): BOOLEAN; var @@ -379,16 +383,17 @@ begin end end; return Result -END TranspileVariablePart; +end; -proc TranspileProcedureHeading(AContext: PTranspilerContext; ALexer: PLexer); +proc TranspileProcedureHeading(AContext: PTranspilerContext; ALexer: PLexer): LexerToken; var 
Token: LexerToken; + Result: LexerToken; WrittenBytes: CARDINAL; begin WriteString('PROCEDURE '); - Token := TranspilerLex(ALexer); + Result := TranspilerLex(ALexer); WrittenBytes := WriteNBytes(StdOut, ADDRESS(ALexer^.Current - ALexer^.Start), ALexer^.Start); Token := TranspilerLex(ALexer); @@ -420,8 +425,24 @@ begin Token := TranspilerLex(ALexer) end; Token := TranspilerLex(ALexer); - WriteSemicolon() -END TranspileProcedureHeading; + WriteSemicolon(); + + return Result +end; + +proc TranspileExpression(AContext: PTranspilerContext; ALexer: PLexer; TrailingToken: LexerKind); +var + Token: LexerToken; + WrittenBytes: CARDINAL; +begin + Token := TranspilerLex(ALexer); + + while (Token.Kind <> TrailingToken) AND (Token.Kind <> lexerKindEnd) do + WrittenBytes := WriteNBytes(StdOut, ADDRESS(ALexer^.Current - ALexer^.Start), ALexer^.Start); + Write(' '); + Token := TranspilerLex(ALexer) + end +end; proc TranspileIfStatement(AContext: PTranspilerContext; ALexer: PLexer); var @@ -429,19 +450,14 @@ var WrittenBytes: CARDINAL; begin WriteString(' IF '); - Token := TranspilerLex(ALexer); + TranspileExpression(AContext, ALexer, lexerKindThen); - while Token.Kind <> lexerKindThen do - WrittenBytes := WriteNBytes(StdOut, ADDRESS(ALexer^.Current - ALexer^.Start), ALexer^.Start); - Write(' '); - Token := TranspilerLex(ALexer) - end; WriteString('THEN'); WriteLn(); TranspileStatements(AContext, ALexer); WriteString(' END'); Token := TranspilerLex(ALexer) -END TranspileIfStatement; +end; proc TranspileWhileStatement(AContext: PTranspilerContext; ALexer: PLexer); var @@ -449,34 +465,20 @@ var WrittenBytes: CARDINAL; begin WriteString(' WHILE '); - Token := TranspilerLex(ALexer); + TranspileExpression(AContext, ALexer, lexerKindDo); - while Token.Kind <> lexerKindDo do - WrittenBytes := WriteNBytes(StdOut, ADDRESS(ALexer^.Current - ALexer^.Start), ALexer^.Start); - Write(' '); - Token := TranspilerLex(ALexer) - end; WriteString('DO'); WriteLn(); TranspileStatements(AContext, 
ALexer); WriteString(' END'); Token := TranspilerLex(ALexer) -END TranspileWhileStatement; +end; proc TranspileAssignmentStatement(AContext: PTranspilerContext; ALexer: PLexer); -var - Token: LexerToken; - WrittenBytes: CARDINAL; begin WriteString(' := '); - Token := TranspilerLex(ALexer); - - while (Token.Kind <> lexerKindSemicolon) AND (Token.Kind <> lexerKindEnd) do - WrittenBytes := WriteNBytes(StdOut, ADDRESS(ALexer^.Current - ALexer^.Start), ALexer^.Start); - Write(' '); - Token := TranspilerLex(ALexer) - end -END TranspileAssignmentStatement; + TranspileExpression(AContext, ALexer, lexerKindSemicolon); +end; proc TranspileCallStatement(AContext: PTranspilerContext; ALexer: PLexer); var @@ -490,7 +492,7 @@ begin WrittenBytes := WriteNBytes(StdOut, ADDRESS(ALexer^.Current - ALexer^.Start), ALexer^.Start); Token := TranspilerLex(ALexer) end -END TranspileCallStatement; +end; proc TranspileReturnStatement(AContext: PTranspilerContext; ALexer: PLexer); var @@ -501,7 +503,7 @@ begin Token := TranspilerLex(ALexer); WrittenBytes := WriteNBytes(StdOut, ADDRESS(ALexer^.Current - ALexer^.Start), ALexer^.Start); Token := TranspilerLex(ALexer) -END TranspileReturnStatement; +end; proc TranspileStatement(AContext: PTranspilerContext; ALexer: PLexer); var @@ -548,6 +550,16 @@ begin Write('^'); Token := TranspilerLex(ALexer) end; + while Token.Kind = lexerKindLeftSquare do + Write('['); + Token := TranspilerLex(ALexer); + while Token.Kind <> lexerKindRightSquare do + WrittenBytes := WriteNBytes(StdOut, ADDRESS(ALexer^.Current - ALexer^.Start), ALexer^.Start); + Token := TranspilerLex(ALexer) + end; + Write(']'); + Token := TranspilerLex(ALexer); + end; if Token.Kind = lexerKindAssignment then TranspileAssignmentStatement(AContext, ALexer) @@ -556,7 +568,7 @@ begin TranspileCallStatement(AContext, ALexer) end end -END TranspileStatement; +end; proc TranspileStatements(AContext: PTranspilerContext; ALexer: PLexer); var @@ -573,7 +585,7 @@ begin end; WriteLn() end -END 
TranspileStatements; +end; proc TranspileStatementPart(AContext: PTranspilerContext; ALexer: PLexer); var @@ -585,7 +597,7 @@ begin WriteLn(); TranspileStatements(AContext, ALexer) end -END TranspileStatementPart; +end; proc TranspileProcedureDeclaration(AContext: PTranspilerContext; ALexer: PLexer); var @@ -593,17 +605,18 @@ var SeenPart: BOOLEAN; WrittenBytes: CARDINAL; begin - TranspileProcedureHeading(AContext, ALexer); + Token := TranspileProcedureHeading(AContext, ALexer); SeenPart := TranspileConstantPart(AContext, ALexer); SeenPart := TranspileVariablePart(AContext, ALexer); TranspileStatementPart(AContext, ALexer); + WriteString('END '); - Token := TranspilerLex(ALexer); - WrittenBytes := WriteNBytes(StdOut, ADDRESS(ALexer^.Current - ALexer^.Start), ALexer^.Start); + WrittenBytes := WriteNBytes(StdOut, ORD(Token.identifierKind[1]), ADR(Token.identifierKind[2])); + Token := TranspilerLex(ALexer); WriteSemicolon(); Token := TranspilerLex(ALexer) -END TranspileProcedureDeclaration; +end; proc TranspileProcedurePart(AContext: PTranspilerContext; ALexer: PLexer); var @@ -616,7 +629,7 @@ begin Token := LexerCurrent(ALexer); WriteLn() end -END TranspileProcedurePart; +end; proc Transpile(ALexer: PLexer); var @@ -625,6 +638,6 @@ var Context: TranspilerContext; begin TranspileModule(ADR(Context), ALexer) -END Transpile; +end; -END Transpiler. +end Transpiler.