diff --git a/.gitignore b/.gitignore
index cde7e11..b341f53 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,3 +1,3 @@
 a.out
-/dub.selections.json
+/boot/
 /build/
diff --git a/Rakefile b/Rakefile
index 92be6e2..5893232 100644
--- a/Rakefile
+++ b/Rakefile
@@ -13,13 +13,13 @@ directory 'build/self'
 CLEAN.include 'build'
 
 rule(/build\/stage1\/.+\.o$/ => ->(file) {
-  path = Pathname.new('source') + Pathname.new(file).basename
+  path = Pathname.new('boot/stage1/source') + Pathname.new(file).basename
 
   ['build/stage1', path.sub_ext('.def'), path.sub_ext('.elna')]
 }) do |t|
   sources = t.prerequisites.filter { |f| f.end_with? '.elna' }
 
-  sh M2C, '-fmod=.elna', '-c', '-I', 'source', '-o', t.name, *sources
+  sh M2C, '-fmod=.elna', '-c', '-I', 'boot/stage1/source', '-o', t.name, *sources
 end
 
 file 'build/stage1/elna' => FileList['boot/stage1/source/*'].map { |file|
@@ -28,7 +28,7 @@ file 'build/stage1/elna' => FileList['boot/stage1/source/*'].map { |file|
   sh M2C, '-o', t.name, *t.prerequisites
 end
 
-file 'build/stage1/Compiler.o' => ['build/stage1', 'source/Compiler.elna'] do |t|
+file 'build/stage1/Compiler.o' => ['build/stage1', 'boot/stage1/source/Compiler.elna'] do |t|
   sources = t.prerequisites.filter { |f| f.end_with? '.elna' }
 
   sh M2C, '-fscaffold-main', '-fmod=.elna', '-c', '-I', 'boot/stage1/source', '-o', t.name, *sources
diff --git a/source/Compiler.elna b/source/Compiler.elna
index 1cd98ab..9fe3ffa 100644
--- a/source/Compiler.elna
+++ b/source/Compiler.elna
@@ -1,10 +1,10 @@
-MODULE Compiler;
+module Compiler;
 
-FROM FIO IMPORT StdIn;
-FROM SYSTEM IMPORT ADR;
+from FIO import StdIn;
+from SYSTEM import ADR;
 
-FROM Lexer IMPORT Lexer, LexerDestroy, LexerInitialize;
-FROM Transpiler IMPORT Transpile;
+from Lexer import Lexer, LexerDestroy, LexerInitialize;
+from Transpiler import Transpile;
 
 VAR
   ALexer: Lexer;
diff --git a/source/Lexer.def b/source/Lexer.def
index 23bc7f3..a6ba01f 100644
--- a/source/Lexer.def
+++ b/source/Lexer.def
@@ -71,8 +71,8 @@ TYPE
     lexerKindCharacter,
     lexerKindString,
     lexerKindFrom,
-    lexerKindExclamation,
-    lexerKindArrow,
+    lexerKindPointer,
+    lexerKindArray,
     lexerKindTrait,
     lexerKindProgram,
     lexerKindModule,
diff --git a/source/Lexer.elna b/source/Lexer.elna
index 4f09fc3..45f2762 100644
--- a/source/Lexer.elna
+++ b/source/Lexer.elna
@@ -1,16 +1,17 @@
-IMPLEMENTATION MODULE Lexer;
+implementation module Lexer;
 
-FROM FIO IMPORT ReadNBytes;
-FROM SYSTEM IMPORT ADR;
+from FIO import ReadNBytes;
+from SYSTEM import ADR;
 
-FROM Storage IMPORT DEALLOCATE, ALLOCATE;
-FROM Strings IMPORT Length;
-FROM MemUtils IMPORT MemZero;
+from Storage import DEALLOCATE, ALLOCATE;
+from Strings import Length;
+from MemUtils import MemZero;
+from StrCase import Lower;
 
-CONST
+const
   ChunkSize = 65536;
 
-TYPE
+type
   (*
    * Classification table assigns each possible character to a group (class). All
    * characters of the same group a handled equivalently.
@@ -59,11 +60,11 @@ TYPE
     transitionStateDecimalSuffix,
     transitionStateEnd
   );
-  TransitionAction = PROCEDURE(PLexer, PLexerToken);
-  Transition = RECORD
+  TransitionAction = procedure(PLexer, PLexerToken);
+  Transition = record
     Action: TransitionAction;
     NextState: TransitionState
-  END;
+  end;
 
 VAR
   Classification: ARRAY[1..128] OF TransitionClass;
@@ -210,7 +211,7 @@ BEGIN
   Result := TRUE;
 
   WHILE (Index < Length(Keyword)) AND (TokenStart <> TokenEnd) AND Result DO
-    Result := Keyword[Index] = TokenStart^;
+    Result := (Keyword[Index] = TokenStart^) OR (Lower(Keyword[Index]) = TokenStart^);
     INC(TokenStart);
     INC(Index)
   END;
@@ -344,6 +345,10 @@ BEGIN
     AToken^.Kind := lexerKindModule
   ELSIF CompareKeyword('IMPLEMENTATION', ALexer^.Start, ALexer^.Current) THEN
     AToken^.Kind := lexerKindImplementation
+  ELSIF CompareKeyword('POINTER', ALexer^.Start, ALexer^.Current) THEN
+    AToken^.Kind := lexerKindPointer
+  ELSIF CompareKeyword('ARRAY', ALexer^.Start, ALexer^.Current) THEN
+    AToken^.Kind := lexerKindArray
   ELSIF CompareKeyword('TRUE', ALexer^.Start, ALexer^.Current) THEN
     AToken^.Kind := lexerKindBoolean;
     AToken^.booleanKind := TRUE
@@ -685,7 +690,6 @@ VAR
   Result: LexerToken;
 BEGIN
   ALexer^.Current := ALexer^.Start;
-  Result.Kind := lexerKindTrait;
   CurrentState := transitionStateStart;
 
   WHILE CurrentState <> transitionStateEnd DO
diff --git a/source/Transpiler.elna b/source/Transpiler.elna
index 25dfe52..3bd76e4 100644
--- a/source/Transpiler.elna
+++ b/source/Transpiler.elna
@@ -1,15 +1,16 @@
-IMPLEMENTATION MODULE Transpiler;
+implementation module Transpiler;
 
-FROM FIO IMPORT WriteNBytes, StdOut;
-FROM SYSTEM IMPORT ADR, ADDRESS;
+from FIO import WriteNBytes, StdOut;
+from SYSTEM import ADR, ADDRESS;
 
-FROM Terminal IMPORT Write, WriteLn, WriteString;
-FROM Lexer IMPORT Lexer, LexerToken, LexerCurrent, LexerLex, LexerKind;
+from Terminal import Write, WriteLn, WriteString;
+from Lexer import Lexer, LexerToken, LexerCurrent, LexerLex, LexerKind;
 
-TYPE
-  TranspilerContext = RECORD
-  END;
-  PTranspilerContext = POINTER TO TranspilerContext;
+type
+  PTranspilerContext = pointer to TranspilerContext;
+  TranspilerContext = record
+    Indentation: CARDINAL
+  end;
 
 (* Calls LexerLex() but skips the comments. *)
 PROCEDURE TranspilerLex(ALexer: PLexer): LexerToken;
@@ -138,6 +139,7 @@ BEGIN
   Token := TranspilerLex(ALexer);
   TranspileImportPart(AContext, ALexer);
   TranspileConstantPart(AContext, ALexer);
+  TranspileTypePart(AContext, ALexer);
 
   Token := LexerCurrent(ALexer);
   WHILE Token.Kind <> lexerKindEof DO
@@ -148,6 +150,165 @@ BEGIN
   END
 END TranspileModule;
 
+PROCEDURE TranspileTypeFields(AContext: PTranspilerContext; ALexer: PLexer);
+VAR
+  Token: LexerToken;
+  WrittenBytes: CARDINAL;
+BEGIN
+  Token := TranspilerLex(ALexer);
+
+  WHILE Token.Kind <> lexerKindEnd DO
+    WriteString(' ');
+    WrittenBytes := WriteNBytes(StdOut, ADDRESS(ALexer^.Current - ALexer^.Start), ALexer^.Start);
+    Token := TranspilerLex(ALexer);
+    WriteString(': ');
+    TranspileTypeExpression(AContext, ALexer);
+    Token := TranspilerLex(ALexer);
+
+    IF Token.Kind = lexerKindSemicolon THEN
+      Token := TranspilerLex(ALexer);
+      WriteSemicolon()
+    ELSE
+      WriteLn()
+    END
+  END
+END TranspileTypeFields;
+
+PROCEDURE TranspileRecordType(AContext: PTranspilerContext; ALexer: PLexer);
+VAR
+  Token: LexerToken;
+BEGIN
+  WriteString('RECORD');
+  WriteLn();
+  TranspileTypeFields(AContext, ALexer);
+  WriteString(' END')
+END TranspileRecordType;
+
+PROCEDURE TranspilePointerType(AContext: PTranspilerContext; ALexer: PLexer);
+VAR
+  Token: LexerToken;
+  WrittenBytes: CARDINAL;
+BEGIN
+  WriteString('POINTER TO ');
+  Token := TranspilerLex(ALexer);
+  TranspileTypeExpression(AContext, ALexer)
+END TranspilePointerType;
+
+PROCEDURE TranspileArrayType(AContext: PTranspilerContext; ALexer: PLexer);
+VAR
+  Token: LexerToken;
+BEGIN
+END TranspileArrayType;
+
+PROCEDURE TranspileEnumerationType(AContext: PTranspilerContext; ALexer: PLexer);
+VAR
+  Token: LexerToken;
+  WrittenBytes: CARDINAL;
+BEGIN
+  WriteString('(');
+  WriteLn();
+  WriteString(' ');
+
+  Token := TranspilerLex(ALexer);
+  WrittenBytes := WriteNBytes(StdOut, ADDRESS(ALexer^.Current - ALexer^.Start), ALexer^.Start);
+
+  Token := TranspilerLex(ALexer);
+
+  WHILE Token.Kind = lexerKindComma DO
+    Write(',');
+    WriteLn();
+    WriteString(' ');
+    Token := TranspilerLex(ALexer);
+    WrittenBytes := WriteNBytes(StdOut, ADDRESS(ALexer^.Current - ALexer^.Start), ALexer^.Start);
+
+    Token := TranspilerLex(ALexer)
+  END;
+  WriteLn();
+  WriteString(' )')
+END TranspileEnumerationType;
+
+PROCEDURE TranspileUnionType(AContext: PTranspilerContext; ALexer: PLexer);
+VAR
+  Token: LexerToken;
+END TranspileUnionType;
+
+PROCEDURE TranspileProcedureType(AContext: PTranspilerContext; ALexer: PLexer);
+VAR
+  Token: LexerToken;
+  WrittenBytes: CARDINAL;
+BEGIN
+  Token := TranspilerLex(ALexer);
+  WriteString('PROCEDURE(');
+
+  Token := TranspilerLex(ALexer);
+
+  WHILE Token.Kind <> lexerKindRightParen DO
+    WrittenBytes := WriteNBytes(StdOut, ADDRESS(ALexer^.Current - ALexer^.Start), ALexer^.Start);
+
+    Token := TranspilerLex(ALexer);
+    IF Token.Kind = lexerKindComma THEN
+      Token := TranspilerLex(ALexer);
+      WriteString(', ')
+    END
+  END;
+  Write(')')
+END TranspileProcedureType;
+
+PROCEDURE TranspileTypeExpression(AContext: PTranspilerContext; ALexer: PLexer);
+VAR
+  Token: LexerToken;
+  WrittenBytes: CARDINAL;
+BEGIN
+  Token := TranspilerLex(ALexer);
+  IF Token.Kind = lexerKindRecord THEN
+    TranspileRecordType(AContext, ALexer)
+  ELSIF Token.Kind = lexerKindLeftParen THEN
+    TranspileEnumerationType(AContext, ALexer)
+  ELSIF Token.Kind = lexerKindArray THEN
+    TranspileArrayType(AContext, ALexer)
+  ELSIF Token.Kind = lexerKindPointer THEN
+    TranspilePointerType(AContext, ALexer)
+  ELSIF Token.Kind = lexerKindProc THEN
+    TranspileProcedureType(AContext, ALexer)
+  ELSE
+    WrittenBytes := WriteNBytes(StdOut, ADDRESS(ALexer^.Current - ALexer^.Start), ALexer^.Start)
+  END
+END TranspileTypeExpression;
+
+PROCEDURE TranspileTypeDeclaration(AContext: PTranspilerContext; ALexer: PLexer);
+VAR
+  Token: LexerToken;
+  WrittenBytes: CARDINAL;
+BEGIN
+  WriteString(' ');
+  Token := LexerCurrent(ALexer);
+  WrittenBytes := WriteNBytes(StdOut, ADDRESS(ALexer^.Current - ALexer^.Start), ALexer^.Start);
+
+  Token := TranspilerLex(ALexer);
+  WriteString(' = ');
+  TranspileTypeExpression(AContext, ALexer);
+  Token := TranspilerLex(ALexer);
+  WriteSemicolon();
+END TranspileTypeDeclaration;
+
+PROCEDURE TranspileTypePart(AContext: PTranspilerContext; ALexer: PLexer);
+VAR
+  Token: LexerToken;
+BEGIN
+  Token := LexerCurrent(ALexer);
+
+  IF Token.Kind = lexerKindType THEN
+    WriteString('TYPE');
+    WriteLn();
+    Token := TranspilerLex(ALexer);
+
+    WHILE Token.Kind = lexerKindIdentifier DO
+      TranspileTypeDeclaration(AContext, ALexer);
+      Token := TranspilerLex(ALexer)
+    END
+  END
+END TranspileTypePart;
+
 PROCEDURE Transpile(ALexer: PLexer);
 VAR
   Token: LexerToken;
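Note (illustration only, not part of the diff): the new TranspileTypePart pass reads an Elna `type` section and re-emits it as Modula-2. A minimal sketch of input and expected output follows, assuming hypothetical type names (PNode, Node, Color), that WriteSemicolon (not shown in this diff) prints ';' followed by a newline, and that the exact indentation depends on the string literals passed to WriteString above.

    (* Hypothetical Elna input. *)
    type
      PNode = pointer to Node;
      Color = (colorRed, colorGreen, colorBlue);

    (* Approximate Modula-2 output of TranspileTypePart under the assumptions above. *)
    TYPE
     PNode = POINTER TO Node;
     Color = (
     colorRed,
     colorGreen,
     colorBlue
     );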