Compare commits

...

13 Commits

14 changed files with 2685 additions and 727 deletions

2
.gitignore vendored

@@ -1,3 +1,3 @@
a.out
/dub.selections.json
/boot/
/build/

164
Rakefile

@@ -3,91 +3,139 @@ require 'rake/clean'
require 'open3'
M2C = 'gm2' # Modula-2 compiler.
BOOT_OBJECTS = FileList['source/*.elna']
.map do |source|
Pathname.new(source).basename.sub_ext('.o')
end
directory 'build/boot'
stage_compiler = Pathname.new 'build/stage1/elna'
directory 'build/stage1'
directory 'build/source'
directory 'build/self'
CLEAN.include 'build'
rule(/build\/boot\/.+\.o$/ => ->(file) {
path = Pathname.new('source') + Pathname.new(file).basename
rule(/build\/stage1\/.+\.o$/ => ->(file) {
path = Pathname.new('boot/stage1/source') + Pathname.new(file).basename
['build/boot', path.sub_ext('.def'), path.sub_ext('.elna')]
}) do |t|
sources = t.prerequisites.filter { |f| f.end_with? '.elna' }
sh M2C, '-fmod=.elna', '-c', '-I', 'source', '-o', t.name, *sources
end
rule(/build\/self\/.+\.o$/ => ->(file) {
path = Pathname.new(file).relative_path_from('build')
result = []
result << File.join('source', path.basename.sub_ext('.def'))
result << File.join('build', path.sub_ext('.mod'))
['build/stage1', path.sub_ext('.def'), path.sub_ext('.mod')]
}) do |t|
sources = t.prerequisites.filter { |f| f.end_with? '.mod' }
sh M2C, '-c', '-I', 'source', '-o', t.name, *sources
sh M2C, '-c', '-I', 'boot/stage1/source', '-o', t.name, *sources
end
rule(/build\/self\/.+\.mod$/ => [
'build/self', 'build/boot/Compiler',
->(file) { File.join('source', Pathname.new(file).basename.sub_ext('.elna')) }
]) do |t|
sources, compiler = t.prerequisites
.reject { |f| File.directory? f }
.partition { |f| f.end_with? '.elna' }
file 'build/stage1/elna' => FileList['boot/stage1/source/*'].map { |file|
File.join 'build', 'stage1', Pathname.new(file).basename.sub_ext('.o')
} do |t|
sh M2C, '-o', t.name, *t.prerequisites
end
File.open t.name, 'w' do |output|
puts
puts(compiler * ' ')
file 'build/stage1/Compiler.o' => ['build/stage1', 'boot/stage1/source/Compiler.mod'] do |t|
sources = t.prerequisites.filter { |f| f.end_with? '.mod' }
Open3.popen2(*compiler) do |cl_in, cl_out|
cl_in.write File.read(*sources)
cl_in.close
sh M2C, '-fscaffold-main', '-c', '-I', 'boot/stage1/source', '-o', t.name, *sources
end
IO.copy_stream cl_out, output
cl_out.close
['source', 'self'].each do |sub|
rule(/build\/#{sub}\/.+\.mod$/ => [
"build/#{sub}", stage_compiler.to_path,
->(file) { File.join('source', Pathname.new(file).basename.sub_ext('.elna')) }
]) do |t|
sources, compiler = t.prerequisites
.reject { |f| File.directory? f }
.partition { |f| f.end_with? '.elna' }
File.open t.name, 'w' do |output|
compiler_command = compiler + sources
puts
puts(compiler_command * ' ')
Open3.popen2(*compiler_command) do |cl_in, cl_out|
cl_in.close
IO.copy_stream cl_out, output
cl_out.close
end
end
end
end
['boot', 'self'].each do |sub|
compiler_binary = Pathname.new('build') + sub + 'Compiler'
rule(/build\/#{sub}\/.+\.o$/ => ->(file) {
path = Pathname.new(file).relative_path_from('build')
result = []
file compiler_binary.to_path => BOOT_OBJECTS.map { |file| File.join('build', sub, file) } do |t|
sh M2C, '-o', t.name, *t.prerequisites
result << File.join('source', path.basename.sub_ext('.def'))
result << File.join('build', path.sub_ext('.mod'))
}) do |t|
sources = t.prerequisites.filter { |f| f.end_with? '.mod' }
sh M2C, '-c', '-I', 'source', '-o', t.name, *sources
end
compiler_object = compiler_binary.sub_ext('.o')
file "build/#{sub}/Compiler.o" => ["build/#{sub}/Compiler.mod"] do |t|
sh M2C, '-fscaffold-main', '-c', '-I', 'source', '-o', t.name, *t.prerequisites
end
stage_compiler = Pathname.new('build') + sub + 'elna'
file stage_compiler => FileList["source/*.elna"].map { |file|
File.join 'build', sub, Pathname.new(file).basename.sub_ext('.o')
} do |t|
sh M2C, '-o', t.name, *t.prerequisites
end
end
file 'build/boot/Compiler.o' => ['build/boot', 'source/Compiler.elna'] do |t|
sources = t.prerequisites.filter { |f| f.end_with? '.elna' }
sh M2C, '-fscaffold-main', '-fmod=.elna', '-c', '-I', 'source', '-o', t.name, *sources
end
file 'build/self/Compiler.o' => ['build/self/Compiler.mod'] do |t|
sources = t.prerequisites.filter { |f| f.end_with? '.mod' }
sh M2C, '-fscaffold-main', '-c', '-I', 'source', '-o', t.name, *sources
end
task default: 'build/self/Compiler'
task default: 'build/self/elna'
task default: 'build/self/Compiler.mod'
task default: 'source/Compiler.elna'
task :default do |t|
exe, previous_output, source = t.prerequisites
cat_arguments = ['cat', source]
exe_arguments = [exe, source]
diff_arguments = ['diff', '-Nur', '--text', previous_output, '-']
puts [cat_arguments * ' ', exe, diff_arguments * ' '].join(' | ')
Open3.pipeline(cat_arguments, exe, diff_arguments)
puts [exe, diff_arguments * ' '].join(' | ')
Open3.pipeline exe_arguments, diff_arguments
end
task :backport do
FileList['source/*.elna'].each do |file|
source_path = Pathname.new file
source = File.read source_path
current_procedure = nil
target = ''
module_name = source_path.basename.sub_ext('')
source
.gsub(/^(var|type|const|begin)/) { |match| match.upcase }
.gsub(/\b(record|nil|or|false|true)\b/) { |match| match.upcase }
.gsub(/proc\(/, 'PROCEDURE(')
.gsub(/ & /, ' AND ')
.gsub(/ -> /, ': ')
.gsub(/program;/, "MODULE #{module_name};")
.gsub(/module;/, "IMPLEMENTATION MODULE #{module_name};")
.gsub(/end\./, "END #{module_name}.")
.gsub(/([[:space:]]*)end(;?)$/, '\1END\2')
.gsub(/^([[:space:]]*)(while|return|if)\b/) { |match| match.upcase }
.gsub(/^from ([[:alnum:]]+) import/, 'FROM \1 IMPORT')
.gsub(/ \^([[:alnum:]])/, ' POINTER TO \1')
.gsub(/(then|do)$/) { |match| match.upcase }
.gsub(/(:|=) \[([[:digit:]]+)\]/, '\1 ARRAY[1..\2] OF ')
.each_line do |line|
if line.start_with? 'proc'
current_procedure = line[5...line.index('(')]
line = 'PROCEDURE ' + line[5..].gsub(',', ';')
elsif line.start_with?('END;') && !current_procedure.nil?
line = "END #{current_procedure};"
current_procedure = nil
elsif line.start_with?('end')
line = 'END ' + line[4..]
end
target += line
end
target_path = Pathname.new('boot/stage1/source') + source_path.basename.sub_ext('.mod')
File.write target_path, target
end
FileList['source/*.def'].each do |file|
cp file, File.join('boot/stage1/source', Pathname.new(file).basename)
end
end
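
As a rough illustration of what the backport task does, the Ruby snippet below (a simplified sketch, not taken from the diff) applies two of the substitutions above to a single Elna declaration; the real task additionally upcases keywords, rewrites module headers, turns parameter-list commas into semicolons, and tracks procedure names so it can qualify their closing END lines.

# Simplified subset of the backport rewriting rules shown above.
line = 'proc lexer_current(lexer: PLexer) -> LexerToken;'
converted = line
  .gsub(/ -> /, ': ')            # the return-type arrow becomes a colon
  .sub(/\Aproc /, 'PROCEDURE ')  # procedure headers get the Modula-2 keyword
puts converted                   # PROCEDURE lexer_current(lexer: PLexer): LexerToken;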

323
rakelib/cross.rake Normal file

@@ -0,0 +1,323 @@
# This Source Code Form is subject to the terms of the Mozilla Public License,
# v. 2.0. If a copy of the MPL was not distributed with this file, You can
# obtain one at https://mozilla.org/MPL/2.0/.
# frozen_string_literal: true
require 'pathname'
require 'uri'
require 'net/http'
require 'rake/clean'
require 'open3'
require 'etc'
GCC_VERSION = "15.1.0"
BINUTILS_VERSION = '2.44'
GLIBC_VERSION = '2.41'
KERNEL_VERSION = '5.15.181'
CLOBBER.include 'build'
class BuildTarget
attr_accessor(:build, :gcc, :target, :tmp)
def gxx
@gcc.gsub 'c', '+'
end
def sysroot
tmp + 'sysroot'
end
def rootfs
tmp + 'rootfs'
end
def tools
tmp + 'tools'
end
end
def gcc_verbose(gcc_binary)
read, write = IO.pipe
sh({'LANG' => 'C'}, gcc_binary, '--verbose', err: write)
write.close
output = read.read
read.close
output
end
def find_build_target(gcc_version, task)
gcc_binary = 'gcc'
output = gcc_verbose gcc_binary
if output.start_with? 'Apple clang'
gcc_binary = "gcc-#{gcc_version.split('.').first}"
output = gcc_verbose gcc_binary
end
result = output
.lines
.each_with_object(BuildTarget.new) do |line, accumulator|
if line.start_with? 'Target: '
accumulator.build = line.split(' ').last.strip
elsif line.start_with? 'COLLECT_GCC'
accumulator.gcc = line.split('=').last.strip
end
end
result.tmp = Pathname.new('./build')
task.with_defaults target: 'riscv32-unknown-linux-gnu'
result.target = task[:target]
result
end
def download_and_unarchive(url, target)
case File.extname url.path
when '.bz2'
archive_type = '-j'
root_directory = File.basename url.path, '.tar.bz2'
when '.xz'
archive_type = '-J'
root_directory = File.basename url.path, '.tar.xz'
else
raise "Unsupported archive type #{url.path}."
end
Net::HTTP.start(url.host, url.port, use_ssl: url.scheme == 'https') do |http|
request = Net::HTTP::Get.new url.request_uri
http.request request do |response|
case response
when Net::HTTPRedirection
download_and_unarchive URI.parse(response['location']), target
when Net::HTTPSuccess
Open3.popen2 'tar', '-C', target.to_path, archive_type, '-xv' do |stdin, stdout, wait_thread|
Thread.new do
stdout.each { |line| puts line }
end
response.read_body do |chunk|
stdin.write chunk
end
stdin.close
wait_thread.value
end
else
response.error!
end
end
end
target + root_directory
end
namespace :cross do
desc 'Build cross binutils'
task :binutils, [:target] do |_, args|
options = find_build_target GCC_VERSION, args
options.tools.mkpath
source_directory = download_and_unarchive(
URI.parse("https://ftp.gnu.org/gnu/binutils/binutils-#{BINUTILS_VERSION}.tar.xz"),
options.tools)
cwd = source_directory.dirname + 'build-binutils'
cwd.mkpath
options.rootfs.mkpath
env = {
'CC' => options.gcc,
'CXX' => options.gxx
}
configure_options = [
"--prefix=#{options.rootfs.realpath}",
"--target=#{options.target}",
'--disable-nls',
'--enable-gprofng=no',
'--disable-werror',
'--enable-default-hash-style=gnu',
'--disable-libquadmath'
]
configure = source_directory.relative_path_from(cwd) + 'configure'
sh env, configure.to_path, *configure_options, chdir: cwd.to_path
sh env, 'make', '-j', Etc.nprocessors.to_s, chdir: cwd.to_path
sh env, 'make', 'install', chdir: cwd.to_path
end
desc 'Build stage 1 GCC'
task :gcc1, [:target] do |_, args|
options = find_build_target GCC_VERSION, args
options.tools.mkpath
source_directory = download_and_unarchive(
URI.parse("https://gcc.gnu.org/pub/gcc/releases/gcc-#{GCC_VERSION}/gcc-#{GCC_VERSION}.tar.xz"),
options.tools)
cwd = source_directory.dirname + 'build-gcc'
cwd.mkpath
options.rootfs.mkpath
options.sysroot.mkpath
sh 'contrib/download_prerequisites', chdir: source_directory.to_path
configure_options = [
"--prefix=#{options.rootfs.realpath}",
"--with-sysroot=#{options.sysroot.realpath}",
'--enable-languages=c,c++',
'--disable-shared',
'--with-arch=rv32imafdc',
'--with-abi=ilp32d',
'--with-tune=rocket',
'--with-isa-spec=20191213',
'--disable-bootstrap',
'--disable-multilib',
'--disable-libmudflap',
'--disable-libssp',
'--disable-libquadmath',
'--disable-libsanitizer',
'--disable-threads',
'--disable-libatomic',
'--disable-libgomp',
'--disable-libvtv',
'--disable-libstdcxx',
'--disable-nls',
'--with-newlib',
'--without-headers',
"--target=#{options.target}",
"--build=#{options.build}",
"--host=#{options.build}"
]
flags = '-O2 -fPIC'
env = {
'CC' => options.gcc,
'CXX' => options.gxx,
'CFLAGS' => flags,
'CXXFLAGS' => flags,
'PATH' => "#{options.rootfs.realpath + 'bin'}:#{ENV['PATH']}"
}
configure = source_directory.relative_path_from(cwd) + 'configure'
sh env, configure.to_path, *configure_options, chdir: cwd.to_path
sh env, 'make', '-j', Etc.nprocessors.to_s, chdir: cwd.to_path
sh env, 'make', 'install', chdir: cwd.to_path
end
desc 'Copy glibc headers'
task :headers, [:target] do |_, args|
options = find_build_target GCC_VERSION, args
options.tools.mkpath
source_directory = download_and_unarchive(
URI.parse("https://ftp.gnu.org/gnu/glibc/glibc-#{GLIBC_VERSION}.tar.xz"),
options.tools)
include_directory = options.tools + 'include'
include_directory.mkpath
cp (source_directory + 'elf/elf.h'), (include_directory + 'elf.h')
end
desc 'Build linux kernel'
task :kernel, [:target] do |_, args|
options = find_build_target GCC_VERSION, args
options.tools.mkpath
cwd = download_and_unarchive(
URI.parse("https://cdn.kernel.org/pub/linux/kernel/v5.x/linux-#{KERNEL_VERSION}.tar.xz"),
options.tools)
env = {
'CROSS_COMPILE' => "#{options.target}-",
'ARCH' => 'riscv',
'PATH' => "#{options.rootfs.realpath + 'bin'}:#{ENV['PATH']}",
'HOSTCFLAGS' => "-D_UUID_T -D__GETHOSTUUID_H -I#{options.tools.realpath + 'include'}"
}
sh env, 'make', 'rv32_defconfig', chdir: cwd.to_path
sh env, 'make', '-j', Etc.nprocessors.to_s, chdir: cwd.to_path
sh env, 'make', 'headers', chdir: cwd.to_path
user_directory = options.sysroot + 'usr'
user_directory.mkpath
cp_r (cwd + 'usr/include'), (user_directory + 'include')
end
desc 'Build glibc'
task :glibc, [:target] do |_, args|
options = find_build_target GCC_VERSION, args
source_directory = options.tools + "glibc-#{GLIBC_VERSION}"
configure_options = [
'--prefix=/usr',
"--host=#{options.target}",
"--target=#{options.target}",
"--build=#{options.build}",
"--enable-kernel=#{KERNEL_VERSION}",
"--with-headers=#{options.sysroot.realpath + 'usr/include'}",
'--disable-nscd',
'--disable-libquadmath',
'--disable-libitm',
'--disable-werror',
'libc_cv_forced_unwind=yes'
]
bin = options.rootfs.realpath + 'bin'
env = {
'PATH' => "#{bin}:#{ENV['PATH']}",
'MAKE' => 'make' # Otherwise it uses gnumake which can be different and too old.
}
cwd = source_directory.dirname + 'build-glibc'
cwd.mkpath
configure = source_directory.relative_path_from(cwd) + 'configure'
sh env, configure.to_path, *configure_options, chdir: cwd.to_path
sh env, 'make', '-j', Etc.nprocessors.to_s, chdir: cwd.to_path
sh env, 'make', "install_root=#{options.sysroot.realpath}", 'install', chdir: cwd.to_path
end
desc 'Build stage 2 GCC'
task :gcc2, [:target] do |_, args|
options = find_build_target GCC_VERSION, args
source_directory = options.tools + "gcc-#{GCC_VERSION}"
cwd = options.tools + 'build-gcc'
rm_rf cwd
cwd.mkpath
configure_options = [
"--prefix=#{options.rootfs.realpath}",
"--with-sysroot=#{options.sysroot.realpath}",
'--enable-languages=c,c++,lto',
'--enable-lto',
'--enable-shared',
'--with-arch=rv32imafdc',
'--with-abi=ilp32d',
'--with-tune=rocket',
'--with-isa-spec=20191213',
'--disable-bootstrap',
'--disable-multilib',
'--enable-checking=release',
'--disable-libssp',
'--disable-libquadmath',
'--enable-threads=posix',
'--with-default-libstdcxx-abi=new',
'--disable-nls',
"--target=#{options.target}",
"--build=#{options.build}",
"--host=#{options.build}"
]
flags = '-O2 -fPIC'
env = {
'CFLAGS' => flags,
'CXXFLAGS' => flags,
'PATH' => "#{options.rootfs.realpath + 'bin'}:#{ENV['PATH']}"
}
configure = source_directory.relative_path_from(cwd) + 'configure'
sh env, configure.to_path, *configure_options, chdir: cwd.to_path
sh env, 'make', '-j', Etc.nprocessors.to_s, chdir: cwd.to_path
sh env, 'make', 'install', chdir: cwd.to_path
end
end
desc 'Build cross toolchain'
task cross: [
'cross:binutils',
'cross:gcc1',
'cross:headers',
'cross:kernel',
'cross:glibc',
'cross:gcc2'
] do
end
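
A usage sketch, assuming this file sits in rakelib/ next to the top-level Rakefile so rake loads it automatically: the cross task runs the stages above in order, and find_build_target falls back to riscv32-unknown-linux-gnu when no target triple is passed.

require 'rake'
include FileUtils

# Equivalent to running `rake cross` from the project root; the bracket
# argument overrides the default riscv32-unknown-linux-gnu target triple.
sh 'rake', 'cross[riscv32-unknown-linux-gnu]'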

source/CommandLineInterface.def Normal file

@@ -0,0 +1,15 @@
DEFINITION MODULE CommandLineInterface;
FROM Common IMPORT ShortString;
TYPE
CommandLine = RECORD
input: ShortString;
lex: BOOLEAN;
parse: BOOLEAN
END;
PCommandLine = POINTER TO CommandLine;
PROCEDURE parse_command_line(): PCommandLine;
END CommandLineInterface.
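
The compiler driver (see the Compiler.elna diff below) calls parse_command_line to accept exactly one source file plus the optional --lex and --parse switches, which are only stored in the CommandLine record at this stage. A hypothetical invocation of one of the stage binaries, sketched in Ruby; the binary path is an assumption based on the Rakefile's targets.

require 'rake'
include FileUtils

# Binary path assumed: build/stage1/elna and build/self/elna are the stage
# compilers the Rakefile produces.
sh 'build/stage1/elna', '--parse', 'source/Parser.elna'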

source/CommandLineInterface.elna Normal file

@@ -0,0 +1,75 @@
module;
from SYSTEM import ADR, TSIZE;
from Args import GetArg, Narg;
from FIO import WriteString, WriteChar, WriteLine, StdErr;
from Storage import ALLOCATE;
from Strings import CompareStr, Length;
from MemUtils import MemZero;
from Common import ShortString;
proc parse_command_line() -> PCommandLine;
var
parameter: ShortString;
i: CARDINAL;
result: PCommandLine;
parsed: BOOLEAN;
begin
i := 1;
NEW(result);
result^.lex := false;
result^.parse := false;
MemZero(ADR(result^.input), 256);
while (i < Narg()) & (result <> nil) do
parsed := GetArg(parameter, i);
parsed := false;
if CompareStr(parameter, '--lex') = 0 then
parsed := true;
result^.lex := true
end;
if CompareStr(parameter, '--parse') = 0 then
parsed := true;
result^.parse := true
end;
if parameter[1] <> '-' then
parsed := true;
if Length(result^.input) > 0 then
WriteString(StdErr, 'Fatal error: only one source file can be compiled at once. First given "');
WriteString(StdErr, result^.input);
WriteString(StdErr, '", then "');
WriteString(StdErr, parameter);
WriteString(StdErr, '".');
WriteLine(StdErr);
result := nil
end;
if result <> nil then
result^.input := parameter
end
end;
if parsed = false then
WriteString(StdErr, 'Fatal error: unknown command line options: ');
WriteString(StdErr, parameter);
WriteChar(StdErr, '.');
WriteLine(StdErr);
result := nil
end;
i := i + 1
end;
if (result <> nil) & (Length(result^.input) = 0) then
WriteString(StdErr, 'Fatal error: no input files.');
WriteLine(StdErr);
result := nil
end;
return result
end;
end.

8
source/Common.def Normal file

@@ -0,0 +1,8 @@
DEFINITION MODULE Common;
TYPE
ShortString = ARRAY[1..256] OF CHAR;
Identifier = ARRAY[1..256] OF CHAR;
PIdentifier = POINTER TO Identifier;
END Common.

3
source/Common.elna Normal file

@@ -0,0 +1,3 @@
module;
end.

source/Compiler.elna

@@ -1,18 +1,51 @@
MODULE Compiler;
program;
FROM FIO IMPORT StdIn;
FROM SYSTEM IMPORT ADR;
from FIO import Close, IsNoError, File, OpenToRead, StdErr, StdOut, WriteLine, WriteString;
from SYSTEM import ADR;
from M2RTS import HALT, ExitOnHalt;
FROM Lexer IMPORT Lexer, LexerDestroy, LexerInitialize;
FROM Transpiler IMPORT Transpile;
from Lexer import Lexer, lexer_destroy, lexer_initialize;
from Transpiler import transpile;
from CommandLineInterface import PCommandLine, parse_command_line;
VAR
ALexer: Lexer;
var
command_line: PCommandLine;
BEGIN
LexerInitialize(ADR(ALexer), StdIn);
proc compile_from_stream();
var
lexer: Lexer;
source_input: File;
begin
source_input := OpenToRead(command_line^.input);
Transpile(ADR(ALexer));
if IsNoError(source_input) = false then
WriteString(StdErr, 'Fatal error: failed to read the input file "');
WriteString(StdErr, command_line^.input);
WriteString(StdErr, '".');
WriteLine(StdErr);
LexerDestroy(ADR(ALexer))
END Compiler.
ExitOnHalt(2)
end;
if IsNoError(source_input) then
lexer_initialize(ADR(lexer), source_input);
transpile(ADR(lexer), StdOut, command_line^.input);
lexer_destroy(ADR(lexer));
Close(source_input)
end
end;
begin
ExitOnHalt(0);
command_line := parse_command_line();
if command_line <> nil then
compile_from_stream()
end;
if command_line = nil then
ExitOnHalt(1)
end;
HALT()
end.
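
With the driver now reading the file named on its command line, the Rakefile's default task runs the stage compiler directly and pipes its output into diff. A rough Ruby equivalent of that check, using the paths listed as the default task's prerequisites:

require 'open3'

# Transpile the compiler's own source with the self-hosted binary and compare
# the result against the previously generated Modula-2 translation.
Open3.pipeline(['build/self/elna', 'source/Compiler.elna'],
               ['diff', '-Nur', '--text', 'build/self/Compiler.mod', '-'])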

source/Lexer.def

@@ -2,6 +2,8 @@ DEFINITION MODULE Lexer;
FROM FIO IMPORT File;
FROM Common IMPORT Identifier, ShortString;
TYPE
PLexerBuffer = POINTER TO CHAR;
Lexer = RECORD
@@ -39,7 +41,7 @@ TYPE
lexerKindNull,
lexerKindAnd,
lexerKindOr,
lexerKindNot,
lexerKindTilde,
lexerKindReturn,
lexerKindDefinition,
lexerKindRange,
@@ -71,25 +73,28 @@ TYPE
lexerKindCharacter,
lexerKindString,
lexerKindFrom,
lexerKindExclamation,
lexerKindPointer,
lexerKindArray,
lexerKindArrow,
lexerKindTrait,
lexerKindProgram,
lexerKindModule,
lexerKindImport
);
LexerToken = RECORD
CASE Kind: LexerKind OF
lexerKindBoolean: booleanKind: BOOLEAN
CASE kind: LexerKind OF
lexerKindBoolean: booleanKind: BOOLEAN |
lexerKindIdentifier: identifierKind: Identifier |
lexerKindInteger: integerKind: INTEGER |
lexerKindString: stringKind: ShortString
END
END;
PLexerToken = POINTER TO LexerToken;
PROCEDURE LexerInitialize(ALexer: PLexer; Input: File);
PROCEDURE LexerDestroy(ALexer: PLexer);
PROCEDURE lexer_initialize(ALexer: PLexer; Input: File);
PROCEDURE lexer_destroy(ALexer: PLexer);
(* Returns the last read token. *)
PROCEDURE LexerCurrent(ALexer: PLexer): LexerToken;
PROCEDURE lexer_current(ALexer: PLexer): LexerToken;
(* Read and return the next token. *)
PROCEDURE LexerLex(ALexer: PLexer): LexerToken;
PROCEDURE lexer_lex(ALexer: PLexer): LexerToken;
END Lexer.

File diff suppressed because it is too large

125
source/Parser.def Normal file

@@ -0,0 +1,125 @@
DEFINITION MODULE Parser;
FROM Common IMPORT Identifier, PIdentifier, ShortString;
FROM Lexer IMPORT PLexer;
TYPE
AstLiteralKind = (
astLiteralKindInteger,
astLiteralKindString,
astLiteralKindNull
);
AstLiteral = RECORD
CASE kind: AstLiteralKind OF
astLiteralKindInteger: integer: INTEGER |
astLiteralKindString: string: ShortString |
astLiteralKindNull:
END
END;
PAstLiteral = POINTER TO AstLiteral;
AstUnaryOperator = (
astUnaryOperatorNot,
astUnaryOperatorMinus
);
AstExpressionKind = (
astExpressionKindLiteral,
astExpressionKindIdentifier,
astExpressionKindArrayAccess,
astExpressionKindDereference,
astExpressionKindFieldAccess,
astExpressionKindUnary
);
AstExpression = RECORD
CASE kind: AstExpressionKind OF
astExpressionKindLiteral: literal: PAstLiteral |
astExpressionKindIdentifier: identifier: Identifier |
astExpressionKindDereference: reference: PAstExpression |
astExpressionKindArrayAccess:
array: PAstExpression;
index: PAstExpression |
astExpressionKindFieldAccess:
aggregate: PAstExpression;
field: Identifier |
astExpressionKindUnary:
unary_operator: AstUnaryOperator;
unary_operand: PAstExpression
END
END;
PAstExpression = POINTER TO AstExpression;
PPAstExpression = POINTER TO PAstExpression;
AstImportStatement = RECORD
package: Identifier;
symbols: PIdentifier
END;
PAstImportStatement = POINTER TO AstImportStatement;
PPAstImportStatement = POINTER TO PAstImportStatement;
AstConstantDeclaration = RECORD
constant_name: Identifier;
constant_value: INTEGER
END;
PAstConstantDeclaration = POINTER TO AstConstantDeclaration;
PPAstConstantDeclaration = POINTER TO PAstConstantDeclaration;
AstFieldDeclaration = RECORD
field_name: Identifier;
field_type: PAstTypeExpression
END;
PAstFieldDeclaration = POINTER TO AstFieldDeclaration;
AstTypeExpressionKind = (
astTypeExpressionKindNamed,
astTypeExpressionKindRecord,
astTypeExpressionKindEnumeration,
astTypeExpressionKindArray,
astTypeExpressionKindPointer,
astTypeExpressionKindProcedure
);
AstTypeExpression = RECORD
CASE kind: AstTypeExpressionKind OF
astTypeExpressionKindNamed: name: Identifier |
astTypeExpressionKindEnumeration: cases: PIdentifier |
astTypeExpressionKindPointer: target: PAstTypeExpression |
astTypeExpressionKindRecord: fields: PAstFieldDeclaration |
astTypeExpressionKindArray:
base: PAstTypeExpression;
length: CARDINAL |
astTypeExpressionKindProcedure: parameters: PPAstTypeExpression
END
END;
PAstTypeExpression = POINTER TO AstTypeExpression;
PPAstTypeExpression = POINTER TO PAstTypeExpression;
AstTypeDeclaration = RECORD
identifier: Identifier;
type_expression: PAstTypeExpression
END;
PAstTypeDeclaration = POINTER TO AstTypeDeclaration;
PPAstTypeDeclaration = POINTER TO PAstTypeDeclaration;
AstVariableDeclaration = RECORD
variable_name: Identifier;
variable_type: PAstTypeExpression
END;
PAstVariableDeclaration = POINTER TO AstVariableDeclaration;
PPAstVariableDeclaration = POINTER TO PAstVariableDeclaration;
AstModule = RECORD
imports: PPAstImportStatement;
constants: PPAstConstantDeclaration;
types: PPAstTypeDeclaration;
variables: PPAstVariableDeclaration
END;
PAstModule = POINTER TO AstModule;
PROCEDURE parse_type_expression(lexer: PLexer): PAstTypeExpression;
PROCEDURE parse_type_part(lexer: PLexer): PPAstTypeDeclaration;
PROCEDURE parse_variable_part(lexer: PLexer): PPAstVariableDeclaration;
PROCEDURE parse_constant_part(lexer: PLexer): PPAstConstantDeclaration;
PROCEDURE parse_import_part(lexer: PLexer): PPAstImportStatement;
PROCEDURE parse_designator(lexer: PLexer): PAstExpression;
END Parser.

593
source/Parser.elna Normal file

@@ -0,0 +1,593 @@
module;
from SYSTEM import TSIZE;
from MemUtils import MemZero;
from Storage import ALLOCATE, REALLOCATE;
from Lexer import LexerKind, LexerToken, lexer_current, lexer_lex;
(* Calls lexer_lex() but skips the comments. *)
proc transpiler_lex(lexer: PLexer) -> LexerToken;
var
result: LexerToken;
begin
result := lexer_lex(lexer);
while result.kind = lexerKindComment do
result := lexer_lex(lexer)
end;
return result
end;
proc parse_type_fields(lexer: PLexer) -> PAstFieldDeclaration;
var
token: LexerToken;
field_declarations: PAstFieldDeclaration;
field_count: CARDINAL;
current_field: PAstFieldDeclaration;
begin
ALLOCATE(field_declarations, TSIZE(AstFieldDeclaration));
token := transpiler_lex(lexer);
field_count := 0;
while token.kind <> lexerKindEnd do
INC(field_count);
REALLOCATE(field_declarations, TSIZE(AstFieldDeclaration) * (field_count + 1));
current_field := field_declarations;
INC(current_field , TSIZE(AstFieldDeclaration) * (field_count - 1));
token := transpiler_lex(lexer);
current_field^.field_name := token.identifierKind;
token := transpiler_lex(lexer);
current_field^.field_type := parse_type_expression(lexer);
token := transpiler_lex(lexer);
if token.kind = lexerKindSemicolon then
token := transpiler_lex(lexer)
end
end;
INC(current_field, TSIZE(AstFieldDeclaration));
MemZero(current_field, TSIZE(AstFieldDeclaration));
return field_declarations
end;
proc parse_record_type(lexer: PLexer) -> PAstTypeExpression;
var
result: PAstTypeExpression;
begin
NEW(result);
result^.kind := astTypeExpressionKindRecord;
result^.fields := parse_type_fields(lexer);
return result
end;
proc parse_pointer_type(lexer: PLexer) -> PAstTypeExpression;
var
token: LexerToken;
result: PAstTypeExpression;
begin
NEW(result);
result^.kind := astTypeExpressionKindPointer;
token := lexer_current(lexer);
if token.kind = lexerKindPointer then
token := transpiler_lex(lexer)
end;
token := lexer_current(lexer);
result^.target := parse_type_expression(lexer);
return result
end;
proc parse_array_type(lexer: PLexer) -> PAstTypeExpression;
var
token: LexerToken;
buffer: [20]CHAR;
result: PAstTypeExpression;
begin
NEW(result);
result^.kind := astTypeExpressionKindArray;
result^.length := 0;
token := lexer_current(lexer);
if token.kind = lexerKindArray then
token := transpiler_lex(lexer)
end;
if token.kind <> lexerKindOf then
token := transpiler_lex(lexer);
result^.length := token.integerKind;
token := transpiler_lex(lexer);
end;
token := transpiler_lex(lexer);
result^.base := parse_type_expression(lexer);
return result
end;
proc parse_enumeration_type(lexer: PLexer) -> PAstTypeExpression;
var
token: LexerToken;
result: PAstTypeExpression;
current_case: PIdentifier;
case_count: CARDINAL;
begin
NEW(result);
result^.kind := astTypeExpressionKindEnumeration;
case_count := 1;
ALLOCATE(result^.cases, TSIZE(Identifier) * 2);
token := transpiler_lex(lexer);
current_case := result^.cases;
current_case^ := token.identifierKind;
token := transpiler_lex(lexer);
while token.kind = lexerKindComma do
token := transpiler_lex(lexer);
INC(case_count);
REALLOCATE(result^.cases, TSIZE(Identifier) * (case_count + 1));
current_case := result^.cases;
INC(current_case, TSIZE(Identifier) * (case_count - 1));
current_case^ := token.identifierKind;
token := transpiler_lex(lexer)
end;
INC(current_case, TSIZE(Identifier));
MemZero(current_case, TSIZE(Identifier));
return result
end;
proc parse_named_type(lexer: PLexer) -> PAstTypeExpression;
var
token: LexerToken;
result: PAstTypeExpression;
begin
token := lexer_current(lexer);
NEW(result);
result^.kind := astTypeExpressionKindNamed;
result^.name := token.identifierKind;
return result
end;
proc parse_procedure_type(lexer: PLexer) -> PAstTypeExpression;
var
token: LexerToken;
result: PAstTypeExpression;
current_parameter: PPAstTypeExpression;
parameter_count: CARDINAL;
begin
parameter_count := 0;
NEW(result);
result^.kind := astTypeExpressionKindProcedure;
ALLOCATE(result^.parameters, 1);
token := transpiler_lex(lexer);
token := transpiler_lex(lexer);
while token.kind <> lexerKindRightParen do
INC(parameter_count);
REALLOCATE(result^.parameters, TSIZE(PAstTypeExpression) * (parameter_count + 1));
current_parameter := result^.parameters;
INC(current_parameter, TSIZE(PAstTypeExpression) * (parameter_count - 1));
current_parameter^ := parse_type_expression(lexer);
token := transpiler_lex(lexer);
if token.kind = lexerKindComma then
token := transpiler_lex(lexer)
end
end;
current_parameter := result^.parameters;
INC(current_parameter, TSIZE(PAstTypeExpression) * parameter_count);
current_parameter^ := nil;
return result
end;
proc parse_type_expression(lexer: PLexer) -> PAstTypeExpression;
var
token: LexerToken;
result: PAstTypeExpression;
begin
result := nil;
token := lexer_current(lexer);
if token.kind = lexerKindRecord then
result := parse_record_type(lexer)
end;
if token.kind = lexerKindLeftParen then
result := parse_enumeration_type(lexer)
end;
if (token.kind = lexerKindArray) or (token.kind = lexerKindLeftSquare) then
result := parse_array_type(lexer)
end;
if token.kind = lexerKindHat then
result := parse_pointer_type(lexer)
end;
if token.kind = lexerKindProc then
result := parse_procedure_type(lexer)
end;
if token.kind = lexerKindIdentifier then
result := parse_named_type(lexer)
end;
return result
end;
proc parse_type_declaration(lexer: PLexer) -> PAstTypeDeclaration;
var
token: LexerToken;
result: PAstTypeDeclaration;
begin
token := lexer_current(lexer);
NEW(result);
result^.identifier := token.identifierKind;
token := transpiler_lex(lexer);
token := transpiler_lex(lexer);
result^.type_expression := parse_type_expression(lexer);
token := transpiler_lex(lexer);
return result
end;
proc parse_type_part(lexer: PLexer) -> PPAstTypeDeclaration;
var
token: LexerToken;
result: PPAstTypeDeclaration;
current_declaration: PPAstTypeDeclaration;
declaration_count: CARDINAL;
begin
token := lexer_current(lexer);
ALLOCATE(result, TSIZE(PAstTypeDeclaration));
current_declaration := result;
declaration_count := 0;
if token.kind = lexerKindType then
token := transpiler_lex(lexer);
while token.kind = lexerKindIdentifier do
INC(declaration_count);
REALLOCATE(result, TSIZE(PAstTypeDeclaration) * (declaration_count + 1));
current_declaration := result;
INC(current_declaration, TSIZE(PAstTypeDeclaration) * (declaration_count - 1));
current_declaration^ := parse_type_declaration(lexer);
token := transpiler_lex(lexer)
end
end;
if declaration_count <> 0 then
INC(current_declaration, TSIZE(PAstTypeDeclaration))
end;
current_declaration^ := nil;
return result
end;
proc parse_variable_declaration(lexer: PLexer) -> PAstVariableDeclaration;
var
token: LexerToken;
result: PAstVariableDeclaration;
begin
NEW(result);
token := lexer_current(lexer);
result^.variable_name := token.identifierKind;
token := transpiler_lex(lexer);
token := transpiler_lex(lexer);
result^.variable_type := parse_type_expression(lexer);
token := transpiler_lex(lexer);
return result
end;
proc parse_variable_part(lexer: PLexer) -> PPAstVariableDeclaration;
var
token: LexerToken;
result: PPAstVariableDeclaration;
current_declaration: PPAstVariableDeclaration;
declaration_count: CARDINAL;
begin
token := lexer_current(lexer);
ALLOCATE(result, TSIZE(PAstVariableDeclaration));
current_declaration := result;
declaration_count := 0;
if token.kind = lexerKindVar then
token := transpiler_lex(lexer);
while token.kind = lexerKindIdentifier do
INC(declaration_count);
REALLOCATE(result, TSIZE(PAstVariableDeclaration) * (declaration_count + 1));
current_declaration := result;
INC(current_declaration, TSIZE(PAstVariableDeclaration) * (declaration_count - 1));
current_declaration^ := parse_variable_declaration(lexer);
token := transpiler_lex(lexer)
end
end;
if declaration_count <> 0 then
INC(current_declaration, TSIZE(PAstVariableDeclaration))
end;
current_declaration^ := nil;
return result
end;
proc parse_constant_declaration(lexer: PLexer) -> PAstConstantDeclaration;
var
token: LexerToken;
result: PAstConstantDeclaration;
begin
NEW(result);
token := lexer_current(lexer);
result^.constant_name := token.identifierKind;
token := transpiler_lex(lexer);
token := transpiler_lex(lexer);
result^.constant_value := token.integerKind;
token := transpiler_lex(lexer);
return result
end;
proc parse_constant_part(lexer: PLexer) -> PPAstConstantDeclaration;
var
token: LexerToken;
result: PPAstConstantDeclaration;
current_declaration: PPAstConstantDeclaration;
declaration_count: CARDINAL;
begin
token := lexer_current(lexer);
ALLOCATE(result, TSIZE(PAstConstantDeclaration));
current_declaration := result;
declaration_count := 0;
if token.kind = lexerKindConst then
token := transpiler_lex(lexer);
while token.kind = lexerKindIdentifier do
INC(declaration_count);
REALLOCATE(result, TSIZE(PAstConstantDeclaration) * (declaration_count + 1));
current_declaration := result;
INC(current_declaration, TSIZE(PAstConstantDeclaration) * (declaration_count - 1));
current_declaration^ := parse_constant_declaration(lexer);
token := transpiler_lex(lexer)
end
end;
if declaration_count <> 0 then
INC(current_declaration, TSIZE(PAstConstantDeclaration))
end;
current_declaration^ := nil;
return result
end;
proc parse_import_statement(lexer: PLexer) -> PAstImportStatement;
var
result: PAstImportStatement;
token: LexerToken;
symbol_count: CARDINAL;
current_symbol: PIdentifier;
begin
NEW(result);
symbol_count := 1;
token := transpiler_lex(lexer);
result^.package := token.identifierKind;
token := transpiler_lex(lexer);
ALLOCATE(result^.symbols, TSIZE(Identifier) * 2);
current_symbol := result^.symbols;
token := transpiler_lex(lexer);
current_symbol^ := token.identifierKind;
token := transpiler_lex(lexer);
while token.kind <> lexerKindSemicolon do
token := transpiler_lex(lexer);
INC(symbol_count);
REALLOCATE(result^.symbols, TSIZE(Identifier) * (symbol_count + 1));
current_symbol := result^.symbols;
INC(current_symbol, TSIZE(Identifier) * (symbol_count - 1));
current_symbol^ := token.identifierKind;
token := transpiler_lex(lexer)
end;
INC(current_symbol, TSIZE(Identifier));
MemZero(current_symbol, TSIZE(Identifier));
token := transpiler_lex(lexer);
return result
end;
proc parse_import_part(lexer: PLexer) -> PPAstImportStatement;
var
token: LexerToken;
import_statement: PPAstImportStatement;
result: PPAstImportStatement;
import_count: CARDINAL;
begin
token := lexer_current(lexer);
ALLOCATE(result, TSIZE(PAstImportStatement));
import_statement := result;
import_count := 0;
while token.kind = lexerKindFrom do
INC(import_count);
REALLOCATE(result, TSIZE(PAstImportStatement) * (import_count + 1));
import_statement := result;
INC(import_statement, TSIZE(PAstImportStatement) * (import_count - 1));
import_statement^ := parse_import_statement(lexer);
token := lexer_current(lexer)
end;
if import_count > 0 then
INC(import_statement, TSIZE(PAstImportStatement))
end;
import_statement^ := nil;
return result
end;
proc parse_literal(lexer: PLexer) -> PAstLiteral;
var
literal: PAstLiteral;
token: LexerToken;
begin
literal := nil;
token := lexer_current(lexer);
if token.kind = lexerKindInteger then
NEW(literal);
literal^.kind := astLiteralKindInteger;
literal^.integer := token.integerKind;
end;
if token.kind = lexerKindCharacter then
NEW(literal);
literal^.kind := astLiteralKindString;
literal^.string := token.stringKind;
end;
if token.kind = lexerKindNull then
NEW(literal);
literal^.kind := astLiteralKindNull;
end;
if literal <> nil then
token := transpiler_lex(lexer)
end;
return literal
end;
proc parse_factor(lexer: PLexer) -> PAstExpression;
var
next_token: LexerToken;
result: PAstExpression;
literal: PAstLiteral;
begin
result := nil;
next_token := lexer_current(lexer);
literal := parse_literal(lexer);
if (result = nil) & (literal <> nil) then
NEW(result);
result^.kind := astExpressionKindLiteral;
result^.literal := literal;
end;
if (result = nil) & (next_token.kind = lexerKindMinus) then
NEW(result);
next_token := transpiler_lex(lexer);
result^.kind := astExpressionKindUnary;
result^.unary_operator := astUnaryOperatorMinus;
result^.unary_operand := parse_factor(lexer)
end;
if (result = nil) & (next_token.kind = lexerKindTilde) then
NEW(result);
next_token := transpiler_lex(lexer);
result^.kind := astExpressionKindUnary;
result^.unary_operator := astUnaryOperatorNot;
result^.unary_operand := parse_factor(lexer)
end;
if (result = nil) & (next_token.kind = lexerKindIdentifier) then
NEW(result);
result^.kind := astExpressionKindIdentifier;
result^.identifier := next_token.identifierKind;
next_token := transpiler_lex(lexer)
end;
return result
end;
proc parse_designator(lexer: PLexer) -> PAstExpression;
var
next_token: LexerToken;
inner_expression: PAstExpression;
designator: PAstExpression;
handled: BOOLEAN;
begin
designator := parse_factor(lexer);
handled := designator <> nil;
next_token := lexer_current(lexer);
while handled do
inner_expression := designator;
handled := false;
if ~handled & (next_token.kind = lexerKindHat) then
NEW(designator);
designator^.kind := astExpressionKindDereference;
designator^.reference := inner_expression;
next_token := transpiler_lex(lexer);
handled := true
end;
if ~handled & (next_token.kind = lexerKindLeftSquare) then
NEW(designator);
next_token := transpiler_lex(lexer);
designator^.kind := astExpressionKindArrayAccess;
designator^.array := inner_expression;
designator^.index := parse_designator(lexer);
next_token := transpiler_lex(lexer);
handled := true
end;
if ~handled & (next_token.kind = lexerKindDot) then
NEW(designator);
next_token := transpiler_lex(lexer);
designator^.kind := astExpressionKindFieldAccess;
designator^.aggregate := inner_expression;
designator^.field := next_token.identifierKind;
next_token := transpiler_lex(lexer);
handled := true
end
end;
return designator
end;
end.

source/Transpiler.def

@@ -1,7 +1,18 @@
DEFINITION MODULE Transpiler;
FROM Lexer IMPORT PLexer;
FROM FIO IMPORT File;
PROCEDURE Transpile(ALexer: PLexer);
FROM Common IMPORT ShortString;
FROM Lexer IMPORT PLexer, Lexer;
TYPE
TranspilerContext = RECORD
input_name: ShortString;
output: File;
lexer: PLexer
END;
PTranspilerContext = POINTER TO TranspilerContext;
PROCEDURE transpile(lexer: PLexer; output: File; input_name: ShortString);
END Transpiler.

source/Transpiler.elna

@@ -1,160 +1,767 @@
IMPLEMENTATION MODULE Transpiler;
module;
FROM FIO IMPORT WriteNBytes, StdOut;
FROM SYSTEM IMPORT ADR, ADDRESS;
from FIO import StdErr, WriteNBytes, WriteLine, WriteChar, WriteString;
from SYSTEM import ADR, ADDRESS, TSIZE;
FROM Terminal IMPORT Write, WriteLn, WriteString;
FROM Lexer IMPORT Lexer, LexerToken, LexerCurrent, LexerLex, LexerKind;
from NumberIO import IntToStr;
from Storage import ALLOCATE, REALLOCATE;
from MemUtils import MemCopy, MemZero;
TYPE
TranspilerContext = RECORD
END;
PTranspilerContext = POINTER TO TranspilerContext;
from Common import Identifier, PIdentifier, ShortString;
from Lexer import Lexer, LexerToken, lexer_current, lexer_lex, LexerKind;
from Parser import AstTypeExpressionKind, AstExpressionKind, AstLiteralKind, AstUnaryOperator,
AstModule, PAstModule, AstExpression, PAstExpression, PAstLiteral,
PAstConstantDeclaration, PPAstConstantDeclaration,
AstTypeDeclaration, PAstTypeDeclaration, PPAstTypeDeclaration,
PAstVariableDeclaration, PPAstVariableDeclaration, PAstImportStatement, PPAstImportStatement,
PAstTypeExpression, PPAstTypeExpression, AstFieldDeclaration, PAstFieldDeclaration,
parse_type_expression, parse_variable_part, parse_type_part, parse_constant_part, parse_import_part,
parse_designator;
(* Calls LexerLex() but skips the comments. *)
PROCEDURE TranspilerLex(ALexer: PLexer): LexerToken;
VAR
Result: LexerToken;
BEGIN
Result := LexerLex(ALexer);
(* Calls lexer_lex() but skips the comments. *)
proc transpiler_lex(lexer: PLexer) -> LexerToken;
var
result: LexerToken;
begin
result := lexer_lex(lexer);
WHILE Result.Kind = lexerKindComment DO
Result := LexerLex(ALexer)
END;
while result.kind = lexerKindComment do
result := lexer_lex(lexer)
end;
RETURN Result
END TranspilerLex;
return result
end;
(* Write a semicolon followed by a newline. *)
PROCEDURE WriteSemicolon();
BEGIN
WriteString(';');
WriteLn()
END WriteSemicolon;
proc write_semicolon(output: File);
begin
WriteChar(output, ';');
WriteLine(output)
end;
PROCEDURE TranspileImport(AContext: PTranspilerContext; ALexer: PLexer);
VAR
Token: LexerToken;
WrittenBytes: CARDINAL;
BEGIN
WriteString('FROM ');
Token := TranspilerLex(ALexer);
proc write_current(lexer: PLexer, output: File);
var
written_bytes: CARDINAL;
begin
written_bytes := WriteNBytes(output, ADDRESS(lexer^.Current - lexer^.Start), lexer^.Start)
end;
WrittenBytes := WriteNBytes(StdOut, ADDRESS(ALexer^.Current - ALexer^.Start), ALexer^.Start);
proc transpile_import_statement(context: PTranspilerContext, import_statement: PAstImportStatement);
var
token: LexerToken;
written_bytes: CARDINAL;
current_symbol: PIdentifier;
begin
WriteString(context^.output, 'FROM ');
written_bytes := WriteNBytes(context^.output, ORD(import_statement^.package[1]), ADR(import_statement^.package[2]));
Token := TranspilerLex(ALexer);
WriteString(' IMPORT ');
WriteString(context^.output, ' IMPORT ');
Token := TranspilerLex(ALexer);
WrittenBytes := WriteNBytes(StdOut, ADDRESS(ALexer^.Current - ALexer^.Start), ALexer^.Start);
current_symbol := import_statement^.symbols;
written_bytes := WriteNBytes(context^.output, ORD(current_symbol^[1]), ADR(current_symbol^[2]));
INC(current_symbol, TSIZE(Identifier));
Token := TranspilerLex(ALexer);
WHILE Token.Kind <> lexerKindSemicolon DO
WriteString(', ');
Token := TranspilerLex(ALexer);
WrittenBytes := WriteNBytes(StdOut, ADDRESS(ALexer^.Current - ALexer^.Start), ALexer^.Start);
Token := TranspilerLex(ALexer)
END;
WriteSemicolon();
Token := TranspilerLex(ALexer)
END TranspileImport;
while ORD(current_symbol^[1]) <> 0 do
WriteString(context^.output, ', ');
written_bytes := WriteNBytes(context^.output, ORD(current_symbol^[1]), ADR(current_symbol^[2]));
INC(current_symbol, TSIZE(Identifier))
end;
write_semicolon(context^.output)
end;
PROCEDURE TranspileImportPart(AContext: PTranspilerContext; ALexer: PLexer);
VAR
Token: LexerToken;
BEGIN
Token := LexerCurrent(ALexer);
proc transpile_import_part(context: PTranspilerContext, imports: PPAstImportStatement);
var
import_statement: PAstImportStatement;
begin
while imports^ <> nil do
transpile_import_statement(context, imports^);
INC(imports, TSIZE(PAstImportStatement))
end;
WriteLine(context^.output)
end;
WHILE Token.Kind = lexerKindFrom DO
TranspileImport(AContext, ALexer);
Token := LexerCurrent(ALexer)
END;
WriteLn()
END TranspileImportPart;
proc transpile_constant_declaration(context: PTranspilerContext, declaration: PAstConstantDeclaration);
var
buffer: [20]CHAR;
written_bytes: CARDINAL;
begin
WriteString(context^.output, ' ');
written_bytes := WriteNBytes(context^.output, ORD(declaration^.constant_name[1]), ADR(declaration^.constant_name[2]));
PROCEDURE TranspileConstant(AContext: PTranspilerContext; ALexer: PLexer);
VAR
Token: LexerToken;
WrittenBytes: CARDINAL;
BEGIN
WriteString(' ');
Token := LexerCurrent(ALexer);
WrittenBytes := WriteNBytes(StdOut, ADDRESS(ALexer^.Current - ALexer^.Start), ALexer^.Start);
WriteString(context^.output, ' = ');
Token := TranspilerLex(ALexer);
WriteString(' = ');
IntToStr(declaration^.constant_value, 0, buffer);
WriteString(context^.output, buffer);
Token := TranspilerLex(ALexer);
WrittenBytes := WriteNBytes(StdOut, ADDRESS(ALexer^.Current - ALexer^.Start), ALexer^.Start);
write_semicolon(context^.output)
end;
Token := TranspilerLex(ALexer);
WriteSemicolon()
END TranspileConstant;
proc transpile_constant_part(context: PTranspilerContext, declarations: PPAstConstantDeclaration);
var
current_declaration: PPAstConstantDeclaration;
begin
if declarations^ <> nil then
WriteString(context^.output, 'CONST');
WriteLine(context^.output);
PROCEDURE TranspileConstantPart(AContext: PTranspilerContext; ALexer: PLexer);
VAR
Token: LexerToken;
BEGIN
Token := LexerCurrent(ALexer);
current_declaration := declarations;
while current_declaration^ <> nil do
transpile_constant_declaration(context, current_declaration^);
IF Token.Kind = lexerKindConst THEN
WriteString('CONST');
WriteLn();
Token := TranspilerLex(ALexer);
INC(current_declaration, TSIZE(PAstConstantDeclaration))
end;
WriteLine(context^.output)
end
end;
WHILE Token.Kind = lexerKindIdentifier DO
TranspileConstant(AContext, ALexer);
Token := TranspilerLex(ALexer)
END;
WriteLn()
END
END TranspileConstantPart;
proc transpile_module(context: PTranspilerContext) -> PAstModule;
var
token: LexerToken;
result: PAstModule;
begin
NEW(result);
token := transpiler_lex(context^.lexer);
PROCEDURE TranspileModule(AContext: PTranspilerContext; ALexer: PLexer);
VAR
Token: LexerToken;
WrittenBytes: CARDINAL;
BEGIN
Token := TranspilerLex(ALexer);
IF Token.Kind = lexerKindDefinition THEN
WriteString('DEFINITION ');
Token := TranspilerLex(ALexer);
ELSIF Token.Kind = lexerKindImplementation THEN
WriteString('IMPLEMENTATION ');
Token := TranspilerLex(ALexer)
END;
WriteString('MODULE ');
if token.kind = lexerKindModule then
WriteString(context^.output, 'IMPLEMENTATION ')
end;
WriteString(context^.output, 'MODULE ');
(* Write the module name and end the line with a semicolon and newline. *)
Token := TranspilerLex(ALexer);
WrittenBytes := WriteNBytes(StdOut, ADDRESS(ALexer^.Current - ALexer^.Start), ALexer^.Start);
transpile_module_name(context);
Token := TranspilerLex(ALexer);
WriteSemicolon();
WriteLn();
token := transpiler_lex(context^.lexer);
write_semicolon(context^.output);
WriteLine(context^.output);
(* Write the module body. *)
Token := TranspilerLex(ALexer);
TranspileImportPart(AContext, ALexer);
TranspileConstantPart(AContext, ALexer);
token := transpiler_lex(context^.lexer);
Token := LexerCurrent(ALexer);
WHILE Token.Kind <> lexerKindEof DO
WrittenBytes := WriteNBytes(StdOut, ADDRESS(ALexer^.Current - ALexer^.Start), ALexer^.Start);
WriteLn();
result^.imports := parse_import_part(context^.lexer);
transpile_import_part(context, result^.imports);
Token := TranspilerLex(ALexer)
END
END TranspileModule;
result^.constants := parse_constant_part(context^.lexer);
transpile_constant_part(context, result^.constants);
result^.types := parse_type_part(context^.lexer);
transpile_type_part(context, result^.types);
PROCEDURE Transpile(ALexer: PLexer);
VAR
Token: LexerToken;
WrittenBytes: CARDINAL;
Context: TranspilerContext;
BEGIN
TranspileModule(ADR(Context), ALexer)
END Transpile;
result^.variables := parse_variable_part(context^.lexer);
transpile_variable_part(context, result^.variables);
END Transpiler.
transpile_procedure_part(context);
transpile_statement_part(context);
WriteString(context^.output, 'END ');
transpile_module_name(context);
token := transpiler_lex(context^.lexer);
WriteChar(context^.output, '.');
token := transpiler_lex(context^.lexer);
WriteLine(context^.output);
return result
end;
proc transpile_type_fields(context: PTranspilerContext, fields: PAstFieldDeclaration);
var
written_bytes: CARDINAL;
current_field: PAstFieldDeclaration;
begin
current_field := fields;
while ORD(current_field^.field_name[1]) <> 0 do
WriteString(context^.output, ' ');
written_bytes := WriteNBytes(context^.output, ORD(current_field^.field_name[1]), ADR(current_field^.field_name[2]));
WriteString(context^.output, ': ');
transpile_type_expression(context, current_field^.field_type);
INC(current_field , TSIZE(AstFieldDeclaration));
if ORD(current_field^.field_name[1]) <> 0 then
WriteChar(context^.output, ';')
end;
WriteLine(context^.output)
end
end;
proc transpile_record_type(context: PTranspilerContext, type_expression: PAstTypeExpression);
begin
WriteString(context^.output, 'RECORD');
WriteLine(context^.output);
transpile_type_fields(context, type_expression^.fields);
WriteString(context^.output, ' END')
end;
proc transpile_pointer_type(context: PTranspilerContext, type_expression: PAstTypeExpression);
var
token: LexerToken;
begin
WriteString(context^.output, 'POINTER TO ');
transpile_type_expression(context, type_expression^.target)
end;
proc transpile_array_type(context: PTranspilerContext, type_expression: PAstTypeExpression);
var
buffer: [20]CHAR;
begin
WriteString(context^.output, 'ARRAY');
if type_expression^.length <> 0 then
WriteString(context^.output, '[1..');
IntToStr(type_expression^.length, 0, buffer);
WriteString(context^.output, buffer);
WriteChar(context^.output, ']')
end;
WriteString(context^.output, ' OF ');
transpile_type_expression(context, type_expression^.base)
end;
proc transpile_enumeration_type(context: PTranspilerContext, type_expression: PAstTypeExpression);
var
current_case: PIdentifier;
written_bytes: CARDINAL;
begin
current_case := type_expression^.cases;
WriteString(context^.output, '(');
WriteLine(context^.output);
WriteString(context^.output, ' ');
written_bytes := WriteNBytes(context^.output, ORD(current_case^[1]), ADR(current_case^[2]));
INC(current_case, TSIZE(Identifier));
while ORD(current_case^[1]) <> 0 do
WriteChar(context^.output, ',');
WriteLine(context^.output);
WriteString(context^.output, ' ');
written_bytes := WriteNBytes(context^.output, ORD(current_case^[1]), ADR(current_case^[2]));
INC(current_case, TSIZE(Identifier))
end;
WriteLine(context^.output);
WriteString(context^.output, ' )')
end;
proc transpile_named_type(context: PTranspilerContext, type_expression: PAstTypeExpression);
var
written_bytes: CARDINAL;
begin
written_bytes := WriteNBytes(context^.output, ORD(type_expression^.name[1]), ADR(type_expression^.name[2]))
end;
proc transpile_procedure_type(context: PTranspilerContext, type_expression: PAstTypeExpression);
var
result: PAstTypeExpression;
current_parameter: PPAstTypeExpression;
parameter_count: CARDINAL;
begin
WriteString(context^.output, 'PROCEDURE(');
current_parameter := type_expression^.parameters;
while current_parameter^ <> nil do
transpile_type_expression(context, current_parameter^);
INC(current_parameter, TSIZE(PAstTypeExpression));
if current_parameter^ <> nil then
WriteString(context^.output, ', ')
end
end;
WriteChar(context^.output, ')')
end;
proc transpile_type_expression(context: PTranspilerContext, type_expression: PAstTypeExpression);
begin
if type_expression^.kind = astTypeExpressionKindRecord then
transpile_record_type(context, type_expression)
end;
if type_expression^.kind = astTypeExpressionKindEnumeration then
transpile_enumeration_type(context, type_expression)
end;
if type_expression^.kind = astTypeExpressionKindArray then
transpile_array_type(context, type_expression)
end;
if type_expression^.kind = astTypeExpressionKindPointer then
transpile_pointer_type(context, type_expression)
end;
if type_expression^.kind = astTypeExpressionKindProcedure then
transpile_procedure_type(context, type_expression)
end;
if type_expression^.kind = astTypeExpressionKindNamed then
transpile_named_type(context, type_expression)
end
end;
proc transpile_type_declaration(context: PTranspilerContext, declaration: PAstTypeDeclaration);
var
written_bytes: CARDINAL;
begin
WriteString(context^.output, ' ');
written_bytes := WriteNBytes(context^.output, ORD(declaration^.identifier[1]), ADR(declaration^.identifier[2]));
WriteString(context^.output, ' = ');
transpile_type_expression(context, declaration^.type_expression);
write_semicolon(context^.output)
end;
proc transpile_type_part(context: PTranspilerContext, declarations: PPAstTypeDeclaration);
var
current_declaration: PPAstTypeDeclaration;
begin
if declarations^ <> nil then
WriteString(context^.output, 'TYPE');
WriteLine(context^.output);
current_declaration := declarations;
while current_declaration^ <> nil do
transpile_type_declaration(context, current_declaration^);
INC(current_declaration, TSIZE(PAstTypeDeclaration))
end;
WriteLine(context^.output)
end
end;
proc transpile_variable_declaration(context: PTranspilerContext, declaration: PAstVariableDeclaration);
var
written_bytes: CARDINAL;
begin
WriteString(context^.output, ' ');
written_bytes := WriteNBytes(context^.output, ORD(declaration^.variable_name[1]), ADR(declaration^.variable_name[2]));
WriteString(context^.output, ': ');
transpile_type_expression(context, declaration^.variable_type);
write_semicolon(context^.output)
end;
proc transpile_variable_part(context: PTranspilerContext, declarations: PPAstVariableDeclaration);
var
current_declaration: PPAstVariableDeclaration;
begin
if declarations^ <> nil then
WriteString(context^.output, 'VAR');
WriteLine(context^.output);
current_declaration := declarations;
while current_declaration^ <> nil do
transpile_variable_declaration(context, current_declaration^);
INC(current_declaration, TSIZE(PAstVariableDeclaration))
end;
WriteLine(context^.output)
end
end;
proc transpile_procedure_heading(context: PTranspilerContext) -> LexerToken;
var
token: LexerToken;
result: LexerToken;
type_expression: PAstTypeExpression;
begin
WriteString(context^.output, 'PROCEDURE ');
result := transpiler_lex(context^.lexer);
write_current(context^.lexer, context^.output);
token := transpiler_lex(context^.lexer);
WriteChar(context^.output, '(');
token := transpiler_lex(context^.lexer);
while token.kind <> lexerKindRightParen do
write_current(context^.lexer, context^.output);
token := transpiler_lex(context^.lexer);
WriteString(context^.output, ': ');
token := transpiler_lex(context^.lexer);
type_expression := parse_type_expression(context^.lexer);
transpile_type_expression(context, type_expression);
token := transpiler_lex(context^.lexer);
if (token.kind = lexerKindSemicolon) or (token.kind = lexerKindComma) then
WriteString(context^.output, '; ');
token := transpiler_lex(context^.lexer)
end
end;
WriteString(context^.output, ')');
token := transpiler_lex(context^.lexer);
(* Check for the return type and write it. *)
if token.kind = lexerKindArrow then
WriteString(context^.output, ': ');
token := transpiler_lex(context^.lexer);
write_current(context^.lexer, context^.output);
token := transpiler_lex(context^.lexer)
end;
token := transpiler_lex(context^.lexer);
write_semicolon(context^.output);
return result
end;
proc transpile_unchanged(context: PTranspilerContext, trailing_token: LexerKind);
var
token: LexerToken;
written_bytes: CARDINAL;
begin
token := lexer_current(context^.lexer);
while (token.kind <> trailing_token) & (token.kind <> lexerKindEnd) do
written_bytes := 0;
if token.kind = lexerKindNull then
WriteString(context^.output, 'NIL ');
written_bytes := 1
end;
if (token.kind = lexerKindBoolean) & token.booleanKind then
WriteString(context^.output, 'TRUE ');
written_bytes := 1
end;
if (token.kind = lexerKindBoolean) & (~token.booleanKind) then
WriteString(context^.output, 'FALSE ');
written_bytes := 1
end;
if token.kind = lexerKindOr then
WriteString(context^.output, 'OR ');
written_bytes := 1
end;
if token.kind = lexerKindAnd then
WriteString(context^.output, 'AND ');
written_bytes := 1
end;
if token.kind = lexerKindTilde then
WriteString(context^.output, 'NOT ');
written_bytes := 1
end;
if written_bytes = 0 then
write_current(context^.lexer, context^.output);
WriteChar(context^.output, ' ')
end;
token := transpiler_lex(context^.lexer)
end
end;
proc parse_expression(lexer: PLexer) -> PAstExpression;
var
next_token: LexerToken;
result: PAstExpression;
written_bytes: CARDINAL;
begin
result := parse_designator(lexer);
written_bytes := WriteNBytes(StdErr, ADDRESS(lexer^.Current - lexer^.Start), lexer^.Start);
WriteLine(StdErr);
return result
end;
proc transpile_unary_operator(context: PTranspilerContext, operator: AstUnaryOperator);
begin
if operator = astUnaryOperatorMinus then
WriteChar(context^.output, '-')
end;
if operator = astUnaryOperatorNot then
WriteChar(context^.output, '~')
end
end;
proc transpile_expression(context: PTranspilerContext, expression: PAstExpression);
var
literal: PAstLiteral;
buffer: [20]CHAR;
written_bytes: CARDINAL;
begin
if expression^.kind = astExpressionKindLiteral then
literal := expression^.literal;
if literal^.kind = astLiteralKindInteger then
IntToStr(literal^.integer, 0, buffer);
WriteString(context^.output, buffer);
end;
if literal^.kind = astLiteralKindString then
WriteString(context^.output, literal^.string)
end
end;
if expression^.kind = astExpressionKindIdentifier then
written_bytes := WriteNBytes(context^.output, ORD(expression^.identifier[1]), ADR(expression^.identifier[2]))
end;
if expression^.kind = astExpressionKindDereference then
transpile_expression(context, expression^.reference);
WriteChar(context^.output, '^')
end;
if expression^.kind = astExpressionKindArrayAccess then
transpile_expression(context, expression^.array);
WriteChar(context^.output, '[');
transpile_expression(context, expression^.index);
WriteChar(context^.output, ']')
end;
if expression^.kind = astExpressionKindFieldAccess then
transpile_expression(context, expression^.aggregate);
WriteChar(context^.output, '.');
written_bytes := WriteNBytes(context^.output, ORD(expression^.field[1]), ADR(expression^.field[2]));
end;
if expression^.kind = astExpressionKindUnary then
transpile_unary_operator(context, expression^.unary_operator);
transpile_expression(context, expression^.unary_operand)
end
end;
proc transpile_if_statement(context: PTranspilerContext);
var
token: LexerToken;
expression: PAstExpression;
lexer: Lexer;
begin
WriteString(context^.output, ' IF ');
lexer := context^.lexer^;
token := transpiler_lex(ADR(lexer));
expression := parse_expression(ADR(lexer));
if expression <> nil then
context^.lexer^ := lexer;
transpile_expression(context, expression);
WriteChar(context^.output, ' ')
end;
if expression = nil then
token := transpiler_lex(context^.lexer)
end;
transpile_unchanged(context, lexerKindThen);
WriteString(context^.output, 'THEN');
WriteLine(context^.output);
transpile_statements(context);
WriteString(context^.output, ' END');
token := transpiler_lex(context^.lexer)
end;
proc transpile_while_statement(context: PTranspilerContext);
var
token: LexerToken;
begin
WriteString(context^.output, ' WHILE ');
token := transpiler_lex(context^.lexer);
transpile_unchanged(context, lexerKindDo);
WriteString(context^.output, 'DO');
WriteLine(context^.output);
transpile_statements(context);
WriteString(context^.output, ' END');
token := transpiler_lex(context^.lexer)
end;
proc transpile_assignment_statement(context: PTranspilerContext);
var
token: LexerToken;
begin
WriteString(context^.output, ' := ');
token := transpiler_lex(context^.lexer);
transpile_unchanged(context, lexerKindSemicolon);
end;
proc transpile_call_statement(context: PTranspilerContext);
var
token: LexerToken;
begin
WriteString(context^.output, '(');
token := transpiler_lex(context^.lexer);
while (token.kind <> lexerKindSemicolon) & (token.kind <> lexerKindEnd) do
write_current(context^.lexer, context^.output);
token := transpiler_lex(context^.lexer)
end
end;
proc transpile_designator_expression(context: PTranspilerContext);
var
token: LexerToken;
begin
WriteString(context^.output, ' ');
write_current(context^.lexer, context^.output);
token := transpiler_lex(context^.lexer);
while token.kind = lexerKindLeftSquare do
WriteChar(context^.output, '[');
token := transpiler_lex(context^.lexer);
while token.kind <> lexerKindRightSquare do
write_current(context^.lexer, context^.output);
token := transpiler_lex(context^.lexer)
end;
WriteChar(context^.output, ']');
token := transpiler_lex(context^.lexer)
end;
if token.kind = lexerKindHat then
WriteChar(context^.output, '^');
token := transpiler_lex(context^.lexer)
end;
if token.kind = lexerKindDot then
WriteChar(context^.output, '.');
token := transpiler_lex(context^.lexer);
write_current(context^.lexer, context^.output);
token := transpiler_lex(context^.lexer)
end;
if token.kind = lexerKindHat then
WriteChar(context^.output, '^');
token := transpiler_lex(context^.lexer)
end;
while token.kind = lexerKindLeftSquare do
WriteChar(context^.output, '[');
token := transpiler_lex(context^.lexer);
while token.kind <> lexerKindRightSquare do
write_current(context^.lexer, context^.output);
token := transpiler_lex(context^.lexer)
end;
WriteChar(context^.output, ']');
token := transpiler_lex(context^.lexer)
end
end;
proc transpile_return_statement(context: PTranspilerContext);
var
token: LexerToken;
begin
WriteString(context^.output, ' RETURN ');
token := transpiler_lex(context^.lexer);
transpile_unchanged(context, lexerKindSemicolon)
end;
proc transpile_statement(context: PTranspilerContext);
var
token: LexerToken;
begin
token := transpiler_lex(context^.lexer);
if token.kind = lexerKindIf then
transpile_if_statement(context)
end;
if token.kind = lexerKindWhile then
transpile_while_statement(context)
end;
if token.kind = lexerKindReturn then
transpile_return_statement(context)
end;
if token.kind = lexerKindIdentifier then
transpile_designator_expression(context);
token := lexer_current(context^.lexer);
if token.kind = lexerKindAssignment then
transpile_assignment_statement(context)
end;
if token.kind = lexerKindLeftParen then
transpile_call_statement(context)
end
end
end;
proc transpile_statements(context: PTranspilerContext);
var
token: LexerToken;
begin
token := lexer_current(context^.lexer);
while token.kind <> lexerKindEnd do
transpile_statement(context);
token := lexer_current(context^.lexer);
if token.kind = lexerKindSemicolon then
WriteChar(context^.output, ';')
end;
WriteLine(context^.output)
end
end;
proc transpile_statement_part(context: PTranspilerContext);
var
token: LexerToken;
begin
token := lexer_current(context^.lexer);
if token.kind = lexerKindBegin then
WriteString(context^.output, 'BEGIN');
WriteLine(context^.output);
transpile_statements(context)
end
end;
proc transpile_procedure_declaration(context: PTranspilerContext);
var
token: LexerToken;
seen_variables: PPAstVariableDeclaration;
written_bytes: CARDINAL;
seen_constants: PPAstConstantDeclaration;
begin
token := transpile_procedure_heading(context);
seen_constants := parse_constant_part(context^.lexer);
transpile_constant_part(context, seen_constants);
seen_variables := parse_variable_part(context^.lexer);
transpile_variable_part(context, seen_variables);
transpile_statement_part(context);
WriteString(context^.output, 'END ');
written_bytes := WriteNBytes(context^.output, ORD(token.identifierKind[1]), ADR(token.identifierKind[2]));
token := transpiler_lex(context^.lexer);
write_semicolon(context^.output);
token := transpiler_lex(context^.lexer)
end;
proc transpile_procedure_part(context: PTranspilerContext);
var
token: LexerToken;
begin
token := lexer_current(context^.lexer);
while token.kind = lexerKindProc do
transpile_procedure_declaration(context);
token := lexer_current(context^.lexer);
WriteLine(context^.output)
end
end;
proc transpile_module_name(context: PTranspilerContext);
var
counter: CARDINAL;
last_slash: CARDINAL;
begin
counter := 1;
last_slash := 0;
while (context^.input_name[counter] <> '.') & (ORD(context^.input_name[counter]) <> 0) do
if context^.input_name[counter] = '/' then
last_slash := counter
end;
INC(counter)
end;
if last_slash = 0 then
counter := 1
end;
if last_slash <> 0 then
counter := last_slash + 1
end;
while (context^.input_name[counter] <> '.') & (ORD(context^.input_name[counter]) <> 0) do
WriteChar(context^.output, context^.input_name[counter]);
INC(counter)
end;
end;
proc transpile(lexer: PLexer, output: File, input_name: ShortString);
var
token: LexerToken;
context: TranspilerContext;
ast_module: PAstModule;
begin
context.input_name := input_name;
context.output := output;
context.lexer := lexer;
ast_module := transpile_module(ADR(context))
end;
end.