Pass command line arguments to main

Eugen Wissner 2025-02-01 11:47:23 +01:00
parent 5108016d39
commit b41d6fb907
Signed by: belka
GPG Key ID: A27FDC1E8EE902C0
2 changed files with 123 additions and 35 deletions
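The generated `main` now takes the conventional argument pair, registered in the frontend's symbol table under the names `count` and `parameters` (an `Int` and a `pointer to pointer to Char` on the Elna side), and the test program gains a `command_line` procedure that prints them. A rough C++ equivalent of the entry point being emitted; the body is only an illustration of how the new `command_line` procedure consumes the two values and is not part of the commit:

#include <cstdio>

// Illustrative only: the frontend builds a "main" whose two parameters are an
// int and a char**, visible to Elna code as "count" and "parameters".
int main(int count, char **parameters)
{
    std::printf("Argument count: %d\nArguments:", count - 1);
    for (int i = 1; i < count; ++i)
    {
        std::printf(" %s", parameters[i]);
    }
    return 0;
}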

View File

@@ -1,3 +1,5 @@
+#include <array>
 #include "elna/gcc/elna-generic.h"
 #include "elna/gcc/elna-diagnostic.h"
@@ -94,11 +96,11 @@ namespace gcc
         {
             constant->accept(this);
         }
-        tree parameter_types[] = {
+        std::array<tree, 2> parameter_types{
             integer_type_node,
             build_pointer_type(build_pointer_type(char_type_node))
         };
-        tree declaration_type = build_function_type_array(integer_type_node, 2, parameter_types);
+        tree declaration_type = build_function_type_array(integer_type_node, 2, parameter_types.data());
         this->main_fndecl = build_fn_decl("main", declaration_type);
         tree resdecl = build_decl(UNKNOWN_LOCATION, RESULT_DECL, NULL_TREE, integer_type_node);
         DECL_CONTEXT(resdecl) = this->main_fndecl;
@@ -106,6 +108,19 @@ namespace gcc
         enter_scope();
+        tree_chain argument_chain;
+        for (std::size_t i = 0; i < 2; ++i)
+        {
+            std::string argument_name = i == 0 ? "count" : "parameters";
+            tree argc_declaration_tree = build_decl(UNKNOWN_LOCATION, PARM_DECL,
+                get_identifier(argument_name.c_str()), parameter_types[i]);
+            DECL_CONTEXT(argc_declaration_tree) = this->main_fndecl;
+            DECL_ARG_TYPE(argc_declaration_tree) = parameter_types[i];
+            this->symbol_map->enter(argument_name, boot::make_info(argc_declaration_tree));
+            argument_chain.append(argc_declaration_tree);
+        }
+        DECL_ARGUMENTS(this->main_fndecl) = argument_chain.head();
         for (const auto body_statement : program->body)
         {
             body_statement->accept(this);
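The loop above chains the two PARM_DECLs through the frontend's tree_chain helper before handing them to DECL_ARGUMENTS. The helper itself is not part of this diff; a minimal sketch of what it presumably looks like (an assumption, and it only compiles inside the frontend where GCC's tree, NULL_TREE and TREE_CHAIN are available):

// Assumed sketch: link decls through TREE_CHAIN so that head() yields a list
// that can be assigned to DECL_ARGUMENTS in one piece.
class tree_chain
{
public:
    void append(tree node)
    {
        if (this->first == NULL_TREE)
        {
            this->first = node;
        }
        else
        {
            TREE_CHAIN(this->last) = node;
        }
        this->last = node;
    }

    tree head() const
    {
        return this->first;
    }

private:
    tree first{ NULL_TREE };
    tree last{ NULL_TREE };
};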
@@ -152,7 +167,7 @@ namespace gcc
             enter_scope();
         }
-        gcc::tree_chain argument_chain;
+        tree_chain argument_chain;
         for (std::size_t i = 0; i < definition->parameters.size(); ++i)
         {
             auto parameter = definition->parameters.at(i);
@@ -241,7 +256,9 @@ namespace gcc
     void generic_visitor::visit(boot::number_literal<bool> *boolean)
     {
-        this->current_expression = build_int_cst_type(boolean_type_node, boolean->number());
+        auto symbol = this->symbol_map->lookup("Bool");
+        this->current_expression = build_int_cst_type(symbol->payload, boolean->number());
     }

     void generic_visitor::visit(boot::number_literal<unsigned char> *character)

View File

@@ -141,17 +141,6 @@ end;
   End of standard procedures.
 *)
-proc test_record();
-var r: Token;
-begin
-    write_s("\nTest record:\n");
-    r.kind := 4;
-    r.value.int_value := 8;
-    write_i(r.value.int_value)
-end;
-
 proc test_primitive();
 begin
     write_s("\nTest primitives:\n");
@@ -274,6 +263,49 @@ begin
     return input
 end;

+proc lex_string(input: pointer to Char, current_token: pointer to Token): pointer to Char;
+var
+    token_end: pointer to Char,
+    previous: pointer to Char,
+    constructed_string: pointer to Char,
+    token_length: Word,
+    is_valid: Bool;
+begin
+    token_end := input;
+    previous := cast(cast(input as Word) - 1u as pointer to Char);
+    while token_end^ <> '\0' and not (previous^ <> '\\' and token_end^ = '"') do
+        previous := token_end;
+        token_end := token_end + 1
+    end;
+    if token_end^ <> '\"' then
+        return input
+    end;
+    token_length := cast(token_end as Word) - cast(input as Word);
+    current_token^.value.string_value := cast(calloc(token_length, 1) as pointer to Char);
+    is_valid := true;
+    constructed_string := current_token^.value.string_value;
+    while cast(input as Word) < cast(token_end as Word) and is_valid do
+        if input^ = '\\' then
+            input := input + 1;
+            if escape_char(input^, constructed_string) then
+                input := input + 1
+            else
+                is_valid := false
+            end
+        elsif input^ <> '\0' then
+            constructed_string^ := input^;
+            input := input + 1
+        end;
+        constructed_string := constructed_string + 1
+    end;
+    return token_end
+end;
+
 proc print_tokens(tokens: pointer to Token, tokens_size: Word);
 var
     current_token: pointer to Token,
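lex_string scans for the closing quote with a one-character look-behind so that escaped quotes (\") do not terminate the literal, then copies the contents into a freshly allocated buffer, translating escapes through escape_char. A hypothetical standalone C++ rendering of the scanning loop (find_string_end is a made-up name; input is assumed to point just past the opening quote inside a larger buffer, as lex_string is called with input_pointer + 1, so reading one character back is safe):

// Stops at a '"' that is not preceded by a backslash, or at the terminating
// NUL if the literal is never closed; returns a pointer to that character.
const char *find_string_end(const char *input)
{
    const char *previous = input - 1;
    const char *token_end = input;

    while (*token_end != '\0' && !(*previous != '\\' && *token_end == '"'))
    {
        previous = token_end;
        ++token_end;
    }
    return token_end;
}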
@@ -413,7 +445,8 @@ begin
         write_c(' ');
         i := i + sizeof(Token)
-    end
+    end;
+    write_c('\n')
 end;

 proc categorize_identifier(input_pointer: pointer to Char, token_length: Int): Token;
@@ -489,30 +522,25 @@ begin
     return current_token
 end;

-proc compile();
+proc tokenize(input_pointer: pointer to Char, tokens_size: pointer to Word): pointer to Token;
 var
-    input: pointer to Char,
-    input_pointer: pointer to Char,
     token_end: pointer to Char,
-    token_length: Int,
     tokens: pointer to Token,
     current_token: pointer to Token,
-    tokens_size: Word;
+    token_length: Word;
 begin
-    tokens_size := 0u;
+    tokens_size^ := 0u;
     tokens := cast(nil as pointer to Token);
-    input := read_source("example.elna");
-    input_pointer := skip_spaces(input);
+    input_pointer := skip_spaces(input_pointer);
     while input_pointer^ <> '\0' do
-        tokens := cast(realloc(tokens, tokens_size + sizeof(Token)) as pointer to Token);
-        current_token := tokens + tokens_size;
+        tokens := cast(realloc(tokens, tokens_size^ + sizeof(Token)) as pointer to Token);
+        current_token := tokens + tokens_size^;
         if is_alpha(input_pointer^) or input_pointer^ = '_' then
             token_end := lex_identifier(input_pointer + 1);
-            token_length := cast(token_end as Int) - cast(input_pointer as Int);
+            token_length := cast(token_end as Word) - cast(input_pointer as Word);
             current_token^ := categorize_identifier(input_pointer, token_length);
@@ -534,8 +562,8 @@ begin
             token_end := lex_comment(input_pointer + 1);
             if token_end <> cast(nil as pointer to Char) then
-                token_length := cast(token_end as Int) - cast(input_pointer as Int);
-                current_token^.value.string_value := cast(calloc(token_length + 1, 1) as pointer to Char);
+                token_length := cast(token_end as Word) - cast(input_pointer as Word);
+                current_token^.value.string_value := cast(calloc(token_length + 1u, 1) as pointer to Char);
                 strncpy(current_token^.value.string_value, input_pointer, token_length);
                 current_token^.kind := TOKEN_COMMENT;
@@ -558,6 +586,13 @@ begin
             else
                 input_pointer := input_pointer + 1
             end
+        elsif input_pointer^ = '"' then
+            token_end := lex_string(input_pointer + 1, current_token);
+            if token_end^ = '"' then
+                current_token^.kind := TOKEN_STRING;
+                input_pointer := token_end + 1
+            end
         elsif input_pointer^ = '[' then
             current_token^.kind := TOKEN_LEFT_SQUARE;
             input_pointer := input_pointer + 1
@@ -630,18 +665,54 @@ begin
         end;
         if current_token^.kind <> 0 then
-            tokens_size := tokens_size + sizeof(Token)
+            tokens_size^ := tokens_size^ + sizeof(Token);
+            input_pointer := skip_spaces(input_pointer)
+        else
+            write_s("Lexical analysis error on \"");
+            write_c(input_pointer^);
+            write_s("\".\n")
         end
     end;

-    print_tokens(tokens, tokens_size);
-    free(input)
+    return tokens
+end;
+
+proc command_line(argc: Int, argv: pointer to pointer to Char);
+var
+    parameter: pointer to pointer to Char,
+    i: Int;
+begin
+    write_s("Argument count: ");
+    write_i(argc - 1);
+    write_s("\nArguments:");
+    i := 1;
+    while i < argc do
+        parameter := argv + i * cast(sizeof(pointer to Char) as Int);
+        write_c(' ');
+        write_s(parameter^);
+        i := i + 1
+    end
+end;
+
+proc compile();
+var
+    input: pointer to Char,
+    tokens: pointer to Token,
+    tokens_size: Word;
+begin
+    input := read_source("example.elna");
+    tokens := tokenize(input, @tokens_size);
+    free(input);
+    print_tokens(tokens, tokens_size)
 end;

 begin
+    command_line(count, parameters);
     compile();
-    test_record();
     test_primitive();
     exit(0)