From b41d6fb907a6c748b8ef9abd535a3060f072a51e Mon Sep 17 00:00:00 2001
From: Eugen Wissner
Date: Sat, 1 Feb 2025 11:47:23 +0100
Subject: [PATCH] Pass command line arguments to main

---
 gcc/elna-generic.cc |  25 +++++++--
 source.elna         | 133 +++++++++++++++++++++++++++++++++-----------
 2 files changed, 123 insertions(+), 35 deletions(-)

diff --git a/gcc/elna-generic.cc b/gcc/elna-generic.cc
index fc42e5a..35e451a 100644
--- a/gcc/elna-generic.cc
+++ b/gcc/elna-generic.cc
@@ -1,3 +1,5 @@
+#include <array>
+
 #include "elna/gcc/elna-generic.h"
 #include "elna/gcc/elna-diagnostic.h"
 
@@ -94,11 +96,11 @@ namespace gcc
         {
             constant->accept(this);
         }
-        tree parameter_types[] = {
+        std::array<tree, 2> parameter_types{
             integer_type_node,
             build_pointer_type(build_pointer_type(char_type_node))
         };
-        tree declaration_type = build_function_type_array(integer_type_node, 2, parameter_types);
+        tree declaration_type = build_function_type_array(integer_type_node, 2, parameter_types.data());
         this->main_fndecl = build_fn_decl("main", declaration_type);
         tree resdecl = build_decl(UNKNOWN_LOCATION, RESULT_DECL, NULL_TREE, integer_type_node);
         DECL_CONTEXT(resdecl) = this->main_fndecl;
@@ -106,6 +108,19 @@ namespace gcc
         enter_scope();
 
+        tree_chain argument_chain;
+        for (std::size_t i = 0; i < 2; ++i)
+        {
+            std::string argument_name = i == 0 ? "count" : "parameters";
+            tree argc_declaration_tree = build_decl(UNKNOWN_LOCATION, PARM_DECL,
+                get_identifier(argument_name.c_str()), parameter_types[i]);
+            DECL_CONTEXT(argc_declaration_tree) = this->main_fndecl;
+            DECL_ARG_TYPE(argc_declaration_tree) = parameter_types[i];
+            this->symbol_map->enter(argument_name, boot::make_info(argc_declaration_tree));
+            argument_chain.append(argc_declaration_tree);
+        }
+        DECL_ARGUMENTS(this->main_fndecl) = argument_chain.head();
+
         for (const auto body_statement : program->body)
         {
             body_statement->accept(this);
@@ -152,7 +167,7 @@ namespace gcc
             enter_scope();
         }
 
-        gcc::tree_chain argument_chain;
+        tree_chain argument_chain;
         for (std::size_t i = 0; i < definition->parameters.size(); ++i)
         {
             auto parameter = definition->parameters.at(i);
@@ -241,7 +256,9 @@ namespace gcc
 
     void generic_visitor::visit(boot::number_literal<bool> *boolean)
     {
-        this->current_expression = build_int_cst_type(boolean_type_node, boolean->number());
+        auto symbol = this->symbol_map->lookup("Bool");
+
+        this->current_expression = build_int_cst_type(symbol->payload, boolean->number());
     }
 
     void generic_visitor::visit(boot::number_literal<char> *character)
diff --git a/source.elna b/source.elna
index 4320e73..c53c785 100644
--- a/source.elna
+++ b/source.elna
@@ -141,17 +141,6 @@ end;
     End of standard procedures.
 *)
 
-proc test_record();
-var r: Token;
-begin
-    write_s("\nTest record:\n");
-
-    r.kind := 4;
-    r.value.int_value := 8;
-
-    write_i(r.value.int_value)
-end;
-
 proc test_primitive();
 begin
     write_s("\nTest primitives:\n");
@@ -274,6 +263,49 @@ begin
     return input
 end;
 
+proc lex_string(input: pointer to Char, current_token: pointer to Token): pointer to Char;
+var
+    token_end: pointer to Char,
+    previous: pointer to Char,
+    constructed_string: pointer to Char,
+    token_length: Word,
+    is_valid: Bool;
+begin
+    token_end := input;
+    previous := cast(cast(input as Word) - 1u as pointer to Char);
+
+    while token_end^ <> '\0' and not (previous^ <> '\\' and token_end^ = '"') do
+        previous := token_end;
+        token_end := token_end + 1
+    end;
+    if token_end^ <> '\"' then
+        return input
+    end;
+    token_length := cast(token_end as Word) - cast(input as Word);
+    current_token^.value.string_value := cast(calloc(token_length, 1) as pointer to Char);
+
+    is_valid := true;
+    constructed_string := current_token^.value.string_value;
+    while cast(input as Word) < cast(token_end as Word) and is_valid do
+
+        if input^ = '\\' then
+            input := input + 1;
+            if escape_char(input^, constructed_string) then
+                input := input + 1
+            else
+                is_valid := false
+            end
+        elsif input^ <> '\0' then
+            constructed_string^ := input^;
+            input := input + 1
+        end;
+
+        constructed_string := constructed_string + 1
+    end;
+
+    return token_end
+end;
+
 proc print_tokens(tokens: pointer to Token, tokens_size: Word);
 var
     current_token: pointer to Token,
@@ -413,7 +445,8 @@ begin
         write_c(' ');
 
         i := i + sizeof(Token)
-    end
+    end;
+    write_c('\n')
 end;
 
 proc categorize_identifier(input_pointer: pointer to Char, token_length: Int): Token;
@@ -489,30 +522,25 @@ begin
     return current_token
 end;
 
-proc compile();
+proc tokenize(input_pointer: pointer to Char, tokens_size: pointer to Word): pointer to Token;
 var
-    input: pointer to Char,
-    input_pointer: pointer to Char,
     token_end: pointer to Char,
-    token_length: Int,
     tokens: pointer to Token,
     current_token: pointer to Token,
-    tokens_size: Word;
+    token_length: Word;
 begin
-    tokens_size := 0u;
+    tokens_size^ := 0u;
     tokens := cast(nil as pointer to Token);
-    input := read_source("example.elna");
-
-    input_pointer := skip_spaces(input);
+    input_pointer := skip_spaces(input_pointer);
 
     while input_pointer^ <> '\0' do
-        tokens := cast(realloc(tokens, tokens_size + sizeof(Token)) as pointer to Token);
-        current_token := tokens + tokens_size;
+        tokens := cast(realloc(tokens, tokens_size^ + sizeof(Token)) as pointer to Token);
+        current_token := tokens + tokens_size^;
 
         if is_alpha(input_pointer^) or input_pointer^ = '_' then
            token_end := lex_identifier(input_pointer + 1);
-            token_length := cast(token_end as Int) - cast(input_pointer as Int);
+            token_length := cast(token_end as Word) - cast(input_pointer as Word);
 
            current_token^ := categorize_identifier(input_pointer, token_length);
@@ -534,8 +562,8 @@ begin
             token_end := lex_comment(input_pointer + 1);
 
             if token_end <> cast(nil as pointer to Char) then
-                token_length := cast(token_end as Int) - cast(input_pointer as Int);
-                current_token^.value.string_value := cast(calloc(token_length + 1, 1) as pointer to Char);
+                token_length := cast(token_end as Word) - cast(input_pointer as Word);
+                current_token^.value.string_value := cast(calloc(token_length + 1u, 1) as pointer to Char);
                 strncpy(current_token^.value.string_value, input_pointer, token_length);
 
                 current_token^.kind := TOKEN_COMMENT;
@@ -558,6 +586,13 @@ begin
             else
                 input_pointer := input_pointer + 1
             end
+        elsif input_pointer^ = '"' then
+            token_end := lex_string(input_pointer + 1, current_token);
+
+            if token_end^ = '"' then
+                current_token^.kind := TOKEN_STRING;
+                input_pointer := token_end + 1
+            end
         elsif input_pointer^ = '[' then
             current_token^.kind := TOKEN_LEFT_SQUARE;
             input_pointer := input_pointer + 1
@@ -630,18 +665,54 @@ end;
 
         if current_token^.kind <> 0 then
-            tokens_size := tokens_size + sizeof(Token)
+            tokens_size^ := tokens_size^ + sizeof(Token);
+            input_pointer := skip_spaces(input_pointer)
+        else
+            write_s("Lexical analysis error on \"");
+            write_c(input_pointer^);
+            write_s("\".\n")
         end
     end;
 
-    print_tokens(tokens, tokens_size);
-    free(input)
+    return tokens
+end;
+
+proc command_line(argc: Int, argv: pointer to pointer to Char);
+var
+    parameter: pointer to pointer to Char,
+    i: Int;
+begin
+    write_s("Argument count: ");
+    write_i(argc - 1);
+    write_s("\nArguments:");
+
+    i := 1;
+    while i < argc do
+        parameter := argv + i * cast(sizeof(pointer to Char) as Int);
+
+        write_c(' ');
+        write_s(parameter^);
+        i := i + 1
+    end
+end;
+
+proc compile();
+var
+    input: pointer to Char,
+    tokens: pointer to Token,
+    tokens_size: Word;
+begin
+    input := read_source("example.elna");
+    tokens := tokenize(input, @tokens_size);
+
+    free(input);
+
+    print_tokens(tokens, tokens_size)
 end;
 
 begin
+    command_line(count, parameters);
     compile();
-
-    test_record();
     test_primitive();
     exit(0)