added imports

afonya2 2025-06-11 15:35:25 +02:00
parent ce93fb10d4
commit b4f930d1c5
Signed by: afonya
GPG key ID: EBB9C4CAFAAFB2DC
8 changed files with 91 additions and 14 deletions

View file

@@ -1,5 +1,5 @@
use std::{collections::HashMap, process, vec};
use crate::{errors::{create_error, print_error, ErrorType, ErrorSubType}, parser::ASTPart, Context};
use std::{collections::HashMap, fs, process, vec};
use crate::{errors::{create_error, print_error, ErrorSubType, ErrorType}, lexer::lex, parser::{parse, ASTPart}, Context};
const ASXVERSION: [u8; 3] = [0,1,0];
@@ -607,6 +607,42 @@ fn do_ast_op(ast_op: ASTPart, op_count: &mut usize, ops: &mut Vec<Operation>, va
let value_reg = do_ast_op(*tbl_set.value, op_count, ops, variables, next_var_id, strings, next_string_id, functions, next_function_id, registers, ctx, traceback);
ops.push(Operation { opcode: 31, arg1: Some(table_reg), arg2: Some(key_reg as i64), arg3: Some(value_reg), pos: tbl_set.pos as u32 });
},
ASTPart::Import(impr) => {
let fi = fs::read_to_string(&impr.path);
match fi {
Ok(data) => {
let imp_ctx = Context {
file: String::from(&impr.path),
raw_file: data.clone(),
c_funcid: 0,
known: true
};
let self_tb = PrevFunc {
variables: variables.clone(),
previous: Some(Box::new(traceback.clone())),
};
let lexed = lex(data, &imp_ctx);
let ast = parse(lexed, &imp_ctx);
let compiled = compile_function(ast, None, registers, next_var_id, &imp_ctx, &self_tb);
functions.insert(*next_function_id, compiled);
*next_function_id += 1;
let reg = allocate_register(registers);
if reg.unbind_before {
ops.push(Operation { opcode: 8, arg1: Some(reg.register), arg2: None, arg3: None, pos: impr.pos as u32 });
}
ops.push(Operation { opcode: 5, arg1: Some(reg.register), arg2: Some((*next_function_id-1) as i64), arg3: None, pos: impr.pos as u32 });
ops.push(Operation { opcode: 27, arg1: Some(reg.register), arg2: Some(reg.register as i64), arg3: None, pos: impr.pos as u32 });
set_register(registers, RegisterState { id: reg.register, used: true, variable: 0, last_used: *op_count });
return reg.register;
},
Err(e) => {
let err = create_error(&format!("Failed to read file `{}`: {}", impr.path, e), impr.pos, ErrorType::IOError, ErrorSubType::FileError);
print_error(&err, &ctx);
process::exit(1);
}
}
},
_ => {}
}
return 0;

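For readers following the new ASTPart::Import arm above, here is a heavily simplified, hypothetical sketch of the same flow with placeholder types and names (Op, compile_import, and the stand-ins for lexing, parsing and register allocation are illustrative only, not the compiler's real API): the imported file is read, compiled into a fresh function, and the import site receives operations that load and invoke that function.

use std::fs;

// Placeholder operation type; the real compiler pushes Operation values with numeric opcodes.
#[derive(Debug)]
enum Op {
    LoadFunc { reg: usize, func_id: usize }, // the diff emits opcode 5 at this step
    Call { reg: usize },                     // the diff emits opcode 27 at this step
}

// Hypothetical outline of the import flow; the traceback, register bookkeeping and
// error reporting of the real ASTPart::Import(..) arm are left out.
fn compile_import(path: &str, functions: &mut Vec<String>, ops: &mut Vec<Op>) -> std::io::Result<usize> {
    let source = fs::read_to_string(path)?;        // read the imported source file
    let compiled = format!("<compiled {source}>"); // stand-in for lex + parse + compile_function
    functions.push(compiled);                      // store it under the next function id
    let func_id = functions.len() - 1;
    let reg = 0;                                   // stand-in for allocate_register
    ops.push(Op::LoadFunc { reg, func_id });       // load the new function into a register
    ops.push(Op::Call { reg });                    // invoke it at the import site
    Ok(reg)
}

On the error path, the real code reports the failure via create_error and exits, which is why the errors module below gains the new IOError and FileError variants.
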
View file

@@ -6,6 +6,7 @@ pub enum ErrorType {
TypeError,
MathError,
MachineError,
IOError,
}
pub enum ErrorSubType {
@@ -36,6 +37,8 @@ pub enum ErrorSubType {
DivisionByZero,
//Type errors
WrongType,
//IO errors
FileError
}
pub struct ASLError {
@@ -53,6 +56,7 @@ fn convert_types_to_string(typ: &ErrorType) -> String {
ErrorType::MathError => String::from("Math Error: "),
ErrorType::SemanticError => String::from("Semantic Error: "),
ErrorType::MachineError => String::from("Machine Error: "),
ErrorType::IOError => String::from("IO Error: "),
}
}
fn convert_types_to_short(typ: &ErrorType) -> String {
@@ -62,6 +66,7 @@ fn convert_types_to_short(typ: &ErrorType) -> String {
ErrorType::MathError => String::from("MT:"),
ErrorType::SemanticError => String::from("SM:"),
ErrorType::MachineError => String::from("MC:"),
ErrorType::IOError => String::from("IO:"),
}
}
fn convert_subtypes_to_string(stype: &ErrorSubType) -> String {
@@ -88,6 +93,7 @@ fn convert_subtypes_to_string(stype: &ErrorSubType) -> String {
ErrorSubType::DivisionByZero => String::from("Division by zero"),
ErrorSubType::WrongType => String::from("Wrong type"),
ErrorSubType::TooManyArguments => String::from("Too many arguments"),
ErrorSubType::FileError => String::from("File error"),
}
}
fn convert_subtypes_to_short(stype: &ErrorSubType) -> String {
@@ -114,6 +120,7 @@ fn convert_subtypes_to_short(stype: &ErrorSubType) -> String {
ErrorSubType::DivisionByZero => String::from("DZ:"),
ErrorSubType::WrongType => String::from("WT:"),
ErrorSubType::TooManyArguments => String::from("TA:"),
ErrorSubType::FileError => String::from("FE:"),
}
}
@@ -133,6 +140,9 @@ pub fn create_error(message: &str, position: usize, typ: ErrorType, stype: Error
fn get_exact_pos(file: &String, pos: usize) -> (usize, usize) {
let mut line = 1;
let mut column = 1;
if pos < 1 {
return (line, column);
}
for (i, c) in file.chars().enumerate() {
if i == pos-1 {
return (line, column);
@@ -203,7 +213,11 @@ pub fn print_error(error: &ASLError, ctx: &Context) {
out.push_str(" ");
out.push_str(&" ".repeat(column - 1));
out.push_str("^ ");
out.push_str(&error.message);
if error.message.len() < 1 {
out.push_str(&convert_subtypes_to_string(&error.subtype));
} else {
out.push_str(&error.message);
}
out.push_str("\n");
out.push_str(&(line+1).to_string());

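One note on the get_exact_pos change above: since pos is unsigned and the loop compares against pos - 1, the new pos < 1 guard prevents an underflow for position 0 and simply reports the start of the file. A minimal standalone sketch of the same offset-to-(line, column) mapping, simplified and not the exact function from the errors module:

// Map a 1-based character offset to a 1-based (line, column) pair,
// falling back to the start of the file for out-of-range low positions.
fn pos_to_line_col(file: &str, pos: usize) -> (usize, usize) {
    let (mut line, mut column) = (1usize, 1usize);
    if pos < 1 {
        return (line, column); // avoids the pos - 1 underflow below
    }
    for (i, c) in file.chars().enumerate() {
        if i == pos - 1 {
            return (line, column);
        }
        if c == '\n' {
            line += 1;
            column = 1;
        } else {
            column += 1;
        }
    }
    (line, column)
}

fn main() {
    assert_eq!(pos_to_line_col("ab\ncd", 4), (2, 1)); // offset 4 is 'c': line 2, column 1
    assert_eq!(pos_to_line_col("ab\ncd", 0), (1, 1)); // guarded: falls back to the file start
}
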
View file

@@ -113,7 +113,7 @@ fn generate_combinations(words: Vec<&str>) -> Vec<String> {
return result;
}
fn read_identifier(splitted: &Vec<&str>, pos: &mut usize, out: &mut Vec<Token>) {
let keywords = vec!["kraf","piszolj","ha nem geny akkor geny","ha nem geny","nem piszv","kopva","gethelj","ha geny","lőcsve","csecs","reti","piszv","amíg geny","nincs hám","szard le"];
let keywords = vec!["kraf","piszolj","ha nem geny akkor geny","ha nem geny","nem piszv","kopva","gethelj","ha geny","lőcsve","csecs","reti","piszv","amíg geny","nincs hám","szard le","hámozd","be","ba"];
let mut raw_keywords: Vec<String> = vec![];
for keyword in &keywords {
let spi: Vec<&str> = keyword.split(" ").collect();

View file

@@ -3,7 +3,6 @@ use std::fs;
use parser::ASTPart;
use virtualmachine::Machine;
mod lexer;
mod parser;
mod enviroment;
@@ -131,6 +130,9 @@ fn log_ast_part(part: &ASTPart, prefix: String) {
println!("{} Value:", prefix);
log_ast_part(&tbl_set.value, format!("{} ", prefix));
},
ASTPart::Import(imp) => {
println!("{}{}: Import: {}", prefix, imp.pos, imp.path);
},
ASTPart::NOOP => println!("{}NOOP", prefix)
}
}
@@ -144,11 +146,11 @@ struct Context {
}
fn main() {
let inp = fs::read_to_string("./test.as");
let inp = fs::read_to_string("./test.asl");
match inp {
Result::Ok(data) => {
let ctx = Context {
file: String::from("./test.as"),
file: String::from("./test.asl"),
raw_file: data.clone(),
c_funcid: 0,
known: true

View file

@@ -25,6 +25,7 @@ pub enum ASTPart {
Table(AstTable),
TableGet(AstTableGet),
TableSet(AstTableSet),
Import(AstImport),
NOOP
}
#[derive(Debug, Clone, PartialEq)]
@@ -150,6 +151,11 @@ pub struct AstTableSet {
pub value: Box<ASTPart>,
pub pos: usize
}
#[derive(Debug, Clone, PartialEq)]
pub struct AstImport {
pub path: String,
pub pos: usize
}
fn is_end(input: &Token, end: &Vec<Token>) -> bool {
for token in end {
@@ -786,6 +792,28 @@ fn next_operation(pos: &mut usize, input: &Vec<Token>, op_ends: &Vec<Token>, par
let value = read_exp(pos, input, op_ends, parse_ends, ctx);
return ASTPart::Return(AstReturn { value: Box::new(value), pos: token.pos });
}
} else if token.value == "hámozd" {
let var = &input[*pos];
*pos += 1;
if var.typ != TokenType::IDENTIFIER {
let err = create_error(&format!("Expected identifier after hámozd"), token.pos, ErrorType::SyntaxError, ErrorSubType::Expected);
print_error(&err, &ctx);
process::exit(1);
}
if input[*pos].typ != TokenType::KEYWORD || (input[*pos].value != "be" && input[*pos].value != "ba") {
let err = create_error(&format!("Expected `be`/`ba` after hámozd"), input[*pos].pos, ErrorType::SyntaxError, ErrorSubType::Expected);
print_error(&err, &ctx);
process::exit(1);
}
*pos += 1;
let path = &input[*pos];
if path.typ != TokenType::STRING {
let err = create_error(&format!("Expected string for hámozd"), path.pos, ErrorType::SyntaxError, ErrorSubType::Expected);
print_error(&err, &ctx);
process::exit(1);
}
*pos += 1;
return ASTPart::Assigment(AstAssigment { variable: var.value.clone(), value: Box::new(ASTPart::Import(AstImport { path: path.value.clone(), pos: token.pos })), pos: token.pos });
} else {
let err = create_error(&format!("Unexpected `{:?}({})`", token.typ, token.value), token.pos, ErrorType::SyntaxError, ErrorSubType::Unexpected);
print_error(&err, &ctx);

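To illustrate what the new hámozd branch produces: hámozd test be szaft"test2.asl"szaft is parsed as an assignment whose value is an Import node, roughly test = import("test2.asl"). A rough sketch with simplified stand-in types (the real ASTPart, AstAssigment and AstImport live in the parser module; positions are set to 0 here for brevity):

// Simplified stand-ins for the parser's AST types; field names follow the diff above.
#[derive(Debug)]
enum Ast {
    Import { path: String, pos: usize },
    Assign { variable: String, value: Box<Ast>, pos: usize },
}

fn main() {
    // hámozd test be szaft"test2.asl"szaft desugars into:
    let desugared = Ast::Assign {
        variable: String::from("test"),
        value: Box::new(Ast::Import { path: String::from("test2.asl"), pos: 0 }),
        pos: 0,
    };
    println!("{:?}", desugared);
}

The compiler then lowers the Import node by compiling the referenced file, as shown in the first hunk of this commit.
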
test.as (deleted)
View file

@@ -1,7 +0,0 @@
gethelj a = 5
gethelj b = lőcsve() {
ugass(a, c)
}
gethelj c = 10
b()

test.asl Normal file (2 additions)
View file

@@ -0,0 +1,2 @@
hámozd test be szaft"test2.asl"szaft
ugass(test)

test2.asl Normal file (2 additions)
View file

@@ -0,0 +1,2 @@
ugass(szaft"Hell yeah!"szaft)
reti 1