Reintroducing meta tokens
parent 015de5dc0a
commit 42fa5affb5
example.mlc
@@ -1,6 +1,3 @@
-#with mathlib.mlc
 variable:=-3; c := (a+b- 3) * 23 + variable; d := c - a;Natural : Number (n) := {n >= 0};faculty : Natural (n) -> Natural := if n = 0 then 1 else faculty (n-1) * n end;
 String Natural (n) := {Character * n};hello_word -> String := "Hello World!";
 first_letter -> Character := 'a';
-wrong -> Logic := false;date -> String := "#date_now";
-user -> String := "#user"
src/main.rs (49 lines changed)
@@ -6,8 +6,51 @@ mod tokenizer;
 use tokenizer::*;
 
 fn main() {
-    // Preprocessor
-    let sample_code: String = std::fs::read_to_string("example.mlc").unwrap();
+    // CL-Wrapper
+    let args: Vec<String> = std::env::args().collect();
+
+    // Adjust to following principle:
+    // micro [-t <target>] [-l <language.toml>] [<list of source files>]
+    // -t default: first found
+    // -l default: language.toml
+    //
+    // Either loads all source files or takes stdin input by piping code into the program
+    let mut raw_source_code: String = String::from("");
+    for i in 1..args.len() {
+        raw_source_code = raw_source_code
+            + std::fs::read_to_string(args[i].clone())
+                .expect("Source file not found!")
+                .as_str();
+    }
+
+    // Load language toml
+    let mut meta_rules: crate::preprocessor::MetaRules =
+        crate::preprocessor::MetaRules::new("./language.toml");
+    let mut tokenizer_configuration: Tokenizer = Tokenizer::new();
+    tokenizer_configuration.read_configuration_from_file("./language.toml");
+
+    // Run preprocessor
+    let preprocessed_source_code: String = meta_rules.process(raw_source_code);
+
+    // Tokenizing
+    tokenizer_configuration.eat(preprocessed_source_code.as_str());
+    tokenizer_configuration.identify_tokens();
+
+    // Reintroducing meta_tokens
+    for meta_token in meta_rules.special_tokens.iter() {
+        // Go through all tokens
+        for i in 0..tokenizer_configuration.tokens.len() {
+            if meta_token.0 == tokenizer_configuration.tokens[i].token {
+                tokenizer_configuration.tokens[i] = meta_token.1.clone();
+                break;
+            }
+        }
+    }
+
+    // Syntax resolving
+
+    // Apply translation
+
+    /* let sample_code: String = std::fs::read_to_string("example.mlc").unwrap();
     let mut example_tokenizer: Tokenizer = Tokenizer::new();
     let mut meta_rules: crate::preprocessor::MetaRules =
         crate::preprocessor::MetaRules::new("./language.toml");
@@ -31,5 +74,5 @@ fn main() {
 
     for token in example_identifier.tokens.iter() {
         print!("{}", token.token);
-    }
+    } */
 }
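For context on the change itself: the preprocessor replaces each meta token (such as "#user") in the raw source with a generated placeholder id and records the pair in special_tokens; after identify_tokens(), the new loop swaps each placeholder back for the original meta token. Below is a minimal, self-contained sketch of that round trip. The Token shape mirrors src/tokenizer.rs, but the placeholder id "meta_0", the sample values, and the reduced two-variant TokenType are illustrative assumptions, not repository code.

// Sketch: reintroducing meta tokens after tokenization.
// Assumption: special_tokens pairs each placeholder id produced by the
// preprocessor with the token it stands for; all values here are made up.

#[derive(Clone, Debug)]
enum TokenType {
    OPERAND,
    IDENTIFIER,
}

#[derive(Clone, Debug)]
struct Token {
    token: String,
    token_type: TokenType,
}

fn main() {
    // Tokens as the tokenizer produced them; "meta_0" is a placeholder id
    // that the preprocessor substituted for "#user" before tokenizing.
    let mut tokens = vec![
        Token { token: String::from("user"), token_type: TokenType::IDENTIFIER },
        Token { token: String::from("meta_0"), token_type: TokenType::IDENTIFIER },
    ];

    // Placeholder id -> original meta token, as collected by the preprocessor.
    let special_tokens: Vec<(String, Token)> = vec![(
        String::from("meta_0"),
        Token { token: String::from("#user"), token_type: TokenType::OPERAND },
    )];

    // Same shape as the loop in main(): swap each placeholder back.
    for meta_token in special_tokens.iter() {
        for i in 0..tokens.len() {
            if meta_token.0 == tokens[i].token {
                tokens[i] = meta_token.1.clone();
                break; // only the first occurrence is restored
            }
        }
    }

    println!("{:?}", tokens);
}

Two properties of the new code worth noting: the inner break restores only the first occurrence of each placeholder, so a meta token appearing twice would be only partially restored; and the argument loop treats every command-line argument as a source file, so the -t and -l flags described in the usage comment are not actually parsed yet.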
src/preprocessor.rs
@@ -172,7 +172,6 @@ impl MetaRules {
         processed_code = value_regex
             .replace(processed_code.as_str(), meta_id.as_str())
             .to_string();
-        println!("Replace {} with {}.", meta_value, meta_id);
 
         // Safe id and token
         self.special_tokens.push((
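The replace() call shown in context here is where a meta value is swapped for its generated id before tokenizing; the removed println! was debug output, so dropping it quiets the preprocessor without changing the substitution. A small sketch of that substitution, assuming the regex crate (which the Regex-style replace() suggests) and a made-up meta value and id scheme, since language.toml and the rest of MetaRules are not part of this diff:

use regex::Regex; // assumption: the regex crate backs value_regex

fn main() {
    let mut processed_code = String::from("date -> String := \"#date_now\";");

    // Hypothetical meta value and generated id; the real ones come from
    // language.toml, which this diff does not show.
    let meta_value = "#date_now";
    let meta_id = "meta_0";

    let value_regex = Regex::new(&regex::escape(meta_value)).unwrap();
    // Regex::replace substitutes only the first match, like the call above.
    processed_code = value_regex
        .replace(processed_code.as_str(), meta_id)
        .to_string();

    assert_eq!(processed_code, "date -> String := \"meta_0\";");
}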
src/tokenizer.rs
@@ -1,7 +1,7 @@
 use std::fs;
 use toml::{Table, Value};
 
-#[derive(PartialEq)]
+#[derive(PartialEq, Debug)]
 pub enum TokenType {
     OPERAND,
     TERMINATOR,
@@ -21,10 +21,27 @@ pub struct Tokenizer {
 
 // Token
 // This is a token with a token type.
+#[derive(Debug)]
 pub struct Token {
     pub token: String,
     pub token_type: TokenType,
 }
 
+impl Clone for Token {
+    fn clone(&self) -> Token {
+        let token_type: TokenType = match self.token_type {
+            TokenType::OPERAND => TokenType::OPERAND,
+            TokenType::KEYWORD => TokenType::KEYWORD,
+            TokenType::TERMINATOR => TokenType::TERMINATOR,
+            TokenType::IDENTIFIER => TokenType::IDENTIFIER,
+        };
+        Token {
+            token: self.token.clone(),
+            token_type: token_type,
+        }
+    }
+}
+
 // Implementation of Tokenizer
 // Functions associated with the tokenizer struct and module.
 impl Tokenizer {
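On the new Clone implementation: the variant-by-variant match is only needed because TokenType does not itself derive Clone. As a possible simplification rather than what the commit does, both types could derive it; the variants below are exactly the ones visible in this diff:

// Same cloning behavior via derives instead of a manual impl.
#[derive(PartialEq, Debug, Clone)]
pub enum TokenType {
    OPERAND,
    KEYWORD,
    TERMINATOR,
    IDENTIFIER,
}

#[derive(Debug, Clone)]
pub struct Token {
    pub token: String,
    pub token_type: TokenType,
}

fn main() {
    let original = Token {
        token: String::from("#user"),
        token_type: TokenType::IDENTIFIER,
    };
    // Derived Clone performs the same field-by-field copy as the manual impl.
    let copy = original.clone();
    println!("{:?} {:?}", original, copy);
}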