-rw-r--r--  src/lex/mod.rs | 70
1 file changed, 41 insertions, 29 deletions
diff --git a/src/lex/mod.rs b/src/lex/mod.rs
index e39fd23..c54722f 100644
--- a/src/lex/mod.rs
+++ b/src/lex/mod.rs
@@ -1,3 +1,5 @@
+use std::collections::HashMap;
+
 #[derive(Debug, Copy, Clone, PartialEq)]
 pub struct Operator {
     token: char,
@@ -52,7 +54,36 @@ pub enum Token {
     RParen
 }
 
+fn get_functions() -> HashMap<&'static str, Token> {
+    return [
+        ("sin", Function::token_from_fn("sin".into(), |x| x.to_radians().sin())),
+        ("cos", Function::token_from_fn("cos".into(), |x| x.to_radians().cos())),
+        ("tan", Function::token_from_fn("tan".into(), |x| x.to_radians().tan())),
+        ("csc", Function::token_from_fn("csc".into(), |x| 1. / x.to_radians().sin())),
+        ("sec", Function::token_from_fn("sec".into(), |x| 1. / x.to_radians().cos())),
+        ("cot", Function::token_from_fn("cot".into(), |x| 1. / x.to_radians().tan())),
+        ("ln", Function::token_from_fn("ln".into(), |x| x.ln())),
+        ("log", Function::token_from_fn("log".into(), |x| x.log10())),
+        ("sqrt", Function::token_from_fn("sqrt".into(), |x| x.sqrt())),
+        ("ceil", Function::token_from_fn("ceil".into(), |x| x.ceil())),
+        ("floor", Function::token_from_fn("floor".into(), |x| x.floor())),
+    ].iter().cloned().collect();
+}
+
+fn get_operators() -> HashMap<char, Token> {
+    return [
+        ('+', Operator::token_from_op('+', |x, y| x + y, 2, true)),
+        ('-', Operator::token_from_op('-', |x, y| x - y, 2, true)),
+        ('*', Operator::token_from_op('*', |x, y| x * y, 3, true)),
+        ('/', Operator::token_from_op('/', |x, y| x / y, 3, true)),
+        ('^', Operator::token_from_op('^', |x, y| x.powf(y), 4, false)),
+    ].iter().cloned().collect();
+}
+
 pub fn lexer(input: &str) -> Result<Vec<Token>, String> {
+    let functions: HashMap<&str, Token> = get_functions();
+    let operators: HashMap<char, Token> = get_operators();
+
     let mut num_vec: String = String::new();
     let mut char_vec: String = String::new();
     let mut result: Vec<Token> = vec![];
@@ -65,17 +96,13 @@ pub fn lexer(input: &str) -> Result<Vec<Token>, String> {
                 let parse_num = num_vec.parse::<f64>().ok();
                 if let Some(x) = parse_num {
                     result.push(Token::Num(x));
-                    result.push(Operator::token_from_op('*', |x, y| x * y, 3, true));
+                    result.push(operators.get(&'*').unwrap().clone());
                     num_vec.clear();
                 }
                 char_vec.push(letter);
             },
             '+' | '-' => {
-                let op_token = match letter {
-                    '+' => Operator::token_from_op('+', |x, y| x + y, 2, true),
-                    '-' => Operator::token_from_op('-', |x, y| x - y, 2, true),
-                    _ => unreachable!()
-                };
+                let op_token = operators.get(&letter).unwrap().clone();
                 let parse_num = num_vec.parse::<f64>().ok();
                 if let Some(x) = parse_num {
                     result.push(Token::Num(x));
@@ -85,42 +112,27 @@ pub fn lexer(input: &str) -> Result<Vec<Token>, String> {
                     result.push(Token::LParen);
                     result.push(Token::Num((letter.to_string() + "1").parse::<f64>().unwrap()));
                     result.push(Token::RParen);
-                    result.push(Operator::token_from_op('*', |x, y| x * y, 2, true));
+                    result.push(operators.get(&'*').unwrap().clone());
                 }
             },
             '/' | '*' | '^' => {
                 drain_num_stack(&mut num_vec, &mut result);
-                let operator_token: Token = match letter {
-                    '/' => Operator::token_from_op('/', |x, y| x / y, 3, true),
-                    '*' => Operator::token_from_op('*', |x, y| x * y, 3, true),
-                    '^' => Operator::token_from_op('^', |x, y| x.powf(y), 4, false),
-                    _ => panic!("unexpected op whuuu"),
-                };
+                let operator_token: Token = operators.get(&letter).unwrap().clone();
                 result.push(operator_token);
             },
             '(' => {
                 if char_vec.len() > 0 {
-                    let funct = char_vec.clone();
-                    match &funct[..] {
-                        "sin" | "sine" => result.push(Function::token_from_fn("sin".into(), |x| x.to_radians().sin())),
-                        "cos" | "cosine" => result.push(Function::token_from_fn("cos".into(), |x| x.to_radians().cos())),
-                        "tan" | "tangent" => result.push(Function::token_from_fn("tan".into(), |x| x.to_radians().tan())),
-                        "csc" | "cosec" => result.push(Function::token_from_fn("csc".into(), |x| 1f64 / x.to_radians().sin())),
-                        "sec" | "secant" => result.push(Function::token_from_fn("sec".into(), |x| 1f64 / x.to_radians().cos())),
-                        "cot" | "cotangent" => result.push(Function::token_from_fn("cot".into(), |x| 1f64 / x.to_radians().tan())),
-                        "ln" => result.push(Function::token_from_fn("ln".into(), |x| x.ln())),
-                        "log" => result.push(Function::token_from_fn("log".into(), |x| x.log10())),
-                        "sqrt" => result.push(Function::token_from_fn("sqrt".into(), |x| x.sqrt())),
-                        "floor" => result.push(Function::token_from_fn("floor".into(), |x| x.floor())),
-                        "ceil" => result.push(Function::token_from_fn("ceil".into(), |x| x.ceil())),
-                        _ => return Err(format!("Unexpected function {}", funct))
-                    }
+                    if let Some(res) = functions.get(&char_vec[..]) {
+                        result.push(res.clone());
+                    } else {
+                        return Err(format!("Unexpected function {}", char_vec))
+                    }
                     char_vec.clear();
                 } else {
                     let parse_num = num_vec.parse::<f64>().ok();
                     if let Some(x) = parse_num {
                         result.push(Token::Num(x));
-                        result.push(Operator::token_from_op('*', |x, y| x * y, 3, true));
+                        result.push(operators.get(&'*').unwrap().clone());
                         num_vec.clear();
                     }
                 }
@@ -128,7 +140,7 @@ pub fn lexer(input: &str) -> Result<Vec<Token>, String> {
                 if let Some(x) = result.last() {
                     match x {
                         Token::RParen => {
-                            result.push(Operator::token_from_op('*', |x, y| x * y, 3, true));
+                            result.push(operators.get(&'*').unwrap().clone());
                         },
                         _ => {}
                     };
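
The change above replaces the per-branch Operator::token_from_op and Function::token_from_fn constructor calls with two lookup tables built once at the top of lexer() and cloned on each hit. Below is a minimal, self-contained sketch of that pattern; the Token enum and helper names are simplified stand-ins for illustration, not the crate's actual definitions.

use std::collections::HashMap;

// Simplified stand-in for the crate's Token/Operator types (illustrative only).
#[derive(Debug, Clone)]
enum Token {
    Op {
        token: char,
        operation: fn(f64, f64) -> f64, // non-capturing closures coerce to fn pointers
        precedence: u8,
        is_left_associative: bool,
    },
    Num(f64),
}

// Build the operator table once, mirroring the role of get_operators() in the patch.
fn operator_table() -> HashMap<char, Token> {
    [
        ('+', Token::Op { token: '+', operation: |x, y| x + y, precedence: 2, is_left_associative: true }),
        ('*', Token::Op { token: '*', operation: |x, y| x * y, precedence: 3, is_left_associative: true }),
        ('^', Token::Op { token: '^', operation: |x, y| x.powf(y), precedence: 4, is_left_associative: false }),
    ]
    .iter()
    .cloned()
    .collect()
}

fn main() {
    let operators = operator_table();
    // Each lexer branch now clones a prebuilt token instead of constructing one inline.
    let implicit_mul = operators.get(&'*').unwrap().clone();
    println!("{:?}", implicit_mul);
}

Cloning from one shared table keeps every call site in agreement on precedence and associativity, which the scattered constructors did not: one of the removed '*' constructions used precedence 2 while the others used 3.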