author     Ivan Tham <[email protected]>    2019-08-03 07:10:34 +0100
committer  Ivan Tham <[email protected]>    2019-08-03 07:15:43 +0100
commit     96db39bad36d0554ca5a48d6bc294e7aae6b2df6
tree       0d21fe935be80eab0da82ff4d24f85ebcc071c3b
parent     051f0e40bdd3157902fd5e94f71f9747ad2fb72c
Lazy static lex constants
Diffstat (limited to 'src')
-rw-r--r--  src/lex/mod.rs  115
1 file changed, 57 insertions(+), 58 deletions(-)
diff --git a/src/lex/mod.rs b/src/lex/mod.rs
index 2793699..c82c17e 100644
--- a/src/lex/mod.rs
+++ b/src/lex/mod.rs
@@ -2,6 +2,7 @@
  * Refer to LICENCE for more information.
  * */
 
+use lazy_static::lazy_static;
 use std::collections::HashMap;
 
 use crate::CONFIGURATION;
@@ -77,52 +78,54 @@ pub enum Token {
     RParen
 }
 
-fn get_constants() -> HashMap<&'static str, Token> {
-    return [
-        ("e", Token::Num(std::f64::consts::E)),
-        ("pi", Token::Num(std::f64::consts::PI)),
-    ].iter().cloned().collect();
-}
+lazy_static! {
+    static ref CONSTANTS: HashMap<&'static str, Token> = {
+        let mut m = HashMap::new();
+        m.insert("e", Token::Num(std::f64::consts::E));
+        m.insert("pi", Token::Num(std::f64::consts::PI));
+        m
+    };
 
-fn get_functions() -> HashMap<&'static str, Token> {
-    return [
-        ("sin", Function::token_from_fn("sin".into(), |x| is_radian_mode(x, CONFIGURATION.radian_mode).sin())),
-        ("cos", Function::token_from_fn("cos".into(), |x| is_radian_mode(x, CONFIGURATION.radian_mode).cos())),
-        ("tan", Function::token_from_fn("tan".into(), |x| is_radian_mode(x, CONFIGURATION.radian_mode).tan())),
-        ("csc", Function::token_from_fn("csc".into(), |x| is_radian_mode(x, CONFIGURATION.radian_mode).sin().recip())),
-        ("sec", Function::token_from_fn("sec".into(), |x| is_radian_mode(x, CONFIGURATION.radian_mode).cos().recip())),
-        ("cot", Function::token_from_fn("cot".into(), |x| is_radian_mode(x, CONFIGURATION.radian_mode).tan().recip())),
-        ("sinh", Function::token_from_fn("sinh".into(), |x| x.sinh())),
-        ("cosh", Function::token_from_fn("cosh".into(), |x| x.cosh())),
-        ("tanh", Function::token_from_fn("tanh".into(), |x| x.tanh())),
-        ("ln", Function::token_from_fn("ln".into(), |x| x.ln())),
-        ("log", Function::token_from_fn("log".into(), |x| x.log10())),
-        ("sqrt", Function::token_from_fn("sqrt".into(), |x| x.sqrt())),
-        ("ceil", Function::token_from_fn("ceil".into(), |x| x.ceil())),
-        ("floor", Function::token_from_fn("floor".into(), |x| x.floor())),
-        ("rad", Function::token_from_fn("rad".into(), |x| x.to_radians())),
-        ("deg", Function::token_from_fn("deg".into(), |x| x.to_degrees())),
-        ("abs", Function::token_from_fn("abs".into(), |x| x.abs())),
-        ("asin", Function::token_from_fn("asin".into(), |x| x.asin())),
-        ("acos", Function::token_from_fn("acos".into(), |x| x.acos())),
-        ("atan", Function::token_from_fn("atan".into(), |x| x.atan())),
-        ("acsc", Function::token_from_fn("acsc".into(), |x| (1./x).asin())),
-        ("asec", Function::token_from_fn("asec".into(), |x| (1./x).acos())),
-        ("acot", Function::token_from_fn("acot".into(), |x| (1./x).atan())),
+    static ref FUNCTIONS: HashMap<&'static str, Token> = {
+        let mut m = HashMap::new();
+        m.insert("sin", Function::token_from_fn("sin".into(), |x| is_radian_mode(x, CONFIGURATION.radian_mode).sin()));
+        m.insert("cos", Function::token_from_fn("cos".into(), |x| is_radian_mode(x, CONFIGURATION.radian_mode).cos()));
+        m.insert("tan", Function::token_from_fn("tan".into(), |x| is_radian_mode(x, CONFIGURATION.radian_mode).tan()));
+        m.insert("csc", Function::token_from_fn("csc".into(), |x| is_radian_mode(x, CONFIGURATION.radian_mode).sin().recip()));
+        m.insert("sec", Function::token_from_fn("sec".into(), |x| is_radian_mode(x, CONFIGURATION.radian_mode).cos().recip()));
+        m.insert("cot", Function::token_from_fn("cot".into(), |x| is_radian_mode(x, CONFIGURATION.radian_mode).tan().recip()));
+        m.insert("sinh", Function::token_from_fn("sinh".into(), |x| x.sinh()));
+        m.insert("cosh", Function::token_from_fn("cosh".into(), |x| x.cosh()));
+        m.insert("tanh", Function::token_from_fn("tanh".into(), |x| x.tanh()));
+        m.insert("ln", Function::token_from_fn("ln".into(), |x| x.ln()));
+        m.insert("log", Function::token_from_fn("log".into(), |x| x.log10()));
+        m.insert("sqrt", Function::token_from_fn("sqrt".into(), |x| x.sqrt()));
+        m.insert("ceil", Function::token_from_fn("ceil".into(), |x| x.ceil()));
+        m.insert("floor", Function::token_from_fn("floor".into(), |x| x.floor()));
+        m.insert("rad", Function::token_from_fn("rad".into(), |x| x.to_radians()));
+        m.insert("deg", Function::token_from_fn("deg".into(), |x| x.to_degrees()));
+        m.insert("abs", Function::token_from_fn("abs".into(), |x| x.abs()));
+        m.insert("asin", Function::token_from_fn("asin".into(), |x| x.asin()));
+        m.insert("acos", Function::token_from_fn("acos".into(), |x| x.acos()));
+        m.insert("atan", Function::token_from_fn("atan".into(), |x| x.atan()));
+        m.insert("acsc", Function::token_from_fn("acsc".into(), |x| (1./x).asin()));
+        m.insert("asec", Function::token_from_fn("asec".into(), |x| (1./x).acos()));
+        m.insert("acot", Function::token_from_fn("acot".into(), |x| (1./x).atan()));
         // single arg function s can be added here
-    ].iter().cloned().collect();
-}
+        m
+    };
 
-pub fn get_operators() -> HashMap<char, Token> {
-    return [
-        ('+', Operator::token_from_op('+', |x, y| x + y, 2, true)),
-        ('-', Operator::token_from_op('-', |x, y| x - y, 2, true)),
-        ('*', Operator::token_from_op('*', |x, y| x * y, 3, true)),
-        ('/', Operator::token_from_op('/', |x, y| x / y, 3, true)),
-        ('%', Operator::token_from_op('%', |x, y| x % y, 3, true)),
-        ('^', Operator::token_from_op('^', |x, y| x.powf(y) , 4, false)),
-        ('!', Operator::token_from_op('!', |x, _| factorial(x) , 4, true)),
-    ].iter().cloned().collect();
-}
+    static ref OPERATORS: HashMap<char, Token> = {
+        let mut m = HashMap::new();
+        m.insert('+', Operator::token_from_op('+', |x, y| x + y, 2, true));
+        m.insert('-', Operator::token_from_op('-', |x, y| x - y, 2, true));
+        m.insert('*', Operator::token_from_op('*', |x, y| x * y, 3, true));
+        m.insert('/', Operator::token_from_op('/', |x, y| x / y, 3, true));
+        m.insert('%', Operator::token_from_op('%', |x, y| x % y, 3, true));
+        m.insert('^', Operator::token_from_op('^', |x, y| x.powf(y) , 4, false));
+        m.insert('!', Operator::token_from_op('!', |x, _| factorial(x) , 4, true));
+        m
+    };
+}
 
 fn factorial (n: f64) -> f64 {
@@ -130,10 +133,6 @@ fn factorial (n: f64) -> f64 {
 }
 
 pub fn lexer(input: &str, prev_ans: f64) -> Result<Vec<Token>, CalcError> {
-    let constants: HashMap<&str, Token> = get_constants();
-    let functions: HashMap<&str, Token> = get_functions();
-    let operators: HashMap<char, Token> = get_operators();
-
     let mut num_vec: String = String::new();
     let mut char_vec: String = String::new();
     let mut result: Vec<Token> = vec![];
@@ -143,7 +142,7 @@ pub fn lexer(input: &str, prev_ans: f64) -> Result<Vec<Token>, CalcError> {
         match letter {
             '0'...'9' | '.' => {
                 if char_vec.len() > 0 {
-                    if let Some(_) = functions.get(&char_vec[..]) {
+                    if let Some(_) = FUNCTIONS.get(&char_vec[..]) {
                         return Err(CalcError::Syntax(format!("Function '{}' expected parentheses", char_vec)))
                     } else {
                         return Err(CalcError::Syntax(format!("Unexpected character '{}'", char_vec)))
@@ -154,7 +153,7 @@ pub fn lexer(input: &str, prev_ans: f64) -> Result<Vec<Token>, CalcError> {
             },
             '_' => {
                 if char_vec.len() > 0 {
-                    if let Some(_) = functions.get(&char_vec[..]) {
+                    if let Some(_) = FUNCTIONS.get(&char_vec[..]) {
                         return Err(CalcError::Syntax(format!("Function '{}' expected parentheses", char_vec)))
                     } else {
                         return Err(CalcError::Syntax(format!("Unexpected character '{}'", char_vec)))
@@ -163,7 +162,7 @@ pub fn lexer(input: &str, prev_ans: f64) -> Result<Vec<Token>, CalcError> {
                 let parse_num = num_vec.parse::<f64>().ok();
                 if let Some(x) = parse_num {
                     result.push(Token::Num(x));
-                    result.push(operators.get(&'*').unwrap().clone());
+                    result.push(OPERATORS.get(&'*').unwrap().clone());
                     num_vec.clear();
                 }
                 last_char_is_op = false;
@@ -173,21 +172,21 @@ pub fn lexer(input: &str, prev_ans: f64) -> Result<Vec<Token>, CalcError> {
                 let parse_num = num_vec.parse::<f64>().ok();
                 if let Some(x) = parse_num {
                     result.push(Token::Num(x));
-                    result.push(operators.get(&'*').unwrap().clone());
+                    result.push(OPERATORS.get(&'*').unwrap().clone());
                     num_vec.clear();
                 }
                 char_vec.push(letter);
                 last_char_is_op = false;
             },
             '+' | '-' => {
-                let op_token = operators.get(&letter).unwrap().clone();
+                let op_token = OPERATORS.get(&letter).unwrap().clone();
                 let parse_num = num_vec.parse::<f64>().ok();
                 if !last_char_is_op {
                     if let Some(x) = parse_num {
                         result.push(Token::Num(x));
                         num_vec.clear();
                         last_char_is_op = true;
-                    } else if let Some(token) = constants.get(&char_vec[..]) {
+                    } else if let Some(token) = CONSTANTS.get(&char_vec[..]) {
                         result.push(token.clone());
                         char_vec.clear();
                         last_char_is_op = true;
@@ -202,7 +201,7 @@ pub fn lexer(input: &str, prev_ans: f64) -> Result<Vec<Token>, CalcError> {
             },
             '/' | '*' | '%' | '^' | '!' => {
                 drain_stack(&mut num_vec, &mut char_vec, &mut result);
-                let operator_token: Token = operators.get(&letter).unwrap().clone();
+                let operator_token: Token = OPERATORS.get(&letter).unwrap().clone();
                 result.push(operator_token);
                 last_char_is_op = true;
                 if letter == '!' {
@@ -212,7 +211,7 @@ pub fn lexer(input: &str, prev_ans: f64) -> Result<Vec<Token>, CalcError> {
             },
             '(' => {
                 if char_vec.len() > 0 {
-                    if let Some(res) = functions.get(&char_vec[..]) {
+                    if let Some(res) = FUNCTIONS.get(&char_vec[..]) {
                         result.push(res.clone());
                     } else {
                         return Err(CalcError::Syntax(format!("Unknown function '{}'", char_vec)))
@@ -222,7 +221,7 @@ pub fn lexer(input: &str, prev_ans: f64) -> Result<Vec<Token>, CalcError> {
                     let parse_num = num_vec.parse::<f64>().ok();
                     if let Some(x) = parse_num {
                         result.push(Token::Num(x));
-                        result.push(operators.get(&'*').unwrap().clone());
+                        result.push(OPERATORS.get(&'*').unwrap().clone());
                         num_vec.clear();
                     }
                 }
@@ -230,7 +229,7 @@ pub fn lexer(input: &str, prev_ans: f64) -> Result<Vec<Token>, CalcError> {
                 if let Some(x) = result.last() {
                     match x {
                         Token::RParen => {
-                            result.push(operators.get(&'*').unwrap().clone());
+                            result.push(OPERATORS.get(&'*').unwrap().clone());
                         },
                         _ => {}
                     };
@@ -259,7 +258,7 @@ fn drain_stack(num_vec: &mut String, char_vec: &mut String, result: &mut Vec<Tok
     if let Some(x) = parse_num {
         result.push(Token::Num(x));
         num_vec.clear();
-    } else if let Some(token) = get_constants().get(&char_vec[..]) {
+    } else if let Some(token) = CONSTANTS.get(&char_vec[..]) {
         result.push(token.clone());
         char_vec.clear();
     }
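
For context, lazy_static! (from the lazy_static crate) generates a static that is initialized exactly once, on first access, and reused thereafter; after this commit the CONSTANTS, FUNCTIONS and OPERATORS maps are built a single time per process instead of being rebuilt by get_constants()/get_functions()/get_operators() on every call to lexer() and drain_stack(). The standalone sketch below shows the same pattern in isolation; the f64-valued map and the main() function are illustrative only and not code from this repository (it assumes only a lazy_static dependency in Cargo.toml).

use std::collections::HashMap;

use lazy_static::lazy_static;

lazy_static! {
    // Built lazily on the first dereference, then shared read-only for the
    // rest of the program; &'static str keys keep lookups allocation-free.
    static ref CONSTANTS: HashMap<&'static str, f64> = {
        let mut m = HashMap::new();
        m.insert("e", std::f64::consts::E);
        m.insert("pi", std::f64::consts::PI);
        m
    };
}

fn main() {
    // Later accesses only dereference the already-built map.
    if let Some(value) = CONSTANTS.get("pi") {
        println!("pi = {}", value);
    }
}

Newer code often reaches for std::sync::OnceLock (stable since Rust 1.70) or the once_cell crate for the same one-time initialization, but at the time of this 2019 commit lazy_static was the usual choice.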