author     NerdyPepper <[email protected]>    2019-03-29 16:24:13 +0000
committer  NerdyPepper <[email protected]>    2019-03-29 16:24:13 +0000
commit     b27bf74c5f5daf4bbe20416b8a7b521c7a4eacc5 (patch)
tree       c7e68ffe3b686c9e68901bc28f1b6bd19464161a /src
parent     028396f536141fa03623265e14a36e3d8eef5e90 (diff)
add inverse trigonometric functions, remainder operator
Diffstat (limited to 'src')
-rw-r--r--  src/lex/mod.rs | 11 +++++++++--
1 file changed, 9 insertions(+), 2 deletions(-)
diff --git a/src/lex/mod.rs b/src/lex/mod.rs
index 39538ca..51f124c 100644
--- a/src/lex/mod.rs
+++ b/src/lex/mod.rs
@@ -94,7 +94,13 @@ fn get_functions() -> HashMap<&'static str, Token> {
         ("rad", Function::token_from_fn("rad".into(), |x| x.to_radians())),
         ("deg", Function::token_from_fn("deg".into(), |x| x.to_degrees())),
         ("abs", Function::token_from_fn("abs".into(), |x| x.abs())),
-        // single arg functions can be added here
+        ("asin", Function::token_from_fn("asin".into(), |x| x.asin())),
+        ("acos", Function::token_from_fn("acos".into(), |x| x.acos())),
+        ("atan", Function::token_from_fn("atan".into(), |x| x.atan())),
+        ("acsc", Function::token_from_fn("acsc".into(), |x| (1./x).asin())),
+        ("asec", Function::token_from_fn("asec".into(), |x| (1./x).acos())),
+        ("acot", Function::token_from_fn("acot".into(), |x| (1./x).atan())),
+        // single arg functions can be added here
     ].iter().cloned().collect();
 }

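The six additions register the inverse trigonometric functions as single-argument lexer entries; the last three are built from the reciprocal identities visible in the closures: acsc(x) = asin(1/x), asec(x) = acos(1/x), acot(x) = atan(1/x). The following standalone sketch (plain Rust, independent of the project's Function and Token types) spot-checks those identities at well-known angles:

```rust
fn main() {
    // Same reciprocal identities as the closures in the diff:
    // acsc(x) = asin(1/x), asec(x) = acos(1/x), acot(x) = atan(1/x).
    let acsc = |x: f64| (1. / x).asin();
    let asec = |x: f64| (1. / x).acos();
    let acot = |x: f64| (1. / x).atan();

    assert!((acsc(2.0) - std::f64::consts::FRAC_PI_6).abs() < 1e-12); // asin(0.5) = pi/6
    assert!((asec(2.0) - std::f64::consts::FRAC_PI_3).abs() < 1e-12); // acos(0.5) = pi/3
    assert!((acot(1.0) - std::f64::consts::FRAC_PI_4).abs() < 1e-12); // atan(1)   = pi/4

    // Note: atan(1/x) places acot of a negative argument in (-pi/2, 0),
    // not in (pi/2, pi) as the other common convention would.
    assert!(acot(-1.0) < 0.0);
}
```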
@@ -104,6 +110,7 @@ fn get_operators() -> HashMap<char, Token> {
         ('-', Operator::token_from_op('-', |x, y| x - y, 2, true)),
         ('*', Operator::token_from_op('*', |x, y| x * y, 3, true)),
         ('/', Operator::token_from_op('/', |x, y| x / y, 3, true)),
+        ('%', Operator::token_from_op('%', |x, y| x % y, 3, true)),
         ('^', Operator::token_from_op('^', |x, y| x.powf(y) , 4, false)),
     ].iter().cloned().collect();
 }
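The new '%' entry reuses Rust's built-in remainder on f64, which behaves like C's fmod: the remainder of truncating division, carrying the sign of the dividend. It is registered with the same third argument (3) as '*' and '/', which reads as a precedence level, and the same trailing true, which appears to mark left-associativity (note that '^' passes false). Under that reading, an input like 7 % 3 * 2 should group as (7 % 3) * 2. A small self-contained check of both points, written against plain f64 rather than the project's Operator type:

```rust
fn main() {
    // Rust's '%' on f64 is the truncated-division remainder (like C's fmod):
    // the result takes the sign of the dividend.
    assert_eq!(10.0_f64 % 3.0, 1.0);
    assert_eq!((-10.0_f64) % 3.0, -1.0);
    assert_eq!(7.5_f64 % 2.0, 1.5);

    // With '%' at the same precedence as '*' and '/' and left-associative,
    // "7 % 3 * 2" should group as "(7 % 3) * 2", not "7 % (3 * 2)".
    // Both groupings are evaluated here to show the difference.
    assert_eq!((7.0_f64 % 3.0) * 2.0, 2.0);
    assert_eq!(7.0_f64 % (3.0 * 2.0), 1.0);
}
```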
@@ -157,7 +164,7 @@ pub fn lexer(input: &str) -> Result<Vec<Token>, CalcError> {
                     result.push(Operator::token_from_op('*', |x, y| x * y, 10, true));
                 }
             },
-            '/' | '*' | '^' => {
+            '/' | '*' | '%' | '^' => {
                 drain_num_stack(&mut num_vec, &mut result);
                 let operator_token: Token = operators.get(&letter).unwrap().clone();
                 result.push(operator_token);
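With '%' added to this match arm, the character is now resolved through the operator table like '/', '*' and '^' instead of falling through as an unrecognised symbol. A simplified sketch of that lookup, using an illustrative HashMap of plain function pointers in place of the project's Operator and Token types (the names below are stand-ins, not the real API):

```rust
use std::collections::HashMap;

fn main() {
    // Simplified stand-in for the operator table: char -> binary function.
    // The real table maps each char to a Token built by Operator::token_from_op.
    let mut operators: HashMap<char, fn(f64, f64) -> f64> = HashMap::new();
    operators.insert('/', |x, y| x / y);
    operators.insert('*', |x, y| x * y);
    operators.insert('%', |x, y| x % y); // the newly handled character
    operators.insert('^', |x: f64, y: f64| x.powf(y));

    // The extended match arm means '%' now reaches this lookup.
    let letter = '%';
    let apply = operators.get(&letter).copied().unwrap();
    assert_eq!(apply(10.0, 3.0), 1.0);
}
```

In the real lexer, any pending number is first flushed into the token stream (the drain_num_stack call above) before the operator token is pushed.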