| author | NerdyPepper <[email protected]> | 2019-03-23 05:12:45 +0000 |
|---|---|---|
| committer | NerdyPepper <[email protected]> | 2019-03-23 05:12:45 +0000 |
| commit | ed0d484e0f1a76bded753877122f9807211fe969 (patch) | |
| tree | 71d8048844d8d017c797e73fe8141cf75b27c812 /src | |
| parent | de4c9d0cc54b60b4ff7bef568f3643bdbbbf22f4 (diff) | |

use dirty precedence hack to fix unary once and for all
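
The hack in question: eva's lexer rewrites a unary sign into an explicit `( ±1 )` group followed by an implicit `*`, and this commit gives that implicit `*` precedence 10, above every real operator, so the sign binds to its operand before `/`, `*`, or `^` can claim it. Below is a minimal sketch of why the precedence matters, assuming a textbook shunting-yard parser; the `Tok` enum, `apply`, and `eval` are illustrative stand-ins, not eva's actual `Token`/`Operator` types.

```rust
// Toy shunting-yard evaluator -- NOT eva's real parser; Tok, apply and
// eval are illustrative stand-ins for eva's Token/Operator machinery.
#[derive(Clone, Copy)]
enum Tok {
    Num(f64),
    Op(char, u8, bool), // symbol, precedence, is_left_associative
    LParen,
    RParen,
}

fn apply(op: char, x: f64, y: f64) -> f64 {
    match op {
        '+' => x + y,
        '-' => x - y,
        '*' => x * y,
        '/' => x / y,
        '^' => x.powf(y),
        _ => unreachable!(),
    }
}

fn eval(tokens: &[Tok]) -> f64 {
    let (mut out, mut ops): (Vec<Tok>, Vec<Tok>) = (vec![], vec![]);
    for &t in tokens {
        match t {
            Tok::Num(_) => out.push(t),
            Tok::Op(_, p, left_assoc) => {
                // Pop stacked operators that bind at least as tightly;
                // an LParen on top fails the pattern and stops the loop.
                while let Some(&Tok::Op(_, q, _)) = ops.last() {
                    if q > p || (q == p && left_assoc) {
                        out.push(ops.pop().unwrap());
                    } else {
                        break;
                    }
                }
                ops.push(t);
            }
            Tok::LParen => ops.push(t),
            Tok::RParen => {
                while let Some(&top) = ops.last() {
                    ops.pop();
                    if matches!(top, Tok::LParen) { break; }
                    out.push(top);
                }
            }
        }
    }
    while let Some(t) = ops.pop() { out.push(t); }
    // Evaluate the resulting RPN stream.
    let mut stack: Vec<f64> = vec![];
    for t in out {
        match t {
            Tok::Num(n) => stack.push(n),
            Tok::Op(c, _, _) => {
                let (y, x) = (stack.pop().unwrap(), stack.pop().unwrap());
                stack.push(apply(c, x, y));
            }
            _ => unreachable!(),
        }
    }
    stack.pop().unwrap()
}

fn main() {
    // "5 / -5" after the lexer's unary rewrite: 5 / ( -1 ) * 5,
    // where the trailing * is the implicit multiplication.
    let tokens = |star_prec| vec![
        Tok::Num(5.0),
        Tok::Op('/', 3, true),
        Tok::LParen, Tok::Num(-1.0), Tok::RParen,
        Tok::Op('*', star_prec, true), // the implicit `*`
        Tok::Num(5.0),
    ];
    // Precedence 10 (this commit): groups as 5 / ((-1) * 5) = -1.
    println!("{}", eval(&tokens(10)));
    // Ordinary precedence 3: left-associativity pops `/` first,
    // giving (5 / -1) * 5 = -25 -- the old, wrong unary behaviour.
    println!("{}", eval(&tokens(3)));
}
```

Running it prints `-1` then `-25`: the precedence-10 rewrite is what keeps `5/-5` from being read as `(5 / -1) * 5`.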
Diffstat (limited to 'src')
| -rw-r--r-- | src/lex/mod.rs | 11 |
|---|---|---|

1 file changed, 7 insertions, 4 deletions

```diff
diff --git a/src/lex/mod.rs b/src/lex/mod.rs
index a4ea952..9912ca3 100644
--- a/src/lex/mod.rs
+++ b/src/lex/mod.rs
@@ -70,6 +70,7 @@ fn get_functions() -> HashMap<&'static str, Token> {
         ("sqrt", Function::token_from_fn("sqrt".into(), |x| x.sqrt())),
         ("ceil", Function::token_from_fn("ceil".into(), |x| x.ceil())),
         ("floor", Function::token_from_fn("floor".into(), |x| x.floor())),
+        // single arg functions can be added here
     ].iter().cloned().collect();
 }
 
@@ -79,7 +80,7 @@ fn get_operators() -> HashMap<char, Token> {
         ('-', Operator::token_from_op('-', |x, y| x - y, 2, true)),
         ('*', Operator::token_from_op('*', |x, y| x * y, 3, true)),
         ('/', Operator::token_from_op('/', |x, y| x / y, 3, true)),
-        ('^', Operator::token_from_op('^', |x, y| x.powf(y) , 4, true)),
+        ('^', Operator::token_from_op('^', |x, y| x.powf(y) , 4, false)),
     ].iter().cloned().collect();
 }
 
@@ -123,7 +124,7 @@ pub fn lexer(input: &str) -> Result<Vec<Token>, String> {
                     result.push(Token::LParen);
                     result.push(Token::Num((letter.to_string() + "1").parse::<f64>().unwrap()));
                     result.push(Token::RParen);
-                    result.push(operators.get(&'*').unwrap().clone());
+                    result.push(Operator::token_from_op('*', |x, y| x * y, 10, true));
                 }
             },
             '/' | '*' | '^' => {
@@ -132,6 +133,8 @@ pub fn lexer(input: &str) -> Result<Vec<Token>, String> {
                 result.push(operator_token);
                 last_char_is_op = true; // 5 / -5
                                         //     ^---- unary
+                // TODO: parse right associative followed by unary properly
+                // 2^+5 is parsed as 2^1*5 = 10
             },
             '(' => {
                 if char_vec.len() > 0 {
@@ -161,7 +164,8 @@ pub fn lexer(input: &str) -> Result<Vec<Token>, String> {
                 result.push(Token::LParen);
                 last_char_is_op = true; // unary + or - if a lparen was encountered
                                         // (-5 + 6) or (+6 + 7)
-                                        //  ^-----------^-----unary
+                                        //  ^           ^
+                                        //  `-----------`----unary
             },
             ')' => {
                 drain_num_stack(&mut num_vec, &mut result);
@@ -177,7 +181,6 @@ pub fn lexer(input: &str) -> Result<Vec<Token>, String> {
             }
         }
     }
     drain_num_stack(&mut num_vec, &mut result);
-    println!("{:?}", result);
     Ok(result)
 }
 
```
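
A smaller change rides along in the `get_operators()` hunk: the `^` entry flips its final flag from `true` to `false`. Assuming that argument is `token_from_op`'s is-left-associative flag (the constructor's signature isn't shown in this diff), `^` now chains to the right, which is the conventional reading of exponentiation. A quick sanity check in plain Rust, independent of eva's types:

```rust
// Right- vs left-associative exponentiation: 2^3^2 should read 2^(3^2).
fn main() {
    let right_assoc = 2f64.powf(3f64.powf(2.0)); // 2^(3^2) = 512
    let left_assoc = (2f64.powf(3.0)).powf(2.0); // (2^3)^2 = 64
    println!("{right_assoc} vs {left_assoc}");
}
```

The new TODO records the corner this still cuts: a unary sign directly after `^`, as in `2^+5`, goes through the same `( 1 ) *` rewrite and evaluates as `2^1*5 = 10` rather than `32`.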