Fix clippy errors

2025-05-26 15:50:22 -05:00
parent da92f241eb
commit a400730c88
3 changed files with 45 additions and 63 deletions


@@ -4,18 +4,13 @@ use crate::tokenizer::Token;
 use std::iter::Peekable;
 fn is_mul_text_token(text_token: &str) -> bool {
-    if text_token.ends_with("mul") {
-        true
-    } else {
-        false
-    }
+    text_token.ends_with("mul")
 }
 fn parse_mul<'a>(token_iter: &mut Peekable<impl Iterator<Item = &'a Token>>) -> Option<MulOp> {
     // Take OpenParenthesis or return None
-    if !token_iter.next_if_eq(&&Token::OpenParenthesis).is_some() {
-        return None;
-    }
+    token_iter.next_if_eq(&&Token::OpenParenthesis)?;
     // Take Number or return None
     let lhs =
         if let Some(Token::Number(lhs)) = token_iter.next_if(|v| matches!(v, Token::Number(_))) {
@@ -23,10 +18,10 @@ fn parse_mul<'a>(token_iter: &mut Peekable<impl Iterator<Item = &'a Token>>) ->
         } else {
             return None;
         };
     // Take Comma or return None
-    if !token_iter.next_if_eq(&&Token::Comma).is_some() {
-        return None;
-    }
+    token_iter.next_if_eq(&&Token::Comma)?;
     // Take Number or return None
     let rhs = if let Some(Token::Number(rhs)) =
         token_iter.next_if(|v: &&Token| matches!(v, Token::Number(_)))
@@ -35,78 +30,63 @@ fn parse_mul<'a>(token_iter: &mut Peekable<impl Iterator<Item = &'a Token>>) ->
     } else {
         return None;
     };
     // Take CloseParenthesis or return None
-    if !token_iter.next_if_eq(&&Token::CloseParenthesis).is_some() {
-        return None;
-    }
+    token_iter.next_if_eq(&&Token::CloseParenthesis)?;
     Some(MulOp::new(lhs, rhs))
 }
 fn is_do_text_token(text_token: &str) -> bool {
-    if text_token.ends_with("do") {
-        true
-    } else {
-        false
-    }
+    text_token.ends_with("do")
 }
 fn parse_do<'a>(token_iter: &mut Peekable<impl Iterator<Item = &'a Token>>) -> Option<DoOp> {
     // Take OpenParenthesis or return None
-    if !token_iter.next_if_eq(&&Token::OpenParenthesis).is_some() {
-        return None;
-    }
+    token_iter.next_if_eq(&&Token::OpenParenthesis)?;
     // Take CloseParenthesis or return None
-    if !token_iter.next_if_eq(&&Token::CloseParenthesis).is_some() {
-        return None;
-    }
+    token_iter.next_if_eq(&&Token::CloseParenthesis)?;
     Some(DoOp::new())
 }
 fn is_dont_text_token(text_token: &str) -> bool {
-    if text_token.ends_with("don't") {
-        true
-    } else {
-        false
-    }
+    text_token.ends_with("don't")
 }
 fn parse_dont<'a>(token_iter: &mut Peekable<impl Iterator<Item = &'a Token>>) -> Option<DontOp> {
     // Take OpenParenthesis or return None
-    if !token_iter.next_if_eq(&&Token::OpenParenthesis).is_some() {
-        return None;
-    }
+    token_iter.next_if_eq(&&Token::OpenParenthesis)?;
     // Take CloseParenthesis or return None
-    if !token_iter.next_if_eq(&&Token::CloseParenthesis).is_some() {
-        return None;
-    }
+    token_iter.next_if_eq(&&Token::CloseParenthesis)?;
     Some(DontOp::new())
 }
 /// Parse all tokens into Ops
-pub fn parse_tokens(tokens: &Vec<Token>) -> Vec<Ops> {
+pub fn parse_tokens(tokens: &[Token]) -> Vec<Ops> {
     let mut tokens = tokens.iter().peekable();
     let mut muls: Vec<Ops> = Vec::new();
     while let Some(token) = tokens.next() {
-        match token {
-            Token::Text(t) => {
-                if is_mul_text_token(t) {
-                    let op = parse_mul(tokens.by_ref());
-                    if let Some(op) = op {
-                        muls.push(Ops::Mul(op));
-                    }
-                } else if is_do_text_token(t) {
-                    let op = parse_do(tokens.by_ref());
-                    if let Some(op) = op {
-                        muls.push(Ops::Do(op));
-                    }
-                } else if is_dont_text_token(t) {
-                    let op = parse_dont(tokens.by_ref());
-                    if let Some(op) = op {
-                        muls.push(Ops::Dont(op));
-                    }
-                }
-            }
-            _ => (),
-        }
+        if let Token::Text(t) = token {
+            if is_mul_text_token(t) {
+                let op = parse_mul(tokens.by_ref());
+                if let Some(op) = op {
+                    muls.push(Ops::Mul(op));
+                }
+            } else if is_do_text_token(t) {
+                let op = parse_do(tokens.by_ref());
+                if let Some(op) = op {
+                    muls.push(Ops::Do(op));
+                }
+            } else if is_dont_text_token(t) {
+                let op = parse_dont(tokens.by_ref());
+                if let Some(op) = op {
+                    muls.push(Ops::Dont(op));
+                }
+            }
+        }
     }
     muls
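
For reference, the edits above follow standard clippy suggestions: return the boolean expression directly instead of wrapping it in an if/else that yields true or false; use the ? operator to propagate None instead of an explicit is_some() check followed by return None; accept a slice (&[Token]) rather than &Vec<Token>; and replace a match whose only non-trivial arm is Token::Text with an if let. The sketch below is a minimal, self-contained illustration of those idioms, not code from this repository: the item names (Tok, parse_pair, sum_numbers) are invented for the example, and the clippy lint names in the comments are my reading of the diff, not something stated in the commit.

// Standalone sketch of the clippy idioms applied in this commit.
// Tok, parse_pair, and sum_numbers are illustrative names only.
use std::iter::Peekable;

#[derive(PartialEq)]
enum Tok {
    Open,
    Num(i64),
    Close,
}

// Return the boolean expression directly instead of
// `if s.ends_with("mul") { true } else { false }` (clippy::needless_bool).
fn is_mul(s: &str) -> bool {
    s.ends_with("mul")
}

// `iter.next_if_eq(..)?;` propagates the None case in one line, replacing
// `if !iter.next_if_eq(..).is_some() { return None; }` (the shape the
// clippy::question_mark family of lints pushes toward).
fn parse_pair<'a>(iter: &mut Peekable<impl Iterator<Item = &'a Tok>>) -> Option<(i64, i64)> {
    iter.next_if_eq(&&Tok::Open)?;
    let a = if let Some(Tok::Num(a)) = iter.next_if(|t| matches!(t, Tok::Num(_))) {
        *a
    } else {
        return None;
    };
    let b = if let Some(Tok::Num(b)) = iter.next_if(|t| matches!(t, Tok::Num(_))) {
        *b
    } else {
        return None;
    };
    iter.next_if_eq(&&Tok::Close)?;
    Some((a, b))
}

// Take `&[Tok]` rather than `&Vec<Tok>` (clippy::ptr_arg), and use `if let`
// where a `match` would only have one meaningful arm plus `_ => ()`
// (clippy::single_match).
fn sum_numbers(tokens: &[Tok]) -> i64 {
    let mut total = 0;
    for tok in tokens {
        if let Tok::Num(n) = tok {
            total += *n;
        }
    }
    total
}

fn main() {
    assert!(is_mul("xmul"));
    let toks = vec![Tok::Open, Tok::Num(2), Tok::Num(3), Tok::Close];
    assert_eq!(parse_pair(&mut toks.iter().peekable()), Some((2, 3)));
    assert_eq!(sum_numbers(&toks), 5);
}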