diff --git a/day_01/src/main.rs b/day_01/src/main.rs
index 29e145c..07e447c 100644
--- a/day_01/src/main.rs
+++ b/day_01/src/main.rs
@@ -20,7 +20,7 @@ fn parse_line(line: &str) -> Result<(i64, i64), Box<dyn Error>> {
     // Parse and return our two elements
     let first_element = split_line[0].parse::<i64>()?;
     let second_element = split_line[1].parse::<i64>()?;
-    return Ok((first_element, second_element));
+    Ok((first_element, second_element))
 }
 
 /// Part 1
@@ -108,5 +108,5 @@ fn main() -> Result<(), Box<dyn Error>> {
         calculate_similarity(left_list, right_list)
     );
 
-    return Ok(());
+    Ok(())
 }
diff --git a/day_02/src/main.rs b/day_02/src/main.rs
index 5d853a7..0403145 100644
--- a/day_02/src/main.rs
+++ b/day_02/src/main.rs
@@ -65,11 +65,11 @@ fn compare_levels(curr_level: i64, next_level: i64) -> LevelTransition {
         TransitionDirection::Desc
     };
 
-    return LevelTransition { state, direction };
+    LevelTransition { state, direction }
 }
 
 /// Tests if a report is safe
-fn is_report_safe_no_dampening(report: &Vec<i64>) -> bool {
+fn is_report_safe_no_dampening(report: &[i64]) -> bool {
     // Turn the report into a bunch of states for each adjacent pair in the report
     let report: Vec<LevelTransition> = report
         .windows(2)
@@ -98,13 +98,14 @@ fn is_report_safe_no_dampening(report: &Vec<i64>) -> bool {
         // Setup for next iteration
         prev_transition = level_transition;
     }
-    return true;
+
+    true
 }
 
 /// Tests if a report is safe or code be made safe if you remove a single
 /// level from the report
-fn is_report_safe_with_dampening_brute_force(report: &Vec<i64>) -> bool {
-    if is_report_safe_no_dampening(&report) {
+fn is_report_safe_with_dampening_brute_force(report: &[i64]) -> bool {
+    if is_report_safe_no_dampening(report) {
         return true;
     }
 
@@ -115,7 +116,8 @@ fn is_report_safe_with_dampening_brute_force(report: &Vec<i64>) -> bool {
             return true;
         }
     }
-    return false;
+
+    false
 }
 
 // fn compare_levels_3(prev_level: i64, curr_level: i64, next_level: i64) -> LevelTransition {
diff --git a/day_03/src/parser.rs b/day_03/src/parser.rs
index 3b40681..565e974 100644
--- a/day_03/src/parser.rs
+++ b/day_03/src/parser.rs
@@ -4,18 +4,13 @@ use crate::tokenizer::Token;
 use std::iter::Peekable;
 
 fn is_mul_text_token(text_token: &str) -> bool {
-    if text_token.ends_with("mul") {
-        true
-    } else {
-        false
-    }
+    text_token.ends_with("mul")
 }
 
 fn parse_mul<'a>(token_iter: &mut Peekable<std::slice::Iter<'a, Token>>) -> Option<MulOp> {
     // Take OpenParenthesis or return None
-    if !token_iter.next_if_eq(&&Token::OpenParenthesis).is_some() {
-        return None;
-    }
+    token_iter.next_if_eq(&&Token::OpenParenthesis)?;
+
     // Take Number or return None
     let lhs = if let Some(Token::Number(lhs)) =
         token_iter.next_if(|v| matches!(v, Token::Number(_))) {
@@ -23,10 +18,10 @@ fn parse_mul<'a>(token_iter: &mut Peekable<std::slice::Iter<'a, Token>>) ->
     } else {
         return None;
     };
+
     // Take Comma or return None
-    if !token_iter.next_if_eq(&&Token::Comma).is_some() {
-        return None;
-    }
+    token_iter.next_if_eq(&&Token::Comma)?;
+
     // Take Number or return None
     let rhs = if let Some(Token::Number(rhs)) =
         token_iter.next_if(|v: &&Token| matches!(v, Token::Number(_)))
@@ -35,78 +30,63 @@ fn parse_mul<'a>(token_iter: &mut Peekable<std::slice::Iter<'a, Token>>) ->
     } else {
         return None;
     };
+
     // Take CloseParenthesis or return None
-    if !token_iter.next_if_eq(&&Token::CloseParenthesis).is_some() {
-        return None;
-    }
+    token_iter.next_if_eq(&&Token::CloseParenthesis)?;
+
     Some(MulOp::new(lhs, rhs))
 }
 
 fn is_do_text_token(text_token: &str) -> bool {
-    if text_token.ends_with("do") {
-        true
-    } else {
-        false
-    }
+    text_token.ends_with("do")
 }
 
 fn parse_do<'a>(token_iter: &mut Peekable<std::slice::Iter<'a, Token>>) -> Option<DoOp> {
     // Take OpenParenthesis or return None
-    if !token_iter.next_if_eq(&&Token::OpenParenthesis).is_some() {
-        return None;
-    }
+    token_iter.next_if_eq(&&Token::OpenParenthesis)?;
+
     // Take CloseParenthesis or return None
-    if !token_iter.next_if_eq(&&Token::CloseParenthesis).is_some() {
-        return None;
-    }
+    token_iter.next_if_eq(&&Token::CloseParenthesis)?;
+
     Some(DoOp::new())
 }
 
 fn is_dont_text_token(text_token: &str) -> bool {
-    if text_token.ends_with("don't") {
-        true
-    } else {
-        false
-    }
+    text_token.ends_with("don't")
 }
 
 fn parse_dont<'a>(token_iter: &mut Peekable<std::slice::Iter<'a, Token>>) -> Option<DontOp> {
     // Take OpenParenthesis or return None
-    if !token_iter.next_if_eq(&&Token::OpenParenthesis).is_some() {
-        return None;
-    }
+    token_iter.next_if_eq(&&Token::OpenParenthesis)?;
+
     // Take CloseParenthesis or return None
-    if !token_iter.next_if_eq(&&Token::CloseParenthesis).is_some() {
-        return None;
-    }
+    token_iter.next_if_eq(&&Token::CloseParenthesis)?;
+
     Some(DontOp::new())
 }
 
 /// Parse all tokens into Ops
-pub fn parse_tokens(tokens: &Vec<Token>) -> Vec<Ops> {
+pub fn parse_tokens(tokens: &[Token]) -> Vec<Ops> {
     let mut tokens = tokens.iter().peekable();
     let mut muls: Vec<Ops> = Vec::new();
     while let Some(token) = tokens.next() {
-        match token {
-            Token::Text(t) => {
-                if is_mul_text_token(t) {
-                    let op = parse_mul(tokens.by_ref());
-                    if let Some(op) = op {
-                        muls.push(Ops::Mul(op));
-                    }
-                } else if is_do_text_token(t) {
-                    let op = parse_do(tokens.by_ref());
-                    if let Some(op) = op {
-                        muls.push(Ops::Do(op));
-                    }
-                } else if is_dont_text_token(t) {
-                    let op = parse_dont(tokens.by_ref());
-                    if let Some(op) = op {
-                        muls.push(Ops::Dont(op));
-                    }
+        if let Token::Text(t) = token {
+            if is_mul_text_token(t) {
+                let op = parse_mul(tokens.by_ref());
+                if let Some(op) = op {
+                    muls.push(Ops::Mul(op));
+                }
+            } else if is_do_text_token(t) {
+                let op = parse_do(tokens.by_ref());
+                if let Some(op) = op {
+                    muls.push(Ops::Do(op));
+                }
+            } else if is_dont_text_token(t) {
+                let op = parse_dont(tokens.by_ref());
+                if let Some(op) = op {
+                    muls.push(Ops::Dont(op));
                 }
             }
-            _ => (),
         }
     }
     muls
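Note: the parser cleanup above leans on Peekable::next_if_eq and Peekable::next_if returning an Option, so the ? operator can replace each "if !(...).is_some() { return None; }" block. Below is a minimal standalone sketch of that pattern; the trimmed-down Token enum and the parse_parenthesized_number helper are illustrative stand-ins, not code taken from the repo.

use std::iter::Peekable;
use std::slice::Iter;

#[derive(Debug, PartialEq)]
enum Token {
    OpenParenthesis,
    Number(i64),
    CloseParenthesis,
}

// Each next_if_eq / next_if call yields Option<&Token>, so `?` bails out with
// None as soon as the expected token is missing, in the same style as the
// refactored parse_mul / parse_do / parse_dont functions.
fn parse_parenthesized_number(tokens: &mut Peekable<Iter<'_, Token>>) -> Option<i64> {
    tokens.next_if_eq(&&Token::OpenParenthesis)?;
    let value = if let Some(Token::Number(n)) = tokens.next_if(|t| matches!(t, Token::Number(_))) {
        *n
    } else {
        return None;
    };
    tokens.next_if_eq(&&Token::CloseParenthesis)?;
    Some(value)
}

fn main() {
    let tokens = vec![Token::OpenParenthesis, Token::Number(7), Token::CloseParenthesis];
    assert_eq!(parse_parenthesized_number(&mut tokens.iter().peekable()), Some(7));
}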