From 5eaf05ff1c03636348a38ef5167721d1b526ce6e Mon Sep 17 00:00:00 2001
From: Bearmine
Date: Mon, 26 May 2025 13:25:35 -0500
Subject: [PATCH] Break day 3 into modules

---
 day_03/src/main.rs      | 216 ++--------------------------------------
 day_03/src/op.rs        |  52 ++++++++++
 day_03/src/parser.rs    | 113 +++++++++++++++++++++
 day_03/src/tokenizer.rs |  51 ++++++++++
 4 files changed, 224 insertions(+), 208 deletions(-)
 create mode 100644 day_03/src/op.rs
 create mode 100644 day_03/src/parser.rs
 create mode 100644 day_03/src/tokenizer.rs

diff --git a/day_03/src/main.rs b/day_03/src/main.rs
index b3ceed7..c57576f 100644
--- a/day_03/src/main.rs
+++ b/day_03/src/main.rs
@@ -2,219 +2,19 @@
 //
 // Run command: cargo run ./input.txt
 
-use core::ops::Mul;
+mod op;
+mod parser;
+mod tokenizer;
+
+use crate::op::Ops;
+use crate::parser::parse_tokens;
+use crate::tokenizer::{Token, tokenize};
+
 use core::panic;
 use std::env;
 use std::error::Error;
 use std::fs::File;
 use std::io::{BufReader, prelude::*};
-use std::iter::Peekable;
-use std::str::FromStr;
-
-#[derive(Eq, PartialEq, Clone, Debug)]
-enum Token {
-    Text(String),
-    Number(String),
-    Comma,
-    OpenParenthesis,
-    CloseParenthesis,
-    SpecialChar(char),
-    Whitespace,
-    NewLine,
-}
-
-fn tokenize(input_str: String) -> Vec<Token> {
-    let mut tokens: Vec<Token> = Vec::new();
-    let mut chars = input_str.chars().peekable();
-    while let Some(c) = chars.next() {
-        let token = match c {
-            // Text Numeric
-            _ if char::is_alphabetic(c) => {
-                let mut text = c.to_string();
-                // HACK: including ' as part of alphabetic to implent "don't" expressions for part 2
-                while let Some(alphabetic_char) = chars.next_if(|p| p.is_alphabetic() || *p == '\'')
-                {
-                    text.push(alphabetic_char)
-                }
-                Token::Text(text)
-            }
-            // Numbers
-            _ if char::is_numeric(c) => {
-                let mut number = c.to_string();
-                while let Some(numeric_char) = chars.next_if(|p| p.is_numeric()) {
-                    number.push(numeric_char)
-                }
-                Token::Number(number)
-            }
-            // whitespace
-            _ if char::is_whitespace(c) => {
-                while chars.next_if(|p| p.is_whitespace()).is_some() {}
-                Token::Whitespace
-            }
-            _ if c == ',' => Token::Comma,
-            _ if c == '(' => Token::OpenParenthesis,
-            _ if c == ')' => Token::CloseParenthesis,
-            _ if c == '\n' => Token::NewLine,
-            _ => Token::SpecialChar(c),
-        };
-        tokens.push(token);
-    }
-    tokens
-}
-
-#[derive(Debug)]
-struct MulOp {
-    lhs: String,
-    rhs: String,
-}
-
-impl MulOp {
-    pub fn new(lhs: String, rhs: String) -> MulOp {
-        MulOp { lhs, rhs }
-    }
-
-    pub fn multiple<T>(&self) -> Result<<T as Mul>::Output, <T as FromStr>::Err>
-    where
-        T: FromStr + Mul,
-    {
-        let lhs: T = self.lhs.parse()?;
-        let rhs: T = self.rhs.parse()?;
-        Ok(lhs * rhs)
-    }
-}
-
-fn is_mul_text_token(text_token: &str) -> bool {
-    if text_token.ends_with("mul") {
-        true
-    } else {
-        false
-    }
-}
-
-fn parse_mul<'a>(token_iter: &mut Peekable<std::slice::Iter<'a, Token>>) -> Option<MulOp> {
-    // Take OpenParenthesis or return None
-    if !token_iter.next_if_eq(&&Token::OpenParenthesis).is_some() {
-        return None;
-    }
-    // Take Number or return None
-    let lhs =
-        if let Some(Token::Number(lhs)) = token_iter.next_if(|v| matches!(v, Token::Number(_))) {
-            lhs.to_owned()
-        } else {
-            return None;
-        };
-    // Take Comma or return None
-    if !token_iter.next_if_eq(&&Token::Comma).is_some() {
-        return None;
-    }
-    // Take Number or return None
-    let rhs = if let Some(Token::Number(rhs)) =
-        token_iter.next_if(|v: &&Token| matches!(v, Token::Number(_)))
-    {
-        rhs.to_owned()
-    } else {
-        return None;
-    };
-    // Take CloseParenthesis or return None
-    if !token_iter.next_if_eq(&&Token::CloseParenthesis).is_some() {
-        return None;
-    }
-    Some(MulOp::new(lhs, rhs))
-}
-
-#[derive(Debug)]
-struct DoOp {}
-
-impl DoOp {
-    pub fn new() -> DoOp {
-        DoOp {}
-    }
-}
-
-fn is_do_text_token(text_token: &str) -> bool {
-    if text_token.ends_with("do") {
-        true
-    } else {
-        false
-    }
-}
-
-fn parse_do<'a>(token_iter: &mut Peekable<std::slice::Iter<'a, Token>>) -> Option<DoOp> {
-    // Take OpenParenthesis or return None
-    if !token_iter.next_if_eq(&&Token::OpenParenthesis).is_some() {
-        return None;
-    }
-    // Take CloseParenthesis or return None
-    if !token_iter.next_if_eq(&&Token::CloseParenthesis).is_some() {
-        return None;
-    }
-    Some(DoOp::new())
-}
-
-#[derive(Debug)]
-struct DontOp {}
-
-impl DontOp {
-    pub fn new() -> DontOp {
-        DontOp {}
-    }
-}
-
-fn is_dont_text_token(text_token: &str) -> bool {
-    if text_token.ends_with("don't") {
-        true
-    } else {
-        false
-    }
-}
-
-fn parse_dont<'a>(token_iter: &mut Peekable<std::slice::Iter<'a, Token>>) -> Option<DontOp> {
-    // Take OpenParenthesis or return None
-    if !token_iter.next_if_eq(&&Token::OpenParenthesis).is_some() {
-        return None;
-    }
-    // Take CloseParenthesis or return None
-    if !token_iter.next_if_eq(&&Token::CloseParenthesis).is_some() {
-        return None;
-    }
-    Some(DontOp::new())
-}
-
-#[derive(Debug)]
-enum Ops {
-    Mul(MulOp),
-    Do(DoOp),
-    Dont(DontOp),
-}
-
-fn parse_tokens(tokens: &Vec<Token>) -> Vec<Ops> {
-    let mut tokens = tokens.iter().peekable();
-    let mut muls: Vec<Ops> = Vec::new();
-    while let Some(token) = tokens.next() {
-        match token {
-            Token::Text(t) => {
-                if is_mul_text_token(t) {
-                    let op = parse_mul(tokens.by_ref());
-                    if let Some(op) = op {
-                        muls.push(Ops::Mul(op));
-                    }
-                } else if is_do_text_token(t) {
-                    let op = parse_do(tokens.by_ref());
-                    if let Some(op) = op {
-                        muls.push(Ops::Do(op));
-                    }
-                } else if is_dont_text_token(t) {
-                    let op = parse_dont(tokens.by_ref());
-                    if let Some(op) = op {
-                        muls.push(Ops::Dont(op));
-                    }
-                }
-            }
-            _ => (),
-        }
-    }
-    muls
-}
 
 fn main() -> Result<(), Box<dyn Error>> {
     // Handle command input
diff --git a/day_03/src/op.rs b/day_03/src/op.rs
new file mode 100644
index 0000000..68b19c6
--- /dev/null
+++ b/day_03/src/op.rs
@@ -0,0 +1,52 @@
+use core::ops::Mul;
+use std::str::FromStr;
+
+/// Multiple Operation
+#[derive(Debug)]
+pub struct MulOp {
+    lhs: String,
+    rhs: String,
+}
+
+impl MulOp {
+    pub fn new(lhs: String, rhs: String) -> MulOp {
+        MulOp { lhs, rhs }
+    }
+
+    pub fn multiple<T>(&self) -> Result<<T as Mul>::Output, <T as FromStr>::Err>
+    where
+        T: FromStr + Mul,
+    {
+        let lhs: T = self.lhs.parse()?;
+        let rhs: T = self.rhs.parse()?;
+        Ok(lhs * rhs)
+    }
+}
+
+/// Do Operation
+#[derive(Debug)]
+pub struct DoOp {}
+
+impl DoOp {
+    pub fn new() -> DoOp {
+        DoOp {}
+    }
+}
+
+/// Don't Operation
+#[derive(Debug)]
+pub struct DontOp {}
+
+impl DontOp {
+    pub fn new() -> DontOp {
+        DontOp {}
+    }
+}
+
+/// Enum of all Operations
+#[derive(Debug)]
+pub enum Ops {
+    Mul(MulOp),
+    Do(DoOp),
+    Dont(DontOp),
+}
diff --git a/day_03/src/parser.rs b/day_03/src/parser.rs
new file mode 100644
index 0000000..3b40681
--- /dev/null
+++ b/day_03/src/parser.rs
@@ -0,0 +1,113 @@
+use crate::op::*;
+use crate::tokenizer::Token;
+
+use std::iter::Peekable;
+
+fn is_mul_text_token(text_token: &str) -> bool {
+    if text_token.ends_with("mul") {
+        true
+    } else {
+        false
+    }
+}
+
+fn parse_mul<'a>(token_iter: &mut Peekable<std::slice::Iter<'a, Token>>) -> Option<MulOp> {
+    // Take OpenParenthesis or return None
+    if !token_iter.next_if_eq(&&Token::OpenParenthesis).is_some() {
+        return None;
+    }
+    // Take Number or return None
+    let lhs =
+        if let Some(Token::Number(lhs)) = token_iter.next_if(|v| matches!(v, Token::Number(_))) {
+            lhs.to_owned()
+        } else {
+            return None;
+        };
+    // Take Comma or return None
+    if !token_iter.next_if_eq(&&Token::Comma).is_some() {
+        return None;
+    }
+    // Take Number or return None
+    let rhs = if let Some(Token::Number(rhs)) =
+        token_iter.next_if(|v: &&Token| matches!(v, Token::Number(_)))
+    {
+        rhs.to_owned()
+    } else {
+        return None;
+    };
+    // Take CloseParenthesis or return None
+    if !token_iter.next_if_eq(&&Token::CloseParenthesis).is_some() {
+        return None;
+    }
+    Some(MulOp::new(lhs, rhs))
+}
+
+fn is_do_text_token(text_token: &str) -> bool {
+    if text_token.ends_with("do") {
+        true
+    } else {
+        false
+    }
+}
+
+fn parse_do<'a>(token_iter: &mut Peekable<std::slice::Iter<'a, Token>>) -> Option<DoOp> {
+    // Take OpenParenthesis or return None
+    if !token_iter.next_if_eq(&&Token::OpenParenthesis).is_some() {
+        return None;
+    }
+    // Take CloseParenthesis or return None
+    if !token_iter.next_if_eq(&&Token::CloseParenthesis).is_some() {
+        return None;
+    }
+    Some(DoOp::new())
+}
+
+fn is_dont_text_token(text_token: &str) -> bool {
+    if text_token.ends_with("don't") {
+        true
+    } else {
+        false
+    }
+}
+
+fn parse_dont<'a>(token_iter: &mut Peekable<std::slice::Iter<'a, Token>>) -> Option<DontOp> {
+    // Take OpenParenthesis or return None
+    if !token_iter.next_if_eq(&&Token::OpenParenthesis).is_some() {
+        return None;
+    }
+    // Take CloseParenthesis or return None
+    if !token_iter.next_if_eq(&&Token::CloseParenthesis).is_some() {
+        return None;
+    }
+    Some(DontOp::new())
+}
+
+/// Parse all tokens into Ops
+pub fn parse_tokens(tokens: &Vec<Token>) -> Vec<Ops> {
+    let mut tokens = tokens.iter().peekable();
+    let mut muls: Vec<Ops> = Vec::new();
+    while let Some(token) = tokens.next() {
+        match token {
+            Token::Text(t) => {
+                if is_mul_text_token(t) {
+                    let op = parse_mul(tokens.by_ref());
+                    if let Some(op) = op {
+                        muls.push(Ops::Mul(op));
+                    }
+                } else if is_do_text_token(t) {
+                    let op = parse_do(tokens.by_ref());
+                    if let Some(op) = op {
+                        muls.push(Ops::Do(op));
+                    }
+                } else if is_dont_text_token(t) {
+                    let op = parse_dont(tokens.by_ref());
+                    if let Some(op) = op {
+                        muls.push(Ops::Dont(op));
+                    }
+                }
+            }
+            _ => (),
+        }
+    }
+    muls
+}
diff --git a/day_03/src/tokenizer.rs b/day_03/src/tokenizer.rs
new file mode 100644
index 0000000..7b1da91
--- /dev/null
+++ b/day_03/src/tokenizer.rs
@@ -0,0 +1,51 @@
+#[derive(Eq, PartialEq, Clone, Debug)]
+pub enum Token {
+    Text(String),
+    Number(String),
+    Comma,
+    OpenParenthesis,
+    CloseParenthesis,
+    SpecialChar(char),
+    Whitespace,
+    NewLine,
+}
+
+/// Tokenize the given string into a vector of Tokens
+pub fn tokenize(input_str: String) -> Vec<Token> {
+    let mut tokens: Vec<Token> = Vec::new();
+    let mut chars = input_str.chars().peekable();
+    while let Some(c) = chars.next() {
+        let token = match c {
+            // Text Numeric
+            _ if char::is_alphabetic(c) => {
+                let mut text = c.to_string();
+                // HACK: including ' as part of alphabetic to implement "don't" expressions for part 2
+                while let Some(alphabetic_char) = chars.next_if(|p| p.is_alphabetic() || *p == '\'')
+                {
+                    text.push(alphabetic_char)
+                }
+                Token::Text(text)
+            }
+            // Numbers
+            _ if char::is_numeric(c) => {
+                let mut number = c.to_string();
+                while let Some(numeric_char) = chars.next_if(|p| p.is_numeric()) {
+                    number.push(numeric_char)
+                }
+                Token::Number(number)
+            }
+            // whitespace
+            _ if char::is_whitespace(c) => {
+                while chars.next_if(|p| p.is_whitespace()).is_some() {}
+                Token::Whitespace
+            }
+            _ if c == ',' => Token::Comma,
+            _ if c == '(' => Token::OpenParenthesis,
+            _ if c == ')' => Token::CloseParenthesis,
+            _ if c == '\n' => Token::NewLine,
+            _ => Token::SpecialChar(c),
+        };
+        tokens.push(token);
+    }
+    tokens
+}
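
Usage note (illustrative, not part of the patch): main() is unchanged context above, so its wiring is not shown. Below is a minimal sketch of how a caller might drive the three new modules, assuming part 2 semantics where do() re-enables and don't() disables accumulation of mul(a,b) products. The function name sum_enabled_muls and the i64 operand type are assumptions for illustration, not code from the repository.

// Illustrative sketch only; names and wiring are assumed, not taken from main.rs.
use crate::op::Ops;
use crate::parser::parse_tokens;
use crate::tokenizer::tokenize;

fn sum_enabled_muls(input: String) -> i64 {
    let tokens = tokenize(input);
    // Assumed part 2 rule: products count until a don't(); a do() re-enables them.
    let mut enabled = true;
    let mut total: i64 = 0;
    for op in parse_tokens(&tokens) {
        match op {
            Ops::Do(_) => enabled = true,
            Ops::Dont(_) => enabled = false,
            // multiple::<i64>() parses both operands and multiplies them;
            // operands that fail to parse are skipped rather than counted.
            Ops::Mul(m) if enabled => total += m.multiple::<i64>().unwrap_or(0),
            Ops::Mul(_) => {}
        }
    }
    total
}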