Break day 3 into modules

2025-05-26 13:25:35 -05:00
parent 4eaf6948e9
commit 5eaf05ff1c
4 changed files with 224 additions and 208 deletions

day_03/src/main.rs (modified)

@@ -2,219 +2,19 @@
 //
 // Run command: cargo run ./input.txt
-use core::ops::Mul;
+mod op;
+mod parser;
+mod tokenizer;
+use crate::op::Ops;
+use crate::parser::parse_tokens;
+use crate::tokenizer::{Token, tokenize};
 use core::panic;
 use std::env;
 use std::error::Error;
 use std::fs::File;
 use std::io::{BufReader, prelude::*};
-use std::iter::Peekable;
-use std::str::FromStr;
(roughly 190 further deleted lines elided: the Token enum and tokenize(), the MulOp/DoOp/DontOp structs and the Ops enum, and the parse_* helpers with parse_tokens() were removed from main.rs and moved, essentially verbatim, into the three new modules shown below)
 fn main() -> Result<(), Box<dyn Error>> {
     // Handle command input
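With the split, main.rs only declares the modules (`mod op;`, `mod parser;`, `mod tokenizer;`) and imports their public items; the pipeline itself is unchanged. The rest of main() falls outside this hunk, so the following is only a sketch of how the new modules compose, assuming the part-1 semantics (sum every `mul`) and a hypothetical `run` helper:

use std::fs;

// Sketch only: the real main.rs body is not shown in this commit.
fn run(path: &str) -> u64 {
    let input = fs::read_to_string(path).expect("readable input file");
    let tokens = tokenize(input); // tokenizer module
    parse_tokens(&tokens) // parser module
        .iter()
        .filter_map(|op| match op {
            Ops::Mul(m) => m.multiple::<u64>().ok(), // op module
            _ => None,
        })
        .sum()
}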

day_03/src/op.rs (new file, +52 lines)

@@ -0,0 +1,52 @@
use core::ops::Mul;
use std::str::FromStr;
/// Multiply operation, e.g. `mul(2,4)`
#[derive(Debug)]
pub struct MulOp {
lhs: String,
rhs: String,
}
impl MulOp {
pub fn new(lhs: String, rhs: String) -> MulOp {
MulOp { lhs, rhs }
}
    /// Parse both operands as `T` and return their product, or the first parse error.
    pub fn multiple<T>(&self) -> Result<<T as Mul>::Output, <T as FromStr>::Err>
where
T: FromStr + Mul,
{
let lhs: T = self.lhs.parse()?;
let rhs: T = self.rhs.parse()?;
Ok(lhs * rhs)
}
}
/// Do operation: `do()` re-enables `mul` instructions (part 2)
#[derive(Debug)]
pub struct DoOp {}
impl DoOp {
pub fn new() -> DoOp {
DoOp {}
}
}
/// Don't operation: `don't()` disables `mul` instructions (part 2)
#[derive(Debug)]
pub struct DontOp {}
impl DontOp {
pub fn new() -> DontOp {
DontOp {}
}
}
/// Enum of all Operations
#[derive(Debug)]
pub enum Ops {
Mul(MulOp),
Do(DoOp),
Dont(DontOp),
}
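
Because `multiple` is generic over any `T: FromStr + Mul`, the caller picks both the numeric type and how parse failures surface. A minimal sketch of the behavior, written as a test one could drop into op.rs (the values are illustrative):

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn multiple_parses_operands_then_multiplies() {
        let op = MulOp::new("6".to_string(), "7".to_string());
        // The turbofish picks the numeric type.
        assert_eq!(op.multiple::<u32>(), Ok(42));

        // A non-numeric operand returns the FromStr error instead of panicking.
        let bad = MulOp::new("x".to_string(), "7".to_string());
        assert!(bad.multiple::<u32>().is_err());
    }
}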

day_03/src/parser.rs (new file, +113 lines)

@@ -0,0 +1,113 @@
use crate::op::*;
use crate::tokenizer::Token;
use std::iter::Peekable;
fn is_mul_text_token(text_token: &str) -> bool {
    // Corrupted junk may precede the keyword ("xmul"), so only the suffix matters.
    text_token.ends_with("mul")
}

fn parse_mul<'a>(token_iter: &mut Peekable<impl Iterator<Item = &'a Token>>) -> Option<MulOp> {
    // Each `next_if*` consumes a token only when it matches; `?` bails out
    // with None otherwise, leaving the unmatched token for the main loop.
    // Take OpenParenthesis or return None
    token_iter.next_if_eq(&&Token::OpenParenthesis)?;
    // Take Number or return None
    let lhs =
        if let Some(Token::Number(lhs)) = token_iter.next_if(|v| matches!(v, Token::Number(_))) {
            lhs.to_owned()
        } else {
            return None;
        };
    // Take Comma or return None
    token_iter.next_if_eq(&&Token::Comma)?;
    // Take Number or return None
    let rhs =
        if let Some(Token::Number(rhs)) = token_iter.next_if(|v| matches!(v, Token::Number(_))) {
            rhs.to_owned()
        } else {
            return None;
        };
    // Take CloseParenthesis or return None
    token_iter.next_if_eq(&&Token::CloseParenthesis)?;
    Some(MulOp::new(lhs, rhs))
}
fn is_do_text_token(text_token: &str) -> bool {
    text_token.ends_with("do")
}

fn parse_do<'a>(token_iter: &mut Peekable<impl Iterator<Item = &'a Token>>) -> Option<DoOp> {
    // Take "()" or return None
    token_iter.next_if_eq(&&Token::OpenParenthesis)?;
    token_iter.next_if_eq(&&Token::CloseParenthesis)?;
    Some(DoOp::new())
}
fn is_dont_text_token(text_token: &str) -> bool {
    text_token.ends_with("don't")
}

fn parse_dont<'a>(token_iter: &mut Peekable<impl Iterator<Item = &'a Token>>) -> Option<DontOp> {
    // Take "()" or return None
    token_iter.next_if_eq(&&Token::OpenParenthesis)?;
    token_iter.next_if_eq(&&Token::CloseParenthesis)?;
    Some(DontOp::new())
}
/// Parse all tokens into Ops
pub fn parse_tokens(tokens: &[Token]) -> Vec<Ops> {
    let mut tokens = tokens.iter().peekable();
    let mut ops: Vec<Ops> = Vec::new();
    while let Some(token) = tokens.next() {
        // Only Text tokens can introduce an operation; everything else is noise.
        if let Token::Text(t) = token {
            if is_mul_text_token(t) {
                if let Some(op) = parse_mul(tokens.by_ref()) {
                    ops.push(Ops::Mul(op));
                }
            } else if is_do_text_token(t) {
                if let Some(op) = parse_do(tokens.by_ref()) {
                    ops.push(Ops::Do(op));
                }
            } else if is_dont_text_token(t) {
                if let Some(op) = parse_dont(tokens.by_ref()) {
                    ops.push(Ops::Dont(op));
                }
            }
        }
    }
    ops
}
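
The check functions use `ends_with` because the corrupted memory can butt arbitrary junk up against a keyword, and a failed `parse_*` simply yields no op rather than aborting. A small test sketch of that recovery behavior (the input string is invented here, modeled on the part-2 puzzle format):

#[cfg(test)]
mod tests {
    use super::*;
    use crate::tokenizer::tokenize;

    #[test]
    fn parses_ops_and_skips_corrupted_text() {
        let tokens = tokenize("xmul(2,4)%don't()mul(3,7)do()".to_string());
        let ops = parse_tokens(&tokens);
        // Expected, in input order: Mul(2,4), Dont, Mul(3,7), Do.
        assert_eq!(ops.len(), 4);
        assert!(matches!(ops[0], Ops::Mul(_)));
        assert!(matches!(ops[1], Ops::Dont(_)));
        assert!(matches!(ops[3], Ops::Do(_)));
    }
}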

day_03/src/tokenizer.rs (new file, +51 lines)

@@ -0,0 +1,51 @@
#[derive(Eq, PartialEq, Clone, Debug)]
pub enum Token {
Text(String),
Number(String),
Comma,
OpenParenthesis,
CloseParenthesis,
SpecialChar(char),
Whitespace,
NewLine,
}
/// Tokenize the given string into a vector of Tokens
pub fn tokenize(input_str: String) -> Vec<Token> {
let mut tokens: Vec<Token> = Vec::new();
let mut chars = input_str.chars().peekable();
while let Some(c) = chars.next() {
let token = match c {
            // Alphabetic text (keywords like "mul", "do", "don't")
            _ if char::is_alphabetic(c) => {
                let mut text = c.to_string();
                // HACK: treat ' as alphabetic so "don't" lexes as a single token for part 2
                while let Some(alphabetic_char) =
                    chars.next_if(|p| p.is_alphabetic() || *p == '\'')
                {
                    text.push(alphabetic_char)
                }
                Token::Text(text)
            }
// Numbers
_ if char::is_numeric(c) => {
let mut number = c.to_string();
while let Some(numeric_char) = chars.next_if(|p| p.is_numeric()) {
number.push(numeric_char)
}
Token::Number(number)
}
            // Newline: matched before the general whitespace arm, otherwise
            // is_whitespace would swallow '\n' and Token::NewLine could never be produced
            _ if c == '\n' => Token::NewLine,
            // Whitespace (collapse a run into a single token, stopping at newlines)
            _ if char::is_whitespace(c) => {
                while chars.next_if(|p| p.is_whitespace() && *p != '\n').is_some() {}
                Token::Whitespace
            }
            _ if c == ',' => Token::Comma,
            _ if c == '(' => Token::OpenParenthesis,
            _ if c == ')' => Token::CloseParenthesis,
_ => Token::SpecialChar(c),
};
tokens.push(token);
}
tokens
}
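
The tokenizer greedily collects runs of letters and digits into single Text/Number tokens, while punctuation lexes one character at a time. A quick sketch of the resulting shape (input string invented for illustration):

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn greedy_runs_and_single_char_tokens() {
        let tokens = tokenize("mul(12,34)".to_string());
        assert_eq!(
            tokens,
            vec![
                Token::Text("mul".to_string()),
                Token::OpenParenthesis,
                Token::Number("12".to_string()),
                Token::Comma,
                Token::Number("34".to_string()),
                Token::CloseParenthesis,
            ]
        );
    }
}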