scaffolding
commit 9b822cedc8
11 changed files with 305 additions and 0 deletions

aoc2024/day2.rs (new file, 62 lines)
@@ -0,0 +1,62 @@
const TEST_INPUT: &'static str = "\
7 6 4 2 1
1 2 7 8 9
9 7 6 2 1
1 3 2 4 5
8 6 4 4 1
1 3 6 7 9\
";

const INPUT: &'static str = include_str!("../inputs/2024/day2.input");

// Parse the input into one Vec<i32> of levels per report line.
fn parse(i: &str) -> Vec<Vec<i32>> {
    i.lines().map(|l|
        l
            .split_whitespace()
            .map(|w| w.parse::<i32>().unwrap())
            .collect()
    ).collect()
}

// Returns one flag per level: the first is trivially true, and each later
// flag records whether the step to that level changes by 1..=3 and (after
// the first step) keeps the same direction as the step before it.
// Might make it not return a vector
fn follows_rules(i: &Vec<i32>) -> Vec<bool> {
    let mut iter = i.iter();
    let first = iter.next().unwrap();
    let second = iter.next().unwrap();
    let initial_diff = second - first;
    let initial_holds = initial_diff.abs() <= 3 && initial_diff.abs() >= 1;
    let (acc, _, _) = iter.fold((vec![true, initial_holds], *second, initial_diff),
        |(mut acc, prev, prev_diff): (Vec<bool>, i32, i32), n: &i32| {
            let diff: i32 = n - prev;
            acc.push(
                diff.abs() <= 3 &&
                diff.abs() >= 1 &&
                ((diff > 0 && prev_diff > 0) || (diff < 0 && prev_diff < 0))
            );
            (acc, *n, diff)
        });
    acc
}

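// A hypothetical variant (a sketch, not part of this commit), following the
// comment above: return a single bool instead of a Vec<bool>, checking
// adjacent pairs with slice windows. Like the unwrap()s above, it assumes a
// report has at least two levels.
#[allow(dead_code)]
fn is_safe(report: &[i32]) -> bool {
    let sign = (report[1] - report[0]).signum();
    report.windows(2).all(|w| {
        let diff = w[1] - w[0];
        (1..=3).contains(&diff.abs()) && diff.signum() == sign
    })
}
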
// Used by part 2: remove the level at `index` and re-check the rules.
fn try_without(mut l: Vec<i32>, index: usize) -> bool {
    l.remove(index);
    follows_rules(&l).iter().all(|x| *x)
}

// Count the reports for which every rule flag holds.
pub fn part1() -> u32 {
    let input = parse(INPUT);
    input.iter().map(|l| if follows_rules(l).iter().all(|x| *x) { 1 } else { 0 }).sum()
}

// Part 2: a report also counts if removing any single level makes it pass.
pub fn part2() -> u32 {
    let mut input = parse(INPUT);
    input.iter_mut().map(|l| {
        let rules = follows_rules(l);
        if rules.iter().all(|x| *x) {
            1
        } else {
            l.iter().enumerate().any(|(i, _)| try_without(l.clone(), i)) as u32
        }
    }).sum()
}
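
A possible follow-up is to exercise TEST_INPUT in a test module. A minimal sketch, not part of this commit, assuming the puzzle's sample answers (2 safe reports for part 1 and 4 once a single level may be removed):

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn sample_part1() {
        let safe = parse(TEST_INPUT)
            .into_iter()
            .filter(|report| follows_rules(report).iter().all(|x| *x))
            .count();
        assert_eq!(safe, 2); // assumed sample answer
    }

    #[test]
    fn sample_part2() {
        let safe = parse(TEST_INPUT)
            .into_iter()
            .filter(|report| {
                follows_rules(report).iter().all(|x| *x)
                    || (0..report.len()).any(|i| try_without(report.clone(), i))
            })
            .count();
        assert_eq!(safe, 4); // assumed sample answer
    }
}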