Probably the hardest one so far this year. I was caught out in part 2 by using part 1's solution - I was using an "error count" > 0 to find invalid tickets, but there's one record where the only invalid field has value zero, so the "error count" is zero. Fixed by changing it to the arguably clearer and definitely more efficient check that no fields are invalid.
usestd::collections::{HashMap,HashSet};usestd::ops::RangeInclusive;useparser::*;// --- model#[derive(Debug,Eq,PartialEq)]structRanges(Vec<RangeInclusive<i64>>);typeFieldRanges=HashMap<String,Ranges>;typeTicket=Vec<i64>;#[derive(Debug,Eq,PartialEq)]structTicketData{field_ranges:FieldRanges,your_ticket:Ticket,nearby_tickets:Vec<Ticket>}implRanges{fncontains(&self,value:&i64)->bool{self.0.iter().any(|r|r.contains(value))}}implTicketData{fnis_invalid_value_for_field(&self,value:&i64,field:&str)->bool{self.field_ranges.get(field).map(|r|!r.contains(value)).unwrap()}fnis_invalid_value_for_any_field(&self,value:&i64)->bool{self.field_ranges.values().all(|r|!r.contains(value))}fnticket_errors(&self,ticket:&Ticket)->i64{ticket.iter().filter(|value|self.is_invalid_value_for_any_field(value)).sum()}fnticket_has_invalid_fields(&self,ticket:&Ticket)->bool{ticket.iter().any(|value|self.is_invalid_value_for_any_field(value))}fnticket_scanning_error_rate(&self)->i64{self.nearby_tickets.iter().map(|ticket|self.ticket_errors(ticket)).sum()}fnvalid_tickets<'a>(&'aself)->implIterator<Item=&'aTicket>+'a{self.nearby_tickets.iter().filter(move|ticket|!self.ticket_has_invalid_fields(ticket))}fnfind_field_indices(&self)->HashMap<String,usize>{letmutmatcher=FieldMatcher::new(self);self.valid_tickets().for_each(|ticket|{matcher.eliminate_indices_for_ticket(ticket,|value,field_name|self.is_invalid_value_for_field(value,field_name));});while!matcher.is_fully_determined(){matcher.eliminate_determined_indices();}matcher.flatten()}}structFieldMatcher{ordered_fields:Vec<String>,possible_indices:HashMap<String,HashSet<usize>>}implFieldMatcher{fnnew(ticket_data:&TicketData)->Self{letmutordered_fields:Vec<String>=ticket_data.field_ranges.keys().cloned().collect();ordered_fields.sort();letall_indices:HashSet<usize>=(0..ticket_data.your_ticket.len()).collect();letpossible_indices:HashMap<String,HashSet<usize>>=ticket_data.field_ranges.iter().map(|(name,_)|(name.clone(),all_indices.clone())).collect();Fi
eldMatcher{ordered_fields,possible_indices}}fneliminate_indices_for_ticket<F>(&mutself,ticket:&Ticket,is_invalid:F)whereF:Fn(&i64,&str)->bool{ticket.iter().enumerate().for_each(|(index,value)|{self.possible_indices.iter_mut().for_each(|(field_name,indices)|{ifis_invalid(value,field_name){indices.remove(&index);}})})}fneliminate_determined_indices(&mutself){letdetermined:HashSet<usize>=self.possible_indices.values().filter(|ns|ns.len()==1).flat_map(|ns|ns.iter().cloned()).collect();self.possible_indices.values_mut().filter(|ns|ns.len()>1).for_each(|ns|*ns=ns.difference(&determined).cloned().collect());}fnis_fully_determined(&self)->bool{self.possible_indices.values().all(|ns|ns.len()==1)}fnflatten(&self)->HashMap<String,usize>{self.possible_indices.iter().map(|(name,ns)|(name.clone(),*ns.iter().next().unwrap())).collect()}fndebug(&self){self.ordered_fields.iter().for_each(|f|{letmutns:Vec<&usize>=self.possible_indices.get(f).unwrap().iter().collect();ns.sort();println!("{:20} -> {:?}",f,ns);});println!();}}// --- parserfnparse_input(input:&str)->ParseResult<TicketData>{letrange=pair(left(integer,match_literal("-")),integer,|min,max|(min..=max));letranges=range.sep_by(whitespace_wrap(match_literal("or"))).map(|rs|Ranges(rs));letfield_name=one_or_more(any_char.pred(|c|*c!=':')).map(|cs|cs.iter().collect());letfield_range=tuple2(left(field_name,match_literal(":")),whitespace_wrap(ranges));letcsv=integer.sep_by(match_literal(","));letyour_ticket=right(whitespace_wrap(match_literal("your ticket:")),csv.clone());letnearby_tickets=right(whitespace_wrap(match_literal("nearby tickets:")),one_or_more(whitespace_wrap(csv)));letticket_data=tuple3(one_or_more(field_range),your_ticket,nearby_tickets).map(|(field_ranges,your_ticket,nearby_tickets)|TicketData{field_ranges:field_ranges.into_iter().collect(),your_ticket,nearby_tickets});ticket_data.parse(input)}// --- 
problemsfnpart1(ticket_data:&TicketData)->i64{ticket_data.ticket_scanning_error_rate()}fnpart2(ticket_data:&TicketData)->i64{letindices=ticket_data.find_field_indices();letvalues:Vec<&i64>=indices.iter().filter(|(name,_)|name.starts_with("departure")).map(|(_,index)|ticket_data.your_ticket.get(*index).unwrap()).collect();assert_eq!(values.len(),6);values.into_iter().product()}fnmain(){letinput=std::fs::read_to_string("./input.txt").unwrap();letticket_data=parse_input(&input).unwrap().1;println!("part 1 {:?}",part1(&ticket_data));println!("part 2 {:?}",part2(&ticket_data));}#[cfg(test)]#[macro_use]externcratemaplit;#[cfg(test)]modtests{usesuper::*;fnsample_data()->TicketData{TicketData{field_ranges:hashmap!["class".to_string()=>Ranges(vec![1..=3,5..=7]),"row".to_string()=>Ranges(vec![6..=11,33..=44]),"seat".to_string()=>Ranges(vec![13..=40,45..=50])],your_ticket:vec![7,1,14],nearby_tickets:vec![vec![7,3,47],vec![40,4,50],vec![55,2,20],vec![38,6,12]]}}#[test]fntest_parser(){letticket_data=parse_input("class: 1-3 or 5-7
row: 6-11 or 33-44
seat: 13-40 or 45-50
your ticket:
7,1,14
nearby tickets:
7,3,47
40,4,50
55,2,20
38,6,12");assert_eq!(ticket_data,Ok(("",sample_data())));}#[test]fntest_ticket_scanning_error_rate(){assert_eq!(sample_data().ticket_scanning_error_rate(),71);}#[test]fntest_find_field_indices(){letindices=sample_data().find_field_indices();assert_eq!(indices,hashmap!["row".to_string()=>0,"class".to_string()=>1,"seat".to_string()=>2]);}}
For further actions, you may consider blocking this person and/or reporting abuse
We're a place where coders share, stay up-to-date and grow their careers.
Probably the hardest one so far this year. I was caught out in part 2 by using part 1's solution - I was using an "error count" > 0 to find invalid tickets, but there's one record where the only invalid field has value zero, so the "error count" is zero. Fixed by changing it to the arguably clearer and definitely more efficient check that no fields are invalid.