this post was submitted on 23 Dec 2024
14 points (88.9% liked)

Advent Of Code


An unofficial home for the advent of code community on programming.dev!

Advent of Code is an annual Advent calendar of small programming puzzles for a variety of skill sets and skill levels that can be solved in any programming language you like.


Day 23: LAN Party

Megathread guidelines

  • Keep top-level comments to solutions only; if you want to say something other than a solution, put it in a new post. (Replies to comments can be whatever.)
  • You can post code in code blocks by wrapping it in three backticks before and after, or use something such as https://topaz.github.io/paste/ if you prefer sharing it through a URL.

FAQ

all 14 comments
[–] Acters 1 points 3 days ago* (last edited 2 days ago)

Python3

Another day solved with optimized logic

Method build_graph took : 1.327543 milliseconds
Method count_unique_triads took : 903.221 microseconds
Method find_largest_clique took : 926.006 microseconds
Method solver took : 3.570724 milliseconds

Again, Linux handles Python better, with a combined final solve time of ~3.6 ms. It is still quite fast on Windows, with only about a 0.5 ms increase.

Still having fun adding typing and comments with VSCode + Qwen-coder 1.5B running locally. It feels so nice to be proud of readable and optimized code.

Code

from os.path import dirname,realpath,join
from itertools import combinations as itertools_combinations
from collections import defaultdict

from collections.abc import Callable
from typing import Any

def profiler(method) -> Callable[..., Any]:
    from time import perf_counter_ns
    def wrapper_method(*args: Any, **kwargs: Any) -> Any:
        start_time = perf_counter_ns()
        ret = method(*args, **kwargs)
        stop_time = perf_counter_ns() - start_time
        time_len = min(9, ((len(str(stop_time))-1)//3)*3)
        time_conversion = {9: 'seconds', 6: 'milliseconds', 3: 'microseconds', 0: 'nanoseconds'}
        print(f"Method {method.__name__} took : {stop_time / (10**time_len)} {time_conversion[time_len]}")
        return ret
    return wrapper_method

@profiler
def build_graph(connections: list[str]) -> defaultdict[str, set[str]]:
    """
    Builds an adjacency list from the list of connections.
    """
    adj = defaultdict(set)
    for conn in connections:
        nodes = conn.strip().split('-')
        node1, node2 = nodes
        adj[node1].add(node2)
        adj[node2].add(node1)
    
    return adj

@profiler
def count_unique_triads(adj: defaultdict[str, set[str]]) -> int:
    """
    Counts the number of unique triads where a 't_node' is connected to two other nodes
    that are also directly connected to each other. Ensures that each triad is counted only once.
    """
    unique_triads = set()
    
    # Identify all nodes starting with 't' (case-insensitive)
    t_nodes = [node for node in adj if node.lower().startswith('t')]
    
    for t_node in t_nodes:
        neighbors = adj[t_node]
        if len(neighbors) < 2:
            continue  # Need at least two neighbors to form a triad
        
        # Generate all unique unordered pairs of neighbors
        for node1, node2 in itertools_combinations(neighbors, 2):
            if node2 in adj[node1]:
                # Create a sorted tuple to represent the triad uniquely
                triad = tuple(sorted([t_node, node1, node2]))
                unique_triads.add(triad)
    
    return len(unique_triads)

def all_connected(nodes: tuple[str, ...], adj: defaultdict[str, set[str]]) -> bool:
    """
    Determines whether every pair of the given nodes is directly connected,
    i.e. whether the nodes form a clique.
    """
    for i,node in enumerate(nodes):
        for j in range(i + 1, len(nodes)):
            if nodes[j] not in adj[node]:
                return False
    return True

@profiler
def find_largest_clique(adj: defaultdict[str, set[str]]) -> list[str]:
    """
    Iterates over each vertex and its neighbors to find the largest clique. A clique is a subset of nodes where every pair of nodes is connected by an edge.
    The function returns the vertices of the largest clique found. If no clique is found, it returns an empty list.
    """
    # Keep track of the largest connected set found so far
    best_connected_set = tuple(['',''])
    best_connected_set_len = len(best_connected_set)
    # Iterate over each vertex in the graph with the neighbors
    for vertex, neighbors in adj.items():
        # Every member of a clique containing this vertex must also be one of its neighbors,
        # so candidate sizes run from len(neighbors) down to the size of the best set found so far
        for i in range(len(neighbors), best_connected_set_len-1, -1):
            # Iterate over all combinations of neighbors with size i
            for comb in itertools_combinations(neighbors, r=i):
                if all_connected(comb, adj):
                    if len((*comb, vertex)) > best_connected_set_len:
                        best_connected_set = (*comb, vertex)
                        best_connected_set_len = len(best_connected_set)
                    break
    return sorted(best_connected_set)

# Solve Part 1 and Part 2 of the challenge at the same time
@profiler
def solver(connections: str) -> tuple[int, str]:
    # Build the graph
    adj = build_graph(connections.splitlines())
    # return the count of unique triads and the largest clique found
    return count_unique_triads(adj),','.join(find_largest_clique(adj))

if __name__ == "__main__":
    BASE_DIR = dirname(realpath(__file__))
    with open(join(BASE_DIR, r'input'), 'r') as f:
        input_data = f.read().replace('\r', '').strip()
    result = solver(input_data)
    print("Part 1:", result[0], "\nPart 2:", result[1])

[–] [email protected] 2 points 1 week ago* (last edited 1 week ago)

Rust

Finding cliques in a graph, which is actually NP-complete. For part two I did look up how to do it and implemented the Bron-Kerbosch algorithm. Adding the pivoting optimization improved the runtime from 134 ms to 7.4 ms, so that is definitely worth it (in some sense; of course I already had the correct answer without pivoting).
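
For anyone curious what the pivoting does, here is a rough Python sketch of Bron-Kerbosch with a pivot (my own illustration, not the poster's Rust below; graph is assumed to map each node to the set of its neighbours). Candidates adjacent to the pivot are skipped as branch points, which is what prunes the search:

def bron_kerbosch_pivot(graph, r, p, x, cliques):
    """Append every maximal clique extending the partial clique r to cliques."""
    if not p and not x:
        cliques.append(set(r))
        return
    # Choose any pivot from p | x; only branch on candidates NOT adjacent to it.
    pivot = next(iter(p | x))
    for v in list(p - graph[pivot]):
        bron_kerbosch_pivot(graph, r | {v}, p & graph[v], x & graph[v], cliques)
        p = p - {v}
        x = x | {v}

# usage: cliques = []; bron_kerbosch_pivot(adj, set(), set(adj), set(), cliques)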

Solution

use rustc_hash::{FxHashMap, FxHashSet};

fn parse(input: &str) -> (Vec<Vec<usize>>, FxHashMap<&str, usize>) {
    let mut graph = Vec::new();
    let mut names: FxHashMap<&str, usize> = FxHashMap::default();
    for l in input.lines() {
        let (vs, ws) = l.split_once('-').unwrap();
        let v = *names.entry(vs).or_insert_with(|| {
            graph.push(vec![]);
            graph.len() - 1
        });
        let w = *names.entry(ws).or_insert_with(|| {
            graph.push(vec![]);
            graph.len() - 1
        });
        graph[v].push(w);
        graph[w].push(v);
    }
    (graph, names)
}

fn part1(input: String) {
    let (graph, names) = parse(&input);
    let mut triples: FxHashSet<[usize; 3]> = FxHashSet::default();
    for (_, &v) in names.iter().filter(|(name, _)| name.starts_with('t')) {
        for (i, &u) in graph[v].iter().enumerate().skip(1) {
            for w in graph[v].iter().take(i) {
                if graph[u].contains(w) {
                    let mut triple = [u, v, *w];
                    triple.sort();
                    triples.insert(triple);
                }
            }
        }
    }
    println!("{}", triples.len());
}

// Bron-Kerbosch algorithm for finding all maximal cliques in a graph
fn bron_kerbosch(
    graph: &[Vec<usize>],
    r: &mut Vec<usize>,
    mut p: FxHashSet<usize>,
    mut x: FxHashSet<usize>,
) -> Vec<Vec<usize>> {
    if p.is_empty() && x.is_empty() {
        return vec![r.to_vec()];
    }
    let mut maximal_cliques = Vec::new();
    let Some(&u) = p.iter().next() else {
        return maximal_cliques;
    };
    let mut p_pivot = p.clone();
    for w in &graph[u] {
        p_pivot.remove(w);
    }
    for v in p_pivot {
        let pn = graph[v].iter().filter(|w| p.contains(w)).copied().collect();
        let xn = graph[v].iter().filter(|w| x.contains(w)).copied().collect();
        r.push(v);
        let new_cliques = bron_kerbosch(graph, r, pn, xn);
        r.pop();
        maximal_cliques.extend(new_cliques);
        p.remove(&v);
        x.insert(v);
    }
    maximal_cliques
}

fn part2(input: String) {
    let (graph, names) = parse(&input);
    let p = (0..graph.len()).collect();
    let mut r = Vec::new();
    let maximal_cliques = bron_kerbosch(&graph, &mut r, p, FxHashSet::default());
    let maximum_clique = maximal_cliques
        .iter()
        .max_by_key(|clique| clique.len())
        .unwrap();
    let mut lan_names: Vec<&str> = names
        .iter()
        .filter(|(_, v)| maximum_clique.contains(v))
        .map(|(name, _)| *name)
        .collect();
    lan_names.sort_unstable();
    println!("{}", lan_names.join(","));
}

util::aoc_main!();

Also on github

[–] [email protected] 2 points 1 week ago (1 children)

Kotlin

Part 1 was pretty simple: just check all neighbors of a node for overlap, then filter out triples which don't have nodes beginning with 't'.

For part 2, I seem to have picked a completely different strategy from everyone else. I was a bit lost, then just boldly assumed that if I take the overlap of all triples sharing one node, I might be able to find the answer that way. To my surprise, this worked for my input. I'd be very curious to know whether I just got lucky or whether the puzzle is designed to work with this approach.

The full code is also on GitHub.

Solution

class Day23 : Puzzle {

    private val connections = ArrayList<Pair<String, String>>()

    private val tripleCache = HashSet<Triple<String, String, String>>()

    override fun readFile() {
        val input = readInputFromFile("src/main/resources/day23.txt")
        for (line in input.lines()) {
            val parts = line.split("-")
            connections.add(Pair(parts[0], parts[1]))
        }
    }

    override fun solvePartOne(): String {
        val triples = getConnectionTriples(connections)
        tripleCache.addAll(triples) // for part 2
        val res = triples.count { it.first.startsWith("t") || it.second.startsWith("t") || it.third.startsWith("t") }
        return res.toString()
    }

    private fun getConnectionTriples(connectionList: List<Pair<String, String>>): List<Triple<String, String, String>> {
        val triples = ArrayList<Triple<String, String, String>>()
        for (connection in connectionList) {
            val connectionListTemp = getAllConnections(connection.first, connectionList)
            for (i in connectionListTemp.indices) {
                for (j in i + 1 until connectionListTemp.size) {
                    val con1 = connectionListTemp[i]
                    val con2 = connectionListTemp[j]
                    if (Pair(con1, con2) in connectionList || Pair(con2, con1) in connectionList) {
                        val tripleList = mutableListOf(connection.first, con1, con2)
                        tripleList.sort()
                        triples.add(Triple(tripleList[0], tripleList[1], tripleList[2]))
                    }
                }
            }
        }
        return triples.distinct()
    }

    private fun getAllConnections(connection: String, connectionList: List<Pair<String, String>>): List<String> {
        val res = HashSet<String>()
        for (entry in connectionList) {
            when (connection) {
                entry.first -> res.add(entry.second)
                entry.second -> res.add(entry.first)
            }
        }
        return res.toList()
    }

    override fun solvePartTwo(): String {
        val pools = getPools(connections)
        println(pools)
        val res = pools.maxByOrNull { it.size }!!
        return res.joinToString(",")
    }

    // will get all pools with a minimum size of 4
    // this method makes some naive assumptions, but works for the example and my puzzle input
    private fun getPools(connectionList: List<Pair<String, String>>): List<List<String>> {
        val pools = ArrayList<List<String>>()
        val triples = tripleCache
        val nodes = connectionList.map { listOf(it.first, it.second) }.flatten().toHashSet()

        for (node in nodes) {
            val contenders = triples.filter { it.first == node || it.second == node || it.third == node }
            if (contenders.size < 2) continue // expect the minimum result to be 4, for efficiency

            // if *all* nodes within *all* triples are interconnected, add to pool
            // this may not work for all inputs!
            val contenderList = contenders.map { listOf(it.first, it.second, it.third) }.flatten().distinct()
            if (checkAllConnections(contenderList, connectionList)) {
                pools.add(contenderList.sorted())
            }
        }

        return pools.distinct()
    }

    private fun checkAllConnections(pool: List<String>, connectionList: List<Pair<String, String>>): Boolean {
        for (i in pool.indices) {
            for (j in i + 1 until pool.size) {
                val con1 = pool[i]
                val con2 = pool[j]
                if (Pair(con1, con2) !in connectionList && Pair(con2, con1) !in connectionList) {
                    return false
                }
            }
        }
        return true
    }
}
[–] [email protected] 2 points 1 week ago

That's a fun approach. The largest totally connected group will of course contain overlapping triples, so I think you're effectively doing the same thing as checking a node at a time, just more efficiently.

[–] VegOwOtenks 2 points 1 week ago (1 children)

Haskell

The solution for part two could now be used for part one as well but then I would have to rewrite part 1 .-.

import Control.Arrow

import Data.Ord (comparing)

import qualified Data.List as List
import qualified Data.Map as Map
import qualified Data.Set as Set

parse = Map.fromListWith Set.union . List.map (second Set.singleton) . uncurry (++) . (id &&& List.map (uncurry (flip (,)))) . map (break (== '-') >>> second (drop 1)) . takeWhile (/= "") . lines

depthSearch connections ps
        | length ps == 4 && head ps == last ps = [ps]
        | length ps == 4 = []
        | otherwise  = head
                >>> (connections Map.!)
                >>> Set.toList
                >>> List.map (:ps)
                >>> List.concatMap (depthSearch connections)
                $ ps

interconnections (computer, connections) = depthSearch connections [computer]

part1 = (Map.assocs &&& repeat)
        >>> first (List.map (uncurry Set.insert))
        >>> first (Set.toList . Set.unions)
        >>> uncurry zip
        >>> List.concatMap interconnections
        >>> List.map (Set.fromList . take 3)
        >>> List.filter (Set.fold (List.head >>> (== 't') >>> (||)) False)
        >>> Set.fromList
        >>> Set.size

getLANParty computer connections = (connections Map.!)
        >>> findLanPartyComponent connections [computer]
        $ computer

filterCandidates connections participants candidates = List.map (connections Map.!)
        >>> List.foldl Set.intersection candidates
        >>> Set.filter ((connections Map.!) >>> \ s -> List.all (flip Set.member s) participants)
        $ participants

findLanPartyComponent connections participants candidates
        | Set.null validParticipants = participants
        | otherwise = findLanPartyComponent connections (nextParticipant : participants) (Set.delete nextParticipant candidates)
        where
                nextParticipant = Set.findMin validParticipants
                validParticipants = filterCandidates connections participants candidates

part2 = (Map.keys &&& repeat)
        >>> uncurry zip
        >>> List.map ((uncurry getLANParty) >>> List.sort)
        >>> List.nub
        >>> List.maximumBy (comparing List.length)
        >>> List.intercalate ","

main = getContents
        >>= print
        . (part1 &&& part2)
        . parse
[–] [email protected] 2 points 1 week ago (1 children)

The solution for part two could now be used for part one as well but then I would have to rewrite part 1 .-.

I initially thought that, but now that I reconsider, I'm not so sure. Isn't it possible to have a 3-member clique overlapping two larger ones? In other words, there could be more than one way to partition the graph into completely connected components, which means my solution to part 2 is technically incorrect. Bummer.
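
For example (a made-up four-node graph, not from anyone's input), two maximal cliques can share an edge:

# Hypothetical graph: edges a-b, a-c, b-c, b-d, c-d (a-d missing)
adj = {
    "a": {"b", "c"},
    "b": {"a", "c", "d"},
    "c": {"a", "b", "d"},
    "d": {"b", "c"},
}
# {a, b, c} and {b, c, d} are both maximal cliques and they share the edge b-c,
# so there is no unique partition into completely connected components.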

[–] VegOwOtenks 2 points 1 week ago (1 children)

There probably are multiple ways to partition the graph. I haven't applied any optimizations, and my program checks members of already-detected groups again; would that yield all possible partitions, since I choose all the possible starting points for a k-clique?

[–] [email protected] 2 points 1 week ago

If you're re-checking all nodes you should be safe 👍

[–] mykl 2 points 1 week ago

Dart

A little bit of googling, a little bit of Wikipedia, and so a little bit of magic (the basic Bron-Kerbosch algorithm) gave me the following, which takes about 300 ms.

import 'package:collection/collection.dart';
import 'package:more/more.dart';

SetMultimap<String, String> getNodes(List<String> lines) {
  var nodes = SetMultimap<String, String>();
  for (var l in lines) {
    var p = l.split('-');
    nodes[p.first].add(p.last);
    nodes[p.last].add(p.first);
  }
  return nodes;
}

part1(List<String> lines) {
  var nodes = getNodes(lines);
  var ts = nodes.keys.where((e) => e.startsWith('t'));
  var ret = <String>[];
  for (var t in ts) {
    ret.addAll(nodes[t]
        .combinations(2)
        .where((e) => nodes[e.first].contains(e.last))
        .map((e) => (([t] + e)..sort()).toString()));
  }
  return ret.toSet().length;
}

part2(List<String> lines) {
  var nodes = getNodes(lines);
  Iterable<Set<String>> useBK1(
      Set<String> R, Set<String> P, Set<String> X) sync* {
    if (P.isEmpty && X.isEmpty) {
      yield R;
      return;
    }
    for (var v in P.toSet()) {
      yield* useBK1(R.toSet()..add(v), P.intersection(nodes[v]),
          X.intersection(nodes[v]));
      P.remove(v);
      X.add(v);
    }
  }
  var vs = nodes.keys.toSet()..addAll(nodes.values.deepFlatten());
  var ret = useBK1({}, vs, {}).toList();
  var max = ret.map((e) => e.length).max;
  return ret.firstWhere((e) => e.length == max).sorted().join(',');
}
[–] vole 2 points 1 week ago

Raku

My actual solution ran in about 2.5 seconds, but I optimized it to run in about 1 second.

sub MAIN($input) {
    my $file = open $input;
    my @connection-list := $file.slurp.trim.lines>>.split("-")>>.List.List;

    my %connections;
    my %all-computers is SetHash;
    for @connection-list -> @c {
        my ($first, $second) = @c.sort;
        %connections{$first} = [] if %connections{$first}:!exists;
        %connections{$second} = [] if %connections{$second}:!exists;
        %connections{$first}.push($second);
        %all-computers{@c.all}++;
    }
    for %connections.values -> $list is rw {
        $list = $list.sort.List;
    }

    my $part1-solution = 0;
    for %connections.keys -> $c1 {
        for %connections{$c1}.Seq -> $c2 {
            for (%connections{$c1} ∩ %connections{$c2}).keys -> $c3 {
                next unless ($c1, $c2, $c3).any.substr(0,1) eq "t";
                $part1-solution++;
            }
        }
    }
    say "part1 solution: $part1-solution";

    my $part2-solution = find-max-party((), %connections, %all-computers).join(",");
    say "part2 solution: $part2-solution";
}

sub find-max-party(@party, %connections, %available-members) {
    my @max-party = @party;
    for %available-members.keys.sort -> $c1 {
        my @new-party := (|@party, $c1);
        my %new-available-members := %available-members ∩ %connections{$c1};
        my @max-party-candidate = find-max-party(@new-party, %connections, %new-available-members);
        @max-party = @max-party-candidate if @max-party-candidate.elems > @max-party.elems;
        last if @max-party.elems == @party.elems + %available-members.elems;
    }
    return @max-party;
}
[–] [email protected] 2 points 1 week ago* (last edited 1 week ago)

C

Graph problems are not my piece of cake, but this one I could work out: recursively iterate over unique combinations of adjacent nodes. Performance benefits from using a basic, int-indexed adjacency matrix.

Fast enough on my 2015 PC:

day23 0:00.05 1644 Kb 0+143 faults

Code

#include "common.h"

#define VZ 520	/* max no. vertices */
#define SZ 32	/* max set size */

static char names[VZ][3];
static char adj[VZ][VZ];
static int nvert;

static int
to_id(const char *name)
{
	int i;

	for (i=0; i<nvert; i++)
		if (!strcmp(name, names[i]))
			return i;

	assert(nvert < VZ);
	assert(strlen(name) < LEN(*names));

	snprintf(names[nvert++], sizeof(*names), "%s", name);
	return i;
}

static int
cmp_id(const void *a, const void *b)
{
	return strcmp(names[*(int*)a], names[*(int*)b]);
}

/*
 * Construct all unique combinations of nodes, with every extension,
 * confirm they're all connected. Essentially this but recursive:
 *
 *   for (a=0; a<nvert; a++)
 *   for (b=a+1; b<nvert; b++)
 *   for (c=b+1; c<nvert; c++)
 *     ...
 *
 * Note the inner loops continue forward from the point of the outside
 * loop, avoiding duplicate combinations.
 *
 * 'set' and 'best' are arrays of size SZ, length 'sz' and 'bz'. 'set'
 * is the current working state; 'best' is a copy of the longest known
 * set.
 */
static int
max_set(int *set, int sz, int *best, int bz)
{
	int bz1, v,i;

	assert(sz < SZ);

	/* for each potential candidate */
	for (v = sz ? set[sz-1]+1 : 0; v < nvert; v++) {
		 /* adjacent to all in current set? */
		for (i=0; i<sz && adj[set[i]][v]; i++) ;
		if (i != sz) continue;
		 /* recur and update 'best size' if needed */
		set[sz] = v;
		if (bz < (bz1 = max_set(set, sz+1, best, bz))) bz = bz1;
	}

	/* store longest known set in 'best' */
	if (sz > bz)
		memcpy(best, set, (bz = sz) * sizeof(*best));

	return bz;
}

int
main(int argc, char **argv)
{
	static int set[SZ], best[SZ];
	char buf[8];
	int p1=0, a,b,c, i, bz;

	if (argc > 1)
		DISCARD(freopen(argv[1], "r", stdin));
	
	while (fgets(buf, sizeof(buf), stdin)) {
		assert(strlen(buf) >= 5);
		buf[2] = buf[5] = '\0';
		a = to_id(buf);
		b = to_id(buf+3);
		adj[a][b] = adj[b][a] = 1;
	}

	for (a=0; a<nvert; a++)
	for (b=a+1; b<nvert; b++)
	for (c=b+1; c<nvert; c++)
		p1 += adj[a][b] && adj[a][c] && adj[b][c] && (
		      names[a][0] == 't' || names[b][0] == 't' ||
		      names[c][0] == 't');

	printf("23: %d ", p1);

	bz = max_set(set, 0, best, 0);
	qsort(best, bz, sizeof(*best), cmp_id);

	for (i=0; i<bz; i++)
		printf(i ? ",%s" : "%s", names[best[i]]);

	putchar('\n');
	return 0;
}

https://codeberg.org/sjmulder/aoc/src/branch/master/2024/c/day23.c

[–] [email protected] 2 points 1 week ago* (last edited 1 week ago)

Rust

Part 2 works, and is fast enough, but still feels kinda gross. No idea if this is a known algorithm or not.

Edit: Having looked at the Bron-Kerbosch wiki page, I have no idea how that works, and it's way too much math for me. I'm more happy with my result now :D

Edit2: Because the code is messy, I'll try to summarise part 2 here:

  • From pt1, I already have a list of all nodes and their peers.
  • For each node, I then have the potential network.
    • For each node in the potential network, count the links to the other network nodes.
    • Filter the network to the N nodes with at least N-1 peers.

Presumably this is a known algorithm, and I just naively ran into it. I don't think it can fail, but maybe on some more exotic graphs? It might not scale well either, but given the networks are small, it's probably okay here?
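
That idea in a rough Python sketch (my paraphrase of the bullet points above, not the poster's code; adj is assumed to map each node to the set of its peers):

def largest_clique_guess(adj):
    """For each node, take it together with its peers, count each member's
    links inside that group, then shrink the target size until exactly
    size + 1 members have at least size in-group links."""
    best = []
    for node, peers in adj.items():
        group = {node} | peers
        links = {m: len(adj[m] & group) for m in group}  # in-group degree
        for size in range(len(peers), 0, -1):
            survivors = [m for m in group if links[m] >= size]
            if len(survivors) == size + 1:
                if len(survivors) > len(best):
                    best = sorted(survivors)
                break
    return best  # the part 2 password is ",".join(best)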

#[cfg(test)]
mod tests {
    use std::collections::HashMap;

    #[test]
    fn day23_part1_test() {
        let input = std::fs::read_to_string("src/input/day_23.txt").unwrap();
        let links = input
            .lines()
            .map(|l| l.split_once('-').unwrap())
            .collect::<Vec<(&str, &str)>>();

        let mut peer_map = HashMap::new();

        for pair in links {
            peer_map.entry(pair.0).or_insert(Vec::new()).push(pair.1);
            peer_map.entry(pair.1).or_insert(Vec::new()).push(pair.0);
        }

        let mut rings = vec![];
        for (pc, peers) in &peer_map {
            if !pc.starts_with('t') {
                continue;
            }
            for (i, first) in peers.iter().enumerate() {
                for second in peers[i..].iter() {
                    if first == second {
                        continue;
                    }
                    if !peer_map.get(first).unwrap().contains(second) {
                        continue;
                    }
                    let mut set = vec![pc, second, first];
                    set.sort();
                    if !rings.contains(&set) {
                        rings.push(set);
                    }
                }
            }
        }

        println!("{}", rings.len());
    }

    #[test]
    fn day23_part2_test() {
        let input = std::fs::read_to_string("src/input/day_23.txt").unwrap();
        let links = input
            .lines()
            .map(|l| l.split_once('-').unwrap())
            .collect::<Vec<(&str, &str)>>();

        let mut peer_map = HashMap::new();

        for pair in links {
            let p1 = peer_map.entry(pair.0).or_insert(Vec::new());
            if !p1.contains(&pair.1) {
                p1.push(pair.1);
            }
            let p2 = peer_map.entry(pair.1).or_insert(Vec::new());
            if !p2.contains(&pair.0) {
                p2.push(pair.0);
            }
        }

        let mut biggest_network = String::new();

        for (pc, peers) in &peer_map {
            let mut network = HashMap::new();
            network.insert(pc, peers.len());

            for first in peers {
                let mut total = 1;
                for second in peers.iter() {
                    if first == second {
                        continue;
                    }
                    if peer_map.get(first).unwrap().contains(second) {
                        total += 1;
                    }
                }
                network.insert(first, total);
            }

            let mut network_size = peers.len();
            loop {
                if network_size == 0 {
                    break;
                }
                let mut out = network
                    .iter()
                    .filter_map(|(k, v)| {
                        if v >= &network_size {
                            return Some(**k);
                        }
                        None
                    })
                    .collect::<Vec<_>>();
                if out.len() == network_size + 1 {
                    out.sort();
                    let pw = out.join(",");
                    // println!("{}", pw);
                    if pw.len() > biggest_network.len() {
                        biggest_network = pw;
                    }
                    break;
                }

                network_size -= 1;
            }
        }

        println!("{}", biggest_network);
    }
}
[–] [email protected] 1 points 1 week ago

Haskell

I was expecting a very difficult graph theory problem at first glance, but this one was actually pretty easy too!

import Data.Bifunctor
import Data.List
import Data.Ord
import Data.Set qualified as Set

views :: [a] -> [(a, [a])]
views [] = []
views (x : xs) = (x, xs) : (second (x :) <$> views xs)

choose :: Int -> [a] -> [[a]]
choose 0 _ = [[]]
choose _ [] = []
choose n (x : xs) = ((x :) <$> choose (n - 1) xs) ++ choose n xs

removeConnectedGroup connected = fmap (uncurry go . first Set.singleton) . Set.minView
  where
    go group hosts =
      maybe
        (group, hosts)
        (\h -> go (Set.insert h group) (Set.delete h hosts))
        $ find (flip all group . connected) hosts

main = do
  net <- Set.fromList . map (second tail . break (== '-')) . lines <$> readFile "input23"
  let hosts = Set.fromList $ [fst, snd] <*> Set.elems net
      connected a b = any (`Set.member` net) [(a, b), (b, a)]
      complete = all (uncurry $ all . connected) . views
  print
    . length
    . filter complete
    . filter (any ((== 't') . head))
    $ choose 3 (Set.elems hosts)
  putStrLn
    . (intercalate "," . Set.toAscList)
    . maximumBy (comparing Set.size)
    . unfoldr (removeConnectedGroup connected)
    $ hosts