{ lib, utils, ... }:
with lib;
with builtins;
with utils;
let
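  # Parse the first input line: a dense disk map whose digits alternate between
  # file lengths and free-space lengths (illustrative example, not the real
  # input: "12345" -> [ 1 2 3 4 5 ]).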
  disk = map toIntBase10 (stringToCharacters (head (readLines ./input)));
  # The map may end on a file length with no trailing free-space digit: round
  # the file count up and treat the missing final digit as 0 so the last file
  # is not dropped.
  files = genList (id: {
    size = elemAt disk (id * 2);
    freeAfter = if id * 2 + 1 < length disk then elemAt disk (id * 2 + 1) else 0;
    inherit id;
  }) ((length disk + 1) / 2);

  totalSize = listSumWith (getAttr "size") files;
  totalFree = listSumWith (getAttr "freeAfter") files;
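  # Illustrative values, assuming the sample map "12345": files has sizes
  # [ 1 3 5 ] and freeAfter [ 2 4 0 ], so totalSize = 9 and totalFree = 6.

  # diskExpanded is the block-level view: each file contributes `size` copies
  # of its id followed by `freeAfter` nulls (free blocks). For "12345" that is
  # [ 0 null null 1 1 1 null null null null 2 2 2 2 2 ].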
  diskExpanded = concatLists (map (f: (genList (const f.id) f.size) ++ (genList (const null) f.freeAfter)) files);
  diskSemiExpanded = concatLists (map (f: (genList (const f.id) f.size) ++ (genList (const null) f.freeAfter)) files);
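
  # The compacted disk occupies exactly the first totalSize blocks. `base` is
  # that prefix (still containing nulls); `toMove` is every file block that
  # currently sits past totalSize, reversed so the rightmost block is placed
  # first. For the "12345" example: base = [ 0 null null 1 1 1 null null null ]
  # and toMove = [ 2 2 2 2 2 ].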
  toMove = reverseList (filter (invert isNull) (drop totalSize diskExpanded));
  base = take totalSize diskExpanded;
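
  # Earlier, hole-by-hole approach, kept commented out: find the first null in
  # `base`, drop the next block of `toMove` into it, and recurse. Presumably
  # one recursion step per block is too deep/slow for the full input.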
  # compress = base: toMove: let firstNullIndex = lists.findFirstIndex isNull null base; in
  #   if (toMove == []) || (isNull firstNullIndex) then
  #     base
  #   else (take firstNullIndex base) ++ [ (head toMove) ] ++ (compress (drop (firstNullIndex + 1) base) (tail toMove));
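
  # Chunk-wise approach instead: a chunk is one file's own blocks plus the free
  # space that follows it. `compress` walks the chunks left to right and fills
  # each gap with blocks taken from the front of toMove', so the recursion
  # depth is the number of files rather than the number of blocks.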
  build_link_chunk = f: { content = genList (const f.id) f.size; inherit (f) freeAfter; };

  compress = uncompressed_chunks: toMove':
    # Recurse until the chunk list is exhausted (not until toMove' runs out),
    # so files sitting after the last hole are still emitted.
    if uncompressed_chunks == [] then [] else let
      h = head uncompressed_chunks;
      moveAmount = min (length toMove') h.freeAfter;
    in h.content ++ (take moveAmount toMove') ++ (compress (tail uncompressed_chunks) (drop moveAmount toMove'));
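
  # On the "12345" example the chunks are
  #   [ { content = [ 0 ]; freeAfter = 2; }
  #     { content = [ 1 1 1 ]; freeAfter = 4; }
  #     { content = [ 2 2 2 2 2 ]; freeAfter = 0; } ]
  # and `take totalSize (compress chunks toMove)` gives [ 0 2 2 1 1 1 2 2 2 ].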

  # Another earlier sketch: fold over `base`, replacing each null with the next
  # element of toMove.
  # foldl' (current: c:
  #   if isNull c then
  #     {
  #       next = current.next + 1;
  #       content = current.content ++ [ (elemAt toMove current.next) ];
  #     } else {
  #       inherit (current) next;
  #       content = current.content ++ [ c ];
  #     }
  # ) { next = 0; content = []; } base;

  chunks = map build_link_chunk files;
  # compress overshoots totalSize: the tail files' blocks appear both in place
  # and again via toMove, so trim the result back down.
  compressed = take totalSize (compress chunks toMove);

  # expandToMove = foldl' () [] (genList)
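
  # Per-block checksum terms: position index times file id; part1 sums them.
  # For the "12345" example the compacted disk [ 0 2 2 1 1 1 2 2 2 ] gives
  # terms [ 0 2 4 3 4 5 12 14 16 ], which sum to 60.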
  checksum = imap0 mul compressed;

  # compressed = genList (i: let current = in) (length diskExpanded)
  # chunks = map build_link_chunk files;
  # compressed = compress chunks;

in {
  part1 = listSum checksum;
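
  # Inspection outputs kept around for debugging: sampled blocks, list lengths,
  # and hole counts.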
  test1 = elemAt compressed 10000;
  test2 = elemAt compressed 20000;
  test3 = elemAt compressed 30000;
  test4 = elemAt compressed 40000;
  test5 = elemAt compressed 49000;
  # inherit files;
  # inherit chunks;
  # inherit compressed;
  lenCompressed = length compressed;
  compressedHead = head compressed;
  inherit totalSize;
  moveLen = length toMove;
  nullsInBase = count isNull base;
  # sumTest = listSumLog [1 2 3 4 5];

  diskSize = listSum disk; # TOO DAMN LARGE FOR LINEAR RECURSION!
  fileCount = length files; # TOO DAMN LARGE FOR LINEAR RECURSION!
}