diff --git a/Cargo.lock b/Cargo.lock
index d5e297e..6b5fc9e 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -3,20 +3,17 @@
 version = 4

 [[package]]
-name = "addr2line"
-version = "0.24.2"
+name = "ahash"
+version = "0.8.12"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "dfbe277e56a376000877090da837660b4427aad530e3028d44e0bffe4f89a1c1"
+checksum = "5a15f179cd60c4584b8a8c596927aadc462e27f2ca70c04e0071964a73ba7a75"
 dependencies = [
- "gimli",
+ "cfg-if",
+ "once_cell",
+ "version_check",
+ "zerocopy",
 ]

-[[package]]
-name = "adler2"
-version = "2.0.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "320119579fcad9c21884f5c4861d16174d0e06250625266f50fe6898340abefa"
-
 [[package]]
 name = "aho-corasick"
 version = "1.1.4"
@@ -32,12 +29,6 @@ version = "0.2.21"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "683d7910e743518b0e34f1186f92494becacb047c7b6bf616c96772180fef923"

-[[package]]
-name = "android-tzdata"
-version = "0.1.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e999941b234f3131b00bc13c22d06e8c5ff726d1b6318ac7eb276997bbb4fef0"
-
 [[package]]
 name = "android_system_properties"
 version = "0.1.5"
@@ -49,9 +40,9 @@ dependencies = [

 [[package]]
 name = "anstream"
-version = "0.6.19"
+version = "0.6.21"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "301af1932e46185686725e0fad2f8f2aa7da69dd70bf6ecc44d6b703844a3933"
+checksum = "43d5b281e737544384e969a5ccad3f1cdd24b48086a0fc1b2a5262a26b8f4f4a"
 dependencies = [
  "anstyle",
  "anstyle-parse",
@@ -64,9 +55,9 @@ dependencies = [

 [[package]]
 name = "anstyle"
-version = "1.0.11"
+version = "1.0.13"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "862ed96ca487e809f1c8e5a8447f6ee2cf102f846893800b20cebdf541fc6bbd"
+checksum = "5192cca8006f1fd4f7237516f40fa183bb07f8fbdfedaa0036de5ea9b0b45e78"

 [[package]]
 name = "anstyle-parse"
@@ -79,29 +70,29 @@ dependencies = [

 [[package]]
 name = "anstyle-query"
-version = "1.1.3"
+version = "1.1.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6c8bdeb6047d8983be085bab0ba1472e6dc604e7041dbf6fcd5e71523014fae9"
+checksum = "40c48f72fd53cd289104fc64099abca73db4166ad86ea0b4341abe65af83dadc"
 dependencies = [
- "windows-sys 0.59.0",
+ "windows-sys 0.61.2",
 ]

 [[package]]
 name = "anstyle-wincon"
-version = "3.0.9"
+version = "3.0.11"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "403f75924867bb1033c59fbf0797484329750cfbe3c4325cd33127941fabc882"
+checksum = "291e6a250ff86cd4a820112fb8898808a366d8f9f58ce16d1f538353ad55747d"
 dependencies = [
  "anstyle",
  "once_cell_polyfill",
- "windows-sys 0.59.0",
+ "windows-sys 0.61.2",
 ]

 [[package]]
 name = "anyhow"
-version = "1.0.98"
+version = "1.0.100"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e16d2d3311acee920a9eb8d33b8cbc1787ce4a264e85f964c2404b969bdcd487"
+checksum = "a23eb6b1614318a8071c9b2521f36b424b2c83db5eb3a0fead4a6c0809af6e61"

 [[package]]
 name = "arrayvec"
@@ -117,9 +108,24 @@ checksum = "9035ad2d096bed7955a320ee7e2230574d28fd3c3a0f186cbea1ff3c7eed5dbb"
 dependencies = [
  "proc-macro2",
  "quote",
- "syn",
+ "syn 2.0.112",
+]
+
+[[package]]
+name = "atomic"
+version = "0.6.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a89cbf775b137e9b968e67227ef7f775587cde3fd31b0d8599dbd0f598a48340"
+dependencies = [
+ "bytemuck",
 ]

+[[package]]
+name = "atomic-waker"
+version = "1.1.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1505bd5d3d116872e7271a6d4e16d81d0c8570876c8de68093a09ac269d8aac0"
+
 [[package]]
 name = "autocfg"
 version = "1.5.0"
@@ -127,25 +133,26 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "c08606f8c3cbf4ce6ec8e28fb0014a2c086708fe954eaa885384a6165172e7e8"

 [[package]]
-name = "backtrace"
-version = "0.3.75"
+name = "aws-lc-rs"
+version = "1.15.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6806a6321ec58106fea15becdad98371e28d92ccbc7c8f1b3b6dd724fe8f1002"
+checksum = "6a88aab2464f1f25453baa7a07c84c5b7684e274054ba06817f382357f77a288"
 dependencies = [
- "addr2line",
- "cfg-if",
- "libc",
- "miniz_oxide",
- "object",
- "rustc-demangle",
- "windows-targets 0.52.6",
+ "aws-lc-sys",
+ "zeroize",
 ]

 [[package]]
-name = "base64"
-version = "0.21.7"
+name = "aws-lc-sys"
+version = "0.35.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9d297deb1925b89f2ccc13d7635fa0714f12c87adce1c75356b39ca9b7178567"
+checksum = "b45afffdee1e7c9126814751f88dddc747f41d91da16c9551a0f1e8a11e788a1"
+dependencies = [
+ "cc",
+ "cmake",
+ "dunce",
+ "fs_extra",
+]

 [[package]]
 name = "base64"
@@ -155,11 +162,22 @@ checksum = "72b3254f16251a8381aa12e40e3c4d2f0199f8c6508fbecb9d91f575e0fbb8c6"

 [[package]]
 name = "bincode"
-version = "1.3.3"
+version = "2.0.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b1f45e9417d87227c7a56d22e471c6206462cba514c7590c09aff4cf6d1ddcad"
+checksum = "36eaf5d7b090263e8150820482d5d93cd964a81e4019913c972f4edcc6edb740"
 dependencies = [
+ "bincode_derive",
  "serde",
+ "unty",
+]
+
+[[package]]
+name = "bincode_derive"
+version = "2.0.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bf95709a440f45e986983918d0e8a1f30a9b1df04918fc828670606804ac3c09"
+dependencies = [
+ "virtue",
 ]

 [[package]]
@@ -185,9 +203,9 @@ checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a"

 [[package]]
 name = "bitflags"
-version = "2.9.1"
+version = "2.10.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1b8e56985ec62d17e9c1001dc89c88ecd7dc08e47eba5ec7c29c7b5eeecde967"
+checksum = "812e12b5285cc515a9c72a5c1d3b6d46a19dac5acfef5265968c166106e31dd3"

 [[package]]
 name = "block-buffer"
@@ -211,21 +229,41 @@ dependencies = [

 [[package]]
 name = "bumpalo"
-version = "3.19.0"
+version = "3.19.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5dd9dc738b7a8311c7ade152424974d8115f2cdad61e8dab8dac9f2362298510"
+
+[[package]]
+name = "bytemuck"
+version = "1.24.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "46c5e41b57b8bba42a04676d81cb89e9ee8e859a1a66f80a5a72e1cb76b34d43"
+checksum = "1fbdf580320f38b612e485521afda1ee26d10cc9884efaaa750d383e13e3c5f4"

 [[package]]
 name = "bytes"
-version = "1.10.1"
+version = "1.11.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d71b6127be86fdcfddb610f7182ac57211d4b18a3e9c82eb2d17662f2227ad6a"
+checksum = "b35204fbdc0b3f4446b89fc1ac2cf84a8a68971995d0bf2e925ec7cd960f9cb3"

 [[package]]
-name = "cassowary"
-version = "0.3.0"
+name = "c2rust-bitfields"
+version = "0.18.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b43c3f07ab0ef604fa6f595aa46ec2f8a22172c975e186f6f5bf9829a3b72c41"
+dependencies = [
+ "c2rust-bitfields-derive",
+]
+
+[[package]]
+name = "c2rust-bitfields-derive"
+version = "0.18.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "df8670b8c7b9dae1793364eafadf7239c40d669904660c5960d74cfd80b46a53"
+checksum = "d3cbc102e2597c9744c8bd8c15915d554300601c91a079430d309816b0912545"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn 1.0.109",
+]

 [[package]]
 name = "castaway"
@@ -238,18 +276,27 @@ dependencies = [

 [[package]]
 name = "cc"
-version = "1.2.27"
+version = "1.2.51"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d487aa071b5f64da6f19a3e848e3578944b726ee5a4854b82172f02aa876bfdc"
+checksum = "7a0aeaff4ff1a90589618835a598e545176939b97874f7abc7851caa0618f203"
 dependencies = [
+ "find-msvc-tools",
+ "jobserver",
+ "libc",
  "shlex",
 ]

+[[package]]
+name = "cesu8"
+version = "1.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6d43a04d8753f35258c91f8ec639f792891f748a1edbd759cf1dcea3382ad83c"
+
 [[package]]
 name = "cfg-if"
-version = "1.0.1"
+version = "1.0.4"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9555578bc9e57714c812a1f84e4fc5b4d21fcb063490c624de019f7464c91268"
+checksum = "9330f8b2ff13f34540b44e946ef35111825727b38d33286ef986142615121801"

 [[package]]
 name = "cfg_aliases"
@@ -259,11 +306,10 @@ checksum = "613afe47fcd5fac7ccf1db93babcb082c5994d996f20b8b159f2ad1658eb5724"

 [[package]]
 name = "chrono"
-version = "0.4.41"
+version = "0.4.42"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c469d952047f47f91b68d1cba3f10d63c11d73e4636f24f08daf0278abf01c4d"
+checksum = "145052bdd345b87320e369255277e3fb5152762ad123a901ef5c262dd38fe8d2"
 dependencies = [
- "android-tzdata",
  "iana-time-zone",
  "js-sys",
  "num-traits",
@@ -279,14 +325,14 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "a6139a8597ed92cf816dfb33f5dd6cf0bb93a6adc938f11039f371bc5bcd26c3"
 dependencies = [
  "chrono",
- "phf",
+ "phf 0.12.1",
 ]

 [[package]]
 name = "clap"
-version = "4.5.41"
+version = "4.5.53"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "be92d32e80243a54711e5d7ce823c35c41c9d929dc4ab58e1276f625841aadf9"
+checksum = "c9e340e012a1bf4935f5282ed1436d1489548e8f72308207ea5df0e23d2d03f8"
 dependencies = [
  "clap_builder",
  "clap_derive",
 ]

 [[package]]
 name = "clap_builder"
-version = "4.5.41"
+version = "4.5.53"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "707eab41e9622f9139419d573eca0900137718000c517d47da73045f54331c3d"
+checksum = "d76b5d13eaa18c901fd2f7fca939fefe3a0727a953561fefdf3b2922b8569d00"
 dependencies = [
  "anstream",
  "anstyle",
@@ -306,21 +352,30 @@ dependencies = [

 [[package]]
 name = "clap_derive"
-version = "4.5.41"
+version = "4.5.49"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ef4f52386a59ca4c860f7393bcf8abd8dfd91ecccc0f774635ff68e92eeef491"
+checksum = "2a0b5487afeab2deb2ff4e03a807ad1a03ac532ff5a2cee5d86884440c7f7671"
 dependencies = [
  "heck",
  "proc-macro2",
  "quote",
- "syn",
+ "syn 2.0.112",
 ]

 [[package]]
 name = "clap_lex"
-version = "0.7.5"
+version = "0.7.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a1d728cc89cf3aee9ff92b05e62b19ee65a02b5702cff7d5a377e32c6ae29d8d"
+
+[[package]]
+name = "cmake"
+version = "0.1.57"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b94f61472cee1439c0b966b47e3aca9ae07e45d070759512cd390ea2bebc6675"
+checksum = "75443c44cd6b379beb8c5b45d85d0773baf31cce901fe7bb252f4eff3008ef7d"
+dependencies = [
+ "cc",
+]

 [[package]]
 name = "colorchoice"
@@ -328,11 +383,21 @@ version = "1.0.4"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "b05b61dc5112cbb17e4b6cd61790d9845d13888356391624cbe7e41efeac1e75"

+[[package]]
+name = "combine"
+version = "4.6.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ba5a308b75df32fe02788e748662718f03fde005016435c444eea572398219fd"
+dependencies = [
+ "bytes",
+ "memchr",
+]
+
 [[package]]
 name = "compact_str"
-version = "0.8.1"
+version = "0.9.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3b79c4069c6cad78e2e0cdfcbd26275770669fb39fd308a752dc110e83b9af32"
+checksum = "3fdb1325a1cece981e8a296ab8f0f9b63ae357bd0784a9faaf548cc7b480707a"
 dependencies = [
  "castaway",
  "cfg-if",
@@ -344,13 +409,23 @@ dependencies = [

 [[package]]
 name = "convert_case"
-version = "0.7.1"
+version = "0.10.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "bb402b8d4c85569410425650ce3eddc7d698ed96d39a73f941b08fb63082f1e7"
+checksum = "633458d4ef8c78b72454de2d54fd6ab2e60f9e02be22f3c6104cdc8a4e0fceb9"
 dependencies = [
  "unicode-segmentation",
 ]

+[[package]]
+name = "core-foundation"
+version = "0.10.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b2a6cd9ae233e7f62ba4e9353e81a88df7fc8a5987b8d445b4d90c879bd156f6"
+dependencies = [
+ "core-foundation-sys",
+ "libc",
+]
+
 [[package]]
 name = "core-foundation-sys"
 version = "0.8.7"
@@ -366,28 +441,6 @@ dependencies = [
  "libc",
 ]

-[[package]]
-name = "crossbeam"
-version = "0.8.4"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1137cd7e7fc0fb5d3c5a8678be38ec56e819125d8d7907411fe24ccb943faca8"
-dependencies = [
- "crossbeam-channel",
- "crossbeam-deque",
- "crossbeam-epoch",
- "crossbeam-queue",
- "crossbeam-utils",
-]
-
-[[package]]
-name = "crossbeam-channel"
-version = "0.5.15"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "82b8f8f868b36967f9606790d1903570de9ceaf870a7bf9fbbd3016d636a2cb2"
-dependencies = [
- "crossbeam-utils",
-]
-
 [[package]]
 name = "crossbeam-deque"
 version = "0.8.6"
@@ -407,50 +460,25 @@ dependencies = [
  "crossbeam-utils",
 ]

-[[package]]
-name = "crossbeam-queue"
-version = "0.3.12"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0f58bbc28f91df819d0aa2a2c00cd19754769c2fad90579b3592b1c9ba7a3115"
-dependencies = [
- "crossbeam-utils",
-]
-
 [[package]]
 name = "crossbeam-utils"
 version = "0.8.21"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "d0a5c400df2834b80a4c3327b3aad3a4c4cd4de0629063962b03235697506a28"

-[[package]]
-name = "crossterm"
-version = "0.28.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "829d955a0bb380ef178a640b91779e3987da38c9aea133b20614cfed8cdea9c6"
-dependencies = [
- "bitflags 2.9.1",
- "crossterm_winapi",
- "mio",
- "parking_lot",
- "rustix 0.38.44",
- "signal-hook",
- "signal-hook-mio",
- "winapi",
-]
-
 [[package]]
 name = "crossterm"
 version = "0.29.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "d8b9f2e4c67f833b660cdb0a3523065869fb35570177239812ed4c905aeff87b"
 dependencies = [
- "bitflags 2.9.1",
+ "bitflags 2.10.0",
  "crossterm_winapi",
  "derive_more",
  "document-features",
  "mio",
  "parking_lot",
- "rustix 1.0.8",
+ "rustix",
  "signal-hook",
  "signal-hook-mio",
  "winapi",
@@ -467,14 +495,30 @@ dependencies = [

 [[package]]
 name = "crypto-common"
-version = "0.1.6"
+version = "0.1.7"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1bfb12502f3fc46cca1bb51ac28df9d618d813cdc3d2f25b9fe775a34af26bb3"
+checksum = "78c8292055d1c1df0cce5d180393dc8cce0abec0a7102adb6c7b1eef6016d60a"
 dependencies = [
  "generic-array",
  "typenum",
 ]

+[[package]]
+name = "csscolorparser"
+version = "0.6.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "eb2a7d3066da2de787b7f032c736763eb7ae5d355f81a68bab2675a96008b0bf"
+dependencies = [
+ "lab",
+ "phf 0.11.3",
+]
+
+[[package]]
+name = "cty"
+version = "0.2.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b365fabc795046672053e29c954733ec3b05e4be654ab130fe8f1f94d7051f35"
+
 [[package]]
 name = "darling"
 version = "0.20.11"
@@ -487,12 +531,12 @@ dependencies = [

 [[package]]
 name = "darling"
-version = "0.21.3"
+version = "0.23.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9cdf337090841a411e2a7f3deb9187445851f91b309c0c0a29e05f74a00a48c0"
+checksum = "25ae13da2f202d56bd7f91c25fba009e7717a1e4a1cc98a76d844b65ae912e9d"
 dependencies = [
- "darling_core 0.21.3",
- "darling_macro 0.21.3",
+ "darling_core 0.23.0",
+ "darling_macro 0.23.0",
 ]

 [[package]]
@@ -506,21 +550,20 @@ dependencies = [
  "proc-macro2",
  "quote",
  "strsim",
- "syn",
+ "syn 2.0.112",
 ]

 [[package]]
 name = "darling_core"
-version = "0.21.3"
+version = "0.23.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1247195ecd7e3c85f83c8d2a366e4210d588e802133e1e355180a9870b517ea4"
+checksum = "9865a50f7c335f53564bb694ef660825eb8610e0a53d3e11bf1b0d3df31e03b0"
 dependencies = [
- "fnv",
  "ident_case",
  "proc-macro2",
  "quote",
  "strsim",
- "syn",
+ "syn 2.0.112",
 ]

 [[package]]
@@ -531,18 +574,18 @@ checksum = "fc34b93ccb385b40dc71c6fceac4b2ad23662c7eeb248cf10d529b7e055b6ead"
 dependencies = [
  "darling_core 0.20.11",
  "quote",
- "syn",
+ "syn 2.0.112",
 ]

 [[package]]
 name = "darling_macro"
-version = "0.21.3"
+version = "0.23.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d38308df82d1080de0afee5d069fa14b0326a88c14f15c5ccda35b4a6c414c81"
+checksum = "ac3984ec7bd6cfa798e62b4a642426a5be0e68f9401cfc2a01e3fa9ea2fcdb8d"
 dependencies = [
- "darling_core 0.21.3",
+ "darling_core 0.23.0",
  "quote",
- "syn",
+ "syn 2.0.112",
 ]

 [[package]]
@@ -559,25 +602,41 @@ dependencies = [
  "parking_lot_core",
 ]

+[[package]]
+name = "deltae"
+version = "0.3.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5729f5117e208430e437df2f4843f5e5952997175992d1414f94c57d61e270b4"
+
+[[package]]
+name = "deranged"
+version = "0.5.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ececcb659e7ba858fb4f10388c250a7252eb0a27373f1a72b8748afdd248e587"
+dependencies = [
+ "powerfmt",
+]
+
 [[package]]
 name = "derive_more"
-version = "2.0.1"
+version = "2.1.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "093242cf7570c207c83073cf82f79706fe7b8317e98620a47d5be7c3d8497678"
+checksum = "d751e9e49156b02b44f9c1815bcb94b984cdcc4396ecc32521c739452808b134"
 dependencies = [
  "derive_more-impl",
 ]

 [[package]]
 name = "derive_more-impl"
-version = "2.0.1"
+version = "2.1.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "bda628edc44c4bb645fbe0f758797143e4e07926f7ebf4e9bdfbd3d2ce621df3"
+checksum = "799a97264921d8623a957f6c3b9011f3b5492f557bbb7a5a19b7fa6d06ba8dcb"
 dependencies = [
  "convert_case",
  "proc-macro2",
  "quote",
- "syn",
+ "rustc_version",
+ "syn 2.0.112",
 ]

 [[package]]
@@ -608,7 +667,7 @@ dependencies = [
  "libc",
  "option-ext",
  "redox_users",
- "windows-sys 0.60.2",
+ "windows-sys 0.61.2",
 ]

 [[package]]
@@ -619,18 +678,24 @@ checksum = "97369cbbc041bc366949bc74d34658d6cda5621039731c6310521892a3a20ae0"
 dependencies = [
  "proc-macro2",
  "quote",
- "syn",
+ "syn 2.0.112",
 ]

 [[package]]
 name = "document-features"
-version = "0.2.11"
+version = "0.2.12"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "95249b50c6c185bee49034bcb378a49dc2b5dff0be90ff6616d31d64febab05d"
+checksum = "d4b8a88685455ed29a21542a33abd9cb6510b6b129abadabdcef0f4c55bc8f61"
 dependencies = [
  "litrs",
 ]

+[[package]]
+name = "dunce"
+version = "1.0.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "92773504d58c093f6de2459af4af33faa518c13451eb8f2b5698ed3d36e7c813"
+
 [[package]]
 name = "dyn-clone"
 version = "1.0.20"
@@ -651,12 +716,21 @@ checksum = "877a4ace8713b0bcf2a4e7eec82529c029f1d0619886d18145fea96c3ffe5c0f"

 [[package]]
 name = "errno"
-version = "0.3.13"
+version = "0.3.14"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "778e2ac28f6c47af28e4907f13ffd1e1ddbd400980a9abd7c8df189bf578a5ad"
+checksum = "39cab71617ae0d63f51a36d69f866391735b51691dbda63cf6f96d042b63efeb"
 dependencies = [
  "libc",
- "windows-sys 0.60.2",
+ "windows-sys 0.61.2",
+]
+
+[[package]]
+name = "euclid"
+version = "0.22.11"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ad9cdb4b747e485a12abb0e6566612956c7a1bafa3bdb8d682c5b6d403589e48"
+dependencies = [
+ "num-traits",
 ]

 [[package]]
@@ -671,6 +745,16 @@ version = "0.1.9"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "7360491ce676a36bf9bb3c56c1aa791658183a54d2744120f27285738d90465a"

+[[package]]
+name = "fancy-regex"
+version = "0.11.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b95f7c0680e4142284cf8b22c14a476e87d61b004a3a0861872b32ef7ead40a2"
+dependencies = [
+ "bit-set",
+ "regex",
+]
+
 [[package]]
 name = "fancy-regex"
 version = "0.13.0"
@@ -688,6 +772,35 @@ version = "2.3.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "37909eebbb50d72f9059c3b6d82c0463f2ff062c9e95845c43a6c9c0355411be"

+[[package]]
+name = "filedescriptor"
+version = "0.8.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e40758ed24c9b2eeb76c35fb0aebc66c626084edd827e07e1552279814c6682d"
+dependencies = [
+ "libc",
+ "thiserror 1.0.69",
+ "winapi",
+]
+
+[[package]]
+name = "find-msvc-tools"
+version = "0.1.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "645cbb3a84e60b7531617d5ae4e57f7e27308f6445f5abf653209ea76dec8dff"
+
+[[package]]
+name = "finl_unicode"
+version = "1.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9844ddc3a6e533d62bba727eb6c28b5d360921d5175e9ff0f1e621a5c590a4d5"
+
+[[package]]
+name = "fixedbitset"
+version = "0.4.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0ce7134b9999ecaf8bcd65542e436736ef32ddca1b3e06094cb6ec5755203b80"
+
 [[package]]
 name = "float-cmp"
 version = "0.10.0"
@@ -705,19 +818,25 @@ checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1"

 [[package]]
 name = "foldhash"
-version = "0.1.5"
+version = "0.2.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d9c4f5dac5e15c24eb999c26181a6ca40b39fe946cbe4c263c7209467bc83af2"
+checksum = "77ce24cb58228fbb8aa041425bb1050850ac19177686ea6e0f41a70416f56fdb"

 [[package]]
 name = "form_urlencoded"
-version = "1.2.1"
+version = "1.2.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e13624c2627564efccf4934284bdd98cbaa14e79b0b5a141218e507b3a823456"
+checksum = "cb4cb245038516f5f85277875cdaa4f7d2c9a0fa0468de06ed190163b1581fcf"
 dependencies = [
  "percent-encoding",
 ]

+[[package]]
+name = "fs_extra"
+version = "1.3.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "42703706b716c37f96a77aea830392ad231f44c9e9a67872fa5548707e11b11c"
+
 [[package]]
 name = "fsevent-sys"
 version = "4.1.0"
@@ -783,7 +902,7 @@ checksum = "162ee34ebcb7c64a8abebc059ce0fee27c2262618d7b60ed8faf72fef13c3650"
 dependencies = [
  "proc-macro2",
  "quote",
- "syn",
+ "syn 2.0.112",
 ]

 [[package]]
@@ -835,43 +954,37 @@ dependencies = [
  "cfg-if",
  "js-sys",
  "libc",
- "wasi 0.11.1+wasi-snapshot-preview1",
+ "wasi",
  "wasm-bindgen",
 ]

 [[package]]
 name = "getrandom"
-version = "0.3.3"
+version = "0.3.4"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "26145e563e54f2cadc477553f1ec5ee650b00862f0a58bcd12cbdc5f0ea2d2f4"
+checksum = "899def5c37c4fd7b2664648c28120ecec138e4d395b459e5ca34f9cce2dd77fd"
 dependencies = [
  "cfg-if",
  "js-sys",
  "libc",
  "r-efi",
- "wasi 0.14.2+wasi-0.2.4",
+ "wasip2",
  "wasm-bindgen",
 ]

-[[package]]
-name = "gimli"
-version = "0.31.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "07e28edb80900c19c28f1072f2e8aeca7fa06b23cd4169cefe1af5aa3260783f"
-
 [[package]]
 name = "glob"
-version = "0.3.2"
+version = "0.3.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a8d1add55171497b4705a648c6b583acafb01d58050a51727785f0b2c8e0a2b2"
+checksum = "0cc23270f6e1808e30a928bdc84dea0b9b4136a8bc82338574f23baf47bbd280"

 [[package]]
 name = "halfbrown"
-version = "0.3.0"
+version = "0.4.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "aa2c385c6df70fd180bbb673d93039dbd2cd34e41d782600bdf6e1ca7bce39aa"
+checksum = "0c7ed2f2edad8a14c8186b847909a41fbb9c3eafa44f88bd891114ed5019da09"
 dependencies = [
- "hashbrown 0.15.4",
+ "hashbrown 0.16.1",
  "serde",
 ]

@@ -880,12 +993,16 @@ name = "hashbrown"
 version = "0.14.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "e5274423e17b7c9fc20b6e7e208532f9b19825d82dfd615708b70edd83df41f1"
+dependencies = [
+ "ahash",
+ "allocator-api2",
+]

 [[package]]
 name = "hashbrown"
-version = "0.15.4"
+version = "0.16.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5971ac85611da7067dbfcabef3c70ebb5606018acd9e2a3903a0da507521e0d5"
+checksum = "841d1cc9bed7f9236f321df977030373f4a4163ae1a7dbfe1a51a2c1a51d9100"
 dependencies = [
  "allocator-api2",
  "equivalent",
@@ -894,11 +1011,11 @@ dependencies = [

 [[package]]
 name = "hashlink"
-version = "0.10.0"
+version = "0.11.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7382cf6263419f2d8df38c55d7da83da5c18aef87fc7a7fc1fb1e344edfe14c1"
+checksum = "ea0b22561a9c04a7cb1a302c013e0259cd3b4bb619f145b32f72b8b4bcbed230"
 dependencies = [
- "hashbrown 0.15.4",
+ "hashbrown 0.16.1",
 ]

 [[package]]
@@ -907,14 +1024,19 @@ version = "0.5.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea"

+[[package]]
+name = "hex"
+version = "0.4.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70"
+
 [[package]]
 name = "http"
-version = "1.3.1"
+version = "1.4.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f4a85d31aea989eead29a3aaf9e1115a180df8282431156e533de47660892565"
+checksum = "e3ba2a386d7f85a81f119ad7498ebe444d2e22c2af0b86b069416ace48b3311a"
 dependencies = [
  "bytes",
- "fnv",
  "itoa",
 ]

@@ -949,18 +1071,20 @@ checksum = "6dbf3de79e51f3d586ab4cb9d5c3e2c14aa28ed23d180cf89b4df0454a69cc87"

 [[package]]
 name = "hyper"
-version = "1.6.0"
+version = "1.8.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "cc2b571658e38e0c01b1fdca3bbbe93c00d3d71693ff2770043f8c29bc7d6f80"
+checksum = "2ab2d4f250c3d7b1c9fcdff1cece94ea4e2dfbec68614f7b87cb205f24ca9d11"
 dependencies = [
+ "atomic-waker",
  "bytes",
  "futures-channel",
- "futures-util",
+ "futures-core",
  "http",
  "http-body",
  "httparse",
  "itoa",
  "pin-project-lite",
+ "pin-utils",
  "smallvec",
  "tokio",
  "want",
@@ -980,16 +1104,15 @@ dependencies = [
  "tokio",
  "tokio-rustls",
  "tower-service",
- "webpki-roots",
 ]

 [[package]]
 name = "hyper-util"
-version = "0.1.15"
+version = "0.1.19"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7f66d5bd4c6f02bf0542fad85d626775bab9258cf795a4256dcaf3161114d1df"
+checksum = "727805d60e7938b76b826a6ef209eb70eaa1812794f9424d4a4e2d740662df5f"
 dependencies = [
- "base64 0.22.1",
+ "base64",
  "bytes",
  "futures-channel",
  "futures-core",
@@ -1009,9 +1132,9 @@ dependencies = [

 [[package]]
 name = "iana-time-zone"
-version = "0.1.63"
+version = "0.1.64"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b0c919e5debc312ad217002b8048a17b7d83f80703865bbfcfebb0458b0b27d8"
+checksum = "33e57f83510bb73707521ebaffa789ec8caf86f9657cad665b092b581d40e9fb"
 dependencies = [
  "android_system_properties",
  "core-foundation-sys",
@@ -1033,9 +1156,9 @@ dependencies = [

 [[package]]
 name = "icu_collections"
-version = "2.0.0"
+version = "2.1.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "200072f5d0e3614556f94a9930d5dc3e0662a652823904c3a75dc3b0af7fee47"
+checksum = "4c6b649701667bbe825c3b7e6388cb521c23d88644678e83c0c4d0a621a34b43"
 dependencies = [
  "displaydoc",
  "potential_utf",
@@ -1046,9 +1169,9 @@ dependencies = [

 [[package]]
 name = "icu_locale_core"
-version = "2.0.0"
+version = "2.1.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0cde2700ccaed3872079a65fb1a78f6c0a36c91570f28755dda67bc8f7d9f00a"
+checksum = "edba7861004dd3714265b4db54a3c390e880ab658fec5f7db895fae2046b5bb6"
 dependencies = [
  "displaydoc",
  "litemap",
@@ -1059,11 +1182,10 @@ dependencies = [

 [[package]]
 name = "icu_normalizer"
-version = "2.0.0"
+version = "2.1.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "436880e8e18df4d7bbc06d58432329d6458cc84531f7ac5f024e93deadb37979"
+checksum = "5f6c8828b67bf8908d82127b2054ea1b4427ff0230ee9141c54251934ab1b599"
 dependencies = [
- "displaydoc",
  "icu_collections",
  "icu_normalizer_data",
  "icu_properties",
@@ -1074,42 +1196,38 @@ dependencies = [

 [[package]]
 name = "icu_normalizer_data"
-version = "2.0.0"
+version = "2.1.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "00210d6893afc98edb752b664b8890f0ef174c8adbb8d0be9710fa66fbbf72d3"
+checksum = "7aedcccd01fc5fe81e6b489c15b247b8b0690feb23304303a9e560f37efc560a"

 [[package]]
 name = "icu_properties"
-version = "2.0.1"
+version = "2.1.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "016c619c1eeb94efb86809b015c58f479963de65bdb6253345c1a1276f22e32b"
+checksum = "020bfc02fe870ec3a66d93e677ccca0562506e5872c650f893269e08615d74ec"
 dependencies = [
- "displaydoc",
  "icu_collections",
  "icu_locale_core",
  "icu_properties_data",
  "icu_provider",
- "potential_utf",
  "zerotrie",
  "zerovec",
 ]

 [[package]]
 name = "icu_properties_data"
-version = "2.0.1"
+version = "2.1.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "298459143998310acd25ffe6810ed544932242d3f07083eee1084d83a71bd632"
+checksum = "616c294cf8d725c6afcd8f55abc17c56464ef6211f9ed59cccffe534129c77af"

 [[package]]
 name = "icu_provider"
-version = "2.0.0"
+version = "2.1.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "03c80da27b5f4187909049ee2d72f276f0d9f99a42c306bd0131ecfe04d8e5af"
+checksum = "85962cf0ce02e1e0a629cc34e7ca3e373ce20dda4c4d7294bbd0bf1fdb59e614"
 dependencies = [
  "displaydoc",
  "icu_locale_core",
- "stable_deref_trait",
- "tinystr",
  "writeable",
  "yoke",
  "zerofrom",
@@ -1125,9 +1243,9 @@ checksum = "b9e0384b61958566e926dc50660321d12159025e767c18e043daf26b70104c39"

 [[package]]
 name = "idna"
-version = "1.0.3"
+version = "1.1.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "686f825264d630750a544639377bae737628043f20d38bbc029e8f29ea968a7e"
+checksum = "3b0875f23caa03898994f6ddc501886a45c7d3d62d04d2d90788d47be1b1e4de"
 dependencies = [
  "idna_adapter",
  "smallvec",
@@ -1146,19 +1264,22 @@ dependencies = [

 [[package]]
 name = "indexmap"
-version = "2.10.0"
+version = "2.12.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "fe4cd85333e22411419a0bcae1297d25e58c9443848b11dc6a86fefe8c78a661"
+checksum = "0ad4bb2b565bca0645f4d68c5c9af97fba094e9791da685bf83cb5f3ce74acf2"
 dependencies = [
  "equivalent",
- "hashbrown 0.15.4",
+ "hashbrown 0.16.1",
 ]

 [[package]]
 name = "indoc"
-version = "2.0.6"
+version = "2.0.7"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f4c7245a08504955605670dbf141fceab975f15ca21570696aebe9d2e71576bd"
+checksum = "79cf5c93f93228cf8efb3ba362535fb11199ac548a09ce117c9b1adc3030d706"
+dependencies = [
+ "rustversion",
+]

 [[package]]
 name = "inotify"
@@ -1166,7 +1287,7 @@ version = "0.11.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "f37dccff2791ab604f9babef0ba14fbe0be30bd368dc541e2b08d07c8aa908f3"
 dependencies = [
- "bitflags 2.9.1",
+ "bitflags 2.10.0",
  "inotify-sys",
  "libc",
 ]
@@ -1182,15 +1303,15 @@ dependencies = [

 [[package]]
 name = "instability"
-version = "0.3.9"
+version = "0.3.10"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "435d80800b936787d62688c927b6490e887c7ef5ff9ce922c6c6050fca75eb9a"
+checksum = "6778b0196eefee7df739db78758e5cf9b37412268bfa5650bfeed028aed20d9c"
 dependencies = [
  "darling 0.20.11",
  "indoc",
  "proc-macro2",
  "quote",
- "syn",
+ "syn 2.0.112",
 ]

 [[package]]
@@ -1201,9 +1322,9 @@ checksum = "469fb0b9cefa57e3ef31275ee7cacb78f2fdca44e4765491884a2b119d4eb130"

 [[package]]
 name = "iri-string"
-version = "0.7.8"
+version = "0.7.10"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "dbc5ebe9c3a1a7a5127f920a418f7585e9e758e911d0466ed004f393b0e380b2"
+checksum = "c91338f0783edbd6195decb37bae672fd3b165faffb89bf7b9e6942f8b1a731a"
 dependencies = [
  "memchr",
  "serde",
@@ -1211,9 +1332,9 @@ dependencies = [

 [[package]]
 name = "is_terminal_polyfill"
-version = "1.70.1"
+version = "1.70.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7943c866cc5cd64cbc25b2e01621d07fa8eb2a1a23160ee81ce38704e97b8ecf"
+checksum = "a6cb138bb79a146c1bd460005623e142ef0181e3d0219cb493e02f7d08a35695"

 [[package]]
 name = "itertools"
@@ -1224,45 +1345,87 @@ dependencies = [
  "either",
 ]

+[[package]]
+name = "itertools"
+version = "0.14.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2b192c782037fadd9cfa75548310488aabdbf3d2da73885b31bd0abd03351285"
+dependencies = [
+ "either",
+]
+
 [[package]]
 name = "itoa"
-version = "1.0.15"
+version = "1.0.17"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4a5f13b858c8d314ee3e8f639011f7ccefe71f97f96e50151fb991f267928e2c"
+checksum = "92ecc6618181def0457392ccd0ee51198e065e016d1d527a7ac1b6dc7c1f09d2"

 [[package]]
-name = "js-sys"
-version = "0.3.77"
+name = "jni"
+version = "0.21.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1cfaf33c695fc6e08064efbc1f72ec937429614f25eef83af942d0e227c3a28f"
+checksum = "1a87aa2bb7d2af34197c04845522473242e1aa17c12f4935d5856491a7fb8c97"
 dependencies = [
- "once_cell",
- "wasm-bindgen",
+ "cesu8",
+ "cfg-if",
+ "combine",
+ "jni-sys",
+ "log",
+ "thiserror 1.0.69",
+ "walkdir",
+ "windows-sys 0.45.0",
 ]

 [[package]]
-name = "jwalk"
-version = "0.8.1"
+name = "jni-sys"
+version = "0.3.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2735847566356cd2179a2a38264839308f7079fa96e6bd5a42d740460e003c56"
-dependencies = [
- "crossbeam",
- "rayon",
-]
+checksum = "8eaf4bc02d17cbdd7ff4c7438cafcdf7fb9a4613313ad11b4f8fefe7d3fa0130"

 [[package]]
-name = "kqueue"
-version = "1.1.1"
+name = "jobserver"
+version = "0.1.34"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "eac30106d7dce88daf4a3fcb4879ea939476d5074a9b7ddd0fb97fa4bed5596a"
+checksum = "9afb3de4395d6b3e67a780b6de64b51c978ecf11cb9a462c66be7d4ca9039d33"
 dependencies = [
- "kqueue-sys",
+ "getrandom 0.3.4",
  "libc",
 ]

 [[package]]
-name = "kqueue-sys"
-version = "1.0.4"
+name = "js-sys"
+version = "0.3.83"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "464a3709c7f55f1f721e5389aa6ea4e3bc6aba669353300af094b29ffbdde1d8"
+dependencies = [
+ "once_cell",
+ "wasm-bindgen",
+]
+
+[[package]]
+name = "kasuari"
+version = "0.4.11"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8fe90c1150662e858c7d5f945089b7517b0a80d8bf7ba4b1b5ffc984e7230a5b"
+dependencies = [
+ "hashbrown 0.16.1",
+ "portable-atomic",
+ "thiserror 2.0.17",
+]
+
+[[package]]
+name = "kqueue"
+version = "1.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "eac30106d7dce88daf4a3fcb4879ea939476d5074a9b7ddd0fb97fa4bed5596a"
+dependencies = [
+ "kqueue-sys",
+ "libc",
+]
+
+[[package]]
+name = "kqueue-sys"
+version = "1.0.4"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "ed9625ffda8729b85e45cf04090035ac368927b8cebc34898e7c120f52e4838b"
 dependencies = [
@@ -1270,6 +1433,22 @@ dependencies = [
  "libc",
 ]

+[[package]]
+name = "lab"
+version = "0.11.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bf36173d4167ed999940f804952e6b08197cae5ad5d572eb4db150ce8ad5d58f"
+
+[[package]]
+name = "lasso"
+version = "0.7.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6e14eda50a3494b3bf7b9ce51c52434a761e383d7238ce1dd5dcec2fbc13e9fb"
+dependencies = [
+ "dashmap",
+ "hashbrown 0.14.5",
+]
+
 [[package]]
 name = "lazy_static"
 version = "1.5.0"
@@ -1278,25 +1457,36 @@ checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe"

 [[package]]
 name = "libc"
-version = "0.2.174"
+version = "0.2.178"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "37c93d8daa9d8a012fd8ab92f088405fb202ea0b6ab73ee2482ae66af4f42091"
+
+[[package]]
+name = "libmimalloc-sys"
+version = "0.1.44"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1171693293099992e19cddea4e8b849964e9846f4acee11b3948bcc337be8776"
+checksum = "667f4fec20f29dfc6bc7357c582d91796c169ad7e2fce709468aefeb2c099870"
+dependencies = [
+ "cc",
+ "cty",
+ "libc",
+]

 [[package]]
 name = "libredox"
-version = "0.1.10"
+version = "0.1.12"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "416f7e718bdb06000964960ffa43b4335ad4012ae8b99060261aa4a8088d5ccb"
+checksum = "3d0b95e02c851351f877147b7deea7b1afb1df71b63aa5f8270716e0c5720616"
 dependencies = [
- "bitflags 2.9.1",
+ "bitflags 2.10.0",
  "libc",
 ]

 [[package]]
 name = "libsqlite3-sys"
-version = "0.33.0"
+version = "0.36.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "947e6816f7825b2b45027c2c32e7085da9934defa535de4a6a46b10a4d5257fa"
+checksum = "95b4103cffefa72eb8428cb6b47d6627161e51c2739fc5e3b734584157bc642a"
 dependencies = [
  "cc",
  "pkg-config",
@@ -1304,52 +1494,54 @@
 ]

 [[package]]
-name = "linux-raw-sys"
-version = "0.4.15"
+name = "line-clipping"
+version = "0.3.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d26c52dbd32dccf2d10cac7725f8eae5296885fb5703b261f7d0a0739ec807ab"
+checksum = "5f4de44e98ddbf09375cbf4d17714d18f39195f4f4894e8524501726fd9a8a4a"
+dependencies = [
+ "bitflags 2.10.0",
+]

 [[package]]
 name = "linux-raw-sys"
-version = "0.9.4"
+version = "0.11.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "cd945864f07fe9f5371a27ad7b52a172b4b499999f1d97574c9fa68373937e12"
+checksum = "df1d3c3b53da64cf5760482273a98e575c651a67eec7f77df96b5b642de8f039"

 [[package]]
 name = "litemap"
-version = "0.8.0"
+version = "0.8.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "241eaef5fd12c88705a01fc1066c48c4b36e0dd4377dcdc7ec3942cea7a69956"
+checksum = "6373607a59f0be73a39b6fe456b8192fcc3585f602af20751600e974dd455e77"

 [[package]]
 name = "litrs"
-version = "0.4.1"
+version = "1.0.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b4ce301924b7887e9d637144fdade93f9dfff9b60981d4ac161db09720d39aa5"
+checksum = "11d3d7f243d5c5a8b9bb5d6dd2b1602c0cb0b9db1621bafc7ed66e35ff9fe092"

 [[package]]
 name = "lock_api"
-version = "0.4.13"
+version = "0.4.14"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "96936507f153605bddfcda068dd804796c84324ed2510809e5b2a624c81da765"
+checksum = "224399e74b87b5f3557511d98dff8b14089b3dadafcab6bb93eab67d3aace965"
 dependencies = [
- "autocfg",
  "scopeguard",
 ]

 [[package]]
 name = "log"
-version = "0.4.27"
+version = "0.4.29"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "13dc2df351e3202783a1fe0d44375f7295ffb4049267b0f3018346dc122a1d94"
+checksum = "5e5032e24019045c762d3c0f28f5b6b8bbf38563a65908389bf7978758920897"

 [[package]]
 name = "lru"
-version = "0.12.5"
+version = "0.16.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "234cf4f4a04dc1f57e24b96cc0cd600cf2af460d4161ac5ecdd0af8e1f3b2a38"
+checksum = "96051b46fc183dc9cd4a223960ef37b9af631b55191852a8274bfef064cda20f"
 dependencies = [
- "hashbrown 0.15.4",
+ "hashbrown 0.16.1",
 ]

 [[package]]
@@ -1358,40 +1550,94 @@ version = "0.1.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "112b39cec0b298b6c1999fee3e31427f74f676e4cb9879ed1a121b43661a4154"

+[[package]]
+name = "mac_address"
+version = "1.1.8"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c0aeb26bf5e836cc1c341c8106051b573f1766dfa05aa87f0b98be5e51b02303"
+dependencies = [
+ "nix",
+ "winapi",
+]
+
 [[package]]
 name = "memchr"
-version = "2.7.5"
+version = "2.7.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f52b00d39961fc5b2736ea853c9cc86238e165017a493d1d5c8eac6bdc4cc273"
+
+[[package]]
+name = "memmem"
+version = "0.1.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "32a282da65faaf38286cf3be983213fcf1d2e2a58700e808f83f4ea9a4804bc0"
+checksum = "a64a92489e2744ce060c349162be1c5f33c6969234104dbd99ddb5feb08b8c15"

 [[package]]
-name = "miniz_oxide"
-version = "0.8.9"
+name = "memoffset"
+version = "0.9.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1fa76a2c86f704bdb222d66965fb3d63269ce38518b83cb0575fca855ebb6316"
+checksum = "488016bfae457b036d996092f6cb448677611ce4449e970ceaf42695203f218a"
 dependencies = [
- "adler2",
+ "autocfg",
 ]

+[[package]]
+name = "mimalloc"
+version = "0.1.48"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e1ee66a4b64c74f4ef288bcbb9192ad9c3feaad75193129ac8509af543894fd8"
+dependencies = [
+ "libmimalloc-sys",
+]
+
+[[package]]
+name = "minimal-lexical"
+version = "0.2.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a"
+
 [[package]]
 name = "mio"
-version = "1.0.4"
+version = "1.1.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "78bed444cc8a2160f01cbcf811ef18cac863ad68ae8ca62092e8db51d51c761c"
+checksum = "a69bcab0ad47271a0234d9422b131806bf3968021e5dc9328caf2d4cd58557fc"
 dependencies = [
  "libc",
  "log",
- "wasi 0.11.1+wasi-snapshot-preview1",
- "windows-sys 0.59.0",
+ "wasi",
+ "windows-sys 0.61.2",
+]
+
+[[package]]
+name = "nix"
+version = "0.29.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "71e2746dc3a24dd78b3cfcb7be93368c6de9963d30f43a6a73998a9cf4b17b46"
+dependencies = [
+ "bitflags 2.10.0",
+ "cfg-if",
+ "cfg_aliases",
+ "libc",
+ "memoffset",
+]
+
+[[package]]
+name = "nom"
+version = "7.1.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d273983c5a657a70a3e8f2a01329822f3b8c8172b73826411a55751e404a0a4a"
+dependencies = [
+ "memchr",
+ "minimal-lexical",
 ]

 [[package]]
 name = "notify"
-version = "8.1.0"
+version = "8.2.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3163f59cd3fa0e9ef8c32f242966a7b9994fd7378366099593e0e73077cd8c97"
+checksum = "4d3d07927151ff8575b7087f245456e549fea62edf0ec4e565a5ee50c8402bc3"
 dependencies = [
- "bitflags 2.9.1",
+ "bitflags 2.10.0",
  "fsevent-sys",
  "inotify",
  "kqueue",
@@ -1409,6 +1655,23 @@ version = "2.0.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "5e0826a989adedc2a244799e823aece04662b66609d96af8dff7ac6df9a8925d"

+[[package]]
+name = "num-conv"
+version = "0.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "51d515d32fb182ee37cda2ccdcb92950d6a3c2893aa280e540671c2cd0f3b1d9"
+
+[[package]]
+name = "num-derive"
+version = "0.4.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ed3955f1a9c7c0c15e092f9c887db08b1fc683305fdf6eb6684f22555355e202"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn 2.0.112",
+]
+
 [[package]]
 name = "num-format"
 version = "0.4.4"
@@ -1429,12 +1692,12 @@ dependencies = [
 ]

 [[package]]
-name = "object"
-version = "0.36.7"
+name = "num_threads"
+version = "0.1.7"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "62948e14d923ea95ea2c7c86c71013138b66525b86bdc08d2dcc262bdb497b87"
+checksum = "5c7398b9c8b70908f6371f47ed36737907c87c52af34c268fed0bf0ceb92ead9"
 dependencies = [
- "memchr",
+ "libc",
 ]

 [[package]]
@@ -1445,9 +1708,15 @@ checksum = "42f5e15c9953c5e4ccceeb2e7382a716482c34515315f7b03532b8b4e8393d2d"

 [[package]]
 name = "once_cell_polyfill"
-version = "1.70.1"
+version = "1.70.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a4895175b425cb1f87721b59f0f286c2092bd4af812243672510e1ac53e2e0ad"
+checksum = "384b8ab6d37215f3c5301a95a4accb5d64aa607f1fcb26a11b5303878451b4fe"
+
+[[package]]
+name = "openssl-probe"
+version = "0.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9f50d9b3dabb09ecd771ad0aa242ca6894994c130308ca3d7684634df8037391"

 [[package]]
 name = "option-ext"
@@ -1455,11 +1724,20 @@ version = "0.2.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "04744f49eae99ab78e0d5c0b603ab218f515ea8cfe5a456d7629ad883a3b6e7d"

+[[package]]
+name = "ordered-float"
+version = "4.6.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7bb71e1b3fa6ca1c61f383464aaf2bb0e2f8e772a1f01d486832464de363b951"
+dependencies = [
+ "num-traits",
+]
+
 [[package]]
 name = "parking_lot"
-version = "0.12.4"
+version = "0.12.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "70d58bf43669b5795d1576d0641cfb6fbb2057bf629506267a92807158584a13"
+checksum = "93857453250e3077bd71ff98b6a65ea6621a19bb0f559a85248955ac12c45a1a"
 dependencies = [
  "lock_api",
  "parking_lot_core",
@@ -1467,28 +1745,81 @@ dependencies = [

 [[package]]
 name = "parking_lot_core"
-version = "0.9.11"
+version = "0.9.12"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "bc838d2a56b5b1a6c25f55575dfc605fabb63bb2365f6c2353ef9159aa69e4a5"
+checksum = "2621685985a2ebf1c516881c026032ac7deafcda1a2c9b7850dc81e3dfcb64c1"
 dependencies = [
  "cfg-if",
  "libc",
  "redox_syscall",
  "smallvec",
- "windows-targets 0.52.6",
+ "windows-link",
 ]

 [[package]]
-name = "paste"
-version = "1.0.15"
+name = "pastey"
+version = "0.2.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "57c0d7b74b563b49d38dae00a0c37d4d6de9b432382b2892f0574ddcae73fd0a"
+checksum = "b867cad97c0791bbd3aaa6472142568c6c9e8f71937e98379f584cfb0cf35bec"

 [[package]]
 name = "percent-encoding"
-version = "2.3.1"
+version = "2.3.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9b4f627cb1b25917193a259e49bdad08f671f8d9708acfd5fe0a8c1455d87220"
+
+[[package]]
+name = "pest"
+version = "2.8.4"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e"
+checksum = "cbcfd20a6d4eeba40179f05735784ad32bdaef05ce8e8af05f180d45bb3e7e22"
+dependencies = [
+ "memchr",
+ "ucd-trie",
+]
+
+[[package]]
+name = "pest_derive"
+version = "2.8.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "51f72981ade67b1ca6adc26ec221be9f463f2b5839c7508998daa17c23d94d7f"
+dependencies = [
+ "pest",
+ "pest_generator",
+]
+
+[[package]]
+name = "pest_generator"
+version = "2.8.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "dee9efd8cdb50d719a80088b76f81aec7c41ed6d522ee750178f83883d271625"
+dependencies = [
+ "pest",
+ "pest_meta",
+ "proc-macro2",
+ "quote",
+ "syn 2.0.112",
+]
+
+[[package]]
+name = "pest_meta"
+version = "2.8.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bf1d70880e76bdc13ba52eafa6239ce793d85c8e43896507e43dd8984ff05b82"
+dependencies = [
+ "pest",
+ "sha2",
+]
+
+[[package]]
+name = "phf"
+version = "0.11.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1fd6780a80ae0c52cc120a26a1a42c1ae51b247a253e4e06113d23d2c2edd078"
+dependencies = [
+ "phf_macros 0.11.3",
+ "phf_shared 0.11.3",
+]

 [[package]]
 name = "phf"
@@ -1496,32 +1827,83 @@ version = "0.12.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "913273894cec178f401a31ec4b656318d95473527be05c0752cc41cdc32be8b7"
 dependencies = [
- "phf_macros",
- "phf_shared",
+ "phf_shared 0.12.1",
+]
+
+[[package]]
+name = "phf"
+version = "0.13.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c1562dc717473dbaa4c1f85a36410e03c047b2e7df7f45ee938fbef64ae7fadf"
+dependencies = [
+ "phf_macros 0.13.1",
+ "phf_shared 0.13.1",
  "serde",
 ]

+[[package]]
+name = "phf_codegen"
+version = "0.11.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "aef8048c789fa5e851558d709946d6d79a8ff88c0440c587967f8e94bfb1216a"
+dependencies = [
+ "phf_generator 0.11.3",
+ "phf_shared 0.11.3",
+]
+
 [[package]]
 name = "phf_generator"
-version = "0.12.1"
+version = "0.11.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3c80231409c20246a13fddb31776fb942c38553c51e871f8cbd687a4cfb5843d"
+dependencies = [
+ "phf_shared 0.11.3",
+ "rand 0.8.5",
+]
+
+[[package]]
+name = "phf_generator"
+version = "0.13.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2cbb1126afed61dd6368748dae63b1ee7dc480191c6262a3b4ff1e29d86a6c5b"
+checksum = "135ace3a761e564ec88c03a77317a7c6b80bb7f7135ef2544dbe054243b89737"
 dependencies = [
  "fastrand",
- "phf_shared",
+ "phf_shared 0.13.1",
 ]

 [[package]]
 name = "phf_macros"
-version = "0.12.1"
+version = "0.11.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d713258393a82f091ead52047ca779d37e5766226d009de21696c4e667044368"
+checksum = "f84ac04429c13a7ff43785d75ad27569f2951ce0ffd30a3321230db2fc727216"
 dependencies = [
- "phf_generator",
- "phf_shared",
+ "phf_generator 0.11.3",
+ "phf_shared 0.11.3",
  "proc-macro2",
  "quote",
- "syn",
+ "syn 2.0.112",
+]
+
+[[package]]
+name = "phf_macros"
+version = "0.13.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "812f032b54b1e759ccd5f8b6677695d5268c588701effba24601f6932f8269ef"
+dependencies = [
+ "phf_generator 0.13.1",
+ "phf_shared 0.13.1",
+ "proc-macro2",
+ "quote",
+ "syn 2.0.112",
+]
+
+[[package]]
+name = "phf_shared"
+version = "0.11.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "67eabc2ef2a60eb7faa00097bd1ffdb5bd28e62bf39990626a582201b7a754e5"
+dependencies = [
+ "siphasher",
 ]

 [[package]]
@@ -1533,6 +1915,15 @@ dependencies = [
  "siphasher",
 ]

+[[package]]
+name = "phf_shared"
+version = "0.13.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e57fef6bc5981e38c2ce2d63bfa546861309f875b8a75f092d1d54ae2d64f266"
+dependencies = [
+ "siphasher",
+]
+
 [[package]]
 name = "pin-project-lite"
 version = "0.2.16"
@@ -1551,15 +1942,27 @@ version = "0.3.32"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "7edddbd0b52d732b21ad9a5fab5c704c14cd949e5e9a1ec5929a24fded1b904c"

+[[package]]
+name = "portable-atomic"
+version = "1.13.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f89776e4d69bb58bc6993e99ffa1d11f228b839984854c7daeb5d37f87cbe950"
+
 [[package]]
 name = "potential_utf"
-version = "0.1.2"
+version = "0.1.4"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e5a7c30837279ca13e7c867e9e40053bc68740f988cb07f7ca6df43cc734b585"
+checksum = "b73949432f5e2a09657003c25bca5e19a0e9c84f8058ca374f49e0ebe605af77"
 dependencies = [
  "zerovec",
 ]

+[[package]]
+name = "powerfmt"
+version = "0.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "439ee305def115ba05938db6eb1644ff94165c5ab5e9420d1c1bcedbba909391"
+
 [[package]]
 name = "ppv-lite86"
 version = "0.2.21"
@@ -1571,18 +1974,18 @@ dependencies = [

 [[package]]
 name = "proc-macro2"
-version = "1.0.95"
+version = "1.0.104"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "02b3e5e68a3a1a02aad3ec490a98007cbc13c37cbe84a3cd7b8e406d76e7f778"
+checksum = "9695f8df41bb4f3d222c95a67532365f569318332d03d5f3f67f37b20e6ebdf0"
 dependencies = [
  "unicode-ident",
 ]

 [[package]]
 name = "quinn"
-version = "0.11.8"
+version = "0.11.9"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "626214629cda6781b6dc1d316ba307189c85ba657213ce642d9c77670f8202c8"
+checksum = "b9e20a958963c291dc322d98411f541009df2ced7b5a4f2bd52337638cfccf20"
 dependencies = [
  "bytes",
  "cfg_aliases",
@@ -1592,7 +1995,7 @@ dependencies = [
  "rustc-hash 2.1.1",
  "rustls",
  "socket2",
- "thiserror",
+ "thiserror 2.0.17",
  "tokio",
  "tracing",
  "web-time",
@@ -1600,20 +2003,21 @@ dependencies = [

 [[package]]
 name = "quinn-proto"
-version = "0.11.12"
+version = "0.11.13"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "49df843a9161c85bb8aae55f101bc0bac8bcafd637a620d9122fd7e0b2f7422e"
+checksum = "f1906b49b0c3bc04b5fe5d86a77925ae6524a19b816ae38ce1e426255f1d8a31"
 dependencies = [
+ "aws-lc-rs",
  "bytes",
- "getrandom 0.3.3",
+ "getrandom 0.3.4",
  "lru-slab",
- "rand",
+ "rand 0.9.2",
  "ring",
  "rustc-hash 2.1.1",
  "rustls",
  "rustls-pki-types",
  "slab",
- "thiserror",
+ "thiserror 2.0.17",
  "tinyvec",
  "tracing",
  "web-time",
@@ -1621,23 +2025,23 @@ dependencies = [

 [[package]]
 name = "quinn-udp"
-version = "0.5.13"
+version = "0.5.14"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "fcebb1209ee276352ef14ff8732e24cc2b02bbac986cd74a4c81bcb2f9881970"
+checksum = "addec6a0dcad8a8d96a771f815f0eaf55f9d1805756410b39f5fa81332574cbd"
 dependencies = [
  "cfg_aliases",
  "libc",
  "once_cell",
  "socket2",
  "tracing",
- "windows-sys 0.59.0",
+ "windows-sys 0.60.2",
 ]

 [[package]]
 name = "quote"
-version = "1.0.40"
+version = "1.0.42"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1885c039570dc00dcb4ff087a89e185fd56bae234ddc7f056a945bf36467248d"
+checksum = "a338cc41d27e6cc6dce6cefc13a0729dfbb81c262b1f519331575dd80ef3067f"
 dependencies = [
  "proc-macro2",
 ]

@@ -1648,6 +2052,15 @@ version = "5.3.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "69cdb34c158ceb288df11e18b4bd39de994f6657d83847bdffdbd7f346754b0f"

+[[package]]
+name = "rand"
+version = "0.8.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404"
+dependencies = [
+ "rand_core 0.6.4",
+]
+
 [[package]]
 name = "rand"
 version = "0.9.2"
@@ -1655,7 +2068,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "6db2770f06117d490610c7488547d543617b21bfa07796d7a12f6f1bd53850d1"
 dependencies = [
  "rand_chacha",
- "rand_core",
+ "rand_core 0.9.3",
 ]

 [[package]]
@@ -1665,44 +2078,114 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "d3022b5f1df60f26e1ffddd6c66e8aa15de382ae63b3a0c1bfc0e4d3e3f325cb"
 dependencies = [
  "ppv-lite86",
- "rand_core",
+ "rand_core 0.9.3",
 ]

+[[package]]
+name = "rand_core"
+version = "0.6.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c"
+
 [[package]]
 name = "rand_core"
 version = "0.9.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "99d9a13982dcf210057a8a78572b2217b667c3beacbf3a0d8b454f6f82837d38"
 dependencies = [
- "getrandom 0.3.3",
+ "getrandom 0.3.4",
 ]

 [[package]]
 name = "ratatui"
-version = "0.29.0"
+version = "0.30.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "eabd94c2f37801c20583fc49dd5cd6b0ba68c716787c2dd6ed18571e1e63117b"
+checksum = "d1ce67fb8ba4446454d1c8dbaeda0557ff5e94d39d5e5ed7f10a65eb4c8266bc"
 dependencies = [
- "bitflags 2.9.1",
- "cassowary",
+ "instability",
+ "ratatui-core",
+ "ratatui-crossterm",
+ "ratatui-macros",
+ "ratatui-termwiz",
+ "ratatui-widgets",
+]
+
+[[package]]
+name = "ratatui-core"
+version = "0.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5ef8dea09a92caaf73bff7adb70b76162e5937524058a7e5bff37869cbbec293"
+dependencies = [
+ "bitflags 2.10.0",
  "compact_str",
- "crossterm 0.28.1",
+ "hashbrown 0.16.1",
  "indoc",
- "instability",
- "itertools",
+ "itertools 0.14.0",
+ "kasuari",
  "lru",
- "paste",
  "strum",
+ "thiserror 2.0.17",
  "unicode-segmentation",
  "unicode-truncate",
- "unicode-width 0.2.0",
+ "unicode-width",
+]
+
+[[package]]
+name = "ratatui-crossterm"
+version = "0.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "577c9b9f652b4c121fb25c6a391dd06406d3b092ba68827e6d2f09550edc54b3"
+dependencies = [
+ "cfg-if",
+ "crossterm",
+ "instability",
+ "ratatui-core",
+]
+
+[[package]]
+name = "ratatui-macros"
+version = "0.7.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a7f1342a13e83e4bb9d0b793d0ea762be633f9582048c892ae9041ef39c936f4"
+dependencies = [
+ "ratatui-core",
+ "ratatui-widgets",
+]
+
+[[package]]
+name = "ratatui-termwiz"
+version = "0.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0f76fe0bd0ed4295f0321b1676732e2454024c15a35d01904ddb315afd3d545c"
+dependencies = [
+ "ratatui-core",
+ "termwiz",
+]
+
+[[package]]
+name = "ratatui-widgets"
+version = "0.3.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d7dbfa023cd4e604c2553483820c5fe8aa9d71a42eea5aa77c6e7f35756612db"
+dependencies = [
+ "bitflags 2.10.0",
+ "hashbrown 0.16.1",
+ "indoc",
+ "instability",
+ "itertools 0.14.0",
+ "line-clipping",
+ "ratatui-core",
+ "strum",
+ "time",
+ "unicode-segmentation",
+ "unicode-width",
 ]

 [[package]]
 name = "rayon"
-version = "1.10.0"
+version = "1.11.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b418a60154510ca1a002a752ca9714984e21e4241e804d32555251faf8b78ffa"
+checksum = "368f01d005bf8fd9b1206fb6fa653e6c4a81ceb1466406b81792d87c5677a58f"
 dependencies = [
  "either",
  "rayon-core",
@@ -1710,9 +2193,9 @@ dependencies = [

 [[package]]
 name = "rayon-core"
-version = "1.12.1"
+version = "1.13.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1465873a3dfdaa8ae7cb14b4383657caab0b3e8a0aa9ae8e04b044854c8dfce2"
+checksum = "22e18b0f0062d30d4230b2e85ff77fdfe4326feb054b9783a3460d8435c8ab91"
 dependencies = [
  "crossbeam-deque",
  "crossbeam-utils",
@@ -1720,11 +2203,11 @@ dependencies = [

 [[package]]
 name = "redox_syscall"
-version = "0.5.15"
+version = "0.5.18"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7e8af0dde094006011e6a740d4879319439489813bd0bcdc7d821beaeeff48ec"
+checksum = "ed2bf2547551a7053d6fdfafda3f938979645c44812fbfcda098faae3f1a362d"
 dependencies = [
- "bitflags 2.9.1",
+ "bitflags 2.10.0",
 ]

 [[package]]
@@ -1735,27 +2218,27 @@ checksum = "a4e608c6638b9c18977b00b475ac1f28d14e84b27d8d42f70e0bf1e3dec127ac"
 dependencies = [
  "getrandom 0.2.16",
  "libredox",
- "thiserror",
+ "thiserror 2.0.17",
 ]

 [[package]]
 name = "ref-cast"
-version = "1.0.24"
+version = "1.0.25"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4a0ae411dbe946a674d89546582cea4ba2bb8defac896622d6496f14c23ba5cf"
+checksum = "f354300ae66f76f1c85c5f84693f0ce81d747e2c3f21a45fef496d89c960bf7d"
 dependencies = [
  "ref-cast-impl",
 ]

 [[package]]
 name = "ref-cast-impl"
-version = "1.0.24"
+version = "1.0.25"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1165225c21bff1f3bbce98f5a1f889949bc902d3575308cc7b0de30b4f6d27c7"
+checksum = "b7186006dcb21920990093f30e3dea63b7d6e977bf1256be20c3563a5db070da"
 dependencies = [
  "proc-macro2",
  "quote",
- "syn",
+ "syn 2.0.112",
 ]

 [[package]]
@@ -1789,11 +2272,11 @@ checksum = "7a2d987857b319362043e95f5353c0535c1f58eec5336fdfcf626430af7def58"

 [[package]]
 name = "reqwest"
-version = "0.12.22"
+version = "0.13.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "cbc931937e6ca3a06e3b6c0aa7841849b160a90351d6ab467a8b9b9959767531"
+checksum = "04e9018c9d814e5f30cc16a0f03271aeab3571e609612d9fe78c1aa8d11c2f62"
 dependencies = [
- "base64 0.22.1",
+ "base64",
  "bytes",
  "futures-core",
  "http",
@@ -1809,9 +2292,7 @@ dependencies = [
  "quinn",
  "rustls",
  "rustls-pki-types",
- "serde",
- "serde_json",
- "serde_urlencoded",
+ "rustls-platform-verifier",
  "sync_wrapper",
  "tokio",
  "tokio-rustls",
@@ -1822,7 +2303,6 @@ dependencies = [
  "wasm-bindgen",
  "wasm-bindgen-futures",
  "web-sys",
- "webpki-roots",
 ]

 [[package]]
@@ -1841,21 +2321,21 @@ dependencies = [

 [[package]]
 name = "rmcp"
-version = "0.9.1"
+version = "0.12.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "eaa07b85b779d1e1df52dd79f6c6bffbe005b191f07290136cc42a142da3409a"
+checksum = "528d42f8176e6e5e71ea69182b17d1d0a19a6b3b894b564678b74cd7cab13cfa"
 dependencies = [
  "async-trait",
- "base64 0.22.1",
+ "base64",
  "chrono",
  "futures",
- "paste",
+ "pastey",
  "pin-project-lite",
  "rmcp-macros",
  "schemars",
  "serde",
  "serde_json",
- "thiserror",
+ "thiserror 2.0.17",
  "tokio",
  "tokio-util",
  "tracing",
@@ -1863,37 +2343,32 @@ dependencies = [

 [[package]]
 name = "rmcp-macros"
-version = "0.9.1"
+version = "0.12.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0f6fa09933cac0d0204c8a5d647f558425538ed6a0134b1ebb1ae4dc00c96db3"
+checksum = "e3f81daaa494eb8e985c9462f7d6ce1ab05e5299f48aafd76cdd3d8b060e6f59"
 dependencies = [
- "darling 0.21.3",
+ "darling 0.23.0",
  "proc-macro2",
  "quote",
  "serde_json",
- "syn",
+ "syn 2.0.112",
 ]

 [[package]]
 name = "rusqlite"
-version = "0.35.0"
+version = "0.38.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a22715a5d6deef63c637207afbe68d0c72c3f8d0022d7cf9714c442d6157606b"
+checksum = "f1c93dd1c9683b438c392c492109cb702b8090b2bfc8fed6f6e4eb4523f17af3"
 dependencies = [
- "bitflags 2.9.1",
+ "bitflags 2.10.0",
  "fallible-iterator",
  "fallible-streaming-iterator",
  "hashlink",
  "libsqlite3-sys",
  "smallvec",
+ "sqlite-wasm-rs",
 ]

-[[package]]
-name = "rustc-demangle"
-version = "0.1.25"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "989e6739f80c4ad5b13e0fd7fe89531180375b18520cc8c82080e4dc4035b84f"
-
 [[package]]
 name = "rustc-hash"
 version = "1.1.0"
@@ -1907,61 +2382,97 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "357703d41365b4b27c590e3ed91eabb1b663f07c4c084095e60cbed4362dff0d"

 [[package]]
-name = "rustix"
-version = "0.38.44"
+name = "rustc_version"
+version = "0.4.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "fdb5bc1ae2baa591800df16c9ca78619bf65c0488b41b96ccec5d11220d8c154"
+checksum = "cfcb3a22ef46e85b45de6ee7e79d063319ebb6594faafcf1c225ea92ab6e9b92"
 dependencies = [
- "bitflags 2.9.1",
- "errno",
- "libc",
- "linux-raw-sys 0.4.15",
- "windows-sys 0.59.0",
+ "semver",
 ]

 [[package]]
 name = "rustix"
-version = "1.0.8"
+version = "1.1.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "11181fbabf243db407ef8df94a6ce0b2f9a733bd8be4ad02b4eda9602296cac8"
+checksum = "146c9e247ccc180c1f61615433868c99f3de3ae256a30a43b49f67c2d9171f34"
 dependencies = [
- "bitflags 2.9.1",
+ "bitflags 2.10.0",
  "errno",
  "libc",
- "linux-raw-sys 0.9.4",
- "windows-sys 0.60.2",
+ "linux-raw-sys",
+ "windows-sys 0.61.2",
 ]

 [[package]]
 name = "rustls"
-version = "0.23.29"
+version = "0.23.35"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2491382039b29b9b11ff08b76ff6c97cf287671dbb74f0be44bda389fffe9bd1"
+checksum = "533f54bc6a7d4f647e46ad909549eda97bf5afc1585190ef692b4286b198bd8f"
 dependencies = [
+ "aws-lc-rs",
  "once_cell",
- "ring",
  "rustls-pki-types",
  "rustls-webpki",
  "subtle",
  "zeroize",
 ]

+[[package]]
+name = "rustls-native-certs"
+version = "0.8.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "612460d5f7bea540c490b2b6395d8e34a953e52b491accd6c86c8164c5932a63"
+dependencies = [
+ "openssl-probe",
+ "rustls-pki-types",
+ "schannel",
+ "security-framework",
+]
+
 [[package]]
 name = "rustls-pki-types"
-version = "1.12.0"
+version = "1.13.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "229a4a4c221013e7e1f1a043678c5cc39fe5171437c88fb47151a21e6f5b5c79"
+checksum = "21e6f2ab2928ca4291b86736a8bd920a277a399bba1589409d72154ff87c1282"
 dependencies = [
  "web-time",
  "zeroize",
 ]

+[[package]]
+name = "rustls-platform-verifier"
+version = "0.6.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1d99feebc72bae7ab76ba994bb5e121b8d83d910ca40b36e0921f53becc41784"
+dependencies = [
+ "core-foundation",
+ "core-foundation-sys",
+ "jni",
+ "log",
+ "once_cell",
+ "rustls",
+ "rustls-native-certs",
+ "rustls-platform-verifier-android",
+ "rustls-webpki",
+ "security-framework",
+ "security-framework-sys",
+ "webpki-root-certs",
+ "windows-sys 0.61.2",
+]
+
+[[package]]
+name = "rustls-platform-verifier-android"
+version = "0.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f87165f0995f63a9fbeea62b64d10b4d9d8e78ec6d7d51fb2125fda7bb36788f"
+
 [[package]]
 name = "rustls-webpki"
-version = "0.103.4"
+version = "0.103.8"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0a17884ae0c1b773f1ccd2bd4a8c72f16da897310a98b0e84bf349ad5ead92fc"
+checksum = "2ffdfa2f5286e2247234e03f680868ac2815974dc39e00ea15adc445d0aafe52"
 dependencies = [
+ "aws-lc-rs",
  "ring",
  "rustls-pki-types",
  "untrusted",
@@ -1969,15 +2480,15 @@ dependencies = [

 [[package]]
 name = "rustversion"
-version = "1.0.21"
+version = "1.0.22"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8a0d197bd2c9dc6e53b84da9556a69ba4cdfab8619eb41a8bd1cc2027a0f6b1d"
+checksum = "b39cdef0fa800fc44525c84ccb54a029961a8215f9619753635a9c0d2538d46d"

 [[package]]
 name = "ryu"
-version = "1.0.20"
+version = "1.0.22"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "28d3b2b1366ec20994f1fd18c3c594f05c5dd4bc44d8bb0c1c632c8d6829481f"
+checksum = "a50f4cf475b65d88e057964e0e9bb1f0aa9bbb2036dc65c64596b42932536984"

 [[package]]
 name = "same-file"
@@ -1988,11 +2499,20 @@ dependencies = [
  "winapi-util",
 ]

+[[package]]
+name = "schannel"
+version = "0.1.28"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "891d81b926048e76efe18581bf793546b4c0eaf8448d72be8de2bbee5fd166e1"
+dependencies = [
+ "windows-sys 0.61.2",
+]
+
 [[package]]
 name = "schemars"
-version = "1.1.0"
+version = "1.2.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9558e172d4e8533736ba97870c4b2cd63f84b382a3d6eb063da41b91cce17289"
+checksum = "54e910108742c57a770f492731f99be216a52fadd361b06c8fb59d74ccc267d2"
 dependencies = [
  "chrono",
  "dyn-clone",
@@ -2004,14 +2524,14 @@ dependencies = [

 [[package]]
 name = "schemars_derive"
-version = "1.1.0"
+version = "1.2.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "301858a4023d78debd2353c7426dc486001bddc91ae31a76fb1f55132f7e2633"
+checksum = "4908ad288c5035a8eb12cfdf0d49270def0a268ee162b75eeee0f85d155a7c45"
 dependencies = [
  "proc-macro2",
  "quote",
  "serde_derive_internals",
- "syn",
+ "syn 2.0.112",
 ]

 [[package]]
@@ -2020,33 +2540,73 @@ version = "1.2.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49"

+[[package]]
+name = "security-framework"
+version = "3.5.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b3297343eaf830f66ede390ea39da1d462b6b0c1b000f420d0a83f898bbbe6ef"
+dependencies = [
+ "bitflags 2.10.0",
+ "core-foundation",
+ "core-foundation-sys",
+ "libc",
+ "security-framework-sys",
+]
+
+[[package]]
+name = "security-framework-sys"
+version = "2.15.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "cc1f0cbffaac4852523ce30d8bd3c5cdc873501d96ff467ca09b6767bb8cd5c0"
+dependencies = [
+ "core-foundation-sys",
+ "libc",
+]
+
+[[package]]
+name = "semver"
+version = "1.0.27"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d767eb0aabc880b29956c35734170f26ed551a859dbd361d140cdbeca61ab1e2"
+
 [[package]]
 name = "serde"
-version = "1.0.219"
+version = "1.0.228"
 source =
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "5f0e2c6ed6606019b4e29e69dbaba95b11854410e5347d525002456dbbb786b6" +checksum = "9a8e94ea7f378bd32cbbd37198a4a91436180c5bb472411e48b5ec2e2124ae9e" dependencies = [ + "serde_core", "serde_derive", ] [[package]] name = "serde_bytes" -version = "0.11.17" +version = "0.11.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8437fd221bde2d4ca316d61b90e337e9e702b3820b87d63caa9ba6c02bd06d96" +checksum = "a5d440709e79d88e51ac01c4b72fc6cb7314017bb7da9eeff678aa94c10e3ea8" dependencies = [ "serde", + "serde_core", +] + +[[package]] +name = "serde_core" +version = "1.0.228" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "41d385c7d4ca58e59fc732af25c3983b67ac852c1a25000afe1175de458b67ad" +dependencies = [ + "serde_derive", ] [[package]] name = "serde_derive" -version = "1.0.219" +version = "1.0.228" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5b0276cf7f2c73365f7157c8123c21cd9a50fbbd844757af28ca1f5925fc2a00" +checksum = "d540f220d3187173da220f885ab66608367b6574e925011a9353e4badda91d79" dependencies = [ "proc-macro2", "quote", - "syn", + "syn 2.0.112", ] [[package]] @@ -2057,40 +2617,29 @@ checksum = "18d26a20a969b9e3fdf2fc2d9f21eda6c40e2de84c9408bb5d3b05d499aae711" dependencies = [ "proc-macro2", "quote", - "syn", + "syn 2.0.112", ] [[package]] name = "serde_json" -version = "1.0.142" +version = "1.0.148" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "030fedb782600dcbd6f02d479bf0d817ac3bb40d644745b769d6a96bc3afc5a7" +checksum = "3084b546a1dd6289475996f182a22aba973866ea8e8b02c51d9f46b1336a22da" dependencies = [ "itoa", "memchr", - "ryu", "serde", + "serde_core", + "zmij", ] [[package]] name = "serde_spanned" -version = "1.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "40734c41988f7306bb04f0ecf60ec0f3f1caa34290e4e8ea471dcd3346483b83" -dependencies = [ - "serde", -] - -[[package]] -name = "serde_urlencoded" -version = "0.7.1" +version = "1.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d3491c14715ca2294c4d6a88f15e84739788c1d030eed8c110436aafdaa2f3fd" +checksum = "f8bbf91e5a4d6315eee45e704372590b30e260ee83af6639d64557f51b067776" dependencies = [ - "form_urlencoded", - "itoa", - "ryu", - "serde", + "serde_core", ] [[package]] @@ -2122,9 +2671,9 @@ dependencies = [ [[package]] name = "signal-hook-mio" -version = "0.2.4" +version = "0.2.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "34db1a06d485c9142248b7a054f034b349b212551f3dfd19c94d45a754a217cd" +checksum = "b75a19a7a740b25bc7944bdee6172368f988763b744e3d4dfe753f6b4ece40cc" dependencies = [ "libc", "mio", @@ -2133,20 +2682,20 @@ dependencies = [ [[package]] name = "signal-hook-registry" -version = "1.4.5" +version = "1.4.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9203b8055f63a2a00e2f593bb0510367fe707d7ff1e5c872de2f537b339e5410" +checksum = "c4db69cba1110affc0e9f7bcd48bbf87b3f4fc7c61fc9155afd4c469eb3d6c1b" dependencies = [ + "errno", "libc", ] [[package]] name = "simd-json" -version = "0.15.1" +version = "0.17.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c962f626b54771990066e5435ec8331d1462576cd2d1e62f24076ae014f92112" +checksum = "4255126f310d2ba20048db6321c81ab376f6a6735608bf11f0785c41f01f64e3" dependencies = [ - "getrandom 0.3.3", "halfbrown", "ref-cast", "serde", @@ -2181,12 +2730,12 @@ 
checksum = "67b1b7a3b5fe4f1376887184045fcf45c69e92af734b7aaddc05fb777b6fbd03" [[package]] name = "socket2" -version = "0.5.10" +version = "0.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e22376abed350d73dd1cd119b57ffccad95b4e585a7cda43e286245ce23c0678" +checksum = "17129e116933cf371d018bb80ae557e889637989d8638274fb25622827b03881" dependencies = [ "libc", - "windows-sys 0.52.0", + "windows-sys 0.60.2", ] [[package]] @@ -2196,21 +2745,24 @@ dependencies = [ "anyhow", "async-trait", "bincode", + "c2rust-bitfields", "chrono", "chrono-tz", "clap", - "crossterm 0.29.0", + "crossterm", "dashmap", "dirs", "futures", "glob", "iana-time-zone", - "jwalk", + "lasso", + "libmimalloc-sys", + "mimalloc", "notify", "notify-types", "num-format", "parking_lot", - "phf", + "phf 0.13.1", "ratatui", "rayon", "reqwest", @@ -2223,16 +2775,31 @@ dependencies = [ "simd-json", "tempfile", "tiktoken-rs", + "tinyvec", "tokio", "toml", + "walkdir", "xxhash-rust", ] +[[package]] +name = "sqlite-wasm-rs" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "05e98301bf8b0540c7de45ecd760539b9c62f5772aed172f08efba597c11cd5d" +dependencies = [ + "cc", + "hashbrown 0.16.1", + "js-sys", + "thiserror 2.0.17", + "wasm-bindgen", +] + [[package]] name = "stable_deref_trait" -version = "1.2.0" +version = "1.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a8f112729512f8e442d81f95a8a7ddf2b7c6b8a1a6f509a95864142b30cab2d3" +checksum = "6ce2be8dc25455e1f91df71bfa12ad37d7af1092ae736f3a6cd0e37bc7810596" [[package]] name = "static_assertions" @@ -2248,24 +2815,23 @@ checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f" [[package]] name = "strum" -version = "0.26.3" +version = "0.27.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8fec0f0aef304996cf250b31b5a10dee7980c85da9d759361292b8bca5a18f06" +checksum = "af23d6f6c1a224baef9d3f61e287d2761385a5b88fdab4eb4c6f11aeb54c4bcf" dependencies = [ "strum_macros", ] [[package]] name = "strum_macros" -version = "0.26.4" +version = "0.27.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4c6bee85a5a24955dc440386795aa378cd9cf82acd5f764469152d2270e581be" +checksum = "7695ce3845ea4b33927c055a39dc438a45b059f7c1b3d91d38d10355fb8cbca7" dependencies = [ "heck", "proc-macro2", "quote", - "rustversion", - "syn", + "syn 2.0.112", ] [[package]] @@ -2276,9 +2842,20 @@ checksum = "13c2bddecc57b384dee18652358fb23172facb8a2c51ccc10d74c157bdea3292" [[package]] name = "syn" -version = "2.0.104" +version = "1.0.109" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237" +dependencies = [ + "proc-macro2", + "quote", + "unicode-ident", +] + +[[package]] +name = "syn" +version = "2.0.112" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "17b6f705963418cdb9927482fa304bc562ece2fdd4f616084c50b7023b435a40" +checksum = "21f182278bf2d2bcb3c88b1b08a37df029d71ce3d3ae26168e3c653b213b99d4" dependencies = [ "proc-macro2", "quote", @@ -2302,63 +2879,166 @@ checksum = "728a70f3dbaf5bab7f0c4b1ac8d7ae5ea60a4b5549c8a5914361c99147a709d2" dependencies = [ "proc-macro2", "quote", - "syn", + "syn 2.0.112", ] [[package]] name = "tempfile" -version = "3.22.0" +version = "3.24.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "84fa4d11fadde498443cca10fd3ac23c951f0dc59e080e9f4b93d4df4e4eea53" 
+checksum = "655da9c7eb6305c55742045d5a8d2037996d61d8de95806335c7c86ce0f82e9c" dependencies = [ "fastrand", - "getrandom 0.3.3", + "getrandom 0.3.4", "once_cell", - "rustix 1.0.8", - "windows-sys 0.60.2", + "rustix", + "windows-sys 0.61.2", +] + +[[package]] +name = "terminfo" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d4ea810f0692f9f51b382fff5893887bb4580f5fa246fde546e0b13e7fcee662" +dependencies = [ + "fnv", + "nom", + "phf 0.11.3", + "phf_codegen", +] + +[[package]] +name = "termios" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "411c5bf740737c7918b8b1fe232dca4dc9f8e754b8ad5e20966814001ed0ac6b" +dependencies = [ + "libc", +] + +[[package]] +name = "termwiz" +version = "0.23.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4676b37242ccbd1aabf56edb093a4827dc49086c0ffd764a5705899e0f35f8f7" +dependencies = [ + "anyhow", + "base64", + "bitflags 2.10.0", + "fancy-regex 0.11.0", + "filedescriptor", + "finl_unicode", + "fixedbitset", + "hex", + "lazy_static", + "libc", + "log", + "memmem", + "nix", + "num-derive", + "num-traits", + "ordered-float", + "pest", + "pest_derive", + "phf 0.11.3", + "sha2", + "signal-hook", + "siphasher", + "terminfo", + "termios", + "thiserror 1.0.69", + "ucd-trie", + "unicode-segmentation", + "vtparse", + "wezterm-bidi", + "wezterm-blob-leases", + "wezterm-color-types", + "wezterm-dynamic", + "wezterm-input-types", + "winapi", +] + +[[package]] +name = "thiserror" +version = "1.0.69" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6aaf5339b578ea85b50e080feb250a3e8ae8cfcdff9a461c9ec2904bc923f52" +dependencies = [ + "thiserror-impl 1.0.69", ] [[package]] name = "thiserror" -version = "2.0.12" +version = "2.0.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "567b8a2dae586314f7be2a752ec7474332959c6460e02bde30d702a66d488708" +checksum = "f63587ca0f12b72a0600bcba1d40081f830876000bb46dd2337a3051618f4fc8" dependencies = [ - "thiserror-impl", + "thiserror-impl 2.0.17", ] [[package]] name = "thiserror-impl" -version = "2.0.12" +version = "1.0.69" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7f7cf42b4507d8ea322120659672cf1b9dbb93f8f2d4ecfd6e51350ff5b17a1d" +checksum = "4fee6c4efc90059e10f81e6d42c60a18f76588c3d74cb83a0b242a2b6c7504c1" dependencies = [ "proc-macro2", "quote", - "syn", + "syn 2.0.112", +] + +[[package]] +name = "thiserror-impl" +version = "2.0.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3ff15c8ecd7de3849db632e14d18d2571fa09dfc5ed93479bc4485c7a517c913" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.112", ] [[package]] name = "tiktoken-rs" -version = "0.6.0" +version = "0.9.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "44075987ee2486402f0808505dd65692163d243a337fc54363d49afac41087f6" +checksum = "3a19830747d9034cd9da43a60eaa8e552dfda7712424aebf187b7a60126bae0d" dependencies = [ "anyhow", - "base64 0.21.7", + "base64", "bstr", - "fancy-regex", + "fancy-regex 0.13.0", "lazy_static", - "parking_lot", "regex", "rustc-hash 1.1.0", ] +[[package]] +name = "time" +version = "0.3.44" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "91e7d9e3bb61134e77bde20dd4825b97c010155709965fedf0f49bb138e52a9d" +dependencies = [ + "deranged", + "libc", + "num-conv", + "num_threads", + "powerfmt", + "serde", + "time-core", +] + +[[package]] +name 
= "time-core" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "40868e7c1d2f0b8d73e4a8c7f0ff63af4f6d19be117e90bd73eb1d62cf831c6b" + [[package]] name = "tinystr" -version = "0.8.1" +version = "0.8.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5d4f6d1145dcb577acf783d4e601bc1d76a13337bb54e6233add580b07344c8b" +checksum = "42d3e9c45c09de15d06dd8acf5f4e0e399e85927b7f00711024eb7ae10fa4869" dependencies = [ "displaydoc", "zerovec", @@ -2366,9 +3046,9 @@ dependencies = [ [[package]] name = "tinyvec" -version = "1.9.0" +version = "1.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "09b3661f17e86524eccd4371ab0429194e0d7c008abb45f7a7495b1719463c71" +checksum = "bfa5fdc3bce6191a1dbc8c02d5c8bffcf557bafa17c124c5264a458f1b0613fa" dependencies = [ "tinyvec_macros", ] @@ -2381,11 +3061,10 @@ checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" [[package]] name = "tokio" -version = "1.45.1" +version = "1.48.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "75ef51a33ef1da925cea3e4eb122833cb377c61439ca401b770f54902b806779" +checksum = "ff360e02eab121e0bc37a2d3b4d4dc622e6eda3a8e5253d5435ecf5bd4c68408" dependencies = [ - "backtrace", "bytes", "libc", "mio", @@ -2394,25 +3073,25 @@ dependencies = [ "signal-hook-registry", "socket2", "tokio-macros", - "windows-sys 0.52.0", + "windows-sys 0.61.2", ] [[package]] name = "tokio-macros" -version = "2.5.0" +version = "2.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6e06d43f1345a3bcd39f6a56dbb7dcab2ba47e68e8ac134855e7e2bdbaf8cab8" +checksum = "af407857209536a95c8e56f8231ef2c2e2aff839b22e07a1ffcbc617e9db9fa5" dependencies = [ "proc-macro2", "quote", - "syn", + "syn 2.0.112", ] [[package]] name = "tokio-rustls" -version = "0.26.2" +version = "0.26.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8e727b36a1a0e8b74c376ac2211e40c2c8af09fb4013c60d910495810f008e9b" +checksum = "1729aa945f29d91ba541258c8df89027d5792d85a8841fb65e8bf0f4ede4ef61" dependencies = [ "rustls", "tokio", @@ -2433,12 +3112,12 @@ dependencies = [ [[package]] name = "toml" -version = "0.9.2" +version = "0.9.10+spec-1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ed0aee96c12fa71097902e0bb061a5e1ebd766a6636bb605ba401c45c1650eac" +checksum = "0825052159284a1a8b4d6c0c86cbc801f2da5afd2b225fa548c72f2e74002f48" dependencies = [ "indexmap", - "serde", + "serde_core", "serde_spanned", "toml_datetime", "toml_parser", @@ -2448,27 +3127,27 @@ dependencies = [ [[package]] name = "toml_datetime" -version = "0.7.0" +version = "0.7.5+spec-1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bade1c3e902f58d73d3f294cd7f20391c1cb2fbcb643b73566bc773971df91e3" +checksum = "92e1cfed4a3038bc5a127e35a2d360f145e1f4b971b551a2ba5fd7aedf7e1347" dependencies = [ - "serde", + "serde_core", ] [[package]] name = "toml_parser" -version = "1.0.1" +version = "1.0.6+spec-1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "97200572db069e74c512a14117b296ba0a80a30123fbbb5aa1f4a348f639ca30" +checksum = "a3198b4b0a8e11f09dd03e133c0280504d0801269e9afa46362ffde1cbeebf44" dependencies = [ "winnow", ] [[package]] name = "toml_writer" -version = "1.0.2" +version = "1.0.6+spec-1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"fcc842091f2def52017664b53082ecbbeb5c7731092bad69d2c63050401dfd64" +checksum = "ab16f14aed21ee8bfd8ec22513f7287cd4a91aa92e44edfe2c17ddd004e92607" [[package]] name = "tower" @@ -2487,11 +3166,11 @@ dependencies = [ [[package]] name = "tower-http" -version = "0.6.6" +version = "0.6.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "adc82fd73de2a9722ac5da747f12383d2bfdb93591ee6c58486e0097890f05f2" +checksum = "d4e6559d53cc268e5031cd8429d05415bc4cb4aefc4aa5d6cc35fbf5b924a1f8" dependencies = [ - "bitflags 2.9.1", + "bitflags 2.10.0", "bytes", "futures-util", "http", @@ -2517,9 +3196,9 @@ checksum = "8df9b6e13f2d32c91b9bd719c00d1958837bc7dec474d94952798cc8e69eeec3" [[package]] name = "tracing" -version = "0.1.41" +version = "0.1.44" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "784e0ac535deb450455cbfa28a6f0df145ea1bb7ae51b821cf5e7927fdcfbdd0" +checksum = "63e71662fa4b2a2c3a26f570f037eb95bb1f85397f3cd8076caed2f026a6d100" dependencies = [ "pin-project-lite", "tracing-attributes", @@ -2534,14 +3213,14 @@ checksum = "7490cfa5ec963746568740651ac6781f701c9c5ea257c58e057f3ba8cf69e8da" dependencies = [ "proc-macro2", "quote", - "syn", + "syn 2.0.112", ] [[package]] name = "tracing-core" -version = "0.1.34" +version = "0.1.36" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b9d12581f227e93f094d3af2ae690a574abb8a2b9b7a96e7cfe9647b2b617678" +checksum = "db97caf9d906fbde555dd62fa95ddba9eecfd14cb388e4f491a66d74cd5fb79a" dependencies = [ "once_cell", ] @@ -2554,15 +3233,21 @@ checksum = "e421abadd41a4225275504ea4d6566923418b7f05506fbc9c0fe86ba7396114b" [[package]] name = "typenum" -version = "1.18.0" +version = "1.19.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "562d481066bde0658276a35467c4af00bdc6ee726305698a55b86e61d7ad82bb" + +[[package]] +name = "ucd-trie" +version = "0.1.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1dccffe3ce07af9386bfd29e80c0ab1a8205a2fc34e4bcd40364df902cfa8f3f" +checksum = "2896d95c02a80c6d6a5d6e953d479f5ddf2dfdb6a244441010e373ac0fb88971" [[package]] name = "unicode-ident" -version = "1.0.18" +version = "1.0.22" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5a5f39404a5da50712a4c1eecf25e90dd62b613502b7e925fd4e4d19b5c96512" +checksum = "9312f7c4f6ff9069b165498234ce8be658059c6728633667c526e27dc2cf1df5" [[package]] name = "unicode-segmentation" @@ -2572,21 +3257,15 @@ checksum = "f6ccf251212114b54433ec949fd6a7841275f9ada20dddd2f29e9ceea4501493" [[package]] name = "unicode-truncate" -version = "1.1.0" +version = "2.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b3644627a5af5fa321c95b9b235a72fd24cd29c648c2c379431e6628655627bf" +checksum = "8fbf03860ff438702f3910ca5f28f8dac63c1c11e7efb5012b8b175493606330" dependencies = [ - "itertools", + "itertools 0.13.0", "unicode-segmentation", - "unicode-width 0.1.14", + "unicode-width", ] -[[package]] -name = "unicode-width" -version = "0.1.14" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7dd6e30e90baa6f72411720665d41d89b9a3d039dc45b8faea1ddd07f617f6af" - [[package]] name = "unicode-width" version = "0.2.0" @@ -2599,15 +3278,22 @@ version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1" +[[package]] +name = "unty" +version = "0.0.4" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "6d49784317cd0d1ee7ec5c716dd598ec5b4483ea832a2dced265471cc0f690ae" + [[package]] name = "url" -version = "2.5.4" +version = "2.5.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "32f8b686cadd1473f4bd0117a5d28d36b1ade384ea9b5069a1c40aefed7fda60" +checksum = "08bc136a29a3d1758e07a9cca267be308aeebf5cfd5a10f3f67ab2097683ef5b" dependencies = [ "form_urlencoded", "idna", "percent-encoding", + "serde", ] [[package]] @@ -2622,11 +3308,23 @@ version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "06abde3611657adf66d383f00b093d7faecc7fa57071cce2578660c9f1010821" +[[package]] +name = "uuid" +version = "1.19.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e2e054861b4bd027cd373e18e8d8d8e6548085000e41290d95ce0c373a654b4a" +dependencies = [ + "atomic", + "getrandom 0.3.4", + "js-sys", + "wasm-bindgen", +] + [[package]] name = "value-trait" -version = "0.11.0" +version = "0.12.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0508fce11ad19e0aab49ce20b6bec7f8f82902ded31df1c9fc61b90f0eb396b8" +checksum = "8e80f0c733af0720a501b3905d22e2f97662d8eacfe082a75ed7ffb5ab08cb59" dependencies = [ "float-cmp", "halfbrown", @@ -2646,6 +3344,21 @@ version = "0.9.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0b928f33d975fc6ad9f86c8f283853ad26bdd5b10b7f1542aa2fa15e2289105a" +[[package]] +name = "virtue" +version = "0.0.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "051eb1abcf10076295e815102942cc58f9d5e3b4560e46e53c21e8ff6f3af7b1" + +[[package]] +name = "vtparse" +version = "0.6.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6d9b2acfb050df409c972a37d3b8e08cdea3bddb0c09db9d53137e504cfabed0" +dependencies = [ + "utf8parse", +] + [[package]] name = "walkdir" version = "2.5.0" @@ -2672,45 +3385,32 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ccf3ec651a847eb01de73ccad15eb7d99f80485de043efb2f370cd654f4ea44b" [[package]] -name = "wasi" -version = "0.14.2+wasi-0.2.4" +name = "wasip2" +version = "1.0.1+wasi-0.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9683f9a5a998d873c0d21fcbe3c083009670149a8fab228644b8bd36b2c48cb3" +checksum = "0562428422c63773dad2c345a1882263bbf4d65cf3f42e90921f787ef5ad58e7" dependencies = [ - "wit-bindgen-rt", + "wit-bindgen", ] [[package]] name = "wasm-bindgen" -version = "0.2.100" +version = "0.2.106" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1edc8929d7499fc4e8f0be2262a241556cfc54a0bea223790e71446f2aab1ef5" +checksum = "0d759f433fa64a2d763d1340820e46e111a7a5ab75f993d1852d70b03dbb80fd" dependencies = [ "cfg-if", "once_cell", "rustversion", "wasm-bindgen-macro", -] - -[[package]] -name = "wasm-bindgen-backend" -version = "0.2.100" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2f0a0651a5c2bc21487bde11ee802ccaf4c51935d0d3d42a6101f98161700bc6" -dependencies = [ - "bumpalo", - "log", - "proc-macro2", - "quote", - "syn", "wasm-bindgen-shared", ] [[package]] name = "wasm-bindgen-futures" -version = "0.4.50" +version = "0.4.56" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "555d470ec0bc3bb57890405e5d4322cc9ea83cebb085523ced7be4144dac1e61" +checksum = "836d9622d604feee9e5de25ac10e3ea5f2d65b41eac0d9ce72eb5deae707ce7c" dependencies = [ "cfg-if", 
"js-sys", @@ -2721,9 +3421,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro" -version = "0.2.100" +version = "0.2.106" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7fe63fc6d09ed3792bd0897b314f53de8e16568c2b3f7982f468c0bf9bd0b407" +checksum = "48cb0d2638f8baedbc542ed444afc0644a29166f1595371af4fecf8ce1e7eeb3" dependencies = [ "quote", "wasm-bindgen-macro-support", @@ -2731,31 +3431,31 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro-support" -version = "0.2.100" +version = "0.2.106" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8ae87ea40c9f689fc23f209965b6fb8a99ad69aeeb0231408be24920604395de" +checksum = "cefb59d5cd5f92d9dcf80e4683949f15ca4b511f4ac0a6e14d4e1ac60c6ecd40" dependencies = [ + "bumpalo", "proc-macro2", "quote", - "syn", - "wasm-bindgen-backend", + "syn 2.0.112", "wasm-bindgen-shared", ] [[package]] name = "wasm-bindgen-shared" -version = "0.2.100" +version = "0.2.106" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1a05d73b933a847d6cccdda8f838a22ff101ad9bf93e33684f39c1f5f0eece3d" +checksum = "cbc538057e648b67f72a982e708d485b2efa771e1ac05fec311f9f63e5800db4" dependencies = [ "unicode-ident", ] [[package]] name = "web-sys" -version = "0.3.77" +version = "0.3.83" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "33b6dd2ef9186f1f2072e409e99cd22a975331a6b3591b12c764e0e55c60d5d2" +checksum = "9b32828d774c412041098d182a8b38b16ea816958e07cf40eec2bc080ae137ac" dependencies = [ "js-sys", "wasm-bindgen", @@ -2772,14 +3472,86 @@ dependencies = [ ] [[package]] -name = "webpki-roots" -version = "1.0.2" +name = "webpki-root-certs" +version = "1.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7e8983c3ab33d6fb807cfcdad2491c4ea8cbc8ed839181c7dfd9c67c83e261b2" +checksum = "ee3e3b5f5e80bc89f30ce8d0343bf4e5f12341c51f3e26cbeecbc7c85443e85b" dependencies = [ "rustls-pki-types", ] +[[package]] +name = "wezterm-bidi" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0c0a6e355560527dd2d1cf7890652f4f09bb3433b6aadade4c9b5ed76de5f3ec" +dependencies = [ + "log", + "wezterm-dynamic", +] + +[[package]] +name = "wezterm-blob-leases" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "692daff6d93d94e29e4114544ef6d5c942a7ed998b37abdc19b17136ea428eb7" +dependencies = [ + "getrandom 0.3.4", + "mac_address", + "sha2", + "thiserror 1.0.69", + "uuid", +] + +[[package]] +name = "wezterm-color-types" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7de81ef35c9010270d63772bebef2f2d6d1f2d20a983d27505ac850b8c4b4296" +dependencies = [ + "csscolorparser", + "deltae", + "lazy_static", + "wezterm-dynamic", +] + +[[package]] +name = "wezterm-dynamic" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5f2ab60e120fd6eaa68d9567f3226e876684639d22a4219b313ff69ec0ccd5ac" +dependencies = [ + "log", + "ordered-float", + "strsim", + "thiserror 1.0.69", + "wezterm-dynamic-derive", +] + +[[package]] +name = "wezterm-dynamic-derive" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "46c0cf2d539c645b448eaffec9ec494b8b19bd5077d9e58cb1ae7efece8d575b" +dependencies = [ + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "wezterm-input-types" +version = "0.1.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "7012add459f951456ec9d6c7e6fc340b1ce15d6fc9629f8c42853412c029e57e" +dependencies = [ + "bitflags 1.3.2", + "euclid", + "lazy_static", + "serde", + "wezterm-dynamic", +] + [[package]] name = "winapi" version = "0.3.9" @@ -2798,11 +3570,11 @@ checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" [[package]] name = "winapi-util" -version = "0.1.9" +version = "0.1.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cf221c93e13a30d793f7645a0e7762c55d169dbb0a49671918a2319d289b10bb" +checksum = "c2a7b1c03c876122aa43f3020e6c3c3ee5c05081c9a00739faf7503aeba10d22" dependencies = [ - "windows-sys 0.59.0", + "windows-sys 0.61.2", ] [[package]] @@ -2813,9 +3585,9 @@ checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" [[package]] name = "windows-core" -version = "0.61.2" +version = "0.62.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c0fdd3ddb90610c7638aa2b3a3ab2904fb9e5cdbecc643ddb3647212781c4ae3" +checksum = "b8e83a14d34d0623b51dce9581199302a221863196a1dde71a7663a4c2be9deb" dependencies = [ "windows-implement", "windows-interface", @@ -2826,64 +3598,64 @@ dependencies = [ [[package]] name = "windows-implement" -version = "0.60.0" +version = "0.60.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a47fddd13af08290e67f4acabf4b459f647552718f683a7b415d290ac744a836" +checksum = "053e2e040ab57b9dc951b72c264860db7eb3b0200ba345b4e4c3b14f67855ddf" dependencies = [ "proc-macro2", "quote", - "syn", + "syn 2.0.112", ] [[package]] name = "windows-interface" -version = "0.59.1" +version = "0.59.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bd9211b69f8dcdfa817bfd14bf1c97c9188afa36f4750130fcdf3f400eca9fa8" +checksum = "3f316c4a2570ba26bbec722032c4099d8c8bc095efccdc15688708623367e358" dependencies = [ "proc-macro2", "quote", - "syn", + "syn 2.0.112", ] [[package]] name = "windows-link" -version = "0.1.3" +version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5e6ad25900d524eaabdbbb96d20b4311e1e7ae1699af4fb28c17ae66c80d798a" +checksum = "f0805222e57f7521d6a62e36fa9163bc891acd422f971defe97d64e70d0a4fe5" [[package]] name = "windows-result" -version = "0.3.4" +version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "56f42bd332cc6c8eac5af113fc0c1fd6a8fd2aa08a0119358686e5160d0586c6" +checksum = "7781fa89eaf60850ac3d2da7af8e5242a5ea78d1a11c49bf2910bb5a73853eb5" dependencies = [ "windows-link", ] [[package]] name = "windows-strings" -version = "0.4.2" +version = "0.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "56e6c93f3a0c3b36176cb1327a4958a0353d5d166c2a35cb268ace15e91d3b57" +checksum = "7837d08f69c77cf6b07689544538e017c1bfcf57e34b4c0ff58e6c2cd3b37091" dependencies = [ "windows-link", ] [[package]] name = "windows-sys" -version = "0.52.0" +version = "0.45.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d" +checksum = "75283be5efb2831d37ea142365f009c02ec203cd29a3ebecbc093d52315b66d0" dependencies = [ - "windows-targets 0.52.6", + "windows-targets 0.42.2", ] [[package]] name = "windows-sys" -version = "0.59.0" +version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e38bc4d79ed67fd075bcc251a1c39b32a1776bbe92e5bef1f0bf1f8c531853b" 
+checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d" dependencies = [ "windows-targets 0.52.6", ] @@ -2894,7 +3666,31 @@ version = "0.60.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f2f500e4d28234f72040990ec9d39e3a6b950f9f22d3dba18416c35882612bcb" dependencies = [ - "windows-targets 0.53.2", + "windows-targets 0.53.5", +] + +[[package]] +name = "windows-sys" +version = "0.61.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ae137229bcbd6cdf0f7b80a31df61766145077ddf49416a728b02cb3921ff3fc" +dependencies = [ + "windows-link", +] + +[[package]] +name = "windows-targets" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e5180c00cd44c9b1c88adb3693291f1cd93605ded80c250a75d472756b4d071" +dependencies = [ + "windows_aarch64_gnullvm 0.42.2", + "windows_aarch64_msvc 0.42.2", + "windows_i686_gnu 0.42.2", + "windows_i686_msvc 0.42.2", + "windows_x86_64_gnu 0.42.2", + "windows_x86_64_gnullvm 0.42.2", + "windows_x86_64_msvc 0.42.2", ] [[package]] @@ -2915,20 +3711,27 @@ dependencies = [ [[package]] name = "windows-targets" -version = "0.53.2" +version = "0.53.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c66f69fcc9ce11da9966ddb31a40968cad001c5bedeb5c2b82ede4253ab48aef" +checksum = "4945f9f551b88e0d65f3db0bc25c33b8acea4d9e41163edf90dcd0b19f9069f3" dependencies = [ - "windows_aarch64_gnullvm 0.53.0", - "windows_aarch64_msvc 0.53.0", - "windows_i686_gnu 0.53.0", - "windows_i686_gnullvm 0.53.0", - "windows_i686_msvc 0.53.0", - "windows_x86_64_gnu 0.53.0", - "windows_x86_64_gnullvm 0.53.0", - "windows_x86_64_msvc 0.53.0", + "windows-link", + "windows_aarch64_gnullvm 0.53.1", + "windows_aarch64_msvc 0.53.1", + "windows_i686_gnu 0.53.1", + "windows_i686_gnullvm 0.53.1", + "windows_i686_msvc 0.53.1", + "windows_x86_64_gnu 0.53.1", + "windows_x86_64_gnullvm 0.53.1", + "windows_x86_64_msvc 0.53.1", ] +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "597a5118570b68bc08d8d59125332c54f1ba9d9adeedeef5b99b02ba2b0698f8" + [[package]] name = "windows_aarch64_gnullvm" version = "0.52.6" @@ -2937,9 +3740,15 @@ checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3" [[package]] name = "windows_aarch64_gnullvm" -version = "0.53.0" +version = "0.53.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a9d8416fa8b42f5c947f8482c43e7d89e73a173cead56d044f6a56104a6d1b53" + +[[package]] +name = "windows_aarch64_msvc" +version = "0.42.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "86b8d5f90ddd19cb4a147a5fa63ca848db3df085e25fee3cc10b39b6eebae764" +checksum = "e08e8864a60f06ef0d0ff4ba04124db8b0fb3be5776a5cd47641e942e58c4d43" [[package]] name = "windows_aarch64_msvc" @@ -2949,9 +3758,15 @@ checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469" [[package]] name = "windows_aarch64_msvc" -version = "0.53.0" +version = "0.53.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b9d782e804c2f632e395708e99a94275910eb9100b2114651e04744e9b125006" + +[[package]] +name = "windows_i686_gnu" +version = "0.42.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c7651a1f62a11b8cbd5e0d42526e55f2c99886c77e007179efff86c2b137e66c" +checksum = "c61d927d8da41da96a81f029489353e68739737d3beca43145c8afec9a31a84f" [[package]] 
name = "windows_i686_gnu" @@ -2961,9 +3776,9 @@ checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b" [[package]] name = "windows_i686_gnu" -version = "0.53.0" +version = "0.53.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c1dc67659d35f387f5f6c479dc4e28f1d4bb90ddd1a5d3da2e5d97b42d6272c3" +checksum = "960e6da069d81e09becb0ca57a65220ddff016ff2d6af6a223cf372a506593a3" [[package]] name = "windows_i686_gnullvm" @@ -2973,9 +3788,15 @@ checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66" [[package]] name = "windows_i686_gnullvm" -version = "0.53.0" +version = "0.53.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fa7359d10048f68ab8b09fa71c3daccfb0e9b559aed648a8f95469c27057180c" + +[[package]] +name = "windows_i686_msvc" +version = "0.42.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9ce6ccbdedbf6d6354471319e781c0dfef054c81fbc7cf83f338a4296c0cae11" +checksum = "44d840b6ec649f480a41c8d80f9c65108b92d89345dd94027bfe06ac444d1060" [[package]] name = "windows_i686_msvc" @@ -2985,9 +3806,15 @@ checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66" [[package]] name = "windows_i686_msvc" -version = "0.53.0" +version = "0.53.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e7ac75179f18232fe9c285163565a57ef8d3c89254a30685b57d83a38d326c2" + +[[package]] +name = "windows_x86_64_gnu" +version = "0.42.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "581fee95406bb13382d2f65cd4a908ca7b1e4c2f1917f143ba16efe98a589b5d" +checksum = "8de912b8b8feb55c064867cf047dda097f92d51efad5b491dfb98f6bbb70cb36" [[package]] name = "windows_x86_64_gnu" @@ -2997,9 +3824,15 @@ checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78" [[package]] name = "windows_x86_64_gnu" -version = "0.53.0" +version = "0.53.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9c3842cdd74a865a8066ab39c8a7a473c0778a3f29370b5fd6b4b9aa7df4a499" + +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.42.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2e55b5ac9ea33f2fc1716d1742db15574fd6fc8dadc51caab1c16a3d3b4190ba" +checksum = "26d41b46a36d453748aedef1486d5c7a85db22e56aff34643984ea85514e94a3" [[package]] name = "windows_x86_64_gnullvm" @@ -3009,9 +3842,15 @@ checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d" [[package]] name = "windows_x86_64_gnullvm" -version = "0.53.0" +version = "0.53.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0ffa179e2d07eee8ad8f57493436566c7cc30ac536a3379fdf008f47f6bb7ae1" + +[[package]] +name = "windows_x86_64_msvc" +version = "0.42.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0a6e035dd0599267ce1ee132e51c27dd29437f63325753051e71dd9e42406c57" +checksum = "9aec5da331524158c6d1a4ac0ab1541149c0b9505fde06423b02f5ef0106b9f0" [[package]] name = "windows_x86_64_msvc" @@ -3021,30 +3860,27 @@ checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec" [[package]] name = "windows_x86_64_msvc" -version = "0.53.0" +version = "0.53.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "271414315aff87387382ec3d271b52d7ae78726f5d44ac98b4f4030c91880486" +checksum = "d6bbff5f0aada427a1e5a6da5f1f98158182f26556f345ac9e04d36d0ebed650" [[package]] name = "winnow" -version = "0.7.12" 
+version = "0.7.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f3edebf492c8125044983378ecb5766203ad3b4c2f7a922bd7dd207f6d443e95" +checksum = "5a5364e9d77fcdeeaa6062ced926ee3381faa2ee02d3eb83a5c27a8825540829" [[package]] -name = "wit-bindgen-rt" -version = "0.39.0" +name = "wit-bindgen" +version = "0.46.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6f42320e61fe2cfd34354ecb597f86f413484a798ba44a8ca1165c58d42da6c1" -dependencies = [ - "bitflags 2.9.1", -] +checksum = "f17a85883d4e6d00e8a97c586de764dabcc06133f7f1d55dce5cdc070ad7fe59" [[package]] name = "writeable" -version = "0.6.1" +version = "0.6.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ea2f10b9bb0928dfb1b42b65e1f9e36f7f54dbdf08457afefb38afcdec4fa2bb" +checksum = "9edde0db4769d2dc68579893f2306b26c6ecfbe0ef499b013d731b7b9247e0b9" [[package]] name = "xxhash-rust" @@ -3054,11 +3890,10 @@ checksum = "fdd20c5420375476fbd4394763288da7eb0cc0b8c11deed431a91562af7335d3" [[package]] name = "yoke" -version = "0.8.0" +version = "0.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5f41bb01b8226ef4bfd589436a297c53d118f65921786300e427be8d487695cc" +checksum = "72d6e5c6afb84d73944e5cedb052c4680d5657337201555f9f2a16b7406d4954" dependencies = [ - "serde", "stable_deref_trait", "yoke-derive", "zerofrom", @@ -3066,34 +3901,34 @@ dependencies = [ [[package]] name = "yoke-derive" -version = "0.8.0" +version = "0.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "38da3c9736e16c5d3c8c597a9aaa5d1fa565d0532ae05e27c24aa62fb32c0ab6" +checksum = "b659052874eb698efe5b9e8cf382204678a0086ebf46982b79d6ca3182927e5d" dependencies = [ "proc-macro2", "quote", - "syn", + "syn 2.0.112", "synstructure", ] [[package]] name = "zerocopy" -version = "0.8.26" +version = "0.8.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1039dd0d3c310cf05de012d8a39ff557cb0d23087fd44cad61df08fc31907a2f" +checksum = "fd74ec98b9250adb3ca554bdde269adf631549f51d8a8f8f0a10b50f1cb298c3" dependencies = [ "zerocopy-derive", ] [[package]] name = "zerocopy-derive" -version = "0.8.26" +version = "0.8.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9ecf5b4cc5364572d7f4c329661bcc82724222973f2cab6f050a4e5c22f75181" +checksum = "d8a8d209fdf45cf5138cbb5a506f6b52522a25afccc534d1475dad8e31105c6a" dependencies = [ "proc-macro2", "quote", - "syn", + "syn 2.0.112", ] [[package]] @@ -3113,21 +3948,21 @@ checksum = "d71e5d6e06ab090c67b5e44993ec16b72dcbaabc526db883a360057678b48502" dependencies = [ "proc-macro2", "quote", - "syn", + "syn 2.0.112", "synstructure", ] [[package]] name = "zeroize" -version = "1.8.1" +version = "1.8.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ced3678a2879b30306d323f4542626697a464a97c0a07c9aebf7ebca65cd4dde" +checksum = "b97154e67e32c85465826e8bcc1c59429aaaf107c1e4a9e53c8d8ccd5eff88d0" [[package]] name = "zerotrie" -version = "0.2.2" +version = "0.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "36f0bbd478583f79edad978b407914f61b2972f5af6fa089686016be8f9af595" +checksum = "2a59c17a5562d507e4b54960e8569ebee33bee890c70aa3fe7b97e85a9fd7851" dependencies = [ "displaydoc", "yoke", @@ -3136,9 +3971,9 @@ dependencies = [ [[package]] name = "zerovec" -version = "0.11.2" +version = "0.11.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"4a05eb080e015ba39cc9e23bbe5e7fb04d5fb040350f99f34e338d5fdd294428" +checksum = "6c28719294829477f525be0186d13efa9a3c602f7ec202ca9e353d310fb9a002" dependencies = [ "yoke", "zerofrom", @@ -3147,11 +3982,17 @@ dependencies = [ [[package]] name = "zerovec-derive" -version = "0.11.1" +version = "0.11.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5b96237efa0c878c64bd89c436f661be4e46b2f3eff1ebb976f7ef2321d2f58f" +checksum = "eadce39539ca5cb3985590102671f2567e659fca9666581ad3411d59207951f3" dependencies = [ "proc-macro2", "quote", - "syn", + "syn 2.0.112", ] + +[[package]] +name = "zmij" +version = "1.0.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e3280a1b827474fcd5dbef4b35a674deb52ba5c312363aef9135317df179d81b" diff --git a/Cargo.toml b/Cargo.toml index 4694c49..4bbba6e 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -5,49 +5,60 @@ edition = "2024" license = "MIT" authors = ["Piebald LLC "] +[features] +default = ["mimalloc"] +# Use mimalloc allocator for reduced memory usage. Disable for heaptrack profiling: +# cargo build --no-default-features +mimalloc = ["dep:mimalloc", "dep:libmimalloc-sys"] + [dependencies] -serde = { version = "1.0.219", features = ["derive"] } +mimalloc = { version = "0.1.48", default-features = false, features = ["v3"], optional = true } +libmimalloc-sys = { version = "0.1.44", features = ["extended"], optional = true } +serde = { version = "1.0.228", features = ["derive"] } anyhow = "1.0" glob = "0.3" -jwalk = "0.8" +walkdir = "2" xxhash-rust = { version = "0.8", features = ["xxh3"] } chrono = { version = "0.4", features = ["serde"] } tokio = { version = "1", features = ["full"] } -rayon = "1.8" +lasso = { version = "0.7", features = ["multi-threaded"] } futures = "0.3" +rayon = "1.10" dashmap = "6" num-format = "0.4" -ratatui = "0.29" +ratatui = "0.30.0" crossterm = "0.29" -toml = "0.9.2" +toml = "0.9.10" async-trait = "0.1" -notify = "8.1" +notify = "8.2" notify-types = "2.0" sha2 = "0.10" -phf = { version = "0.12.1", features = ["macros"] } -serde_bytes = "0.11.17" -simd-json = { version = "0.15.1", features = ["serde"] } -tiktoken-rs = "0.6" +phf = { version = "0.13.1", features = ["macros"] } +serde_bytes = "0.11.19" +simd-json = { version = "0.17.0", features = ["serde"] } +tiktoken-rs = "0.9.1" parking_lot = "0.12" -bincode = "1.3" +tinyvec = { version = "1.8", features = ["alloc"] } +c2rust-bitfields = "0.18" +bincode = "2.0.1" dirs = "6.0" chrono-tz = "0.10" -rusqlite = { version = "0.35", features = ["bundled"] } +rusqlite = { version = "0.38.0", features = ["bundled"] } iana-time-zone = "0.1" # MCP server support -rmcp = { version = "0.9.1", features = ["server", "macros", "transport-io"] } -schemars = "1.0" +rmcp = { version = "0.12.0", features = ["server", "macros", "transport-io"] } +schemars = "1.2" [dependencies.clap] -version = "4.5.41" +version = "4.5.53" features = ["derive"] [dependencies.reqwest] -version = "0.12.22" +version = "0.13.1" default-features = false -features = ["rustls-tls"] +features = ["rustls"] [dev-dependencies] -tempfile = "3.0" +tempfile = "3.24" diff --git a/src/analyzer.rs b/src/analyzer.rs index f7c3ced..babf7a5 100644 --- a/src/analyzer.rs +++ b/src/analyzer.rs @@ -1,10 +1,19 @@ use anyhow::Result; use async_trait::async_trait; use dashmap::DashMap; -use jwalk::WalkDir; -use std::path::PathBuf; - -use crate::types::{AgenticCodingToolStats, ConversationMessage}; +use rayon::prelude::*; +use std::collections::BTreeMap; +use std::path::{Path, PathBuf}; +use 
std::sync::Arc; +use walkdir::WalkDir; + +use crate::contribution_cache::{ + ContributionCache, ContributionStrategy, MultiSessionContribution, PathHash, + RemovedContribution, SingleMessageContribution, SingleSessionContribution, +}; +use crate::types::{ + AgenticCodingToolStats, AnalyzerStatsView, ConversationMessage, SharedAnalyzerView, +}; /// VSCode GUI forks that might have extensions installed const VSCODE_GUI_FORKS: &[&str] = &[ @@ -88,7 +97,27 @@ pub fn get_vscode_extension_tasks_dirs(extension_id: &str) -> Vec<PathBuf> { dirs } -/// Discover data sources for VSCode extension-based analyzers using jwalk. +fn walk_vscode_extension_tasks(extension_id: &str) -> impl Iterator<Item = WalkDir> { + get_vscode_extension_tasks_dirs(extension_id) + .into_iter() + .map(|tasks_dir| WalkDir::new(tasks_dir).min_depth(2).max_depth(2)) +} + +/// Check if any data sources exist for a VSCode extension-based analyzer. +/// Short-circuits after finding the first match. +pub fn vscode_extension_has_sources(extension_id: &str, target_filename: &str) -> bool { + walk_vscode_extension_tasks(extension_id) + .flat_map(|w| w.into_iter()) + .filter_map(|e| e.ok()) + .any(|e| { + e.file_type().is_file() + && e.path() + .file_name() + .is_some_and(|name| name == target_filename) + }) +} + +/// Discover data sources for VSCode extension-based analyzers. /// /// # Arguments /// * `extension_id` - The VSCode extension ID (e.g., "saoudrizwan.claude-dev") @@ -99,33 +128,24 @@ pub fn discover_vscode_extension_sources( target_filename: &str, return_parent_dir: bool, ) -> Result<Vec<DataSource>> { - let mut sources = Vec::new(); - - for tasks_dir in get_vscode_extension_tasks_dirs(extension_id) { - // Pattern: {task_id}/{target_filename} - for entry in WalkDir::new(&tasks_dir) - .min_depth(2) - .max_depth(2) - .into_iter() - .filter_map(|e| e.ok()) - .filter(|e| { - e.file_type().is_file() - && e.path() - .file_name() - .is_some_and(|name| name == target_filename) - }) - { - let path = if return_parent_dir { + let sources = walk_vscode_extension_tasks(extension_id) + .flat_map(|w| w.into_iter()) + .filter_map(|e| e.ok()) + .filter(|e| { + e.file_type().is_file() + && e.path() + .file_name() + .is_some_and(|name| name == target_filename) + }) + .filter_map(|entry| { + if return_parent_dir { entry.path().parent().map(|p| p.to_path_buf()) } else { - Some(entry.path()) - }; - - if let Some(p) = path { - sources.push(DataSource { path: p }); + Some(entry.into_path()) } - } - } + }) + .map(|path| DataSource { path }) + .collect(); Ok(sources) } @@ -145,47 +165,125 @@ pub trait Analyzer: Send + Sync { /// Get glob patterns for discovering data sources fn get_data_glob_patterns(&self) -> Vec<String>; - /// Discover data sources for this analyzer + /// Discover data sources for this analyzer (returns all sources) fn discover_data_sources(&self) -> Result<Vec<DataSource>>; - /// Parse conversations from data sources into normalized messages - async fn parse_conversations( - &self, - sources: Vec<DataSource>, - ) -> Result<Vec<ConversationMessage>>; - - /// Get complete statistics for this analyzer - async fn get_stats(&self) -> Result<AgenticCodingToolStats>; + /// Parse a single data source into messages. + /// This is the core parsing logic without parallelism decisions. + fn parse_source(&self, source: &DataSource) -> Result<Vec<ConversationMessage>>; - /// Check if this analyzer is available on the current system - fn is_available(&self) -> bool; - - /// Get directories to watch for file changes. + /// Parse multiple data sources in parallel, returning messages grouped by source path. /// - /// Returns the root data directories for this analyzer.
The file watcher will - recursively watch these directories for new, modified, or deleted files. + /// Default: parses all sources in parallel using rayon. + /// Override for shared context loading (e.g., OpenCode loads session data once). + /// Must be called within a rayon threadpool context for parallelism. /// - /// This is important for analyzers with nested directory structures (e.g., - /// `sessions/{id}/file.json`) where new subdirectories need to be detected. - /// Without this, only existing subdirectories would be watched, missing new - /// sessions/projects/tasks. + /// Note: Messages are NOT deduplicated - caller should deduplicate if needed. + fn parse_sources_parallel_with_paths( + &self, + sources: &[DataSource], + ) -> Vec<(PathBuf, Vec<ConversationMessage>)> { + sources + .par_iter() + .filter_map(|source| match self.parse_source(source) { + Ok(msgs) => Some((source.path.clone(), msgs)), + Err(e) => { + eprintln!( + "Failed to parse {} source {:?}: {}", + self.display_name(), + source.path, + e + ); + None + } + }) + .collect() + } + + /// Parse multiple data sources in parallel and deduplicate. /// - /// Default implementation returns empty vec, which falls back to watching - /// parent directories of discovered data sources (legacy behavior). - fn get_watch_directories(&self) -> Vec<PathBuf> { - Vec::new() + /// Default: calls `parse_sources_parallel_with_paths` and deduplicates by `global_hash`. + /// Override for different dedup strategy (e.g., Piebald uses local_hash). + /// Must be called within a rayon threadpool context for parallelism. + fn parse_sources_parallel(&self, sources: &[DataSource]) -> Vec<ConversationMessage> { + let all_messages: Vec<ConversationMessage> = self + .parse_sources_parallel_with_paths(sources) + .into_iter() + .flat_map(|(_, msgs)| msgs) + .collect(); + crate::utils::deduplicate_by_global_hash(all_messages) + } + + /// Get directories to watch for file changes. + /// Returns the root data directories for this analyzer. + fn get_watch_directories(&self) -> Vec<PathBuf>; + + /// Check if a path is a valid data source for this analyzer. + /// Used by file watcher to filter events before processing. + /// Default: returns true for files, false for directories. + fn is_valid_data_path(&self, path: &Path) -> bool { + path.is_file() + } + + /// Check if this analyzer is available (has any data). + /// Default: checks if discover_data_sources returns at least one source. + /// Analyzers can override with optimized versions that stop after finding 1 file. + fn is_available(&self) -> bool { + self.discover_data_sources() + .is_ok_and(|sources| !sources.is_empty()) + } + + /// Returns the contribution caching strategy for this analyzer. + /// - `SingleMessage`: 1 file = 1 message (~40 bytes/file) - e.g., OpenCode + /// - `SingleSession`: 1 file = 1 session (~72 bytes/file) - e.g., Claude Code, Cline + /// - `MultiSession`: 1 file = many sessions (~100+ bytes/file) - e.g., Piebald + fn contribution_strategy(&self) -> ContributionStrategy; + + /// Get stats with pre-discovered sources (avoids double discovery). + /// Default implementation parses sources in parallel via `parse_sources_parallel()`. + /// Override for analyzers with complex cross-file logic (e.g., claude_code).
+ fn get_stats_with_sources(&self, sources: Vec<DataSource>) -> Result<AgenticCodingToolStats> { + let messages = self.parse_sources_parallel(&sources); + + let mut daily_stats = crate::utils::aggregate_by_date(&messages); + daily_stats.retain(|date, _| date != "unknown"); + let num_conversations = daily_stats + .values() + .map(|stats| stats.conversations as u64) + .sum(); + + Ok(AgenticCodingToolStats { + daily_stats, + num_conversations, + messages, + analyzer_name: self.display_name().to_string(), + }) + } + + /// Get complete statistics for this analyzer. + /// Default: discovers sources then calls get_stats_with_sources(). + fn get_stats(&self) -> Result<AgenticCodingToolStats> { + let sources = self.discover_data_sources()?; + self.get_stats_with_sources(sources) } } /// Registry for managing multiple analyzers pub struct AnalyzerRegistry { analyzers: Vec<Box<dyn Analyzer>>, - /// Cached data sources per analyzer (display_name -> sources) - data_source_cache: DashMap<String, Vec<DataSource>>, - /// In-memory message cache for fast incremental updates during file watching. - /// Key: file path, Value: parsed messages from that file. - /// This allows us to reparse only changed files instead of all files. - message_cache: DashMap<PathBuf, Vec<ConversationMessage>>, + /// Unified contribution cache for incremental updates. + /// Strategy-specific storage: SingleMessage (~40B), SingleSession (~72B), MultiSession (~100+B). + contribution_cache: ContributionCache, + /// Cached analyzer views for incremental updates. + /// Key: analyzer display name, Value: shared view with RwLock for in-place mutation. + analyzer_views_cache: DashMap<String, SharedAnalyzerView>, + /// Tracks the order in which analyzers were registered to maintain stable tab ordering. + /// Contains display names in registration order. + analyzer_order: parking_lot::RwLock<Vec<String>>, + /// Tracks files that have been modified since the last upload. + /// Used for incremental uploads - only modified files are parsed for upload. + /// Wrapped in Arc so cloning gives a shared handle for async tasks.
+    dirty_files_for_upload: Arc<DashMap<PathBuf, String>>,
 }
 
 impl Default for AnalyzerRegistry {
@@ -199,54 +297,53 @@ impl AnalyzerRegistry {
     pub fn new() -> Self {
         Self {
             analyzers: Vec::new(),
-            data_source_cache: DashMap::new(),
-            message_cache: DashMap::new(),
+            contribution_cache: ContributionCache::new(),
+            analyzer_views_cache: DashMap::new(),
+            analyzer_order: parking_lot::RwLock::new(Vec::new()),
+            dirty_files_for_upload: Arc::new(DashMap::new()),
         }
     }
 
     /// Register an analyzer
     pub fn register<A: Analyzer + 'static>(&mut self, analyzer: A) {
-        self.analyzers.push(Box::new(analyzer));
-    }
-
-    /// Get or discover data sources for an analyzer (cached)
-    pub fn get_cached_data_sources(&self, analyzer: &dyn Analyzer) -> Result<Vec<DataSource>> {
         let name = analyzer.display_name().to_string();
-
-        // Check cache first
-        if let Some(cached) = self.data_source_cache.get(&name) {
-            return Ok(cached.clone());
-        }
-
-        // Discover and cache
-        let sources = analyzer.discover_data_sources()?;
-        self.data_source_cache.insert(name, sources.clone());
-        Ok(sources)
-    }
-
-    /// Invalidate cache for a specific analyzer
-    pub fn invalidate_cache(&self, analyzer_name: &str) {
-        self.data_source_cache.remove(analyzer_name);
+        self.analyzers.push(Box::new(analyzer));
+        // Track registration order for stable tab ordering in the TUI
+        self.analyzer_order.write().push(name);
     }
 
-    /// Invalidate all caches
+    /// Invalidate all caches (file contributions and analyzer views)
     pub fn invalidate_all_caches(&self) {
-        self.data_source_cache.clear();
+        self.contribution_cache.clear();
+        self.analyzer_views_cache.clear();
     }
 
-    /// Get available analyzers (those that are present on the system)
-    /// Uses cached data sources to check availability, avoiding redundant glob scans
+    /// Get available analyzers (fast check, no source discovery).
+    /// Returns analyzers that have at least one data source on the system.
     pub fn available_analyzers(&self) -> Vec<&dyn Analyzer> {
         self.analyzers
             .iter()
-            .filter(|a| {
-                self.get_cached_data_sources(a.as_ref())
-                    .is_ok_and(|sources| !sources.is_empty())
-            })
+            .filter(|a| a.is_available())
             .map(|a| a.as_ref())
             .collect()
     }
 
+    /// Get available analyzers with their discovered data sources.
+    /// Returns analyzers that have at least one data source on the system.
+    /// Sources are discovered once and returned for callers to use directly.
+    pub fn available_analyzers_with_sources(&self) -> Vec<(&dyn Analyzer, Vec<DataSource>)> {
+        self.analyzers
+            .iter()
+            .filter_map(|a| {
+                let sources = a.discover_data_sources().ok()?;
+                if sources.is_empty() {
+                    return None;
+                }
+                Some((a.as_ref(), sources))
+            })
+            .collect()
+    }
+
     /// Get analyzer by display name
     pub fn get_analyzer_by_display_name(&self, display_name: &str) -> Option<&dyn Analyzer> {
         self.analyzers
@@ -255,39 +352,37 @@ impl AnalyzerRegistry {
             .map(|a| a.as_ref())
     }
 
-    /// Load stats from all available analyzers in parallel.
-    /// Also populates the message cache for fast incremental updates during watching.
-    pub async fn load_all_stats(&self) -> Result<crate::types::MultiAnalyzerStats> {
-        use futures::future::join_all;
-
-        let available_analyzers = self.available_analyzers();
+    /// Load stats from all available analyzers in parallel using a scoped threadpool.
+    /// Creates a temporary rayon threadpool that is dropped after use, releasing memory.
+    /// Use this when you need full stats but aren't already inside a rayon context.
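+    ///
+    /// A minimal sketch of the intended call pattern (`registry` is assumed to be
+    /// an already-populated `AnalyzerRegistry`):
+    ///
+    /// ```ignore
+    /// // Spins up a temporary rayon pool, loads stats inside it, then drops the pool.
+    /// let stats = registry.load_all_stats_parallel_scoped()?;
+    /// for s in &stats.analyzer_stats {
+    ///     println!("{}: {} conversations", s.analyzer_name, s.num_conversations);
+    /// }
+    /// ```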
+    pub fn load_all_stats_parallel_scoped(&self) -> Result<crate::types::MultiAnalyzerStats> {
+        let pool = rayon::ThreadPoolBuilder::new()
+            .build()
+            .expect("Failed to create rayon threadpool");
+        pool.install(|| self.load_all_stats_parallel())
+        // The pool is dropped here, releasing its threads
+    }
 
-        // Create futures for all analyzers - they'll run concurrently
-        let futures: Vec<_> = available_analyzers
-            .into_iter()
-            .map(|analyzer| async move {
-                let name = analyzer.display_name().to_string();
-                let sources = analyzer.discover_data_sources().ok();
-                let result = analyzer.get_stats().await;
-                (name, sources, result)
-            })
+    /// Load stats from all available analyzers in parallel.
+    /// Used for uploads - returns full stats with messages.
+    /// Must be called within a rayon threadpool context for parallelism.
+    pub fn load_all_stats_parallel(&self) -> Result<crate::types::MultiAnalyzerStats> {
+        let available = self.available_analyzers_with_sources();
+
+        // Parse all analyzers in parallel using rayon
+        let results: Vec<_> = available
+            .into_par_iter()
+            .map(|(analyzer, sources)| analyzer.get_stats_with_sources(sources))
             .collect();
 
-        // Run all analyzers in parallel
-        let results = join_all(futures).await;
-
         let mut all_stats = Vec::new();
-        for (name, sources, result) in results {
+        for result in results {
             match result {
                 Ok(stats) => {
-                    // Populate message cache: store messages keyed by file path
-                    if let Some(sources) = sources {
-                        self.populate_message_cache(&name, &sources, &stats.messages);
-                    }
                     all_stats.push(stats);
                 }
                 Err(e) => {
-                    eprintln!("⚠️ Error analyzing {} data: {}", name, e);
+                    eprintln!("⚠️ Error analyzing data: {}", e);
                }
             }
         }
@@ -297,147 +392,393 @@ impl AnalyzerRegistry {
         })
     }
 
-    /// Populate the message cache from parsed messages.
-    /// Groups messages by their source file using conversation_hash matching.
-    fn populate_message_cache(
-        &self,
-        _analyzer_name: &str,
-        sources: &[DataSource],
-        messages: &[crate::types::ConversationMessage],
-    ) {
-        use crate::utils::hash_text;
+    /// Load view-only stats using rayon for parallel file reads.
+    /// Called once at startup. Uses the rayon threadpool for parallel I/O operations.
+    /// Populates the file contribution cache for true incremental updates.
+    /// Must be called within a rayon threadpool context for parallelism.
+    pub fn load_all_stats_views_parallel(&self) -> Result<crate::types::MultiAnalyzerStatsView> {
+        // Contribution cache variants based on analyzer strategy
+        enum CachedContributions {
+            SingleMessage(Vec<(PathHash, SingleMessageContribution)>),
+            SingleSession(Vec<(PathHash, SingleSessionContribution)>),
+            MultiSession(Vec<(PathHash, MultiSessionContribution)>),
+        }
 
-        // Create a map of conversation_hash -> file_path
-        let hash_to_path: std::collections::HashMap<String, PathBuf> = sources
-            .iter()
-            .map(|s| (hash_text(&s.path.to_string_lossy()), s.path.clone()))
+        // Get available analyzers with their sources (single discovery)
+        let analyzer_data: Vec<_> = self
+            .available_analyzers_with_sources()
+            .into_iter()
+            .map(|(a, sources)| {
+                let strategy = a.contribution_strategy();
+                (a, a.display_name().to_string(), sources, strategy)
+            })
             .collect();
 
-        // Group messages by their source file
-        for msg in messages {
-            if let Some(path) = hash_to_path.get(&msg.conversation_hash) {
-                self.message_cache
-                    .entry(path.clone())
-                    .or_default()
-                    .push(msg.clone());
+        // Parse all analyzers in parallel using rayon
+        let all_results: Vec<_> = analyzer_data
+            .into_par_iter()
+            .map(|(analyzer, name, sources, strategy)| {
+                let analyzer_name_arc: Arc<str> = Arc::from(name.as_str());
+
+                // Parse sources with the path association preserved.
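+                // Illustrative shape of `grouped` (hypothetical paths, not real data):
+                //
+                //   [
+                //       (PathBuf::from(".../sessions/a.jsonl"), vec![msg_a1, msg_a2]),
+                //       (PathBuf::from(".../sessions/b.jsonl"), vec![msg_b1]),
+                //   ]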
+                let grouped = analyzer.parse_sources_parallel_with_paths(&sources);
+
+                // Compute contributions per source based on the strategy
+                let (contributions, all_messages): (CachedContributions, Vec<Vec<ConversationMessage>>) =
+                    match strategy {
+                        ContributionStrategy::SingleMessage => {
+                            let (contribs, msgs): (Vec<_>, Vec<_>) = grouped
+                                .into_par_iter()
+                                .map(|(path, msgs)| {
+                                    let path_hash = PathHash::new(&path);
+                                    let contribution = msgs
+                                        .first()
+                                        .map(SingleMessageContribution::from_message)
+                                        .unwrap_or_default();
+                                    ((path_hash, contribution), msgs)
+                                })
+                                .unzip();
+                            (CachedContributions::SingleMessage(contribs), msgs)
+                        }
+                        ContributionStrategy::SingleSession => {
+                            let (contribs, msgs): (Vec<_>, Vec<_>) = grouped
+                                .into_par_iter()
+                                .map(|(path, msgs)| {
+                                    let path_hash = PathHash::new(&path);
+                                    let contribution =
+                                        SingleSessionContribution::from_messages(&msgs);
+                                    ((path_hash, contribution), msgs)
+                                })
+                                .unzip();
+                            (CachedContributions::SingleSession(contribs), msgs)
+                        }
+                        ContributionStrategy::MultiSession => {
+                            let (contribs, msgs): (Vec<_>, Vec<_>) = grouped
+                                .into_par_iter()
+                                .map(|(path, msgs)| {
+                                    let path_hash = PathHash::new(&path);
+                                    let contribution = MultiSessionContribution::from_messages(
+                                        &msgs,
+                                        Arc::clone(&analyzer_name_arc),
+                                    );
+                                    ((path_hash, contribution), msgs)
+                                })
+                                .unzip();
+                            (CachedContributions::MultiSession(contribs), msgs)
+                        }
+                    };
+
+                let all_messages: Vec<_> = all_messages.into_iter().flatten().collect();
+
+                // Deduplicate messages across sources
+                let messages = crate::utils::deduplicate_by_global_hash(all_messages);
+
+                // Aggregate stats
+                let mut daily_stats = crate::utils::aggregate_by_date(&messages);
+                daily_stats.retain(|date, _| date != "unknown");
+                let num_conversations = daily_stats
+                    .values()
+                    .map(|stats| stats.conversations as u64)
+                    .sum();
+
+                let stats = AgenticCodingToolStats {
+                    daily_stats,
+                    num_conversations,
+                    messages,
+                    analyzer_name: name.clone(),
+                };
+
+                (
+                    name,
+                    contributions,
+                    Ok(stats) as Result<AgenticCodingToolStats>,
+                )
+            })
+            .collect();
+
+        // Build views from the results and cache contributions
+        let mut all_views = Vec::new();
+        for (name, contributions, result) in all_results {
+            match result {
+                Ok(stats) => {
+                    // Cache file contributions based on type
+                    match contributions {
+                        CachedContributions::SingleMessage(contribs) => {
+                            for (path_hash, contribution) in contribs {
+                                self.contribution_cache
+                                    .insert_single_message(path_hash, contribution);
+                            }
+                        }
+                        CachedContributions::SingleSession(contribs) => {
+                            for (path_hash, contribution) in contribs {
+                                self.contribution_cache
+                                    .insert_single_session(path_hash, contribution);
+                            }
+                        }
+                        CachedContributions::MultiSession(contribs) => {
+                            for (path_hash, contribution) in contribs {
+                                self.contribution_cache
+                                    .insert_multi_session(path_hash, contribution);
+                            }
+                        }
+                    }
+                    // Convert to a view (drops messages)
+                    let view = stats.into_view();
+                    // Cache the view for incremental updates
+                    self.analyzer_views_cache.insert(name, view.clone());
+                    all_views.push(view);
+                }
+                Err(e) => {
+                    eprintln!("⚠️ Error analyzing {} data: {}", name, e);
+                }
+            }
+        }
+
+        // Shrink caches after the bulk insertion
+        self.contribution_cache.shrink_to_fit();
+
+        Ok(crate::types::MultiAnalyzerStatsView {
+            analyzer_stats: all_views,
+        })
     }
 
-    /// Reload stats for a single file change (incremental update).
-    /// Much faster than reparsing all files - only reparses the changed file.
-    pub async fn reload_file(
+    /// Reload stats for a single file change using true incremental update.
+    /// An O(1) update: only the changed file is reparsed; its old contribution is
+    /// subtracted and the new one added. No cloning is needed thanks to the RwLock.
+    /// Uses sequential parsing (no threadpool) since it's just one file.
+    pub fn reload_file_incremental(
         &self,
         analyzer_name: &str,
         changed_path: &std::path::Path,
-    ) -> Result<AgenticCodingToolStats> {
+    ) -> Result<()> {
         let analyzer = self
             .get_analyzer_by_display_name(analyzer_name)
             .ok_or_else(|| anyhow::anyhow!("Analyzer not found: {}", analyzer_name))?;
 
-        // Parse just the changed file
+        // Skip invalid paths (directories, wrong file types, etc.)
+        if !analyzer.is_valid_data_path(changed_path) {
+            return Ok(());
+        }
+
+        // Mark the file as dirty for incremental upload (only for valid data paths)
+        self.mark_file_dirty(analyzer_name, changed_path);
+
+        // Create the Arc once for this update
+        let analyzer_name_arc: Arc<str> = Arc::from(analyzer_name);
+
+        // Hash the path for cache lookup (no allocation)
+        let path_hash = PathHash::new(changed_path);
+
+        // Get the contribution strategy for this analyzer
+        let strategy = analyzer.contribution_strategy();
+
+        // Parse just the changed file (sequential; no threadpool needed for a single file)
         let source = DataSource {
             path: changed_path.to_path_buf(),
         };
-        let new_messages = analyzer.parse_conversations(vec![source]).await?;
+        let new_messages = match analyzer.parse_source(&source) {
+            Ok(msgs) => crate::utils::deduplicate_by_global_hash(msgs),
+            Err(e) => {
+                eprintln!(
+                    "Failed to parse {} source {:?}: {}",
+                    analyzer.display_name(),
+                    source.path,
+                    e
+                );
+                Vec::new()
+            }
+        };
 
-        // Update the message cache for this file
-        self.message_cache
-            .insert(changed_path.to_path_buf(), new_messages);
+        // Get or create the cached view for this analyzer
+        let shared_view = self
+            .analyzer_views_cache
+            .entry(analyzer_name.to_string())
+            .or_insert_with(|| {
+                Arc::new(parking_lot::RwLock::new(AnalyzerStatsView {
+                    daily_stats: BTreeMap::new(),
+                    session_aggregates: Vec::new(),
+                    num_conversations: 0,
+                    analyzer_name: Arc::clone(&analyzer_name_arc),
+                }))
+            })
+            .clone();
 
-        // Rebuild stats from all cached messages for this analyzer
-        self.rebuild_stats_from_cache(analyzer_name, analyzer).await
-    }
+        match strategy {
+            ContributionStrategy::SingleMessage => {
+                let old_contribution = self.contribution_cache.get_single_message(&path_hash);
 
-    /// Remove a file from the message cache (for file deletion events).
-    pub fn remove_file_from_cache(&self, path: &std::path::Path) {
-        self.message_cache.remove(path);
-    }
+                let new_contribution = new_messages
+                    .first()
+                    .map(SingleMessageContribution::from_message)
+                    .unwrap_or_default();
 
-    /// Rebuild stats from the message cache for a specific analyzer.
-    async fn rebuild_stats_from_cache(
-        &self,
-        analyzer_name: &str,
-        analyzer: &dyn Analyzer,
-    ) -> Result<AgenticCodingToolStats> {
-        // Get all sources for this analyzer
-        let sources = self.get_cached_data_sources(analyzer)?;
-
-        // Collect all cached messages for files belonging to this analyzer
-        let mut all_messages = Vec::new();
-        for source in &sources {
-            if let Some(messages) = self.message_cache.get(&source.path) {
-                all_messages.extend(messages.clone());
+                self.contribution_cache
+                    .insert_single_message(path_hash, new_contribution);
+
+                let mut view = shared_view.write();
+                if let Some(old) = old_contribution {
+                    view.subtract_single_message_contribution(&old);
+                }
+                view.add_single_message_contribution(&new_contribution);
             }
-        }
+            ContributionStrategy::SingleSession => {
+                let old_contribution = self.contribution_cache.get_single_session(&path_hash);
 
-        // Deduplicate messages
-        let messages = crate::analyzers::deduplicate_messages(all_messages);
+                let new_contribution = SingleSessionContribution::from_messages(&new_messages);
 
-        // Aggregate by date
-        let mut daily_stats = crate::utils::aggregate_by_date(&messages);
-        daily_stats.retain(|date, _| date != "unknown");
+                self.contribution_cache
+                    .insert_single_session(path_hash, new_contribution.clone());
 
-        let num_conversations = daily_stats
-            .values()
-            .map(|stats| stats.conversations as u64)
-            .sum();
+                let mut view = shared_view.write();
+                if let Some(old) = old_contribution {
+                    view.subtract_single_session_contribution(&old);
+                }
+                view.add_single_session_contribution(&new_contribution);
+            }
+            ContributionStrategy::MultiSession => {
+                let old_contribution = self.contribution_cache.get_multi_session(&path_hash);
 
-        Ok(crate::types::AgenticCodingToolStats {
-            daily_stats,
-            num_conversations,
-            messages,
-            analyzer_name: analyzer_name.to_string(),
-        })
+                let new_contribution = MultiSessionContribution::from_messages(
+                    &new_messages,
+                    Arc::clone(&analyzer_name_arc),
+                );
+
+                self.contribution_cache
+                    .insert_multi_session(path_hash, new_contribution.clone());
+
+                let mut view = shared_view.write();
+                if let Some(old) = old_contribution {
+                    view.subtract_multi_session_contribution(&old);
+                }
+                view.add_multi_session_contribution(&new_contribution);
+            }
+        }
+
+        Ok(())
     }
 
-    /// Check if the message cache is populated for an analyzer.
-    pub fn has_cached_messages(&self, analyzer_name: &str) -> bool {
-        if let Some(analyzer) = self.get_analyzer_by_display_name(analyzer_name)
-            && let Ok(sources) = self.get_cached_data_sources(analyzer)
-        {
-            return sources
-                .iter()
-                .any(|s| self.message_cache.contains_key(&s.path));
+    /// Remove a file from the cache and update the view (for file deletion events).
+    /// Returns true if the file was found and removed.
+    /// Also marks the file as dirty for upload if it was in the cache.
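+    ///
+    /// Sketch of the intended watcher flow (`deleted_path` and the "Claude Code"
+    /// display name are hypothetical):
+    ///
+    /// ```ignore
+    /// // On a deletion event, drop the file's cached contribution and
+    /// // subtract it from the analyzer's view in O(1).
+    /// if registry.remove_file_from_cache("Claude Code", &deleted_path) {
+    ///     // The view was already updated in place; nothing to reload.
+    /// }
+    /// ```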
+    pub fn remove_file_from_cache(&self, analyzer_name: &str, path: &std::path::Path) -> bool {
+        let path_hash = PathHash::new(path);
+
+        // Try to remove from any cache and update the view accordingly
+        if let Some(removed) = self.contribution_cache.remove_any(&path_hash) {
+            if let Some(shared_view) = self.analyzer_views_cache.get(analyzer_name) {
+                let mut view = shared_view.write();
+                match removed {
+                    RemovedContribution::SingleMessage(old) => {
+                        view.subtract_single_message_contribution(&old);
+                    }
+                    RemovedContribution::SingleSession(old) => {
+                        view.subtract_single_session_contribution(&old);
+                    }
+                    RemovedContribution::MultiSession(old) => {
+                        view.subtract_multi_session_contribution(&old);
+                    }
+                }
+            }
+            true
+        } else {
+            false
         }
-        false
+    }
+
+    /// Check if the contribution cache is populated for an analyzer.
+    pub fn has_cached_contributions(&self, analyzer_name: &str) -> bool {
+        self.analyzer_views_cache.contains_key(analyzer_name)
+    }
+
+    /// Get the cached view for an analyzer.
+    pub fn get_cached_view(&self, analyzer_name: &str) -> Option<SharedAnalyzerView> {
+        self.analyzer_views_cache
+            .get(analyzer_name)
+            .map(|r| r.clone())
+    }
+
+    /// Get all cached views as a Vec, for building MultiAnalyzerStatsView.
+    /// Returns SharedAnalyzerView clones (cheap Arc pointer copies).
+    /// Views are returned in registration order for stable tab ordering in the TUI.
+    pub fn get_all_cached_views(&self) -> Vec<SharedAnalyzerView> {
+        let order = self.analyzer_order.read();
+        order
+            .iter()
+            .filter_map(|name| self.analyzer_views_cache.get(name).map(|v| v.clone()))
+            .collect()
+    }
+
+    /// Update the cache with a new view for an analyzer.
+    /// Used when doing a full reload (not incremental).
+    pub fn update_cached_view(&self, analyzer_name: &str, view: SharedAnalyzerView) {
+        self.analyzer_views_cache
+            .insert(analyzer_name.to_string(), view);
     }
 
     /// Get a mapping of data directories to analyzer names for file watching.
-    ///
-    /// Prefers explicit watch directories from `get_watch_directories()` when available,
-    /// which allows detecting new subdirectories (sessions/projects/tasks).
-    /// Falls back to parent directories of data sources for backward compatibility.
+    /// Uses explicit watch directories from `get_watch_directories()`.
     pub fn get_directory_to_analyzer_mapping(&self) -> std::collections::HashMap<PathBuf, String> {
         let mut dir_to_analyzer = std::collections::HashMap::new();
 
         for analyzer in self.available_analyzers() {
-            let watch_dirs = analyzer.get_watch_directories();
-
-            if !watch_dirs.is_empty() {
-                // Use explicit watch directories (preferred - catches new subdirectories)
-                for dir in watch_dirs {
-                    if dir.exists() {
-                        dir_to_analyzer.insert(dir, analyzer.display_name().to_string());
-                    }
-                }
-            } else {
-                // Fallback: derive from data sources (legacy behavior)
-                if let Ok(sources) = self.get_cached_data_sources(analyzer) {
-                    for source in sources {
-                        if let Some(parent) = source.path.parent()
-                            && parent.exists()
-                        {
-                            dir_to_analyzer
-                                .insert(parent.to_path_buf(), analyzer.display_name().to_string());
-                        }
-                    }
+            for dir in analyzer.get_watch_directories() {
+                if dir.exists() {
+                    dir_to_analyzer.insert(dir, analyzer.display_name().to_string());
                 }
             }
         }
 
         dir_to_analyzer
     }
+
+    /// Mark a file as dirty for the next upload (the file has been modified).
+    pub fn mark_file_dirty(&self, analyzer_name: &str, path: &Path) {
+        self.dirty_files_for_upload
+            .insert(path.to_path_buf(), analyzer_name.to_string());
+    }
+
+    /// Returns a shared handle to dirty files for use in async tasks.
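+    ///
+    /// A hedged usage sketch (assumes a tokio runtime; the upload call is a
+    /// placeholder, not an API in this module):
+    ///
+    /// ```ignore
+    /// let dirty = registry.dirty_files_handle();
+    /// tokio::spawn(async move {
+    ///     // The Arc keeps the map alive inside the task; clear after a
+    ///     // successful upload so the next cycle starts clean.
+    ///     // upload(&dirty).await?;
+    ///     dirty.clear();
+    /// });
+    /// ```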
+    pub fn dirty_files_handle(&self) -> Arc<DashMap<PathBuf, String>> {
+        Arc::clone(&self.dirty_files_for_upload)
+    }
+
+    /// Check if we have any dirty files tracked.
+    pub fn has_dirty_files(&self) -> bool {
+        !self.dirty_files_for_upload.is_empty()
+    }
+
+    /// Load messages from dirty files for an incremental upload.
+    /// Returns messages filtered to only those created since `last_upload_timestamp`.
+    /// Returns an empty vec if no dirty files are tracked.
+    pub fn load_messages_for_upload(
+        &self,
+        last_upload_timestamp: i64,
+    ) -> Result<Vec<ConversationMessage>> {
+        if self.dirty_files_for_upload.is_empty() {
+            return Ok(Vec::new());
+        }
+
+        // Parse dirty files sequentially (typically only 1-2 files)
+        let mut all_messages = Vec::with_capacity(4);
+        for entry in self.dirty_files_for_upload.iter() {
+            let (path, analyzer_name) = entry.pair();
+            if let Some(analyzer) = self.get_analyzer_by_display_name(analyzer_name) {
+                let source = DataSource { path: path.clone() };
+                if let Ok(msgs) = analyzer.parse_source(&source) {
+                    all_messages.extend(msgs);
+                }
+            }
+        }
+        all_messages = crate::utils::deduplicate_by_global_hash(all_messages);
+
+        // Filter by timestamp
+        let messages_later_than: Vec<_> = all_messages
+            .into_iter()
+            .filter(|msg| msg.date.timestamp_millis() >= last_upload_timestamp)
+            .collect();
+
+        Ok(messages_later_than)
+    }
 }
 
 #[cfg(test)]
@@ -477,14 +818,23 @@ mod tests {
             .collect())
         }
 
-        async fn parse_conversations(
+        fn parse_source(&self, _source: &DataSource) -> Result<Vec<ConversationMessage>> {
+            Ok(Vec::new())
+        }
+
+        fn get_stats_with_sources(
             &self,
             _sources: Vec<DataSource>,
-        ) -> Result<Vec<ConversationMessage>> {
-            Ok(Vec::new())
+        ) -> Result<AgenticCodingToolStats> {
+            if self.fail_stats {
+                anyhow::bail!("stats failed");
+            }
+            self.stats
+                .clone()
+                .ok_or_else(|| anyhow::anyhow!("no stats"))
         }
 
-        async fn get_stats(&self) -> Result<AgenticCodingToolStats> {
+        fn get_stats(&self) -> Result<AgenticCodingToolStats> {
            if self.fail_stats {
                anyhow::bail!("stats failed");
            }
@@ -496,6 +846,18 @@ mod tests {
         fn is_available(&self) -> bool {
             self.available
         }
+
+        fn get_watch_directories(&self) -> Vec<PathBuf> {
+            // Return parent directories of sources for testing
+            self.sources
+                .iter()
+                .filter_map(|p| p.parent().map(|parent| parent.to_path_buf()))
+                .collect()
+        }
+
+        fn contribution_strategy(&self) -> ContributionStrategy {
+            ContributionStrategy::SingleSession
+        }
     }
 
     fn sample_stats(name: &str) -> AgenticCodingToolStats {
@@ -568,7 +930,7 @@ mod tests {
             .expect("analyzer 'ok'");
         assert_eq!(by_name.display_name(), "ok");
 
-        let stats = registry.load_all_stats().await.expect("load stats");
+        let stats = registry.load_all_stats_parallel().expect("load stats");
 
         // Only the successful analyzer should contribute stats.
         assert_eq!(stats.analyzer_stats.len(), 1);
         assert_eq!(stats.analyzer_stats[0].analyzer_name, "ok");
@@ -626,14 +988,11 @@ mod tests {
             .collect())
         }
 
-        async fn parse_conversations(
-            &self,
-            _sources: Vec<DataSource>,
-        ) -> Result<Vec<ConversationMessage>> {
+        fn parse_source(&self, _source: &DataSource) -> Result<Vec<ConversationMessage>> {
             Ok(Vec::new())
         }
 
-        async fn get_stats(&self) -> Result<AgenticCodingToolStats> {
+        fn get_stats(&self) -> Result<AgenticCodingToolStats> {
             Ok(AgenticCodingToolStats {
                 daily_stats: BTreeMap::new(),
                 num_conversations: 0,
@@ -649,6 +1008,10 @@ mod tests {
         fn get_watch_directories(&self) -> Vec<PathBuf> {
             self.watch_dirs.clone()
         }
+
+        fn contribution_strategy(&self) -> ContributionStrategy {
+            ContributionStrategy::SingleSession
+        }
     }
 
     #[tokio::test]
@@ -694,35 +1057,6 @@ mod tests {
         );
     }
 
-    #[tokio::test]
-    async fn registry_falls_back_to_parent_dirs_when_no_watch_dirs() {
-        use std::fs;
-
-        // Without explicit watch_dirs, should fall back to parent directories
-        let temp_dir = tempfile::tempdir().expect("tempdir");
-        let base = temp_dir.path().join("data").join("files");
-        fs::create_dir_all(&base).expect("mkdirs");
-        let file_path = base.join("data.json");
-
-        let mut registry = AnalyzerRegistry::new();
-        let analyzer = TestAnalyzerWithWatchDirs {
-            name: "fallback",
-            sources: vec![file_path.clone()],
-            watch_dirs: vec![], // Empty = use legacy behavior
-        };
-
-        registry.register(analyzer);
-
-        let mapping = registry.get_directory_to_analyzer_mapping();
-
-        // Should fall back to parent directory of the source file
-        assert_eq!(
-            mapping.get(&base).map(String::as_str),
-            Some("fallback"),
-            "Should fall back to watching parent directory when watch_dirs is empty"
-        );
-    }
-
     // =========================================================================
     // DISCOVER_VSCODE_EXTENSION_SOURCES TESTS
     // =========================================================================
@@ -758,4 +1092,176 @@ mod tests {
         assert!(result1.is_ok());
         assert!(result2.is_ok());
     }
+
+    /// Test that analyzer tab order remains stable across initial load and updates.
+    /// Regression test for a bug where DashMap iteration order caused tabs to jump.
+    #[tokio::test]
+    async fn test_analyzer_order_stable_across_updates() {
+        let mut registry = AnalyzerRegistry::new();
+        let expected_order = vec!["analyzer-a", "analyzer-b", "analyzer-c"];
+
+        // Register analyzers in a specific order
+        for name in &expected_order {
+            registry.register(TestAnalyzer {
+                name,
+                available: true,
+                stats: Some(sample_stats(name)),
+                sources: vec![PathBuf::from(format!("/fake/{}.jsonl", name))],
+                fail_stats: false,
+            });
+        }
+
+        // The initial load should preserve registration order
+        let initial_views = registry
+            .load_all_stats_views_parallel()
+            .expect("load_all_stats_views_parallel");
+        let initial_names: Vec<String> = initial_views
+            .analyzer_stats
+            .iter()
+            .map(|v| v.read().analyzer_name.to_string())
+            .collect();
+        let expected_strings: Vec<String> = expected_order.iter().map(|s| s.to_string()).collect();
+        assert_eq!(
+            initial_names, expected_strings,
+            "Initial load order mismatch"
+        );
+
+        // get_all_cached_views() should return the same order (used by watcher updates)
+        let cached_names: Vec<String> = registry
+            .get_all_cached_views()
+            .iter()
+            .map(|v| v.read().analyzer_name.to_string())
+            .collect();
+        assert_eq!(
+            cached_names, expected_strings,
+            "Cached views order mismatch"
+        );
+
+        // Order stays stable after an incremental file update
+        let _ = registry
+            .reload_file_incremental("analyzer-b", &PathBuf::from("/fake/analyzer-b.jsonl"));
+        let after_update: Vec<String> = registry
+            .get_all_cached_views()
+            .iter()
+            .map(|v| v.read().analyzer_name.to_string())
+            .collect();
+        assert_eq!(after_update, expected_strings, "Order changed after update");
+
+        // Order stays stable after a file removal
+        let _ =
+            registry.remove_file_from_cache("analyzer-c", &PathBuf::from("/fake/analyzer-c.jsonl"));
+        let after_removal: Vec<String> = registry
+            .get_all_cached_views()
+            .iter()
+            .map(|v| v.read().analyzer_name.to_string())
+            .collect();
+        assert_eq!(
+            after_removal, expected_strings,
+            "Order changed after removal"
+        );
+    }
+
+    // =========================================================================
+    // DIRTY FILE TRACKING TESTS
+    // =========================================================================
+
+    #[test]
+    fn test_mark_file_dirty_and_clear() {
+        let registry = AnalyzerRegistry::new();
+        let path = PathBuf::from("/fake/test.json");
+
+        assert!(!registry.has_dirty_files());
+
+        registry.mark_file_dirty("test", &path);
+        assert!(registry.has_dirty_files());
+
+        registry.dirty_files_handle().clear();
+        assert!(!registry.has_dirty_files());
+    }
+
+    #[test]
+    fn test_has_dirty_files_multiple() {
+        let registry = AnalyzerRegistry::new();
+
+        registry.mark_file_dirty("test", &PathBuf::from("/a.json"));
+        registry.mark_file_dirty("test", &PathBuf::from("/b.json"));
+        registry.mark_file_dirty("test", &PathBuf::from("/c.json"));
+
+        assert!(registry.has_dirty_files());
+
+        registry.dirty_files_handle().clear();
+        assert!(!registry.has_dirty_files());
+    }
+
+    #[test]
+    fn test_load_messages_for_upload_empty_dirty_set_no_analyzers() {
+        let registry = AnalyzerRegistry::new();
+
+        // No analyzers, no dirty files - should return empty
+        let messages = registry.load_messages_for_upload(0).expect("load");
+        assert!(messages.is_empty());
+    }
+
+    #[test]
+    fn test_remove_file_from_cache_marks_dirty() {
+        let registry = AnalyzerRegistry::new();
+        let path = PathBuf::from("/fake/test.json");
+
+        // File not in the cache - should return false and not mark dirty
+        assert!(!registry.remove_file_from_cache("test", &path));
+        assert!(!registry.has_dirty_files());
+    }
+
+    #[tokio::test]
+    async fn test_reload_file_incremental_marks_dirty_for_valid_path() {
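+        // A real temp file is needed here: the default is_valid_data_path()
+        // only accepts paths that exist as regular files, and only valid
+        // paths get marked dirty.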
+        use std::fs;
+
+        let temp_dir = tempfile::tempdir().expect("tempdir");
+        let path = temp_dir.path().join("test.json");
+        fs::write(&path, "{}").expect("write");
+
+        let mut registry = AnalyzerRegistry::new();
+        registry.register(TestAnalyzer {
+            name: "test",
+            available: true,
+            stats: Some(sample_stats("test")),
+            sources: vec![path.clone()],
+            fail_stats: false,
+        });
+
+        // Load initial stats to populate the cache
+        let _ = registry.load_all_stats_views_parallel();
+
+        // Reload should mark the file dirty
+        assert!(!registry.has_dirty_files());
+        let _ = registry.reload_file_incremental("test", &path);
+        assert!(registry.has_dirty_files());
+    }
+
+    #[tokio::test]
+    async fn test_reload_file_incremental_skips_invalid_path() {
+        use std::fs;
+
+        let temp_dir = tempfile::tempdir().expect("tempdir");
+        let valid_path = temp_dir.path().join("test.json");
+        fs::write(&valid_path, "{}").expect("write");
+        let invalid_path = temp_dir.path().join("subdir");
+        fs::create_dir(&invalid_path).expect("mkdir");
+
+        let mut registry = AnalyzerRegistry::new();
+        registry.register(TestAnalyzer {
+            name: "test",
+            available: true,
+            stats: Some(sample_stats("test")),
+            sources: vec![valid_path],
+            fail_stats: false,
+        });
+
+        // Load initial stats
+        let _ = registry.load_all_stats_views_parallel();
+
+        // An invalid path (a directory) should not mark anything dirty
+        let _ = registry.reload_file_incremental("test", &invalid_path);
+        assert!(!registry.has_dirty_files());
+    }
 }
diff --git a/src/analyzers/claude_code.rs b/src/analyzers/claude_code.rs
index afb0897..7ecb7d2 100644
--- a/src/analyzers/claude_code.rs
+++ b/src/analyzers/claude_code.rs
@@ -1,20 +1,19 @@
 use anyhow::Result;
 use async_trait::async_trait;
 use chrono::{DateTime, Utc};
-use dashmap::DashMap;
 use serde::{Deserialize, Serialize};
 use simd_json::prelude::*;
 use std::collections::{HashMap, HashSet};
 use std::fs::File;
 use std::io::Read;
 use std::path::{Path, PathBuf};
-use std::sync::atomic::{AtomicUsize, Ordering};
 
 use crate::analyzer::{Analyzer, DataSource};
+use crate::contribution_cache::ContributionStrategy;
 use crate::models::calculate_total_cost;
-use crate::types::{AgenticCodingToolStats, Application, ConversationMessage, MessageRole, Stats};
+use crate::types::{Application, ConversationMessage, MessageRole, Stats};
 use crate::utils::{fast_hash, hash_text};
-use jwalk::WalkDir;
+use walkdir::WalkDir;
 
 // Type alias for parse_jsonl_file return type
 type ParseResult = (
@@ -31,7 +30,6 @@ impl ClaudeCodeAnalyzer {
         Self
     }
 
-    /// Returns the root directory for Claude Code project data.
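+    // Root of Claude Code project data, e.g. `~/.claude/projects`
+    // (returns None when no home directory can be determined).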
     fn data_dir() -> Option<PathBuf> {
         dirs::home_dir().map(|h| h.join(".claude").join("projects"))
     }
@@ -55,118 +53,134 @@ impl Analyzer for ClaudeCodeAnalyzer {
     }
 
     fn discover_data_sources(&self) -> Result<Vec<DataSource>> {
-        let mut sources = Vec::new();
-
-        if let Some(projects_dir) = Self::data_dir()
-            && projects_dir.is_dir()
-        {
-            // jwalk walks directories in parallel
-            for entry in WalkDir::new(&projects_dir)
-                .min_depth(2)
-                .max_depth(2)
-                .into_iter()
-                .filter_map(|e| e.ok())
-                .filter(|e| e.path().extension().is_some_and(|ext| ext == "jsonl"))
-            {
-                sources.push(DataSource { path: entry.path() });
-            }
-        }
+        let sources = Self::data_dir()
+            .filter(|d| d.is_dir())
+            .into_iter()
+            .flat_map(|projects_dir| {
+                WalkDir::new(projects_dir)
+                    .min_depth(2)
+                    .max_depth(2)
+                    .into_iter()
+            })
+            .filter_map(|e| e.ok())
+            .filter(|e| e.path().extension().is_some_and(|ext| ext == "jsonl"))
+            .map(|e| DataSource {
+                path: e.into_path(),
+            })
+            .collect();
 
         Ok(sources)
     }
 
-    async fn parse_conversations(
+    fn parse_source(&self, source: &DataSource) -> Result<Vec<ConversationMessage>> {
+        let project_hash = extract_and_hash_project_id(&source.path);
+        let conversation_hash = crate::utils::hash_text(&source.path.to_string_lossy());
+        let file = File::open(&source.path)?;
+        let (messages, _, _, _) =
+            parse_jsonl_file(&source.path, file, &project_hash, &conversation_hash)?;
+        Ok(messages)
+    }
+
+    // Claude Code has complex cross-file deduplication, so we override get_stats_with_sources
+    fn get_stats_with_sources(
         &self,
         sources: Vec<DataSource>,
-    ) -> Result<Vec<ConversationMessage>> {
-        use rayon::iter::{IntoParallelIterator, ParallelIterator};
-
-        // Type for concurrent deduplication entry: (insertion_order, message, seen_fingerprints)
+    ) -> Result<crate::types::AgenticCodingToolStats> {
+        // Type for deduplication entry: (insertion_order, message, seen_fingerprints)
         type TokenFingerprint = (u64, u64, u64, u64, u64);
         type DedupEntry = (usize, ConversationMessage, HashSet<TokenFingerprint>);
 
-        // Concurrent deduplication map and session tracking
-        let dedup_map: DashMap<String, DedupEntry> = DashMap::with_capacity(sources.len() * 50);
-        let insertion_counter = AtomicUsize::new(0);
-        let no_hash_counter = AtomicUsize::new(0);
+        // Deduplication map and session tracking
+        let mut dedup_map: HashMap<String, DedupEntry> = HashMap::with_capacity(sources.len() * 50);
+        let mut insertion_counter: usize = 0;
+        let mut no_hash_counter: usize = 0;
 
-        // Concurrent session name mappings
-        let session_names: DashMap<String, String> = DashMap::new();
-        let conversation_summaries: DashMap<String, String> = DashMap::new();
-        let conversation_fallbacks: DashMap<String, String> = DashMap::new();
-        let conversation_uuids: DashMap<String, Vec<String>> = DashMap::new();
+        // Session name mappings
+        let mut session_names: HashMap<String, String> = HashMap::new();
+        let mut conversation_summaries: HashMap<String, String> = HashMap::new();
+        let mut conversation_fallbacks: HashMap<String, String> = HashMap::new();
+        let mut conversation_uuids: HashMap<String, Vec<String>> = HashMap::new();
 
-        // Parse all files in parallel, deduplicating as we go
-        sources.into_par_iter().for_each(|source| {
+        // Parse all files sequentially, deduplicating as we go
+        for source in sources {
             let project_hash = extract_and_hash_project_id(&source.path);
             let conversation_hash = crate::utils::hash_text(&source.path.to_string_lossy());
 
-            if let Ok(file) = File::open(&source.path)
-                && let Ok((msgs, summaries, uuids, fallback)) =
-                    parse_jsonl_file(&source.path, file, &project_hash, &conversation_hash)
-            {
-                // Store summaries
-                for (uuid, name) in summaries {
-                    session_names.insert(uuid, name);
+            let file = match File::open(&source.path) {
+                Ok(f) => f,
+                Err(e) => {
+                    eprintln!("Failed to open Claude Code file {:?}: {}", source.path, e);
+                    continue;
+                }
+            };
 
-            // Store UUIDs for this conversation
-            conversation_uuids.insert(conversation_hash.clone(), uuids);
+            let (msgs, summaries, uuids, fallback) =
+                match parse_jsonl_file(&source.path, file, &project_hash, &conversation_hash) {
+                    Ok(result) => result,
+                    Err(e) => {
+                        eprintln!("Failed to parse Claude Code file {:?}: {}", source.path, e);
+                        continue;
+                    }
+                };
 
-            // Store fallback
-            if let Some(fb) = fallback {
-                conversation_fallbacks.insert(conversation_hash.clone(), fb);
-            }
+            // Store summaries
+            for (uuid, name) in summaries {
+                session_names.insert(uuid, name);
+            }
 
-            // Deduplicate messages as we insert
-            for msg in msgs {
-                if let Some(local_hash) = &msg.local_hash {
-                    let order = insertion_counter.fetch_add(1, Ordering::Relaxed);
-                    let fp = (
-                        msg.stats.input_tokens,
-                        msg.stats.output_tokens,
-                        msg.stats.cache_creation_tokens,
-                        msg.stats.cache_read_tokens,
-                        msg.stats.cached_tokens,
-                    );
-
-                    dedup_map
-                        .entry(local_hash.clone())
-                        .and_modify(|(_, existing, seen_fps)| {
-                            merge_message_into(existing, &msg, seen_fps, fp);
-                        })
-                        .or_insert_with(|| {
-                            let mut fps = HashSet::new();
-                            fps.insert(fp);
-                            (order, msg, fps)
-                        });
-                } else {
-                    // No local hash, always keep with unique key
-                    let order = insertion_counter.fetch_add(1, Ordering::Relaxed);
-                    let unique_key = format!(
-                        "__no_hash_{}",
-                        no_hash_counter.fetch_add(1, Ordering::Relaxed)
-                    );
-                    let fp = (
-                        msg.stats.input_tokens,
-                        msg.stats.output_tokens,
-                        msg.stats.cache_creation_tokens,
-                        msg.stats.cache_read_tokens,
-                        msg.stats.cached_tokens,
-                    );
-                    let mut fps = HashSet::new();
-                    fps.insert(fp);
-                    dedup_map.insert(unique_key, (order, msg, fps));
-                }
+            // Store UUIDs for this conversation
+            conversation_uuids.insert(conversation_hash.clone(), uuids);
+
+            // Store fallback
+            if let Some(fb) = fallback {
+                conversation_fallbacks.insert(conversation_hash.clone(), fb);
+            }
+
+            // Deduplicate messages as we insert
+            for msg in msgs {
+                if let Some(local_hash) = &msg.local_hash {
+                    let order = insertion_counter;
+                    insertion_counter += 1;
+                    let fp = (
+                        msg.stats.input_tokens,
+                        msg.stats.output_tokens,
+                        msg.stats.cache_creation_tokens,
+                        msg.stats.cache_read_tokens,
+                        msg.stats.cached_tokens,
+                    );
+
+                    dedup_map
+                        .entry(local_hash.clone())
+                        .and_modify(|(_, existing, seen_fps)| {
+                            merge_message_into(existing, &msg, seen_fps, fp);
+                        })
+                        .or_insert_with(|| {
+                            let mut fps = HashSet::new();
+                            fps.insert(fp);
+                            (order, msg, fps)
+                        });
+                } else {
+                    // No local hash: always keep, with a unique key
+                    let order = insertion_counter;
+                    insertion_counter += 1;
+                    let unique_key = format!("__no_hash_{}", no_hash_counter);
+                    no_hash_counter += 1;
+                    let fp = (
+                        msg.stats.input_tokens,
+                        msg.stats.output_tokens,
+                        msg.stats.cache_creation_tokens,
+                        msg.stats.cache_read_tokens,
+                        msg.stats.cached_tokens,
+                    );
+                    let mut fps = HashSet::new();
+                    fps.insert(fp);
+                    dedup_map.insert(unique_key, (order, msg, fps));
                 }
             }
-        });
+        }
 
         // Link session names to conversations (after all parsing is complete)
-        for entry in conversation_uuids.iter() {
-            let conversation_hash = entry.key();
-            let uuids = entry.value();
-
+        for (conversation_hash, uuids) in &conversation_uuids {
             let mut found_summary = false;
             for uuid in uuids {
                 if let Some(name) = session_names.get(uuid) {
@@ -198,23 +212,17 @@ impl Analyzer for ClaudeCodeAnalyzer {
         // Sort by insertion order for deterministic output
         result.sort_by_key(|(order, _)| *order);
 
-        Ok(result.into_iter().map(|(_, msg)| msg).collect())
-    }
+        let messages: Vec<ConversationMessage> = result.into_iter().map(|(_, msg)| msg).collect();
 
-    async fn get_stats(&self) -> Result<AgenticCodingToolStats> {
-        let sources = self.discover_data_sources()?;
-        let messages = self.parse_conversations(sources).await?;
+        // Aggregate stats
         let mut daily_stats = crate::utils::aggregate_by_date(&messages);
-
-        // Remove any remaining "unknown" entries from daily_stats
         daily_stats.retain(|date, _| date != "unknown");
-
         let num_conversations = daily_stats
             .values()
             .map(|stats| stats.conversations as u64)
             .sum();
 
-        Ok(AgenticCodingToolStats {
+        Ok(crate::types::AgenticCodingToolStats {
             daily_stats,
             num_conversations,
             messages,
@@ -222,17 +230,43 @@ impl Analyzer for ClaudeCodeAnalyzer {
         })
     }
 
-    fn is_available(&self) -> bool {
-        self.discover_data_sources()
-            .is_ok_and(|sources| !sources.is_empty())
-    }
-
     fn get_watch_directories(&self) -> Vec<PathBuf> {
         Self::data_dir()
             .filter(|d| d.is_dir())
             .into_iter()
             .collect()
     }
+
+    fn is_valid_data_path(&self, path: &Path) -> bool {
+        // Must be a .jsonl file at depth 2 from the projects dir
+        if !path.is_file() || path.extension().is_none_or(|ext| ext != "jsonl") {
+            return false;
+        }
+        if let Some(data_dir) = Self::data_dir()
+            && let Ok(relative) = path.strip_prefix(&data_dir)
+        {
+            return relative.components().count() == 2;
+        }
+        false
+    }
+
+    fn is_available(&self) -> bool {
+        Self::data_dir()
+            .filter(|d| d.is_dir())
+            .into_iter()
+            .flat_map(|projects_dir| {
+                WalkDir::new(projects_dir)
+                    .min_depth(2)
+                    .max_depth(2)
+                    .into_iter()
+            })
+            .filter_map(|e| e.ok())
+            .any(|e| e.path().extension().is_some_and(|ext| ext == "jsonl"))
+    }
+
+    fn contribution_strategy(&self) -> ContributionStrategy {
+        ContributionStrategy::SingleSession
+    }
 }
 
 // Claude Code specific implementation functions
@@ -719,316 +753,3 @@ pub fn merge_message_into(
         }
     }
 }
-
-/// Deduplicate messages by local_hash, merging stats for duplicates.
-/// This is used for incremental cache loading where messages from multiple
-/// files need to be deduplicated after loading.
-pub fn deduplicate_messages(messages: Vec<ConversationMessage>) -> Vec<ConversationMessage> {
-    let estimated_unique = messages.len() / 2 + 1;
-    let mut seen_hashes = HashMap::<String, usize>::with_capacity(estimated_unique);
-    let mut seen_token_fingerprints: HashMap<String, HashSet<(u64, u64, u64, u64, u64)>> =
-        HashMap::with_capacity(estimated_unique);
-    let mut deduplicated_entries: Vec<ConversationMessage> = Vec::with_capacity(estimated_unique);
-
-    for message in messages {
-        if let Some(local_hash) = &message.local_hash {
-            let fp = (
-                message.stats.input_tokens,
-                message.stats.output_tokens,
-                message.stats.cache_creation_tokens,
-                message.stats.cache_read_tokens,
-                message.stats.cached_tokens,
-            );
-
-            if let Some(&existing_index) = seen_hashes.get(local_hash) {
-                let seen_fps = seen_token_fingerprints
-                    .entry(local_hash.clone())
-                    .or_default();
-                merge_message_into(
-                    &mut deduplicated_entries[existing_index],
-                    &message,
-                    seen_fps,
-                    fp,
-                );
-            } else {
-                seen_hashes.insert(local_hash.clone(), deduplicated_entries.len());
-                seen_token_fingerprints
-                    .entry(local_hash.clone())
-                    .or_default()
-                    .insert(fp);
-                deduplicated_entries.push(message);
-            }
-        } else {
-            deduplicated_entries.push(message);
-        }
-    }
-
-    deduplicated_entries
-}
-
-#[cfg(test)]
-mod tests {
-    use super::*;
-    use crate::types::{MessageRole, Stats};
-
-    #[test]
-    fn test_deduplicate_partial_split_messages() {
-        // Test the new format (Oct 18+ 2025): split messages with DIFFERENT partial tokens
-        let hash = "test_partial_split".to_string();
-
-        let messages = vec![
-            ConversationMessage {
-                global_hash: "unique1".to_string(),
-                local_hash: Some(hash.clone()),
-                application: Application::ClaudeCode,
-                model: Some("claude-sonnet-4-5-20250929".to_string()),
-                date: chrono::Utc::now(),
-                project_hash: "proj1".to_string(),
-                conversation_hash: "conv1".to_string(),
-                role: MessageRole::Assistant,
-                stats: Stats {
-                    input_tokens: 10, // Different from the others
-                    output_tokens: 2, // thinking block
-                    tool_calls: 0,
-                    ..Default::default()
-                },
-                uuid: None,
-                session_name: None,
-            },
-            ConversationMessage {
-                global_hash: "unique2".to_string(),
-                local_hash: Some(hash.clone()),
-                application: Application::ClaudeCode,
-                model: Some("claude-sonnet-4-5-20250929".to_string()),
-                date: chrono::Utc::now(),
-                project_hash: "proj1".to_string(),
-                conversation_hash: "conv1".to_string(),
-                role: MessageRole::Assistant,
-                stats: Stats {
-                    input_tokens: 5, // Different from the others
-                    output_tokens: 2, // text block
-                    tool_calls: 0,
-                    ..Default::default()
-                },
-                uuid: None,
-                session_name: None,
-            },
-            ConversationMessage {
-                global_hash: "unique3".to_string(),
-                local_hash: Some(hash.clone()),
-                application: Application::ClaudeCode,
-                model: Some("claude-sonnet-4-5-20250929".to_string()),
-                date: chrono::Utc::now(),
-                project_hash: "proj1".to_string(),
-                conversation_hash: "conv1".to_string(),
-                role: MessageRole::Assistant,
-                stats: Stats {
-                    input_tokens: 0, // Different from the others
-                    output_tokens: 447, // tool_use block
-                    tool_calls: 1,
-                    ..Default::default()
-                },
-                uuid: None,
-                session_name: None,
-            },
-        ];
-
-        let deduplicated = deduplicate_messages(messages);
-
-        // Should have exactly 1 message (all 3 merged)
-        assert_eq!(deduplicated.len(), 1);
-
-        // Output tokens should be summed: 2 + 2 + 447 = 451
-        assert_eq!(deduplicated[0].stats.output_tokens, 451);
-
-        // Input tokens should be summed: 10 + 5 + 0 = 15
-        assert_eq!(deduplicated[0].stats.input_tokens, 15);
-
-        // Tool calls should be summed too
-        assert_eq!(deduplicated[0].stats.tool_calls, 1);
-    }
-
-    #[test]
-    fn test_deduplicate_redundant_split_messages() {
-        // Test the old format (Oct 16-17 2025): split messages with IDENTICAL redundant tokens
-        let hash = "test_redundant_split".to_string();
-
-        let messages = vec![
-            ConversationMessage {
-                global_hash: "unique1".to_string(),
-                local_hash: Some(hash.clone()),
-                application: Application::ClaudeCode,
-                model: Some("claude-sonnet-4-5-20250929".to_string()),
-                date: chrono::Utc::now(),
-                project_hash: "proj1".to_string(),
-                conversation_hash: "conv1".to_string(),
-                role: MessageRole::Assistant,
-                stats: Stats {
-                    output_tokens: 4, // All blocks report the same total
-                    input_tokens: 100,
-                    tool_calls: 2,
-                    ..Default::default()
-                },
-                uuid: None,
-                session_name: None,
-            },
-            ConversationMessage {
-                global_hash: "unique2".to_string(),
-                local_hash: Some(hash.clone()),
-                application: Application::ClaudeCode,
-                model: Some("claude-sonnet-4-5-20250929".to_string()),
-                date: chrono::Utc::now(),
-                project_hash: "proj1".to_string(),
-                conversation_hash: "conv1".to_string(),
-                role: MessageRole::Assistant,
-                stats: Stats {
-                    output_tokens: 4,  // Identical
-                    input_tokens: 100, // Identical
-                    tool_calls: 2,     // Not checked for identity, but will be kept
-                    ..Default::default()
-                },
-                uuid: None,
-                session_name: None,
-            },
-        ];
-
-        let deduplicated = deduplicate_messages(messages);
-
-        // Should have exactly 1 message (the duplicate is skipped)
-        assert_eq!(deduplicated.len(), 1);
-
-        // Tokens should NOT be summed (identical entries)
-        assert_eq!(deduplicated[0].stats.output_tokens, 4);
-        assert_eq!(deduplicated[0].stats.input_tokens, 100);
-
-        // Tool calls are from the first entry only
-        assert_eq!(deduplicated[0].stats.tool_calls, 2);
-    }
-
-    #[test]
-    fn test_identical_tokens_merge_tool_stats() {
-        // The first row has no tools; the second has tool_calls=1, tokens identical
-        let hash = "identical_merge_tools".to_string();
-
-        let msg1 = ConversationMessage {
-            global_hash: "g1".to_string(),
-            local_hash: Some(hash.clone()),
-            application: Application::ClaudeCode,
-            model: Some("claude-sonnet-4-5-20250929".to_string()),
-            date: chrono::Utc::now(),
-            project_hash: "proj".to_string(),
-            conversation_hash: "conv".to_string(),
-            role: MessageRole::Assistant,
-            stats: Stats {
-                input_tokens: 100,
-                output_tokens: 4,
-                cache_creation_tokens: 0,
-                cache_read_tokens: 0,
-                cached_tokens: 0,
-                tool_calls: 0,
-                ..Default::default()
-            },
-            uuid: None,
-            session_name: None,
-        };
-
-        let msg2 = ConversationMessage {
-            global_hash: "g2".to_string(),
-            local_hash: Some(hash.clone()),
-            application: Application::ClaudeCode,
-            model: Some("claude-sonnet-4-5-20250929".to_string()),
-            date: chrono::Utc::now(),
-            project_hash: "proj".to_string(),
-            conversation_hash: "conv".to_string(),
-            role: MessageRole::Assistant,
-            stats: Stats {
-                input_tokens: 100,
-                output_tokens: 4,
-                cache_creation_tokens: 0,
-                cache_read_tokens: 0,
-                cached_tokens: 0,
-                tool_calls: 1,
-                ..Default::default()
-            },
-            uuid: None,
-            session_name: None,
-        };
-
-        let dedup = deduplicate_messages(vec![msg1, msg2]);
-        assert_eq!(dedup.len(), 1);
-        // Tokens unchanged
-        assert_eq!(dedup[0].stats.input_tokens, 100);
-        assert_eq!(dedup[0].stats.output_tokens, 4);
-        // Tool calls merged from the second row
-        assert_eq!(dedup[0].stats.tool_calls, 1);
-    }
-
-    #[test]
-    fn test_deduplicate_skips_identical_after_partial_aggregate() {
-        // A mix of partials and redundant duplicates for the same local_hash
-        let hash = "test_mixed_duplicates".to_string();
-
-        let a1 = ConversationMessage {
-            global_hash: "ga1".to_string(),
-            local_hash: Some(hash.clone()),
-            application: Application::ClaudeCode,
-            model: Some("claude-sonnet-4-5-20250929".to_string()),
-            date: chrono::Utc::now(),
-            project_hash: "proj".to_string(),
-            conversation_hash: "conv".to_string(),
-            role: MessageRole::Assistant,
-            stats: Stats {
-                input_tokens: 10,
-                output_tokens: 2,
-                ..Default::default()
-            },
-            uuid: None,
-            session_name: None,
-        };
-        // Exact duplicate of a1 (should be skipped)
-        let a2 = ConversationMessage {
-            global_hash: "ga2".to_string(),
-            ..a1.clone()
-        };
-
-        // Tool-use partial
-        let b1 = ConversationMessage {
-            global_hash: "gb1".to_string(),
-            local_hash: Some(hash.clone()),
-            application: Application::ClaudeCode,
-            model: Some("claude-sonnet-4-5-20250929".to_string()),
-            date: chrono::Utc::now(),
-            project_hash: "proj".to_string(),
-            conversation_hash: "conv".to_string(),
-            role: MessageRole::Assistant,
-            stats: Stats {
-                input_tokens: 0,
-                output_tokens: 447,
-                tool_calls: 1,
-                ..Default::default()
-            },
-            uuid: None,
-            session_name: None,
-        };
-        // Exact duplicate of b1 (should be skipped)
-        let b2 = ConversationMessage {
-            global_hash: "gb2".to_string(),
-            ..b1.clone()
-        };
-
-        // Another duplicate of a1 after aggregation (should still be skipped)
-        let a3 = ConversationMessage {
-            global_hash: "ga3".to_string(),
-            ..a1.clone()
-        };
-
-        let messages = vec![a1, a2, b1, b2, a3];
-        let deduplicated = deduplicate_messages(messages);
-
-        assert_eq!(deduplicated.len(), 1);
-        // Should include A once and B once
-        assert_eq!(deduplicated[0].stats.input_tokens, 10);
-        assert_eq!(deduplicated[0].stats.output_tokens, 2 + 447);
-        assert_eq!(deduplicated[0].stats.tool_calls, 1);
-    }
-}
diff --git a/src/analyzers/cline.rs b/src/analyzers/cline.rs
index a8493b3..6d57b93 100644
--- a/src/analyzers/cline.rs
+++ b/src/analyzers/cline.rs
@@ -1,12 +1,13 @@
 use crate::analyzer::{
     Analyzer, DataSource, discover_vscode_extension_sources, get_vscode_extension_tasks_dirs,
+    vscode_extension_has_sources,
 };
-use crate::types::{AgenticCodingToolStats, Application, ConversationMessage, MessageRole, Stats};
+use crate::contribution_cache::ContributionStrategy;
+use crate::types::{Application, ConversationMessage, MessageRole, Stats};
 use crate::utils::hash_text;
 use anyhow::{Context, Result};
 use async_trait::async_trait;
 use chrono::{DateTime, Utc};
-use rayon::prelude::*;
 use serde::{Deserialize, Serialize};
 use simd_json::prelude::*;
 use std::path::{Path, PathBuf};
@@ -304,59 +305,24 @@ impl Analyzer for ClineAnalyzer {
         discover_vscode_extension_sources(CLINE_EXTENSION_ID, "ui_messages.json", true)
     }
 
-    async fn parse_conversations(
-        &self,
-        sources: Vec<DataSource>,
-    ) -> Result<Vec<ConversationMessage>> {
-        // Parse all task directories in parallel
-        let all_entries: Vec<ConversationMessage> = sources
-            .into_par_iter()
-            .flat_map(|source| match parse_cline_task_directory(&source.path) {
-                Ok(messages) => messages,
-                Err(e) => {
-                    eprintln!(
-                        "Failed to parse Cline task directory {:?}: {}",
-                        source.path, e
-                    );
-                    Vec::new()
-                }
-            })
-            .collect();
+    fn is_available(&self) -> bool {
+        vscode_extension_has_sources(CLINE_EXTENSION_ID, "ui_messages.json")
+    }
 
-        // Parallel deduplicate by global hash
-        Ok(crate::utils::deduplicate_by_global_hash_parallel(
-            all_entries,
-        ))
+    fn parse_source(&self, source: &DataSource) -> Result<Vec<ConversationMessage>> {
+        parse_cline_task_directory(&source.path)
     }
 
-    async fn get_stats(&self) -> Result<AgenticCodingToolStats> {
-        let sources = self.discover_data_sources()?;
-        let messages = self.parse_conversations(sources).await?;
-        let mut daily_stats = crate::utils::aggregate_by_date(&messages);
-
-        // Remove any "unknown" entries
-        daily_stats.retain(|date, _| date != "unknown");
-
-        let num_conversations = daily_stats
-            .values()
-            .map(|stats| stats.conversations as u64)
-            .sum();
-
-        Ok(AgenticCodingToolStats {
-            daily_stats,
-            num_conversations,
-            messages,
-            analyzer_name: self.display_name().to_string(),
-        })
+    fn get_watch_directories(&self) -> Vec<PathBuf> {
+        get_vscode_extension_tasks_dirs(CLINE_EXTENSION_ID)
     }
 
-    fn is_available(&self) -> bool {
-        self.discover_data_sources()
-            .is_ok_and(|sources| !sources.is_empty())
+    fn is_valid_data_path(&self, path: &Path) -> bool {
+        path.is_file() && path.file_name().is_some_and(|n| n == "ui_messages.json")
     }
 
-    fn get_watch_directories(&self) -> Vec<PathBuf> {
-        get_vscode_extension_tasks_dirs(CLINE_EXTENSION_ID)
+    fn contribution_strategy(&self) -> ContributionStrategy {
+        ContributionStrategy::SingleSession
     }
 }
diff --git a/src/analyzers/codex_cli.rs b/src/analyzers/codex_cli.rs
index 93962ee..2591954 100644
--- a/src/analyzers/codex_cli.rs
+++ b/src/analyzers/codex_cli.rs
@@ -1,16 +1,18 @@
 use anyhow::Result;
 use async_trait::async_trait;
 use chrono::{DateTime, Utc};
-use jwalk::WalkDir;
+use rayon::prelude::*;
 use serde::{Deserialize, Serialize};
 use std::collections::HashSet;
 use std::fs::File;
 use std::io::Read;
 use std::path::{Path, PathBuf};
+use walkdir::WalkDir;
 
 use crate::analyzer::{Analyzer, DataSource};
+use crate::contribution_cache::ContributionStrategy;
 use crate::models::calculate_total_cost;
-use crate::types::{AgenticCodingToolStats, Application, ConversationMessage, MessageRole, Stats};
+use crate::types::{Application, ConversationMessage, MessageRole, Stats};
 use crate::utils::{deserialize_utc_timestamp, hash_text, warn_once};
 
 const DEFAULT_FALLBACK_MODEL: &str = "gpt-5";
@@ -22,7 +24,6 @@ impl CodexCliAnalyzer {
         Self
     }
 
-    /// Returns the root directory for Codex CLI session data.
     fn data_dir() -> Option<PathBuf> {
         dirs::home_dir().map(|h| h.join(".codex").join("sessions"))
     }
@@ -46,78 +47,44 @@ impl Analyzer for CodexCliAnalyzer {
     }
 
     fn discover_data_sources(&self) -> Result<Vec<DataSource>> {
-        let mut sources = Vec::new();
-
-        if let Some(sessions_dir) = Self::data_dir()
-            && sessions_dir.is_dir()
-        {
-            // jwalk walks directories in parallel, recursively
-            for entry in WalkDir::new(&sessions_dir)
-                .into_iter()
-                .filter_map(|e| e.ok())
-                .filter(|e| {
-                    e.file_type().is_file()
-                        && e.path().extension().is_some_and(|ext| ext == "jsonl")
-                })
-            {
-                sources.push(DataSource { path: entry.path() });
-            }
-        }
+        let sources = Self::data_dir()
+            .filter(|d| d.is_dir())
+            .into_iter()
+            .flat_map(|dir| WalkDir::new(dir).into_iter())
+            .filter_map(|e| e.ok())
+            .filter(|e| {
+                e.file_type().is_file() && e.path().extension().is_some_and(|ext| ext == "jsonl")
+            })
+            .map(|e| DataSource {
+                path: e.into_path(),
+            })
+            .collect();
 
         Ok(sources)
     }
 
-    async fn parse_conversations(
-        &self,
-        sources: Vec<DataSource>,
-    ) -> Result<Vec<ConversationMessage>> {
-        // Parse all data sources in parallel while properly propagating any
-        // error that occurs while processing an individual file. Rayon's
-        // `try_reduce` utility allows us to aggregate `Result` values coming
-        // from each parallel worker without having to fall back to
-        // sequential processing.
-
-        use rayon::prelude::*;
-
-        let aggregated: Result<Vec<ConversationMessage>> = sources
-            .into_par_iter()
-            .map(|source| {
-                // parse_codex_cli_jsonl_file returns (messages, model); we only need messages here
-                parse_codex_cli_jsonl_file(&source.path).map(|(msgs, _model)| msgs)
+    fn is_available(&self) -> bool {
+        Self::data_dir()
+            .filter(|d| d.is_dir())
+            .into_iter()
+            .flat_map(|dir| WalkDir::new(dir).into_iter())
+            .filter_map(|e| e.ok())
+            .any(|e| {
+                e.file_type().is_file() && e.path().extension().is_some_and(|ext| ext == "jsonl")
             })
-            // Start the reduction with an empty vector and extend it with the
-            // entries coming from each successfully-parsed file.
-            .try_reduce(Vec::new, |mut acc, mut entries| {
-                acc.append(&mut entries);
-                Ok(acc)
-            });
-
-        // For Codex CLI, we don't need to deduplicate since each session is separate,
-        // but we keep the logic encapsulated for future changes.
-        aggregated
     }
 
-    async fn get_stats(&self) -> Result<AgenticCodingToolStats> {
-        let sources = self.discover_data_sources()?;
-        let messages = self.parse_conversations(sources).await?;
-        let daily_stats = crate::utils::aggregate_by_date(&messages);
-
-        let num_conversations = daily_stats
-            .values()
-            .map(|stats| stats.conversations as u64)
-            .sum();
-
-        Ok(AgenticCodingToolStats {
-            daily_stats,
-            num_conversations,
-            messages,
-            analyzer_name: self.display_name().to_string(),
-        })
+    fn parse_source(&self, source: &DataSource) -> Result<Vec<ConversationMessage>> {
+        // parse_codex_cli_jsonl_file returns (messages, model); we only need messages here
+        parse_codex_cli_jsonl_file(&source.path).map(|(msgs, _model)| msgs)
     }
 
-    fn is_available(&self) -> bool {
-        self.discover_data_sources()
-            .is_ok_and(|sources| !sources.is_empty())
+    // Codex CLI doesn't need deduplication, since each session is separate
+    fn parse_sources_parallel(&self, sources: &[DataSource]) -> Vec<ConversationMessage> {
+        sources
+            .par_iter()
+            .flat_map(|source| self.parse_source(source).unwrap_or_default())
+            .collect()
     }
 
     fn get_watch_directories(&self) -> Vec<PathBuf> {
@@ -126,6 +93,15 @@ impl Analyzer for CodexCliAnalyzer {
         .into_iter()
         .collect()
     }
+
+    fn is_valid_data_path(&self, path: &Path) -> bool {
+        // Must be a .jsonl file under the sessions directory
+        path.is_file() && path.extension().is_some_and(|ext| ext == "jsonl")
+    }
+
+    fn contribution_strategy(&self) -> ContributionStrategy {
+        ContributionStrategy::SingleSession
+    }
 }
 
 // CODEX CLI JSONL FILES SCHEMA - NEW WRAPPER FORMAT
diff --git a/src/analyzers/copilot.rs b/src/analyzers/copilot.rs
index 271fbb3..1ec28f2 100644
--- a/src/analyzers/copilot.rs
+++ b/src/analyzers/copilot.rs
@@ -1,15 +1,15 @@
 use crate::analyzer::{Analyzer, DataSource};
-use crate::types::{AgenticCodingToolStats, Application, ConversationMessage, MessageRole, Stats};
+use crate::contribution_cache::ContributionStrategy;
+use crate::types::{Application, ConversationMessage, MessageRole, Stats};
 use crate::utils::hash_text;
 use anyhow::{Context, Result};
 use async_trait::async_trait;
 use chrono::{DateTime, Utc};
-use jwalk::WalkDir;
-use rayon::prelude::*;
 use serde::{Deserialize, Serialize};
 use simd_json::prelude::*;
 use std::path::{Path, PathBuf};
 use tiktoken_rs::get_bpe_from_model;
+use walkdir::WalkDir;
 
 pub struct CopilotAnalyzer;
 
@@ -29,7 +29,6 @@ impl CopilotAnalyzer {
         Self
     }
 
-    /// Returns all VSCode workspaceStorage directories where Copilot data may exist.
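+    // VS Code workspaceStorage roots that may contain Copilot chatSessions data
+    // (see the */chatSessions/*.json walk below).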
     fn workspace_storage_dirs() -> Vec<PathBuf> {
         let mut dirs = Vec::new();
@@ -451,85 +450,61 @@
     }
 
     fn discover_data_sources(&self) -> Result<Vec<DataSource>> {
-        let mut sources = Vec::new();
-
-        for workspace_storage in Self::workspace_storage_dirs() {
-            // Pattern: */chatSessions/*.json
-            // jwalk walks directories in parallel
-            for entry in WalkDir::new(&workspace_storage)
-                .min_depth(3) // */chatSessions/*.json
-                .max_depth(3)
-                .into_iter()
-                .filter_map(|e| e.ok())
-                .filter(|e| {
-                    e.file_type().is_file()
-                        && e.path().extension().is_some_and(|ext| ext == "json")
-                        && e.path()
-                            .parent()
-                            .and_then(|p| p.file_name())
-                            .is_some_and(|name| name == "chatSessions")
-                })
-            {
-                sources.push(DataSource { path: entry.path() });
-            }
-        }
+        let sources = Self::workspace_storage_dirs()
+            .into_iter()
+            .flat_map(|dir| WalkDir::new(dir).min_depth(3).max_depth(3).into_iter())
+            .filter_map(|e| e.ok())
+            .filter(|e| {
+                e.file_type().is_file()
+                    && e.path().extension().is_some_and(|ext| ext == "json")
+                    && e.path()
+                        .parent()
+                        .and_then(|p| p.file_name())
+                        .is_some_and(|name| name == "chatSessions")
+            })
+            .map(|e| DataSource {
+                path: e.into_path(),
+            })
+            .collect();
 
         Ok(sources)
     }
 
-    async fn parse_conversations(
-        &self,
-        sources: Vec<DataSource>,
-    ) -> Result<Vec<ConversationMessage>> {
-        // Parse all session files in parallel
-        let all_entries: Vec<ConversationMessage> = sources
-            .into_par_iter()
-            .flat_map(|source| match parse_copilot_session_file(&source.path) {
-                Ok(messages) => messages,
-                Err(e) => {
-                    eprintln!(
-                        "Failed to parse Copilot session file {:?}: {}",
-                        source.path, e
-                    );
-                    Vec::new()
-                }
+    fn is_available(&self) -> bool {
+        Self::workspace_storage_dirs()
+            .into_iter()
+            .flat_map(|dir| WalkDir::new(dir).min_depth(3).max_depth(3).into_iter())
+            .filter_map(|e| e.ok())
+            .any(|e| {
+                e.file_type().is_file()
+                    && e.path().extension().is_some_and(|ext| ext == "json")
+                    && e.path()
+                        .parent()
+                        .and_then(|p| p.file_name())
+                        .is_some_and(|name| name == "chatSessions")
             })
-            .collect();
-
-        // Parallel deduplicate by global hash
-        Ok(crate::utils::deduplicate_by_global_hash_parallel(
-            all_entries,
-        ))
     }
 
-    async fn get_stats(&self) -> Result<AgenticCodingToolStats> {
-        let sources = self.discover_data_sources()?;
-        let messages = self.parse_conversations(sources).await?;
-        let mut daily_stats = crate::utils::aggregate_by_date(&messages);
-
-        // Remove any "unknown" entries
-        daily_stats.retain(|date, _| date != "unknown");
-
-        let num_conversations = daily_stats
-            .values()
-            .map(|stats| stats.conversations as u64)
-            .sum();
+    fn parse_source(&self, source: &DataSource) -> Result<Vec<ConversationMessage>> {
+        parse_copilot_session_file(&source.path)
+    }
 
-        Ok(AgenticCodingToolStats {
-            daily_stats,
-            num_conversations,
-            messages,
-            analyzer_name: self.display_name().to_string(),
-        })
+    fn get_watch_directories(&self) -> Vec<PathBuf> {
+        Self::workspace_storage_dirs()
     }
 
-    fn is_available(&self) -> bool {
-        self.discover_data_sources()
-            .is_ok_and(|sources| !sources.is_empty())
+    fn is_valid_data_path(&self, path: &Path) -> bool {
+        // Must be a .json file in a "chatSessions" directory
+        path.is_file()
+            && path.extension().is_some_and(|ext| ext == "json")
+            && path
+                .parent()
+                .and_then(|p| p.file_name())
+                .is_some_and(|name| name == "chatSessions")
    }
 
-    fn get_watch_directories(&self) -> Vec<PathBuf> {
-        Self::workspace_storage_dirs()
+    fn contribution_strategy(&self) -> ContributionStrategy {
+        ContributionStrategy::SingleSession
    }
 }
diff --git a/src/analyzers/gemini_cli.rs b/src/analyzers/gemini_cli.rs
index e3672bc..4e19d26 100644
--- a/src/analyzers/gemini_cli.rs
+++ b/src/analyzers/gemini_cli.rs
b/src/analyzers/gemini_cli.rs @@ -1,17 +1,16 @@ use crate::analyzer::{Analyzer, DataSource}; +use crate::contribution_cache::ContributionStrategy; use crate::models::{calculate_cache_cost, calculate_input_cost, calculate_output_cost}; -use crate::types::{ - AgenticCodingToolStats, Application, ConversationMessage, FileCategory, MessageRole, Stats, -}; +use crate::types::{Application, ConversationMessage, FileCategory, MessageRole, Stats}; use crate::utils::{deserialize_utc_timestamp, hash_text}; use anyhow::Result; use async_trait::async_trait; use chrono::{DateTime, Utc}; -use jwalk::WalkDir; use rayon::prelude::*; use serde::{Deserialize, Serialize}; use simd_json::prelude::*; use std::path::{Path, PathBuf}; +use walkdir::WalkDir; pub struct GeminiCliAnalyzer; @@ -20,7 +19,6 @@ impl GeminiCliAnalyzer { Self } - /// Returns the root directory for Gemini CLI data. fn data_dir() -> Option { dirs::home_dir().map(|h| h.join(".gemini").join("tmp")) } @@ -311,81 +309,53 @@ impl Analyzer for GeminiCliAnalyzer { } fn discover_data_sources(&self) -> Result> { - let mut sources = Vec::new(); - - if let Some(tmp_dir) = Self::data_dir() - && tmp_dir.is_dir() - { - // Pattern: ~/.gemini/tmp/*/chats/*.json - // jwalk walks directories in parallel - for entry in WalkDir::new(&tmp_dir) - .min_depth(3) // */chats/*.json - .max_depth(3) - .into_iter() - .filter_map(|e| e.ok()) - .filter(|e| { - e.file_type().is_file() - && e.path().extension().is_some_and(|ext| ext == "json") - && e.path() - .parent() - .and_then(|p| p.file_name()) - .is_some_and(|name| name == "chats") - }) - { - sources.push(DataSource { path: entry.path() }); - } - } + let sources = Self::data_dir() + .filter(|d| d.is_dir()) + .into_iter() + .flat_map(|tmp_dir| WalkDir::new(tmp_dir).min_depth(3).max_depth(3).into_iter()) + .filter_map(|e| e.ok()) + .filter(|e| { + e.file_type().is_file() + && e.path().extension().is_some_and(|ext| ext == "json") + && e.path() + .parent() + .and_then(|p| p.file_name()) + .is_some_and(|name| name == "chats") + }) + .map(|e| DataSource { + path: e.into_path(), + }) + .collect(); Ok(sources) } - async fn parse_conversations( - &self, - sources: Vec, - ) -> Result> { - // Parse all session files in parallel - let all_entries: Vec = sources - .into_par_iter() - .filter_map(|source| match parse_json_session_file(&source.path) { - Ok(messages) => Some(messages), - Err(e) => { - eprintln!( - "Failed to parse Gemini session file {}: {e:#}", - source.path.display(), - ); - None - } + fn is_available(&self) -> bool { + Self::data_dir() + .filter(|d| d.is_dir()) + .into_iter() + .flat_map(|tmp_dir| WalkDir::new(tmp_dir).min_depth(3).max_depth(3).into_iter()) + .filter_map(|e| e.ok()) + .any(|e| { + e.file_type().is_file() + && e.path().extension().is_some_and(|ext| ext == "json") + && e.path() + .parent() + .and_then(|p| p.file_name()) + .is_some_and(|name| name == "chats") }) - .flat_map(|messages| messages) - .collect(); - - // Parallel deduplicate by local hash - Ok(crate::utils::deduplicate_by_local_hash_parallel( - all_entries, - )) } - async fn get_stats(&self) -> Result { - let sources = self.discover_data_sources()?; - let messages = self.parse_conversations(sources).await?; - let daily_stats = crate::utils::aggregate_by_date(&messages); - - let num_conversations = daily_stats - .values() - .map(|stats| stats.conversations as u64) - .sum(); - - Ok(AgenticCodingToolStats { - analyzer_name: self.display_name().to_string(), - daily_stats, - messages, - num_conversations, - }) + fn parse_source(&self, source: 
&DataSource) -> Result> { + parse_json_session_file(&source.path) } - fn is_available(&self) -> bool { - self.discover_data_sources() - .is_ok_and(|sources| !sources.is_empty()) + fn parse_sources_parallel(&self, sources: &[DataSource]) -> Vec { + let all_messages: Vec = sources + .par_iter() + .flat_map(|source| self.parse_source(source).unwrap_or_default()) + .collect(); + crate::utils::deduplicate_by_local_hash(all_messages) } fn get_watch_directories(&self) -> Vec { @@ -394,4 +364,18 @@ impl Analyzer for GeminiCliAnalyzer { .into_iter() .collect() } + + fn is_valid_data_path(&self, path: &Path) -> bool { + // Must be a .json file in a "chats" directory + path.is_file() + && path.extension().is_some_and(|ext| ext == "json") + && path + .parent() + .and_then(|p| p.file_name()) + .is_some_and(|name| name == "chats") + } + + fn contribution_strategy(&self) -> ContributionStrategy { + ContributionStrategy::SingleSession + } } diff --git a/src/analyzers/kilo_code.rs b/src/analyzers/kilo_code.rs index e8a9728..f6f7733 100644 --- a/src/analyzers/kilo_code.rs +++ b/src/analyzers/kilo_code.rs @@ -1,12 +1,13 @@ use crate::analyzer::{ Analyzer, DataSource, discover_vscode_extension_sources, get_vscode_extension_tasks_dirs, + vscode_extension_has_sources, }; -use crate::types::{AgenticCodingToolStats, Application, ConversationMessage, MessageRole, Stats}; +use crate::contribution_cache::ContributionStrategy; +use crate::types::{Application, ConversationMessage, MessageRole, Stats}; use crate::utils::hash_text; use anyhow::{Context, Result}; use async_trait::async_trait; use chrono::{DateTime, Utc}; -use rayon::prelude::*; use serde::{Deserialize, Serialize}; use simd_json::prelude::*; use std::path::{Path, PathBuf}; @@ -299,62 +300,25 @@ impl Analyzer for KiloCodeAnalyzer { discover_vscode_extension_sources(KILO_CODE_EXTENSION_ID, "ui_messages.json", true) } - async fn parse_conversations( - &self, - sources: Vec, - ) -> Result> { - // Parse all task directories in parallel - let all_entries: Vec = sources - .into_par_iter() - .flat_map( - |source| match parse_kilo_code_task_directory(&source.path) { - Ok(messages) => messages, - Err(e) => { - eprintln!( - "Failed to parse Kilo Code task directory {:?}: {}", - source.path, e - ); - Vec::new() - } - }, - ) - .collect(); - - // Parallel deduplicate by global hash - Ok(crate::utils::deduplicate_by_global_hash_parallel( - all_entries, - )) - } - - async fn get_stats(&self) -> Result { - let sources = self.discover_data_sources()?; - let messages = self.parse_conversations(sources).await?; - let mut daily_stats = crate::utils::aggregate_by_date(&messages); - - // Remove any "unknown" entries - daily_stats.retain(|date, _| date != "unknown"); - - let num_conversations = daily_stats - .values() - .map(|stats| stats.conversations as u64) - .sum(); - - Ok(AgenticCodingToolStats { - daily_stats, - num_conversations, - messages, - analyzer_name: self.display_name().to_string(), - }) + fn is_available(&self) -> bool { + vscode_extension_has_sources(KILO_CODE_EXTENSION_ID, "ui_messages.json") } - fn is_available(&self) -> bool { - self.discover_data_sources() - .is_ok_and(|sources| !sources.is_empty()) + fn parse_source(&self, source: &DataSource) -> Result> { + parse_kilo_code_task_directory(&source.path) } fn get_watch_directories(&self) -> Vec { get_vscode_extension_tasks_dirs(KILO_CODE_EXTENSION_ID) } + + fn is_valid_data_path(&self, path: &Path) -> bool { + path.is_file() && path.file_name().is_some_and(|n| n == "ui_messages.json") + } + + fn 
contribution_strategy(&self) -> ContributionStrategy { + ContributionStrategy::SingleSession + } } #[cfg(test)] diff --git a/src/analyzers/mod.rs b/src/analyzers/mod.rs index b19cbe3..9a13337 100644 --- a/src/analyzers/mod.rs +++ b/src/analyzers/mod.rs @@ -10,7 +10,7 @@ pub mod piebald; pub mod qwen_code; pub mod roo_code; -pub use claude_code::{ClaudeCodeAnalyzer, deduplicate_messages}; +pub use claude_code::ClaudeCodeAnalyzer; pub use cline::ClineAnalyzer; pub use codex_cli::CodexCliAnalyzer; pub use copilot::CopilotAnalyzer; diff --git a/src/analyzers/opencode.rs b/src/analyzers/opencode.rs index 46f2958..6f338c6 100644 --- a/src/analyzers/opencode.rs +++ b/src/analyzers/opencode.rs @@ -1,12 +1,12 @@ use crate::analyzer::{Analyzer, DataSource}; +use crate::contribution_cache::ContributionStrategy; use crate::models::calculate_total_cost; -use crate::types::{AgenticCodingToolStats, Application, ConversationMessage, MessageRole, Stats}; +use crate::types::{Application, ConversationMessage, MessageRole, Stats}; use crate::utils::hash_text; use anyhow::{Context, Result}; use async_trait::async_trait; use chrono::{DateTime, TimeZone, Utc}; use glob::glob; -use jwalk::WalkDir; use rayon::prelude::*; use serde::Deserialize; use simd_json::OwnedValue; @@ -14,6 +14,7 @@ use simd_json::prelude::*; use std::collections::HashMap; use std::fs; use std::path::{Path, PathBuf}; +use walkdir::WalkDir; pub struct OpenCodeAnalyzer; @@ -22,7 +23,6 @@ impl OpenCodeAnalyzer { Self } - /// Returns the root directory for OpenCode message data. fn data_dir() -> Option { dirs::home_dir().map(|h| h.join(".local/share/opencode/storage/message")) } @@ -440,33 +440,111 @@ impl Analyzer for OpenCodeAnalyzer { } fn discover_data_sources(&self) -> Result> { - let mut sources = Vec::new(); - - if let Some(message_dir) = Self::data_dir() - && message_dir.is_dir() - { - // Pattern: ~/.local/share/opencode/storage/message/*/*.json - // jwalk walks directories in parallel - for entry in WalkDir::new(&message_dir) - .min_depth(2) // */*.json - .max_depth(2) - .into_iter() - .filter_map(|e| e.ok()) - .filter(|e| { - e.file_type().is_file() && e.path().extension().is_some_and(|ext| ext == "json") - }) - { - sources.push(DataSource { path: entry.path() }); - } - } + let sources = Self::data_dir() + .filter(|d| d.is_dir()) + .into_iter() + .flat_map(|message_dir| { + WalkDir::new(message_dir) + .min_depth(2) + .max_depth(2) + .into_iter() + }) + .filter_map(|e| e.ok()) + .filter(|e| { + e.file_type().is_file() && e.path().extension().is_some_and(|ext| ext == "json") + }) + .map(|e| DataSource { + path: e.into_path(), + }) + .collect(); Ok(sources) } - async fn parse_conversations( + fn is_available(&self) -> bool { + Self::data_dir() + .filter(|d| d.is_dir()) + .into_iter() + .flat_map(|message_dir| { + WalkDir::new(message_dir) + .min_depth(2) + .max_depth(2) + .into_iter() + }) + .filter_map(|e| e.ok()) + .any(|e| { + e.file_type().is_file() && e.path().extension().is_some_and(|ext| ext == "json") + }) + } + + fn parse_source(&self, source: &DataSource) -> Result> { + // For single-file parsing (incremental updates), load context fresh + let home_dir = dirs::home_dir().context("Could not find home directory")?; + let storage_root = home_dir.join(".local/share/opencode/storage"); + let project_root = storage_root.join("project"); + let session_root = storage_root.join("session"); + let part_root = storage_root.join("part"); + + let projects = load_projects(&project_root); + let sessions = load_sessions(&session_root); + + let 
content = fs::read_to_string(&source.path)?; + let mut bytes = content.into_bytes(); + let msg = simd_json::from_slice::(&mut bytes)?; + + Ok(vec![to_conversation_message( + msg, &sessions, &projects, &part_root, + )]) + } + + // Load shared context once, then process all files in parallel. + // Returns messages grouped by source path for file contribution caching. + fn parse_sources_parallel_with_paths( + &self, + sources: &[DataSource], + ) -> Vec<(PathBuf, Vec)> { + let Some(home_dir) = dirs::home_dir() else { + eprintln!("Could not find home directory for OpenCode"); + return Vec::new(); + }; + + let storage_root = home_dir.join(".local/share/opencode/storage"); + let project_root = storage_root.join("project"); + let session_root = storage_root.join("session"); + let part_root = storage_root.join("part"); + + // Load shared context once + let projects = load_projects(&project_root); + let sessions = load_sessions(&session_root); + + // Read, parse, and convert all files in parallel + // Each source file produces exactly one message + sources + .par_iter() + .filter_map(|source| { + let content = fs::read_to_string(&source.path).ok()?; + let mut bytes = content.into_bytes(); + let msg = simd_json::from_slice::(&mut bytes).ok()?; + let conversation_msg = + to_conversation_message(msg, &sessions, &projects, &part_root); + Some((source.path.clone(), vec![conversation_msg])) + }) + .collect() + } + + // OpenCode doesn't need deduplication - each message file is unique. + fn parse_sources_parallel(&self, sources: &[DataSource]) -> Vec { + self.parse_sources_parallel_with_paths(sources) + .into_iter() + .flat_map(|(_, msgs)| msgs) + .collect() + } + + // Override get_stats_with_sources to load shared context once for efficiency + fn get_stats_with_sources( &self, sources: Vec, - ) -> Result> { + ) -> Result { let home_dir = dirs::home_dir().context("Could not find home directory")?; let storage_root = home_dir.join(".local/share/opencode/storage"); let project_root = storage_root.join("project"); @@ -476,65 +554,60 @@ impl Analyzer for OpenCodeAnalyzer { let projects = load_projects(&project_root); let sessions = load_sessions(&session_root); + // Parse all files in parallel let messages: Vec = sources - .into_par_iter() + .par_iter() .filter_map(|source| { - let path = source.path; - let content = match fs::read_to_string(&path) { - Ok(c) => c, - Err(e) => { - eprintln!("Failed to read OpenCode message {}: {e}", path.display()); - return None; - } - }; - + let content = fs::read_to_string(&source.path).ok()?; let mut bytes = content.into_bytes(); - let msg = match simd_json::from_slice::(&mut bytes) { - Ok(m) => m, - Err(e) => { - eprintln!("Failed to parse OpenCode message {}: {e}", path.display()); - return None; - } - }; - + let msg = simd_json::from_slice::(&mut bytes).ok()?; Some(to_conversation_message( msg, &sessions, &projects, &part_root, )) }) .collect(); - Ok(messages) - } - - async fn get_stats(&self) -> Result { - let sources = self.discover_data_sources()?; - let messages = self.parse_conversations(sources).await?; - let daily_stats = crate::utils::aggregate_by_date(&messages); - + // Aggregate stats + let mut daily_stats = crate::utils::aggregate_by_date(&messages); + daily_stats.retain(|date, _| date != "unknown"); let num_conversations = daily_stats .values() .map(|stats| stats.conversations as u64) .sum(); - Ok(AgenticCodingToolStats { - analyzer_name: self.display_name().to_string(), + Ok(crate::types::AgenticCodingToolStats { daily_stats, - messages, num_conversations, + 
messages, + analyzer_name: self.display_name().to_string(), }) } - fn is_available(&self) -> bool { - self.discover_data_sources() - .is_ok_and(|sources| !sources.is_empty()) - } - fn get_watch_directories(&self) -> Vec { Self::data_dir() .filter(|d| d.is_dir()) .into_iter() .collect() } + + fn is_valid_data_path(&self, path: &Path) -> bool { + // Must be a file with .json extension + if !path.is_file() || path.extension().is_none_or(|ext| ext != "json") { + return false; + } + // Must be at depth 2 from data_dir (session_id/message_id.json) + if let Some(data_dir) = Self::data_dir() + && let Ok(relative) = path.strip_prefix(&data_dir) + { + return relative.components().count() == 2; + } + false + } + + // Each OpenCode message file contains exactly one message + fn contribution_strategy(&self) -> ContributionStrategy { + ContributionStrategy::SingleMessage + } } #[cfg(test)] diff --git a/src/analyzers/pi_agent.rs b/src/analyzers/pi_agent.rs index 91ec318..0faa975 100644 --- a/src/analyzers/pi_agent.rs +++ b/src/analyzers/pi_agent.rs @@ -1,15 +1,16 @@ use crate::analyzer::{Analyzer, DataSource}; -use crate::types::{AgenticCodingToolStats, Application, ConversationMessage, MessageRole, Stats}; +use crate::contribution_cache::ContributionStrategy; +use crate::types::{Application, ConversationMessage, MessageRole, Stats}; use crate::utils::hash_text; use anyhow::Result; use async_trait::async_trait; use chrono::{DateTime, Utc}; -use jwalk::WalkDir; use rayon::prelude::*; use serde::Deserialize; use std::fs::File; use std::io::Read; use std::path::{Path, PathBuf}; +use walkdir::WalkDir; pub struct PiAgentAnalyzer; @@ -18,7 +19,6 @@ impl PiAgentAnalyzer { Self } - /// Returns the root directory for Pi Agent session data. fn data_dir() -> Option { dirs::home_dir().map(|h| h.join(".pi").join("agent").join("sessions")) } @@ -413,93 +413,55 @@ impl Analyzer for PiAgentAnalyzer { } fn discover_data_sources(&self) -> Result> { - let mut sources = Vec::new(); - - if let Some(sessions_dir) = Self::data_dir() - && sessions_dir.is_dir() - { - // Pattern: ~/.pi/agent/sessions/*/*.jsonl - for entry in WalkDir::new(&sessions_dir) - .min_depth(2) - .max_depth(2) - .into_iter() - .filter_map(|e| e.ok()) - .filter(|e| e.path().extension().is_some_and(|ext| ext == "jsonl")) - { - sources.push(DataSource { path: entry.path() }); - } - } + let sources = Self::data_dir() + .filter(|d| d.is_dir()) + .into_iter() + .flat_map(|sessions_dir| { + WalkDir::new(sessions_dir) + .min_depth(2) + .max_depth(2) + .into_iter() + }) + .filter_map(|e| e.ok()) + .filter(|e| e.path().extension().is_some_and(|ext| ext == "jsonl")) + .map(|e| DataSource { + path: e.into_path(), + }) + .collect(); Ok(sources) } - async fn parse_conversations( - &self, - sources: Vec, - ) -> Result> { - let all_entries: Vec = sources - .into_par_iter() - .filter_map(|source| { - let project_hash = extract_and_hash_project_id(&source.path); - let conversation_hash = hash_text(&source.path.to_string_lossy()); - - match File::open(&source.path) { - Ok(file) => { - match parse_jsonl_file( - &source.path, - file, - &project_hash, - &conversation_hash, - ) { - Ok((messages, _)) => Some(messages), - Err(e) => { - eprintln!( - "Failed to parse Pi Agent session {}: {e:#}", - source.path.display() - ); - None - } - } - } - Err(e) => { - eprintln!( - "Failed to open Pi Agent session {}: {e}", - source.path.display() - ); - None - } - } + fn is_available(&self) -> bool { + Self::data_dir() + .filter(|d| d.is_dir()) + .into_iter() + .flat_map(|sessions_dir| { + 
WalkDir::new(sessions_dir)
+                    .min_depth(2)
+                    .max_depth(2)
+                    .into_iter()
+            })
-            .flat_map(|messages| messages)
-            .collect();
-
-        // Deduplicate by local hash
-        Ok(crate::utils::deduplicate_by_local_hash_parallel(
-            all_entries,
-        ))
+            .filter_map(|e| e.ok())
+            .any(|e| e.path().extension().is_some_and(|ext| ext == "jsonl"))
     }
 
-    async fn get_stats(&self) -> Result<AgenticCodingToolStats> {
-        let sources = self.discover_data_sources()?;
-        let messages = self.parse_conversations(sources).await?;
-        let daily_stats = crate::utils::aggregate_by_date(&messages);
-
-        let num_conversations = daily_stats
-            .values()
-            .map(|stats| stats.conversations as u64)
-            .sum();
-
-        Ok(AgenticCodingToolStats {
-            analyzer_name: self.display_name().to_string(),
-            daily_stats,
-            messages,
-            num_conversations,
-        })
+    fn parse_source(&self, source: &DataSource) -> Result<Vec<ConversationMessage>> {
+        let project_hash = extract_and_hash_project_id(&source.path);
+        let conversation_hash = hash_text(&source.path.to_string_lossy());
+
+        let file = File::open(&source.path)?;
+        let (messages, _) =
+            parse_jsonl_file(&source.path, file, &project_hash, &conversation_hash)?;
+        Ok(messages)
     }
 
-    fn is_available(&self) -> bool {
-        self.discover_data_sources()
-            .is_ok_and(|sources| !sources.is_empty())
+    fn parse_sources_parallel(&self, sources: &[DataSource]) -> Vec<ConversationMessage> {
+        let all_messages: Vec<ConversationMessage> = sources
+            .par_iter()
+            .flat_map(|source| self.parse_source(source).unwrap_or_default())
+            .collect();
+        crate::utils::deduplicate_by_local_hash(all_messages)
     }
 
     fn get_watch_directories(&self) -> Vec<PathBuf> {
@@ -508,4 +470,21 @@
             .into_iter()
             .collect()
     }
+
+    fn is_valid_data_path(&self, path: &Path) -> bool {
+        // Must be a .jsonl file at depth 2 from sessions dir
+        if !path.is_file() || path.extension().is_none_or(|ext| ext != "jsonl") {
+            return false;
+        }
+        if let Some(data_dir) = Self::data_dir()
+            && let Ok(relative) = path.strip_prefix(&data_dir)
+        {
+            return relative.components().count() == 2;
+        }
+        false
+    }
+
+    fn contribution_strategy(&self) -> ContributionStrategy {
+        ContributionStrategy::SingleSession
+    }
 }
diff --git a/src/analyzers/piebald.rs b/src/analyzers/piebald.rs
index 5290ff9..738d7ce 100644
--- a/src/analyzers/piebald.rs
+++ b/src/analyzers/piebald.rs
@@ -1,14 +1,16 @@
 //! Piebald analyzer - reads usage data from Piebald's SQLite database.
 //!
-//! https://piebald.ai
+//!
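// A minimal sketch (not part of the diff) of the read-only open that
// `open_piebald_db` presumably performs via rusqlite; the exact OpenFlags
// combination and the helper name are assumptions.
fn open_piebald_db_sketch(path: &std::path::Path) -> rusqlite::Result<rusqlite::Connection> {
    // Read-only keeps the analyzer from ever mutating Piebald's app.db.
    rusqlite::Connection::open_with_flags(path, rusqlite::OpenFlags::SQLITE_OPEN_READ_ONLY)
}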
use crate::analyzer::{Analyzer, DataSource}; +use crate::contribution_cache::ContributionStrategy; use crate::models::calculate_total_cost; -use crate::types::{AgenticCodingToolStats, Application, ConversationMessage, MessageRole, Stats}; +use crate::types::{Application, ConversationMessage, MessageRole, Stats}; use crate::utils::hash_text; use anyhow::Result; use async_trait::async_trait; use chrono::{DateTime, Utc}; +use rayon::prelude::*; use rusqlite::{Connection, OpenFlags}; use std::collections::HashMap; use std::path::PathBuf; @@ -241,53 +243,37 @@ impl Analyzer for PiebaldAnalyzer { Ok(Vec::new()) } - async fn parse_conversations( - &self, - sources: Vec, - ) -> Result> { - let mut all_messages = Vec::new(); - - for source in sources { - match open_piebald_db(&source.path) { - Ok(conn) => { - let chats = query_chats(&conn)?; - let messages = query_messages(&conn)?; - let converted = convert_messages(&chats, messages); - all_messages.extend(converted); - } - Err(e) => { - eprintln!("Failed to open Piebald database {:?}: {}", source.path, e); - } - } - } + fn parse_source(&self, source: &DataSource) -> Result> { + let conn = open_piebald_db(&source.path)?; + let chats = query_chats(&conn)?; + let messages = query_messages(&conn)?; + Ok(convert_messages(&chats, messages)) + } - // Deduplicate by local hash - Ok(crate::utils::deduplicate_by_local_hash_parallel( - all_messages, - )) + fn parse_sources_parallel(&self, sources: &[DataSource]) -> Vec { + let all_messages: Vec = sources + .par_iter() + .flat_map(|source| self.parse_source(source).unwrap_or_default()) + .collect(); + crate::utils::deduplicate_by_local_hash(all_messages) } - async fn get_stats(&self) -> Result { - let sources = self.discover_data_sources()?; - let messages = self.parse_conversations(sources).await?; - let daily_stats = crate::utils::aggregate_by_date(&messages); - - let num_conversations = daily_stats - .values() - .map(|stats| stats.conversations as u64) - .sum(); - - Ok(AgenticCodingToolStats { - daily_stats, - num_conversations, - messages, - analyzer_name: self.display_name().to_string(), - }) + fn get_watch_directories(&self) -> Vec { + dirs::data_dir() + .map(|data_dir| data_dir.join("piebald")) + .filter(|d| d.is_dir()) + .into_iter() + .collect() } - fn is_available(&self) -> bool { - self.discover_data_sources() - .is_ok_and(|sources| !sources.is_empty()) + fn is_valid_data_path(&self, path: &std::path::Path) -> bool { + // Must be the app.db file + path.is_file() && path.file_name().is_some_and(|n| n == "app.db") + } + + // Piebald uses SQLite database containing all sessions + fn contribution_strategy(&self) -> ContributionStrategy { + ContributionStrategy::MultiSession } } @@ -308,12 +294,12 @@ mod tests { assert!(result.is_ok()); } - #[tokio::test] - async fn test_parse_conversations_empty_sources() { + #[test] + fn test_get_stats_empty_sources() { let analyzer = PiebaldAnalyzer::new(); - let result = analyzer.parse_conversations(Vec::new()).await; + let result = analyzer.get_stats_with_sources(Vec::new()); assert!(result.is_ok()); - assert!(result.unwrap().is_empty()); + assert!(result.unwrap().messages.is_empty()); } #[test] diff --git a/src/analyzers/qwen_code.rs b/src/analyzers/qwen_code.rs index 9e69cfe..96e9301 100644 --- a/src/analyzers/qwen_code.rs +++ b/src/analyzers/qwen_code.rs @@ -1,17 +1,16 @@ use crate::analyzer::{Analyzer, DataSource}; +use crate::contribution_cache::ContributionStrategy; use crate::models::{calculate_cache_cost, calculate_input_cost, calculate_output_cost}; -use 
crate::types::{ - AgenticCodingToolStats, Application, ConversationMessage, FileCategory, MessageRole, Stats, -}; +use crate::types::{Application, ConversationMessage, FileCategory, MessageRole, Stats}; use crate::utils::{deserialize_utc_timestamp, hash_text}; use anyhow::Result; use async_trait::async_trait; use chrono::{DateTime, Utc}; -use jwalk::WalkDir; use rayon::prelude::*; use serde::{Deserialize, Serialize}; use simd_json::prelude::*; use std::path::{Path, PathBuf}; +use walkdir::WalkDir; pub struct QwenCodeAnalyzer; @@ -20,7 +19,6 @@ impl QwenCodeAnalyzer { Self } - /// Returns the root directory for Qwen Code data. fn data_dir() -> Option { dirs::home_dir().map(|h| h.join(".qwen").join("tmp")) } @@ -304,81 +302,53 @@ impl Analyzer for QwenCodeAnalyzer { } fn discover_data_sources(&self) -> Result> { - let mut sources = Vec::new(); - - if let Some(tmp_dir) = Self::data_dir() - && tmp_dir.is_dir() - { - // Pattern: ~/.qwen/tmp/*/chats/*.json - // jwalk walks directories in parallel - for entry in WalkDir::new(&tmp_dir) - .min_depth(3) // */chats/*.json - .max_depth(3) - .into_iter() - .filter_map(|e| e.ok()) - .filter(|e| { - e.file_type().is_file() - && e.path().extension().is_some_and(|ext| ext == "json") - && e.path() - .parent() - .and_then(|p| p.file_name()) - .is_some_and(|name| name == "chats") - }) - { - sources.push(DataSource { path: entry.path() }); - } - } + let sources = Self::data_dir() + .filter(|d| d.is_dir()) + .into_iter() + .flat_map(|tmp_dir| WalkDir::new(tmp_dir).min_depth(3).max_depth(3).into_iter()) + .filter_map(|e| e.ok()) + .filter(|e| { + e.file_type().is_file() + && e.path().extension().is_some_and(|ext| ext == "json") + && e.path() + .parent() + .and_then(|p| p.file_name()) + .is_some_and(|name| name == "chats") + }) + .map(|e| DataSource { + path: e.into_path(), + }) + .collect(); Ok(sources) } - async fn parse_conversations( - &self, - sources: Vec, - ) -> Result> { - // Parse all session files in parallel - let all_entries: Vec = sources - .into_par_iter() - .filter_map(|source| match parse_json_session_file(&source.path) { - Ok(messages) => Some(messages), - Err(e) => { - eprintln!( - "Failed to parse Qwen Code session file {}: {e:#}", - source.path.display(), - ); - None - } + fn is_available(&self) -> bool { + Self::data_dir() + .filter(|d| d.is_dir()) + .into_iter() + .flat_map(|tmp_dir| WalkDir::new(tmp_dir).min_depth(3).max_depth(3).into_iter()) + .filter_map(|e| e.ok()) + .any(|e| { + e.file_type().is_file() + && e.path().extension().is_some_and(|ext| ext == "json") + && e.path() + .parent() + .and_then(|p| p.file_name()) + .is_some_and(|name| name == "chats") }) - .flat_map(|messages| messages) - .collect(); - - // Parallel deduplicate by local hash - Ok(crate::utils::deduplicate_by_local_hash_parallel( - all_entries, - )) } - async fn get_stats(&self) -> Result { - let sources = self.discover_data_sources()?; - let messages = self.parse_conversations(sources).await?; - let daily_stats = crate::utils::aggregate_by_date(&messages); - - let num_conversations = daily_stats - .values() - .map(|stats| stats.conversations as u64) - .sum(); - - Ok(AgenticCodingToolStats { - analyzer_name: self.display_name().to_string(), - daily_stats, - messages, - num_conversations, - }) + fn parse_source(&self, source: &DataSource) -> Result> { + parse_json_session_file(&source.path) } - fn is_available(&self) -> bool { - self.discover_data_sources() - .is_ok_and(|sources| !sources.is_empty()) + fn parse_sources_parallel(&self, sources: &[DataSource]) -> Vec { + 
let all_messages: Vec = sources + .par_iter() + .flat_map(|source| self.parse_source(source).unwrap_or_default()) + .collect(); + crate::utils::deduplicate_by_local_hash(all_messages) } fn get_watch_directories(&self) -> Vec { @@ -387,4 +357,18 @@ impl Analyzer for QwenCodeAnalyzer { .into_iter() .collect() } + + fn is_valid_data_path(&self, path: &Path) -> bool { + // Must be a .json file in a "chats" directory + path.is_file() + && path.extension().is_some_and(|ext| ext == "json") + && path + .parent() + .and_then(|p| p.file_name()) + .is_some_and(|name| name == "chats") + } + + fn contribution_strategy(&self) -> ContributionStrategy { + ContributionStrategy::SingleSession + } } diff --git a/src/analyzers/roo_code.rs b/src/analyzers/roo_code.rs index 76fb92e..53520f5 100644 --- a/src/analyzers/roo_code.rs +++ b/src/analyzers/roo_code.rs @@ -1,12 +1,13 @@ use crate::analyzer::{ Analyzer, DataSource, discover_vscode_extension_sources, get_vscode_extension_tasks_dirs, + vscode_extension_has_sources, }; -use crate::types::{AgenticCodingToolStats, Application, ConversationMessage, MessageRole, Stats}; +use crate::contribution_cache::ContributionStrategy; +use crate::types::{Application, ConversationMessage, MessageRole, Stats}; use crate::utils::hash_text; use anyhow::{Context, Result}; use async_trait::async_trait; use chrono::{DateTime, Utc}; -use rayon::prelude::*; use serde::{Deserialize, Serialize}; use simd_json::prelude::*; use std::path::{Path, PathBuf}; @@ -328,59 +329,24 @@ impl Analyzer for RooCodeAnalyzer { discover_vscode_extension_sources(ROO_CODE_EXTENSION_ID, "ui_messages.json", true) } - async fn parse_conversations( - &self, - sources: Vec, - ) -> Result> { - // Parse all task directories in parallel - let all_entries: Vec = sources - .into_par_iter() - .flat_map(|source| match parse_roo_code_task_directory(&source.path) { - Ok(messages) => messages, - Err(e) => { - eprintln!( - "Failed to parse Roo Code task directory {:?}: {}", - source.path, e - ); - Vec::new() - } - }) - .collect(); + fn is_available(&self) -> bool { + vscode_extension_has_sources(ROO_CODE_EXTENSION_ID, "ui_messages.json") + } - // Parallel deduplicate by global hash - Ok(crate::utils::deduplicate_by_global_hash_parallel( - all_entries, - )) + fn parse_source(&self, source: &DataSource) -> Result> { + parse_roo_code_task_directory(&source.path) } - async fn get_stats(&self) -> Result { - let sources = self.discover_data_sources()?; - let messages = self.parse_conversations(sources).await?; - let mut daily_stats = crate::utils::aggregate_by_date(&messages); - - // Remove any "unknown" entries - daily_stats.retain(|date, _| date != "unknown"); - - let num_conversations = daily_stats - .values() - .map(|stats| stats.conversations as u64) - .sum(); - - Ok(AgenticCodingToolStats { - daily_stats, - num_conversations, - messages, - analyzer_name: self.display_name().to_string(), - }) + fn get_watch_directories(&self) -> Vec { + get_vscode_extension_tasks_dirs(ROO_CODE_EXTENSION_ID) } - fn is_available(&self) -> bool { - self.discover_data_sources() - .is_ok_and(|sources| !sources.is_empty()) + fn is_valid_data_path(&self, path: &Path) -> bool { + path.is_file() && path.file_name().is_some_and(|n| n == "ui_messages.json") } - fn get_watch_directories(&self) -> Vec { - get_vscode_extension_tasks_dirs(ROO_CODE_EXTENSION_ID) + fn contribution_strategy(&self) -> ContributionStrategy { + ContributionStrategy::SingleSession } } diff --git a/src/analyzers/tests/cline.rs b/src/analyzers/tests/cline.rs index 
ad1b1d6..359fb5f 100644 --- a/src/analyzers/tests/cline.rs +++ b/src/analyzers/tests/cline.rs @@ -24,9 +24,9 @@ fn test_cline_discover_data_sources_no_panic() { } #[tokio::test] -async fn test_cline_parse_empty_sources() { +async fn test_cline_get_stats_empty_sources() { let analyzer = ClineAnalyzer::new(); - let result = analyzer.parse_conversations(vec![]).await; + let result = analyzer.get_stats_with_sources(vec![]); assert!(result.is_ok()); - assert!(result.unwrap().is_empty()); + assert!(result.unwrap().messages.is_empty()); } diff --git a/src/analyzers/tests/copilot.rs b/src/analyzers/tests/copilot.rs index a634381..f8298d6 100644 --- a/src/analyzers/tests/copilot.rs +++ b/src/analyzers/tests/copilot.rs @@ -1,6 +1,7 @@ use crate::analyzer::Analyzer; use crate::analyzers::copilot::*; use crate::types::MessageRole; +use std::collections::HashSet; use std::path::PathBuf; #[test] @@ -46,7 +47,7 @@ fn test_parse_sample_copilot_session() { } // Verify hash uniqueness - let mut hashes = std::collections::HashSet::new(); + let mut hashes = HashSet::new(); for msg in &messages { assert!( hashes.insert(msg.global_hash.clone()), diff --git a/src/analyzers/tests/gemini_cli.rs b/src/analyzers/tests/gemini_cli.rs index 9a76c27..c60d6d0 100644 --- a/src/analyzers/tests/gemini_cli.rs +++ b/src/analyzers/tests/gemini_cli.rs @@ -46,11 +46,9 @@ async fn test_gemini_cli_reasoning_tokens() { let analyzer = GeminiCliAnalyzer::new(); - // We can't easily inject sources into `get_stats` without mocking `glob` or `discover_data_sources`. - // But `parse_conversations` takes a list of sources. - - let sources = vec![crate::analyzer::DataSource { path: session_path }]; - let messages = analyzer.parse_conversations(sources).await.unwrap(); + // Use parse_sources_parallel to parse and deduplicate + let source = crate::analyzer::DataSource { path: session_path }; + let messages = analyzer.parse_sources_parallel(&[source]); assert_eq!(messages.len(), 2); diff --git a/src/analyzers/tests/kilo_code.rs b/src/analyzers/tests/kilo_code.rs index 4b6552e..1e56a5e 100644 --- a/src/analyzers/tests/kilo_code.rs +++ b/src/analyzers/tests/kilo_code.rs @@ -24,9 +24,9 @@ fn test_kilo_code_discover_data_sources_no_panic() { } #[tokio::test] -async fn test_kilo_code_parse_empty_sources() { +async fn test_kilo_code_get_stats_empty_sources() { let analyzer = KiloCodeAnalyzer::new(); - let result = analyzer.parse_conversations(vec![]).await; + let result = analyzer.get_stats_with_sources(vec![]); assert!(result.is_ok()); - assert!(result.unwrap().is_empty()); + assert!(result.unwrap().messages.is_empty()); } diff --git a/src/analyzers/tests/opencode.rs b/src/analyzers/tests/opencode.rs index 6218891..7654f1a 100644 --- a/src/analyzers/tests/opencode.rs +++ b/src/analyzers/tests/opencode.rs @@ -24,9 +24,9 @@ fn test_opencode_discover_data_sources_no_panic() { } #[tokio::test] -async fn test_opencode_parse_empty_sources() { +async fn test_opencode_get_stats_empty_sources() { let analyzer = OpenCodeAnalyzer::new(); - let result = analyzer.parse_conversations(vec![]).await; + let result = analyzer.get_stats_with_sources(vec![]); assert!(result.is_ok()); - assert!(result.unwrap().is_empty()); + assert!(result.unwrap().messages.is_empty()); } diff --git a/src/analyzers/tests/qwen_code.rs b/src/analyzers/tests/qwen_code.rs index 5dd36be..a1d7707 100644 --- a/src/analyzers/tests/qwen_code.rs +++ b/src/analyzers/tests/qwen_code.rs @@ -24,9 +24,9 @@ fn test_qwen_code_discover_data_sources_no_panic() { } #[tokio::test] -async fn 
test_qwen_code_parse_empty_sources() { +async fn test_qwen_code_get_stats_empty_sources() { let analyzer = QwenCodeAnalyzer::new(); - let result = analyzer.parse_conversations(vec![]).await; + let result = analyzer.get_stats_with_sources(vec![]); assert!(result.is_ok()); - assert!(result.unwrap().is_empty()); + assert!(result.unwrap().messages.is_empty()); } diff --git a/src/analyzers/tests/roo_code.rs b/src/analyzers/tests/roo_code.rs index f64d0cf..be3a6cc 100644 --- a/src/analyzers/tests/roo_code.rs +++ b/src/analyzers/tests/roo_code.rs @@ -24,9 +24,9 @@ fn test_roo_code_discover_data_sources_no_panic() { } #[tokio::test] -async fn test_roo_code_parse_empty_sources() { +async fn test_roo_code_get_stats_empty_sources() { let analyzer = RooCodeAnalyzer::new(); - let result = analyzer.parse_conversations(vec![]).await; + let result = analyzer.get_stats_with_sources(vec![]); assert!(result.is_ok()); - assert!(result.unwrap().is_empty()); + assert!(result.unwrap().messages.is_empty()); } diff --git a/src/cache/mod.rs b/src/cache/mod.rs new file mode 100644 index 0000000..b952fd2 --- /dev/null +++ b/src/cache/mod.rs @@ -0,0 +1,5 @@ +//! Caching utilities for memory-efficient data storage. + +mod model_intern; + +pub use model_intern::{ModelKey, intern_model, resolve_model}; diff --git a/src/cache/model_intern.rs b/src/cache/model_intern.rs new file mode 100644 index 0000000..b819da2 --- /dev/null +++ b/src/cache/model_intern.rs @@ -0,0 +1,67 @@ +//! Model string interning for memory-efficient model name storage. + +use lasso::{MiniSpur, ThreadedRodeo}; +use std::sync::LazyLock; + +/// Global thread-safe string interner for model names. +/// Uses MiniSpur (2 bytes) instead of Spur (4 bytes) since we have < 65536 unique models. +static MODEL_INTERNER: LazyLock> = LazyLock::new(ThreadedRodeo::new); + +/// Interned key for a model name. +/// +/// Model names like "claude-3-5-sonnet" repeat across thousands of sessions. +/// Interning reduces memory from 24-byte String + heap per occurrence to 2-byte key. +/// +/// Use [`intern_model`] to create a key and [`resolve_model`] to get the string back. +/// +/// Note: `Default` is implemented to satisfy `TinyVec` bounds but should not be used +/// directly - the default value's resolution is undefined until the interner is populated. +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Default)] +#[repr(transparent)] +pub struct ModelKey(MiniSpur); + +impl ModelKey { + /// Resolve this key back to its model name string. + #[inline] + pub fn resolve(self) -> &'static str { + MODEL_INTERNER.resolve(&self.0) + } +} + +/// Intern a model name, returning a cheap 2-byte key. +#[inline] +pub fn intern_model(model: &str) -> ModelKey { + ModelKey(MODEL_INTERNER.get_or_intern(model)) +} + +/// Resolve an interned model key back to its string. 
+#[inline] +pub fn resolve_model(key: ModelKey) -> &'static str { + key.resolve() +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn intern_and_resolve() { + let key = intern_model("claude-3-5-sonnet"); + assert_eq!(resolve_model(key), "claude-3-5-sonnet"); + assert_eq!(key.resolve(), "claude-3-5-sonnet"); + } + + #[test] + fn same_string_same_key() { + let key1 = intern_model("gpt-4"); + let key2 = intern_model("gpt-4"); + assert_eq!(key1, key2); + } + + #[test] + fn different_strings_different_keys() { + let key1 = intern_model("model-a"); + let key2 = intern_model("model-b"); + assert_ne!(key1, key2); + } +} diff --git a/src/contribution_cache/mod.rs b/src/contribution_cache/mod.rs new file mode 100644 index 0000000..8394560 --- /dev/null +++ b/src/contribution_cache/mod.rs @@ -0,0 +1,386 @@ +//! Contribution caching for incremental updates. +//! +//! Provides memory-efficient caching strategies for different analyzer types: +//! - [`SingleMessageContribution`]: 32 bytes for 1-message-per-file analyzers (OpenCode) +//! - [`SingleSessionContribution`]: ~72 bytes for 1-session-per-file analyzers (most) +//! - [`MultiSessionContribution`]: ~100+ bytes for all-in-one-file analyzers (Piebald) + +mod multi_session; +mod single_message; +mod single_session; + +pub use multi_session::MultiSessionContribution; +pub use single_message::SingleMessageContribution; +pub use single_session::SingleSessionContribution; + +use std::path::Path; + +use dashmap::DashMap; +use xxhash_rust::xxh3::xxh3_64; + +use crate::types::{AnalyzerStatsView, CompactDate, DailyStats}; + +// ============================================================================ +// PathHash - Cache key type +// ============================================================================ + +/// Newtype wrapper for xxh3 path hashes, used as cache keys. +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub struct PathHash(u64); + +impl PathHash { + /// Hash a path using xxh3 for cache key lookup. + #[inline] + pub fn new(path: &Path) -> Self { + Self(xxh3_64(path.as_os_str().as_encoded_bytes())) + } +} + +/// Newtype wrapper for xxh3 session hashes, used to avoid String allocation for session lookup. +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Default)] +pub struct SessionHash(u64); + +impl SessionHash { + /// Hash a session/conversation ID string using xxh3. + #[inline] + pub fn from_str(s: &str) -> Self { + Self(xxh3_64(s.as_bytes())) + } +} + +// ============================================================================ +// ContributionStrategy - Analyzer categorization +// ============================================================================ + +/// Strategy for caching file contributions based on analyzer data structure. +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub enum ContributionStrategy { + /// 1 file = 1 message (e.g., OpenCode) + /// Uses `SingleMessageContribution` (32 bytes per file) + SingleMessage, + + /// 1 file = 1 session = many messages (e.g., Claude Code, Cline, Copilot) + /// Uses `SingleSessionContribution` (~72 bytes per file) + SingleSession, + + /// 1 file = many sessions (e.g., Piebald with SQLite) + /// Uses `MultiSessionContribution` (~100+ bytes per file) + MultiSession, +} + +// ============================================================================ +// ContributionCache - Unified cache wrapper +// ============================================================================ + +/// Unified cache for file contributions with strategy-specific storage. 
+/// Uses three separate DashMaps for type safety and memory efficiency.
+pub struct ContributionCache {
+    /// Cache for single-message-per-file analyzers (32 bytes per entry)
+    single_message: DashMap<PathHash, SingleMessageContribution>,
+    /// Cache for single-session-per-file analyzers (~72 bytes per entry)
+    single_session: DashMap<PathHash, SingleSessionContribution>,
+    /// Cache for multi-session-per-file analyzers (~100+ bytes per entry)
+    multi_session: DashMap<PathHash, MultiSessionContribution>,
+}
+
+impl Default for ContributionCache {
+    fn default() -> Self {
+        Self::new()
+    }
+}
+
+impl ContributionCache {
+    /// Create a new empty contribution cache.
+    pub fn new() -> Self {
+        Self {
+            single_message: DashMap::new(),
+            single_session: DashMap::new(),
+            multi_session: DashMap::new(),
+        }
+    }
+
+    /// Clear all caches.
+    pub fn clear(&self) {
+        self.single_message.clear();
+        self.single_session.clear();
+        self.multi_session.clear();
+    }
+
+    /// Shrink all caches to fit.
+    pub fn shrink_to_fit(&self) {
+        self.single_message.shrink_to_fit();
+        self.single_session.shrink_to_fit();
+        self.multi_session.shrink_to_fit();
+    }
+
+    // --- Single Message operations ---
+
+    /// Insert a single-message contribution.
+    #[inline]
+    pub fn insert_single_message(&self, key: PathHash, contrib: SingleMessageContribution) {
+        self.single_message.insert(key, contrib);
+    }
+
+    /// Get a single-message contribution.
+    #[inline]
+    pub fn get_single_message(&self, key: &PathHash) -> Option<SingleMessageContribution> {
+        self.single_message.get(key).map(|r| *r)
+    }
+
+    // --- Single Session operations ---
+
+    /// Insert a single-session contribution.
+    #[inline]
+    pub fn insert_single_session(&self, key: PathHash, contrib: SingleSessionContribution) {
+        self.single_session.insert(key, contrib);
+    }
+
+    /// Get a single-session contribution.
+    #[inline]
+    pub fn get_single_session(&self, key: &PathHash) -> Option<SingleSessionContribution> {
+        self.single_session.get(key).map(|r| r.clone())
+    }
+
+    // --- Multi Session operations ---
+
+    /// Insert a multi-session contribution.
+    #[inline]
+    pub fn insert_multi_session(&self, key: PathHash, contrib: MultiSessionContribution) {
+        self.multi_session.insert(key, contrib);
+    }
+
+    /// Get a multi-session contribution.
+    #[inline]
+    pub fn get_multi_session(&self, key: &PathHash) -> Option<MultiSessionContribution> {
+        self.multi_session.get(key).map(|r| r.clone())
+    }
+
+    // --- Strategy-agnostic removal ---
+
+    /// Try to remove a contribution from any cache, returning which type was found.
+    /// Returns None if not found in any cache.
+    pub fn remove_any(&self, key: &PathHash) -> Option<RemovedContribution> {
+        if let Some((_, c)) = self.single_message.remove(key) {
+            return Some(RemovedContribution::SingleMessage(c));
+        }
+        if let Some((_, c)) = self.single_session.remove(key) {
+            return Some(RemovedContribution::SingleSession(c));
+        }
+        if let Some((_, c)) = self.multi_session.remove(key) {
+            return Some(RemovedContribution::MultiSession(c));
+        }
+        None
+    }
+}
+
+/// Result of removing a contribution from the cache.
+pub enum RemovedContribution {
+    SingleMessage(SingleMessageContribution),
+    SingleSession(SingleSessionContribution),
+    MultiSession(MultiSessionContribution),
+}
+
+// ============================================================================
+// AnalyzerStatsView extensions for contribution operations
+// ============================================================================
+
+impl AnalyzerStatsView {
+    /// Add a single-message contribution to this view.
+ pub fn add_single_message_contribution(&mut self, contrib: &SingleMessageContribution) { + // Update daily stats + let date = contrib.date(); + let date_str = date.to_string(); + let day_stats = self + .daily_stats + .entry(date_str) + .or_insert_with(|| DailyStats { + date, + ..Default::default() + }); + + // Single message contributes to AI message count and stats + if contrib.model.is_some() { + day_stats.ai_messages += 1; + day_stats.stats += contrib.to_tui_stats(); + } + + // Find session by hash and update + if let Some(existing) = self.session_aggregates.iter_mut().find(|s| { + SingleMessageContribution::hash_session_id(&s.session_id) == contrib.session_hash + }) { + existing.stats += contrib.to_tui_stats(); + if let Some(model) = contrib.model { + existing.models.increment(model, 1); + } + } + // Note: We don't create new sessions here - they should already exist from initial load. + } + + /// Subtract a single-message contribution from this view. + pub fn subtract_single_message_contribution(&mut self, contrib: &SingleMessageContribution) { + // Update daily stats + let date_str = contrib.date().to_string(); + if let Some(day_stats) = self.daily_stats.get_mut(&date_str) { + if contrib.model.is_some() { + day_stats.ai_messages = day_stats.ai_messages.saturating_sub(1); + day_stats.stats -= contrib.to_tui_stats(); + } + + // Remove if empty + if day_stats.user_messages == 0 + && day_stats.ai_messages == 0 + && day_stats.conversations == 0 + { + self.daily_stats.remove(&date_str); + } + } + + // Find session by hash and subtract + if let Some(existing) = self.session_aggregates.iter_mut().find(|s| { + SingleMessageContribution::hash_session_id(&s.session_id) == contrib.session_hash + }) { + existing.stats -= contrib.to_tui_stats(); + if let Some(model) = contrib.model { + existing.models.decrement(model, 1); + } + } + } + + /// Add a single-session contribution to this view. + pub fn add_single_session_contribution(&mut self, contrib: &SingleSessionContribution) { + // Update daily stats + let date_str = contrib.date.to_string(); + let day_stats = self + .daily_stats + .entry(date_str) + .or_insert_with(|| DailyStats { + date: contrib.date, + ..Default::default() + }); + + day_stats.ai_messages += contrib.ai_message_count; + day_stats.stats += contrib.stats; + + // Find session by hash and update + if let Some(existing) = self.session_aggregates.iter_mut().find(|s| { + SingleMessageContribution::hash_session_id(&s.session_id) == contrib.session_hash + }) { + existing.stats += contrib.stats; + for &(model, count) in contrib.models.iter() { + existing.models.increment(model, count); + } + } + } + + /// Subtract a single-session contribution from this view. 
+    pub fn subtract_single_session_contribution(&mut self, contrib: &SingleSessionContribution) {
+        // Update daily stats
+        let date_str = contrib.date.to_string();
+        if let Some(day_stats) = self.daily_stats.get_mut(&date_str) {
+            day_stats.ai_messages = day_stats
+                .ai_messages
+                .saturating_sub(contrib.ai_message_count);
+            day_stats.stats -= contrib.stats;
+
+            // Remove if empty
+            if day_stats.user_messages == 0
+                && day_stats.ai_messages == 0
+                && day_stats.conversations == 0
+            {
+                self.daily_stats.remove(&date_str);
+            }
+        }
+
+        // Find session by hash and subtract
+        if let Some(existing) = self.session_aggregates.iter_mut().find(|s| {
+            SingleMessageContribution::hash_session_id(&s.session_id) == contrib.session_hash
+        }) {
+            existing.stats -= contrib.stats;
+            for &(model, count) in contrib.models.iter() {
+                existing.models.decrement(model, count);
+            }
+        }
+    }
+
+    /// Add a multi-session contribution to this view.
+    pub fn add_multi_session_contribution(&mut self, contrib: &MultiSessionContribution) {
+        // Add daily stats
+        for (date, day_stats) in &contrib.daily_stats {
+            *self
+                .daily_stats
+                .entry(date.clone())
+                .or_insert_with(|| DailyStats {
+                    date: CompactDate::from_str(date).unwrap_or_default(),
+                    ..Default::default()
+                }) += day_stats;
+        }
+
+        // Add session aggregates - merge if same session_id exists, otherwise append
+        for new_session in &contrib.session_aggregates {
+            if let Some(existing) = self
+                .session_aggregates
+                .iter_mut()
+                .find(|s| s.session_id == new_session.session_id)
+            {
+                // Merge into existing session
+                existing.stats += new_session.stats;
+                for &(model, count) in new_session.models.iter() {
+                    existing.models.increment(model, count);
+                }
+                if new_session.first_timestamp < existing.first_timestamp {
+                    existing.first_timestamp = new_session.first_timestamp;
+                    existing.date = new_session.date;
+                }
+                if existing.session_name.is_none() {
+                    existing.session_name = new_session.session_name.clone();
+                }
+            } else {
+                // New session
+                self.session_aggregates.push(new_session.clone());
+            }
+        }
+
+        self.num_conversations += contrib.conversation_count;
+
+        // Keep sessions sorted by timestamp
+        self.session_aggregates.sort_by_key(|s| s.first_timestamp);
+    }
+
+    /// Subtract a multi-session contribution from this view.
+    pub fn subtract_multi_session_contribution(&mut self, contrib: &MultiSessionContribution) {
+        // Subtract daily stats
+        for (date, day_stats) in &contrib.daily_stats {
+            if let Some(existing) = self.daily_stats.get_mut(date) {
+                *existing -= day_stats;
+                // Remove if empty
+                if existing.user_messages == 0
+                    && existing.ai_messages == 0
+                    && existing.conversations == 0
+                {
+                    self.daily_stats.remove(date);
+                }
+            }
+        }
+
+        // Subtract session stats
+        for old_session in &contrib.session_aggregates {
+            if let Some(existing) = self
+                .session_aggregates
+                .iter_mut()
+                .find(|s| s.session_id == old_session.session_id)
+            {
+                existing.stats -= old_session.stats;
+                for &(model, count) in old_session.models.iter() {
+                    existing.models.decrement(model, count);
+                }
+            }
+        }
+
+        self.num_conversations = self
+            .num_conversations
+            .saturating_sub(contrib.conversation_count);
+    }
+}
+
+#[cfg(test)]
+mod tests;
diff --git a/src/contribution_cache/multi_session.rs b/src/contribution_cache/multi_session.rs
new file mode 100644
index 0000000..5b9d6d6
--- /dev/null
+++ b/src/contribution_cache/multi_session.rs
@@ -0,0 +1,42 @@
+//! Multi-session contribution type for all-in-one-file analyzers.
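// A sketch (not part of the diff) of the refresh cycle this type is built for:
// back out a changed file's old contribution, recompute it, and apply the new
// one. The helper name is hypothetical, and `Arc<str>` for the analyzer name is
// an assumption; the concrete `Arc` payload type is elided above.
fn refresh_multi_session_sketch(
    cache: &super::ContributionCache,
    view: &mut crate::types::AnalyzerStatsView,
    path: &std::path::Path,
    messages: &[crate::types::ConversationMessage],
    analyzer_name: std::sync::Arc<str>,
) {
    let key = super::PathHash::new(path);
    // Remove and subtract whatever this file contributed before the change.
    if let Some(super::RemovedContribution::MultiSession(old)) = cache.remove_any(&key) {
        view.subtract_multi_session_contribution(&old);
    }
    // Recompute from the re-parsed messages, apply it, and re-cache it.
    let fresh = MultiSessionContribution::from_messages(messages, analyzer_name);
    view.add_multi_session_contribution(&fresh);
    cache.insert_multi_session(key, fresh);
}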
+
+use std::collections::BTreeMap;
+use std::sync::Arc;
+
+use crate::tui::logic::aggregate_sessions_from_messages;
+use crate::types::{ConversationMessage, DailyStats, SessionAggregate};
+use crate::utils::aggregate_by_date;
+
+// ============================================================================
+// MultiSessionContribution - For all-in-one-file analyzers
+// ============================================================================
+
+/// Full contribution for multi-session-per-file analyzers.
+/// Used when a single file contains multiple conversations (e.g., Piebald SQLite).
+#[derive(Debug, Clone, Default)]
+pub struct MultiSessionContribution {
+    /// Session aggregates from this file
+    pub session_aggregates: Vec<SessionAggregate>,
+    /// Daily stats from this file keyed by date
+    pub daily_stats: BTreeMap<String, DailyStats>,
+    /// Number of conversations in this file
+    pub conversation_count: u64,
+}
+
+impl MultiSessionContribution {
+    /// Compute from parsed messages.
+    /// Takes `Arc` for analyzer_name to avoid allocating a new String per session.
+    pub fn from_messages(messages: &[ConversationMessage], analyzer_name: Arc) -> Self {
+        let session_aggregates = aggregate_sessions_from_messages(messages, analyzer_name);
+        let mut daily_stats = aggregate_by_date(messages);
+        daily_stats.retain(|date, _| date != "unknown");
+
+        let conversation_count = session_aggregates.len() as u64;
+
+        Self {
+            session_aggregates,
+            daily_stats,
+            conversation_count,
+        }
+    }
+}
diff --git a/src/contribution_cache/single_message.rs b/src/contribution_cache/single_message.rs
new file mode 100644
index 0000000..b467db6
--- /dev/null
+++ b/src/contribution_cache/single_message.rs
@@ -0,0 +1,275 @@
+//! Single-message contribution type for 1-file-1-message analyzers.
+//!
+//! Optimized to 32 bytes for cache alignment using bitfield packing.
+
+use c2rust_bitfields::BitfieldStruct;
+
+use super::SessionHash;
+use crate::cache::ModelKey;
+use crate::types::{CompactDate, ConversationMessage, TuiStats, intern_model};
+
+// ============================================================================
+// PackedStatsDate - Bitfield-packed stats and date (22 bytes)
+// ============================================================================
+
+// Diagnostic reference (95,532 messages analyzed):
+//
+// | Field             | Observed Max | Rec. Bits | Rec. Bits Max     |
+// |-------------------|--------------|-----------|-------------------|
+// | input_tokens      | 170,749      | 27        | 134,217,727       |
+// | output_tokens     | 31,999       | 26        | 67,108,863        |
+// | reasoning_tokens  | 7,005        | 26        | 67,108,863        |
+// | cached_tokens     | 186,677      | 27        | 134,217,727       |
+// | cost_cents        | 356          | 16        | 65,535 ($655.35)  |
+// | tool_calls        | 73           | 14        | 16,383            |
+// | year_offset       | 2025-2026    | 6         | 63 (2020-2083)    |
+// | month             | 1-12         | 4         | 15                |
+// | day               | 1-31         | 5         | 31                |
+// | duration_ms       | —            | 25        | 33,554,431 (~9.3h)|
+//
+// Total: 176 bits = 22 bytes
+
+/// Packed stats and date in 176 bits (22 bytes).
+/// +/// Layout: +/// - input_tokens: bits 0-26 (27 bits, max 134,217,727) +/// - output_tokens: bits 27-52 (26 bits, max 67,108,863) +/// - reasoning_tokens: bits 53-78 (26 bits, max 67,108,863) +/// - cached_tokens: bits 79-105 (27 bits, max 134,217,727) +/// - cost_cents: bits 106-121 (16 bits, max 65,535 = $655.35) +/// - tool_calls: bits 122-135 (14 bits, max 16,383) +/// - year_offset: bits 136-141 (6 bits, years 2020-2083) +/// - month: bits 142-145 (4 bits, 1-12) +/// - day: bits 146-150 (5 bits, 1-31) +/// - duration_ms: bits 151-175 (25 bits, max ~9.3 hours) +#[repr(C, align(1))] +#[derive(BitfieldStruct, Clone, Copy, Default)] +pub struct PackedStatsDate { + #[bitfield(name = "input_tokens", ty = "u32", bits = "0..=26")] + #[bitfield(name = "output_tokens", ty = "u32", bits = "27..=52")] + #[bitfield(name = "reasoning_tokens", ty = "u32", bits = "53..=78")] + #[bitfield(name = "cached_tokens", ty = "u32", bits = "79..=105")] + #[bitfield(name = "cost_cents", ty = "u16", bits = "106..=121")] + #[bitfield(name = "tool_calls", ty = "u16", bits = "122..=135")] + #[bitfield(name = "year_offset", ty = "u8", bits = "136..=141")] + #[bitfield(name = "month", ty = "u8", bits = "142..=145")] + #[bitfield(name = "day", ty = "u8", bits = "146..=150")] + #[bitfield(name = "duration_ms", ty = "u32", bits = "151..=175")] + data: [u8; 22], +} + +impl std::fmt::Debug for PackedStatsDate { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.debug_struct("PackedStatsDate") + .field("input_tokens", &self.input_tokens()) + .field("output_tokens", &self.output_tokens()) + .field("reasoning_tokens", &self.reasoning_tokens()) + .field("cached_tokens", &self.cached_tokens()) + .field("cost_cents", &self.cost_cents()) + .field("tool_calls", &self.tool_calls()) + .field("year_offset", &self.year_offset()) + .field("month", &self.month()) + .field("day", &self.day()) + .field("duration_ms", &self.duration_ms()) + .finish() + } +} + +/// Base year for year_offset encoding (6 bits covers 2020-2083). +const BASE_YEAR: u16 = 2020; + +impl PackedStatsDate { + /// Pack stats and date into the bitfield. + #[inline] + pub fn pack(stats: &crate::types::Stats, date: CompactDate) -> Self { + let mut packed = Self::default(); + + // Pack stats (with saturation for safety) + packed.set_input_tokens(stats.input_tokens.min(0x7FF_FFFF) as u32); + packed.set_output_tokens(stats.output_tokens.min(0x3FF_FFFF) as u32); + packed.set_reasoning_tokens(stats.reasoning_tokens.min(0x3FF_FFFF) as u32); + packed.set_cached_tokens(stats.cached_tokens.min(0x7FF_FFFF) as u32); + packed.set_cost_cents((stats.cost * 100.0).round().min(u16::MAX as f64) as u16); + packed.set_tool_calls(stats.tool_calls.min(0x3FFF) as u16); + + // Pack date + let year_offset = date.year().saturating_sub(BASE_YEAR).min(63) as u8; + packed.set_year_offset(year_offset); + packed.set_month(date.month()); + packed.set_day(date.day()); + + // duration_ms reserved for future use + packed.set_duration_ms(0); + + packed + } + + /// Extract date from packed representation. + #[inline] + pub fn unpack_date(&self) -> CompactDate { + CompactDate::from_parts( + BASE_YEAR + self.year_offset() as u16, + self.month(), + self.day(), + ) + } + + /// Convert packed stats to TuiStats for display. 
+ #[inline] + pub fn to_tui_stats(self) -> TuiStats { + TuiStats { + input_tokens: self.input_tokens(), + output_tokens: self.output_tokens(), + reasoning_tokens: self.reasoning_tokens(), + cached_tokens: self.cached_tokens(), + cost_cents: self.cost_cents() as u32, + tool_calls: self.tool_calls() as u32, + } + } +} + +// ============================================================================ +// SingleMessageContribution - For 1 file = 1 message analyzers (32 bytes) +// ============================================================================ + +/// Lightweight contribution for single-message-per-file analyzers. +/// Uses 32 bytes (cache-aligned) instead of previous 40 bytes. +/// Designed for analyzers like OpenCode where each file contains exactly one message. +#[derive(Debug, Clone, Copy, Default)] +#[repr(C)] +pub struct SingleMessageContribution { + /// Hash of conversation_hash for session lookup (avoids String allocation) + pub session_hash: SessionHash, // 8 bytes (offset 0) + /// Model used (interned key), None if no model specified + pub model: Option, // 2 bytes (offset 8, niche-optimized) + /// Packed stats and date + pub packed: PackedStatsDate, // 22 bytes (offset 10) +} // Total: 32 bytes + +// Compile-time size assertion +const _: () = assert!(std::mem::size_of::() == 32); + +impl SingleMessageContribution { + /// Create from a single message. + #[inline] + pub fn from_message(msg: &ConversationMessage) -> Self { + Self { + session_hash: SessionHash::from_str(&msg.conversation_hash), + model: msg.model.as_ref().map(|m| intern_model(m)), + packed: PackedStatsDate::pack(&msg.stats, CompactDate::from_local(&msg.date)), + } + } + + /// Get the date from the packed representation. + #[inline] + pub fn date(&self) -> CompactDate { + self.packed.unpack_date() + } + + /// Convert packed stats to TuiStats for display. + #[inline] + pub fn to_tui_stats(self) -> TuiStats { + self.packed.to_tui_stats() + } + + /// Hash a session_id string for comparison with stored session_hash. 
+ #[inline] + pub fn hash_session_id(session_id: &str) -> SessionHash { + SessionHash::from_str(session_id) + } +} + +#[cfg(test)] +mod size_tests { + use super::*; + use std::mem::{align_of, size_of}; + + #[test] + fn struct_sizes_optimized() { + println!("\n=== Struct Size Analysis ==="); + println!( + "PackedStatsDate: {} bytes, align {}", + size_of::<PackedStatsDate>(), + align_of::<PackedStatsDate>() + ); + println!( + "Option<ModelKey>: {} bytes, align {}", + size_of::<Option<ModelKey>>(), + align_of::<Option<ModelKey>>() + ); + println!( + "SessionHash: {} bytes, align {}", + size_of::<SessionHash>(), + align_of::<SessionHash>() + ); + println!( + "SingleMessageContribution: {} bytes, align {}", + size_of::<SingleMessageContribution>(), + align_of::<SingleMessageContribution>() + ); + println!("=== End Analysis ===\n"); + + // Verify sizes + assert_eq!(size_of::<PackedStatsDate>(), 22); + assert_eq!(size_of::<SingleMessageContribution>(), 32); + } + + #[test] + fn bitfield_roundtrip() { + use crate::types::Stats; + + let stats = Stats { + input_tokens: 170_749, + output_tokens: 31_999, + reasoning_tokens: 7_005, + cached_tokens: 186_677, + cost: 3.56, + tool_calls: 73, + ..Default::default() + }; + let date = CompactDate::from_parts(2025, 6, 15); + + let packed = PackedStatsDate::pack(&stats, date); + + assert_eq!(packed.input_tokens(), 170_749); + assert_eq!(packed.output_tokens(), 31_999); + assert_eq!(packed.reasoning_tokens(), 7_005); + assert_eq!(packed.cached_tokens(), 186_677); + assert_eq!(packed.cost_cents(), 356); + assert_eq!(packed.tool_calls(), 73); + + let unpacked_date = packed.unpack_date(); + assert_eq!(unpacked_date.year(), 2025); + assert_eq!(unpacked_date.month(), 6); + assert_eq!(unpacked_date.day(), 15); + } + + #[test] + fn bitfield_max_values() { + use crate::types::Stats; + + // Test maximum values within bit limits + let stats = Stats { + input_tokens: 134_217_727, // 27-bit max + output_tokens: 67_108_863, // 26-bit max + reasoning_tokens: 67_108_863, + cached_tokens: 134_217_727, + cost: 655.35, // u16 max cents + tool_calls: 16_383, // 14-bit max + ..Default::default() + }; + let date = CompactDate::from_parts(2083, 12, 31); // Max year (2020 + 63) + + let packed = PackedStatsDate::pack(&stats, date); + + assert_eq!(packed.input_tokens(), 134_217_727); + assert_eq!(packed.output_tokens(), 67_108_863); + assert_eq!(packed.cost_cents(), 65535); + assert_eq!(packed.tool_calls(), 16383); + + let unpacked_date = packed.unpack_date(); + assert_eq!(unpacked_date.year(), 2083); + assert_eq!(unpacked_date.month(), 12); + assert_eq!(unpacked_date.day(), 31); + } +} diff --git a/src/contribution_cache/single_session.rs b/src/contribution_cache/single_session.rs new file mode 100644 index 0000000..c41b995 --- /dev/null +++ b/src/contribution_cache/single_session.rs @@ -0,0 +1,57 @@ +//! Single-session contribution type for 1-file-1-session analyzers. + +use super::SessionHash; +use crate::types::{CompactDate, ConversationMessage, ModelCounts, TuiStats, intern_model}; + +// ============================================================================ +// SingleSessionContribution - For 1 file = 1 session analyzers +// ============================================================================ + +/// Contribution for single-session-per-file analyzers. +/// Uses ~72 bytes instead of ~100+ bytes for full contributions. +/// Designed for most analyzers where each file contains one conversation/session.
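+///
+/// Illustrative use (a sketch; `messages` stands for a slice of this crate's
+/// `ConversationMessage` values parsed from one session file):
+/// ```ignore
+/// let contrib = SingleSessionContribution::from_messages(&messages);
+/// // Only messages carrying a model count as AI messages and add to stats.
+/// println!("{} AI messages on {}", contrib.ai_message_count, contrib.date);
+/// ```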
+#[derive(Debug, Clone)] +pub struct SingleSessionContribution { + /// Aggregated stats from all messages in this session + pub stats: TuiStats, + /// Primary date (date of first message) + pub date: CompactDate, + /// Models used in this session with reference counts + pub models: ModelCounts, + /// Hash of conversation_hash for session lookup + pub session_hash: SessionHash, + /// Number of AI messages (for daily_stats.ai_messages) + pub ai_message_count: u32, +} + +impl SingleSessionContribution { + /// Create from messages belonging to a single session. + pub fn from_messages(messages: &[ConversationMessage]) -> Self { + let mut stats = TuiStats::default(); + let mut models = ModelCounts::new(); + let mut ai_message_count = 0u32; + let mut first_date = CompactDate::default(); + let mut session_hash = SessionHash::default(); + + for (i, msg) in messages.iter().enumerate() { + if i == 0 { + first_date = CompactDate::from_local(&msg.date); + session_hash = SessionHash::from_str(&msg.conversation_hash); + } + + if let Some(model) = &msg.model { + ai_message_count += 1; + models.increment(intern_model(model), 1); + stats += TuiStats::from(&msg.stats); + } + } + + Self { + stats, + date: first_date, + models, + session_hash, + ai_message_count, + } + } +} diff --git a/src/contribution_cache/tests/basic_operations.rs b/src/contribution_cache/tests/basic_operations.rs new file mode 100644 index 0000000..ef853f9 --- /dev/null +++ b/src/contribution_cache/tests/basic_operations.rs @@ -0,0 +1,177 @@ +//! Basic ContributionCache operations tests + +use std::path::PathBuf; + +use super::super::{ + ContributionCache, MultiSessionContribution, PathHash, SessionHash, SingleMessageContribution, + SingleSessionContribution, +}; +use super::make_message; + +// ============================================================================ +// ContributionCache Basic Operations Tests +// ============================================================================ + +#[test] +fn test_contribution_cache_single_message_insert_get() { + let cache = ContributionCache::new(); + let path = PathBuf::from("/test/file1.json"); + let path_hash = PathHash::new(&path); + + let msg = make_message( + "test_session", + Some("claude-3-5-sonnet"), + 100, + 50, + 0.05, + 2, + "2025-01-15", + ); + let contrib = SingleMessageContribution::from_message(&msg); + + cache.insert_single_message(path_hash, contrib); + let retrieved = cache.get_single_message(&path_hash); + + assert!(retrieved.is_some()); + let retrieved = retrieved.unwrap(); + let stats = retrieved.to_tui_stats(); + assert_eq!(stats.input_tokens, 100); + assert_eq!(stats.output_tokens, 50); + assert_eq!( + retrieved.session_hash, + SessionHash::from_str("test_session") + ); +} + +#[test] +fn test_contribution_cache_single_session_insert_get() { + let cache = ContributionCache::new(); + let path = PathBuf::from("/test/session1.jsonl"); + let path_hash = PathHash::new(&path); + + let contrib = SingleSessionContribution { + stats: Default::default(), + date: crate::types::CompactDate::from_str("2025-01-15").unwrap(), + models: crate::types::ModelCounts::new(), + session_hash: SessionHash::from_str("session1"), + ai_message_count: 5, + }; + + cache.insert_single_session(path_hash, contrib); + let retrieved = cache.get_single_session(&path_hash); + + assert!(retrieved.is_some()); + let retrieved = retrieved.unwrap(); + assert_eq!(retrieved.ai_message_count, 5); +} + +#[test] +fn test_contribution_cache_multi_session_insert_get() { + let cache = ContributionCache::new(); + 
let path = PathBuf::from("/test/app.db"); + let path_hash = PathHash::new(&path); + + let contrib = MultiSessionContribution { + session_aggregates: vec![], + daily_stats: Default::default(), + conversation_count: 10, + }; + + cache.insert_multi_session(path_hash, contrib); + let retrieved = cache.get_multi_session(&path_hash); + + assert!(retrieved.is_some()); + assert_eq!(retrieved.unwrap().conversation_count, 10); +} + +#[test] +fn test_contribution_cache_remove_any() { + let cache = ContributionCache::new(); + + // Insert one of each type + let path1 = PathBuf::from("/test/msg.json"); + let path2 = PathBuf::from("/test/session.jsonl"); + let path3 = PathBuf::from("/test/app.db"); + + let hash1 = PathHash::new(&path1); + let hash2 = PathHash::new(&path2); + let hash3 = PathHash::new(&path3); + + cache.insert_single_message(hash1, SingleMessageContribution::default()); + cache.insert_single_session( + hash2, + SingleSessionContribution { + stats: Default::default(), + date: Default::default(), + models: crate::types::ModelCounts::new(), + session_hash: SessionHash::from_str("s2"), + ai_message_count: 0, + }, + ); + cache.insert_multi_session( + hash3, + MultiSessionContribution { + session_aggregates: vec![], + daily_stats: Default::default(), + conversation_count: 3, + }, + ); + + // Remove and verify correct type returned + let removed1 = cache.remove_any(&hash1); + assert!(matches!( + removed1, + Some(super::super::RemovedContribution::SingleMessage(_)) + )); + + let removed2 = cache.remove_any(&hash2); + assert!(matches!( + removed2, + Some(super::super::RemovedContribution::SingleSession(_)) + )); + + let removed3 = cache.remove_any(&hash3); + assert!(matches!( + removed3, + Some(super::super::RemovedContribution::MultiSession(_)) + )); + + // Verify they're actually removed + assert!(cache.get_single_message(&hash1).is_none()); + assert!(cache.get_single_session(&hash2).is_none()); + assert!(cache.get_multi_session(&hash3).is_none()); +} + +#[test] +fn test_contribution_cache_clear() { + let cache = ContributionCache::new(); + + let path = PathBuf::from("/test/file.json"); + let hash = PathHash::new(&path); + + cache.insert_single_message(hash, SingleMessageContribution::default()); + + assert!(cache.get_single_message(&hash).is_some()); + + cache.clear(); + + assert!(cache.get_single_message(&hash).is_none()); +} + +// ============================================================================ +// Utility Tests +// ============================================================================ + +#[test] +fn test_path_hash_consistency() { + let path1 = PathBuf::from("/test/file.json"); + let path2 = PathBuf::from("/test/file.json"); + let path3 = PathBuf::from("/test/other.json"); + + let hash1 = PathHash::new(&path1); + let hash2 = PathHash::new(&path2); + let hash3 = PathHash::new(&path3); + + assert_eq!(hash1, hash2, "Same paths should have same hash"); + assert_ne!(hash1, hash3, "Different paths should have different hash"); +} diff --git a/src/contribution_cache/tests/compact_stats.rs b/src/contribution_cache/tests/compact_stats.rs new file mode 100644 index 0000000..21c0cde --- /dev/null +++ b/src/contribution_cache/tests/compact_stats.rs @@ -0,0 +1,160 @@ +//! 
Tests for PackedStatsDate bitfield operations + +use super::super::single_message::PackedStatsDate; +use crate::types::{CompactDate, Stats}; + +#[test] +fn test_packed_stats_date_from_stats() { + let stats = Stats { + input_tokens: 1000, + output_tokens: 500, + reasoning_tokens: 100, + cached_tokens: 200, + cost: 0.05, + tool_calls: 3, + ..Default::default() + }; + let date = CompactDate::from_parts(2025, 6, 15); + + let packed = PackedStatsDate::pack(&stats, date); + + assert_eq!(packed.input_tokens(), 1000); + assert_eq!(packed.output_tokens(), 500); + assert_eq!(packed.reasoning_tokens(), 100); + assert_eq!(packed.cached_tokens(), 200); + assert_eq!(packed.cost_cents(), 5); // 0.05 * 100 = 5 cents + assert_eq!(packed.tool_calls(), 3); + + let unpacked_date = packed.unpack_date(); + assert_eq!(unpacked_date.year(), 2025); + assert_eq!(unpacked_date.month(), 6); + assert_eq!(unpacked_date.day(), 15); +} + +#[test] +fn test_packed_stats_date_to_tui_stats() { + let stats = Stats { + input_tokens: 1000, + output_tokens: 500, + reasoning_tokens: 100, + cached_tokens: 200, + cost: 0.50, + tool_calls: 5, + ..Default::default() + }; + let date = CompactDate::from_parts(2025, 1, 1); + + let packed = PackedStatsDate::pack(&stats, date); + let tui = packed.to_tui_stats(); + + assert_eq!(tui.input_tokens, 1000); + assert_eq!(tui.output_tokens, 500); + assert_eq!(tui.reasoning_tokens, 100); + assert_eq!(tui.cached_tokens, 200); + assert_eq!(tui.cost_cents, 50); + assert_eq!(tui.tool_calls, 5); +} + +#[test] +fn test_packed_stats_date_max_values() { + // Test maximum values within bit limits (from diagnostic reference) + let stats = Stats { + input_tokens: 134_217_727, // 27-bit max + output_tokens: 67_108_863, // 26-bit max + reasoning_tokens: 67_108_863, + cached_tokens: 134_217_727, + cost: 655.35, // u16 max cents + tool_calls: 16_383, // 14-bit max + ..Default::default() + }; + let date = CompactDate::from_parts(2083, 12, 31); // Max year (2020 + 63) + + let packed = PackedStatsDate::pack(&stats, date); + + assert_eq!(packed.input_tokens(), 134_217_727); + assert_eq!(packed.output_tokens(), 67_108_863); + assert_eq!(packed.reasoning_tokens(), 67_108_863); + assert_eq!(packed.cached_tokens(), 134_217_727); + assert_eq!(packed.cost_cents(), 65535); + assert_eq!(packed.tool_calls(), 16383); + + let unpacked_date = packed.unpack_date(); + assert_eq!(unpacked_date.year(), 2083); + assert_eq!(unpacked_date.month(), 12); + assert_eq!(unpacked_date.day(), 31); +} + +#[test] +fn test_packed_stats_date_saturation() { + // Test values beyond bit limits get saturated + let stats = Stats { + input_tokens: 200_000_000, // Exceeds 27-bit max + output_tokens: 100_000_000, // Exceeds 26-bit max + reasoning_tokens: 100_000_000, + cached_tokens: 200_000_000, + cost: 1000.00, // Exceeds u16 max cents + tool_calls: 50_000, // Exceeds 14-bit max + ..Default::default() + }; + let date = CompactDate::from_parts(2100, 1, 1); // Exceeds max year + + let packed = PackedStatsDate::pack(&stats, date); + + // Values should be saturated to max + assert_eq!(packed.input_tokens(), 0x7FF_FFFF); // 27-bit max + assert_eq!(packed.output_tokens(), 0x3FF_FFFF); // 26-bit max + assert_eq!(packed.cost_cents(), 65535); // u16 max + assert_eq!(packed.tool_calls(), 16383); // 14-bit max + + let unpacked_date = packed.unpack_date(); + assert_eq!(unpacked_date.year(), 2083); // Saturated to 2020 + 63 +} + +#[test] +fn test_packed_stats_date_observed_values() { + // Test with actual observed maximum values from diagnostic + let stats = Stats 
{ + input_tokens: 170_749, + output_tokens: 31_999, + reasoning_tokens: 7_005, + cached_tokens: 186_677, + cost: 3.56, + tool_calls: 73, + ..Default::default() + }; + let date = CompactDate::from_parts(2025, 6, 15); + + let packed = PackedStatsDate::pack(&stats, date); + + assert_eq!(packed.input_tokens(), 170_749); + assert_eq!(packed.output_tokens(), 31_999); + assert_eq!(packed.reasoning_tokens(), 7_005); + assert_eq!(packed.cached_tokens(), 186_677); + assert_eq!(packed.cost_cents(), 356); + assert_eq!(packed.tool_calls(), 73); + + let unpacked_date = packed.unpack_date(); + assert_eq!(unpacked_date.year(), 2025); + assert_eq!(unpacked_date.month(), 6); + assert_eq!(unpacked_date.day(), 15); +} + +#[test] +fn test_packed_stats_date_zero_values() { + let stats = Stats::default(); + let date = CompactDate::from_parts(2020, 1, 1); // Minimum year + + let packed = PackedStatsDate::pack(&stats, date); + + assert_eq!(packed.input_tokens(), 0); + assert_eq!(packed.output_tokens(), 0); + assert_eq!(packed.reasoning_tokens(), 0); + assert_eq!(packed.cached_tokens(), 0); + assert_eq!(packed.cost_cents(), 0); + assert_eq!(packed.tool_calls(), 0); + + let unpacked_date = packed.unpack_date(); + assert_eq!(unpacked_date.year(), 2020); + assert_eq!(unpacked_date.month(), 1); + assert_eq!(unpacked_date.day(), 1); +} diff --git a/src/contribution_cache/tests/mod.rs b/src/contribution_cache/tests/mod.rs new file mode 100644 index 0000000..5b285a5 --- /dev/null +++ b/src/contribution_cache/tests/mod.rs @@ -0,0 +1,98 @@ +//! Integration tests for contribution cache strategies. +//! +//! Tests the full flow of file updates for each contribution strategy: +//! - SingleMessage (OpenCode-style: 1 file = 1 message) +//! - SingleSession (Claude Code-style: 1 file = 1 session with many messages) +//! - MultiSession (Piebald-style: 1 file = many sessions) + +mod basic_operations; +mod compact_stats; +mod multi_session; +mod single_message; +mod single_session; + +use std::collections::BTreeMap; +use std::sync::Arc; + +use chrono::{TimeZone, Utc}; + +use crate::types::{ + AnalyzerStatsView, Application, CompactDate, ConversationMessage, MessageRole, + SessionAggregate, Stats, TuiStats, +}; + +// ============================================================================ +// Test Helpers +// ============================================================================ + +/// Create a test message with configurable parameters. 
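+///
+/// For example (arbitrary values):
+/// ```ignore
+/// // model: Some(..) yields an Assistant message; None yields a User message.
+/// let ai = make_message("s1", Some("claude-3-5-sonnet"), 1000, 500, 0.05, 3, "2025-01-15");
+/// let user = make_message("s1", None, 0, 0, 0.0, 0, "2025-01-15");
+/// ```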
+pub fn make_message( + session_id: &str, + model: Option<&str>, + input_tokens: u64, + output_tokens: u64, + cost: f64, + tool_calls: u32, + date_str: &str, +) -> ConversationMessage { + let date = chrono::NaiveDate::parse_from_str(date_str, "%Y-%m-%d") + .map(|d| { + d.and_hms_opt(12, 0, 0) + .map(|dt| Utc.from_utc_datetime(&dt)) + .unwrap_or_else(|| Utc.with_ymd_and_hms(2025, 1, 1, 0, 0, 0).unwrap()) + }) + .unwrap_or_else(|_| Utc.with_ymd_and_hms(2025, 1, 1, 0, 0, 0).unwrap()); + + ConversationMessage { + application: Application::ClaudeCode, + date, + project_hash: "test_project".into(), + conversation_hash: session_id.into(), + local_hash: Some(format!("local_{}", session_id)), + global_hash: format!("global_{}_{}", session_id, input_tokens), + model: model.map(String::from), + stats: Stats { + input_tokens, + output_tokens, + cost, + tool_calls, + ..Default::default() + }, + role: if model.is_some() { + MessageRole::Assistant + } else { + MessageRole::User + }, + uuid: None, + session_name: Some(format!("Session {}", session_id)), + } +} + +/// Create a minimal AnalyzerStatsView for testing. +pub fn make_empty_view(analyzer_name: &str) -> AnalyzerStatsView { + AnalyzerStatsView { + daily_stats: BTreeMap::new(), + session_aggregates: Vec::new(), + num_conversations: 0, + analyzer_name: Arc::from(analyzer_name), + } +} + +/// Create a view with a pre-existing session for testing add operations. +pub fn make_view_with_session(analyzer_name: &str, session_id: &str) -> AnalyzerStatsView { + let analyzer_name: Arc<str> = Arc::from(analyzer_name); + AnalyzerStatsView { + daily_stats: BTreeMap::new(), + session_aggregates: vec![SessionAggregate { + session_id: session_id.to_string(), + first_timestamp: Utc.with_ymd_and_hms(2025, 1, 1, 0, 0, 0).unwrap(), + analyzer_name: Arc::clone(&analyzer_name), + stats: TuiStats::default(), + models: crate::types::ModelCounts::new(), + session_name: Some(format!("Session {}", session_id)), + date: CompactDate::from_str("2025-01-01").unwrap(), + }], + num_conversations: 0, + analyzer_name, + } +} diff --git a/src/contribution_cache/tests/multi_session.rs b/src/contribution_cache/tests/multi_session.rs new file mode 100644 index 0000000..f3f3e5c --- /dev/null +++ b/src/contribution_cache/tests/multi_session.rs @@ -0,0 +1,304 @@ +//!
Tests for MultiSession contribution strategy (Piebald-style: 1 file = many sessions) + +use std::path::PathBuf; +use std::sync::Arc; + +use super::super::{ContributionCache, MultiSessionContribution, PathHash}; +use super::{make_empty_view, make_message}; + +// ============================================================================ +// MultiSessionContribution Tests +// ============================================================================ + +#[test] +fn test_multi_session_contribution_from_messages() { + let messages = vec![ + // Session 1 + make_message( + "session1", + Some("claude-3-5-sonnet"), + 500, + 200, + 0.02, + 1, + "2025-01-15", + ), + make_message( + "session1", + Some("claude-3-5-sonnet"), + 600, + 250, + 0.025, + 2, + "2025-01-15", + ), + // Session 2 + make_message( + "session2", + Some("claude-3-opus"), + 800, + 300, + 0.05, + 3, + "2025-01-16", + ), + ]; + + let contrib = MultiSessionContribution::from_messages(&messages, Arc::from("TestAnalyzer")); + + // Should have 2 session aggregates + assert_eq!(contrib.session_aggregates.len(), 2); + assert_eq!(contrib.conversation_count, 2); + + // Daily stats will have gap-filled entries (from earliest date to today), + // but should contain our specific dates with non-empty stats + assert!(contrib.daily_stats.contains_key("2025-01-15")); + assert!(contrib.daily_stats.contains_key("2025-01-16")); + + // Verify the actual stats for our dates are populated + let day1 = contrib.daily_stats.get("2025-01-15").unwrap(); + assert_eq!(day1.ai_messages, 2); // Two AI messages on Jan 15 + let day2 = contrib.daily_stats.get("2025-01-16").unwrap(); + assert_eq!(day2.ai_messages, 1); // One AI message on Jan 16 +} + +#[test] +fn test_multi_session_contribution_empty_messages() { + let messages: Vec<_> = vec![]; + + let contrib = MultiSessionContribution::from_messages(&messages, Arc::from("TestAnalyzer")); + + assert_eq!(contrib.session_aggregates.len(), 0); + assert_eq!(contrib.conversation_count, 0); + assert!(contrib.daily_stats.is_empty()); +} + +// ============================================================================ +// AnalyzerStatsView Add/Subtract Tests - MultiSession Strategy +// ============================================================================ + +#[test] +fn test_view_add_multi_session_contribution() { + let mut view = make_empty_view("TestAnalyzer"); + let messages = vec![ + make_message( + "session1", + Some("claude-3-5-sonnet"), + 500, + 200, + 0.02, + 1, + "2025-01-15", + ), + make_message( + "session2", + Some("claude-3-opus"), + 800, + 300, + 0.05, + 3, + "2025-01-16", + ), + ]; + let contrib = MultiSessionContribution::from_messages(&messages, Arc::from("TestAnalyzer")); + + view.add_multi_session_contribution(&contrib); + + // Check conversation count increased + assert_eq!(view.num_conversations, 2); + + // Check sessions added + assert_eq!(view.session_aggregates.len(), 2); + + // Check daily stats + assert!(view.daily_stats.contains_key("2025-01-15")); + assert!(view.daily_stats.contains_key("2025-01-16")); +} + +#[test] +fn test_view_subtract_multi_session_contribution() { + let mut view = make_empty_view("TestAnalyzer"); + let messages = vec![ + make_message( + "session1", + Some("claude-3-5-sonnet"), + 500, + 200, + 0.02, + 1, + "2025-01-15", + ), + make_message( + "session2", + Some("claude-3-opus"), + 800, + 300, + 0.05, + 3, + "2025-01-16", + ), + ]; + let contrib = MultiSessionContribution::from_messages(&messages, Arc::from("TestAnalyzer")); + + // Add then subtract + 
view.add_multi_session_contribution(&contrib); + view.subtract_multi_session_contribution(&contrib); + + // Conversation count should be 0 + assert_eq!(view.num_conversations, 0); + + // Daily stats should be removed when empty + assert!(view.daily_stats.is_empty()); +} + +#[test] +fn test_view_multi_session_merges_existing_sessions() { + let mut view = make_empty_view("TestAnalyzer"); + + // First contribution with session1 + let messages1 = vec![make_message( + "session1", + Some("claude-3-5-sonnet"), + 500, + 200, + 0.02, + 1, + "2025-01-15", + )]; + let contrib1 = MultiSessionContribution::from_messages(&messages1, Arc::from("TestAnalyzer")); + view.add_multi_session_contribution(&contrib1); + + // Second contribution with same session1 (should merge) + let messages2 = vec![make_message( + "session1", + Some("claude-3-5-sonnet"), + 800, + 300, + 0.03, + 2, + "2025-01-15", + )]; + let contrib2 = MultiSessionContribution::from_messages(&messages2, Arc::from("TestAnalyzer")); + view.add_multi_session_contribution(&contrib2); + + // Should still have 1 session (merged) + assert_eq!(view.session_aggregates.len(), 1); + + // Stats should be combined + let session = &view.session_aggregates[0]; + assert_eq!(session.stats.input_tokens, 1300); // 500 + 800 +} + +// ============================================================================ +// File Update Simulation Tests - MultiSession Strategy +// ============================================================================ + +/// Tests the subtract-old/add-new contribution flow for file updates +#[test] +fn test_file_update_flow_multi_session() { + let cache = ContributionCache::new(); + let path = PathBuf::from("/test/app.db"); + let path_hash = PathHash::new(&path); + + let mut view = make_empty_view("TestAnalyzer"); + + // Initial: 1 session + let messages1 = vec![make_message( + "session1", + Some("claude-3-5-sonnet"), + 1000, + 500, + 0.05, + 3, + "2025-01-15", + )]; + let contrib1 = MultiSessionContribution::from_messages(&messages1, Arc::from("TestAnalyzer")); + + cache.insert_multi_session(path_hash, contrib1.clone()); + view.add_multi_session_contribution(&contrib1); + + assert_eq!(view.num_conversations, 1); + assert_eq!(view.session_aggregates.len(), 1); + + // File updated: now 2 sessions + let messages2 = vec![ + make_message( + "session1", + Some("claude-3-5-sonnet"), + 1000, + 500, + 0.05, + 3, + "2025-01-15", + ), + make_message( + "session2", + Some("claude-3-opus"), + 2000, + 800, + 0.10, + 5, + "2025-01-16", + ), + ]; + let contrib2 = MultiSessionContribution::from_messages(&messages2, Arc::from("TestAnalyzer")); + + // Subtract old, add new + let old = cache.get_multi_session(&path_hash).unwrap(); + view.subtract_multi_session_contribution(&old); + view.add_multi_session_contribution(&contrib2); + cache.insert_multi_session(path_hash, contrib2); + + // Should have new values + assert_eq!(view.num_conversations, 2); + assert_eq!(view.session_aggregates.len(), 2); + assert!(view.daily_stats.contains_key("2025-01-15")); + assert!(view.daily_stats.contains_key("2025-01-16")); +} + +/// Tests file deletion for MultiSession +#[test] +fn test_file_deletion_multi_session() { + let cache = ContributionCache::new(); + let path = PathBuf::from("/test/app.db"); + let path_hash = PathHash::new(&path); + + let mut view = make_empty_view("TestAnalyzer"); + + // Add file with 2 sessions + let messages = vec![ + make_message( + "session1", + Some("claude-3-5-sonnet"), + 1000, + 500, + 0.05, + 3, + "2025-01-15", + ), + make_message( + 
"session2", + Some("claude-3-opus"), + 2000, + 800, + 0.10, + 5, + "2025-01-16", + ), + ]; + let contrib = MultiSessionContribution::from_messages(&messages, Arc::from("TestAnalyzer")); + cache.insert_multi_session(path_hash, contrib.clone()); + view.add_multi_session_contribution(&contrib); + + assert_eq!(view.num_conversations, 2); + + // Delete file + if let Some(super::super::RemovedContribution::MultiSession(old)) = cache.remove_any(&path_hash) + { + view.subtract_multi_session_contribution(&old); + } + + // Stats should be cleared + assert_eq!(view.num_conversations, 0); + assert!(view.daily_stats.is_empty()); +} diff --git a/src/contribution_cache/tests/single_message.rs b/src/contribution_cache/tests/single_message.rs new file mode 100644 index 0000000..adac32d --- /dev/null +++ b/src/contribution_cache/tests/single_message.rs @@ -0,0 +1,280 @@ +//! Tests for SingleMessage contribution strategy (OpenCode-style: 1 file = 1 message) + +use std::path::PathBuf; + +use super::super::{ContributionCache, PathHash, SingleMessageContribution}; +use super::{make_message, make_view_with_session}; + +// ============================================================================ +// SingleMessageContribution Tests +// ============================================================================ + +#[test] +fn test_single_message_contribution_from_message() { + let msg = make_message( + "session1", + Some("claude-3-5-sonnet"), + 1000, + 500, + 0.05, + 3, + "2025-01-15", + ); + + let contrib = SingleMessageContribution::from_message(&msg); + let stats = contrib.to_tui_stats(); + + assert_eq!(stats.input_tokens, 1000); + assert_eq!(stats.output_tokens, 500); + assert_eq!(stats.cost_cents, 5); + assert_eq!(stats.tool_calls, 3); + assert!(contrib.model.is_some()); + assert_eq!(contrib.date().to_string(), "2025-01-15"); +} + +#[test] +fn test_single_message_contribution_from_user_message() { + let msg = make_message("session1", None, 0, 0, 0.0, 0, "2025-01-15"); + + let contrib = SingleMessageContribution::from_message(&msg); + let stats = contrib.to_tui_stats(); + + assert!(contrib.model.is_none()); + assert_eq!(stats.input_tokens, 0); +} + +#[test] +fn test_single_message_hash_session_id_consistency() { + let session_id = "test_session_123"; + + let hash1 = SingleMessageContribution::hash_session_id(session_id); + let hash2 = SingleMessageContribution::hash_session_id(session_id); + + assert_eq!(hash1, hash2, "Same session ID should produce same hash"); + + let hash3 = SingleMessageContribution::hash_session_id("different_session"); + assert_ne!( + hash1, hash3, + "Different session IDs should produce different hashes" + ); +} + +// ============================================================================ +// AnalyzerStatsView Add/Subtract Tests - SingleMessage Strategy +// ============================================================================ + +#[test] +fn test_view_add_single_message_contribution() { + let mut view = make_view_with_session("TestAnalyzer", "session1"); + let msg = make_message( + "session1", + Some("claude-3-5-sonnet"), + 1000, + 500, + 0.05, + 3, + "2025-01-15", + ); + let contrib = SingleMessageContribution::from_message(&msg); + + view.add_single_message_contribution(&contrib); + + // Check daily stats updated + let daily = view.daily_stats.get("2025-01-15").expect("daily stats"); + assert_eq!(daily.ai_messages, 1); + assert_eq!(daily.stats.input_tokens, 1000); + assert_eq!(daily.stats.output_tokens, 500); + + // Check session stats updated + let session = 
&view.session_aggregates[0]; + assert_eq!(session.stats.input_tokens, 1000); + assert_eq!(session.stats.output_tokens, 500); +} + +#[test] +fn test_view_subtract_single_message_contribution() { + let mut view = make_view_with_session("TestAnalyzer", "session1"); + + // Add first + let msg = make_message( + "session1", + Some("claude-3-5-sonnet"), + 1000, + 500, + 0.05, + 3, + "2025-01-15", + ); + let contrib = SingleMessageContribution::from_message(&msg); + view.add_single_message_contribution(&contrib); + + // Then subtract + view.subtract_single_message_contribution(&contrib); + + // Daily stats should be removed when empty + assert!( + view.daily_stats.is_empty(), + "Empty daily stats should be removed" + ); + + // Session stats should be zeroed + let session = &view.session_aggregates[0]; + assert_eq!(session.stats.input_tokens, 0); +} + +#[test] +fn test_view_add_single_message_user_message_no_change() { + let mut view = make_view_with_session("TestAnalyzer", "session1"); + let msg = make_message("session1", None, 0, 0, 0.0, 0, "2025-01-15"); // User message + let contrib = SingleMessageContribution::from_message(&msg); + + view.add_single_message_contribution(&contrib); + + // User messages (model=None) should not increment ai_messages + assert!( + view.daily_stats.is_empty() || view.daily_stats.get("2025-01-15").unwrap().ai_messages == 0 + ); +} + +// ============================================================================ +// File Update Simulation Tests - SingleMessage Strategy +// ============================================================================ + +/// Tests the subtract-old/add-new contribution flow for file updates +#[test] +fn test_file_update_flow_single_message() { + let cache = ContributionCache::new(); + let path = PathBuf::from("/test/message1.json"); + let path_hash = PathHash::new(&path); + + let mut view = make_view_with_session("TestAnalyzer", "session1"); + + // Initial file: 1000 input tokens + let msg1 = make_message( + "session1", + Some("claude-3-5-sonnet"), + 1000, + 500, + 0.05, + 3, + "2025-01-15", + ); + let contrib1 = SingleMessageContribution::from_message(&msg1); + + cache.insert_single_message(path_hash, contrib1); + view.add_single_message_contribution(&contrib1); + + assert_eq!(view.session_aggregates[0].stats.input_tokens, 1000); + + // File updated: now 2000 input tokens + let msg2 = make_message( + "session1", + Some("claude-3-5-sonnet"), + 2000, + 800, + 0.08, + 5, + "2025-01-15", + ); + let contrib2 = SingleMessageContribution::from_message(&msg2); + + // Subtract old, add new (simulating reload_file_incremental) + let old = cache.get_single_message(&path_hash).unwrap(); + view.subtract_single_message_contribution(&old); + view.add_single_message_contribution(&contrib2); + cache.insert_single_message(path_hash, contrib2); + + // Should have new values + assert_eq!(view.session_aggregates[0].stats.input_tokens, 2000); + assert_eq!(view.session_aggregates[0].stats.output_tokens, 800); +} + +/// Tests file deletion correctly removes contribution +#[test] +fn test_file_deletion_single_message() { + let cache = ContributionCache::new(); + let path = PathBuf::from("/test/message1.json"); + let path_hash = PathHash::new(&path); + + let mut view = make_view_with_session("TestAnalyzer", "session1"); + + // Add file + let msg = make_message( + "session1", + Some("claude-3-5-sonnet"), + 1000, + 500, + 0.05, + 3, + "2025-01-15", + ); + let contrib = SingleMessageContribution::from_message(&msg); + cache.insert_single_message(path_hash, 
contrib); + view.add_single_message_contribution(&contrib); + + assert_eq!(view.session_aggregates[0].stats.input_tokens, 1000); + + // Delete file (simulating remove_file_from_cache) + if let Some(super::super::RemovedContribution::SingleMessage(old)) = + cache.remove_any(&path_hash) + { + view.subtract_single_message_contribution(&old); + } + + // Stats should be zeroed + assert_eq!(view.session_aggregates[0].stats.input_tokens, 0); + assert!(view.daily_stats.is_empty()); +} + +/// Tests multiple files contributing to the same session +#[test] +fn test_multiple_files_same_session() { + let cache = ContributionCache::new(); + let mut view = make_view_with_session("TestAnalyzer", "session1"); + + // Two files contributing to the same session (SingleMessage strategy) + let path1 = PathBuf::from("/test/msg1.json"); + let path2 = PathBuf::from("/test/msg2.json"); + let hash1 = PathHash::new(&path1); + let hash2 = PathHash::new(&path2); + + let msg1 = make_message( + "session1", + Some("claude-3-5-sonnet"), + 1000, + 500, + 0.05, + 2, + "2025-01-15", + ); + let msg2 = make_message( + "session1", + Some("claude-3-5-sonnet"), + 800, + 400, + 0.04, + 3, + "2025-01-15", + ); + + let contrib1 = SingleMessageContribution::from_message(&msg1); + let contrib2 = SingleMessageContribution::from_message(&msg2); + + cache.insert_single_message(hash1, contrib1); + cache.insert_single_message(hash2, contrib2); + view.add_single_message_contribution(&contrib1); + view.add_single_message_contribution(&contrib2); + + // Session should have combined stats + assert_eq!(view.session_aggregates[0].stats.input_tokens, 1800); + assert_eq!(view.session_aggregates[0].stats.tool_calls, 5); + + // Delete one file + if let Some(super::super::RemovedContribution::SingleMessage(old)) = cache.remove_any(&hash1) { + view.subtract_single_message_contribution(&old); + } + + // Should still have stats from remaining file + assert_eq!(view.session_aggregates[0].stats.input_tokens, 800); + assert_eq!(view.session_aggregates[0].stats.tool_calls, 3); +} diff --git a/src/contribution_cache/tests/single_session.rs b/src/contribution_cache/tests/single_session.rs new file mode 100644 index 0000000..291fb8b --- /dev/null +++ b/src/contribution_cache/tests/single_session.rs @@ -0,0 +1,388 @@ +//! 
Tests for SingleSession contribution strategy (Claude Code-style: 1 file = 1 session with many messages) + +use std::path::PathBuf; + +use super::super::{ContributionCache, PathHash, SessionHash, SingleSessionContribution}; +use super::{make_message, make_view_with_session}; + +// ============================================================================ +// SingleSessionContribution Tests +// ============================================================================ + +#[test] +fn test_single_session_contribution_from_messages() { + let messages = vec![ + make_message( + "session1", + Some("claude-3-5-sonnet"), + 500, + 200, + 0.02, + 1, + "2025-01-15", + ), + make_message("session1", None, 0, 0, 0.0, 0, "2025-01-15"), // User message + make_message( + "session1", + Some("claude-3-5-sonnet"), + 800, + 300, + 0.03, + 2, + "2025-01-15", + ), + ]; + + let contrib = SingleSessionContribution::from_messages(&messages); + + // Should aggregate only AI messages (2 of them) + assert_eq!(contrib.ai_message_count, 2); + assert_eq!(contrib.stats.input_tokens, 1300); // 500 + 800 + assert_eq!(contrib.stats.output_tokens, 500); // 200 + 300 + assert_eq!(contrib.stats.cost_cents, 5); // 2 + 3 + assert_eq!(contrib.stats.tool_calls, 3); // 1 + 2 + assert_eq!(contrib.date.to_string(), "2025-01-15"); +} + +#[test] +fn test_single_session_contribution_empty_messages() { + let messages: Vec<_> = vec![]; + + let contrib = SingleSessionContribution::from_messages(&messages); + + assert_eq!(contrib.ai_message_count, 0); + assert_eq!(contrib.stats.input_tokens, 0); + assert_eq!(contrib.session_hash, SessionHash::default()); +} + +#[test] +fn test_single_session_contribution_multiple_models() { + let messages = vec![ + make_message( + "session1", + Some("claude-3-5-sonnet"), + 500, + 200, + 0.02, + 1, + "2025-01-15", + ), + make_message( + "session1", + Some("claude-3-opus"), + 800, + 300, + 0.05, + 2, + "2025-01-15", + ), + make_message( + "session1", + Some("claude-3-5-sonnet"), + 600, + 250, + 0.025, + 1, + "2025-01-15", + ), + ]; + + let contrib = SingleSessionContribution::from_messages(&messages); + + assert_eq!(contrib.ai_message_count, 3); + // Models should be tracked with counts + // claude-3-5-sonnet appears twice, claude-3-opus once + let sonnet_key = crate::types::intern_model("claude-3-5-sonnet"); + let opus_key = crate::types::intern_model("claude-3-opus"); + + assert_eq!(contrib.models.get(sonnet_key), Some(2)); + assert_eq!(contrib.models.get(opus_key), Some(1)); +} + +// ============================================================================ +// AnalyzerStatsView Add/Subtract Tests - SingleSession Strategy +// ============================================================================ + +#[test] +fn test_view_add_single_session_contribution() { + let mut view = make_view_with_session("TestAnalyzer", "session1"); + let messages = vec![ + make_message( + "session1", + Some("claude-3-5-sonnet"), + 500, + 200, + 0.02, + 1, + "2025-01-15", + ), + make_message( + "session1", + Some("claude-3-5-sonnet"), + 800, + 300, + 0.03, + 2, + "2025-01-15", + ), + ]; + let contrib = SingleSessionContribution::from_messages(&messages); + + view.add_single_session_contribution(&contrib); + + // Check daily stats + let daily = view.daily_stats.get("2025-01-15").expect("daily stats"); + assert_eq!(daily.ai_messages, 2); + assert_eq!(daily.stats.input_tokens, 1300); + + // Check session stats + let session = &view.session_aggregates[0]; + assert_eq!(session.stats.input_tokens, 1300); +} + +#[test] +fn 
test_view_subtract_single_session_contribution() { + let mut view = make_view_with_session("TestAnalyzer", "session1"); + let messages = vec![ + make_message( + "session1", + Some("claude-3-5-sonnet"), + 500, + 200, + 0.02, + 1, + "2025-01-15", + ), + make_message( + "session1", + Some("claude-3-5-sonnet"), + 800, + 300, + 0.03, + 2, + "2025-01-15", + ), + ]; + let contrib = SingleSessionContribution::from_messages(&messages); + + // Add then subtract + view.add_single_session_contribution(&contrib); + view.subtract_single_session_contribution(&contrib); + + // Daily stats should be removed + assert!(view.daily_stats.is_empty()); + + // Session stats should be zeroed + let session = &view.session_aggregates[0]; + assert_eq!(session.stats.input_tokens, 0); +} + +#[test] +fn test_view_single_session_model_count_tracking() { + let mut view = make_view_with_session("TestAnalyzer", "session1"); + let messages = vec![ + make_message( + "session1", + Some("claude-3-5-sonnet"), + 500, + 200, + 0.02, + 1, + "2025-01-15", + ), + make_message( + "session1", + Some("claude-3-opus"), + 800, + 300, + 0.05, + 2, + "2025-01-15", + ), + ]; + let contrib = SingleSessionContribution::from_messages(&messages); + + view.add_single_session_contribution(&contrib); + + // Check model counts in session + let session = &view.session_aggregates[0]; + let sonnet_key = crate::types::intern_model("claude-3-5-sonnet"); + let opus_key = crate::types::intern_model("claude-3-opus"); + + assert_eq!(session.models.get(sonnet_key), Some(1)); + assert_eq!(session.models.get(opus_key), Some(1)); + + // Subtract and verify counts go to zero + view.subtract_single_session_contribution(&contrib); + let session = &view.session_aggregates[0]; + assert_eq!(session.models.get(sonnet_key), None); // Removed when count=0 + assert_eq!(session.models.get(opus_key), None); +} + +// ============================================================================ +// File Update Simulation Tests - SingleSession Strategy +// ============================================================================ + +/// Tests the subtract-old/add-new contribution flow for file updates +#[test] +fn test_file_update_flow_single_session() { + let cache = ContributionCache::new(); + let path = PathBuf::from("/test/session1.jsonl"); + let path_hash = PathHash::new(&path); + + let mut view = make_view_with_session("TestAnalyzer", "session1"); + + // Initial file: 2 messages + let messages1 = vec![ + make_message( + "session1", + Some("claude-3-5-sonnet"), + 500, + 200, + 0.02, + 1, + "2025-01-15", + ), + make_message( + "session1", + Some("claude-3-5-sonnet"), + 500, + 200, + 0.02, + 1, + "2025-01-15", + ), + ]; + let contrib1 = SingleSessionContribution::from_messages(&messages1); + + cache.insert_single_session(path_hash, contrib1.clone()); + view.add_single_session_contribution(&contrib1); + + assert_eq!(view.session_aggregates[0].stats.input_tokens, 1000); + assert_eq!(view.daily_stats.get("2025-01-15").unwrap().ai_messages, 2); + + // File updated: now 3 messages with different totals + let messages2 = vec![ + make_message( + "session1", + Some("claude-3-5-sonnet"), + 500, + 200, + 0.02, + 1, + "2025-01-15", + ), + make_message( + "session1", + Some("claude-3-5-sonnet"), + 500, + 200, + 0.02, + 1, + "2025-01-15", + ), + make_message( + "session1", + Some("claude-3-opus"), + 1000, + 400, + 0.05, + 3, + "2025-01-15", + ), + ]; + let contrib2 = SingleSessionContribution::from_messages(&messages2); + + // Subtract old, add new + let old = 
cache.get_single_session(&path_hash).unwrap(); + view.subtract_single_session_contribution(&old); + view.add_single_session_contribution(&contrib2); + cache.insert_single_session(path_hash, contrib2); + + // Should have new values + assert_eq!(view.session_aggregates[0].stats.input_tokens, 2000); + assert_eq!(view.daily_stats.get("2025-01-15").unwrap().ai_messages, 3); +} + +/// Tests file deletion for SingleSession +#[test] +fn test_file_deletion_single_session() { + let cache = ContributionCache::new(); + let path = PathBuf::from("/test/session1.jsonl"); + let path_hash = PathHash::new(&path); + + let mut view = make_view_with_session("TestAnalyzer", "session1"); + + // Add file + let messages = vec![ + make_message( + "session1", + Some("claude-3-5-sonnet"), + 500, + 200, + 0.02, + 1, + "2025-01-15", + ), + make_message( + "session1", + Some("claude-3-5-sonnet"), + 500, + 200, + 0.02, + 1, + "2025-01-15", + ), + ]; + let contrib = SingleSessionContribution::from_messages(&messages); + cache.insert_single_session(path_hash, contrib.clone()); + view.add_single_session_contribution(&contrib); + + assert_eq!(view.daily_stats.get("2025-01-15").unwrap().ai_messages, 2); + + // Delete file + if let Some(super::super::RemovedContribution::SingleSession(old)) = + cache.remove_any(&path_hash) + { + view.subtract_single_session_contribution(&old); + } + + // Stats should be cleared + assert!(view.daily_stats.is_empty()); + assert_eq!(view.session_aggregates[0].stats.input_tokens, 0); +} + +/// Tests that messages spanning multiple dates are handled correctly +#[test] +fn test_date_boundary_handling() { + let mut view = make_view_with_session("TestAnalyzer", "session1"); + + // Messages on different dates in same session + let messages = vec![ + make_message( + "session1", + Some("claude-3-5-sonnet"), + 500, + 200, + 0.02, + 1, + "2025-01-15", + ), + make_message( + "session1", + Some("claude-3-5-sonnet"), + 800, + 300, + 0.03, + 2, + "2025-01-16", + ), + ]; + let contrib = SingleSessionContribution::from_messages(&messages); + + view.add_single_session_contribution(&contrib); + + // Daily stats use the first message's date for SingleSession + assert!(view.daily_stats.contains_key("2025-01-15")); + // Second message's date is not separately tracked in SingleSession strategy +} diff --git a/src/main.rs b/src/main.rs index 1370597..51c6ab3 100644 --- a/src/main.rs +++ b/src/main.rs @@ -1,6 +1,7 @@ use anyhow::{Context, Result}; use clap::{Args, Parser, Subcommand}; -use std::sync::{Arc, Mutex}; +use parking_lot::Mutex; +use std::sync::Arc; use analyzer::AnalyzerRegistry; use analyzers::{ @@ -11,7 +12,9 @@ use analyzers::{ mod analyzer; mod analyzers; +mod cache; mod config; +mod contribution_cache; mod mcp; mod models; mod reqwest_simd_json; @@ -22,6 +25,10 @@ mod utils; mod version_check; mod watcher; +#[cfg(feature = "mimalloc")] +#[global_allocator] +static GLOBAL: mimalloc::MiMalloc = mimalloc::MiMalloc; + #[derive(Parser)] #[command(name = "splitrail")] #[command(version)] @@ -205,17 +212,26 @@ async fn run_default(format_options: utils::NumberFormatOptions) { } }; - // Create real-time stats manager - let mut stats_manager = match watcher::RealtimeStatsManager::new(registry).await { - Ok(manager) => manager, - Err(e) => { - eprintln!("Error loading analyzer stats: {e}"); - std::process::exit(1); + // Create real-time stats manager using temporary rayon threadpool for parallel loading + let mut stats_manager = { + let pool = rayon::ThreadPoolBuilder::new() + .build() + .expect("Failed to create 
rayon threadpool"); + + let result = pool.install(|| watcher::RealtimeStatsManager::new(registry)); + + // Pool is dropped here, releasing threads + match result { + Ok(manager) => manager, + Err(e) => { + eprintln!("Error loading analyzer stats: {e}"); + std::process::exit(1); + } } }; - // Get the initial stats to check if we have data - let initial_stats = stats_manager.get_stats_receiver().borrow().clone(); + // Release memory from parallel parsing back to OS + release_unused_memory(); // Create upload status for TUI let upload_status = Arc::new(Mutex::new(tui::UploadStatus::None)); @@ -230,28 +246,33 @@ async fn run_default(format_options: utils::NumberFormatOptions) { let config = config::Config::load().unwrap_or(None).unwrap_or_default(); if config.upload.auto_upload { if config.is_configured() { + // For initial auto-upload, load full stats separately (sync, no threadpool for background task) + let registry_for_upload = create_analyzer_registry(); let upload_status_clone = upload_status.clone(); tokio::spawn(async move { - upload::perform_background_upload( - initial_stats, - Some(upload_status_clone), - Some(500), - ) - .await; + if let Ok(full_stats) = registry_for_upload.load_all_stats_parallel_scoped() { + // Scoped threadpool already released, also release allocator memory + release_unused_memory(); + upload::perform_background_upload( + full_stats, + Some(upload_status_clone), + Some(500), + ) + .await; + } }); } else { // Auto-upload is enabled but configuration is incomplete - if let Ok(mut status) = upload_status.lock() { - if config.is_api_token_missing() && config.is_server_url_missing() { - *status = tui::UploadStatus::MissingConfig; - } else if config.is_api_token_missing() { - *status = tui::UploadStatus::MissingApiToken; - } else if config.is_server_url_missing() { - *status = tui::UploadStatus::MissingServerUrl; - } else { - // Shouldn't happen since is_configured() returned false - *status = tui::UploadStatus::MissingConfig; - } + let mut status = upload_status.lock(); + if config.is_api_token_missing() && config.is_server_url_missing() { + *status = tui::UploadStatus::MissingConfig; + } else if config.is_api_token_missing() { + *status = tui::UploadStatus::MissingApiToken; + } else if config.is_server_url_missing() { + *status = tui::UploadStatus::MissingServerUrl; + } else { + // Shouldn't happen since is_configured() returned false + *status = tui::UploadStatus::MissingConfig; } } } @@ -271,7 +292,18 @@ async fn run_default(format_options: utils::NumberFormatOptions) { async fn run_upload(args: UploadArgs) -> Result<()> { let registry = create_analyzer_registry(); - let stats = registry.load_all_stats().await?; + + // Load stats using temporary rayon threadpool for parallel parsing + let stats = { + let pool = rayon::ThreadPoolBuilder::new() + .build() + .expect("Failed to create rayon threadpool"); + pool.install(|| registry.load_all_stats_parallel())? 
+ // Pool is dropped here, releasing threads + }; + + // Release memory from parallel parsing back to OS + release_unused_memory(); // Load config file to get formatting options and upload date let config_file = config::Config::load().unwrap_or(None).unwrap_or_default(); @@ -358,7 +390,18 @@ async fn run_stats(args: StatsArgs) -> Result<()> { let registry = create_analyzer_registry(); - let mut stats = registry.load_all_stats().await?; + + // Load stats using temporary rayon threadpool for parallel parsing + let mut stats = { + let pool = rayon::ThreadPoolBuilder::new() + .build() + .expect("Failed to create rayon threadpool"); + pool.install(|| registry.load_all_stats_parallel())? + // Pool is dropped here, releasing threads + }; + + // Release memory from parallel parsing back to OS + release_unused_memory(); if !args.include_messages { for analyzer_stats in &mut stats.analyzer_stats { @@ -399,3 +442,19 @@ } } } + +/// Release unused memory back to the OS. +/// Call this after allocation-heavy phases (e.g., parsing) to shrink resident memory. +#[cfg(feature = "mimalloc")] +pub fn release_unused_memory() { + // SAFETY: `mi_collect` takes only a `bool` and has no pointer or aliasing + // invariants to uphold; the block is `unsafe` solely because it is an FFI + // call. Passing `true` forces an aggressive collection that returns unused + // memory to the OS. + unsafe { + libmimalloc_sys::mi_collect(true); + } +} + +/// No-op when mimalloc is disabled. +#[cfg(not(feature = "mimalloc"))] +pub fn release_unused_memory() {} diff --git a/src/mcp/server.rs index baeec01..aa9dfcf 100644 --- a/src/mcp/server.rs +++ b/src/mcp/server.rs @@ -1,4 +1,4 @@ -use std::collections::HashMap; +use std::collections::{BTreeMap, HashMap}; use rmcp::handler::server::router::tool::ToolRouter; use rmcp::handler::server::wrapper::Parameters; @@ -13,7 +13,7 @@ use rmcp::{ tool_router, }; -use crate::types::{MultiAnalyzerStats, Stats}; +use crate::types::MultiAnalyzerStats; use crate::{create_analyzer_registry, utils}; use super::types::*; @@ -38,19 +38,55 @@ impl SplitrailMcpServer { } /// Load stats from all analyzers (reuses existing infrastructure) - async fn load_stats(&self) -> Result<MultiAnalyzerStats, McpError> { + fn load_stats(&self) -> Result<MultiAnalyzerStats, McpError> { let registry = create_analyzer_registry(); registry - .load_all_stats() - .await + .load_all_stats_parallel() .map_err(|e| McpError::internal_error(format!("Failed to load stats: {}", e), None)) } + /// Compute file operation stats grouped by date from raw messages. + /// This is computed on-demand since DailyStats only contains TUI-relevant fields.
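+ ///
+ /// Sketch of the result shape (keys are local-time `YYYY-MM-DD` strings;
+ /// the analyzer name below is hypothetical):
+ /// ```ignore
+ /// let by_date = Self::compute_file_ops_by_date(&stats, Some("Claude Code"));
+ /// if let Some(ops) = by_date.get("2025-01-15") {
+ ///     println!("{} read / {} edited", ops.files_read, ops.files_edited);
+ /// }
+ /// ```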
+ fn compute_file_ops_by_date( + stats: &MultiAnalyzerStats, + analyzer: Option<&str>, + ) -> HashMap<String, DateFileOps> { + let messages: Vec<_> = if let Some(analyzer_name) = analyzer { + stats + .analyzer_stats + .iter() + .filter(|a| a.analyzer_name.eq_ignore_ascii_case(analyzer_name)) + .flat_map(|a| a.messages.iter()) + .collect() + } else { + stats + .analyzer_stats + .iter() + .flat_map(|a| a.messages.iter()) + .collect() + }; + + let mut file_ops_by_date: HashMap<String, DateFileOps> = HashMap::new(); + for msg in messages { + let date = msg + .date + .with_timezone(&chrono::Local) + .format("%Y-%m-%d") + .to_string(); + let entry = file_ops_by_date.entry(date).or_default(); + entry.files_read += msg.stats.files_read; + entry.files_edited += msg.stats.files_edited; + entry.files_added += msg.stats.files_added; + entry.terminal_commands += msg.stats.terminal_commands; + } + file_ops_by_date + } + /// Get daily stats for a specific analyzer or combined across all fn get_daily_stats_for_analyzer( stats: &MultiAnalyzerStats, analyzer: Option<&str>, - ) -> std::collections::BTreeMap<String, DailyStats> { + ) -> BTreeMap<String, DailyStats> { if let Some(analyzer_name) = analyzer { // Find specific analyzer for analyzer_stats in &stats.analyzer_stats { @@ -61,7 +97,7 @@ return analyzer_stats.daily_stats.clone(); } } - std::collections::BTreeMap::new() + BTreeMap::new() } else { // Combine all messages and aggregate let all_messages: Vec<_> = stats @@ -84,20 +120,24 @@ &self, Parameters(req): Parameters, ) -> Result<Json<DailyStatsResponse>, String> { - let stats = self.load_stats().await.map_err(|e| e.to_string())?; + let stats = self.load_stats().map_err(|e| e.to_string())?; let daily_stats = Self::get_daily_stats_for_analyzer(&stats, req.analyzer.as_deref()); + let file_ops_by_date = Self::compute_file_ops_by_date(&stats, req.analyzer.as_deref()); - let mut results: Vec<DailySummary> = if let Some(date) = req.date { + let mut results: Vec<DailySummary> = if let Some(ref date) = req.date { // Filter by specific date - daily_stats - .get(&date) - .map(|ds| vec![DailySummary::from((date.as_str(), ds))]) - .unwrap_or_default() + daily_stats.get(date).map_or_else(Vec::new, |ds| { + let file_ops = file_ops_by_date.get(date).cloned().unwrap_or_default(); + vec![DailySummary::new(date.as_str(), ds, &file_ops)] + }) } else { // All dates daily_stats .iter() - .map(|(date, ds)| DailySummary::from((date.as_str(), ds))) + .map(|(date, ds)| { + let file_ops = file_ops_by_date.get(date).cloned().unwrap_or_default(); + DailySummary::new(date.as_str(), ds, &file_ops) + }) .collect() }; @@ -120,7 +160,7 @@ &self, Parameters(req): Parameters, ) -> Result, String> { - let stats = self.load_stats().await.map_err(|e| e.to_string())?; + let stats = self.load_stats().map_err(|e| e.to_string())?; let daily_stats = Self::get_daily_stats_for_analyzer(&stats, req.analyzer.as_deref()); let mut model_counts: HashMap<String, u64> = HashMap::new(); @@ -163,7 +203,7 @@ &self, Parameters(req): Parameters, ) -> Result<Json<CostBreakdownResponse>, String> { - let stats = self.load_stats().await.map_err(|e| e.to_string())?; + let stats = self.load_stats().map_err(|e| e.to_string())?; let daily_stats = Self::get_daily_stats_for_analyzer(&stats, req.analyzer.as_deref()); let daily_costs: Vec<DailyCost> = daily_stats @@ -183,7 +223,7 @@ }) .map(|(date, ds)| DailyCost { date: date.clone(), - cost: ds.stats.cost, + cost: ds.stats.cost(), }) .collect(); @@ -209,36 +249,61 @@ &self, Parameters(req): Parameters, ) -> Result<Json<FileOpsResponse>, String> { - let stats = self.load_stats().await.map_err(|e| e.to_string())?;
- let daily_stats = Self::get_daily_stats_for_analyzer(&stats, req.analyzer.as_deref()); + let stats = self.load_stats().map_err(|e| e.to_string())?; - let mut aggregated = Stats::default(); + // Collect messages, optionally filtered by analyzer + let messages: Vec<_> = if let Some(ref analyzer_name) = req.analyzer { + stats + .analyzer_stats + .iter() + .filter(|a| a.analyzer_name.eq_ignore_ascii_case(analyzer_name)) + .flat_map(|a| a.messages.iter()) + .collect() + } else { + stats + .analyzer_stats + .iter() + .flat_map(|a| a.messages.iter()) + .collect() + }; - if let Some(date) = req.date { - if let Some(ds) = daily_stats.get(&date) { - aggregated = ds.stats.clone(); - } + // Filter by date if specified + let filtered: Vec<_> = if let Some(ref date) = req.date { + messages + .into_iter() + .filter(|m| { + m.date + .with_timezone(&chrono::Local) + .format("%Y-%m-%d") + .to_string() + == *date + }) + .collect() } else { - for ds in daily_stats.values() { - aggregated.files_read += ds.stats.files_read; - aggregated.files_edited += ds.stats.files_edited; - aggregated.files_added += ds.stats.files_added; - aggregated.files_deleted += ds.stats.files_deleted; - aggregated.lines_read += ds.stats.lines_read; - aggregated.lines_edited += ds.stats.lines_edited; - aggregated.lines_added += ds.stats.lines_added; - aggregated.lines_deleted += ds.stats.lines_deleted; - aggregated.bytes_read += ds.stats.bytes_read; - aggregated.bytes_edited += ds.stats.bytes_edited; - aggregated.bytes_added += ds.stats.bytes_added; - aggregated.bytes_deleted += ds.stats.bytes_deleted; - aggregated.terminal_commands += ds.stats.terminal_commands; - aggregated.file_searches += ds.stats.file_searches; - aggregated.file_content_searches += ds.stats.file_content_searches; - } + messages + }; + + // Sum file operations from raw Stats + let mut response = FileOpsResponse::default(); + for msg in filtered { + response.files_read += msg.stats.files_read; + response.files_edited += msg.stats.files_edited; + response.files_added += msg.stats.files_added; + response.files_deleted += msg.stats.files_deleted; + response.lines_read += msg.stats.lines_read; + response.lines_edited += msg.stats.lines_edited; + response.lines_added += msg.stats.lines_added; + response.lines_deleted += msg.stats.lines_deleted; + response.bytes_read += msg.stats.bytes_read; + response.bytes_edited += msg.stats.bytes_edited; + response.bytes_added += msg.stats.bytes_added; + response.bytes_deleted += msg.stats.bytes_deleted; + response.terminal_commands += msg.stats.terminal_commands; + response.file_searches += msg.stats.file_searches; + response.file_content_searches += msg.stats.file_content_searches; } - Ok(Json(FileOpsResponse::from(&aggregated))) + Ok(Json(response)) } #[tool( @@ -249,7 +314,7 @@ &self, Parameters(req): Parameters, ) -> Result, String> { - let stats = self.load_stats().await.map_err(|e| e.to_string())?; + let stats = self.load_stats().map_err(|e| e.to_string())?; let tools: Vec<ToolSummary> = stats .analyzer_stats @@ -273,7 +338,7 @@ }) .collect(); - let total_cost: f64 = filtered_stats.iter().map(|(_, ds)| ds.stats.cost).sum(); + let total_cost: f64 = filtered_stats.iter().map(|(_, ds)| ds.stats.cost()).sum(); let total_messages: u64 = filtered_stats .iter() .map(|(_, ds)| (ds.user_messages + ds.ai_messages) as u64) .sum(); + .sum(); let total_tokens: u64 = filtered_stats .iter() - .map(|(_, ds)| ds.stats.input_tokens + ds.stats.output_tokens)
+            .map(|(_, ds)| (ds.stats.input_tokens as u64) + (ds.stats.output_tokens as u64))
             .sum();
         let total_tool_calls: u32 = filtered_stats
             .iter()
@@ -316,7 +381,7 @@ impl SplitrailMcpServer {
         let registry = create_analyzer_registry();
         let analyzers: Vec<String> = registry
             .available_analyzers()
-            .iter()
+            .into_iter()
             .map(|a| a.display_name().to_string())
             .collect();
 
@@ -364,6 +429,7 @@ impl ServerHandler for SplitrailMcpServer {
                 .no_annotation(),
             ],
             next_cursor: None,
+            meta: None,
         })
     }
 
@@ -374,7 +440,7 @@ impl ServerHandler for SplitrailMcpServer {
     ) -> Result {
         match uri.as_str() {
             resource_uris::DAILY_SUMMARY => {
-                let stats = self.load_stats().await?;
+                let stats = self.load_stats()?;
                 let all_messages: Vec<_> = stats
                     .analyzer_stats
                     .iter()
@@ -390,7 +456,7 @@ impl ServerHandler for SplitrailMcpServer {
                         ds.user_messages,
                         ds.ai_messages,
                         ds.conversations,
-                        ds.stats.cost,
+                        ds.stats.cost(),
                         ds.stats.input_tokens,
                         ds.stats.output_tokens
                     )
@@ -403,7 +469,7 @@ impl ServerHandler for SplitrailMcpServer {
                 })
             }
             resource_uris::MODEL_BREAKDOWN => {
-                let stats = self.load_stats().await?;
+                let stats = self.load_stats()?;
                 let mut model_counts: HashMap = HashMap::new();
 
                 for analyzer_stats in &stats.analyzer_stats {
diff --git a/src/mcp/types.rs b/src/mcp/types.rs
index b264ba7..cfeb004 100644
--- a/src/mcp/types.rs
+++ b/src/mcp/types.rs
@@ -2,7 +2,7 @@ use schemars::JsonSchema;
 use serde::{Deserialize, Serialize};
 use std::collections::BTreeMap;
 
-use crate::types::{DailyStats, Stats};
+use crate::types::DailyStats;
 
 // ============================================================================
 // Request Types
@@ -101,22 +101,35 @@ pub struct DailyStatsResponse {
     pub results: Vec<DailySummary>,
 }
 
-impl From<(&str, &DailyStats)> for DailySummary {
-    fn from((date, ds): (&str, &DailyStats)) -> Self {
+/// File operation stats computed on-demand from raw messages.
+/// Used to supplement DailyStats (which only contains TUI-relevant fields).
+#[derive(Debug, Clone, Default)]
+pub struct DateFileOps {
+    pub files_read: u64,
+    pub files_edited: u64,
+    pub files_added: u64,
+    pub terminal_commands: u64,
+}
+
+impl DailySummary {
+    /// Create a DailySummary from DailyStats and file operation stats.
+    /// File ops are computed separately from raw messages since DailyStats
+    /// only contains TUI-relevant fields.
+    pub fn new(date: &str, ds: &DailyStats, file_ops: &DateFileOps) -> Self {
         Self {
             date: date.to_string(),
             user_messages: ds.user_messages,
             ai_messages: ds.ai_messages,
             conversations: ds.conversations,
-            total_cost: ds.stats.cost,
-            input_tokens: ds.stats.input_tokens,
-            output_tokens: ds.stats.output_tokens,
-            cache_read_tokens: ds.stats.cache_read_tokens,
+            total_cost: ds.stats.cost(),
+            input_tokens: ds.stats.input_tokens as u64,
+            output_tokens: ds.stats.output_tokens as u64,
+            cache_read_tokens: ds.stats.cached_tokens as u64,
             tool_calls: ds.stats.tool_calls,
-            files_read: ds.stats.files_read,
-            files_edited: ds.stats.files_edited,
-            files_added: ds.stats.files_added,
-            terminal_commands: ds.stats.terminal_commands,
+            files_read: file_ops.files_read,
+            files_edited: file_ops.files_edited,
+            files_added: file_ops.files_added,
+            terminal_commands: file_ops.terminal_commands,
             models: ds.models.clone(),
         }
     }
@@ -147,7 +160,7 @@ pub struct CostBreakdownResponse {
     pub average_daily_cost: f64,
 }
 
-#[derive(Debug, Clone, Serialize, JsonSchema)]
+#[derive(Debug, Clone, Default, Serialize, JsonSchema)]
 pub struct FileOpsResponse {
     pub files_read: u64,
     pub files_edited: u64,
@@ -166,28 +179,6 @@ pub struct FileOpsResponse {
     pub file_content_searches: u64,
 }
 
-impl From<&Stats> for FileOpsResponse {
-    fn from(s: &Stats) -> Self {
-        Self {
-            files_read: s.files_read,
-            files_edited: s.files_edited,
-            files_added: s.files_added,
-            files_deleted: s.files_deleted,
-            lines_read: s.lines_read,
-            lines_edited: s.lines_edited,
-            lines_added: s.lines_added,
-            lines_deleted: s.lines_deleted,
-            bytes_read: s.bytes_read,
-            bytes_edited: s.bytes_edited,
-            bytes_added: s.bytes_added,
-            bytes_deleted: s.bytes_deleted,
-            terminal_commands: s.terminal_commands,
-            file_searches: s.file_searches,
-            file_content_searches: s.file_content_searches,
-        }
-    }
-}
-
 #[derive(Debug, Clone, Serialize, JsonSchema)]
 pub struct ToolSummary {
     pub name: String,
diff --git a/src/tui.rs b/src/tui.rs
index cbb1aeb..0285fe9 100644
--- a/src/tui.rs
+++ b/src/tui.rs
@@ -3,7 +3,9 @@ pub mod logic;
 mod tests;
 
 use crate::models::is_model_estimated;
-use crate::types::{AgenticCodingToolStats, MultiAnalyzerStats};
+use crate::types::{
+    AnalyzerStatsView, CompactDate, MultiAnalyzerStatsView, SharedAnalyzerView, resolve_model,
+};
 use crate::utils::{NumberFormatOptions, format_date_for_display, format_number};
 use crate::watcher::{FileWatcher, RealtimeStatsManager, WatcherEvent};
 use anyhow::Result;
@@ -14,19 +16,18 @@ use crossterm::terminal::{
     EnterAlternateScreen, LeaveAlternateScreen, disable_raw_mode, enable_raw_mode,
 };
 use crossterm::{ExecutableCommand, execute};
-use logic::{
-    SessionAggregate, aggregate_sessions_for_all_tools, aggregate_sessions_for_all_tools_owned,
-    date_matches_buffer, has_data,
-};
+use logic::{SessionAggregate, date_matches_buffer, has_data_shared};
+use parking_lot::Mutex;
 use ratatui::backend::CrosstermBackend;
 use ratatui::layout::{Constraint, Layout, Rect};
-use ratatui::style::{Color, Modifier, Style, Stylize};
+use ratatui::style::{Color, Modifier, Style};
 use ratatui::text::{Line, Span, Text};
 use ratatui::widgets::{Block, Cell, Paragraph, Row, Table, TableState, Tabs};
 use ratatui::{Frame, Terminal};
+use std::collections::HashSet;
 use std::io::{Write, stdout};
+use std::sync::Arc;
 use std::sync::atomic::{AtomicU64, AtomicUsize, Ordering};
-use std::sync::{Arc, Mutex};
 use std::time::{Duration, SystemTime, UNIX_EPOCH};
 use tokio::sync::{mpsc, watch};
 
@@ -59,640 +60,94 @@ enum EitherIter {
 
 impl Iterator for EitherIter
 where
-    L: Iterator,
-    R: Iterator,
-{
-    type Item = T;
-
-    #[inline]
-    fn next(&mut self) -> Option {
-        match self {
-            EitherIter::Left(l) => l.next(),
-            EitherIter::Right(r) => r.next(),
-        }
-    }
-
-    #[inline]
-    fn size_hint(&self) -> (usize, Option) {
-        match self {
-            EitherIter::Left(l) => l.size_hint(),
-            EitherIter::Right(r) => r.size_hint(),
-        }
-    }
-}
-
-struct UiState<'a> {
-    table_states: &'a mut [TableState],
-    _scroll_offset: usize,
-    selected_tab: usize,
-    stats_view_mode: StatsViewMode,
-    session_window_offsets: &'a mut [usize],
-    session_day_filters: &'a mut [Option],
-    date_jump_active: bool,
-    date_jump_buffer: &'a str,
-    sort_reversed: bool,
-    show_totals: bool,
-}
-
-#[derive(Debug, Clone)]
-struct SessionTableCache {
-    sessions: Vec,
-}
-
-fn build_session_table_cache(sessions: Vec) -> SessionTableCache {
-    SessionTableCache { sessions }
-}
-
-pub fn run_tui(
-    stats_receiver: watch::Receiver,
-    format_options: &NumberFormatOptions,
-    upload_status: Arc>,
-    update_status: Arc>,
-    file_watcher: FileWatcher,
-    mut stats_manager: RealtimeStatsManager,
-) -> Result<()> {
-    enable_raw_mode()?;
-    stdout().execute(EnterAlternateScreen)?;
-    let backend = CrosstermBackend::new(stdout());
-    let mut terminal = Terminal::new(backend)?;
-
-    let mut selected_tab = 0;
-    let mut scroll_offset = 0;
-    let mut stats_view_mode = StatsViewMode::Daily;
-
-    let (watcher_tx, mut watcher_rx) = mpsc::unbounded_channel::();
-
-    tokio::spawn(async move {
-        while let Some(event) = watcher_rx.recv().await {
-            if let Err(e) = stats_manager.handle_watcher_event(event).await {
-                eprintln!("Error handling watcher event: {e}");
-            }
-        }
-        // Persist cache when TUI exits
-        stats_manager.persist_cache();
-    });
-
-    let result = tokio::task::block_in_place(|| {
-        tokio::runtime::Handle::current().block_on(run_app(
-            &mut terminal,
-            stats_receiver,
-            format_options,
-            &mut selected_tab,
-            &mut scroll_offset,
-            &mut stats_view_mode,
-            upload_status,
-            update_status,
-            file_watcher,
-            watcher_tx,
-        ))
-    });
-
-    disable_raw_mode()?;
-    terminal.backend_mut().execute(LeaveAlternateScreen)?;
-    result
-}
-
-/// Result state from running the TUI in test mode.
-#[cfg(test)]
-#[derive(Debug, Clone)]
-pub struct TestRunResult {
-    /// Whether sort order is currently reversed.
-    pub sort_reversed: bool,
-    /// The day filter for each tab (set when drilling into a specific day).
-    pub session_day_filters: Vec>,
-    /// The selected row index for each tab.
-    pub selected_rows: Vec>,
-}
-
-#[cfg(test)]
-#[allow(clippy::too_many_arguments)]
-async fn run_app_for_tests(
-    terminal: &mut Terminal,
-    mut stats_receiver: watch::Receiver,
-    format_options: &NumberFormatOptions,
-    selected_tab: &mut usize,
-    scroll_offset: &mut usize,
-    stats_view_mode: &mut StatsViewMode,
-    upload_status: Arc>,
-    update_status: Arc>,
-    file_watcher: FileWatcher,
-    watcher_tx: mpsc::UnboundedSender,
-    mut poll: FPoll,
-    mut read: FRead,
-    max_iterations: usize,
-) -> Result
-where
-    B: ratatui::backend::Backend,
-    FPoll: FnMut(Duration) -> std::io::Result,
-    FRead: FnMut() -> std::io::Result,
-{
-    let mut table_states: Vec = Vec::new();
-    let mut session_window_offsets: Vec = Vec::new();
-    let mut session_day_filters: Vec> = Vec::new();
-    let mut date_jump_active = false;
-    let mut date_jump_buffer = String::new();
-    let mut sort_reversed = false;
-    let mut show_totals = true;
-    let mut current_stats = stats_receiver.borrow().clone();
-
-    // Initialize table states for current stats
-    update_table_states(&mut table_states, &current_stats, selected_tab);
-    update_window_offsets(&mut session_window_offsets, &table_states.len());
-    update_day_filters(&mut session_day_filters, &table_states.len());
-
-    let mut needs_redraw = true;
-    let mut last_upload_status = {
-        let status = upload_status.lock().unwrap();
-        format!("{:?}", *status)
-    };
-    let mut dots_counter = 0; // Counter for dots animation (advance every 5 frames = 500ms)
-
-    // Filter analyzer stats to only include those with data - calculate once and update when stats change
-    let mut filtered_stats: Vec<&AgenticCodingToolStats> = current_stats
-        .analyzer_stats
-        .iter()
-        .filter(|stats| has_data(stats))
-        .collect();
-
-    let session_stats_per_tool = aggregate_sessions_for_all_tools(&filtered_stats);
-    let mut session_table_cache: Vec = session_stats_per_tool
-        .iter()
-        .cloned()
-        .map(build_session_table_cache)
-        .collect();
-    type SessionRecomputeHandle =
-        tokio::task::JoinHandle<(u64, Vec>, Vec)>;
-
-    let mut recompute_version: u64 = 0;
-    let mut pending_session_recompute: Option = None;
-    let mut iterations: usize = 0;
-
-    loop {
-        if iterations >= max_iterations {
-            break;
-        }
-        iterations = iterations.saturating_add(1);
-
-        // Check for stats updates
-        if stats_receiver.has_changed()? {
-            current_stats = stats_receiver.borrow_and_update().clone();
-            // Recalculate filtered stats only when stats change
-            filtered_stats = current_stats
-                .analyzer_stats
-                .iter()
-                .filter(|stats| has_data(stats))
-                .collect();
-            update_table_states(&mut table_states, &current_stats, selected_tab);
-            update_window_offsets(&mut session_window_offsets, &table_states.len());
-            update_day_filters(&mut session_day_filters, &table_states.len());
-            recompute_version = recompute_version.wrapping_add(1);
-            let version = recompute_version;
-            if let Some(handle) = pending_session_recompute.take() {
-                handle.abort();
-            }
-            let stats_for_recompute: Vec =
-                filtered_stats.iter().map(|s| (*s).clone()).collect();
-            pending_session_recompute = Some(tokio::task::spawn_blocking(move || {
-                let session_stats = aggregate_sessions_for_all_tools_owned(&stats_for_recompute);
-                let caches = session_stats
-                    .iter()
-                    .cloned()
-                    .map(build_session_table_cache)
-                    .collect();
-                (version, session_stats, caches)
-            }));
-            needs_redraw = true;
-        }
-
-        // Check for file watcher events; hand off processing so UI thread stays responsive
-        while let Some(watcher_event) = file_watcher.try_recv() {
-            let _ = watcher_tx.send(watcher_event);
-        }
-
-        // Check if upload status has changed or advance dots animation
-        let current_upload_status = {
-            let mut status = upload_status.lock().unwrap();
-            // Advance dots animation for uploading status every 500ms (5 frames at 100ms)
-            if let UploadStatus::Uploading {
-                current: _,
-                total: _,
-                dots,
-            } = &mut *status
-            {
-                // Always animate dots during upload
-                dots_counter += 1;
-                if dots_counter >= 5 {
-                    *dots = (*dots + 1) % 4;
-                    dots_counter = 0;
-                    needs_redraw = true;
-                }
-            } else {
-                // Reset counter when not uploading
-                dots_counter = 0;
-            }
-            format!("{:?}", *status)
-        };
-        if current_upload_status != last_upload_status {
-            last_upload_status = current_upload_status;
-            needs_redraw = true;
-        }
-
-        // Only redraw if something has changed
-        if needs_redraw {
-            terminal.draw(|frame| {
-                let mut ui_state = UiState {
-                    table_states: &mut table_states,
-                    _scroll_offset: *scroll_offset,
-                    selected_tab: *selected_tab,
-                    stats_view_mode: *stats_view_mode,
-                    session_window_offsets: &mut session_window_offsets,
-                    session_day_filters: &mut session_day_filters,
-                    date_jump_active,
-                    date_jump_buffer: &date_jump_buffer,
-                    sort_reversed,
-                    show_totals,
-                };
-                draw_ui(
-                    frame,
-                    &filtered_stats,
-                    format_options,
-                    &mut ui_state,
-                    upload_status.clone(),
-                    update_status.clone(),
-                    &session_table_cache,
-                );
-            })?;
-            needs_redraw = false;
-        }
-
-        if let Some(handle) = pending_session_recompute.as_mut()
-            && handle.is_finished()
-        {
-            if let Ok((version, _, new_cache)) = handle.await
-                && version == recompute_version
-            {
-                session_table_cache = new_cache;
-                needs_redraw = true;
-            }
-            pending_session_recompute = None;
-        }
-
-        // Use a timeout to allow periodic refreshes for upload status updates
-        if let Ok(event_available) = poll(Duration::from_millis(100)) {
-            if !event_available {
-                continue;
-            }
-
-            // Handle different event types
-            let key = match read()? {
-                Event::Key(key) if key.is_press() => key,
-                Event::Resize(_, _) => {
-                    // Terminal was resized, trigger redraw
-                    needs_redraw = true;
-                    continue;
-                }
-                _ => continue,
-            };
-
-            // Handle quitting.
-            if matches!(key.code, KeyCode::Char('q') | KeyCode::Esc) {
-                break;
-            }
-
-            // Only handle navigation keys if we have data (`filtered_stats` is non-empty).
-            if filtered_stats.is_empty() {
-                continue;
-            }
-
-            if date_jump_active {
-                match key.code {
-                    KeyCode::Char(c) if c.is_ascii_alphanumeric() || c == '-' || c == '/' => {
-                        date_jump_buffer.push(c);
-                        // Auto-jump to first matching date
-                        if let Some(current_stats) = filtered_stats.get(*selected_tab)
-                            && let Some(table_state) = table_states.get_mut(*selected_tab)
-                            && let Some((index, _)) =
-                                current_stats.daily_stats.iter().enumerate().find(
-                                    |(_, (day, _))| date_matches_buffer(day, &date_jump_buffer),
-                                )
-                        {
-                            table_state.select(Some(index));
-                        }
-                        needs_redraw = true;
-                    }
-                    KeyCode::Backspace => {
-                        date_jump_buffer.pop();
-                        // Re-evaluate match after backspace
-                        if let Some(current_stats) = filtered_stats.get(*selected_tab)
-                            && let Some(table_state) = table_states.get_mut(*selected_tab)
-                            && let Some((index, _)) =
-                                current_stats.daily_stats.iter().enumerate().find(
-                                    |(_, (day, _))| date_matches_buffer(day, &date_jump_buffer),
-                                )
-                        {
-                            table_state.select(Some(index));
-                        }
-                        needs_redraw = true;
-                    }
-                    KeyCode::Enter | KeyCode::Esc => {
-                        date_jump_active = false;
-                        date_jump_buffer.clear();
-                        needs_redraw = true;
-                    }
-                    _ => {}
-                }
-                continue;
-            }
-
-            match key.code {
-                KeyCode::Left | KeyCode::Char('h') => {
-                    if *selected_tab > 0 {
-                        *selected_tab -= 1;
-
-                        if let StatsViewMode::Session = *stats_view_mode
-                            && let Some(table_state) = table_states.get_mut(*selected_tab)
-                            && let Some(cache) = session_table_cache.get(*selected_tab)
-                        {
-                            let target_len = match session_day_filters
-                                .get(*selected_tab)
-                                .and_then(|f| f.as_ref())
-                            {
-                                Some(day) => {
-                                    cache.sessions.iter().filter(|s| &s.day_key == day).count()
-                                }
-                                None => cache.sessions.len(),
-                            };
-                            if target_len > 0 {
-                                table_state.select(Some(target_len.saturating_sub(1)));
-                            }
-                        }
-
-                        needs_redraw = true;
-                    }
-                }
-                KeyCode::Right | KeyCode::Char('l') => {
-                    if *selected_tab < filtered_stats.len().saturating_sub(1) {
-                        *selected_tab += 1;
-
-                        if let StatsViewMode::Session = *stats_view_mode
-                            && let Some(table_state) = table_states.get_mut(*selected_tab)
-                            && let Some(cache) = session_table_cache.get(*selected_tab)
-                        {
-                            let target_len = match session_day_filters
-                                .get(*selected_tab)
-                                .and_then(|f| f.as_ref())
-                            {
-                                Some(day) => {
-                                    cache.sessions.iter().filter(|s| &s.day_key == day).count()
-                                }
-                                None => cache.sessions.len(),
-                            };
-                            if target_len > 0 {
-                                table_state.select(Some(target_len.saturating_sub(1)));
-                            }
-                        }
-
-                        needs_redraw = true;
-                    }
-                }
-                KeyCode::Down | KeyCode::Char('j') => {
-                    if let Some(table_state) = table_states.get_mut(*selected_tab)
-                        && let Some(selected) = table_state.selected()
-                    {
-                        match *stats_view_mode {
-                            StatsViewMode::Daily => {
-                                if let Some(current_stats) = filtered_stats.get(*selected_tab) {
-                                    let total_rows = current_stats.daily_stats.len();
-                                    if selected < total_rows.saturating_add(1) {
-                                        table_state.select(Some(
-                                            if selected == total_rows.saturating_sub(1) {
-                                                selected + 2
-                                            } else {
-                                                selected + 1
-                                            },
-                                        ));
-                                        needs_redraw = true;
-                                    }
-                                }
-                            }
-                            StatsViewMode::Session => {
-                                let filtered_len = session_table_cache
-                                    .get(*selected_tab)
-                                    .map(|cache| {
-                                        session_day_filters
-                                            .get(*selected_tab)
-                                            .and_then(|f| f.as_ref())
-                                            .map(|day| {
-                                                cache
-                                                    .sessions
-                                                    .iter()
-                                                    .filter(|s| &s.day_key == day)
-                                                    .count()
-                                            })
-                                            .unwrap_or_else(|| cache.sessions.len())
-                                    })
-                                    .unwrap_or(0);
-
-                                if filtered_len > 0 && selected < filtered_len.saturating_add(1) {
-                                    // sessions: 0..len-1, separator: len, totals: len+1
-                                    table_state.select(Some(
-                                        if selected == filtered_len.saturating_sub(1) {
-                                            selected + 2
-                                        } else {
-                                            selected + 1
-                                        },
-                                    ));
-                                    needs_redraw = true;
-                                }
-                            }
-                        }
-                    }
-                }
-                KeyCode::Up | KeyCode::Char('k') => {
-                    if let Some(table_state) = table_states.get_mut(*selected_tab)
-                        && let Some(selected) = table_state.selected()
-                    {
-                        match *stats_view_mode {
-                            StatsViewMode::Daily => {
-                                if selected > 0
-                                    && let Some(current_stats) = filtered_stats.get(*selected_tab)
-                                {
-                                    let total_rows = current_stats.daily_stats.len();
-                                    table_state.select(Some(if selected == total_rows + 1 {
-                                        selected.saturating_sub(2)
-                                    } else {
-                                        selected.saturating_sub(1)
-                                    }));
-                                    needs_redraw = true;
-                                }
-                            }
-                            StatsViewMode::Session => {
-                                if selected > 0
-                                    && let Some(cache) = session_table_cache.get(*selected_tab)
-                                {
-                                    let filtered_len = session_day_filters
-                                        .get(*selected_tab)
-                                        .and_then(|f| f.as_ref())
-                                        .map(|day| {
-                                            cache
-                                                .sessions
-                                                .iter()
-                                                .filter(|s| &s.day_key == day)
-                                                .count()
-                                        })
-                                        .unwrap_or_else(|| cache.sessions.len());
-
-                                    if filtered_len > 0 {
-                                        table_state.select(Some(if selected == filtered_len + 1 {
-                                            selected.saturating_sub(2)
-                                        } else {
-                                            selected.saturating_sub(1)
-                                        }));
-                                        needs_redraw = true;
-                                    }
-                                }
-                            }
-                        }
-                    }
-                }
-                KeyCode::PageDown => {
-                    if let Some(table_state) = table_states.get_mut(*selected_tab)
-                        && let Some(selected) = table_state.selected()
-                    {
-                        match *stats_view_mode {
-                            StatsViewMode::Daily => {
-                                if let Some(current_stats) = filtered_stats.get(*selected_tab) {
-                                    let total_rows = current_stats.daily_stats.len() + 2;
-                                    let new_selected =
-                                        (selected + 10).min(total_rows.saturating_sub(1));
-                                    table_state.select(Some(new_selected));
-                                    needs_redraw = true;
-                                }
-                            }
-                            StatsViewMode::Session => {
-                                let filtered_len = session_table_cache
-                                    .get(*selected_tab)
-                                    .map(|cache| {
-                                        session_day_filters
-                                            .get(*selected_tab)
-                                            .and_then(|f| f.as_ref())
-                                            .map(|day| {
-                                                cache
-                                                    .sessions
-                                                    .iter()
-                                                    .filter(|s| &s.day_key == day)
-                                                    .count()
-                                            })
-                                            .unwrap_or_else(|| cache.sessions.len())
-                                    })
-                                    .unwrap_or(0);
+    L: Iterator,
+    R: Iterator,
+{
+    type Item = T;
+
-                                if filtered_len > 0 {
-                                    let total_rows = filtered_len + 2;
-                                    let new_selected =
-                                        (selected + 10).min(total_rows.saturating_sub(1));
-                                    table_state.select(Some(new_selected));
-                                    needs_redraw = true;
-                                }
-                            }
-                        }
-                    }
-                }
-                KeyCode::PageUp => {
-                    if let Some(table_state) = table_states.get_mut(*selected_tab)
-                        && let Some(selected) = table_state.selected()
-                    {
-                        let new_selected = selected.saturating_sub(10);
-                        table_state.select(Some(new_selected));
-                        needs_redraw = true;
-                    }
-                }
-                KeyCode::Char('/') => {
-                    if let StatsViewMode::Daily = *stats_view_mode {
-                        date_jump_active = true;
-                        date_jump_buffer.clear();
-                        needs_redraw = true;
-                    }
-                }
-                KeyCode::Char('t') => {
-                    if key.modifiers.contains(KeyModifiers::CONTROL) {
-                        *stats_view_mode = match *stats_view_mode {
-                            StatsViewMode::Daily => {
-                                session_day_filters[*selected_tab] = None;
-                                StatsViewMode::Session
-                            }
-                            StatsViewMode::Session => StatsViewMode::Daily,
-                        };
+    #[inline]
+    fn next(&mut self) -> Option {
+        match self {
+            EitherIter::Left(l) => l.next(),
+            EitherIter::Right(r) => r.next(),
+        }
+    }
+
-                        date_jump_active = false;
-                        date_jump_buffer.clear();
+    #[inline]
+    fn size_hint(&self) -> (usize, Option) {
+        match self {
+            EitherIter::Left(l) => l.size_hint(),
+            EitherIter::Right(r) => r.size_hint(),
+        }
+    }
+}
+
-                        if let StatsViewMode::Session = *stats_view_mode
-                            && let Some(table_state) = table_states.get_mut(*selected_tab)
-                            && let Some(cache) = session_table_cache.get(*selected_tab)
-                            && !cache.sessions.is_empty()
-                        {
-                            let target_len = session_day_filters
-                                .get(*selected_tab)
-                                .and_then(|f| f.as_ref())
-                                .map(|day| {
-                                    cache.sessions.iter().filter(|s| &s.day_key == day).count()
-                                })
-                                .unwrap_or_else(|| cache.sessions.len());
-                            if target_len > 0 {
-                                table_state.select(Some(target_len.saturating_sub(1)));
-                            }
-                        }
+struct UiState<'a> {
+    table_states: &'a mut [TableState],
+    _scroll_offset: usize,
+    selected_tab: usize,
+    stats_view_mode: StatsViewMode,
+    session_window_offsets: &'a mut [usize],
+    session_day_filters: &'a mut [Option],
+    date_jump_active: bool,
+    date_jump_buffer: &'a str,
+    sort_reversed: bool,
+    show_totals: bool,
+}
 
-                        needs_redraw = true;
-                    }
-                }
-                KeyCode::Enter => {
-                    if let StatsViewMode::Daily = *stats_view_mode
-                        && let Some(current_stats) = filtered_stats.get(*selected_tab)
-                        && let Some(table_state) = table_states.get_mut(*selected_tab)
-                        && let Some(selected_idx) = table_state.selected()
-                        && selected_idx < current_stats.daily_stats.len()
-                    {
-                        let day_key = if sort_reversed {
-                            current_stats.daily_stats.iter().rev().nth(selected_idx)
-                        } else {
-                            current_stats.daily_stats.iter().nth(selected_idx)
-                        }
-                        .map(|(k, _)| k);
-                        if let Some(day_key) = day_key {
-                            session_day_filters[*selected_tab] = Some(day_key.to_string());
-                            *stats_view_mode = StatsViewMode::Session;
-                            session_window_offsets[*selected_tab] = 0;
-                            table_state.select(Some(0));
-                            needs_redraw = true;
-                        }
-                    }
-                }
-                KeyCode::Char('r') => {
-                    sort_reversed = !sort_reversed;
-                    needs_redraw = true;
-                }
-                KeyCode::Char('s') => {
-                    show_totals = !show_totals;
-                    needs_redraw = true;
-                }
-                _ => {}
+pub fn run_tui(
+    stats_receiver: watch::Receiver,
+    format_options: &NumberFormatOptions,
+    upload_status: Arc>,
+    update_status: Arc>,
+    file_watcher: FileWatcher,
+    mut stats_manager: RealtimeStatsManager,
+) -> Result<()> {
+    enable_raw_mode()?;
+    stdout().execute(EnterAlternateScreen)?;
+    let backend = CrosstermBackend::new(stdout());
+    let mut terminal = Terminal::new(backend)?;
+
+    let mut selected_tab = 0;
+    let mut scroll_offset = 0;
+    let mut stats_view_mode = StatsViewMode::Daily;
+
+    let (watcher_tx, mut watcher_rx) = mpsc::unbounded_channel::();
+
+    tokio::spawn(async move {
+        while let Some(event) = watcher_rx.recv().await {
+            if let Err(e) = stats_manager.handle_watcher_event(event).await {
+                eprintln!("Error handling watcher event: {e}");
             }
         }
-    }
+        // Persist cache when TUI exits
+        stats_manager.persist_cache();
+    });
+
+    let result = tokio::task::block_in_place(|| {
+        tokio::runtime::Handle::current().block_on(run_app(
+            &mut terminal,
+            stats_receiver,
+            format_options,
+            &mut selected_tab,
+            &mut scroll_offset,
+            &mut stats_view_mode,
+            upload_status,
+            update_status,
+            file_watcher,
+            watcher_tx,
+        ))
+    });
 
-    Ok(TestRunResult {
-        sort_reversed,
-        session_day_filters,
-        selected_rows: table_states.iter().map(|ts| ts.selected()).collect(),
-    })
+    disable_raw_mode()?;
+    terminal.backend_mut().execute(LeaveAlternateScreen)?;
+    result
 }
 
 #[allow(clippy::too_many_arguments)]
 async fn run_app(
     terminal: &mut Terminal>,
-    mut stats_receiver: watch::Receiver,
+    mut stats_receiver: watch::Receiver,
     format_options: &NumberFormatOptions,
     selected_tab: &mut usize,
     scroll_offset: &mut usize,
@@ -704,7 +159,7 @@ async fn run_app(
 ) -> Result<()> {
     let mut table_states: Vec = Vec::new();
     let mut session_window_offsets: Vec = Vec::new();
-    let mut session_day_filters: Vec> = Vec::new();
+    let mut session_day_filters: Vec> = Vec::new();
     let mut date_jump_active = false;
     let mut date_jump_buffer = String::new();
     let mut sort_reversed = false;
@@ -718,38 +173,28 @@ async fn run_app(
     let mut needs_redraw = true;
     let mut last_upload_status = {
-        let status = upload_status.lock().unwrap();
+        let status = upload_status.lock();
         format!("{:?}", *status)
     };
     let mut last_update_status = {
-        let status = update_status.lock().unwrap();
+        let status = update_status.lock();
         format!("{:?}", *status)
     };
     let mut dots_counter = 0; // Counter for dots animation (advance every 5 frames = 500ms)
 
     // Filter analyzer stats to only include those with data - calculate once and update when stats change
-    let mut filtered_stats: Vec<&AgenticCodingToolStats> = current_stats
+    // SharedAnalyzerView = Arc> - clone is cheap (just Arc pointer)
+    let mut filtered_stats: Vec = current_stats
         .analyzer_stats
         .iter()
-        .filter(|stats| has_data(stats))
-        .collect();
-
-    let session_stats_per_tool = aggregate_sessions_for_all_tools(&filtered_stats);
-    let mut session_table_cache: Vec = session_stats_per_tool
-        .iter()
+        .filter(|stats| has_data_shared(stats))
         .cloned()
-        .map(build_session_table_cache)
         .collect();
-    type SessionRecomputeHandle =
-        tokio::task::JoinHandle<(u64, Vec>, Vec)>;
-
-    let mut recompute_version: u64 = 0;
-    let mut pending_session_recompute: Option = None;
 
     loop {
         // Check for update status changes
         let current_update_status = {
-            let status = update_status.lock().unwrap();
+            let status = update_status.lock();
             format!("{:?}", *status)
         };
         if current_update_status != last_update_status {
@@ -764,27 +209,13 @@ async fn run_app(
             filtered_stats = current_stats
                 .analyzer_stats
                 .iter()
-                .filter(|stats| has_data(stats))
+                .filter(|stats| has_data_shared(stats))
+                .cloned()
                 .collect();
             update_table_states(&mut table_states, &current_stats, selected_tab);
             update_window_offsets(&mut session_window_offsets, &table_states.len());
             update_day_filters(&mut session_day_filters, &table_states.len());
-            recompute_version = recompute_version.wrapping_add(1);
-            let version = recompute_version;
-            if let Some(handle) = pending_session_recompute.take() {
-                handle.abort();
-            }
-            let stats_for_recompute: Vec =
-                filtered_stats.iter().map(|s| (*s).clone()).collect();
-            pending_session_recompute = Some(tokio::task::spawn_blocking(move || {
-                let session_stats = aggregate_sessions_for_all_tools_owned(&stats_for_recompute);
-                let caches = session_stats
-                    .iter()
-                    .cloned()
-                    .map(build_session_table_cache)
-                    .collect();
-                (version, session_stats, caches)
-            }));
+
             needs_redraw = true;
         }
@@ -795,7 +226,7 @@ async fn run_app(
 
         // Check if upload status has changed or advance dots animation
         let current_upload_status = {
-            let mut status = upload_status.lock().unwrap();
+            let mut status = upload_status.lock();
             // Advance dots animation for uploading status every 500ms (5 frames at 100ms)
             if let UploadStatus::Uploading {
                 current: _,
@@ -843,24 +274,11 @@ async fn run_app(
                         &mut ui_state,
                         upload_status.clone(),
                         update_status.clone(),
-                        &session_table_cache,
                     );
                 })?;
             needs_redraw = false;
         }
 
-        if let Some(handle) = pending_session_recompute.as_mut()
-            && handle.is_finished()
-        {
-            if let Ok((version, _, new_cache)) = handle.await
-                && version == recompute_version
-            {
-                session_table_cache = new_cache;
-                needs_redraw = true;
-            }
-            pending_session_recompute = None;
-        }
-
         // Use a timeout to allow periodic refreshes for upload status updates
         if let Ok(event_available) = event::poll(Duration::from_millis(100)) {
             if !event_available {
@@ -884,15 +302,15 @@ async fn run_app(
             }
 
             // Handle update notification dismissal
-            if matches!(key.code, KeyCode::Char('u'))
-                && let Ok(mut status) = update_status.lock()
-                && matches!(
+            if matches!(key.code, KeyCode::Char('u')) {
+                let mut status = update_status.lock();
+                if matches!(
                     *status,
                     crate::version_check::UpdateStatus::Available { .. }
-                )
-            {
-                *status = crate::version_check::UpdateStatus::Dismissed;
-                needs_redraw = true;
+                ) {
+                    *status = crate::version_check::UpdateStatus::Dismissed;
+                    needs_redraw = true;
+                }
             }
 
             // Only handle navigation keys if we have data (`filtered_stats` is non-empty).
@@ -907,12 +325,15 @@ async fn run_app(
                         // Auto-jump to first matching date
                         if let Some(current_stats) = filtered_stats.get(*selected_tab)
                             && let Some(table_state) = table_states.get_mut(*selected_tab)
-                            && let Some((index, _)) =
-                                current_stats.daily_stats.iter().enumerate().find(
-                                    |(_, (day, _))| date_matches_buffer(day, &date_jump_buffer),
-                                )
                         {
-                            table_state.select(Some(index));
+                            let stats = current_stats.read();
+                            if let Some((index, _)) =
+                                stats.daily_stats.iter().enumerate().find(|(_, (day, _))| {
+                                    date_matches_buffer(day, &date_jump_buffer)
+                                })
+                            {
+                                table_state.select(Some(index));
+                            }
                         }
                         needs_redraw = true;
                     }
@@ -921,12 +342,15 @@ async fn run_app(
                         // Re-evaluate match after backspace
                         if let Some(current_stats) = filtered_stats.get(*selected_tab)
                             && let Some(table_state) = table_states.get_mut(*selected_tab)
-                            && let Some((index, _)) =
-                                current_stats.daily_stats.iter().enumerate().find(
-                                    |(_, (day, _))| date_matches_buffer(day, &date_jump_buffer),
-                                )
                         {
-                            table_state.select(Some(index));
+                            let stats = current_stats.read();
+                            if let Some((index, _)) =
+                                stats.daily_stats.iter().enumerate().find(|(_, (day, _))| {
+                                    date_matches_buffer(day, &date_jump_buffer)
+                                })
+                            {
+                                table_state.select(Some(index));
+                            }
                         }
                         needs_redraw = true;
                     }
@@ -947,16 +371,19 @@ async fn run_app(
 
                         if let StatsViewMode::Session = *stats_view_mode
                             && let Some(table_state) = table_states.get_mut(*selected_tab)
-                            && let Some(cache) = session_table_cache.get(*selected_tab)
+                            && let Some(view) = filtered_stats.get(*selected_tab)
                         {
+                            let view = view.read();
                             let target_len = match session_day_filters
                                 .get(*selected_tab)
                                 .and_then(|f| f.as_ref())
                             {
-                                Some(day) => {
-                                    cache.sessions.iter().filter(|s| &s.day_key == day).count()
-                                }
-                                None => cache.sessions.len(),
+                                Some(day) => view
+                                    .session_aggregates
+                                    .iter()
+                                    .filter(|s| &s.date == day)
+                                    .count(),
+                                None => view.session_aggregates.len(),
                             };
                             if target_len > 0 {
                                 table_state.select(Some(target_len.saturating_sub(1)));
@@ -972,16 +399,19 @@ async fn run_app(
 
                         if let StatsViewMode::Session = *stats_view_mode
                             && let Some(table_state) = table_states.get_mut(*selected_tab)
-                            && let Some(cache) = session_table_cache.get(*selected_tab)
+                            && let Some(view) = filtered_stats.get(*selected_tab)
                         {
+                            let view = view.read();
                             let target_len = match session_day_filters
                                 .get(*selected_tab)
                                 .and_then(|f| f.as_ref())
                             {
-                                Some(day) => {
-                                    cache.sessions.iter().filter(|s| &s.day_key == day).count()
-                                }
-                                None => cache.sessions.len(),
+                                Some(day) => view
+                                    .session_aggregates
+                                    .iter()
+                                    .filter(|s| &s.date == day)
+                                    .count(),
+                                None => view.session_aggregates.len(),
                             };
                             if target_len > 0 {
                                 table_state.select(Some(target_len.saturating_sub(1)));
@@ -998,7 +428,8 @@ async fn run_app(
                         match *stats_view_mode {
                             StatsViewMode::Daily => {
                                 if let Some(current_stats) = filtered_stats.get(*selected_tab) {
-                                    let total_rows = current_stats.daily_stats.len();
+                                    let view = current_stats.read();
+                                    let total_rows = view.daily_stats.len();
                                     if selected < total_rows.saturating_add(1) {
                                         table_state.select(Some(
                                             if selected == total_rows.saturating_sub(1) {
@@ -1012,20 +443,20 @@ async fn run_app(
                                 }
                             }
                             StatsViewMode::Session => {
-                                let filtered_len = session_table_cache
+                                let filtered_len = filtered_stats
                                     .get(*selected_tab)
-                                    .map(|cache| {
+                                    .map(|view| {
+                                        let v = view.read();
                                         session_day_filters
                                             .get(*selected_tab)
                                             .and_then(|f| f.as_ref())
                                             .map(|day| {
-                                                cache
-                                                    .sessions
+                                                v.session_aggregates
                                                     .iter()
-                                                    .filter(|s| &s.day_key == day)
+                                                    .filter(|s| &s.date == day)
                                                     .count()
                                             })
-                                            .unwrap_or_else(|| cache.sessions.len())
+                                            .unwrap_or_else(|| v.session_aggregates.len())
                                     })
                                     .unwrap_or(0);
@@ -1052,8 +483,9 @@ async fn run_app(
                         match *stats_view_mode {
                             StatsViewMode::Daily => {
                                 if let Some(current_stats) = filtered_stats.get(*selected_tab) {
+                                    let view = current_stats.read();
                                     table_state.select(Some(selected.saturating_sub(
-                                        if selected == current_stats.daily_stats.len() + 1 {
+                                        if selected == view.daily_stats.len() + 1 {
                                             2
                                         } else {
                                             1
@@ -1063,20 +495,20 @@ async fn run_app(
                             }
                             StatsViewMode::Session => {
-                                let filtered_len = session_table_cache
+                                let filtered_len = filtered_stats
                                     .get(*selected_tab)
-                                    .map(|cache| {
+                                    .map(|view| {
+                                        let v = view.read();
                                         session_day_filters
                                             .get(*selected_tab)
                                             .and_then(|f| f.as_ref())
                                             .map(|day| {
-                                                cache
-                                                    .sessions
+                                                v.session_aggregates
                                                     .iter()
-                                                    .filter(|s| &s.day_key == day)
+                                                    .filter(|s| &s.date == day)
                                                     .count()
                                             })
-                                            .unwrap_or_else(|| cache.sessions.len())
+                                            .unwrap_or_else(|| v.session_aggregates.len())
                                     })
                                     .unwrap_or(0);
@@ -1104,26 +536,27 @@ async fn run_app(
                         match *stats_view_mode {
                             StatsViewMode::Daily => {
                                 if let Some(current_stats) = filtered_stats.get(*selected_tab) {
-                                    let total_rows = current_stats.daily_stats.len() + 2;
+                                    let view = current_stats.read();
+                                    let total_rows = view.daily_stats.len() + 2;
                                     table_state.select(Some(total_rows.saturating_sub(1)));
                                     needs_redraw = true;
                                 }
                             }
                             StatsViewMode::Session => {
-                                let filtered_len = session_table_cache
+                                let filtered_len = filtered_stats
                                     .get(*selected_tab)
-                                    .map(|cache| {
+                                    .map(|view| {
+                                        let v = view.read();
                                         session_day_filters
                                             .get(*selected_tab)
                                             .and_then(|f| f.as_ref())
                                             .map(|day| {
-                                                cache
-                                                    .sessions
+                                                v.session_aggregates
                                                     .iter()
-                                                    .filter(|s| &s.day_key == day)
+                                                    .filter(|s| &s.date == day)
                                                     .count()
                                             })
-                                            .unwrap_or_else(|| cache.sessions.len())
+                                            .unwrap_or_else(|| v.session_aggregates.len())
                                     })
                                     .unwrap_or(0);
@@ -1143,7 +576,8 @@ async fn run_app(
                         match *stats_view_mode {
                             StatsViewMode::Daily => {
                                 if let Some(current_stats) = filtered_stats.get(*selected_tab) {
-                                    let total_rows = current_stats.daily_stats.len() + 2;
+                                    let view = current_stats.read();
+                                    let total_rows = view.daily_stats.len() + 2;
                                     let new_selected =
                                         (selected + 10).min(total_rows.saturating_sub(1));
                                     table_state.select(Some(new_selected));
@@ -1151,20 +585,20 @@ async fn run_app(
                             }
                             StatsViewMode::Session => {
-                                let filtered_len = session_table_cache
+                                let filtered_len = filtered_stats
                                     .get(*selected_tab)
-                                    .map(|cache| {
+                                    .map(|view| {
+                                        let v = view.read();
                                         session_day_filters
                                             .get(*selected_tab)
                                             .and_then(|f| f.as_ref())
                                             .map(|day| {
-                                                cache
-                                                    .sessions
+                                                v.session_aggregates
                                                     .iter()
-                                                    .filter(|s| &s.day_key == day)
+                                                    .filter(|s| &s.date == day)
                                                     .count()
                                             })
-                                            .unwrap_or_else(|| cache.sessions.len())
+                                            .unwrap_or_else(|| v.session_aggregates.len())
                                     })
                                     .unwrap_or(0);
@@ -1210,18 +644,23 @@ async fn run_app(
 
                         if let StatsViewMode::Session = *stats_view_mode
                             && let Some(table_state) = table_states.get_mut(*selected_tab)
-                            && let Some(cache) = session_table_cache.get(*selected_tab)
-                            && !cache.sessions.is_empty()
+                            && let Some(view) = filtered_stats.get(*selected_tab)
                         {
-                            let target_len = session_day_filters
-                                .get(*selected_tab)
-                                .and_then(|f| f.as_ref())
-                                .map(|day| {
-                                    cache.sessions.iter().filter(|s| &s.day_key == day).count()
-                                })
-                                .unwrap_or_else(|| cache.sessions.len());
-                            if target_len > 0 {
-                                table_state.select(Some(target_len.saturating_sub(1)));
+                            let v = view.read();
+                            if !v.session_aggregates.is_empty() {
+                                let target_len = session_day_filters
+                                    .get(*selected_tab)
+                                    .and_then(|f| f.as_ref())
+                                    .map(|day| {
+                                        v.session_aggregates
+                                            .iter()
+                                            .filter(|s| &s.date == day)
+                                            .count()
+                                    })
+                                    .unwrap_or_else(|| v.session_aggregates.len());
+                                if target_len > 0 {
+                                    table_state.select(Some(target_len.saturating_sub(1)));
+                                }
                             }
                         }
@@ -1233,20 +672,22 @@ async fn run_app(
                     && let Some(current_stats) = filtered_stats.get(*selected_tab)
                     && let Some(table_state) = table_states.get_mut(*selected_tab)
                     && let Some(selected_idx) = table_state.selected()
-                    && selected_idx < current_stats.daily_stats.len()
                 {
-                    let day_key = if sort_reversed {
-                        current_stats.daily_stats.iter().rev().nth(selected_idx)
-                    } else {
-                        current_stats.daily_stats.iter().nth(selected_idx)
-                    }
-                    .map(|(k, _)| k);
-                    if let Some(day_key) = day_key {
-                        session_day_filters[*selected_tab] = Some(day_key.to_string());
-                        *stats_view_mode = StatsViewMode::Session;
-                        session_window_offsets[*selected_tab] = 0;
-                        table_state.select(Some(0));
-                        needs_redraw = true;
+                    let view = current_stats.read();
+                    if selected_idx < view.daily_stats.len() {
+                        let day_key = if sort_reversed {
+                            view.daily_stats.iter().rev().nth(selected_idx)
+                        } else {
+                            view.daily_stats.iter().nth(selected_idx)
+                        }
+                        .and_then(|(k, _)| CompactDate::from_str(k));
+                        if let Some(day_key) = day_key {
+                            session_day_filters[*selected_tab] = Some(day_key);
+                            *stats_view_mode = StatsViewMode::Session;
+                            session_window_offsets[*selected_tab] = 0;
+                            table_state.select(Some(0));
+                            needs_redraw = true;
+                        }
                     }
                 }
             }
@@ -1268,32 +709,23 @@ async fn run_app(
 
 fn draw_ui(
     frame: &mut Frame,
-    filtered_stats: &[&AgenticCodingToolStats],
+    filtered_stats: &[SharedAnalyzerView],
     format_options: &NumberFormatOptions,
     ui_state: &mut UiState,
     upload_status: Arc>,
     update_status: Arc>,
-    session_table_cache: &[SessionTableCache],
 ) {
     // Since we're already working with filtered stats, has_data is simply whether we have any stats
     let has_data = !filtered_stats.is_empty();
 
     // Check if we have an error to determine help area height
-    let has_error = if let Ok(status) = upload_status.lock() {
-        matches!(*status, UploadStatus::Failed(_))
-    } else {
-        false
-    };
+    let has_error = matches!(*upload_status.lock(), UploadStatus::Failed(_));
 
     // Check if update is available
-    let show_update_banner = if let Ok(status) = update_status.lock() {
-        matches!(
-            *status,
-            crate::version_check::UpdateStatus::Available { .. }
-        )
-    } else {
-        false
-    };
+    let show_update_banner = matches!(
+        *update_status.lock(),
+        crate::version_check::UpdateStatus::Available { .. }
+    );
 
     // Adjust layout based on whether we have data and update banner
     let (chunks, chunk_offset) = if has_data {
@@ -1353,20 +785,20 @@ fn draw_ui(
     frame.render_widget(header, chunks[0]);
 
     // Update banner (if showing)
-    if show_update_banner
-        && let Ok(status) = update_status.lock()
-        && let crate::version_check::UpdateStatus::Available { latest, current } = &*status
-    {
-        let banner = Paragraph::new(format!(
-            " New version available: {} -> {} (press 'u' to dismiss)",
-            current, latest
-        ))
-        .style(
-            Style::default()
-                .fg(Color::Yellow)
-                .add_modifier(Modifier::BOLD),
-        );
-        frame.render_widget(banner, chunks[1]);
+    if show_update_banner {
+        let status = update_status.lock();
+        if let crate::version_check::UpdateStatus::Available { latest, current } = &*status {
+            let banner = Paragraph::new(format!(
+                " New version available: {} -> {} (press 'u' to dismiss)",
+                current, latest
+            ))
+            .style(
+                Style::default()
+                    .fg(Color::Yellow)
+                    .add_modifier(Modifier::BOLD),
+            );
+            frame.render_widget(banner, chunks[1]);
+        }
     }
 
     if has_data {
@@ -1374,10 +806,8 @@ fn draw_ui(
         let tab_titles: Vec = filtered_stats
             .iter()
             .map(|stats| {
-                Line::from(format!(
-                    " {} ({}) ",
-                    stats.analyzer_name, stats.num_conversations
-                ))
+                let s = stats.read();
+                Line::from(format!(" {} ({}) ", s.analyzer_name, s.num_conversations))
             })
             .collect();
 
@@ -1394,49 +824,52 @@ fn draw_ui(
     if let Some(current_stats) = filtered_stats.get(ui_state.selected_tab)
         && let Some(current_table_state) = ui_state.table_states.get_mut(ui_state.selected_tab)
     {
-        // Main table
-        let has_estimated_models = match ui_state.stats_view_mode {
-            StatsViewMode::Daily => {
-                let (_, has_estimated) = draw_daily_stats_table(
-                    frame,
-                    chunks[2 + chunk_offset],
-                    current_stats,
-                    format_options,
-                    current_table_state,
-                    if ui_state.date_jump_active {
-                        ui_state.date_jump_buffer
-                    } else {
-                        ""
-                    },
-                    ui_state.sort_reversed,
-                );
-                has_estimated
-            }
-            StatsViewMode::Session => {
-                if let Some(cache) = session_table_cache.get(ui_state.selected_tab) {
+        // Draw main table - hold read lock only for this scope
+        let has_estimated_models = {
+            let view = current_stats.read();
+            match ui_state.stats_view_mode {
+                StatsViewMode::Daily => {
+                    let (_, has_estimated) = draw_daily_stats_table(
+                        frame,
+                        chunks[2 + chunk_offset],
+                        &view,
+                        format_options,
+                        current_table_state,
+                        if ui_state.date_jump_active {
+                            ui_state.date_jump_buffer
+                        } else {
+                            ""
+                        },
+                        ui_state.sort_reversed,
+                    );
+                    has_estimated
+                }
+                StatsViewMode::Session => {
                     draw_session_stats_table(
                         frame,
                         chunks[2 + chunk_offset],
-                        cache,
+                        &view.session_aggregates,
                         format_options,
                         current_table_state,
                         &mut ui_state.session_window_offsets[ui_state.selected_tab],
-                        ui_state.session_day_filters[ui_state.selected_tab].as_ref(),
+                        ui_state.session_day_filters[ui_state.selected_tab],
                         ui_state.sort_reversed,
                     );
+                    false // Session view doesn't track estimated models yet
                 }
-                false // Session view doesn't track estimated models yet
             }
-        };
+        }; // Read lock on current_stats released here BEFORE draw_summary_stats
 
         // Summary stats - pass all filtered stats for aggregation (only if visible)
         // When in Session mode with a day filter, only show totals for that day
+        // NOTE: This acquires its own read locks, so we must not hold any above
         let help_chunk_offset = if ui_state.show_totals {
            let day_filter = match ui_state.stats_view_mode {
                StatsViewMode::Session => ui_state
                    .session_day_filters
                    .get(ui_state.selected_tab)
-                    .and_then(|f| f.as_ref()),
+                    .copied()
+                    .flatten(),
                StatsViewMode::Daily => None,
            };
            draw_summary_stats(
@@ -1482,58 +915,58 @@ fn draw_ui(
         frame.render_widget(help, help_chunks[0]);
 
         // Upload status on right side
-        if let Ok(status) = upload_status.lock() {
-            let (status_text, status_style) = match &*status {
-                UploadStatus::None => (String::new(), Style::default()),
-                UploadStatus::Uploading {
-                    current,
-                    total,
-                    dots,
-                } => {
-                    let dots_str = match dots % 4 {
-                        0 => "   ",
-                        1 => ".  ",
-                        2 => ".. ",
-                        _ => "...",
-                    };
-                    (
-                        format!(
-                            "Uploading {}/{} messages{}",
-                            format_number(*current as u64, format_options),
-                            format_number(*total as u64, format_options),
-                            dots_str
-                        ),
-                        Style::default().add_modifier(Modifier::DIM),
-                    )
-                }
-                UploadStatus::Uploaded => (
-                    "✓ Uploaded successfully".to_string(),
-                    Style::default().fg(Color::Green),
-                ),
-                UploadStatus::Failed(error) => {
-                    (format!("✕ {error}"), Style::default().fg(Color::Red))
-                }
-                UploadStatus::MissingApiToken => (
-                    "No API token for uploading".to_string(),
-                    Style::default().fg(Color::Yellow),
-                ),
-                UploadStatus::MissingServerUrl => (
-                    "No server URL for uploading".to_string(),
-                    Style::default().fg(Color::Yellow),
-                ),
-                UploadStatus::MissingConfig => (
-                    "Upload config incomplete".to_string(),
-                    Style::default().fg(Color::Yellow),
-                ),
-            };
-
-            if !status_text.is_empty() {
-                let status_widget = Paragraph::new(status_text)
-                    .style(status_style)
-                    .alignment(ratatui::layout::Alignment::Right)
-                    .wrap(ratatui::widgets::Wrap { trim: true });
-                frame.render_widget(status_widget, help_chunks[1]);
+        let status = upload_status.lock();
+        let (status_text, status_style) = match &*status {
+            UploadStatus::None => (String::new(), Style::default()),
+            UploadStatus::Uploading {
+                current,
+                total,
+                dots,
+            } => {
+                let dots_str = match dots % 4 {
+                    0 => "   ",
+                    1 => ".  ",
+                    2 => ".. ",
+                    _ => "...",
+                };
+                (
+                    format!(
+                        "Uploading {}/{} messages{}",
+                        format_number(*current as u64, format_options),
+                        format_number(*total as u64, format_options),
+                        dots_str
+                    ),
+                    Style::default().add_modifier(Modifier::DIM),
+                )
+            }
+            UploadStatus::Uploaded => (
+                "✓ Uploaded successfully".to_string(),
+                Style::default().fg(Color::Green),
+            ),
+            UploadStatus::Failed(error) => {
+                (format!("✕ {error}"), Style::default().fg(Color::Red))
            }
+            UploadStatus::MissingApiToken => (
+                "No API token for uploading".to_string(),
+                Style::default().fg(Color::Yellow),
+            ),
+            UploadStatus::MissingServerUrl => (
+                "No server URL for uploading".to_string(),
+                Style::default().fg(Color::Yellow),
+            ),
+            UploadStatus::MissingConfig => (
+                "Upload config incomplete".to_string(),
+                Style::default().fg(Color::Yellow),
+            ),
+        };
+        drop(status); // Release lock before rendering
+
+        if !status_text.is_empty() {
+            let status_widget = Paragraph::new(status_text)
+                .style(status_style)
+                .alignment(ratatui::layout::Alignment::Right)
+                .wrap(ratatui::widgets::Wrap { trim: true });
+            frame.render_widget(status_widget, help_chunks[1]);
        }
    } else {
@@ -1554,7 +987,7 @@ fn draw_ui(
 fn draw_daily_stats_table(
     frame: &mut Frame,
     area: Rect,
-    stats: &AgenticCodingToolStats,
+    stats: &AnalyzerStatsView,
     format_options: &NumberFormatOptions,
     table_state: &mut TableState,
     date_filter: &str,
@@ -1579,24 +1012,24 @@ fn draw_daily_stats_table(
 
     // Find best values for highlighting
     // TODO: Let's refactor this.
-    let mut best_cost = 0.0;
+    let mut best_cost_cents: u32 = 0;
     let mut best_cost_i = 0;
-    let mut best_cached_tokens = 0;
+    let mut best_cached_tokens: u32 = 0;
     let mut best_cached_tokens_i = 0;
-    let mut best_input_tokens = 0;
+    let mut best_input_tokens: u32 = 0;
     let mut best_input_tokens_i = 0;
-    let mut best_output_tokens = 0;
+    let mut best_output_tokens: u32 = 0;
     let mut best_output_tokens_i = 0;
-    let mut best_reasoning_tokens = 0;
+    let mut best_reasoning_tokens: u32 = 0;
     let mut best_reasoning_tokens_i = 0;
     let mut best_conversations = 0;
     let mut best_conversations_i = 0;
-    let mut best_tool_calls = 0;
+    let mut best_tool_calls: u32 = 0;
     let mut best_tool_calls_i = 0;
 
     for (i, day_stats) in stats.daily_stats.values().enumerate() {
-        if day_stats.stats.cost > best_cost {
-            best_cost = day_stats.stats.cost;
+        if day_stats.stats.cost_cents > best_cost_cents {
+            best_cost_cents = day_stats.stats.cost_cents;
             best_cost_i = i;
         }
         if day_stats.stats.cached_tokens > best_cached_tokens {
@@ -1626,13 +1059,13 @@ fn draw_daily_stats_table(
     }
 
     let mut rows = Vec::new();
-    let mut total_cost = 0.0;
-    let mut total_cached = 0;
-    let mut total_input = 0;
-    let mut total_output = 0;
-    let mut total_reasoning = 0;
-    let mut total_tool_calls = 0;
-    let mut total_conversations = 0;
+    let mut total_cost_cents: u64 = 0;
+    let mut total_cached: u64 = 0;
+    let mut total_input: u64 = 0;
+    let mut total_output: u64 = 0;
+    let mut total_reasoning: u64 = 0;
+    let mut total_tool_calls: u64 = 0;
+    let mut total_conversations: u64 = 0;
 
     // Use EitherIter to avoid allocation when reversing
     let items_to_render = if sort_reversed {
@@ -1647,13 +1080,13 @@ fn draw_daily_stats_table(
             continue;
         }
 
-        total_cost += day_stats.stats.cost;
-        total_cached += day_stats.stats.cached_tokens;
-        total_input += day_stats.stats.input_tokens;
-        total_output += day_stats.stats.output_tokens;
-        total_reasoning += day_stats.stats.reasoning_tokens;
-        total_tool_calls += day_stats.stats.tool_calls;
-        total_conversations += day_stats.conversations;
+        total_cost_cents += day_stats.stats.cost_cents as u64;
+        total_cached += day_stats.stats.cached_tokens as u64;
+        total_input += day_stats.stats.input_tokens as u64;
+        total_output += day_stats.stats.output_tokens as u64;
+        total_reasoning += day_stats.stats.reasoning_tokens as u64;
+        total_tool_calls += day_stats.stats.tool_calls as u64;
+        total_conversations += day_stats.conversations as u64;
 
         let mut models_vec: Vec = day_stats
            .models
@@ -1669,15 +1102,8 @@ fn draw_daily_stats_table(
        models_vec.sort();
        let models = models_vec.join(", ");
 
-        let lines_summary = format!(
-            "{}/{}/{}",
-            format_number(day_stats.stats.lines_read, format_options),
-            format_number(day_stats.stats.lines_edited, format_options),
-            format_number(day_stats.stats.lines_added, format_options)
-        );
-
         // Check if this is an empty row
-        let is_empty_row = day_stats.stats.cost == 0.0
+        let is_empty_row = day_stats.stats.cost_cents == 0
             && day_stats.stats.cached_tokens == 0
             && day_stats.stats.input_tokens == 0
             && day_stats.stats.output_tokens == 0
@@ -1699,17 +1125,17 @@ fn draw_daily_stats_table(
 
         let cost_cell = if is_empty_row {
             Line::from(Span::styled(
-                format!("${:.2}", day_stats.stats.cost),
+                format!("${:.2}", day_stats.stats.cost()),
                 Style::default().add_modifier(Modifier::DIM),
             ))
         } else if i == best_cost_i {
             Line::from(Span::styled(
-                format!("${:.2}", day_stats.stats.cost),
+                format!("${:.2}", day_stats.stats.cost()),
                 Style::default().fg(Color::Red),
             ))
         } else {
             Line::from(Span::styled(
-                format!("${:.2}", day_stats.stats.cost),
+                format!("${:.2}", day_stats.stats.cost()),
                 Style::default().fg(Color::Yellow),
             ))
         }
@@ -1823,19 +1249,6 @@ fn draw_daily_stats_table(
         }
         .right_aligned();
 
-        let _lines_cell = if is_empty_row {
-            Line::from(Span::styled(
-                lines_summary,
-                Style::default().add_modifier(Modifier::DIM),
-            ))
-        } else {
-            Line::from(Span::styled(
-                lines_summary,
-                Style::default().fg(Color::Blue),
-            ))
-        }
-        .right_aligned();
-
         let models_cell = Line::from(Span::styled(
             models,
             Style::default().add_modifier(Modifier::DIM),
@@ -1871,7 +1284,7 @@ fn draw_daily_stats_table(
     }
 
     // Collect all unique models for the totals row
-    let mut all_models = std::collections::HashSet::new();
+    let mut all_models = HashSet::new();
     let mut has_estimated_models = false;
     for day_stats in stats.daily_stats.values() {
         for model in day_stats.models.keys() {
@@ -1946,21 +1359,7 @@ fn draw_daily_stats_table(
     rows.push(separator_row);
 
     // Add totals row
-    let _total_lines_r = stats
-        .daily_stats
-        .values()
-        .map(|s| s.stats.lines_read)
-        .sum::();
-    let _total_lines_e = stats
-        .daily_stats
-        .values()
-        .map(|s| s.stats.lines_edited)
-        .sum::();
-    let _total_lines_a = stats
-        .daily_stats
-        .values()
-        .map(|s| s.stats.lines_added)
-        .sum::();
+    let total_cost = total_cost_cents as f64 / 100.0;
 
     let totals_row = Row::new(vec![
         // Arrow indicator for totals row when selected
@@ -2008,31 +1407,17 @@ fn draw_daily_stats_table(
         ))
         .right_aligned(),
         Line::from(Span::styled(
-            format_number(total_conversations as u64, format_options),
+            format_number(total_conversations, format_options),
             Style::default().add_modifier(Modifier::BOLD),
         ))
         .right_aligned(),
         Line::from(Span::styled(
-            format_number(total_tool_calls as u64, format_options),
+            format_number(total_tool_calls, format_options),
             Style::default()
                 .fg(Color::Green)
                 .add_modifier(Modifier::BOLD),
         ))
         .right_aligned(),
-        /*
-        Line::from(Span::styled(
-            format!(
-                "{}/{}/{}",
-                format_number(total_lines_r, format_options),
-                format_number(total_lines_e, format_options),
-                format_number(total_lines_a, format_options)
-            ),
-            Style::default()
-                .fg(Color::Blue)
-                .add_modifier(Modifier::BOLD),
-        ))
-        .right_aligned(),
-        */
         Line::from(Span::styled(
             all_models_text,
             Style::default().add_modifier(Modifier::DIM),
@@ -2075,11 +1460,11 @@ fn draw_daily_stats_table(
 fn draw_session_stats_table(
     frame: &mut Frame,
     area: Rect,
-    cache: &SessionTableCache,
+    sessions: &[SessionAggregate],
     format_options: &NumberFormatOptions,
     table_state: &mut TableState,
     window_offset: &mut usize,
-    day_filter: Option<&String>,
+    day_filter: Option,
     sort_reversed: bool,
 ) {
     let header = Row::new(vec![
@@ -2099,12 +1484,8 @@ fn draw_session_stats_table(
     let filtered_sessions: Vec<&SessionAggregate> = {
         let mut sessions: Vec<_> = match day_filter {
-            Some(day) => cache
-                .sessions
-                .iter()
-                .filter(|s| &s.day_key == day)
-                .collect(),
-            None => cache.sessions.iter().collect(),
+            Some(day) => sessions.iter().filter(|s| s.date == day).collect(),
+            None => sessions.iter().collect(),
         };
         if sort_reversed {
             sessions.reverse();
@@ -2158,17 +1539,17 @@ fn draw_session_stats_table(
     let mut best_reasoning_tokens_i: Option = None;
     let mut best_tool_calls_i: Option = None;
 
-    let mut total_cost = 0.0;
-    let mut total_input_tokens = 0u64;
-    let mut total_output_tokens = 0u64;
-    let mut total_cached_tokens = 0u64;
-    let mut total_reasoning_tokens = 0u64;
-    let mut total_tool_calls = 0u64;
-    let mut all_models = std::collections::HashSet::new();
+    let mut total_cost_cents: u64 = 0;
+    let mut total_input_tokens: u64 = 0;
+    let mut total_output_tokens: u64 = 0;
+    let mut total_cached_tokens: u64 = 0;
+    let mut total_reasoning_tokens: u64 = 0;
+    let mut total_tool_calls: u64 = 0;
+    let mut all_models = HashSet::new();
 
     for (idx, session) in filtered_sessions.iter().enumerate() {
         if best_cost_i
-            .map(|best_idx| session.stats.cost > filtered_sessions[best_idx].stats.cost)
+            .map(|best_idx| session.stats.cost_cents > filtered_sessions[best_idx].stats.cost_cents)
             .unwrap_or(true)
         {
             best_cost_i = Some(idx);
@@ -2217,19 +1598,19 @@ fn draw_session_stats_table(
             best_tool_calls_i = Some(idx);
         }
 
-        total_cost += session.stats.cost;
-        total_input_tokens += session.stats.input_tokens;
-        total_output_tokens += session.stats.output_tokens;
-        total_cached_tokens += session.stats.cached_tokens;
-        total_reasoning_tokens += session.stats.reasoning_tokens;
+        total_cost_cents += session.stats.cost_cents as u64;
+        total_input_tokens += session.stats.input_tokens as u64;
+        total_output_tokens += session.stats.output_tokens as u64;
+        total_cached_tokens += session.stats.cached_tokens as u64;
+        total_reasoning_tokens += session.stats.reasoning_tokens as u64;
         total_tool_calls += session.stats.tool_calls as u64;
 
-        for model in &session.models {
-            all_models.insert(model.clone());
+        for &(model, _) in session.models.iter() {
+            all_models.insert(model);
         }
     }
 
-    let mut all_models_vec = all_models.into_iter().collect::>();
+    let mut all_models_vec: Vec<&str> = all_models.into_iter().map(resolve_model).collect();
     all_models_vec.sort();
     let all_models_text = all_models_vec.join(", ");
 
@@ -2265,12 +1646,12 @@ fn draw_session_stats_table(
         let cost_cell = if best_cost_i == Some(i) {
             Line::from(Span::styled(
-                format!("${:.2}", session.stats.cost),
+                format!("${:.2}", session.stats.cost()),
                 Style::default().fg(Color::Red),
             ))
         } else {
             Line::from(Span::styled(
-                format!("${:.2}", session.stats.cost),
+                format!("${:.2}", session.stats.cost()),
                 Style::default().fg(Color::Yellow),
             ))
         }
@@ -2330,19 +1711,23 @@ fn draw_session_stats_table(
         let tools_cell = if best_tool_calls_i == Some(i) {
             Line::from(Span::styled(
-                format_number(session.stats.tool_calls as u64, format_options),
+                format_number(session.stats.tool_calls, format_options),
                 Style::default().fg(Color::Red),
             ))
         } else {
             Line::from(Span::styled(
-                format_number(session.stats.tool_calls as u64, format_options),
+                format_number(session.stats.tool_calls, format_options),
                 Style::default().add_modifier(Modifier::DIM),
             ))
         }
         .right_aligned();
 
         // Per-session models column: sorted, deduplicated list of models used in this session
-        let mut models_vec = session.models.clone();
+        let mut models_vec: Vec<&str> = session
+            .models
+            .iter()
+            .map(|(s, _)| resolve_model(*s))
+            .collect();
         models_vec.sort();
         models_vec.dedup();
         let models_text = models_vec.join(", ");
@@ -2413,6 +1798,7 @@ fn draw_session_stats_table(
         rows.push(separator_row);
     } else {
         // Totals row
+        let total_cost = total_cost_cents as f64 / 100.0;
         let totals_row = Row::new(vec![
             Line::from(Span::raw("")),
             Line::from(Span::styled(
@@ -2495,38 +1881,39 @@ fn draw_summary_stats(
 fn draw_summary_stats(
     frame: &mut Frame,
     area: Rect,
-    filtered_stats: &[&AgenticCodingToolStats],
+    filtered_stats: &[SharedAnalyzerView],
     format_options: &NumberFormatOptions,
-    day_filter: Option<&String>,
+    day_filter: Option,
 ) {
     // Aggregate stats from all tools, optionally filtered to a single day
-    let mut total_cost: f64 = 0.0;
+    let mut total_cost_cents: u64 = 0;
     let mut total_cached: u64 = 0;
     let mut total_input: u64 = 0;
     let mut total_output: u64 = 0;
     let mut total_reasoning: u64 = 0;
     let mut total_tool_calls: u64 = 0;
-    let mut all_days = std::collections::HashSet::new();
-
-    for stats in filtered_stats {
-        // Filter to specific day if day_filter is set
-        let daily_iter: Box> =
-            if let Some(day) = day_filter {
-                Box::new(stats.daily_stats.iter().filter(move |(d, _)| *d == day))
-            } else {
-                Box::new(stats.daily_stats.iter())
-            };
+    let mut all_days = HashSet::new();
+
+    for stats_arc in filtered_stats {
+        let stats = stats_arc.read();
+        // Iterate directly - filter inline if day_filter is set
+        for (day, day_stats) in stats.daily_stats.iter() {
+            // Skip if day doesn't match filter
+            if let Some(filter_day) = day_filter
+                && CompactDate::from_str(day) != Some(filter_day)
+            {
+                continue;
+            }
 
-        for (day, day_stats) in daily_iter {
-            total_cost += day_stats.stats.cost;
-            total_cached += day_stats.stats.cached_tokens;
-            total_input += day_stats.stats.input_tokens;
-            total_output += day_stats.stats.output_tokens;
-            total_reasoning += day_stats.stats.reasoning_tokens;
+            total_cost_cents += day_stats.stats.cost_cents as u64;
+            total_cached += day_stats.stats.cached_tokens as u64;
+            total_input += day_stats.stats.input_tokens as u64;
+            total_output += day_stats.stats.output_tokens as u64;
+            total_reasoning += day_stats.stats.reasoning_tokens as u64;
             total_tool_calls += day_stats.stats.tool_calls as u64;
 
             // Collect unique days across all tools that have actual data
-            if day_stats.stats.cost > 0.0
+            if day_stats.stats.cost_cents > 0
                 || day_stats.stats.input_tokens > 0
                 || day_stats.stats.output_tokens > 0
                 || day_stats.stats.reasoning_tokens > 0
@@ -2535,12 +1922,13 @@ fn draw_summary_stats(
                 || day_stats.ai_messages > 0
                 || day_stats.conversations > 0
             {
-                all_days.insert(day);
+                all_days.insert(day.clone());
            }
        }
    }
 
    let total_tokens = total_cached + total_input + total_output;
+    let total_cost = total_cost_cents as f64 / 100.0;
    let tools_count = filtered_stats.len();
 
    // Define summary rows with labels and values
@@ -2594,7 +1982,10 @@ fn draw_summary_stats(
 
    // Show "Totals" or "Totals for " depending on filter
    let title = if let Some(day) = day_filter {
-        format!("Totals for {}", crate::utils::format_date_for_display(day))
+        format!(
+            "Totals for {}",
+            crate::utils::format_date_for_display(&day.to_string())
+        )
    } else {
        "Totals".to_string()
    };
@@ -2610,13 +2001,13 @@ fn draw_summary_stats(
 
 fn update_table_states(
     table_states: &mut Vec,
-    current_stats: &MultiAnalyzerStats,
+    current_stats: &MultiAnalyzerStatsView,
     selected_tab: &mut usize,
 ) {
     let filtered_count = current_stats
         .analyzer_stats
         .iter()
-        .filter(|stats| has_data(stats))
+        .filter(|stats| has_data_shared(stats))
         .count();
 
     // Preserve existing table states when resizing
@@ -2626,7 +2017,7 @@ fn update_table_states(
     for i in 0..filtered_count {
         let state = if i < old_states.len() {
             // Preserve existing state if available
-            old_states[i].clone()
+            old_states[i]
         } else {
             // Create new state for new analyzers
             let mut new_state = TableState::default();
@@ -2655,16 +2046,11 @@ fn update_window_offsets(window_offsets: &mut Vec, filtered_count: &usize
     }
 }
 
-fn update_day_filters(filters: &mut Vec>, filtered_count: &usize) {
-    let old = filters.clone();
-    filters.clear();
-
-    for i in 0..*filtered_count {
-        if i < old.len() {
-            filters.push(old[i].clone());
-        } else {
-            filters.push(None);
-        }
-    }
+fn update_day_filters(filters: &mut Vec>, filtered_count: &usize) {
+    let old_len = filters.len();
+    filters.resize(*filtered_count, None);
+    if *filtered_count < old_len {
diff --git a/src/tui/logic.rs b/src/tui/logic.rs
index f587ef1..9ca94a6 100644
--- a/src/tui/logic.rs
+++ b/src/tui/logic.rs
@@ -1,128 +1,21 @@
-use crate::types::{AgenticCodingToolStats, Stats};
-use chrono::{DateTime, Local, Utc};
+use crate::types::{CompactDate, ConversationMessage, ModelCounts, Stats, TuiStats, intern_model};
use std::collections::BTreeMap;
-
-#[derive(Debug, Clone)]
-pub struct SessionAggregate {
-    pub session_id: String,
-    pub first_timestamp: DateTime<Utc>,
-    #[allow(dead_code)] // Used in tests and preserved for external API
-    pub analyzer_name: String,
-    pub stats: Stats,
-    pub models: Vec<String>,
-    pub session_name: Option<String>,
-    pub day_key: String,
-}
-
-pub fn accumulate_stats(dst: &mut Stats, src: &Stats) {
-    // Token and cost stats
-    dst.input_tokens += src.input_tokens;
-    dst.output_tokens += src.output_tokens;
-    dst.reasoning_tokens += src.reasoning_tokens;
-    dst.cache_creation_tokens += src.cache_creation_tokens;
-    dst.cache_read_tokens += src.cache_read_tokens;
-    dst.cached_tokens += src.cached_tokens;
-    dst.cost += src.cost;
-    dst.tool_calls += src.tool_calls;
-
-    // File operation stats
-    dst.terminal_commands += src.terminal_commands;
-    dst.file_searches += src.file_searches;
-    dst.file_content_searches += src.file_content_searches;
-    dst.files_read += src.files_read;
-    dst.files_added += src.files_added;
-    dst.files_edited += src.files_edited;
-    dst.files_deleted += src.files_deleted;
-    dst.lines_read += src.lines_read;
-    dst.lines_added += src.lines_added;
-    dst.lines_edited += src.lines_edited;
-    dst.lines_deleted += src.lines_deleted;
-    dst.bytes_read += src.bytes_read;
-    dst.bytes_added += src.bytes_added;
-    dst.bytes_edited += src.bytes_edited;
-    dst.bytes_deleted += src.bytes_deleted;
-
-    // Todo stats
-    dst.todos_created += src.todos_created;
-    dst.todos_completed += src.todos_completed;
-    dst.todos_in_progress += src.todos_in_progress;
-    dst.todo_writes += src.todo_writes;
-    dst.todo_reads += src.todo_reads;
-
-    // Composition stats
-    dst.code_lines += src.code_lines;
-    dst.docs_lines += src.docs_lines;
-    dst.data_lines += src.data_lines;
-    dst.media_lines += src.media_lines;
-    dst.config_lines += src.config_lines;
-    dst.other_lines += src.other_lines;
-}
-
-pub fn aggregate_sessions_for_tool(stats: &AgenticCodingToolStats) -> Vec<SessionAggregate> {
-    let mut sessions: BTreeMap<String, SessionAggregate> = BTreeMap::new();
-
-    for msg in &stats.messages {
-        let session_key = msg.conversation_hash.clone();
-        let entry = sessions
-            .entry(session_key.clone())
-            .or_insert_with(|| SessionAggregate {
-                session_id: session_key.clone(),
-                first_timestamp: msg.date,
-                analyzer_name: stats.analyzer_name.clone(),
-                stats: Stats::default(),
-                models: Vec::new(),
-                session_name: None,
-                day_key: msg
-                    .date
-                    .with_timezone(&Local)
-                    .format("%Y-%m-%d")
-                    .to_string(),
-            });
-
-        if msg.date < entry.first_timestamp {
-            entry.first_timestamp = msg.date;
-            entry.day_key = msg
-                .date
-                .with_timezone(&Local)
-                .format("%Y-%m-%d")
-                .to_string();
-        }
-
-        // Only aggregate stats for assistant/model messages and track models
-        if let Some(model) = &msg.model {
-            if !entry.models.iter().any(|m| m == model) {
-                entry.models.push(model.clone());
-            }
-            accumulate_stats(&mut entry.stats, &msg.stats);
-        }
-
-        // Capture session name if available (last one wins, or first one, doesn't matter much as they should be consistent per file/session)
-        if let Some(name) = &msg.session_name {
-            entry.session_name = Some(name.clone());
-        }
-    }
-
-    let mut result: Vec<SessionAggregate> = sessions.into_values().collect();
-
-    // Sort oldest sessions first so newest appear at the bottom (like per-day view)
-    result.sort_by_key(|s| s.first_timestamp);
-
-    result
-}
-
-pub fn aggregate_sessions_for_all_tools(
-    filtered_stats: &[&AgenticCodingToolStats],
-) -> Vec<Vec<SessionAggregate>> {
-    filtered_stats
-        .iter()
-        .map(|stats| aggregate_sessions_for_tool(stats))
-        .collect()
-}
-
-pub fn aggregate_sessions_for_all_tools_owned(
-    stats: &[AgenticCodingToolStats],
-) -> Vec<Vec<SessionAggregate>> {
-    stats.iter().map(aggregate_sessions_for_tool).collect()
+use std::sync::Arc;
+
+// Re-export SessionAggregate from types
+pub use crate::types::SessionAggregate;
+
+/// Accumulate TUI-relevant stats from a full Stats into a TuiStats.
+/// Only copies the 6 fields displayed in the TUI.
+pub fn accumulate_tui_stats(dst: &mut TuiStats, src: &Stats) {
+    dst.input_tokens = dst.input_tokens.saturating_add(src.input_tokens as u32);
+    dst.output_tokens = dst.output_tokens.saturating_add(src.output_tokens as u32);
+    dst.reasoning_tokens = dst
+        .reasoning_tokens
+        .saturating_add(src.reasoning_tokens as u32);
+    dst.cached_tokens = dst.cached_tokens.saturating_add(src.cached_tokens as u32);
+    dst.add_cost(src.cost);
+    dst.tool_calls = dst.tool_calls.saturating_add(src.tool_calls);
}

/// Check if a date string (YYYY-MM-DD format) matches the user's search buffer
@@ -212,13 +105,105 @@ pub fn date_matches_buffer(day: &str, buffer: &str) -> bool {
    false
}

-pub fn has_data(stats: &AgenticCodingToolStats) -> bool {
+/// Check if an AnalyzerStatsView has any data to display.
+pub fn has_data_view(stats: &crate::types::AnalyzerStatsView) -> bool {
    stats.num_conversations > 0
        || stats.daily_stats.values().any(|day| {
-            day.stats.cost > 0.0
+            day.stats.cost_cents > 0
                || day.stats.input_tokens > 0
                || day.stats.output_tokens > 0
                || day.stats.reasoning_tokens > 0
                || day.stats.tool_calls > 0
        })
}
+
+/// Check if a SharedAnalyzerView has any data to display.
+/// Acquires a read lock to check the data.
+pub fn has_data_shared(stats: &crate::types::SharedAnalyzerView) -> bool {
+    has_data_view(&stats.read())
+}
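A small usage sketch of `accumulate_tui_stats` as defined above, inside a hypothetical test; the `Stats` values are made up. Accumulation uses `saturating_add`, so repeated additions pin at `u32::MAX` rather than wrapping (the initial `as u32` casts are still plain truncations of the u64 source fields):

```rust
let mut day_total = TuiStats::default();
let msg = Stats {
    input_tokens: 1_200,
    output_tokens: 300,
    cost: 0.0142, // dollars; add_cost rounds to the nearest cent
    ..Stats::default()
};

accumulate_tui_stats(&mut day_total, &msg);
accumulate_tui_stats(&mut day_total, &msg);

assert_eq!(day_total.input_tokens, 2_400);
assert_eq!(day_total.cost_cents, 2); // 1.42 cents rounds to 1, applied twice
```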
+/// Aggregate sessions from a slice of messages with a specified analyzer name.
+/// Used when converting AgenticCodingToolStats to AnalyzerStatsView.
+///
+/// Takes `Arc<str>` for analyzer_name to avoid allocating a new String per session.
+/// The Arc is cloned (cheap pointer copy) into each SessionAggregate.
+pub fn aggregate_sessions_from_messages(
+    messages: &[ConversationMessage],
+    analyzer_name: Arc<str>,
+) -> Vec<SessionAggregate> {
+    let mut sessions: BTreeMap<String, SessionAggregate> = BTreeMap::new();
+
+    for msg in messages {
+        // Use or_insert_with_key to avoid redundant cloning:
+        // - Pass owned key to entry() (1 clone of conversation_hash)
+        // - Clone key only when inserting a new session (via closure's &key)
+        let entry = sessions
+            .entry(msg.conversation_hash.clone())
+            .or_insert_with_key(|key| SessionAggregate {
+                session_id: key.clone(),
+                first_timestamp: msg.date,
+                analyzer_name: Arc::clone(&analyzer_name),
+                stats: TuiStats::default(),
+                models: ModelCounts::new(),
+                session_name: None,
+                date: CompactDate::from_local(&msg.date),
+            });
+
+        if msg.date < entry.first_timestamp {
+            entry.first_timestamp = msg.date;
+            entry.date = CompactDate::from_local(&msg.date);
+        }
+
+        // Only aggregate stats for assistant/model messages and track models
+        if let Some(model) = &msg.model {
+            entry.models.increment(intern_model(model), 1);
+            accumulate_tui_stats(&mut entry.stats, &msg.stats);
+        }
+
+        // Capture session name if available
+        if let Some(name) = &msg.session_name {
+            entry.session_name = Some(name.clone());
+        }
+    }
+
+    let mut result: Vec<SessionAggregate> = sessions.into_values().collect();
+
+    // Sort oldest sessions first so newest appear at the bottom
+    result.sort_by_key(|s| s.first_timestamp);
+
+    // Shrink to fit to release excess capacity
+    result.shrink_to_fit();
+
+    result
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+    use crate::types::AnalyzerStatsView;
+
+    #[test]
+    fn has_data_view_returns_true_for_non_empty() {
+        let view = AnalyzerStatsView {
+            daily_stats: BTreeMap::new(),
+            session_aggregates: vec![],
+            num_conversations: 1,
+            analyzer_name: Arc::from("Test"),
+        };
+
+        assert!(has_data_view(&view));
+    }
+
+    #[test]
+    fn has_data_view_returns_false_for_empty() {
+        let view = AnalyzerStatsView {
+            daily_stats: BTreeMap::new(),
+            session_aggregates: vec![],
+            num_conversations: 0,
+            analyzer_name: Arc::from("Test"),
+        };
+
+        assert!(!has_data_view(&view));
+    }
+}
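The `or_insert_with_key` comment above is the standard entry-API trick for avoiding a second key clone on the hot path. A self-contained sketch of the pattern (map contents hypothetical, not from the diff):

```rust
use std::collections::BTreeMap;

fn main() {
    let mut map: BTreeMap<String, String> = BTreeMap::new();
    let hash = String::from("conv-1");

    // `entry` consumes one clone of the key; the closure receives a reference
    // to that key, so a second clone happens only when a new entry is created.
    map.entry(hash.clone())
        .or_insert_with_key(|key| format!("session for {key}"));

    assert_eq!(map["conv-1"], "session for conv-1");
}
```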
diff --git a/src/tui/tests.rs b/src/tui/tests.rs
index 519ac8d..3f1e3ea 100644
--- a/src/tui/tests.rs
+++ b/src/tui/tests.rs
@@ -1,25 +1,11 @@
-use crate::tui::logic::*;
+use crate::tui::logic::{accumulate_tui_stats, date_matches_buffer};
use crate::tui::{
-    StatsViewMode, TestRunResult, UploadStatus, create_upload_progress_callback, run_app_for_tests,
-    show_upload_error, show_upload_success, update_day_filters, update_table_states,
-    update_window_offsets,
+    create_upload_progress_callback, show_upload_error, show_upload_success, update_day_filters,
+    update_table_states, update_window_offsets,
};
-use crate::types::{
-    AgenticCodingToolStats, Application, ConversationMessage, DailyStats, MessageRole,
-    MultiAnalyzerStats, Stats,
-};
-use chrono::{TimeZone, Utc};
-use crossterm::event::{Event, KeyCode, KeyEvent, KeyModifiers};
-use ratatui::Terminal;
-use ratatui::backend::TestBackend;
+use crate::types::{AgenticCodingToolStats, CompactDate, MultiAnalyzerStats, Stats, TuiStats};
use ratatui::widgets::TableState;
-use std::collections::{BTreeMap, VecDeque};
-use std::sync::{Arc, Mutex};
-use tokio::runtime::Builder;
-use tokio::sync::{mpsc, watch};
-
-use crate::utils::NumberFormatOptions;
-use crate::watcher::FileWatcher;
+use std::collections::BTreeMap;

// ============================================================================
// TABLE STATE MANAGEMENT TESTS (tui.rs helpers)
// ============================================================================
@@ -31,14 +17,14 @@ fn make_tool_stats(name: &str, has_data: bool) -> AgenticCodingToolStats {
    daily_stats.insert(
        "2025-01-01".to_string(),
        crate::types::DailyStats {
-            date: "2025-01-01".to_string(),
+            date: CompactDate::from_str("2025-01-01").unwrap(),
            user_messages: 0,
            ai_messages: 1,
            conversations: 1,
            models: BTreeMap::new(),
-            stats: Stats {
+            stats: TuiStats {
                input_tokens: 10,
-                ..Stats::default()
+                ..TuiStats::default()
            },
        },
    );
@@ -52,127 +38,6 @@ fn make_tool_stats(name: &str, has_data: bool) -> AgenticCodingToolStats {
    }
}

-fn make_multi_two_tools() -> MultiAnalyzerStats {
-    let tool_a = make_tool_stats("Tool A", true);
-    let tool_b = make_tool_stats("Tool B", true);
-    MultiAnalyzerStats {
-        analyzer_stats: vec![tool_a, tool_b],
-    }
-}
-
-fn make_multi_single_tool_two_days() -> MultiAnalyzerStats {
-    let mut daily_stats = BTreeMap::new();
-    daily_stats.insert(
-        "2025-01-01".to_string(),
-        DailyStats {
-            date: "2025-01-01".to_string(),
-            user_messages: 0,
-            ai_messages: 1,
-            conversations: 1,
-            models: BTreeMap::new(),
-            stats: Stats {
-                input_tokens: 10,
-                ..Stats::default()
-            },
-        },
-    );
-    daily_stats.insert(
-        "2025-02-01".to_string(),
-        DailyStats {
-            date: "2025-02-01".to_string(),
-            user_messages: 0,
-            ai_messages: 1,
-            conversations: 1,
-            models: BTreeMap::new(),
-            stats: Stats {
-                input_tokens: 20,
-                ..Stats::default()
-            },
-        },
-    );
-
-    let tool = AgenticCodingToolStats {
-        daily_stats,
-        num_conversations: 2,
-        messages: vec![],
-        analyzer_name: "Tool A".to_string(),
-    };
-
-    MultiAnalyzerStats {
-        analyzer_stats: vec![tool],
-    }
-}
-
-struct TuiTestResult {
-    selected_tab: usize,
-    stats_view_mode: StatsViewMode,
-    test_state: TestRunResult,
-}
-
-fn run_tui_with_events(
-    stats: MultiAnalyzerStats,
-    events: Vec<Event>,
-    max_iterations: usize,
-) -> TuiTestResult {
-    let backend = TestBackend::new(80, 24);
-    let mut terminal = Terminal::new(backend).expect("terminal");
-
-    let (_tx, rx) = watch::channel(stats);
-
-    let format_options = NumberFormatOptions {
-        use_comma: false,
-        use_human: false,
-        locale: "en".to_string(),
-        decimal_places: 2,
-    };
-
-    let mut selected_tab = 0usize;
-    let mut scroll_offset = 0usize;
-    let mut stats_view_mode = StatsViewMode::Daily;
-    let upload_status = Arc::new(Mutex::new(UploadStatus::None));
-    let update_status = Arc::new(Mutex::new(crate::version_check::UpdateStatus::UpToDate));
-    let file_watcher = FileWatcher::for_tests();
-    let (watcher_tx, _watcher_rx) = mpsc::unbounded_channel();
-
-    let event_queue: std::cell::RefCell<VecDeque<Event>> =
-        std::cell::RefCell::new(VecDeque::from(events));
-
-    let rt = Builder::new_current_thread()
-        .enable_all()
-        .build()
-        .expect("runtime");
-
-    let test_state = rt.block_on(async {
-        run_app_for_tests(
-            &mut terminal,
-            rx,
-            &format_options,
-            &mut selected_tab,
-            &mut scroll_offset,
-            &mut stats_view_mode,
-            upload_status,
-            update_status,
-            file_watcher,
-            watcher_tx,
-            |_: std::time::Duration| Ok(!event_queue.borrow().is_empty()),
-            || {
-                event_queue.borrow_mut().pop_front().ok_or_else(|| {
-                    std::io::Error::new(std::io::ErrorKind::UnexpectedEof, "no event")
-                })
-            },
-            max_iterations,
-        )
-        .await
-        .expect("run_app_for_tests ok")
-    });
-
-    TuiTestResult {
-        selected_tab,
-        stats_view_mode,
-        test_state,
-    }
-}
-
#[test]
fn test_update_table_states_filters_and_preserves_selection() {
    let stats_with_data = make_tool_stats("with-data", true);
@@ -181,11 +46,12 @@ fn test_update_table_states_filters_and_preserves_selection() {
    let multi = MultiAnalyzerStats {
        analyzer_stats: vec![stats_with_data, stats_without_data],
    };
+    let multi_view = multi.into_view();

    let mut table_states: Vec<TableState> = Vec::new();
    let mut selected_tab = 0usize;

-    update_table_states(&mut table_states, &multi, &mut selected_tab);
+    update_table_states(&mut table_states, &multi_view, &mut selected_tab);

    // Only analyzers with data should be represented.
    assert_eq!(table_states.len(), 1);
@@ -195,28 +61,36 @@ fn test_update_table_states_filters_and_preserves_selection() {
    // If selected_tab is out of range, it should be clamped.
    let mut table_states = vec![TableState::default(); 1];
    let mut selected_tab = 10usize;
-    update_table_states(&mut table_states, &multi, &mut selected_tab);
+    let multi2 = MultiAnalyzerStats {
+        analyzer_stats: vec![
+            make_tool_stats("with-data", true),
+            make_tool_stats("without-data", false),
+        ],
+    };
+    let multi_view2 = multi2.into_view();
+    update_table_states(&mut table_states, &multi_view2, &mut selected_tab);
    assert_eq!(selected_tab, 0);
}

#[test]
fn test_update_window_offsets_and_day_filters_resize() {
    let mut offsets = vec![5usize];
-    let mut filters: Vec<Option<String>> = vec![Some("2025-01-01".to_string())];
+    let day = CompactDate::from_str("2025-01-01").unwrap();
+    let mut filters: Vec<Option<CompactDate>> = vec![Some(day)];

    let count_two = 2usize;
    update_window_offsets(&mut offsets, &count_two);
    update_day_filters(&mut filters, &count_two);

    assert_eq!(offsets, vec![5, 0]);
-    assert_eq!(filters, vec![Some("2025-01-01".to_string()), None]);
+    assert_eq!(filters, vec![Some(day), None]);

    let count_one = 1usize;
    update_window_offsets(&mut offsets, &count_one);
    update_day_filters(&mut filters, &count_one);

    assert_eq!(offsets, vec![5]);
-    assert_eq!(filters, vec![Some("2025-01-01".to_string())]);
+    assert_eq!(filters, vec![Some(day)]);
}

// ============================================================================
@@ -335,8 +209,8 @@ fn test_date_matches_buffer_month_day_year_format() {
// ============================================================================

#[test]
-fn test_accumulate_stats_basic() {
-    let mut dst = Stats::default();
+fn test_accumulate_tui_stats_basic() {
+    let mut dst = TuiStats::default();
    let src = Stats {
        input_tokens: 100,
        output_tokens: 50,
@@ -344,15 +218,15 @@ fn test_accumulate_stats_basic() {
        ..Stats::default()
    };

-    accumulate_stats(&mut dst, &src);
+    accumulate_tui_stats(&mut dst, &src);
    assert_eq!(dst.input_tokens, 100);
    assert_eq!(dst.output_tokens, 50);
-    assert_eq!(dst.cost, 0.01);
+    assert_eq!(dst.cost(), 0.01);
}

#[test]
-fn test_accumulate_stats_multiple_times() {
-    let mut dst = Stats::default();
+fn test_accumulate_tui_stats_multiple_times() {
+    let mut dst = TuiStats::default();
    let src = Stats {
        input_tokens: 100,
        output_tokens: 50,
@@ -360,489 +234,45 @@ fn test_accumulate_stats_multiple_times() {
        ..Stats::default()
    };

-    accumulate_stats(&mut dst, &src);
-    accumulate_stats(&mut dst, &src);
+    accumulate_tui_stats(&mut dst, &src);
+    accumulate_tui_stats(&mut dst, &src);
    assert_eq!(dst.input_tokens, 200);
    assert_eq!(dst.output_tokens, 100);
-    assert_eq!(dst.cost, 0.02);
+    assert_eq!(dst.cost(), 0.02);
}

#[test]
-fn test_accumulate_stats_comprehensive() {
-    let mut dst = Stats::default();
+fn test_accumulate_tui_stats_comprehensive() {
+    let mut dst = TuiStats::default();
    let src = Stats {
        input_tokens: 100,
        output_tokens: 50,
        reasoning_tokens: 25,
-        cache_creation_tokens: 10,
-        cache_read_tokens: 5,
        cached_tokens: 15,
        cost: 0.01,
        tool_calls: 3,
-        terminal_commands: 2,
-        file_searches: 1,
-        files_read: 5,
-        files_edited: 2,
-        lines_added: 100,
-        lines_deleted: 50,
-        bytes_added: 5000,
+        // File operation fields exist in Stats but are not accumulated into TuiStats
        ..Stats::default()
    };

-    accumulate_stats(&mut dst, &src);
+    accumulate_tui_stats(&mut dst, &src);

    assert_eq!(dst.input_tokens, 100);
    assert_eq!(dst.output_tokens, 50);
    assert_eq!(dst.reasoning_tokens, 25);
-    assert_eq!(dst.cache_creation_tokens, 10);
-    assert_eq!(dst.cache_read_tokens, 5);
    assert_eq!(dst.cached_tokens, 15);
-    assert_eq!(dst.cost, 0.01);
+    assert_eq!(dst.cost(), 0.01);
    assert_eq!(dst.tool_calls, 3);
-    assert_eq!(dst.terminal_commands, 2);
-    assert_eq!(dst.file_searches, 1);
-    assert_eq!(dst.files_read, 5);
-    assert_eq!(dst.files_edited, 2);
-    assert_eq!(dst.lines_added, 100);
-    assert_eq!(dst.lines_deleted, 50);
-    assert_eq!(dst.bytes_added, 5000);
}

#[test]
-fn test_accumulate_stats_zero_values() {
-    let mut dst = Stats::default();
+fn test_accumulate_tui_stats_zero_values() {
+    let mut dst = TuiStats::default();
    let src = Stats::default();

-    accumulate_stats(&mut dst, &src);
+    accumulate_tui_stats(&mut dst, &src);

    assert_eq!(dst.input_tokens, 0);
    assert_eq!(dst.output_tokens, 0);
-    assert_eq!(dst.cost, 0.0);
-}
-
-// ============================================================================
-// SESSION AGGREGATION TESTS
-// ============================================================================
-
-#[test]
-fn test_aggregate_sessions_single() {
-    let date_utc = Utc.with_ymd_and_hms(2025, 11, 20, 2, 0, 0).unwrap();
-
-    let msg = ConversationMessage {
-        application: Application::GeminiCli,
-        date: date_utc,
-        project_hash: "hash".to_string(),
-        conversation_hash: "conv_hash".to_string(),
-        local_hash: None,
-        global_hash: "global_hash".to_string(),
-        model: Some("model".to_string()),
-        stats: Stats {
-            input_tokens: 10,
-            ..Stats::default()
-        },
-        role: MessageRole::Assistant,
-        uuid: None,
-        session_name: Some("Test Session".to_string()),
-    };
-
-    let stats = AgenticCodingToolStats {
-        daily_stats: BTreeMap::new(),
-        num_conversations: 1,
-        messages: vec![msg],
-        analyzer_name: "Test".to_string(),
-    };
-
-    let sessions = aggregate_sessions_for_tool(&stats);
-    assert_eq!(sessions.len(), 1);
-    assert_eq!(sessions[0].session_id, "conv_hash");
-    assert_eq!(sessions[0].session_name, Some("Test Session".to_string()));
-    assert_eq!(sessions[0].stats.input_tokens, 10);
-}
-
-#[test]
-fn test_aggregate_sessions_multiple_same_conversation() {
-    let date1 = Utc.with_ymd_and_hms(2025, 11, 20, 2, 0, 0).unwrap();
-    let date2 = Utc.with_ymd_and_hms(2025, 11, 20, 3, 0, 0).unwrap();
-
-    let msg1 = ConversationMessage {
-        application: Application::GeminiCli,
-        date: date1,
-        project_hash: "hash".to_string(),
-        conversation_hash: "conv_hash".to_string(),
-        local_hash: None,
-        global_hash: "global_hash1".to_string(),
-        model: Some("model".to_string()),
-        stats: Stats {
-            input_tokens: 10,
-            output_tokens: 5,
-            ..Stats::default()
-        },
-        role: MessageRole::Assistant,
-        uuid: None,
-        session_name: Some("Test Session".to_string()),
-    };
-
-    let msg2 = ConversationMessage {
-        date: date2,
-        global_hash: "global_hash2".to_string(),
-        stats: Stats {
-            input_tokens: 20,
-            output_tokens: 10,
-            ..Stats::default()
-        },
-        ..msg1.clone()
-    };
-
-    let stats = AgenticCodingToolStats {
-        daily_stats: BTreeMap::new(),
-        num_conversations: 1,
-        messages: vec![msg1, msg2],
-        analyzer_name: "Test".to_string(),
-    };
-
-    let sessions = aggregate_sessions_for_tool(&stats);
-    assert_eq!(sessions.len(), 1);
-    assert_eq!(sessions[0].stats.input_tokens, 30);
-    assert_eq!(sessions[0].stats.output_tokens, 15);
-}
-
-#[test]
-fn test_aggregate_sessions_multiple_conversations() {
-    let date1 = Utc.with_ymd_and_hms(2025, 11, 20, 2, 0, 0).unwrap();
-    let date2 = Utc.with_ymd_and_hms(2025, 11, 20, 3, 0, 0).unwrap();
-
-    let msg1 = ConversationMessage {
-        application: Application::GeminiCli,
-        date: date1,
-        project_hash: "hash".to_string(),
-        conversation_hash: "conv_hash_1".to_string(),
-        local_hash: None,
-        global_hash: "global_hash1".to_string(),
-        model: Some("model".to_string()),
-        stats: Stats {
-            input_tokens: 10,
-            ..Stats::default()
-        },
-        role: MessageRole::Assistant,
-        uuid: None,
-        session_name: Some("Session 1".to_string()),
-    };
-
-    let msg2 = ConversationMessage {
-        date: date2,
-        conversation_hash: "conv_hash_2".to_string(),
-        global_hash: "global_hash2".to_string(),
-        session_name: Some("Session 2".to_string()),
-        stats: Stats {
-            input_tokens: 20,
-            ..Stats::default()
-        },
-        ..msg1.clone()
-    };
-
-    let stats = AgenticCodingToolStats {
-        daily_stats: BTreeMap::new(),
-        num_conversations: 2,
-        messages: vec![msg1, msg2],
-        analyzer_name: "Test".to_string(),
-    };
-
-    let sessions = aggregate_sessions_for_tool(&stats);
-    assert_eq!(sessions.len(), 2);
-    assert_eq!(sessions[0].session_id, "conv_hash_1");
-    assert_eq!(sessions[1].session_id, "conv_hash_2");
-}
-
-#[test]
-fn test_aggregate_sessions_user_messages_ignored() {
-    let date_utc = Utc.with_ymd_and_hms(2025, 11, 20, 2, 0, 0).unwrap();
-
-    let msg_user = ConversationMessage {
-        application: Application::GeminiCli,
-        date: date_utc,
-        project_hash: "hash".to_string(),
-        conversation_hash: "conv_hash".to_string(),
-        local_hash: None,
-        global_hash: "global_hash".to_string(),
-        model: None, // User messages have no model
-        stats: Stats {
-            input_tokens: 100,
-            ..Stats::default()
-        },
-        role: MessageRole::User,
-        uuid: None,
-        session_name: None,
-    };
-
-    let msg_assistant = ConversationMessage {
-        model: Some("model".to_string()),
-        role: MessageRole::Assistant,
-        stats: Stats {
-            input_tokens: 10,
-            ..Stats::default()
-        },
-        ..msg_user.clone()
-    };
-
-    let stats = AgenticCodingToolStats {
-        daily_stats: BTreeMap::new(),
-        num_conversations: 1,
-        messages: vec![msg_user, msg_assistant],
-        analyzer_name: "Test".to_string(),
-    };
-
-    let sessions = aggregate_sessions_for_tool(&stats);
-    // Only the assistant message should be counted
-    assert_eq!(sessions[0].stats.input_tokens, 10);
-}
-
-#[test]
-fn test_aggregate_sessions_sorting() {
-    let date_early = Utc.with_ymd_and_hms(2025, 11, 20, 2, 0, 0).unwrap();
-    let date_late = Utc.with_ymd_and_hms(2025, 11, 21, 2, 0, 0).unwrap();
-
-    let msg_late = ConversationMessage {
-        application: Application::GeminiCli,
-        date: date_late,
-        project_hash: "hash".to_string(),
-        conversation_hash: "conv_hash_late".to_string(),
-        local_hash: None,
-        global_hash: "global_hash_late".to_string(),
-        model: Some("model".to_string()),
-        stats: Stats::default(),
-        role: MessageRole::Assistant,
-        uuid: None,
-        session_name: None,
-    };
-
-    let msg_early = ConversationMessage {
-        date: date_early,
-        conversation_hash: "conv_hash_early".to_string(),
-        global_hash: "global_hash_early".to_string(),
-        ..msg_late.clone()
-    };
-
-    let stats = AgenticCodingToolStats {
-        daily_stats: BTreeMap::new(),
-        num_conversations: 2,
-        messages: vec![msg_late, msg_early], // Add late first
-        analyzer_name: "Test".to_string(),
-    };
-
-    let sessions = aggregate_sessions_for_tool(&stats);
-    // Should be sorted by timestamp, earliest first
-    assert_eq!(sessions[0].session_id, "conv_hash_early");
-    assert_eq!(sessions[1].session_id, "conv_hash_late");
-}
-
-#[test]
-fn test_aggregate_sessions_multiple_models() {
-    let date_utc = Utc.with_ymd_and_hms(2025, 11, 20, 2, 0, 0).unwrap();
-
-    let msg1 = ConversationMessage {
-        application: Application::GeminiCli,
-        date: date_utc,
-        project_hash: "hash".to_string(),
-        conversation_hash: "conv_hash".to_string(),
-        local_hash: None,
-        global_hash: "global_hash1".to_string(),
-        model: Some("model-1".to_string()),
-        stats: Stats::default(),
-        role: MessageRole::Assistant,
-        uuid: None,
-        session_name: None,
-    };
-
-    let msg2 = ConversationMessage {
-        model: Some("model-2".to_string()),
-        global_hash: "global_hash2".to_string(),
-        ..msg1.clone()
-    };
-
-    let stats = AgenticCodingToolStats {
-        daily_stats: BTreeMap::new(),
-        num_conversations: 1,
-        messages: vec![msg1, msg2],
-        analyzer_name: "Test".to_string(),
-    };
-
-    let sessions = aggregate_sessions_for_tool(&stats);
-    assert_eq!(sessions[0].models.len(), 2);
-    assert!(sessions[0].models.contains(&"model-1".to_string()));
-    assert!(sessions[0].models.contains(&"model-2".to_string()));
-}
-
-// ============================================================================
-// HAS_DATA TESTS
-// ============================================================================
-
-#[test]
-fn test_has_data_empty() {
-    let empty_stats = AgenticCodingToolStats {
-        daily_stats: BTreeMap::new(),
-        num_conversations: 0,
-        messages: vec![],
-        analyzer_name: "Test".to_string(),
-    };
-    assert!(!has_data(&empty_stats));
-}
-
-#[test]
-fn test_has_data_with_conversations() {
-    let stats_with_conv = AgenticCodingToolStats {
-        daily_stats: BTreeMap::new(),
-        num_conversations: 1,
-        messages: vec![],
-        analyzer_name: "Test".to_string(),
-    };
-    assert!(has_data(&stats_with_conv));
-}
-
-#[test]
-fn test_has_data_with_cost() {
-    let mut daily_stats = BTreeMap::new();
-    let date_key = "2025-11-20".to_string();
-    let day_stats = crate::types::DailyStats {
-        date: "2025-11-20".to_string(),
-        user_messages: 0,
-        ai_messages: 0,
-        conversations: 0,
-        models: BTreeMap::new(),
-        stats: Stats {
-            cost: 0.01,
-            ..Stats::default()
-        },
-    };
-    daily_stats.insert(date_key, day_stats);
-
-    let stats = AgenticCodingToolStats {
-        daily_stats,
-        num_conversations: 0,
-        messages: vec![],
-        analyzer_name: "Test".to_string(),
-    };
-    assert!(has_data(&stats));
-}
-
-#[test]
-fn test_has_data_with_tokens() {
-    let mut daily_stats = BTreeMap::new();
-    let date_key = "2025-11-20".to_string();
-    let day_stats = crate::types::DailyStats {
-        date: "2025-11-20".to_string(),
-        user_messages: 0,
-        ai_messages: 0,
-        conversations: 0,
-        models: BTreeMap::new(),
-        stats: Stats {
-            input_tokens: 100,
-            ..Stats::default()
-        },
-    };
-    daily_stats.insert(date_key, day_stats);
-
-    let stats = AgenticCodingToolStats {
-        daily_stats,
-        num_conversations: 0,
-        messages: vec![],
-        analyzer_name: "Test".to_string(),
-    };
-    assert!(has_data(&stats));
-}
-
-// ============================================================================
-// AGGREGATE_SESSIONS_FOR_ALL_TOOLS TESTS
-// ============================================================================
-
-#[test]
-fn test_aggregate_sessions_for_all_tools_empty() {
-    let filtered_stats: Vec<&AgenticCodingToolStats> = vec![];
-    let result = aggregate_sessions_for_all_tools(&filtered_stats);
-    assert_eq!(result.len(), 0);
-}
-
-#[test]
-fn test_aggregate_sessions_for_all_tools_single() {
-    let date_utc = Utc.with_ymd_and_hms(2025, 11, 20, 2, 0, 0).unwrap();
-    let msg = ConversationMessage {
-        application: Application::GeminiCli,
-        date: date_utc,
-        project_hash: "hash".to_string(),
-        conversation_hash: "conv_hash".to_string(),
-        local_hash: None,
-        global_hash: "global_hash".to_string(),
-        model: Some("model".to_string()),
-        stats: Stats {
-            input_tokens: 10,
-            ..Stats::default()
-        },
-        role: MessageRole::Assistant,
-        uuid: None,
-        session_name: Some("Test Session".to_string()),
-    };
-
-    let stats = AgenticCodingToolStats {
-        daily_stats: BTreeMap::new(),
-        num_conversations: 1,
-        messages: vec![msg],
-        analyzer_name: "Test".to_string(),
-    };
-
-    let filtered_stats = vec![&stats];
-    let result = aggregate_sessions_for_all_tools(&filtered_stats);
-
-    assert_eq!(result.len(), 1);
-    assert_eq!(result[0].len(), 1);
-    assert_eq!(result[0][0].session_id, "conv_hash");
-}
-
-#[test]
-fn test_aggregate_sessions_for_all_tools_multiple() {
-    let date1 = Utc.with_ymd_and_hms(2025, 11, 20, 2, 0, 0).unwrap();
-    let date2 = Utc.with_ymd_and_hms(2025, 11, 20, 3, 0, 0).unwrap();
-
-    let msg1 = ConversationMessage {
-        application: Application::GeminiCli,
-        date: date1,
-        project_hash: "hash".to_string(),
-        conversation_hash: "conv_hash_1".to_string(),
-        local_hash: None,
-        global_hash: "global_hash_1".to_string(),
-        model: Some("model".to_string()),
-        stats: Stats {
-            input_tokens: 10,
-            ..Stats::default()
-        },
-        role: MessageRole::Assistant,
-        uuid: None,
-        session_name: None,
-    };
-
-    let msg2 = ConversationMessage {
-        date: date2,
-        conversation_hash: "conv_hash_2".to_string(),
-        global_hash: "global_hash_2".to_string(),
-        ..msg1.clone()
-    };
-
-    let stats1 = AgenticCodingToolStats {
-        daily_stats: BTreeMap::new(),
-        num_conversations: 1,
-        messages: vec![msg1],
-        analyzer_name: "Claude Code".to_string(),
-    };
-
-    let stats2 = AgenticCodingToolStats {
-        daily_stats: BTreeMap::new(),
-        num_conversations: 1,
-        messages: vec![msg2],
-        analyzer_name: "Copilot".to_string(),
-    };
-
-    let filtered_stats = vec![&stats1, &stats2];
-    let result = aggregate_sessions_for_all_tools(&filtered_stats);
-
-    assert_eq!(result.len(), 2);
-    assert_eq!(result[0].len(), 1);
-    assert_eq!(result[1].len(), 1);
+    assert_eq!(dst.cost(), 0.0);
}

// ============================================================================
@@ -856,139 +286,29 @@ fn test_date_matches_month_partial_prefix() {
}

#[test]
-fn test_accumulate_stats_preserves_dst_initial_values() {
-    let mut dst = Stats {
+fn test_accumulate_tui_stats_preserves_dst_initial_values() {
+    let mut dst = TuiStats {
        input_tokens: 50,
        output_tokens: 25,
-        cost: 0.005,
-        ..Stats::default()
+        cost_cents: 1, // 0.01 dollars
+        ..TuiStats::default()
    };
    let src = Stats {
        input_tokens: 50,
        output_tokens: 25,
-        cost: 0.005,
+        cost: 0.01,
        ..Stats::default()
    };

-    accumulate_stats(&mut dst, &src);
+    accumulate_tui_stats(&mut dst, &src);

    assert_eq!(dst.input_tokens, 100);
    assert_eq!(dst.output_tokens, 50);
-    assert_eq!(dst.cost, 0.01);
+    assert_eq!(dst.cost(), 0.02);
}

#[test]
-fn test_session_aggregate_captures_earliest_timestamp() {
-    let date_late = Utc.with_ymd_and_hms(2025, 11, 21, 10, 0, 0).unwrap();
-    let date_early = Utc.with_ymd_and_hms(2025, 11, 20, 12, 0, 0).unwrap();
-
-    let msg_late = ConversationMessage {
-        application: Application::GeminiCli,
-        date: date_late,
-        project_hash: "hash".to_string(),
-        conversation_hash: "conv_hash".to_string(),
-        local_hash: None,
-        global_hash: "global_hash_late".to_string(),
-        model: Some("model".to_string()),
-        stats: Stats::default(),
-        role: MessageRole::Assistant,
-        uuid: None,
-        session_name: None,
-    };
-
-    let msg_early = ConversationMessage {
-        date: date_early,
-        global_hash: "global_hash_early".to_string(),
-        ..msg_late.clone()
-    };
-
-    let stats = AgenticCodingToolStats {
-        daily_stats: BTreeMap::new(),
-        num_conversations: 1,
-        messages: vec![msg_late, msg_early],
-        analyzer_name: "Test".to_string(),
-    };
-
-    let sessions = aggregate_sessions_for_tool(&stats);
-    assert_eq!(sessions[0].first_timestamp, date_early);
-    // The day_key is derived from local time, just verify it starts with 2025-11
-    assert!(sessions[0].day_key.starts_with("2025-11"));
-}
-
-#[test]
-fn test_aggregate_sessions_deduplicates_models() {
-    let date_utc = Utc.with_ymd_and_hms(2025, 11, 20, 2, 0, 0).unwrap();
-
-    let msg1 = ConversationMessage {
-        application: Application::GeminiCli,
-        date: date_utc,
-        project_hash: "hash".to_string(),
-        conversation_hash: "conv_hash".to_string(),
-        local_hash: None,
-        global_hash: "global_hash1".to_string(),
-        model: Some("gpt-4".to_string()),
-        stats: Stats::default(),
-        role: MessageRole::Assistant,
-        uuid: None,
-        session_name: None,
-    };
-
-    let msg2 = ConversationMessage {
-        model: Some("gpt-4".to_string()), // Same model
-        global_hash: "global_hash2".to_string(),
-        ..msg1.clone()
-    };
-
-    let msg3 = ConversationMessage {
-        model: Some("gpt-3.5".to_string()), // Different model
-        global_hash: "global_hash3".to_string(),
-        ..msg1.clone()
-    };
-
-    let stats = AgenticCodingToolStats {
-        daily_stats: BTreeMap::new(),
-        num_conversations: 1,
-        messages: vec![msg1, msg2, msg3],
-        analyzer_name: "Test".to_string(),
-    };
-
-    let sessions = aggregate_sessions_for_tool(&stats);
-    assert_eq!(sessions[0].models.len(), 2); // Only 2 unique models
-}
-
-#[test]
-fn test_session_day_key_formatting() {
-    // Use noon UTC to avoid timezone conversion causing day shift
-    let date_utc = Utc.with_ymd_and_hms(2025, 1, 5, 12, 0, 0).unwrap();
-
-    let msg = ConversationMessage {
-        application: Application::GeminiCli,
-        date: date_utc,
-        project_hash: "hash".to_string(),
-        conversation_hash: "conv_hash".to_string(),
-        local_hash: None,
-        global_hash: "global_hash".to_string(),
-        model: Some("model".to_string()),
-        stats: Stats::default(),
-        role: MessageRole::Assistant,
-        uuid: None,
-        session_name: None,
-    };
-
-    let stats = AgenticCodingToolStats {
-        daily_stats: BTreeMap::new(),
-        num_conversations: 1,
-        messages: vec![msg],
-        analyzer_name: "Test".to_string(),
-    };
-
-    let sessions = aggregate_sessions_for_tool(&stats);
-    // The day_key is in YYYY-MM-DD format and based on local time
-    assert!(sessions[0].day_key.starts_with("2025-01"));
-}
-
#[test]
-fn test_large_accumulation() {
-    let mut dst = Stats::default();
+fn test_large_tui_stats_accumulation() {
+    let mut dst = TuiStats::default();
    for _ in 0..1000 {
        let src = Stats {
            input_tokens: 100,
@@ -996,12 +316,12 @@ fn test_large_accumulation() {
            cost: 0.01,
            ..Stats::default()
        };
-        accumulate_stats(&mut dst, &src);
+        accumulate_tui_stats(&mut dst, &src);
    }

    assert_eq!(dst.input_tokens, 100_000);
    assert_eq!(dst.output_tokens, 50_000);
-    assert!((dst.cost - 10.0).abs() < 0.0001);
+    assert!((dst.cost() - 10.0).abs() < 0.01);
}

// ============================================================================
@@ -1009,133 +329,27 @@ fn test_large_accumulation() {
// ============================================================================

#[test]
-fn test_accumulated_stats_correctness() {
-    let mut dst = Stats::default();
+fn test_accumulated_tui_stats_correctness() {
+    let mut dst = TuiStats::default();
    let src = Stats {
        input_tokens: 150,
        output_tokens: 75,
        reasoning_tokens: 50,
        cost: 0.025,
        tool_calls: 5,
-        terminal_commands: 2,
-        files_read: 10,
-        lines_added: 250,
+        // File operation fields not tracked in TuiStats
        ..Stats::default()
    };

-    accumulate_stats(&mut dst, &src);
-    accumulate_stats(&mut dst, &src);
+    accumulate_tui_stats(&mut dst, &src);
+    accumulate_tui_stats(&mut dst, &src);

-    // Verify accumulated stats
+    // Verify accumulated TUI stats (only the 6 display fields)
    assert_eq!(dst.input_tokens, 300);
    assert_eq!(dst.output_tokens, 150);
    assert_eq!(dst.reasoning_tokens, 100);
    assert_eq!(dst.tool_calls, 10);
-    assert_eq!(dst.terminal_commands, 4);
-    assert_eq!(dst.files_read, 20);
-    assert_eq!(dst.lines_added, 500);
-    assert!((dst.cost - 0.05).abs() < 0.0001);
-}
-
-#[test]
-fn test_session_aggregate_correctness() {
-    let date_utc = Utc.with_ymd_and_hms(2025, 11, 20, 12, 0, 0).unwrap();
-
-    let msg1 = ConversationMessage {
-        application: Application::ClaudeCode,
-        date: date_utc,
-        project_hash: "proj123".to_string(),
-        conversation_hash: "conv123".to_string(),
-        local_hash: None,
-        global_hash: "global123".to_string(),
-        model: Some("claude-3-5-sonnet".to_string()),
-        stats: Stats {
-            input_tokens: 500,
-            output_tokens: 250,
-            cost: 0.05,
-            ..Stats::default()
-        },
-        role: MessageRole::Assistant,
-        uuid: None,
-        session_name: Some("Bug Fix Session".to_string()),
-    };
-
-    let stats = AgenticCodingToolStats {
-        daily_stats: BTreeMap::new(),
-        num_conversations: 1,
-        messages: vec![msg1],
-        analyzer_name: "Claude Code".to_string(),
-    };
-
-    let sessions = aggregate_sessions_for_tool(&stats);
-
-    // Verify session aggregate correctness
-    assert_eq!(sessions.len(), 1);
-    assert_eq!(sessions[0].session_id, "conv123");
-    assert_eq!(sessions[0].analyzer_name, "Claude Code");
-    assert_eq!(
-        sessions[0].session_name,
-        Some("Bug Fix Session".to_string())
-    );
-    assert_eq!(sessions[0].models, vec!["claude-3-5-sonnet".to_string()]);
-    assert_eq!(sessions[0].stats.input_tokens, 500);
-    assert_eq!(sessions[0].stats.output_tokens, 250);
-    assert!((sessions[0].stats.cost - 0.05).abs() < 0.0001);
-}
-
-#[test]
-fn test_multi_session_aggregation_correctness() {
-    let date1 = Utc.with_ymd_and_hms(2025, 11, 20, 12, 0, 0).unwrap();
-    let date2 = Utc.with_ymd_and_hms(2025, 11, 21, 12, 0, 0).unwrap();
-
-    let msg1 = ConversationMessage {
-        application: Application::ClaudeCode,
-        date: date1,
-        project_hash: "proj".to_string(),
-        conversation_hash: "conv1".to_string(),
-        local_hash: None,
-        global_hash: "global1".to_string(),
-        model: Some("claude-3-5-sonnet".to_string()),
-        stats: Stats {
-            input_tokens: 100,
-            output_tokens: 50,
-            ..Stats::default()
-        },
-        role: MessageRole::Assistant,
-        uuid: None,
-        session_name: Some("Session 1".to_string()),
-    };
-
-    let msg2 = ConversationMessage {
-        date: date2,
-        conversation_hash: "conv2".to_string(),
-        global_hash: "global2".to_string(),
-        session_name: Some("Session 2".to_string()),
-        stats: Stats {
-            input_tokens: 200,
-            output_tokens: 100,
-            ..Stats::default()
-        },
-        ..msg1.clone()
-    };
-
-    let stats = AgenticCodingToolStats {
-        daily_stats: BTreeMap::new(),
-        num_conversations: 2,
-        messages: vec![msg1, msg2],
-        analyzer_name: "Claude Code".to_string(),
-    };
-
-    let sessions = aggregate_sessions_for_tool(&stats);
-
-    // Verify multiple sessions
-    assert_eq!(sessions.len(), 2);
-    assert_eq!(sessions[0].session_id, "conv1");
-    assert_eq!(sessions[0].stats.input_tokens, 100);
-    assert_eq!(sessions[0].stats.output_tokens, 50);
-    assert_eq!(sessions[1].session_id, "conv2");
-    assert_eq!(sessions[1].stats.input_tokens, 200);
-    assert_eq!(sessions[1].stats.output_tokens, 100);
+    assert!((dst.cost() - 0.05).abs() < 0.01);
}

// ============================================================================
@@ -1169,216 +383,9 @@ fn test_date_filter_exclusions() {
    assert!(!date_matches_buffer("2025-12-31", "2024"));
}

-// ============================================================================
-// TUI LOOP INTEGRATION TESTS
-// ============================================================================
-
-#[test]
-fn test_tui_quit_behavior() {
-    let stats = make_tool_stats("with-data", true);
-    let multi = MultiAnalyzerStats {
-        analyzer_stats: vec![stats],
-    };
-
-    let events = vec![Event::Key(KeyEvent::new(
-        KeyCode::Char('q'),
-        KeyModifiers::empty(),
-    ))];
-
-    let result = run_tui_with_events(multi, events, 10);
-    assert_eq!(result.selected_tab, 0);
-    assert_eq!(result.stats_view_mode, StatsViewMode::Daily);
-}
-
-#[test]
-fn test_tui_tab_switch_and_session_toggle() {
-    let multi = make_multi_two_tools();
-
-    let events = vec![
-        Event::Key(KeyEvent::new(KeyCode::Right, KeyModifiers::empty())),
-        Event::Key(KeyEvent::new(KeyCode::Char('t'), KeyModifiers::CONTROL)),
-        Event::Key(KeyEvent::new(KeyCode::Char('q'), KeyModifiers::empty())),
-    ];
-
-    let result = run_tui_with_events(multi, events, 50);
-    assert_eq!(result.selected_tab, 1);
-    assert_eq!(result.stats_view_mode, StatsViewMode::Session);
-}
-
-#[test]
-fn test_tui_date_jump_behavior() {
-    let multi = make_multi_single_tool_two_days();
-
-    let events = vec![
-        Event::Key(KeyEvent::new(KeyCode::Char('/'), KeyModifiers::empty())),
-        Event::Key(KeyEvent::new(KeyCode::Char('2'), KeyModifiers::empty())),
-        Event::Key(KeyEvent::new(KeyCode::Char('0'), KeyModifiers::empty())),
-        Event::Key(KeyEvent::new(KeyCode::Char('2'), KeyModifiers::empty())),
-        Event::Key(KeyEvent::new(KeyCode::Char('5'), KeyModifiers::empty())),
-        Event::Key(KeyEvent::new(KeyCode::Char('-'), KeyModifiers::empty())),
-        Event::Key(KeyEvent::new(KeyCode::Char('0'), KeyModifiers::empty())),
-        Event::Key(KeyEvent::new(KeyCode::Char('2'), KeyModifiers::empty())),
-        Event::Key(KeyEvent::new(KeyCode::Enter, KeyModifiers::empty())),
-        Event::Key(KeyEvent::new(KeyCode::Char('q'), KeyModifiers::empty())),
-    ];
-
-    let result = run_tui_with_events(multi, events, 80);
-    assert_eq!(result.stats_view_mode, StatsViewMode::Daily);
-}
-
-#[test]
-fn test_tui_toggle_summary_panel() {
-    let stats = make_tool_stats("with-data", true);
-    let multi = MultiAnalyzerStats {
-        analyzer_stats: vec![stats],
-    };
-
-    // Press 's' twice (toggle off, toggle on) then quit
-    let events = vec![
-        Event::Key(KeyEvent::new(KeyCode::Char('s'), KeyModifiers::empty())),
-        Event::Key(KeyEvent::new(KeyCode::Char('s'), KeyModifiers::empty())),
-        Event::Key(KeyEvent::new(KeyCode::Char('q'), KeyModifiers::empty())),
-    ];
-
-    // The test passes if run_tui_with_events completes without panic
-    // The toggle state is internal, but we verify the key handling works
-    let result = run_tui_with_events(multi, events, 50);
-    assert_eq!(result.selected_tab, 0);
-    assert_eq!(result.stats_view_mode, StatsViewMode::Daily);
-}
-
#[test]
-fn test_tui_drill_into_session_with_enter() {
-    let multi = make_multi_single_tool_two_days();
-
-    let events = vec![
-        Event::Key(KeyEvent::new(KeyCode::Down, KeyModifiers::empty())),
-        Event::Key(KeyEvent::new(KeyCode::Enter, KeyModifiers::empty())),
-        Event::Key(KeyEvent::new(KeyCode::Char('q'), KeyModifiers::empty())),
-    ];
-
-    let result = run_tui_with_events(multi, events, 80);
-    assert_eq!(result.stats_view_mode, StatsViewMode::Session);
-}
-
-#[test]
-fn test_model_deduplication_in_session() {
-    let date_utc = Utc.with_ymd_and_hms(2025, 11, 20, 12, 0, 0).unwrap();
-
-    let models = vec!["gpt-4", "gpt-3.5", "gpt-4", "claude", "gpt-3.5"];
-    let messages: Vec<_> = models
-        .into_iter()
-        .enumerate()
-        .map(|(i, model)| ConversationMessage {
-            application: Application::ClaudeCode,
-            date: date_utc,
-            project_hash: "hash".to_string(),
-            conversation_hash: "conv".to_string(),
-            local_hash: None,
-            global_hash: format!("hash_{}", i),
-            model: Some(model.to_string()),
-            stats: Stats::default(),
-            role: MessageRole::Assistant,
-            uuid: None,
-            session_name: None,
-        })
-        .collect();
-
-    let stats = AgenticCodingToolStats {
-        daily_stats: BTreeMap::new(),
-        num_conversations: 1,
-        messages,
-        analyzer_name: "Test".to_string(),
-    };
-
-    let sessions = aggregate_sessions_for_tool(&stats);
-    // Should have 3 unique models: gpt-4, gpt-3.5, claude
-    assert_eq!(sessions[0].models.len(), 3);
-    assert!(sessions[0].models.contains(&"gpt-4".to_string()));
-    assert!(sessions[0].models.contains(&"gpt-3.5".to_string()));
-    assert!(sessions[0].models.contains(&"claude".to_string()));
-}
-
-#[test]
-fn test_session_filtering_by_date_range() {
-    let date1 = Utc.with_ymd_and_hms(2025, 11, 15, 12, 0, 0).unwrap();
-    let date2 = Utc.with_ymd_and_hms(2025, 11, 20, 12, 0, 0).unwrap();
-    let date3 = Utc.with_ymd_and_hms(2025, 12, 1, 12, 0, 0).unwrap();
-
-    let messages = vec![
-        ConversationMessage {
-            application: Application::ClaudeCode,
-            date: date1,
-            project_hash: "hash".to_string(),
-            conversation_hash: "conv1".to_string(),
-            local_hash: None,
-            global_hash: "global1".to_string(),
-            model: Some("model".to_string()),
-            stats: Stats::default(),
-            role: MessageRole::Assistant,
-            uuid: None,
-            session_name: Some("Nov 15".to_string()),
-        },
-        ConversationMessage {
-            date: date2,
-            conversation_hash: "conv2".to_string(),
-            global_hash: "global2".to_string(),
-            session_name: Some("Nov 20".to_string()),
-            ..ConversationMessage {
-                application: Application::ClaudeCode,
-                date: date1,
-                project_hash: "hash".to_string(),
-                conversation_hash: "conv1".to_string(),
-                local_hash: None,
-                global_hash: "global1".to_string(),
-                model: Some("model".to_string()),
-                stats: Stats::default(),
-                role: MessageRole::Assistant,
-                uuid: None,
-                session_name: Some("Nov 15".to_string()),
-            }
-        },
-        ConversationMessage {
-            date: date3,
-            conversation_hash: "conv3".to_string(),
-            global_hash: "global3".to_string(),
-            session_name: Some("Dec 01".to_string()),
-            ..ConversationMessage {
-                application: Application::ClaudeCode,
-                date: date1,
-                project_hash: "hash".to_string(),
-                conversation_hash: "conv1".to_string(),
-                local_hash: None,
-                global_hash: "global1".to_string(),
-                model: Some("model".to_string()),
-                stats: Stats::default(),
-                role: MessageRole::Assistant,
-                uuid: None,
-                session_name: Some("Nov 15".to_string()),
-            }
-        },
-    ];
-
-    let stats = AgenticCodingToolStats {
-        daily_stats: BTreeMap::new(),
-        num_conversations: 3,
-        messages,
-        analyzer_name: "Test".to_string(),
-    };
-
-    let sessions = aggregate_sessions_for_tool(&stats);
-    assert_eq!(sessions.len(), 3);
-
-    // November sessions should match
-    assert!(date_matches_buffer(&sessions[0].day_key, "11"));
-    assert!(date_matches_buffer(&sessions[1].day_key, "11"));
-    // December session should not match
-    assert!(!date_matches_buffer(&sessions[2].day_key, "11"));
-}
-
#[test]
-fn test_stats_accumulation_with_multiple_analyzers() {
-    let mut dst = Stats::default();
+fn test_tui_stats_accumulation_with_multiple_analyzers() {
+    let mut dst = TuiStats::default();
    let src1 = Stats {
        input_tokens: 100,
        output_tokens: 50,
@@ -1394,144 +401,11 @@ fn test_stats_accumulation_with_multiple_analyzers() {
        ..Stats::default()
    };

-    accumulate_stats(&mut dst, &src1);
-    accumulate_stats(&mut dst, &src2);
+    accumulate_tui_stats(&mut dst, &src1);
+    accumulate_tui_stats(&mut dst, &src2);

    assert_eq!(dst.input_tokens, 300);
    assert_eq!(dst.output_tokens, 150);
    assert_eq!(dst.tool_calls, 6);
-    assert!((dst.cost - 0.03).abs() < 0.0001);
-}
-
-#[test]
-fn test_empty_analysis_state() {
-    let stats = AgenticCodingToolStats {
-        daily_stats: BTreeMap::new(),
-        num_conversations: 0,
-        messages: vec![],
-        analyzer_name: "Empty Analyzer".to_string(),
-    };
-
-    // has_data should return false for empty stats
-    assert!(!has_data(&stats));
-
-    // aggregate_sessions should return empty vec
-    let sessions = aggregate_sessions_for_tool(&stats);
-    assert_eq!(sessions.len(), 0);
-}
-
-#[test]
-fn test_single_message_single_session_state() {
-    let date_utc = Utc.with_ymd_and_hms(2025, 11, 20, 12, 0, 0).unwrap();
-    let msg = ConversationMessage {
-        application: Application::ClaudeCode,
-        date: date_utc,
-        project_hash: "hash".to_string(),
-        conversation_hash: "conv".to_string(),
-        local_hash: None,
-        global_hash: "global".to_string(),
-        model: Some("model".to_string()),
-        stats: Stats {
-            input_tokens: 50,
-            output_tokens: 25,
-            cost: 0.005,
-            ..Stats::default()
-        },
-        role: MessageRole::Assistant,
-        uuid: None,
-        session_name: Some("Single Message Session".to_string()),
-    };
-
-    let stats = AgenticCodingToolStats {
-        daily_stats: BTreeMap::new(),
-        num_conversations: 1,
-        messages: vec![msg],
-        analyzer_name: "Test".to_string(),
-    };
-
-    // Verify state
-    assert!(has_data(&stats));
-    let sessions = aggregate_sessions_for_tool(&stats);
-    assert_eq!(sessions.len(), 1);
-    assert_eq!(sessions[0].stats.input_tokens, 50);
-    assert_eq!(
-        sessions[0].session_name,
-        Some("Single Message Session".to_string())
-    );
-}
-
-// ============================================================================
-// REVERSE SORT ('r' KEY) INTEGRATION TESTS
-// ============================================================================
-
-#[test]
-fn test_tui_r_key_toggles_sort_order() {
-    // make_multi_single_tool_two_days has "2025-01-01" and "2025-02-01"
-    // Normal: row 0 = "2025-01-01", Reversed: row 0 = "2025-02-01"
-
-    // Toggle once and drill - selection stays at row 0, but item changes
-    let events = vec![
-        Event::Key(KeyEvent::new(KeyCode::Char('r'), KeyModifiers::empty())),
-        Event::Key(KeyEvent::new(KeyCode::Enter, KeyModifiers::empty())),
-        Event::Key(KeyEvent::new(KeyCode::Char('q'), KeyModifiers::empty())),
-    ];
-    let result = run_tui_with_events(make_multi_single_tool_two_days(), events, 50);
-    assert!(result.test_state.sort_reversed);
-    assert_eq!(
-        result.test_state.selected_rows[0],
-        Some(0),
-        "Selection should stay at row 0"
-    );
-    assert_eq!(
-        result.test_state.session_day_filters[0],
-        Some("2025-02-01".to_string()),
-        "Row 0 should now be latest date after reverse"
-    );
-
-    // Toggle twice and drill - back to normal order
-    let events = vec![
-        Event::Key(KeyEvent::new(KeyCode::Char('r'), KeyModifiers::empty())),
-        Event::Key(KeyEvent::new(KeyCode::Char('r'), KeyModifiers::empty())),
-        Event::Key(KeyEvent::new(KeyCode::Enter, KeyModifiers::empty())),
-        Event::Key(KeyEvent::new(KeyCode::Char('q'), KeyModifiers::empty())),
-    ];
-    let result = run_tui_with_events(make_multi_single_tool_two_days(), events, 50);
-    assert!(!result.test_state.sort_reversed);
-    assert_eq!(result.test_state.selected_rows[0], Some(0));
-    assert_eq!(
-        result.test_state.session_day_filters[0],
-        Some("2025-01-01".to_string()),
-        "Row 0 should be earliest date after double toggle"
-    );
-}
-
-#[test]
-fn test_tui_drill_selects_correct_day_for_sort_order() {
-    // make_multi_single_tool_two_days has dates "2025-01-01" and "2025-02-01"
-    // Normal: row 0 = earliest, Reversed: row 0 = latest
-
-    // Normal order - first row is earliest
-    let events = vec![
-        Event::Key(KeyEvent::new(KeyCode::Enter, KeyModifiers::empty())),
-        Event::Key(KeyEvent::new(KeyCode::Char('q'), KeyModifiers::empty())),
-    ];
-    let result = run_tui_with_events(make_multi_single_tool_two_days(), events, 80);
-    assert_eq!(
-        result.test_state.session_day_filters[0],
-        Some("2025-01-01".to_string()),
-        "Normal order: first row should be earliest date"
-    );
-
-    // Reversed order - first row is latest
-    let events = vec![
-        Event::Key(KeyEvent::new(KeyCode::Char('r'), KeyModifiers::empty())),
-        Event::Key(KeyEvent::new(KeyCode::Enter, KeyModifiers::empty())),
-        Event::Key(KeyEvent::new(KeyCode::Char('q'), KeyModifiers::empty())),
-    ];
-    let result = run_tui_with_events(make_multi_single_tool_two_days(), events, 80);
-    assert_eq!(
-        result.test_state.session_day_filters[0],
-        Some("2025-02-01".to_string()),
-        "Reversed order: first row should be latest date"
-    );
+    assert!((dst.cost() - 0.03).abs() < 0.01);
}
diff --git a/src/types.rs b/src/types.rs
index d32e5df..0b1325f 100644
--- a/src/types.rs
+++ b/src/types.rs
@@ -1,7 +1,209 @@
use std::collections::BTreeMap;
+use std::fmt;
+use std::sync::Arc;

use chrono::{DateTime, Utc};
+use parking_lot::RwLock;
use serde::{Deserialize, Serialize};
+use tinyvec::TinyVec;
+
+use crate::cache::ModelKey;
+use crate::tui::logic::aggregate_sessions_from_messages;
+
+// Re-export interning functions for convenience
+pub use crate::cache::{intern_model, resolve_model};
+
+// ============================================================================
+// CompactDate - Compact date representation (4 bytes, no heap allocation)
+// ============================================================================
+
+/// Compact representation of a date in "YYYY-MM-DD" format.
+/// Stored as year (u16) + month (u8) + day (u8) = 4 bytes total.
+#[derive(Debug, Clone, Copy, Default, PartialEq, Eq, Hash)]
+pub struct CompactDate {
+    year: u16,
+    month: u8,
+    day: u8,
+}
+
+impl CompactDate {
+    /// Create a CompactDate directly from a DateTime<Utc> (in local timezone).
+    #[inline]
+    pub fn from_local(dt: &DateTime<Utc>) -> Self {
+        use chrono::{Datelike, Local};
+        let local = dt.with_timezone(&Local);
+        Self {
+            year: local.year() as u16,
+            month: local.month() as u8,
+            day: local.day() as u8,
+        }
+    }
+
+    /// Create a CompactDate from individual components.
+    #[inline]
+    pub fn from_parts(year: u16, month: u8, day: u8) -> Self {
+        Self { year, month, day }
+    }
+
+    /// Get the year component.
+    #[inline]
+    pub fn year(&self) -> u16 {
+        self.year
+    }
+
+    /// Get the month component (1-12).
+    #[inline]
+    pub fn month(&self) -> u8 {
+        self.month
+    }
+
+    /// Get the day component (1-31).
+    #[inline]
+    pub fn day(&self) -> u8 {
+        self.day
+    }
+
+    /// Create a CompactDate from a "YYYY-MM-DD" string.
+    #[inline]
+    pub fn from_str(s: &str) -> Option<Self> {
+        Self::parse(s).map(|(year, month, day)| Self { year, month, day })
+    }
+
+    /// Parse a date string, returning None if invalid format.
+    #[inline]
+    fn parse(s: &str) -> Option<(u16, u8, u8)> {
+        if s.len() != 10 {
+            return None;
+        }
+        let bytes = s.as_bytes();
+        if bytes[4] != b'-' || bytes[7] != b'-' {
+            return None;
+        }
+        let year = (bytes[0].wrapping_sub(b'0') as u16)
+            .checked_mul(1000)?
+            .checked_add((bytes[1].wrapping_sub(b'0') as u16).checked_mul(100)?)?
+            .checked_add((bytes[2].wrapping_sub(b'0') as u16).checked_mul(10)?)?
+            .checked_add(bytes[3].wrapping_sub(b'0') as u16)?;
+        let month = (bytes[5].wrapping_sub(b'0'))
+            .checked_mul(10)?
+            .checked_add(bytes[6].wrapping_sub(b'0'))?;
+        let day = (bytes[8].wrapping_sub(b'0'))
+            .checked_mul(10)?
+            .checked_add(bytes[9].wrapping_sub(b'0'))?;
+        Some((year, month, day))
+    }
+}
+
+impl Serialize for CompactDate {
+    fn serialize<S: serde::Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> {
+        serializer.serialize_str(&self.to_string())
+    }
+}
+
+impl<'de> Deserialize<'de> for CompactDate {
+    fn deserialize<D: serde::Deserializer<'de>>(deserializer: D) -> Result<Self, D::Error> {
+        let s = String::deserialize(deserializer)?;
+        Self::from_str(&s).ok_or_else(|| serde::de::Error::custom("invalid date format"))
+    }
+}
+
+impl Ord for CompactDate {
+    #[inline]
+    fn cmp(&self, other: &Self) -> std::cmp::Ordering {
+        (self.year, self.month, self.day).cmp(&(other.year, other.month, other.day))
+    }
+}
+
+impl PartialOrd for CompactDate {
+    #[inline]
+    fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
+        Some(self.cmp(other))
+    }
+}
+
+impl fmt::Display for CompactDate {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        write!(f, "{:04}-{:02}-{:02}", self.year, self.month, self.day)
+    }
+}
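A quick sketch of the claims in the `CompactDate` doc comment, assuming the type above is in scope: it fits in 4 bytes, and the `(year, month, day)` tuple ordering agrees with lexicographic order of the string form:

```rust
assert_eq!(std::mem::size_of::<CompactDate>(), 4);

let a = CompactDate::from_str("2025-01-31").unwrap();
let b = CompactDate::from_str("2025-02-01").unwrap();
assert!(a < b); // tuple comparison matches string ordering
assert_eq!(b.to_string(), "2025-02-01"); // Display round-trips the format
assert_eq!(CompactDate::from_str("2025-1-1"), None); // wrong length is rejected
```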
+
+// ============================================================================
+// ModelCounts - Compact reference-counted model tracking
+// ============================================================================
+
+/// Compact model reference counts with inline storage for up to 3 models.
+/// Provides a map-like interface over a TinyVec for memory efficiency.
+/// Spills to heap if more than 3 models are added.
+#[derive(Debug, Clone, Default)]
+pub struct ModelCounts(TinyVec<[(ModelKey, u32); 3]>);
+
+impl ModelCounts {
+    /// Create an empty ModelCounts.
+    #[inline]
+    pub fn new() -> Self {
+        Self(TinyVec::new())
+    }
+
+    /// Increment the count for a model, inserting with count if not present.
+    #[inline]
+    pub fn increment(&mut self, key: ModelKey, count: u32) {
+        if let Some((_, c)) = self.0.iter_mut().find(|(k, _)| *k == key) {
+            *c += count;
+        } else {
+            self.0.push((key, count));
+        }
+    }
+
+    /// Decrement the count for a model, removing it if count reaches zero.
+    #[inline]
+    pub fn decrement(&mut self, key: ModelKey, count: u32) {
+        if let Some((_, c)) = self.0.iter_mut().find(|(k, _)| *k == key) {
+            *c = c.saturating_sub(count);
+        }
+        self.0.retain(|(_, c)| *c > 0);
+    }
+
+    /// Get the count for a model, returning None if not present.
+    #[inline]
+    pub fn get(&self, key: ModelKey) -> Option<u32> {
+        self.0.iter().find(|(k, _)| *k == key).map(|(_, c)| *c)
+    }
+
+    /// Iterate over (model, count) pairs.
+    #[inline]
+    pub fn iter(&self) -> impl Iterator<Item = &(ModelKey, u32)> {
+        self.0.iter()
+    }
+
+    /// Create with a single model entry.
+    #[inline]
+    pub fn from_single(key: ModelKey, count: u32) -> Self {
+        let mut s = Self::new();
+        s.0.push((key, count));
+        s
+    }
+}
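A sketch of why the counts are reference counts rather than a plain set: an incremental update can subtract one file's contribution without losing a model that other messages in the session still use. This assumes the `intern_model` re-export from this diff:

```rust
let k_sonnet = intern_model("claude-sonnet");
let k_haiku = intern_model("claude-haiku");

let mut counts = ModelCounts::new();
counts.increment(k_sonnet, 2); // two messages used sonnet
counts.increment(k_haiku, 1);  // one message used haiku

// Re-parsing removes the stale haiku message; the entry drops at zero.
counts.decrement(k_haiku, 1);
assert_eq!(counts.get(k_haiku), None);
assert_eq!(counts.get(k_sonnet), Some(2)); // sonnet is unaffected
```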
+
+// ============================================================================
+// SessionAggregate
+// ============================================================================
+
+/// Pre-computed session aggregate for TUI display.
+/// Contains aggregated stats per conversation session.
+/// Note: Not serialized - view-only type for TUI. Uses `Arc<str>` for memory efficiency.
+#[derive(Debug, Clone)]
+pub struct SessionAggregate {
+    pub session_id: String,
+    pub first_timestamp: DateTime<Utc>,
+    /// Shared across all sessions from the same analyzer (Arc clone is cheap)
+    pub analyzer_name: Arc<str>,
+    pub stats: TuiStats,
+    /// Reference-counted model names for correct incremental update subtraction.
+    /// Inline storage for up to 3 models; interned keys are 4 bytes each.
+    pub models: ModelCounts,
+    pub session_name: Option<String>,
+    pub date: CompactDate,
+}

#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "snake_case")]
@@ -52,15 +254,48 @@ pub struct ConversationMessage {
    pub session_name: Option<String>,
}

+/// Daily statistics for TUI display.
+/// Note: This struct only contains fields displayed in the TUI. File operation stats
+/// (files_read, files_edited, etc.) are not included here - they are computed on-demand
+/// from raw messages when needed (e.g., in the MCP server).
#[derive(Debug, Clone, Default, Serialize, Deserialize)]
pub struct DailyStats {
-    #[allow(dead_code)]
-    pub date: String,
+    pub date: CompactDate,
    pub user_messages: u32,
    pub ai_messages: u32,
    pub conversations: u32,
+    /// Reference-counted model occurrences for correct incremental update subtraction.
    pub models: BTreeMap<String, u32>,
-    pub stats: Stats,
+    pub stats: TuiStats,
+}
+
+impl std::ops::AddAssign<&DailyStats> for DailyStats {
+    fn add_assign(&mut self, rhs: &DailyStats) {
+        self.user_messages += rhs.user_messages;
+        self.ai_messages += rhs.ai_messages;
+        self.conversations += rhs.conversations;
+        for (model, count) in &rhs.models {
+            *self.models.entry(model.clone()).or_insert(0) += count;
+        }
+        self.stats += rhs.stats;
+    }
+}
+
+impl std::ops::SubAssign<&DailyStats> for DailyStats {
+    fn sub_assign(&mut self, rhs: &DailyStats) {
+        self.user_messages = self.user_messages.saturating_sub(rhs.user_messages);
+        self.ai_messages = self.ai_messages.saturating_sub(rhs.ai_messages);
+        self.conversations = self.conversations.saturating_sub(rhs.conversations);
+        for (model, count) in &rhs.models {
+            if let Some(existing) = self.models.get_mut(model) {
+                *existing = existing.saturating_sub(*count);
+                if *existing == 0 {
+                    self.models.remove(model);
+                }
+            }
+        }
+        self.stats -= rhs.stats;
+    }
}
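These operators exist for the incremental-update path: when a watched log file is re-parsed, its previous per-day contribution is subtracted and the fresh one added, instead of rebuilding every day's stats from scratch. A sketch of that flow (the function name is hypothetical):

```rust
fn reapply_file(day: &mut DailyStats, old: &DailyStats, new: &DailyStats) {
    *day -= old; // saturating; model entries whose count reaches zero are dropped
    *day += new;
}
```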
+
+// ============================================================================
+// SessionAggregate
+// ============================================================================
+
+/// Pre-computed session aggregate for TUI display.
+/// Contains aggregated stats per conversation session.
+/// Note: Not serialized - view-only type for TUI. Uses `Arc<str>` for memory efficiency.
+#[derive(Debug, Clone)]
+pub struct SessionAggregate {
+    pub session_id: String,
+    pub first_timestamp: DateTime<Utc>,
+    /// Shared across all sessions from the same analyzer (Arc clone is cheap)
+    pub analyzer_name: Arc<str>,
+    pub stats: TuiStats,
+    /// Reference-counted model names for correct incremental update subtraction.
+    /// Inline storage for up to 3 models; interned keys are 4 bytes each.
+    pub models: ModelCounts,
+    pub session_name: Option<String>,
+    pub date: CompactDate,
+}
 
 #[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
 #[serde(rename_all = "snake_case")]
@@ -52,15 +254,48 @@ pub struct ConversationMessage {
     pub session_name: Option<String>,
 }
 
+/// Daily statistics for TUI display.
+/// Note: This struct only contains fields displayed in the TUI. File operation stats
+/// (files_read, files_edited, etc.) are not included here - they are computed on demand
+/// from raw messages when needed (e.g., in the MCP server).
 #[derive(Debug, Clone, Default, Serialize, Deserialize)]
 pub struct DailyStats {
-    #[allow(dead_code)]
-    pub date: String,
+    pub date: CompactDate,
     pub user_messages: u32,
     pub ai_messages: u32,
     pub conversations: u32,
+    /// Reference-counted model occurrences for correct incremental update subtraction.
     pub models: BTreeMap<String, u32>,
-    pub stats: Stats,
+    pub stats: TuiStats,
+}
+
+impl std::ops::AddAssign<&DailyStats> for DailyStats {
+    fn add_assign(&mut self, rhs: &DailyStats) {
+        self.user_messages += rhs.user_messages;
+        self.ai_messages += rhs.ai_messages;
+        self.conversations += rhs.conversations;
+        for (model, count) in &rhs.models {
+            *self.models.entry(model.clone()).or_insert(0) += count;
+        }
+        self.stats += rhs.stats;
+    }
+}
+
+impl std::ops::SubAssign<&DailyStats> for DailyStats {
+    fn sub_assign(&mut self, rhs: &DailyStats) {
+        self.user_messages = self.user_messages.saturating_sub(rhs.user_messages);
+        self.ai_messages = self.ai_messages.saturating_sub(rhs.ai_messages);
+        self.conversations = self.conversations.saturating_sub(rhs.conversations);
+        for (model, count) in &rhs.models {
+            if let Some(existing) = self.models.get_mut(model) {
+                *existing = existing.saturating_sub(*count);
+                if *existing == 0 {
+                    self.models.remove(model);
+                }
+            }
+        }
+        self.stats -= rhs.stats;
+    }
 }
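+// Illustrative sketch (not part of the original change): the incremental-update
+// pattern these operators enable - subtract a file's stale contribution, then
+// add its freshly parsed one. The variable names below are hypothetical.
+//
+//     day_total -= &old_file_contribution;
+//     day_total += &new_file_contribution;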
 
 #[derive(Debug, Clone, Default, Serialize, Deserialize)]
@@ -119,6 +354,159 @@ pub enum FileCategory {
     Other,
 }
 
+impl std::ops::AddAssign for Stats {
+    fn add_assign(&mut self, rhs: Self) {
+        self.input_tokens += rhs.input_tokens;
+        self.output_tokens += rhs.output_tokens;
+        self.reasoning_tokens += rhs.reasoning_tokens;
+        self.cache_creation_tokens += rhs.cache_creation_tokens;
+        self.cache_read_tokens += rhs.cache_read_tokens;
+        self.cached_tokens += rhs.cached_tokens;
+        self.cost += rhs.cost;
+        self.tool_calls += rhs.tool_calls;
+        self.terminal_commands += rhs.terminal_commands;
+        self.file_searches += rhs.file_searches;
+        self.file_content_searches += rhs.file_content_searches;
+        self.files_read += rhs.files_read;
+        self.files_added += rhs.files_added;
+        self.files_edited += rhs.files_edited;
+        self.files_deleted += rhs.files_deleted;
+        self.lines_read += rhs.lines_read;
+        self.lines_added += rhs.lines_added;
+        self.lines_edited += rhs.lines_edited;
+        self.lines_deleted += rhs.lines_deleted;
+        self.bytes_read += rhs.bytes_read;
+        self.bytes_added += rhs.bytes_added;
+        self.bytes_edited += rhs.bytes_edited;
+        self.bytes_deleted += rhs.bytes_deleted;
+        self.todos_created += rhs.todos_created;
+        self.todos_completed += rhs.todos_completed;
+        self.todos_in_progress += rhs.todos_in_progress;
+        self.todo_writes += rhs.todo_writes;
+        self.todo_reads += rhs.todo_reads;
+        self.code_lines += rhs.code_lines;
+        self.docs_lines += rhs.docs_lines;
+        self.data_lines += rhs.data_lines;
+        self.media_lines += rhs.media_lines;
+        self.config_lines += rhs.config_lines;
+        self.other_lines += rhs.other_lines;
+    }
+}
+
+impl std::ops::SubAssign for Stats {
+    fn sub_assign(&mut self, rhs: Self) {
+        self.input_tokens = self.input_tokens.saturating_sub(rhs.input_tokens);
+        self.output_tokens = self.output_tokens.saturating_sub(rhs.output_tokens);
+        self.reasoning_tokens = self.reasoning_tokens.saturating_sub(rhs.reasoning_tokens);
+        self.cache_creation_tokens = self
+            .cache_creation_tokens
+            .saturating_sub(rhs.cache_creation_tokens);
+        self.cache_read_tokens = self.cache_read_tokens.saturating_sub(rhs.cache_read_tokens);
+        self.cached_tokens = self.cached_tokens.saturating_sub(rhs.cached_tokens);
+        self.cost -= rhs.cost;
+        self.tool_calls = self.tool_calls.saturating_sub(rhs.tool_calls);
+        self.terminal_commands = self.terminal_commands.saturating_sub(rhs.terminal_commands);
+        self.file_searches = self.file_searches.saturating_sub(rhs.file_searches);
+        self.file_content_searches = self
+            .file_content_searches
+            .saturating_sub(rhs.file_content_searches);
+        self.files_read = self.files_read.saturating_sub(rhs.files_read);
+        self.files_added = self.files_added.saturating_sub(rhs.files_added);
+        self.files_edited = self.files_edited.saturating_sub(rhs.files_edited);
+        self.files_deleted = self.files_deleted.saturating_sub(rhs.files_deleted);
+        self.lines_read = self.lines_read.saturating_sub(rhs.lines_read);
+        self.lines_added = self.lines_added.saturating_sub(rhs.lines_added);
+        self.lines_edited = self.lines_edited.saturating_sub(rhs.lines_edited);
+        self.lines_deleted = self.lines_deleted.saturating_sub(rhs.lines_deleted);
+        self.bytes_read = self.bytes_read.saturating_sub(rhs.bytes_read);
+        self.bytes_added = self.bytes_added.saturating_sub(rhs.bytes_added);
+        self.bytes_edited = self.bytes_edited.saturating_sub(rhs.bytes_edited);
+        self.bytes_deleted = self.bytes_deleted.saturating_sub(rhs.bytes_deleted);
+        self.todos_created = self.todos_created.saturating_sub(rhs.todos_created);
+        self.todos_completed = self.todos_completed.saturating_sub(rhs.todos_completed);
+        self.todos_in_progress = self.todos_in_progress.saturating_sub(rhs.todos_in_progress);
+        self.todo_writes = self.todo_writes.saturating_sub(rhs.todo_writes);
+        self.todo_reads = self.todo_reads.saturating_sub(rhs.todo_reads);
+        self.code_lines = self.code_lines.saturating_sub(rhs.code_lines);
+        self.docs_lines = self.docs_lines.saturating_sub(rhs.docs_lines);
+        self.data_lines = self.data_lines.saturating_sub(rhs.data_lines);
+        self.media_lines = self.media_lines.saturating_sub(rhs.media_lines);
+        self.config_lines = self.config_lines.saturating_sub(rhs.config_lines);
+        self.other_lines = self.other_lines.saturating_sub(rhs.other_lines);
+    }
+}
+
+/// Lightweight stats for TUI display only (24 bytes vs 320 bytes for full Stats).
+/// Contains only fields actually rendered in the UI.
+/// Uses u32 for memory efficiency - sufficient for per-session and per-day values.
+#[derive(Debug, Clone, Copy, Default, PartialEq, Serialize, Deserialize)]
+#[serde(rename_all = "camelCase")]
+pub struct TuiStats {
+    pub input_tokens: u32,
+    pub output_tokens: u32,
+    pub reasoning_tokens: u32,
+    pub cached_tokens: u32,
+    pub cost_cents: u32, // Store as cents to avoid f32 precision issues
+    pub tool_calls: u32,
+}
+
+impl TuiStats {
+    /// Get the cost as f64 dollars for display
+    #[inline]
+    pub fn cost(&self) -> f64 {
+        self.cost_cents as f64 / 100.0
+    }
+
+    /// Set the cost from f64 dollars
+    #[inline]
+    pub fn set_cost(&mut self, dollars: f64) {
+        self.cost_cents = (dollars * 100.0).round() as u32;
+    }
+
+    /// Add cost from f64 dollars
+    #[inline]
+    pub fn add_cost(&mut self, dollars: f64) {
+        self.cost_cents = self
+            .cost_cents
+            .saturating_add((dollars * 100.0).round() as u32);
+    }
+}
+
+impl From<&Stats> for TuiStats {
+    fn from(s: &Stats) -> Self {
+        TuiStats {
+            input_tokens: s.input_tokens as u32,
+            output_tokens: s.output_tokens as u32,
+            reasoning_tokens: s.reasoning_tokens as u32,
+            cached_tokens: s.cached_tokens as u32,
+            cost_cents: (s.cost * 100.0).round() as u32,
+            tool_calls: s.tool_calls,
+        }
+    }
+}
+
+impl std::ops::AddAssign for TuiStats {
+    fn add_assign(&mut self, rhs: Self) {
+        self.input_tokens = self.input_tokens.saturating_add(rhs.input_tokens);
+        self.output_tokens = self.output_tokens.saturating_add(rhs.output_tokens);
+        self.reasoning_tokens = self.reasoning_tokens.saturating_add(rhs.reasoning_tokens);
+        self.cached_tokens = self.cached_tokens.saturating_add(rhs.cached_tokens);
+        self.cost_cents = self.cost_cents.saturating_add(rhs.cost_cents);
+        self.tool_calls = self.tool_calls.saturating_add(rhs.tool_calls);
+    }
+}
+
+impl std::ops::SubAssign for TuiStats {
+    fn sub_assign(&mut self, rhs: Self) {
+        self.input_tokens = self.input_tokens.saturating_sub(rhs.input_tokens);
+        self.output_tokens = self.output_tokens.saturating_sub(rhs.output_tokens);
+        self.reasoning_tokens = self.reasoning_tokens.saturating_sub(rhs.reasoning_tokens);
+        self.cached_tokens = self.cached_tokens.saturating_sub(rhs.cached_tokens);
+        self.cost_cents = self.cost_cents.saturating_sub(rhs.cost_cents);
+        self.tool_calls = self.tool_calls.saturating_sub(rhs.tool_calls);
+    }
+}
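+// Illustrative sketch (not part of the original change): the cents-based cost
+// API rounds to the nearest cent on write and converts back to dollars on read.
+//
+//     let mut t = TuiStats::default();
+//     t.set_cost(1.234);          // stored as 123 cents (rounded)
+//     t.add_cost(0.006);          // rounds to +1 cent -> 124
+//     assert_eq!(t.cost(), 1.24);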
 
 impl FileCategory {
     pub fn from_extension(ext: &str) -> Self {
         match ext.to_lowercase().as_str() {
@@ -153,6 +541,60 @@ pub struct MultiAnalyzerStats {
     pub analyzer_stats: Vec<AgenticCodingToolStats>,
 }
 
+/// Lightweight view for TUI - NO raw messages, only pre-computed aggregates.
+/// Saves a lot of memory by not storing each message.
+/// Note: Not serialized - view-only type for TUI. Uses `Arc<str>` for memory efficiency.
+#[derive(Debug, Clone)]
+pub struct AnalyzerStatsView {
+    pub daily_stats: BTreeMap<String, DailyStats>,
+    pub session_aggregates: Vec<SessionAggregate>,
+    pub num_conversations: u64,
+    /// Shared analyzer name - the same Arc is used by all SessionAggregates
+    pub analyzer_name: Arc<str>,
+}
+
+/// Shared view type - `Arc<RwLock<AnalyzerStatsView>>` allows mutation without cloning.
+pub type SharedAnalyzerView = Arc<RwLock<AnalyzerStatsView>>;
+
+/// Container for TUI display - view-only stats without messages.
+/// Uses `Arc<RwLock<AnalyzerStatsView>>` to share AnalyzerStatsView across caches and channels.
+/// The RwLock enables in-place mutation without cloning during incremental updates.
+#[derive(Debug, Clone)]
+pub struct MultiAnalyzerStatsView {
+    pub analyzer_stats: Vec<SharedAnalyzerView>,
+}
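+// Illustrative sketch (not part of the original change): in-place mutation
+// through the shared view, assuming parking_lot's RwLock (guards release on drop,
+// no poisoning) as used by the tests below.
+//
+//     fn bump(view: &SharedAnalyzerView) {
+//         let mut v = view.write(); // exclusive access without cloning the view
+//         v.num_conversations += 1;
+//     }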
+
+impl AgenticCodingToolStats {
+    /// Convert full stats to a lightweight view, consuming self.
+    /// Messages are dropped; session_aggregates are pre-computed.
+    /// Returns a SharedAnalyzerView for efficient sharing and in-place mutation.
+    pub fn into_view(self) -> SharedAnalyzerView {
+        // Convert analyzer_name to an Arc<str> once, shared across all sessions
+        let analyzer_name: Arc<str> = Arc::from(self.analyzer_name);
+        let session_aggregates =
+            aggregate_sessions_from_messages(&self.messages, Arc::clone(&analyzer_name));
+        Arc::new(RwLock::new(AnalyzerStatsView {
+            daily_stats: self.daily_stats,
+            session_aggregates,
+            num_conversations: self.num_conversations,
+            analyzer_name,
+        }))
+    }
+}
+
+impl MultiAnalyzerStats {
+    /// Convert to the view type, consuming self and dropping all messages.
+    pub fn into_view(self) -> MultiAnalyzerStatsView {
+        MultiAnalyzerStatsView {
+            analyzer_stats: self
+                .analyzer_stats
+                .into_iter()
+                .map(|s| s.into_view())
+                .collect(),
+        }
+    }
+}
+
 #[derive(Debug, Deserialize)]
 pub struct UploadResponse {
     pub success: bool,
@@ -168,6 +610,7 @@ pub struct ErrorResponse {
 #[cfg(test)]
 mod tests {
     use super::*;
+    use chrono::{TimeZone, Utc};
 
     #[test]
     fn file_category_classifies_extensions() {
@@ -205,4 +648,221 @@ mod tests {
         assert_eq!(stats.tool_calls, 0);
         assert_eq!(stats.code_lines, 0);
     }
+
+    fn sample_message(date_str: &str, conv_hash: &str) -> ConversationMessage {
+        let date = chrono::NaiveDate::parse_from_str(date_str, "%Y-%m-%d")
+            .unwrap()
+            .and_hms_opt(12, 0, 0)
+            .unwrap();
+        ConversationMessage {
+            application: Application::ClaudeCode,
+            date: Utc.from_utc_datetime(&date),
+            project_hash: "proj".into(),
+            conversation_hash: conv_hash.into(),
+            local_hash: None,
+            global_hash: format!("global_{}", conv_hash),
+            model: Some("claude-3-5-sonnet".into()),
+            stats: Stats {
+                input_tokens: 100,
+                output_tokens: 50,
+                cost: 0.01,
+                ..Stats::default()
+            },
+            role: MessageRole::Assistant,
+            uuid: None,
+            session_name: Some("Test Session".into()),
+        }
+    }
+
+    #[test]
+    fn into_view_converts_stats_correctly() {
+        let stats = AgenticCodingToolStats {
+            daily_stats: BTreeMap::new(),
+            num_conversations: 2,
+            messages: vec![
+                sample_message("2025-01-01", "conv1"),
+                sample_message("2025-01-02", "conv2"),
+            ],
+            analyzer_name: "Test".into(),
+        };
+
+        let view = stats.into_view();
+        let v = view.read();
+
+        assert_eq!(&*v.analyzer_name, "Test");
+        assert_eq!(v.num_conversations, 2);
+        assert_eq!(v.session_aggregates.len(), 2);
+    }
+
+    #[test]
+    fn multi_analyzer_stats_into_view() {
+        let multi = MultiAnalyzerStats {
+            analyzer_stats: vec![AgenticCodingToolStats {
+                daily_stats: BTreeMap::new(),
+                num_conversations: 1,
+                messages: vec![sample_message("2025-01-01", "conv1")],
+                analyzer_name: "Analyzer1".into(),
+            }],
+        };
+
+        let view = multi.into_view();
+
+        assert_eq!(view.analyzer_stats.len(), 1);
+        assert_eq!(&*view.analyzer_stats[0].read().analyzer_name, "Analyzer1");
+    }
+
+    // ========================================================================
+    // Model reference counting tests
+    // ========================================================================
+
+    #[test]
+    fn daily_stats_model_ref_counting() {
+        let mut stats = DailyStats::default();
+        let day1 = DailyStats {
+            models: [("gpt-4".into(), 2)].into_iter().collect(),
+            ..Default::default()
+        };
+        let day2 = DailyStats {
+            models: [("gpt-4".into(), 1), ("claude".into(), 1)]
+                .into_iter()
+                .collect(),
+            ..Default::default()
+        };
+
+        stats += &day1;
+        assert_eq!(stats.models.get("gpt-4"), Some(&2));
+
+        stats += &day2;
+        assert_eq!(stats.models.get("gpt-4"), Some(&3));
+        assert_eq!(stats.models.get("claude"), Some(&1));
+
+        stats -= &day1;
+        assert_eq!(stats.models.get("gpt-4"), Some(&1));
+        assert_eq!(stats.models.get("claude"), Some(&1));
+
+        stats -= &day2;
+        assert_eq!(stats.models.get("gpt-4"), None); // removed at 0
+        assert_eq!(stats.models.get("claude"), None);
+    }
+
+    fn make_session_contrib(
+        session_id: &str,
+        model: &str,
+        count: u32,
+    ) -> crate::contribution_cache::MultiSessionContribution {
+        crate::contribution_cache::MultiSessionContribution {
+            session_aggregates: vec![SessionAggregate {
+                session_id: session_id.into(),
+                first_timestamp: Utc.with_ymd_and_hms(2025, 1, 1, 0, 0, 0).unwrap(),
+                analyzer_name: Arc::from("Test"),
+                stats: TuiStats::default(),
+                models: ModelCounts::from_single(intern_model(model), count),
+                session_name: None,
+                date: CompactDate::default(),
+            }],
+            ..Default::default()
+        }
+    }
+
+    fn empty_view() -> AnalyzerStatsView {
+        AnalyzerStatsView {
+            daily_stats: BTreeMap::new(),
+            session_aggregates: Vec::new(),
+            num_conversations: 0,
+            analyzer_name: Arc::from("Test"),
+        }
+    }
+
+    /// Helper to get a model count
+    fn get_model_count(models: &ModelCounts, key: ModelKey) -> Option<u32> {
+        models.get(key)
+    }
+
+    #[test]
+    fn session_model_ref_counting() {
+        let mut view = empty_view();
+        let model_key = intern_model("claude-3-5-sonnet");
+
+        view.add_multi_session_contribution(&make_session_contrib("s1", "claude-3-5-sonnet", 1));
+        assert_eq!(
+            get_model_count(&view.session_aggregates[0].models, model_key),
+            Some(1)
+        );
+
+        view.add_multi_session_contribution(&make_session_contrib("s1", "claude-3-5-sonnet", 2));
+        assert_eq!(
+            get_model_count(&view.session_aggregates[0].models, model_key),
+            Some(3)
+        );
+
+        view.subtract_multi_session_contribution(&make_session_contrib(
+            "s1",
+            "claude-3-5-sonnet",
+            1,
+        ));
+        assert_eq!(
+            get_model_count(&view.session_aggregates[0].models, model_key),
+            Some(2)
+        );
+
+        view.subtract_multi_session_contribution(&make_session_contrib(
+            "s1",
+            "claude-3-5-sonnet",
+            2,
+        ));
+        assert_eq!(
+            get_model_count(&view.session_aggregates[0].models, model_key),
+            None
+        );
+    }
+
+    #[test]
+    fn session_model_persists_after_partial_subtraction() {
+        // Scenario: two files contribute the same model to the same session
+        let mut view = empty_view();
+        let model_key = intern_model("gpt-4");
+
+        view.add_multi_session_contribution(&make_session_contrib("s1", "gpt-4", 1)); // file1
+        view.add_multi_session_contribution(&make_session_contrib("s1", "gpt-4", 1)); // file2
+        assert_eq!(
+            get_model_count(&view.session_aggregates[0].models, model_key),
+            Some(2)
+        );
+
+        view.subtract_multi_session_contribution(&make_session_contrib("s1", "gpt-4", 1)); // remove file1
+        assert_eq!(
+            get_model_count(&view.session_aggregates[0].models, model_key),
+            Some(1)
+        );
+        // file2 remains
+    }
+
+    #[test]
+    fn session_multiple_models() {
+        let mut view = empty_view();
+        let mut contrib = make_session_contrib("s1", "gpt-4", 1);
+        contrib.session_aggregates[0]
+            .models
+            .increment(intern_model("claude"), 2);
+
+        view.add_multi_session_contribution(&contrib);
+        assert_eq!(
+            get_model_count(&view.session_aggregates[0].models, intern_model("gpt-4")),
+            Some(1)
+        );
+        assert_eq!(
+            get_model_count(&view.session_aggregates[0].models, intern_model("claude")),
+            Some(2)
+        );
+
+        view.subtract_multi_session_contribution(&make_session_contrib("s1", "gpt-4", 1));
+        assert_eq!(
+            get_model_count(&view.session_aggregates[0].models, intern_model("gpt-4")),
+            None
+        );
+        assert_eq!(
+            get_model_count(&view.session_aggregates[0].models, intern_model("claude")),
+            Some(2)
+        );
+    }
 }
diff --git a/src/upload.rs b/src/upload.rs
index 081ffd1..1a8681a 100644
--- a/src/upload.rs
+++ b/src/upload.rs
@@ -4,8 +4,9 @@ use crate::tui::UploadStatus;
 use crate::types::{ConversationMessage, ErrorResponse, MultiAnalyzerStats, UploadResponse};
 use crate::utils;
 use anyhow::{Context, Result};
+use parking_lot::Mutex;
 use std::path::PathBuf;
-use std::sync::{Arc, Mutex, OnceLock};
+use std::sync::{Arc, OnceLock};
 use std::time::{Duration, Instant};
 
 fn upload_log_path() -> PathBuf {
@@ -230,20 +231,71 @@ where
     Ok(())
 }
 
-pub async fn perform_background_upload(
-    stats: MultiAnalyzerStats,
+/// Helper to set the upload status atomically.
+fn set_upload_status(status: &Option<Arc<Mutex<UploadStatus>>>, value: UploadStatus) {
+    if let Some(status) = status {
+        *status.lock() = value;
+    }
+}
+
+/// Creates an upload progress callback that updates the TUI status.
+fn make_progress_callback(
     upload_status: Option<Arc<Mutex<UploadStatus>>>,
-    initial_delay_ms: Option<u64>,
+) -> impl FnMut(usize, usize) {
+    move |current, total| {
+        if let Some(ref status) = upload_status {
+            let mut s = status.lock();
+            match &*s {
+                UploadStatus::Uploading { dots, .. } => {
+                    *s = UploadStatus::Uploading {
+                        current,
+                        total,
+                        dots: *dots,
+                    };
+                }
+                _ => {
+                    *s = UploadStatus::Uploading {
+                        current,
+                        total,
+                        dots: 0,
+                    };
+                }
+            }
+        }
+    }
+}
+
+/// Handles the result of an upload operation, updating the status accordingly.
+async fn handle_upload_result(
+    result: Option<Result<()>>,
+    upload_status: &Option<Arc<Mutex<UploadStatus>>>,
 ) {
-    // Helper to set status
-    fn set_status(status: &Option<Arc<Mutex<UploadStatus>>>, value: UploadStatus) {
-        if let Some(status) = status
-            && let Ok(mut s) = status.lock()
-        {
-            *s = value;
+    match result {
+        Some(Ok(_)) => {
+            set_upload_status(upload_status, UploadStatus::Uploaded);
+            // Hide the success message after 3 seconds
+            tokio::time::sleep(Duration::from_secs(3)).await;
+            set_upload_status(upload_status, UploadStatus::None);
+        }
+        Some(Err(e)) => {
+            // Keep error messages visible permanently
+            set_upload_status(upload_status, UploadStatus::Failed(format!("{e:#}")));
         }
+        None => (), // Config not available or nothing to upload
     }
+}
 
+/// Upload pre-filtered messages directly (used for incremental uploads from the watcher).
+/// This is more efficient than loading all stats and filtering afterwards.
+/// If `on_success` is provided, it is called after a successful upload.
+pub async fn perform_background_upload_messages<F>(
+    messages: Vec<ConversationMessage>,
+    upload_status: Option<Arc<Mutex<UploadStatus>>>,
+    initial_delay_ms: Option<u64>,
+    on_success: Option<F>,
+) where
+    F: FnOnce(),
+{
     // Optional initial delay
     if let Some(delay) = initial_delay_ms {
         tokio::time::sleep(Duration::from_millis(delay)).await;
     }
@@ -255,60 +307,65 @@
         return None;
     }
 
-        let mut messages = vec![];
-        for analyzer_stats in stats.analyzer_stats {
-            messages.extend(analyzer_stats.messages);
+        if messages.is_empty() {
+            return Some(Ok(())); // Nothing to upload
         }
 
-        let messages = utils::get_messages_later_than(config.upload.last_date_uploaded, messages)
-            .await
-            .ok()?;
+        let result = upload_message_stats(
+            &messages,
+            &mut config,
+            make_progress_callback(upload_status.clone()),
+        )
+        .await;
 
-        if messages.is_empty() {
-            return Some(Ok(())); // Nothing new to upload
+        // Call the on_success callback if the upload succeeded
+        if result.is_ok()
+            && let Some(callback) = on_success
+        {
+            callback();
         }
 
-        Some(
-            upload_message_stats(&messages, &mut config, |current, total| {
-                // Update upload progress
-                if let Some(ref status) = upload_status
-                    && let Ok(mut s) = status.lock()
-                {
-                    match &*s {
-                        UploadStatus::Uploading { dots, .. } => {
-                            *s = UploadStatus::Uploading {
-                                current,
-                                total,
-                                dots: *dots,
-                            };
-                        }
-                        _ => {
-                            *s = UploadStatus::Uploading {
-                                current,
-                                total,
-                                dots: 0,
-                            };
-                        }
-                    }
-                }
-            })
-            .await,
-        )
+        Some(result)
     }
     .await;
 
-    match upload_result {
-        Some(Ok(_)) => {
-            set_status(&upload_status, UploadStatus::Uploaded);
-            // Hide success message after 3 seconds
-            tokio::time::sleep(Duration::from_secs(3)).await;
-            set_status(&upload_status, UploadStatus::None);
-        }
-        Some(Err(e)) => {
-            // Keep error messages visible permanently
-            set_status(&upload_status, UploadStatus::Failed(format!("{e:#}")));
+    handle_upload_result(upload_result, &upload_status).await;
+}
+
+/// Upload stats from all analyzers (used for the initial startup upload).
+/// Filters messages by the last upload timestamp before uploading.
+pub async fn perform_background_upload(
+    stats: MultiAnalyzerStats,
+    upload_status: Option<Arc<Mutex<UploadStatus>>>,
+    initial_delay_ms: Option<u64>,
+) {
+    // Optional initial delay
+    if let Some(delay) = initial_delay_ms {
+        tokio::time::sleep(Duration::from_millis(delay)).await;
+    }
+
+    // Load the config and filter messages
+    let messages = async {
+        let config = Config::load().ok().flatten()?;
+        if !config.is_configured() {
+            return None;
         }
-        None => (), // Config not available or nothing to upload
+
+        let all_messages: Vec<_> = stats
+            .analyzer_stats
+            .into_iter()
+            .flat_map(|s| s.messages)
+            .collect();
+
+        utils::get_messages_later_than(config.upload.last_date_uploaded, all_messages)
+            .await
+            .ok()
+    }
+    .await;
+
+    if let Some(msgs) = messages {
+        // Delegate to the messages-based upload (no additional delay, no callback)
+        perform_background_upload_messages::<fn()>(msgs, upload_status, None, None).await;
+    }
 }
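+// Illustrative call site (not part of the original change; names hypothetical):
+// incremental upload of watcher-filtered messages with a success hook.
+//
+//     perform_background_upload_messages(
+//         changed_messages,            // pre-filtered by the watcher
+//         Some(status.clone()),        // TUI status handle
+//         None,                        // no startup delay
+//         Some(|| mark_files_clean()), // runs only after a successful upload
+//     )
+//     .await;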
diff --git a/src/upload/tests.rs b/src/upload/tests.rs
index 78d7515..97da9d8 100644
--- a/src/upload/tests.rs
+++ b/src/upload/tests.rs
@@ -4,11 +4,12 @@ use crate::types::{
     Stats,
 };
 use chrono::Utc;
+use parking_lot::Mutex;
 use std::collections::BTreeMap;
 use std::io::ErrorKind;
 use std::path::PathBuf;
+use std::sync::Arc;
 use std::sync::atomic::{AtomicUsize, Ordering};
-use std::sync::{Arc, Mutex};
 use tempfile::TempDir;
 use tokio::io::{AsyncReadExt, AsyncWriteExt};
 use tokio::net::TcpListener;
@@ -142,15 +143,14 @@ async fn upload_message_stats_success_updates_progress_and_config() {
     let progress_values_clone = progress_values.clone();
 
     upload_message_stats(&messages, &mut config, move |current, total| {
-        let mut guard = progress_values_clone.lock().unwrap();
-        guard.push((current, total));
+        progress_values_clone.lock().push((current, total));
     })
     .await
     .expect("upload should succeed");
 
     assert_eq!(request_counter.load(Ordering::SeqCst), 1);
 
-    let recorded = progress_values.lock().unwrap();
+    let recorded = progress_values.lock();
     assert!(!recorded.is_empty(), "progress callback should be called");
     let (final_current, final_total) = recorded[recorded.len() - 1];
     assert_eq!(final_total, messages.len());
@@ -308,7 +308,7 @@ async fn perform_background_upload_no_config_keeps_status_unchanged() {
 
     perform_background_upload(stats, Some(status_clone), None).await;
 
-    let final_status = status.lock().unwrap().clone();
+    let final_status = status.lock().clone();
     assert!(
         matches!(final_status, UploadStatus::MissingConfig),
         "status should remain unchanged when config is missing"
@@ -332,7 +332,7 @@ async fn perform_background_upload_unconfigured_config_keeps_status_unchanged() {
 
     perform_background_upload(stats, Some(status_clone), None).await;
 
-    let final_status = status.lock().unwrap().clone();
+    let final_status = status.lock().clone();
     assert!(
         matches!(final_status, UploadStatus::MissingApiToken),
        "status should remain unchanged when config is not fully configured"
@@ -376,7 +376,7 @@ async fn perform_background_upload_propagates_upload_errors_to_status() {
 
     assert_eq!(request_counter.load(Ordering::SeqCst), 1);
 
-    let final_status = status.lock().unwrap().clone();
+    let final_status = status.lock().clone();
     match final_status {
         UploadStatus::Failed(msg) => {
             assert!(
diff --git a/src/utils.rs b/src/utils.rs
index 363c026..24e09b5 100644
--- a/src/utils.rs
+++ b/src/utils.rs
@@ -1,14 +1,15 @@
 use std::collections::{BTreeMap, HashSet};
-use std::sync::{Mutex, OnceLock};
+use std::sync::OnceLock;
 
 use anyhow::Result;
 use chrono::{DateTime, Datelike, Local, Utc};
 use num_format::{Locale, ToFormattedString};
+use parking_lot::Mutex;
 use serde::{Deserialize, Deserializer};
 use sha2::{Digest, Sha256};
 use xxhash_rust::xxh3::xxh3_64;
 
-use crate::types::{ConversationMessage, DailyStats};
+use crate::types::{CompactDate, ConversationMessage, DailyStats};
 
 static WARNED_MESSAGES: OnceLock<Mutex<HashSet<String>>> = OnceLock::new();
 
@@ -16,9 +17,7 @@ pub fn warn_once(message: impl Into<String>) {
     let message = message.into();
     let cache = WARNED_MESSAGES.get_or_init(|| Mutex::new(HashSet::new()));
 
-    if let Ok(mut warned) = cache.lock()
-        && warned.insert(message.clone())
-    {
+    if cache.lock().insert(message.clone()) {
         eprintln!("{message}");
     }
 }
@@ -31,7 +30,9 @@ pub struct NumberFormatOptions {
     pub decimal_places: usize,
 }
 
-pub fn format_number(n: u64, options: &NumberFormatOptions) -> String {
+/// Format a number for display. Accepts both u32 and u64.
+pub fn format_number(n: impl Into<u64>, options: &NumberFormatOptions) -> String {
+    let n: u64 = n.into();
     let locale = match options.locale.as_str() {
         "de" => Locale::de,
         "fr" => Locale::fr,
@@ -102,6 +103,7 @@ pub fn format_date_for_display(date: &str) -> String {
     }
 }
 
+// TODO: Don't use strings here; it's wasteful.
 pub fn aggregate_by_date(entries: &[ConversationMessage]) -> BTreeMap<String, DailyStats> {
     let mut daily_stats: BTreeMap<String, DailyStats> = BTreeMap::new();
     let mut conversation_start_dates: BTreeMap<String, String> = BTreeMap::new();
@@ -126,7 +128,7 @@ pub fn aggregate_by_date(entries: &[ConversationMessage]) -> BTreeMap<String, DailyStats> {
-            // User message
+            // User message - no TUI-relevant stats to aggregate
             daily_stats_entry.user_messages += 1;
-
-            // Aggregate user stats too (mostly todo-related)
-            daily_stats_entry.stats.todos_created += entry.stats.todos_created;
-            daily_stats_entry.stats.todos_completed += entry.stats.todos_completed;
-            daily_stats_entry.stats.todos_in_progress += entry.stats.todos_in_progress;
-            daily_stats_entry.stats.todo_writes += entry.stats.todo_writes;
-            daily_stats_entry.stats.todo_reads += entry.stats.todo_reads;
         }
     };
 }
 
@@ -229,7 +211,7 @@ pub fn aggregate_by_date(entries: &[ConversationMessage]) -> BTreeMap<String, DailyStats> {
 ) -> String {
     format!("{:016x}", xxh3_64(text.as_bytes()))
 }
 
-/// Parallel deduplication by global_hash using DashMap.
-/// Used by copilot, cline, roo_code, kilo_code analyzers.
-pub fn deduplicate_by_global_hash_parallel(
-    messages: Vec<ConversationMessage>,
-) -> Vec<ConversationMessage> {
-    use dashmap::DashMap;
-    use rayon::iter::{IntoParallelIterator, ParallelIterator};
+/// Sequential deduplication by global_hash using HashSet.
+/// Used for post-init processing (incremental updates, uploads).
+pub fn deduplicate_by_global_hash(messages: Vec<ConversationMessage>) -> Vec<ConversationMessage> {
+    use std::collections::HashSet;
 
-    let seen: DashMap<String, ()> = DashMap::with_capacity(messages.len() / 2);
+    let mut seen: HashSet<String> = HashSet::with_capacity(messages.len() / 2);
 
     messages
-        .into_par_iter()
-        .filter(|msg| seen.insert(msg.global_hash.clone(), ()).is_none())
+        .into_iter()
+        .filter(|msg| seen.insert(msg.global_hash.clone()))
         .collect()
 }
 
-/// Parallel deduplication by local_hash using DashMap.
-/// Used by gemini_cli, qwen_code analyzers.
+/// Sequential deduplication by local_hash using HashSet.
 /// Messages without local_hash are always kept.
-pub fn deduplicate_by_local_hash_parallel(
-    messages: Vec<ConversationMessage>,
-) -> Vec<ConversationMessage> {
-    use dashmap::DashMap;
-    use rayon::iter::{IntoParallelIterator, ParallelIterator};
+/// Used for post-init processing (incremental updates, uploads).
+pub fn deduplicate_by_local_hash(messages: Vec<ConversationMessage>) -> Vec<ConversationMessage> {
+    use std::collections::HashSet;
 
-    let seen: DashMap<String, ()> = DashMap::with_capacity(messages.len() / 2);
+    let mut seen: HashSet<String> = HashSet::with_capacity(messages.len() / 2);
 
     messages
-        .into_par_iter()
+        .into_iter()
         .filter(|msg| {
             if let Some(local_hash) = &msg.local_hash {
-                seen.insert(local_hash.clone(), ()).is_none()
+                seen.insert(local_hash.clone())
             } else {
                 true // Always keep messages without local_hash
             }
@@ -316,7 +292,7 @@ pub fn deduplicate_by_local_hash_parallel(
         .collect()
 }
 
-/// Custom serde deserializer for RFC3339 timestamp strings to DateTime<Utc>
+/// Custom serde deserializer for RFC3339 timestamp strings to `DateTime<Utc>`
 pub fn deserialize_utc_timestamp<'de, D>(deserializer: D) -> Result<DateTime<Utc>, D::Error>
 where
     D: Deserializer<'de>,
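+// Illustrative sketch (not part of the original change): the dedup helpers above
+// keep the first occurrence and preserve input order, because HashSet::insert
+// returns false for repeats. With hypothetical messages a, a_dup, b where a and
+// a_dup share a global_hash:
+//
+//     let kept = deduplicate_by_global_hash(vec![a, a_dup, b]);
+//     assert_eq!(kept.len(), 2); // the first `a` and `b` survive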
diff --git a/src/utils/tests.rs b/src/utils/tests.rs
index 2008f16..3aa3b22 100644
--- a/src/utils/tests.rs
+++ b/src/utils/tests.rs
@@ -1,6 +1,7 @@
 use super::*;
 use crate::types::{ConversationMessage, MessageRole, Stats};
 use chrono::{TimeZone, Utc};
+use std::collections::HashSet;
 
 #[test]
 fn test_format_number_comma() {
@@ -11,9 +12,9 @@ fn test_format_number_comma() {
         decimal_places: 2,
     };
 
-    assert_eq!(format_number(1000, &options), "1,000");
-    assert_eq!(format_number(1000000, &options), "1,000,000");
-    assert_eq!(format_number(123, &options), "123");
+    assert_eq!(format_number(1000_u64, &options), "1,000");
+    assert_eq!(format_number(1000000_u64, &options), "1,000,000");
+    assert_eq!(format_number(123_u64, &options), "123");
 }
 
 #[test]
@@ -25,11 +26,11 @@ fn test_format_number_human() {
         decimal_places: 1,
     };
 
-    assert_eq!(format_number(100, &options), "100");
-    assert_eq!(format_number(1500, &options), "1.5k");
-    assert_eq!(format_number(1_500_000, &options), "1.5m");
-    assert_eq!(format_number(1_500_000_000, &options), "1.5b");
-    assert_eq!(format_number(1_500_000_000_000, &options), "1.5t");
+    assert_eq!(format_number(100_u64, &options), "100");
+    assert_eq!(format_number(1500_u64, &options), "1.5k");
+    assert_eq!(format_number(1_500_000_u64, &options), "1.5m");
+    assert_eq!(format_number(1_500_000_000_u64, &options), "1.5b");
+    assert_eq!(format_number(1_500_000_000_000_u64, &options), "1.5t");
 }
 
 #[test]
@@ -41,7 +42,7 @@ fn test_format_number_plain() {
         decimal_places: 2,
     };
 
-    assert_eq!(format_number(1000, &options), "1000");
+    assert_eq!(format_number(1000_u64, &options), "1000");
 }
 
 #[test]
@@ -138,7 +139,7 @@ fn test_aggregate_by_date_basic() {
     assert_eq!(stats.ai_messages, 1);
     assert_eq!(stats.conversations, 1);
     assert_eq!(stats.stats.input_tokens, 100);
-    assert_eq!(stats.stats.cost, 0.01);
+    assert_eq!(stats.stats.cost(), 0.01);
 }
 
 #[test]
@@ -420,7 +421,7 @@ fn test_filter_zero_cost_messages_negative_cost() {
 // =============================================================================
 
 #[test]
-fn test_deduplicate_by_global_hash_parallel() {
+fn test_deduplicate_by_global_hash() {
     let date = Utc.with_ymd_and_hms(2025, 1, 15, 12, 0, 0).unwrap();
 
     let msg1 = ConversationMessage {
@@ -450,19 +451,18 @@ fn test_deduplicate_by_global_hash_parallel() {
     };
 
     let messages = vec![msg1, msg2, msg3];
-    let result = deduplicate_by_global_hash_parallel(messages);
+    let result = deduplicate_by_global_hash(messages);
 
     // Should have 2 unique entries (same_hash and different_hash)
     assert_eq!(result.len(), 2);
 
-    let hashes: std::collections::HashSet<_> =
-        result.iter().map(|m| m.global_hash.as_str()).collect();
+    let hashes: HashSet<_> = result.iter().map(|m| m.global_hash.as_str()).collect();
     assert!(hashes.contains("same_hash"));
     assert!(hashes.contains("different_hash"));
 }
 
 #[test]
-fn test_deduplicate_by_local_hash_parallel() {
+fn test_deduplicate_by_local_hash() {
     let date = Utc.with_ymd_and_hms(2025, 1, 15, 12, 0, 0).unwrap();
 
     let msg1 = ConversationMessage {
@@ -492,7 +492,7 @@
     };
 
     let messages = vec![msg1, msg2, msg3];
-    let result = deduplicate_by_local_hash_parallel(messages);
+    let result = deduplicate_by_local_hash(messages);
 
     // Should have 2 unique entries
     assert_eq!(result.len(), 2);
@@ -529,7 +529,7 @@ fn test_deduplicate_keeps_messages_without_local_hash() {
     };
 
     let messages = vec![msg_with_hash, msg_no_hash1, msg_no_hash2];
-    let result = deduplicate_by_local_hash_parallel(messages);
+    let result = deduplicate_by_local_hash(messages);
 
     // All 3 should be kept (1 with hash, 2 without hash)
     assert_eq!(result.len(), 3);
diff --git a/src/version_check.rs b/src/version_check.rs
index b776bfd..7d60f17 100644
--- a/src/version_check.rs
+++ b/src/version_check.rs
@@ -1,6 +1,7 @@
 use anyhow::Result;
+use parking_lot::Mutex;
 use serde::Deserialize;
-use std::sync::{Arc, Mutex};
+use std::sync::Arc;
 use std::time::Duration;
 
 use crate::reqwest_simd_json::ResponseSimdJsonExt;
@@ -98,9 +99,7 @@ pub fn spawn_version_check()
     tokio::spawn(async move {
         let result = check_for_updates().await;
 
-        if let Ok(mut s) = status_clone.lock() {
-            *s = result;
-        }
+        *status_clone.lock() = result;
     });
 
     status
diff --git a/src/watcher.rs b/src/watcher.rs
index 3f6d222..abf8ad7 100644
--- a/src/watcher.rs
+++ b/src/watcher.rs
@@ -1,17 +1,18 @@
 use anyhow::Result;
 use notify::{RecommendedWatcher, RecursiveMode, Watcher};
 use notify_types::event::{Event, EventKind};
+use parking_lot::Mutex;
 use std::collections::{HashMap, HashSet};
 use std::path::{Path, PathBuf};
+use std::sync::Arc;
 use std::sync::mpsc::{self, Receiver, Sender};
-use std::sync::{Arc, Mutex};
 use std::time::{Duration, Instant};
 use tokio::sync::watch;
 
 use crate::analyzer::AnalyzerRegistry;
 use crate::config::Config;
 use crate::tui::UploadStatus;
-use crate::types::MultiAnalyzerStats;
+use crate::types::MultiAnalyzerStatsView;
 use crate::upload;
 
 #[derive(Debug, Clone)]
@@ -20,10 +21,6 @@ pub enum WatcherEvent {
     FileChanged(String, PathBuf),
     /// A file was deleted (analyzer name, file path)
     FileDeleted(String, PathBuf),
-    /// Fallback: data changed for an analyzer (full reload)
-    /// Kept for API stability - may be used programmatically
-    #[allow(dead_code)]
-    DataChanged(String),
     /// An error occurred
     Error(String),
 }
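+// Illustrative note (not part of the original change): after this cleanup the
+// watcher surfaces three event kinds, each mapped to an incremental path in
+// RealtimeStatsManager::handle_watcher_event below.
+//
+//     WatcherEvent::FileChanged(name, path) // reparse just that file
+//     WatcherEvent::FileDeleted(name, path) // drop its cached contribution
+//     WatcherEvent::Error(err)              // logged, no state change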
@@ -71,18 +68,6 @@ impl FileWatcher {
         })
     }
 
-    #[cfg(test)]
-    pub fn for_tests() -> Self {
-        let (_tx, event_rx) = mpsc::channel();
-        let watcher =
-            notify::recommended_watcher(|_res| {}).expect("failed to create test file watcher");
-
-        Self {
-            _watcher: watcher,
-            event_rx,
-        }
-    }
-
     pub fn try_recv(&self) -> Option<WatcherEvent> {
         self.event_rx.try_recv().ok()
     }
@@ -137,9 +122,8 @@ fn find_analyzer_for_path(
 
 pub struct RealtimeStatsManager {
     registry: AnalyzerRegistry,
-    current_stats: MultiAnalyzerStats,
-    update_tx: watch::Sender<MultiAnalyzerStats>,
-    update_rx: watch::Receiver<MultiAnalyzerStats>,
+    update_tx: watch::Sender<MultiAnalyzerStatsView>,
+    update_rx: watch::Receiver<MultiAnalyzerStatsView>,
     last_upload_time: Option<Instant>,
     upload_debounce: Duration,
     upload_status: Option<Arc<Mutex<UploadStatus>>>,
@@ -148,14 +132,15 @@
 }
 
 impl RealtimeStatsManager {
-    pub async fn new(registry: AnalyzerRegistry) -> Result<Self> {
-        // Initial stats load using registry method
-        let initial_stats = registry.load_all_stats().await?;
-        let (update_tx, update_rx) = watch::channel(initial_stats.clone());
+    /// Create a new stats manager with parallel file loading.
+    /// Must be called within a rayon threadpool context for parallelism.
+    pub fn new(registry: AnalyzerRegistry) -> Result<Self> {
+        // Initial stats load using rayon parallel I/O.
+        let initial_stats = registry.load_all_stats_views_parallel()?;
+        let (update_tx, update_rx) = watch::channel(initial_stats);
 
         Ok(Self {
             registry,
-            current_stats: initial_stats,
             update_tx,
             update_rx,
             last_upload_time: None,
@@ -175,198 +160,152 @@
         // Caching has been removed - this is kept for API compatibility
     }
 
-    pub fn get_stats_receiver(&self) -> watch::Receiver<MultiAnalyzerStats> {
+    pub fn get_stats_receiver(&self) -> watch::Receiver<MultiAnalyzerStatsView> {
         self.update_rx.clone()
     }
 
     pub async fn handle_watcher_event(&mut self, event: WatcherEvent) -> Result<()> {
         match event {
             WatcherEvent::FileChanged(analyzer_name, path) => {
-                // Try incremental reload first (much faster - only reparses the changed file)
-                if self.registry.has_cached_messages(&analyzer_name) {
-                    self.reload_single_file(&analyzer_name, &path).await;
+                // True incremental update - O(1); only the changed file is reparsed
+                if self.registry.has_cached_contributions(&analyzer_name) {
+                    self.reload_single_file_incremental(&analyzer_name, &path)
+                        .await;
                 } else {
-                    // Fallback to full reload if cache not populated
-                    self.registry.invalidate_cache(&analyzer_name);
+                    // Fallback to full reload if the cache is not populated
+                    // (shouldn't happen normally)
                     self.reload_analyzer_stats(&analyzer_name).await;
                 }
             }
             WatcherEvent::FileDeleted(analyzer_name, path) => {
-                // Remove file from message cache
-                self.registry.remove_file_from_cache(&path);
-
-                // Invalidate data source cache to reflect deletion
-                self.registry.invalidate_cache(&analyzer_name);
-
-                // Reload stats (will use cached messages for other files)
-                self.reload_analyzer_stats(&analyzer_name).await;
-            }
-            WatcherEvent::DataChanged(analyzer_name) => {
-                // Full reload fallback
-                self.registry.invalidate_cache(&analyzer_name);
-                self.reload_analyzer_stats(&analyzer_name).await;
+                // Remove the file from the cache and broadcast the updated view
+                if self.registry.remove_file_from_cache(&analyzer_name, &path) {
+                    self.apply_view_update().await;
+                } else {
+                    // Fallback to full reload
+                    self.reload_analyzer_stats(&analyzer_name).await;
+                }
             }
             WatcherEvent::Error(err) => {
                 eprintln!("File watcher error: {err}");
             }
         }
+
         Ok(())
     }
 
-    /// Helper to reload stats for a specific analyzer and broadcast updates
+    /// Helper to reload stats for a specific analyzer and broadcast updates (fallback)
     async fn reload_analyzer_stats(&mut self, analyzer_name: &str) {
         if let Some(analyzer) = self.registry.get_analyzer_by_display_name(analyzer_name) {
-            // Full parse of all files for this analyzer
-            let result = analyzer.get_stats().await;
-            self.apply_stats_update(analyzer_name, result).await;
+            // Full parse of all files for this analyzer (sync, no threadpool for incremental)
+            match analyzer.get_stats() {
+                Ok(new_stats) => {
+                    // Update the cache with the new view
+                    self.registry
+                        .update_cached_view(analyzer_name, new_stats.into_view());
+                    self.apply_view_update().await;
+                }
+                Err(e) => {
+                    eprintln!("Error reloading {analyzer_name} stats: {e}");
+                }
+            }
         }
     }
 
-    /// Helper to reload stats for a single file change (incremental, much faster)
-    async fn reload_single_file(&mut self, analyzer_name: &str, path: &Path) {
-        // Incremental reload - only reparse the changed file
-        let result = self.registry.reload_file(analyzer_name, path).await;
-        self.apply_stats_update(analyzer_name, result).await;
-    }
-
-    /// Apply a stats update result and broadcast to listeners
-    async fn apply_stats_update(
-        &mut self,
-        analyzer_name: &str,
-        result: anyhow::Result<AgenticCodingToolStats>,
-    ) {
-        match result {
-            Ok(new_stats) => {
-                // Update the stats for this analyzer
-                let mut updated_analyzer_stats = self.current_stats.analyzer_stats.clone();
-
-                // Find and replace the stats for this analyzer
-                if let Some(pos) = updated_analyzer_stats
-                    .iter()
-                    .position(|s| s.analyzer_name == analyzer_name)
-                {
-                    updated_analyzer_stats[pos] = new_stats;
-                } else {
-                    // New analyzer data
-                    updated_analyzer_stats.push(new_stats);
-                }
-
-                self.current_stats = MultiAnalyzerStats {
-                    analyzer_stats: updated_analyzer_stats,
-                };
-
-                // Send the update
-                let _ = self.update_tx.send(self.current_stats.clone());
-
-                // Trigger auto-upload if enabled and debounce time has passed
-                self.trigger_auto_upload_if_enabled().await;
+    /// Helper to reload stats for a single file change using a true incremental update
+    async fn reload_single_file_incremental(&mut self, analyzer_name: &str, path: &Path) {
+        // True incremental update - subtract old, add new (sync, no threadpool for a single file)
+        match self.registry.reload_file_incremental(analyzer_name, path) {
+            Ok(()) => {
+                self.apply_view_update().await;
             }
             Err(e) => {
-                eprintln!("Error reloading {analyzer_name} stats: {e}");
+                eprintln!("Error in incremental reload for {analyzer_name}: {e}");
+                // Fall back to a full reload on error
+                self.reload_analyzer_stats(analyzer_name).await;
             }
         }
     }
+
+    /// Broadcast the current cache state to listeners.
+    /// The view is already updated in place via the RwLock; we just rebuild and broadcast.
+    async fn apply_view_update(&mut self) {
+        // Build a fresh MultiAnalyzerStatsView from the cache - this only clones Arc pointers
+        let stats = MultiAnalyzerStatsView {
+            analyzer_stats: self.registry.get_all_cached_views(),
+        };
+
+        // Send the update
+        let _ = self.update_tx.send(stats);
+
+        // Trigger auto-upload if enabled and debounce time has passed
+        self.trigger_auto_upload_if_enabled().await;
+    }
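+
+    // Illustrative note (not part of the original change): the debounce contract
+    // implemented below - changes landing within `upload_debounce` of the last
+    // upload only set `pending_upload`; the next change after the window expires
+    // performs the actual upload, so bursts of file events collapse into one.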
 
     async fn trigger_auto_upload_if_enabled(&mut self) {
         // Check if auto-upload is enabled
-        let _config = match Config::load() {
+        let config = match Config::load() {
             Ok(Some(cfg)) if cfg.upload.auto_upload && cfg.is_configured() => cfg,
             _ => return, // Auto-upload not enabled or config not available
         };
 
         // Check if an upload is already in progress
-        if let Ok(in_progress) = self.upload_in_progress.lock()
-            && *in_progress
-        {
+        if *self.upload_in_progress.lock() {
             // Mark that we have pending changes to upload
-            if let Ok(mut pending) = self.pending_upload.lock() {
-                *pending = true;
-            }
+            *self.pending_upload.lock() = true;
             return;
         }
 
-        // Check debounce timing
+        // Check debounce timing - skip the actual upload for the debounce period.
+        // The upload will be triggered on the next change after the debounce expires.
         let now = Instant::now();
         if let Some(last_time) = self.last_upload_time
             && now.duration_since(last_time) < self.upload_debounce
         {
-            // Schedule a delayed upload
-            let remaining_wait = self.upload_debounce - now.duration_since(last_time);
-            let stats = self.current_stats.clone();
-            let upload_status = self.upload_status.clone();
-            let upload_in_progress = self.upload_in_progress.clone();
-            let pending_upload = self.pending_upload.clone();
-
-            tokio::spawn(async move {
-                tokio::time::sleep(remaining_wait).await;
-
-                // Check if we should still upload
-                let should_upload = if let Ok(mut pending) = pending_upload.lock() {
-                    let was_pending = *pending;
-                    *pending = false;
-                    was_pending
-                } else {
-                    true
-                };
-
-                if should_upload {
-                    // Mark upload as in progress
-                    if let Ok(mut in_progress) = upload_in_progress.lock() {
-                        *in_progress = true;
-                    }
-
-                    upload::perform_background_upload(stats, upload_status, None).await;
-
-                    // Mark upload as complete
-                    if let Ok(mut in_progress) = upload_in_progress.lock() {
-                        *in_progress = false;
-                    }
-                }
-            });
-
-            // Mark that we have a pending upload scheduled
-            if let Ok(mut pending) = self.pending_upload.lock() {
-                *pending = true;
-            }
+            // Mark that we have pending changes to upload
+            *self.pending_upload.lock() = true;
            return;
         }
 
         self.last_upload_time = Some(now);
 
         // Mark upload as in progress
-        if let Ok(mut in_progress) = self.upload_in_progress.lock() {
-            *in_progress = true;
+        *self.upload_in_progress.lock() = true;
+
+        // For an incremental upload, load only changed/new messages.
+        // This avoids loading all historical data into memory.
+        let messages = match self
+            .registry
+            .load_messages_for_upload(config.upload.last_date_uploaded)
+        {
+            Ok(msgs) => msgs,
+            Err(_) => {
+                *self.upload_in_progress.lock() = false;
+                return;
+            }
+        };
+
+        if messages.is_empty() {
+            // Nothing to upload
+            *self.upload_in_progress.lock() = false;
+            return;
         }
 
-        // Clone necessary data for the async upload task
-        let stats = self.current_stats.clone();
         let upload_status = self.upload_status.clone();
         let upload_in_progress = self.upload_in_progress.clone();
-        let pending_upload = self.pending_upload.clone();
+        let dirty_files = self.registry.dirty_files_handle();
 
-        // Spawn background upload task
+        // Spawn a background upload task with only the messages to upload
         tokio::spawn(async move {
-            upload::perform_background_upload(stats.clone(), upload_status.clone(), None).await;
+            upload::perform_background_upload_messages(
+                messages,
+                upload_status,
+                None,
+                Some(move || dirty_files.clear()),
+            )
+            .await;
 
             // Mark upload as complete
-            if let Ok(mut in_progress) = upload_in_progress.lock() {
-                *in_progress = false;
-            }
-
-            // Check if we need to upload again due to changes during the upload
-            let should_upload_again = if let Ok(mut pending) = pending_upload.lock() {
-                let was_pending = *pending;
-                *pending = false;
-                was_pending
-            } else {
-                false
-            };
-
-            if should_upload_again {
-                // Wait a short time before uploading again
-                tokio::time::sleep(Duration::from_secs(1)).await;
-                upload::perform_background_upload(stats, upload_status, None).await;
-            }
+            *upload_in_progress.lock() = false;
         });
     }
 }
 
@@ -375,6 +314,7 @@ mod tests {
     use super::*;
     use crate::analyzer::{Analyzer, DataSource};
+    use crate::contribution_cache::ContributionStrategy;
     use crate::types::{
         AgenticCodingToolStats, Application, ConversationMessage, MessageRole, Stats,
     };
@@ -430,20 +370,25 @@ mod tests {
             Ok(Vec::new())
         }
 
-        async fn parse_conversations(
-            &self,
-            _sources: Vec<DataSource>,
-        ) -> Result<Vec<ConversationMessage>> {
+        fn parse_source(&self, _source: &DataSource) -> Result<Vec<ConversationMessage>> {
             Ok(self.stats.messages.clone())
         }
 
-        async fn get_stats(&self) -> Result<AgenticCodingToolStats> {
+        fn get_stats(&self) -> Result<AgenticCodingToolStats> {
             Ok(self.stats.clone())
         }
 
         fn is_available(&self) -> bool {
             self.available
         }
+
+        fn get_watch_directories(&self) -> Vec<PathBuf> {
+            Vec::new()
+        }
+
+        fn contribution_strategy(&self) -> ContributionStrategy {
+            ContributionStrategy::SingleSession
+        }
     }
 
     #[test]
@@ -490,23 +435,29 @@ mod tests {
             available: true,
         });
 
-        let mut manager = RealtimeStatsManager::new(registry).await.expect("manager");
+        let mut manager = RealtimeStatsManager::new(registry).expect("manager");
 
         let initial = manager.get_stats_receiver().borrow().clone();
         assert!(
             initial.analyzer_stats.is_empty()
-                || initial.analyzer_stats[0].analyzer_name == "test-analyzer"
+                || &*initial.analyzer_stats[0].read().analyzer_name == "test-analyzer"
         );
 
         manager
-            .handle_watcher_event(WatcherEvent::DataChanged("test-analyzer".into()))
+            .handle_watcher_event(WatcherEvent::FileDeleted(
+                "test-analyzer".into(),
+                PathBuf::from("/fake/path.jsonl"),
+            ))
             .await
            .expect("handle_watcher_event");
 
         let updated = manager.get_stats_receiver().borrow().clone();
-        // After handling DataChanged, we should still have stats for the analyzer.
+        // After handling FileDeleted, we should still have stats for the analyzer.
         assert!(!updated.analyzer_stats.is_empty());
-        assert_eq!(updated.analyzer_stats[0].analyzer_name, "test-analyzer");
+        assert_eq!(
+            &*updated.analyzer_stats[0].read().analyzer_name,
+            "test-analyzer"
+        );
 
         // Also exercise the error branch.
         manager
@@ -550,7 +501,7 @@ mod tests {
             available: true,
         });
 
-        let mut manager = RealtimeStatsManager::new(registry).await.expect("manager");
+        let mut manager = RealtimeStatsManager::new(registry).expect("manager");
 
         // Handle FileDeleted event
         manager
@@ -578,7 +529,7 @@ mod tests {
             available: true,
        });
 
-        let mut manager = RealtimeStatsManager::new(registry).await.expect("manager");
+        let mut manager = RealtimeStatsManager::new(registry).expect("manager");
 
         // Handle FileChanged event
         manager
@@ -604,7 +555,7 @@ mod tests {
             available: true,
         });
 
-        let manager = RealtimeStatsManager::new(registry).await.expect("manager");
+        let manager = RealtimeStatsManager::new(registry).expect("manager");
 
         // persist_cache should not panic even if cache is empty
         manager.persist_cache();