diff --git a/Makefile b/Makefile
index cf5521a7a3..cecacb5b4f 100644
--- a/Makefile
+++ b/Makefile
@@ -112,9 +112,10 @@ mobile/node_modules/.installed: mobile/package.json mobile/bun.lock
ensure-deps: node_modules/.installed
# Rebuild native modules for Electron
-rebuild-native: node_modules/.installed ## Rebuild native modules (node-pty) for Electron
+rebuild-native: node_modules/.installed ## Rebuild native modules (node-pty, DuckDB) for Electron
@echo "Rebuilding native modules for Electron..."
@npx @electron/rebuild -f -m node_modules/node-pty
+ @npx @electron/rebuild -f -m node_modules/@duckdb/node-bindings
@echo "Native modules rebuilt successfully"
# Run compiled CLI with trailing arguments (builds only if missing)
diff --git a/bun.lock b/bun.lock
index a270307f5b..ee9bec472d 100644
--- a/bun.lock
+++ b/bun.lock
@@ -18,6 +18,7 @@
"@dnd-kit/core": "^6.3.1",
"@dnd-kit/sortable": "^10.0.0",
"@dnd-kit/utilities": "^3.2.2",
+ "@duckdb/node-api": "^1.4.4-r.1",
"@homebridge/ciao": "^1.3.4",
"@jitl/quickjs-wasmfile-release-asyncify": "^0.31.0",
"@lydell/node-pty": "1.1.0",
@@ -77,6 +78,7 @@
"react-colorful": "^5.6.1",
"react-resizable-panels": "^3.0.6",
"react-router-dom": "^7.11.0",
+ "recharts": "^2.15.3",
"rehype-harden": "^1.1.5",
"rehype-sanitize": "^6.0.0",
"remark-breaks": "^4.0.0",
@@ -564,6 +566,20 @@
"@dnd-kit/utilities": ["@dnd-kit/utilities@3.2.2", "", { "dependencies": { "tslib": "^2.0.0" }, "peerDependencies": { "react": ">=16.8.0" } }, "sha512-+MKAJEOfaBe5SmV6t34p80MMKhjvUz0vRrvVJbPT0WElzaOJ/1xs+D+KDv+tD/NE5ujfrChEcshd4fLn0wpiqg=="],
+ "@duckdb/node-api": ["@duckdb/node-api@1.4.4-r.1", "", { "dependencies": { "@duckdb/node-bindings": "1.4.4-r.1" } }, "sha512-oqaH9DXTJNwyLkd2FgJwmSnWVqjB5irbESeTeNVMBnM03iRaNY545BhfBDumu1TnOV2koIdG1mNsmjgq/ZTIkA=="],
+
+ "@duckdb/node-bindings": ["@duckdb/node-bindings@1.4.4-r.1", "", { "optionalDependencies": { "@duckdb/node-bindings-darwin-arm64": "1.4.4-r.1", "@duckdb/node-bindings-darwin-x64": "1.4.4-r.1", "@duckdb/node-bindings-linux-arm64": "1.4.4-r.1", "@duckdb/node-bindings-linux-x64": "1.4.4-r.1", "@duckdb/node-bindings-win32-x64": "1.4.4-r.1" } }, "sha512-NFm0AMrK3kiVLQhgnGUEjX5c8Elm93dYePZ9BUCvvd0AVVTKEBeRhBp9afziuzP3Sl5+7XQ1TyaBLsZJKKBDBQ=="],
+
+ "@duckdb/node-bindings-darwin-arm64": ["@duckdb/node-bindings-darwin-arm64@1.4.4-r.1", "", { "os": "darwin", "cpu": "arm64" }, "sha512-/NtbkCgCAOJDxw41XvSGV/mxQAlsx+2xUvhIVUj6fxoOfTG4jTttRhuphwE3EXNoWzJOjZxCZ5LwhC/qb6ZwLg=="],
+
+ "@duckdb/node-bindings-darwin-x64": ["@duckdb/node-bindings-darwin-x64@1.4.4-r.1", "", { "os": "darwin", "cpu": "x64" }, "sha512-lzFRDrZwc1EoV513vmKufasiAQ2WlhEb0O6guRBarbvOKKVhRb8tQ5H7LPVTrIewjTI3XDgHrnK+vfh9L+xQcA=="],
+
+ "@duckdb/node-bindings-linux-arm64": ["@duckdb/node-bindings-linux-arm64@1.4.4-r.1", "", { "os": "linux", "cpu": "arm64" }, "sha512-wq92/EcTiOTRW1RSDOwjeLyMMXWwNVNwU21TQdfu3sgS86+Ih3raaK68leDgY5cWgf72We3J2W7HYz8GwxcMYw=="],
+
+ "@duckdb/node-bindings-linux-x64": ["@duckdb/node-bindings-linux-x64@1.4.4-r.1", "", { "os": "linux", "cpu": "x64" }, "sha512-fjYNc+t4/T7mhzZ57oJoIQaWvbYVvxhidcNNansQFiWnd6/JMLCULd4qnt8XI3Tt2BrZsraH690KSBIS3QPt0w=="],
+
+ "@duckdb/node-bindings-win32-x64": ["@duckdb/node-bindings-win32-x64@1.4.4-r.1", "", { "os": "win32", "cpu": "x64" }, "sha512-+J+MUYGvYWfX0balWToDIy3CBYg7hHI0KQUQ39+SniinXlMF8+puRW6ebyQ+AXrcrKkwuj4wzJuEBD0AdhHGtw=="],
+
"@electron/asar": ["@electron/asar@3.4.1", "", { "dependencies": { "commander": "^5.0.0", "glob": "^7.1.6", "minimatch": "^3.0.4" }, "bin": { "asar": "bin/asar.js" } }, "sha512-i4/rNPRS84t0vSRa2HorerGRXWyF4vThfHesw0dmcWHp+cspK743UanA0suA5Q5y8kzY2y6YKrvbIUn69BCAiA=="],
"@electron/get": ["@electron/get@2.0.3", "", { "dependencies": { "debug": "^4.1.1", "env-paths": "^2.2.0", "fs-extra": "^8.1.0", "got": "^11.8.5", "progress": "^2.0.3", "semver": "^6.2.0", "sumchecker": "^3.0.1" }, "optionalDependencies": { "global-agent": "^3.0.0" } }, "sha512-Qkzpg2s9GnVV2I2BjRksUi43U5e6+zaQMcjoJy0C+C5oxaKl+fmckGDQFtRpZpZV0NQekuZZ+tGz7EA9TVnQtQ=="],
@@ -1988,6 +2004,8 @@
"decimal.js": ["decimal.js@10.6.0", "", {}, "sha512-YpgQiITW3JXGntzdUmyUR1V812Hn8T1YVXhCu+wO3OpS4eU9l4YdD3qjyiKdV6mvV29zapkMeD390UVEf2lkUg=="],
+ "decimal.js-light": ["decimal.js-light@2.5.1", "", {}, "sha512-qIMFpTMZmny+MMIitAB6D7iVPEorVw6YQRWkvarTkT4tBeSLLiHzcwj6q0MmYSFCiVpiqPJTJEYIrpcPzVEIvg=="],
+
"decode-named-character-reference": ["decode-named-character-reference@1.2.0", "", { "dependencies": { "character-entities": "^2.0.0" } }, "sha512-c6fcElNV6ShtZXmsgNgFFV5tVX2PaV4g+MOAkb8eXHvn6sryJBrZa9r0zV6+dtTyoCKxtDy5tyQ5ZwQuidtd+Q=="],
"decompress-response": ["decompress-response@6.0.0", "", { "dependencies": { "mimic-response": "^3.1.0" } }, "sha512-aW35yZM6Bb/4oJlZncMH2LCoZtJXTRxES17vE3hoRiowU2kWHaJKFkSBDnDR+cm9J+9QhXmREyIfv0pji9ejCQ=="],
@@ -2052,6 +2070,8 @@
"dom-accessibility-api": ["dom-accessibility-api@0.5.16", "", {}, "sha512-X7BJ2yElsnOJ30pZF4uIIDfBEVgF4XEBxL9Bxhy6dnrm5hkzqmsWHGTiHqRiITNhMyFLyAiWndIJP7Z1NTteDg=="],
+ "dom-helpers": ["dom-helpers@5.2.1", "", { "dependencies": { "@babel/runtime": "^7.8.7", "csstype": "^3.0.2" } }, "sha512-nRCa7CK3VTrM2NmGkIy4cbK7IZlgBE/PYMn55rrXefr5xXDP0LdtfPnblFDoVdcAfslJ7or6iqAUnx0CCGIWQA=="],
+
"dom-serializer": ["dom-serializer@0.2.2", "", { "dependencies": { "domelementtype": "^2.0.1", "entities": "^2.0.0" } }, "sha512-2/xPb3ORsQ42nHYiSunXkDjPLBaEj/xTwUO4B7XCZQTRk7EBtTOPaygh10YAAh2OI1Qrp6NWfpAhzswj0ydt9g=="],
"domelementtype": ["domelementtype@1.3.1", "", {}, "sha512-BSKB+TSpMpFI/HOxCNr1O8aMOTZ8hT3pM3GQ0w/mWRmkhEDSFJkkyzz4XQsBV44BChwGkrDfMyjVD0eA2aFV3w=="],
@@ -2180,6 +2200,8 @@
"etag": ["etag@1.8.1", "", {}, "sha512-aIL5Fx7mawVa300al2BnEE4iNvo1qETxLrPI/o05L7z6go7fCw1J6EQmbK4FmJ2AS7kgVF/KEZWufBfdClMcPg=="],
+ "eventemitter3": ["eventemitter3@4.0.7", "", {}, "sha512-8guHBZCwKnFhYdHr2ysuRWErTwhoN2X8XELRlrRwpmfeY2jjuUN4taQMsULKUVo1K4DvZl+0pgfyoysHxvmvEw=="],
+
"eventsource-parser": ["eventsource-parser@3.0.6", "", {}, "sha512-Vo1ab+QXPzZ4tCa8SwIHJFaSzy4R6SHf7BY79rFBDf0idraZWAkYrDjDj8uWaSm3S2TK+hJ7/t1CEmZ7jXw+pg=="],
"execa": ["execa@5.1.1", "", { "dependencies": { "cross-spawn": "^7.0.3", "get-stream": "^6.0.0", "human-signals": "^2.1.0", "is-stream": "^2.0.0", "merge-stream": "^2.0.0", "npm-run-path": "^4.0.1", "onetime": "^5.1.2", "signal-exit": "^3.0.3", "strip-final-newline": "^2.0.0" } }, "sha512-8uSpZZocAZRBAPIEINJj3Lo9HyGitllczc27Eh5YYojjMFMn8yHMDMaUHE2Jqfq05D/wucwI4JGURyXt1vchyg=="],
@@ -2208,6 +2230,8 @@
"fast-deep-equal": ["fast-deep-equal@3.1.3", "", {}, "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q=="],
+ "fast-equals": ["fast-equals@5.4.0", "", {}, "sha512-jt2DW/aNFNwke7AUd+Z+e6pz39KO5rzdbbFCg2sGafS4mk13MI7Z8O5z9cADNn5lhGODIgLwug6TZO2ctf7kcw=="],
+
"fast-glob": ["fast-glob@3.3.3", "", { "dependencies": { "@nodelib/fs.stat": "^2.0.2", "@nodelib/fs.walk": "^1.2.3", "glob-parent": "^5.1.2", "merge2": "^1.3.0", "micromatch": "^4.0.8" } }, "sha512-7MptL8U0cqcFdzIzwOTHoilX9x5BrNqye7Z/LuC7kCMRio1EMSyqRK3BEAUD7sXRq4iT4AzTVuZdhgQ2TCvYLg=="],
"fast-json-stable-stringify": ["fast-json-stable-stringify@2.1.0", "", {}, "sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw=="],
@@ -3224,8 +3248,12 @@
"react-router-dom": ["react-router-dom@7.11.0", "", { "dependencies": { "react-router": "7.11.0" }, "peerDependencies": { "react": ">=18", "react-dom": ">=18" } }, "sha512-e49Ir/kMGRzFOOrYQBdoitq3ULigw4lKbAyKusnvtDu2t4dBX4AGYPrzNvorXmVuOyeakai6FUPW5MmibvVG8g=="],
+ "react-smooth": ["react-smooth@4.0.4", "", { "dependencies": { "fast-equals": "^5.0.1", "prop-types": "^15.8.1", "react-transition-group": "^4.4.5" }, "peerDependencies": { "react": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0", "react-dom": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0" } }, "sha512-gnGKTpYwqL0Iii09gHobNolvX4Kiq4PKx6eWBCYYix+8cdw+cGo3do906l1NBPKkSWx1DghC1dlWG9L2uGd61Q=="],
+
"react-style-singleton": ["react-style-singleton@2.2.3", "", { "dependencies": { "get-nonce": "^1.0.0", "tslib": "^2.0.0" }, "peerDependencies": { "@types/react": "*", "react": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0 || ^19.0.0-rc" }, "optionalPeers": ["@types/react"] }, "sha512-b6jSvxvVnyptAiLjbkWLE/lOnR4lfTtDAl+eUC7RZy+QQWc6wRzIV2CE6xBuMmDxc2qIihtDCZD5NPOFl7fRBQ=="],
+ "react-transition-group": ["react-transition-group@4.4.5", "", { "dependencies": { "@babel/runtime": "^7.5.5", "dom-helpers": "^5.0.1", "loose-envify": "^1.4.0", "prop-types": "^15.6.2" }, "peerDependencies": { "react": ">=16.6.0", "react-dom": ">=16.6.0" } }, "sha512-pZcd1MCJoiKiBR2NRxeCRg13uCXbydPnmB4EOeRrY7480qNWO8IIgQG6zlDkm6uRMsURXPuKq0GWtiM59a5Q6g=="],
+
"read-binary-file-arch": ["read-binary-file-arch@1.0.6", "", { "dependencies": { "debug": "^4.3.4" }, "bin": { "read-binary-file-arch": "cli.js" } }, "sha512-BNg9EN3DD3GsDXX7Aa8O4p92sryjkmzYYgmgTAc6CA4uGLEDzFfxOxugu21akOxpcXHiEgsYkC6nPsQvLLLmEg=="],
"read-config-file": ["read-config-file@6.3.2", "", { "dependencies": { "config-file-ts": "^0.2.4", "dotenv": "^9.0.2", "dotenv-expand": "^5.1.0", "js-yaml": "^4.1.0", "json5": "^2.2.0", "lazy-val": "^1.0.4" } }, "sha512-M80lpCjnE6Wt6zb98DoW8WHR09nzMSpu8XHtPkiTHrJ5Az9CybfeQhTJ8D7saeBHpGhLPIVyA8lcL6ZmdKwY6Q=="],
@@ -3238,6 +3266,10 @@
"recast": ["recast@0.23.11", "", { "dependencies": { "ast-types": "^0.16.1", "esprima": "~4.0.0", "source-map": "~0.6.1", "tiny-invariant": "^1.3.3", "tslib": "^2.0.1" } }, "sha512-YTUo+Flmw4ZXiWfQKGcwwc11KnoRAYgzAE2E7mXKCjSviTKShtxBsN6YUUBB2gtaBzKzeKunxhUwNHQuRryhWA=="],
+ "recharts": ["recharts@2.15.4", "", { "dependencies": { "clsx": "^2.0.0", "eventemitter3": "^4.0.1", "lodash": "^4.17.21", "react-is": "^18.3.1", "react-smooth": "^4.0.4", "recharts-scale": "^0.4.4", "tiny-invariant": "^1.3.1", "victory-vendor": "^36.6.8" }, "peerDependencies": { "react": "^16.0.0 || ^17.0.0 || ^18.0.0 || ^19.0.0", "react-dom": "^16.0.0 || ^17.0.0 || ^18.0.0 || ^19.0.0" } }, "sha512-UT/q6fwS3c1dHbXv2uFgYJ9BMFHu3fwnd7AYZaEQhXuYQ4hgsxLvsUXzGdKeZrW5xopzDCvuA2N41WJ88I7zIw=="],
+
+ "recharts-scale": ["recharts-scale@0.4.5", "", { "dependencies": { "decimal.js-light": "^2.4.1" } }, "sha512-kivNFO+0OcUNu7jQquLXAxz1FIwZj8nrj+YkOKc5694NbjCvcT6aSZiIzNzd2Kul4o4rTto8QVR9lMNtxD4G1w=="],
+
"redent": ["redent@3.0.0", "", { "dependencies": { "indent-string": "^4.0.0", "strip-indent": "^3.0.0" } }, "sha512-6tDA8g98We0zd0GvVeMT9arEOnTw9qM03L9cJXaCjrip1OO764RDBLBfrB4cwzNGDj5OA5ioymC9GkizgWJDUg=="],
"redux": ["redux@4.2.1", "", { "dependencies": { "@babel/runtime": "^7.9.2" } }, "sha512-LAUYz4lc+Do8/g7aeRa8JkyDErK6ekstQaqWQrNRW//MY1TvCEpMtpTWvlQ+FPbWCx+Xixu/6SHt5N0HR+SB4w=="],
@@ -3690,6 +3722,8 @@
"vfile-message": ["vfile-message@4.0.3", "", { "dependencies": { "@types/unist": "^3.0.0", "unist-util-stringify-position": "^4.0.0" } }, "sha512-QTHzsGd1EhbZs4AsQ20JX1rC3cOlt/IWJruk893DfLRr57lcnOeMaWG4K0JrRta4mIJZKth2Au3mM3u03/JWKw=="],
+ "victory-vendor": ["victory-vendor@36.9.2", "", { "dependencies": { "@types/d3-array": "^3.0.3", "@types/d3-ease": "^3.0.0", "@types/d3-interpolate": "^3.0.1", "@types/d3-scale": "^4.0.2", "@types/d3-shape": "^3.1.0", "@types/d3-time": "^3.0.0", "@types/d3-timer": "^3.0.0", "d3-array": "^3.1.6", "d3-ease": "^3.0.1", "d3-interpolate": "^3.0.1", "d3-scale": "^4.0.2", "d3-shape": "^3.1.0", "d3-time": "^3.0.0", "d3-timer": "^3.0.1" } }, "sha512-PnpQQMuxlwYdocC8fIJqVXvkeViHYzotI+NJrCuav0ZYFoq912ZHBk3mCeuj+5/VpodOjPe1z0Fk2ihgzlXqjQ=="],
+
"vite": ["vite@7.2.6", "", { "dependencies": { "esbuild": "^0.25.0", "fdir": "^6.5.0", "picomatch": "^4.0.3", "postcss": "^8.5.6", "rollup": "^4.43.0", "tinyglobby": "^0.2.15" }, "optionalDependencies": { "fsevents": "~2.3.3" }, "peerDependencies": { "@types/node": "^20.19.0 || >=22.12.0", "jiti": ">=1.21.0", "less": "^4.0.0", "lightningcss": "^1.21.0", "sass": "^1.70.0", "sass-embedded": "^1.70.0", "stylus": ">=0.54.8", "sugarss": "^5.0.0", "terser": "^5.16.0", "tsx": "^4.8.1", "yaml": "^2.4.2" }, "optionalPeers": ["@types/node", "jiti", "less", "lightningcss", "sass", "sass-embedded", "stylus", "sugarss", "terser", "tsx", "yaml"], "bin": { "vite": "bin/vite.js" } }, "sha512-tI2l/nFHC5rLh7+5+o7QjKjSR04ivXDF4jcgV0f/bTQ+OJiITy5S6gaynVsEM+7RqzufMnVbIon6Sr5x1SDYaQ=="],
"vite-plugin-svgr": ["vite-plugin-svgr@4.5.0", "", { "dependencies": { "@rollup/pluginutils": "^5.2.0", "@svgr/core": "^8.1.0", "@svgr/plugin-jsx": "^8.1.0" }, "peerDependencies": { "vite": ">=2.6.0" } }, "sha512-W+uoSpmVkSmNOGPSsDCWVW/DDAyv+9fap9AZXBvWiQqrboJ08j2vh0tFxTD/LjwqwAd3yYSVJgm54S/1GhbdnA=="],
diff --git a/flake.nix b/flake.nix
index 0a06eb406c..e7835ab41a 100644
--- a/flake.nix
+++ b/flake.nix
@@ -76,7 +76,7 @@
outputHashMode = "recursive";
# Marker used by scripts/update_flake_hash.sh to update this hash in place.
- outputHash = "sha256-+6o2twg8KOUBuq2RoEqY/OwqCnWSrUiXFuaeLUiuF3k="; # mux-offline-cache-hash
+ outputHash = "sha256-XSh54hSydWb3fnA52vMJfh7zmEBsP+JHKZgIh6EzjYU="; # mux-offline-cache-hash
};
configurePhase = ''
diff --git a/package.json b/package.json
index 1625d20992..dd46078468 100644
--- a/package.json
+++ b/package.json
@@ -58,6 +58,7 @@
"@dnd-kit/core": "^6.3.1",
"@dnd-kit/sortable": "^10.0.0",
"@dnd-kit/utilities": "^3.2.2",
+ "@duckdb/node-api": "^1.4.4-r.1",
"@homebridge/ciao": "^1.3.4",
"@jitl/quickjs-wasmfile-release-asyncify": "^0.31.0",
"@lydell/node-pty": "1.1.0",
@@ -117,6 +118,7 @@
"react-colorful": "^5.6.1",
"react-resizable-panels": "^3.0.6",
"react-router-dom": "^7.11.0",
+ "recharts": "^2.15.3",
"rehype-harden": "^1.1.5",
"rehype-sanitize": "^6.0.0",
"remark-breaks": "^4.0.0",
@@ -272,7 +274,8 @@
"asarUnpack": [
"dist/**/*.wasm",
"dist/**/*.map",
- "**/node_modules/node-pty/build/**/*"
+ "**/node_modules/node-pty/build/**/*",
+ "**/node_modules/@duckdb/**/*.node"
],
"mac": {
"category": "public.app-category.developer-tools",
diff --git a/scripts/check_eager_imports.sh b/scripts/check_eager_imports.sh
index 6d4f91fb71..59103c57d0 100755
--- a/scripts/check_eager_imports.sh
+++ b/scripts/check_eager_imports.sh
@@ -16,6 +16,7 @@ BANNED_IMPORTS=(
"@ai-sdk/anthropic"
"@ai-sdk/openai"
"@ai-sdk/google"
+ "@duckdb/node-api"
"ai"
)
diff --git a/scripts/postinstall.sh b/scripts/postinstall.sh
index ff14ad8a26..71dad18fcc 100755
--- a/scripts/postinstall.sh
+++ b/scripts/postinstall.sh
@@ -13,6 +13,8 @@ SCRIPT_DIR="$(cd "$(dirname "$0")" && pwd)"
PROJECT_ROOT="$(cd "$SCRIPT_DIR/.." && pwd)"
ELECTRON_PATH="$PROJECT_ROOT/node_modules/electron"
NODE_PTY_PATH="$PROJECT_ROOT/node_modules/node-pty"
+DUCKDB_NODE_API_PATH="$PROJECT_ROOT/node_modules/@duckdb/node-api"
+DUCKDB_NODE_BINDINGS_PATH="$PROJECT_ROOT/node_modules/@duckdb/node-bindings"
# 1) Skip in headless/benchmark mode (no Electron UI needed)
if [ "${MUX_HEADLESS:-}" = "1" ]; then
@@ -26,51 +28,52 @@ if [ "${INIT_CWD:-$PROJECT_ROOT}" != "$PROJECT_ROOT" ]; then
exit 0
fi
-# 3) Skip if Electron or node-pty aren't installed
-if [ ! -d "$ELECTRON_PATH" ] || [ ! -d "$NODE_PTY_PATH" ]; then
- echo "đ Server mode detected or Electron/node-pty missing â skipping native rebuild"
+# 3) Skip when Electron is unavailable (server mode install)
+if [ ! -d "$ELECTRON_PATH" ]; then
+ echo "đ Server mode detected (Electron missing) â skipping native rebuild"
exit 0
fi
-# 4) Build a cache key (Electron version + node-pty version + platform + arch)
+HAS_NODE_PTY=0
+if [ -d "$NODE_PTY_PATH" ]; then
+ HAS_NODE_PTY=1
+fi
+
+HAS_DUCKDB=0
+if [ -d "$DUCKDB_NODE_API_PATH" ] && [ -d "$DUCKDB_NODE_BINDINGS_PATH" ]; then
+ HAS_DUCKDB=1
+fi
+
+if [ "$HAS_NODE_PTY" = "0" ] && [ "$HAS_DUCKDB" = "0" ]; then
+ echo "đ Native modules missing â skipping native rebuild"
+ exit 0
+fi
+
+# 4) Build cache keys (Electron version + native module versions + platform + arch)
ELECTRON_VERSION="$(
node -p "require('${ELECTRON_PATH}/package.json').version" 2>/dev/null || echo "unknown"
)"
NODE_PTY_VERSION="$(
node -p "require('${NODE_PTY_PATH}/package.json').version" 2>/dev/null || echo "unknown"
)"
+DUCKDB_VERSION="$(
+ node -p "require('${DUCKDB_NODE_API_PATH}/package.json').version" 2>/dev/null || echo "unknown"
+)"
PLATFORM="$(uname -s 2>/dev/null || echo unknown)"
ARCH="$(uname -m 2>/dev/null || echo unknown)"
STAMP_DIR="$PROJECT_ROOT/node_modules/.cache/mux-native"
-STAMP_FILE="$STAMP_DIR/node-pty-${ELECTRON_VERSION}-${NODE_PTY_VERSION}-${PLATFORM}-${ARCH}.stamp"
+NODE_PTY_STAMP_FILE="$STAMP_DIR/node-pty-${ELECTRON_VERSION}-${NODE_PTY_VERSION}-${PLATFORM}-${ARCH}.stamp"
+DUCKDB_STAMP_FILE="$STAMP_DIR/duckdb-${ELECTRON_VERSION}-${DUCKDB_VERSION}-${PLATFORM}-${ARCH}.stamp"
mkdir -p "$STAMP_DIR"
-# 5) Skip if we've already rebuilt for this combo
-if [ -f "$STAMP_FILE" ]; then
- echo "â
node-pty already rebuilt for Electron ${ELECTRON_VERSION} on ${PLATFORM}/${ARCH} â skipping"
- exit 0
-fi
-
-echo "đ§ Rebuilding node-pty for Electron ${ELECTRON_VERSION} on ${PLATFORM}/${ARCH}..."
-
-# 6) Run rebuild
+# 5) Resolve rebuild command
if command -v npx >/dev/null 2>&1; then
- npx @electron/rebuild -f -m node_modules/node-pty || {
- echo "â ī¸ Failed to rebuild native modules"
- echo " Terminal functionality may not work in desktop mode."
- echo " Run 'make rebuild-native' manually to fix."
- exit 0
- }
+ REBUILD_CMD="npx"
elif command -v bunx >/dev/null 2>&1; then
- bunx @electron/rebuild -f -m node_modules/node-pty || {
- echo "â ī¸ Failed to rebuild native modules"
- echo " Terminal functionality may not work in desktop mode."
- echo " Run 'make rebuild-native' manually to fix."
- exit 0
- }
+ REBUILD_CMD="bunx"
else
echo "â ī¸ Neither npx nor bunx found - cannot rebuild native modules"
echo " Terminal functionality may not work in desktop mode."
@@ -78,6 +81,40 @@ else
exit 0
fi
-# 7) Mark this combo as done
-touch "$STAMP_FILE"
-echo "â
Native modules rebuilt successfully (cached at $STAMP_FILE)"
+# 6) Rebuild node-pty (once per version/platform)
+if [ "$HAS_NODE_PTY" = "1" ]; then
+ if [ -f "$NODE_PTY_STAMP_FILE" ]; then
+ echo "â
node-pty already rebuilt for Electron ${ELECTRON_VERSION} on ${PLATFORM}/${ARCH} â skipping"
+ else
+ echo "đ§ Rebuilding node-pty for Electron ${ELECTRON_VERSION} on ${PLATFORM}/${ARCH}..."
+ $REBUILD_CMD @electron/rebuild -f -m node_modules/node-pty || {
+ echo "â ī¸ Failed to rebuild native modules"
+ echo " Terminal functionality may not work in desktop mode."
+ echo " Run 'make rebuild-native' manually to fix."
+ exit 0
+ }
+ touch "$NODE_PTY_STAMP_FILE"
+ echo "â
node-pty rebuilt successfully (cached at $NODE_PTY_STAMP_FILE)"
+ fi
+else
+ echo "âšī¸ node-pty package missing â skipping node-pty rebuild"
+fi
+
+# 7) Rebuild DuckDB (once per version/platform)
+if [ "$HAS_DUCKDB" = "1" ]; then
+ if [ -f "$DUCKDB_STAMP_FILE" ]; then
+ echo "â
DuckDB already rebuilt for Electron ${ELECTRON_VERSION} on ${PLATFORM}/${ARCH} â skipping"
+ else
+ echo "đ§ Rebuilding DuckDB for Electron ${ELECTRON_VERSION} on ${PLATFORM}/${ARCH}..."
+ $REBUILD_CMD @electron/rebuild -f -m node_modules/@duckdb/node-bindings || {
+ echo "â ī¸ Failed to rebuild native modules"
+ echo " Terminal functionality may not work in desktop mode."
+ echo " Run 'make rebuild-native' manually to fix."
+ exit 0
+ }
+ touch "$DUCKDB_STAMP_FILE"
+ echo "â
DuckDB rebuilt successfully (cached at $DUCKDB_STAMP_FILE)"
+ fi
+else
+ echo "âšī¸ DuckDB packages missing â skipping DuckDB rebuild"
+fi
diff --git a/src/browser/App.tsx b/src/browser/App.tsx
index 8661f2c584..312606dd51 100644
--- a/src/browser/App.tsx
+++ b/src/browser/App.tsx
@@ -68,6 +68,7 @@ import { AboutDialogProvider } from "./contexts/AboutDialogContext";
import { ConfirmDialogProvider, useConfirmDialog } from "./contexts/ConfirmDialogContext";
import { AboutDialog } from "./components/About/AboutDialog";
import { SettingsPage } from "@/browser/components/Settings/SettingsPage";
+import { AnalyticsDashboard } from "@/browser/components/analytics/AnalyticsDashboard";
import { MuxGatewaySessionExpiredDialog } from "./components/MuxGatewaySessionExpiredDialog";
import { SshPromptDialog } from "./components/SshPromptDialog";
import { SplashScreenProvider } from "./components/splashScreens/SplashScreenProvider";
@@ -103,7 +104,13 @@ function AppInner() {
pendingNewWorkspaceDraftId,
beginWorkspaceCreation,
} = useWorkspaceContext();
- const { currentWorkspaceId, currentSettingsSection } = useRouter();
+ const {
+ currentWorkspaceId,
+ currentSettingsSection,
+ isAnalyticsOpen,
+ navigateToAnalytics,
+ navigateFromAnalytics,
+ } = useRouter();
const { theme, setTheme, toggleTheme } = useTheme();
const { open: openSettings, isOpen: isSettingsOpen } = useSettings();
const { confirm: confirmDialog } = useConfirmDialog();
@@ -701,6 +708,13 @@ function AppInner() {
} else if (matchesKeybind(e, KEYBINDS.OPEN_SETTINGS)) {
e.preventDefault();
openSettings();
+ } else if (matchesKeybind(e, KEYBINDS.OPEN_ANALYTICS)) {
+ e.preventDefault();
+ if (isAnalyticsOpen) {
+ navigateFromAnalytics();
+ } else {
+ navigateToAnalytics();
+ }
} else if (matchesKeybind(e, KEYBINDS.NAVIGATE_BACK)) {
e.preventDefault();
void navigate(-1);
@@ -720,6 +734,9 @@ function AppInner() {
closeCommandPalette,
openCommandPalette,
openSettings,
+ isAnalyticsOpen,
+ navigateToAnalytics,
+ navigateFromAnalytics,
navigate,
]);
// Mouse back/forward buttons (buttons 3 and 4)
@@ -957,8 +974,13 @@ function AppInner() {
- {/* Route-driven settings render in the main pane so project/workspace navigation stays visible. */}
- {currentSettingsSection ? (
+ {/* Route-driven settings and analytics render in the main pane so project/workspace navigation stays visible. */}
+ {isAnalyticsOpen ? (
+
+ ) : currentSettingsSection ? (
= (props) => {
window.removeEventListener(CUSTOM_EVENTS.THINKING_LEVEL_TOAST, handler as EventListener);
}, [variant, props, pushToast]);
+ // Show toast feedback for analytics rebuild command palette action.
+ useEffect(() => {
+ const handler = (event: Event) => {
+ const detail = (
+ event as CustomEvent<{ type: "success" | "error"; message: string; title?: string }>
+ ).detail;
+
+ if (!detail || (detail.type !== "success" && detail.type !== "error")) {
+ return;
+ }
+
+ pushToast({
+ type: detail.type,
+ title: detail.title,
+ message: detail.message,
+ });
+ };
+
+ window.addEventListener(CUSTOM_EVENTS.ANALYTICS_REBUILD_TOAST, handler as EventListener);
+ return () =>
+ window.removeEventListener(CUSTOM_EVENTS.ANALYTICS_REBUILD_TOAST, handler as EventListener);
+ }, [pushToast]);
+
// Voice input: command palette toggle + global recording keybinds
useEffect(() => {
if (!voiceInput.shouldShowUI) return;
diff --git a/src/browser/components/Settings/sections/KeybindsSection.tsx b/src/browser/components/Settings/sections/KeybindsSection.tsx
index 836017f0b0..5159dc8610 100644
--- a/src/browser/components/Settings/sections/KeybindsSection.tsx
+++ b/src/browser/components/Settings/sections/KeybindsSection.tsx
@@ -54,6 +54,7 @@ const KEYBIND_LABELS: Record = {
MARK_FILE_READ: "Mark file read",
TOGGLE_HUNK_COLLAPSE: "Toggle hunk collapse",
OPEN_SETTINGS: "Open settings",
+ OPEN_ANALYTICS: "Open analytics",
TOGGLE_VOICE_INPUT: "Toggle voice input",
NAVIGATE_BACK: "Navigate back",
NAVIGATE_FORWARD: "Navigate forward",
@@ -88,6 +89,7 @@ const KEYBIND_GROUPS: Array<{ label: string; keys: Array
"OPEN_COMMAND_PALETTE",
"OPEN_MUX_CHAT",
"OPEN_SETTINGS",
+ "OPEN_ANALYTICS",
"TOGGLE_SIDEBAR",
"CYCLE_MODEL",
"TOGGLE_THINKING",
diff --git a/src/browser/components/TitleBar.tsx b/src/browser/components/TitleBar.tsx
index a486aeac65..0147c80a93 100644
--- a/src/browser/components/TitleBar.tsx
+++ b/src/browser/components/TitleBar.tsx
@@ -3,15 +3,26 @@ import { cn } from "@/common/lib/utils";
import { VERSION } from "@/version";
import { SettingsButton } from "./SettingsButton";
import { GatewayIcon } from "./icons/GatewayIcon";
+import { Button } from "./ui/button";
import { Tooltip, TooltipTrigger, TooltipContent } from "./ui/tooltip";
import type { UpdateStatus } from "@/common/orpc/types";
-import { AlertTriangle, Download, Loader2, RefreshCw, ShieldCheck } from "lucide-react";
+import {
+ AlertTriangle,
+ BarChart3,
+ Download,
+ Loader2,
+ RefreshCw,
+ ShieldCheck,
+ X,
+} from "lucide-react";
import { useAPI } from "@/browser/contexts/API";
import { useAboutDialog } from "@/browser/contexts/AboutDialogContext";
import { usePolicy } from "@/browser/contexts/PolicyContext";
+import { useRouter } from "@/browser/contexts/RouterContext";
import { useSettings } from "@/browser/contexts/SettingsContext";
import { useGateway } from "@/browser/hooks/useGatewayModels";
+import { formatKeybind, KEYBINDS } from "@/browser/utils/ui/keybinds";
import {
formatMuxGatewayBalance,
useMuxGatewayAccountStatus,
@@ -58,6 +69,7 @@ export function TitleBar(props: TitleBarProps) {
const policyState = usePolicy();
const policyEnforced = policyState.status.state === "enforced";
const { open: openSettings } = useSettings();
+ const { isAnalyticsOpen, navigateToAnalytics, navigateFromAnalytics } = useRouter();
const gateway = useGateway();
const {
data: muxGatewayAccountStatus,
@@ -239,6 +251,35 @@ export function TitleBar(props: TitleBarProps) {
Your settings are controlled by a policy.
)}
+
+
+
+
+
+ {isAnalyticsOpen
+ ? "Close analytics"
+ : `Open analytics (${formatKeybind(KEYBINDS.OPEN_ANALYTICS)})`}
+
+
;
+ return (
+ typeof record.agentId === "string" &&
+ typeof record.costUsd === "number" &&
+ typeof record.tokenCount === "number" &&
+ typeof record.responseCount === "number"
+ );
+}
+
+function AgentCostTooltipContent(props: {
+ active?: boolean;
+ payload?: Array<{ payload?: unknown }>;
+}) {
+ if (!props.active || !props.payload || props.payload.length === 0) {
+ return null;
+ }
+
+ const firstPayload = props.payload[0];
+ if (!firstPayload || !isAgentCostItem(firstPayload.payload)) {
+ return null;
+ }
+
+ const row = firstPayload.payload;
+
+ return (
+
+
{row.agentId}
+
+ Cost
+ {formatUsd(row.costUsd)}
+
+
+ Tokens
+ {formatCompactNumber(row.tokenCount)}
+
+
+ Responses
+ {formatCompactNumber(row.responseCount)}
+
+
+ );
+}
+
+export function AgentCostChart(props: AgentCostChartProps) {
+ const rows = [...(props.data ?? [])].sort((a, b) => b.costUsd - a.costUsd).slice(0, 10);
+
+ return (
+
+
Agent cost breakdown
+
Top agents by cumulative spend.
+
+ {props.error ? (
+
Failed to load agent breakdown: {props.error}
+ ) : props.loading ? (
+
+
+
+ ) : rows.length === 0 ? (
+
+ No agent-level spend data available.
+
+ ) : (
+
+
+
+
+ formatUsd(Number(value))}
+ stroke="var(--color-border-light)"
+ />
+
+ }
+ />
+
+
+
+
+ )}
+
+ );
+}
diff --git a/src/browser/components/analytics/AnalyticsDashboard.tsx b/src/browser/components/analytics/AnalyticsDashboard.tsx
new file mode 100644
index 0000000000..c3ae0b3cf8
--- /dev/null
+++ b/src/browser/components/analytics/AnalyticsDashboard.tsx
@@ -0,0 +1,250 @@
+import { useState } from "react";
+import { ArrowLeft, Menu } from "lucide-react";
+import { useProjectContext } from "@/browser/contexts/ProjectContext";
+import { useRouter } from "@/browser/contexts/RouterContext";
+import {
+ useAnalyticsAgentCostBreakdown,
+ useAnalyticsProviderCacheHitRatio,
+ useAnalyticsSpendByModel,
+ useAnalyticsSpendByProject,
+ useAnalyticsSpendOverTime,
+ useAnalyticsSummary,
+ useAnalyticsTimingDistribution,
+} from "@/browser/hooks/useAnalytics";
+import { DESKTOP_TITLEBAR_HEIGHT_CLASS, isDesktopMode } from "@/browser/hooks/useDesktopTitlebar";
+import { usePersistedState } from "@/browser/hooks/usePersistedState";
+import { Button } from "@/browser/components/ui/button";
+import { cn } from "@/common/lib/utils";
+import { AgentCostChart } from "./AgentCostChart";
+import { ProviderCacheHitChart } from "./ProviderCacheHitChart";
+import { ModelBreakdown } from "./ModelBreakdown";
+import { SpendChart } from "./SpendChart";
+import { SummaryCards } from "./SummaryCards";
+import { TimingChart } from "./TimingChart";
+import { formatProjectDisplayName } from "./analyticsUtils";
+
+interface AnalyticsDashboardProps {
+ leftSidebarCollapsed: boolean;
+ onToggleLeftSidebarCollapsed: () => void;
+}
+
+type TimeRange = "7d" | "30d" | "90d" | "all";
+type TimingMetric = "ttft" | "duration" | "tps";
+
+const VALID_TIME_RANGES = new Set(["7d", "30d", "90d", "all"]);
+const VALID_TIMING_METRICS = new Set(["ttft", "duration", "tps"]);
+
+const ANALYTICS_TIME_RANGE_STORAGE_KEY = "analytics:timeRange";
+const ANALYTICS_TIMING_METRIC_STORAGE_KEY = "analytics:timingMetric";
+
+/** Coerce a persisted value to a known TimeRange, falling back to "30d" if stale/corrupted. */
+function normalizeTimeRange(value: unknown): TimeRange {
+ return typeof value === "string" && VALID_TIME_RANGES.has(value) ? (value as TimeRange) : "30d";
+}
+
+/** Coerce a persisted value to a known TimingMetric, falling back to "duration" if stale/corrupted. */
+function normalizeTimingMetric(value: unknown): TimingMetric {
+ return typeof value === "string" && VALID_TIMING_METRICS.has(value)
+ ? (value as TimingMetric)
+ : "duration";
+}
+
+/** Build a UTC-aligned date boundary N days before today. Using UTC avoids
+ * the backend's `toISOString().slice(0,10)` conversion silently shifting the
+ * day in positive-offset timezones. */
+function utcDaysAgo(days: number): Date {
+ const now = new Date();
+ return new Date(Date.UTC(now.getUTCFullYear(), now.getUTCMonth(), now.getUTCDate() - days));
+}
+
+function computeDateRange(timeRange: TimeRange): {
+ from: Date | null;
+ to: Date | null;
+ granularity: "hour" | "day" | "week";
+} {
+ switch (timeRange) {
+ case "7d":
+ return { from: utcDaysAgo(6), to: null, granularity: "day" };
+ case "30d":
+ return { from: utcDaysAgo(29), to: null, granularity: "day" };
+ case "90d":
+ return { from: utcDaysAgo(89), to: null, granularity: "week" };
+ case "all":
+ return { from: null, to: null, granularity: "week" };
+ default:
+ // Self-heal: unknown persisted value â safe default.
+ return { from: utcDaysAgo(29), to: null, granularity: "day" };
+ }
+}
+
+export function AnalyticsDashboard(props: AnalyticsDashboardProps) {
+ const { navigateFromAnalytics } = useRouter();
+ const { projects } = useProjectContext();
+
+ const [projectPath, setProjectPath] = useState(null);
+ const [rawTimeRange, setTimeRange] = usePersistedState(
+ ANALYTICS_TIME_RANGE_STORAGE_KEY,
+ "30d"
+ );
+ const [rawTimingMetric, setTimingMetric] = usePersistedState(
+ ANALYTICS_TIMING_METRIC_STORAGE_KEY,
+ "duration"
+ );
+
+ // Coerce persisted values to known enums â stale/corrupted localStorage
+ // entries self-heal to defaults instead of crashing the dashboard.
+ const timeRange = normalizeTimeRange(rawTimeRange);
+ const timingMetric = normalizeTimingMetric(rawTimingMetric);
+
+ const dateRange = computeDateRange(timeRange);
+
+ const summary = useAnalyticsSummary(projectPath, {
+ from: dateRange.from,
+ to: dateRange.to,
+ });
+ const spendOverTime = useAnalyticsSpendOverTime({
+ projectPath,
+ granularity: dateRange.granularity,
+ from: dateRange.from,
+ to: dateRange.to,
+ });
+ const spendByProject = useAnalyticsSpendByProject({
+ from: dateRange.from,
+ to: dateRange.to,
+ });
+ const spendByModel = useAnalyticsSpendByModel(projectPath, {
+ from: dateRange.from,
+ to: dateRange.to,
+ });
+ const timingDistribution = useAnalyticsTimingDistribution(timingMetric, projectPath, {
+ from: dateRange.from,
+ to: dateRange.to,
+ });
+ const providerCacheHitRatios = useAnalyticsProviderCacheHitRatio(projectPath, {
+ from: dateRange.from,
+ to: dateRange.to,
+ });
+ const agentCosts = useAnalyticsAgentCostBreakdown(projectPath, {
+ from: dateRange.from,
+ to: dateRange.to,
+ });
+
+ const projectRows = Array.from(projects.entries())
+ .map(([path]) => ({
+ path,
+ label: formatProjectDisplayName(path),
+ }))
+ .sort((a, b) => a.label.localeCompare(b.label));
+
+ const desktopMode = isDesktopMode();
+
+ return (
+
+
+
+ {props.leftSidebarCollapsed && (
+
+ )}
+
+
Analytics
+
+
+
+
+
+
+
+ {(
+ [
+ ["7d", "7D"],
+ ["30d", "30D"],
+ ["90d", "90D"],
+ ["all", "All"],
+ ] as const
+ ).map(([range, label]) => (
+
+ ))}
+
+
+
+
+
+
+ );
+}
diff --git a/src/browser/components/analytics/ModelBreakdown.tsx b/src/browser/components/analytics/ModelBreakdown.tsx
new file mode 100644
index 0000000000..5c79ec6696
--- /dev/null
+++ b/src/browser/components/analytics/ModelBreakdown.tsx
@@ -0,0 +1,157 @@
+import {
+ Bar,
+ BarChart,
+ CartesianGrid,
+ Cell,
+ Legend,
+ Pie,
+ PieChart,
+ ResponsiveContainer,
+ Tooltip,
+ XAxis,
+ YAxis,
+} from "recharts";
+import { Skeleton } from "@/browser/components/ui/skeleton";
+import type {
+ AsyncState,
+ SpendByModelItem,
+ SpendByProjectItem,
+} from "@/browser/hooks/useAnalytics";
+import { ANALYTICS_CHART_COLORS, formatProjectDisplayName, formatUsd } from "./analyticsUtils";
+
+interface ModelBreakdownProps {
+ spendByProject: AsyncState;
+ spendByModel: AsyncState;
+}
+
+interface ProjectChartRow extends SpendByProjectItem {
+ label: string;
+}
+
+export function ModelBreakdown(props: ModelBreakdownProps) {
+ const projectRows: ProjectChartRow[] = (props.spendByProject.data ?? [])
+ .map((row) => ({
+ ...row,
+ label:
+ row.projectName.trim().length > 0
+ ? row.projectName
+ : formatProjectDisplayName(row.projectPath),
+ }))
+ .sort((a, b) => b.costUsd - a.costUsd)
+ .slice(0, 8);
+
+ const modelRows = [...(props.spendByModel.data ?? [])]
+ .sort((a, b) => b.costUsd - a.costUsd)
+ .slice(0, 8);
+
+ return (
+
+
+
Spend by project
+
+ {props.spendByProject.error ? (
+
{props.spendByProject.error}
+ ) : props.spendByProject.loading ? (
+
+
+
+ ) : projectRows.length === 0 ? (
+
+ No project spend data yet.
+
+ ) : (
+
+
+
+
+ formatUsd(Number(value))}
+ stroke="var(--color-border-light)"
+ />
+
+ [formatUsd(Number(value)), "Spend"]}
+ contentStyle={{
+ borderColor: "var(--color-border-medium)",
+ backgroundColor: "var(--color-background-secondary)",
+ borderRadius: "8px",
+ }}
+ />
+
+
+
+
+ )}
+
+
+
+
Spend by model
+
+ {props.spendByModel.error ? (
+
{props.spendByModel.error}
+ ) : props.spendByModel.loading ? (
+
+
+
+ ) : modelRows.length === 0 ? (
+
+ No model spend data yet.
+
+ ) : (
+
+
+
+
+ {modelRows.map((row, index) => (
+ |
+ ))}
+
+ [formatUsd(Number(value)), key]}
+ contentStyle={{
+ borderColor: "var(--color-border-medium)",
+ backgroundColor: "var(--color-background-secondary)",
+ borderRadius: "8px",
+ color: "var(--color-foreground)",
+ }}
+ labelStyle={{ color: "var(--color-foreground)" }}
+ itemStyle={{ color: "var(--color-foreground)" }}
+ />
+
+
+
+ )}
+
+
+ );
+}
diff --git a/src/browser/components/analytics/ProviderCacheHitChart.tsx b/src/browser/components/analytics/ProviderCacheHitChart.tsx
new file mode 100644
index 0000000000..316de02a0a
--- /dev/null
+++ b/src/browser/components/analytics/ProviderCacheHitChart.tsx
@@ -0,0 +1,140 @@
+import { Bar, BarChart, CartesianGrid, ResponsiveContainer, Tooltip, XAxis, YAxis } from "recharts";
+import { Skeleton } from "@/browser/components/ui/skeleton";
+import type { ProviderCacheHitRatioItem } from "@/browser/hooks/useAnalytics";
+import { ANALYTICS_CHART_COLORS, formatCompactNumber, formatPercent } from "./analyticsUtils";
+
+interface ProviderCacheHitChartProps {
+ data: ProviderCacheHitRatioItem[] | null;
+ loading: boolean;
+ error: string | null;
+}
+
+interface ProviderCacheHitChartRow extends ProviderCacheHitRatioItem {
+ providerLabel: string;
+}
+
+const PROVIDER_DISPLAY_NAMES: Record = {
+ anthropic: "Anthropic",
+ openai: "OpenAI",
+ google: "Google",
+ xai: "xAI",
+ unknown: "Unknown",
+};
+
+function formatProviderLabel(provider: string): string {
+ const normalizedProvider = provider.trim().toLowerCase();
+ if (!normalizedProvider) {
+ return "Unknown";
+ }
+
+ return PROVIDER_DISPLAY_NAMES[normalizedProvider] ?? provider;
+}
+
+function isProviderCacheHitChartRow(value: unknown): value is ProviderCacheHitChartRow {
+ if (!value || typeof value !== "object") {
+ return false;
+ }
+
+ const record = value as Partial;
+ return (
+ typeof record.provider === "string" &&
+ typeof record.providerLabel === "string" &&
+ typeof record.cacheHitRatio === "number" &&
+ typeof record.responseCount === "number"
+ );
+}
+
+function ProviderCacheHitTooltipContent(props: {
+ active?: boolean;
+ payload?: Array<{ payload?: unknown }>;
+}) {
+ if (!props.active || !props.payload || props.payload.length === 0) {
+ return null;
+ }
+
+ const firstPayload = props.payload[0];
+ if (!firstPayload || !isProviderCacheHitChartRow(firstPayload.payload)) {
+ return null;
+ }
+
+ const row = firstPayload.payload;
+
+ return (
+
+
{row.providerLabel}
+
+ Cache hit ratio
+ {formatPercent(row.cacheHitRatio)}
+
+
+ Responses
+ {formatCompactNumber(row.responseCount)}
+
+
+ );
+}
+
+export function ProviderCacheHitChart(props: ProviderCacheHitChartProps) {
+ const rows: ProviderCacheHitChartRow[] = [...(props.data ?? [])]
+ .sort((left, right) => right.responseCount - left.responseCount)
+ .slice(0, 10)
+ .map((row) => ({
+ ...row,
+ providerLabel: formatProviderLabel(row.provider),
+ }));
+
+ return (
+
+
Cache hit ratio by provider
+
Prompt cache hit rate grouped by model provider.
+
+ {props.error ? (
+
+ Failed to load provider cache hit ratios: {props.error}
+
+ ) : props.loading ? (
+
+
+
+ ) : rows.length === 0 ? (
+
+ No provider cache hit data available.
+
+ ) : (
+
+
+
+
+ formatPercent(Number(value))}
+ stroke="var(--color-border-light)"
+ />
+
+ }
+ />
+
+
+
+
+ )}
+
+ );
+}
diff --git a/src/browser/components/analytics/SpendChart.tsx b/src/browser/components/analytics/SpendChart.tsx
new file mode 100644
index 0000000000..54c67d531b
--- /dev/null
+++ b/src/browser/components/analytics/SpendChart.tsx
@@ -0,0 +1,118 @@
+import {
+ Bar,
+ BarChart,
+ CartesianGrid,
+ Legend,
+ ResponsiveContainer,
+ Tooltip,
+ XAxis,
+ YAxis,
+} from "recharts";
+import { Skeleton } from "@/browser/components/ui/skeleton";
+import type { SpendOverTimeItem } from "@/browser/hooks/useAnalytics";
+import { ANALYTICS_CHART_COLORS, formatBucketLabel, formatUsd } from "./analyticsUtils";
+
+interface SpendChartProps {
+ data: SpendOverTimeItem[] | null;
+ loading: boolean;
+ error: string | null;
+}
+
+interface SpendChartRow {
+ bucket: string;
+ totalCostUsd: number;
+ [model: string]: string | number;
+}
+
+export function SpendChart(props: SpendChartProps) {
+ if (props.error) {
+ return (
+
+
Spend over time
+
Failed to load chart data: {props.error}
+
+ );
+ }
+
+ const rowsByBucket = new Map();
+ const models: string[] = [];
+
+ for (const item of props.data ?? []) {
+ if (!models.includes(item.model)) {
+ models.push(item.model);
+ }
+
+ const existingRow = rowsByBucket.get(item.bucket) ?? {
+ bucket: item.bucket,
+ totalCostUsd: 0,
+ };
+
+ const currentModelCost =
+ typeof existingRow[item.model] === "number" ? Number(existingRow[item.model]) : 0;
+
+ existingRow[item.model] = currentModelCost + item.costUsd;
+ existingRow.totalCostUsd += item.costUsd;
+
+ rowsByBucket.set(item.bucket, existingRow);
+ }
+
+ const rows = Array.from(rowsByBucket.values()).sort((a, b) => a.bucket.localeCompare(b.bucket));
+
+ return (
+
+
Spend over time
+
Model-attributed spend per time bucket.
+
+ {props.loading ? (
+
+
+
+ ) : rows.length === 0 ? (
+
+ No spend data for the selected filters.
+
+ ) : (
+
+
+
+
+
+ formatUsd(Number(value))}
+ width={64}
+ stroke="var(--color-border-light)"
+ />
+ formatBucketLabel(String(value))}
+ formatter={(value: number, key: string) => [formatUsd(Number(value)), key]}
+ cursor={{ fill: "var(--color-hover)" }}
+ contentStyle={{
+ borderColor: "var(--color-border-medium)",
+ backgroundColor: "var(--color-background-secondary)",
+ borderRadius: "8px",
+ }}
+ />
+
+ {models.map((model, index) => (
+
+ ))}
+
+
+
+ )}
+
+ );
+}
diff --git a/src/browser/components/analytics/SummaryCards.tsx b/src/browser/components/analytics/SummaryCards.tsx
new file mode 100644
index 0000000000..d77e1a4663
--- /dev/null
+++ b/src/browser/components/analytics/SummaryCards.tsx
@@ -0,0 +1,68 @@
+import { Skeleton } from "@/browser/components/ui/skeleton";
+import type { Summary } from "@/browser/hooks/useAnalytics";
+import { formatCompactNumber, formatPercent, formatUsd } from "./analyticsUtils";
+
+interface SummaryCardsProps {
+ data: Summary | null;
+ loading: boolean;
+ error: string | null;
+}
+
+export function SummaryCards(props: SummaryCardsProps) {
+ if (props.error) {
+ return (
+
+ Failed to load analytics summary: {props.error}
+
+ );
+ }
+
+ const totalSpend = props.data ? formatUsd(props.data.totalSpendUsd) : "$0.00";
+ const todaySpend = props.data ? formatUsd(props.data.todaySpendUsd) : "$0.00";
+ const avgDailySpend = props.data ? formatUsd(props.data.avgDailySpendUsd) : "$0.00";
+ const cacheHitRatio = props.data ? formatPercent(props.data.cacheHitRatio) : "0.0%";
+
+ const summaryRows = [
+ {
+ label: "Total Spend",
+ value: totalSpend,
+ helper: props.data ? `${formatCompactNumber(props.data.totalTokens)} tokens` : null,
+ },
+ {
+ label: "Today",
+ value: todaySpend,
+ helper: null,
+ },
+ {
+ label: "Avg / Day",
+ value: avgDailySpend,
+ helper: null,
+ },
+ {
+ label: "Cache Hit Ratio",
+ value: cacheHitRatio,
+ helper: props.data ? `${formatCompactNumber(props.data.totalResponses)} responses` : null,
+ },
+ ] as const;
+
+ return (
+
+ {summaryRows.map((row) => (
+
+
{row.label}
+ {props.loading ? (
+
+ ) : (
+
{row.value}
+ )}
+ {row.helper && !props.loading && (
+
{row.helper}
+ )}
+
+ ))}
+
+ );
+}
diff --git a/src/browser/components/analytics/TimingChart.tsx b/src/browser/components/analytics/TimingChart.tsx
new file mode 100644
index 0000000000..29117b024f
--- /dev/null
+++ b/src/browser/components/analytics/TimingChart.tsx
@@ -0,0 +1,172 @@
+import { Button } from "@/browser/components/ui/button";
+import { Skeleton } from "@/browser/components/ui/skeleton";
+import type { TimingDistribution } from "@/browser/hooks/useAnalytics";
+import {
+ Bar,
+ BarChart,
+ CartesianGrid,
+ ReferenceLine,
+ ResponsiveContainer,
+ Tooltip,
+ XAxis,
+ YAxis,
+} from "recharts";
+import { ANALYTICS_CHART_COLORS } from "./analyticsUtils";
+
+const METRIC_LABELS = {
+ ttft: {
+ label: "TTFT",
+ unitSuffix: "ms",
+ description: "Time to first token",
+ },
+ duration: {
+ label: "Duration",
+ unitSuffix: "ms",
+ description: "End-to-end response duration",
+ },
+ tps: {
+ label: "Output TPS",
+ unitSuffix: " tok/s",
+ description: "Tokens streamed per second",
+ },
+} as const;
+
+type TimingMetric = keyof typeof METRIC_LABELS;
+
+interface TimingChartProps {
+ data: TimingDistribution | null;
+ loading: boolean;
+ error: string | null;
+ metric: TimingMetric;
+ onMetricChange: (metric: TimingMetric) => void;
+}
+
+function formatMetricValue(value: number, metric: TimingMetric): string {
+ if (!Number.isFinite(value)) {
+ return `0${METRIC_LABELS[metric].unitSuffix}`;
+ }
+
+ if (metric === "tps") {
+ return `${value.toFixed(2)}${METRIC_LABELS[metric].unitSuffix}`;
+ }
+
+ return `${Math.round(value)}${METRIC_LABELS[metric].unitSuffix}`;
+}
+
+export function TimingChart(props: TimingChartProps) {
+ return (
+
+
+
+
Timing distribution
+
{METRIC_LABELS[props.metric].description}
+
+
+ {(Object.keys(METRIC_LABELS) as TimingMetric[]).map((metric) => (
+
+ ))}
+
+
+
+ {props.error ? (
+
+ Failed to load timing distribution: {props.error}
+
+ ) : props.loading ? (
+
+
+
+ ) : !props.data || props.data.histogram.length === 0 ? (
+
+ No timing data available yet.
+
+ ) : (
+
+
+
+
+ formatMetricValue(Number(value), props.metric)}
+ stroke="var(--color-border-light)"
+ />
+
+ formatMetricValue(Number(value), props.metric)}
+ formatter={(value: number) => [value, "Responses"]}
+ contentStyle={{
+ borderColor: "var(--color-border-medium)",
+ backgroundColor: "var(--color-background-secondary)",
+ borderRadius: "8px",
+ }}
+ />
+
+
+
+
+
+
+
+ )}
+
+ {!props.loading && !props.error && props.data && (
+
+
+ p50:{" "}
+ {formatMetricValue(props.data.p50, props.metric)}
+
+
+ p90:{" "}
+ {formatMetricValue(props.data.p90, props.metric)}
+
+
+ p99:{" "}
+ {formatMetricValue(props.data.p99, props.metric)}
+
+
+ )}
+
+ );
+}
diff --git a/src/browser/components/analytics/analyticsUtils.ts b/src/browser/components/analytics/analyticsUtils.ts
new file mode 100644
index 0000000000..39c471cffa
--- /dev/null
+++ b/src/browser/components/analytics/analyticsUtils.ts
@@ -0,0 +1,82 @@
+import assert from "@/common/utils/assert";
+
+// Shared color palette for all analytics charts.
+// Uses theme tokens so colors remain legible in both dark and light themes.
+export const ANALYTICS_CHART_COLORS = [
+ "var(--color-plan-mode)",
+ "var(--color-exec-mode)",
+ "var(--color-task-mode)",
+ "var(--color-success)",
+ "var(--color-warning)",
+ "var(--color-danger)",
+ "var(--color-info)",
+ "var(--color-ask-mode)",
+] as const;
+
+const usdFormatter = new Intl.NumberFormat("en-US", {
+ style: "currency",
+ currency: "USD",
+ minimumFractionDigits: 2,
+ maximumFractionDigits: 2,
+});
+
+const compactNumberFormatter = new Intl.NumberFormat("en-US", {
+ notation: "compact",
+ maximumFractionDigits: 1,
+});
+
+const BUCKET_TIME_COMPONENT_PATTERN = /(?:^|[ T])\d{2}:\d{2}(?::\d{2}(?:\.\d+)?)?/;
+
+export function formatUsd(amount: number): string {
+ if (!Number.isFinite(amount)) {
+ return "$0.00";
+ }
+ return usdFormatter.format(amount);
+}
+
+export function formatPercent(ratio: number): string {
+ if (!Number.isFinite(ratio)) {
+ return "0.0%";
+ }
+
+ const normalizedRatio = ratio <= 1 ? ratio * 100 : ratio;
+ return `${normalizedRatio.toFixed(1)}%`;
+}
+
+export function formatCompactNumber(value: number): string {
+ if (!Number.isFinite(value)) {
+ return "0";
+ }
+ return compactNumberFormatter.format(value);
+}
+
+export function formatProjectDisplayName(projectPath: string): string {
+ assert(typeof projectPath === "string", "projectPath must be a string");
+ const pathSegments = projectPath.split(/[\\/]/).filter(Boolean);
+ return pathSegments[pathSegments.length - 1] ?? projectPath;
+}
+
+export function formatBucketLabel(bucket: string): string {
+ const parsedDate = new Date(bucket);
+ if (!Number.isFinite(parsedDate.getTime())) {
+ return bucket;
+ }
+
+ const includesTime = BUCKET_TIME_COMPONENT_PATTERN.test(bucket);
+ if (includesTime) {
+ return parsedDate.toLocaleString(undefined, {
+ month: "short",
+ day: "numeric",
+ hour: "numeric",
+ });
+ }
+
+ // Date-only buckets (YYYY-MM-DD) are UTC midnight. Render with
+ // timeZone: "UTC" so west-of-UTC locales don't shift the displayed day
+ // (e.g. 2026-02-01 showing as "Jan 31" in PST).
+ return parsedDate.toLocaleDateString(undefined, {
+ month: "short",
+ day: "numeric",
+ timeZone: "UTC",
+ });
+}
diff --git a/src/browser/contexts/RouterContext.tsx b/src/browser/contexts/RouterContext.tsx
index 0f8e6020ad..79508270fd 100644
--- a/src/browser/contexts/RouterContext.tsx
+++ b/src/browser/contexts/RouterContext.tsx
@@ -20,6 +20,8 @@ export interface RouterContext {
navigateToHome: () => void;
navigateToSettings: (section?: string) => void;
navigateFromSettings: () => void;
+ navigateToAnalytics: () => void;
+ navigateFromAnalytics: () => void;
currentWorkspaceId: string | null;
/** Settings section from URL (null when not on settings page). */
@@ -36,6 +38,9 @@ export interface RouterContext {
/** Draft ID for UI-only workspace creation drafts (from URL) */
pendingDraftId: string | null;
+
+ /** True when the analytics dashboard route is active. */
+ isAnalyticsOpen: boolean;
}
const RouterContext = createContext(undefined);
@@ -114,6 +119,7 @@ function RouterContextInner(props: { children: ReactNode }) {
location.pathname === "/project" ? getProjectPathFromLocationState(location.state) : null;
const settingsMatch = /^\/settings\/([^/]+)$/.exec(location.pathname);
const currentSettingsSection = settingsMatch ? decodeURIComponent(settingsMatch[1]) : null;
+ const isAnalyticsOpen = location.pathname === "/analytics";
interface NonSettingsLocationSnapshot {
url: string;
@@ -123,16 +129,27 @@ function RouterContextInner(props: { children: ReactNode }) {
// When leaving settings, we need to restore the *full* previous location including
// any in-memory navigation state (e.g. /project relies on { projectPath } state, and
// the legacy ?path= deep link rewrite stores that path in location.state).
+ // Include /analytics so Settings opened from Analytics can close back to Analytics.
const lastNonSettingsLocationRef = useRef({
url: getInitialRoute(),
state: null,
});
+ // Keep a separate "close analytics" snapshot that intentionally excludes /analytics so
+ // closing analytics still returns to the last non-analytics route.
+ const lastNonAnalyticsLocationRef = useRef({
+ url: getInitialRoute(),
+ state: null,
+ });
useEffect(() => {
if (!location.pathname.startsWith("/settings")) {
- lastNonSettingsLocationRef.current = {
+ const locationSnapshot: NonSettingsLocationSnapshot = {
url: location.pathname + location.search,
state: location.state,
};
+ lastNonSettingsLocationRef.current = locationSnapshot;
+ if (location.pathname !== "/analytics") {
+ lastNonAnalyticsLocationRef.current = locationSnapshot;
+ }
}
}, [location.pathname, location.search, location.state]);
@@ -201,6 +218,23 @@ function RouterContextInner(props: { children: ReactNode }) {
void navigateRef.current(lastLocation.url, { state: lastLocation.state });
}, []);
+ const navigateToAnalytics = useCallback(() => {
+ void navigateRef.current("/analytics");
+ }, []);
+
+ const navigateFromAnalytics = useCallback(() => {
+ const lastLocation = lastNonAnalyticsLocationRef.current;
+ if (
+ !lastLocation.url ||
+ lastLocation.url.startsWith("/settings") ||
+ lastLocation.url === "/analytics"
+ ) {
+ void navigateRef.current("/");
+ return;
+ }
+ void navigateRef.current(lastLocation.url, { state: lastLocation.state });
+ }, []);
+
const value = useMemo(
() => ({
navigateToWorkspace,
@@ -208,18 +242,23 @@ function RouterContextInner(props: { children: ReactNode }) {
navigateToHome,
navigateToSettings,
navigateFromSettings,
+ navigateToAnalytics,
+ navigateFromAnalytics,
currentWorkspaceId,
currentSettingsSection,
currentProjectId,
currentProjectPathFromState,
pendingSectionId,
pendingDraftId,
+ isAnalyticsOpen,
}),
[
navigateToHome,
navigateToProject,
navigateToSettings,
navigateFromSettings,
+ navigateToAnalytics,
+ navigateFromAnalytics,
navigateToWorkspace,
currentWorkspaceId,
currentSettingsSection,
@@ -227,6 +266,7 @@ function RouterContextInner(props: { children: ReactNode }) {
currentProjectPathFromState,
pendingSectionId,
pendingDraftId,
+ isAnalyticsOpen,
]
);
diff --git a/src/browser/hooks/useAnalytics.test.tsx b/src/browser/hooks/useAnalytics.test.tsx
new file mode 100644
index 0000000000..561007194f
--- /dev/null
+++ b/src/browser/hooks/useAnalytics.test.tsx
@@ -0,0 +1,243 @@
+import { afterEach, beforeEach, describe, expect, mock, test } from "bun:test";
+import { cleanup, renderHook, waitFor } from "@testing-library/react";
+import { GlobalWindow } from "happy-dom";
+import { RPCLink as HTTPRPCLink } from "@orpc/client/fetch";
+import { createORPCClient } from "@orpc/client";
+import type { RouterClient } from "@orpc/server";
+import type { AppRouter } from "@/node/orpc/router";
+import type { OrpcServer } from "@/node/orpc/server";
+import type { ORPCContext } from "@/node/orpc/context";
+import type { AnalyticsService } from "@/node/services/analytics/analyticsService";
+import {
+ useAnalyticsProviderCacheHitRatio,
+ useAnalyticsSpendByModel,
+ useAnalyticsSummary,
+ type Summary,
+} from "./useAnalytics";
+
+const ANALYTICS_UNAVAILABLE_MESSAGE = "Analytics backend is not available in this build.";
+
+const summaryFixture: Summary = {
+ totalSpendUsd: 42.25,
+ todaySpendUsd: 1.75,
+ avgDailySpendUsd: 5.28125,
+ cacheHitRatio: 0.18,
+ totalTokens: 4200,
+ totalResponses: 84,
+};
+
+interface AnalyticsServiceCalls {
+ summary: Array<{
+ projectPath: string | null;
+ from: Date | null | undefined;
+ to: Date | null | undefined;
+ }>;
+ spendByModel: Array<{
+ projectPath: string | null;
+ from: Date | null | undefined;
+ to: Date | null | undefined;
+ }>;
+ cacheHitRatioByProvider: Array<{
+ projectPath: string | null;
+ from: Date | null | undefined;
+ to: Date | null | undefined;
+ }>;
+}
+
+let currentApiClient: RouterClient | null = null;
+let analyticsServiceCalls: AnalyticsServiceCalls | null = null;
+
+void mock.module("@/browser/contexts/API", () => ({
+ useAPI: () => ({ api: currentApiClient }),
+}));
+
+function createHttpClient(baseUrl: string): RouterClient {
+ const link = new HTTPRPCLink({
+ url: `${baseUrl}/orpc`,
+ });
+
+ // eslint-disable-next-line @typescript-eslint/no-unnecessary-type-assertion -- typed test helper
+ return createORPCClient(link) as RouterClient;
+}
+
+type AnalyticsServiceStub = Pick<
+ AnalyticsService,
+ | "getSummary"
+ | "getSpendOverTime"
+ | "getSpendByProject"
+ | "getSpendByModel"
+ | "getTimingDistribution"
+ | "getAgentCostBreakdown"
+ | "getCacheHitRatioByProvider"
+ | "rebuildAll"
+ | "clearWorkspace"
+ | "ingestWorkspace"
+>;
+
+function createAnalyticsServiceStub(summary: Summary): {
+ service: AnalyticsServiceStub;
+ calls: AnalyticsServiceCalls;
+} {
+ const calls: AnalyticsServiceCalls = {
+ summary: [],
+ spendByModel: [],
+ cacheHitRatioByProvider: [],
+ };
+
+ return {
+ calls,
+ service: {
+ getSummary: (projectPath, from, to) => {
+ calls.summary.push({ projectPath, from, to });
+ return Promise.resolve(summary);
+ },
+ getSpendOverTime: () => Promise.resolve([]),
+ getSpendByProject: () => Promise.resolve([]),
+ getSpendByModel: (projectPath, from, to) => {
+ calls.spendByModel.push({ projectPath, from, to });
+ return Promise.resolve([]);
+ },
+ getTimingDistribution: () => Promise.resolve({ p50: 0, p90: 0, p99: 0, histogram: [] }),
+ getAgentCostBreakdown: () => Promise.resolve([]),
+ getCacheHitRatioByProvider: (projectPath, from, to) => {
+ calls.cacheHitRatioByProvider.push({ projectPath, from, to });
+ return Promise.resolve([]);
+ },
+ rebuildAll: () => Promise.resolve({ success: true, workspacesIngested: 0 }),
+ clearWorkspace: () => undefined,
+ ingestWorkspace: () => undefined,
+ },
+ };
+}
+
+function requireAnalyticsServiceCalls(): AnalyticsServiceCalls {
+ if (!analyticsServiceCalls) {
+ throw new Error("Expected analytics service call tracking to be initialized");
+ }
+ return analyticsServiceCalls;
+}
+
+describe("useAnalytics hooks", () => {
+ let server: OrpcServer | null = null;
+
+ beforeEach(async () => {
+ globalThis.window = new GlobalWindow() as unknown as Window & typeof globalThis;
+ globalThis.document = globalThis.window.document;
+
+ const analyticsStub = createAnalyticsServiceStub(summaryFixture);
+ analyticsServiceCalls = analyticsStub.calls;
+
+ const context: Partial = {
+ analyticsService: analyticsStub.service as unknown as ORPCContext["analyticsService"],
+ };
+
+ // eslint-disable-next-line no-restricted-syntax -- test-only dynamic import avoids browser/node boundary lint
+ const { createOrpcServer } = await import("@/node/orpc/server");
+
+ server = await createOrpcServer({
+ host: "127.0.0.1",
+ port: 0,
+ context: context as ORPCContext,
+ onOrpcError: () => undefined,
+ });
+
+ currentApiClient = createHttpClient(server.baseUrl);
+ });
+
+ afterEach(async () => {
+ cleanup();
+ currentApiClient = null;
+ analyticsServiceCalls = null;
+ await server?.close();
+ server = null;
+ globalThis.window = undefined as unknown as Window & typeof globalThis;
+ globalThis.document = undefined as unknown as Document;
+ });
+
+ test("loads summary from a real ORPC client without backend-unavailable false negatives", async () => {
+ const apiClient = currentApiClient;
+ expect(apiClient).not.toBeNull();
+ if (!apiClient) {
+ throw new Error("Expected ORPC test client to be initialized");
+ }
+
+ // Regression guard: analytics namespace can be a callable proxy function.
+ expect(typeof (apiClient as { analytics: unknown }).analytics).toBe("function");
+
+ const { result } = renderHook(() => useAnalyticsSummary());
+
+ await waitFor(() => expect(result.current.loading).toBe(false));
+
+ expect(result.current.error).not.toBe(ANALYTICS_UNAVAILABLE_MESSAGE);
+ expect(result.current.error).toBeNull();
+ expect(result.current.data).toEqual(summaryFixture);
+ });
+
+ test("forwards from/to filters to summary endpoint", async () => {
+ const from = new Date("2026-01-05T00:00:00.000Z");
+ const to = new Date("2026-01-20T00:00:00.000Z");
+
+ const { result } = renderHook(() => useAnalyticsSummary("/tmp/project", { from, to }));
+
+ await waitFor(() => expect(result.current.loading).toBe(false));
+
+ const calls = requireAnalyticsServiceCalls().summary;
+ expect(calls.length).toBeGreaterThan(0);
+
+ const latest = calls.at(-1);
+ expect(latest).toBeDefined();
+ if (!latest || !(latest.from instanceof Date) || !(latest.to instanceof Date)) {
+ throw new Error("Expected summary call to include Date filters");
+ }
+
+ expect(latest.projectPath).toBe("/tmp/project");
+ expect(latest.from.toISOString()).toBe(from.toISOString());
+ expect(latest.to.toISOString()).toBe(to.toISOString());
+ });
+
+ test("forwards from/to filters to spend-by-model endpoint", async () => {
+ const from = new Date("2026-01-07T00:00:00.000Z");
+ const to = new Date("2026-01-27T00:00:00.000Z");
+
+ const { result } = renderHook(() => useAnalyticsSpendByModel("/tmp/project", { from, to }));
+
+ await waitFor(() => expect(result.current.loading).toBe(false));
+
+ const calls = requireAnalyticsServiceCalls().spendByModel;
+ expect(calls.length).toBeGreaterThan(0);
+
+ const latest = calls.at(-1);
+ expect(latest).toBeDefined();
+ if (!latest || !(latest.from instanceof Date) || !(latest.to instanceof Date)) {
+ throw new Error("Expected spend-by-model call to include Date filters");
+ }
+
+ expect(latest.projectPath).toBe("/tmp/project");
+ expect(latest.from.toISOString()).toBe(from.toISOString());
+ expect(latest.to.toISOString()).toBe(to.toISOString());
+ });
+
+ test("forwards from/to filters to provider cache-hit-ratio endpoint", async () => {
+ const from = new Date("2026-01-09T00:00:00.000Z");
+ const to = new Date("2026-01-30T00:00:00.000Z");
+
+ const { result } = renderHook(() =>
+ useAnalyticsProviderCacheHitRatio("/tmp/project", { from, to })
+ );
+
+ await waitFor(() => expect(result.current.loading).toBe(false));
+
+ const calls = requireAnalyticsServiceCalls().cacheHitRatioByProvider;
+ expect(calls.length).toBeGreaterThan(0);
+
+ const latest = calls.at(-1);
+ expect(latest).toBeDefined();
+ if (!latest || !(latest.from instanceof Date) || !(latest.to instanceof Date)) {
+ throw new Error("Expected provider cache-hit-ratio call to include Date filters");
+ }
+
+ expect(latest.projectPath).toBe("/tmp/project");
+ expect(latest.from.toISOString()).toBe(from.toISOString());
+ expect(latest.to.toISOString()).toBe(to.toISOString());
+ });
+});
diff --git a/src/browser/hooks/useAnalytics.ts b/src/browser/hooks/useAnalytics.ts
new file mode 100644
index 0000000000..066367af3c
--- /dev/null
+++ b/src/browser/hooks/useAnalytics.ts
@@ -0,0 +1,566 @@
+import assert from "@/common/utils/assert";
+import { useEffect, useState } from "react";
+import type { z } from "zod";
+import type { APIClient } from "@/browser/contexts/API";
+import { useAPI } from "@/browser/contexts/API";
+import type { analytics } from "@/common/orpc/schemas/analytics";
+import { getErrorMessage } from "@/common/utils/errors";
+
+export type Summary = z.infer<typeof analytics.getSummary.output>;
+export type SpendOverTimeItem = z.infer<typeof analytics.getSpendOverTime.output>[number];
+export type SpendByProjectItem = z.infer<typeof analytics.getSpendByProject.output>[number];
+export type SpendByModelItem = z.infer<typeof analytics.getSpendByModel.output>[number];
+export type TimingDistribution = z.infer<typeof analytics.getTimingDistribution.output>;
+export type AgentCostItem = z.infer<typeof analytics.getAgentCostBreakdown.output>[number];
+export type ProviderCacheHitRatioItem = z.infer<
+ typeof analytics.getCacheHitRatioByProvider.output
+>[number];
+
+export interface AsyncState<T> {
+ data: T | null;
+ loading: boolean;
+ error: string | null;
+}
+
+type SummaryInput = z.input<typeof analytics.getSummary.input>;
+type SpendOverTimeInput = z.input<typeof analytics.getSpendOverTime.input>;
+type SpendByProjectInput = z.input<typeof analytics.getSpendByProject.input>;
+type SpendByModelInput = z.input<typeof analytics.getSpendByModel.input>;
+type TimingDistributionInput = z.input<typeof analytics.getTimingDistribution.input>;
+type AgentCostBreakdownInput = z.input<typeof analytics.getAgentCostBreakdown.input>;
+type ProviderCacheHitRatioInput = z.input<typeof analytics.getCacheHitRatioByProvider.input>;
+
+interface DateFilterParams {
+ from?: Date | null;
+ to?: Date | null;
+}
+
+interface AnalyticsNamespace {
+  getSummary: (input: SummaryInput) => Promise<Summary>;
+  getSpendOverTime: (input: SpendOverTimeInput) => Promise<SpendOverTimeItem[]>;
+  getSpendByProject: (input: SpendByProjectInput) => Promise<SpendByProjectItem[]>;
+  getSpendByModel: (input: SpendByModelInput) => Promise<SpendByModelItem[]>;
+  getTimingDistribution: (input: TimingDistributionInput) => Promise<TimingDistribution>;
+  getAgentCostBreakdown: (input: AgentCostBreakdownInput) => Promise<AgentCostItem[]>;
+  getCacheHitRatioByProvider: (
+    input: ProviderCacheHitRatioInput
+  ) => Promise<ProviderCacheHitRatioItem[]>;
+}
+
+const ANALYTICS_UNAVAILABLE_MESSAGE = "Analytics backend is not available in this build.";
+
+function getAnalyticsNamespace(api: APIClient): AnalyticsNamespace | null {
+ const candidate = (api as { analytics?: unknown }).analytics;
+ // ORPC client namespaces can be proxy objects or callable proxy functions
+ // depending on transport/runtime shape. Accept both so we don't
+ // misclassify a valid analytics backend as unavailable.
+ if (!candidate || (typeof candidate !== "object" && typeof candidate !== "function")) {
+ return null;
+ }
+
+  const maybeNamespace = candidate as Partial<AnalyticsNamespace>;
+ if (
+ typeof maybeNamespace.getSummary !== "function" ||
+ typeof maybeNamespace.getSpendOverTime !== "function" ||
+ typeof maybeNamespace.getSpendByProject !== "function" ||
+ typeof maybeNamespace.getSpendByModel !== "function" ||
+ typeof maybeNamespace.getTimingDistribution !== "function" ||
+ typeof maybeNamespace.getAgentCostBreakdown !== "function" ||
+ typeof maybeNamespace.getCacheHitRatioByProvider !== "function"
+ ) {
+ return null;
+ }
+
+ return maybeNamespace as AnalyticsNamespace;
+}
+
+export function useAnalyticsSummary(
+ projectPath?: string | null,
+ dateFilters?: DateFilterParams
+): AsyncState<Summary> {
+ const fromMs = dateFilters?.from?.getTime() ?? null;
+ const toMs = dateFilters?.to?.getTime() ?? null;
+
+ const { api } = useAPI();
+  const [state, setState] = useState<AsyncState<Summary>>({
+ data: null,
+ loading: true,
+ error: null,
+ });
+
+ useEffect(() => {
+ if (!api) {
+ setState((previousState) => ({
+ data: previousState.data,
+ loading: true,
+ error: null,
+ }));
+ return;
+ }
+
+ const analyticsApi = getAnalyticsNamespace(api);
+ if (!analyticsApi) {
+ setState({ data: null, loading: false, error: ANALYTICS_UNAVAILABLE_MESSAGE });
+ return;
+ }
+
+ let ignore = false;
+ setState((previousState) => ({
+ data: previousState.data,
+ loading: true,
+ error: null,
+ }));
+
+ const fromDate = fromMs == null ? null : new Date(fromMs);
+ const toDate = toMs == null ? null : new Date(toMs);
+
+ void analyticsApi
+ .getSummary({ projectPath: projectPath ?? null, from: fromDate, to: toDate })
+ .then((data) => {
+ if (ignore) {
+ return;
+ }
+ setState({ data, loading: false, error: null });
+ })
+ .catch((error: unknown) => {
+ if (ignore) {
+ return;
+ }
+ setState((previousState) => ({
+ data: previousState.data,
+ loading: false,
+ error: getErrorMessage(error),
+ }));
+ });
+
+ return () => {
+ ignore = true;
+ };
+ }, [api, projectPath, fromMs, toMs]);
+
+ return state;
+}
+
+export function useAnalyticsSpendOverTime(params: {
+ projectPath?: string | null;
+ granularity: "hour" | "day" | "week";
+ from?: Date | null;
+ to?: Date | null;
+}): AsyncState<SpendOverTimeItem[]> {
+ assert(
+ params.granularity === "hour" || params.granularity === "day" || params.granularity === "week",
+ "useAnalyticsSpendOverTime requires a valid granularity"
+ );
+
+ const fromMs = params.from?.getTime() ?? null;
+ const toMs = params.to?.getTime() ?? null;
+
+ const { api } = useAPI();
+  const [state, setState] = useState<AsyncState<SpendOverTimeItem[]>>({
+ data: null,
+ loading: true,
+ error: null,
+ });
+
+ useEffect(() => {
+ if (!api) {
+ setState((previousState) => ({
+ data: previousState.data,
+ loading: true,
+ error: null,
+ }));
+ return;
+ }
+
+ const analyticsApi = getAnalyticsNamespace(api);
+ if (!analyticsApi) {
+ setState({ data: null, loading: false, error: ANALYTICS_UNAVAILABLE_MESSAGE });
+ return;
+ }
+
+ let ignore = false;
+ setState((previousState) => ({
+ data: previousState.data,
+ loading: true,
+ error: null,
+ }));
+
+ const fromDate = fromMs == null ? null : new Date(fromMs);
+ const toDate = toMs == null ? null : new Date(toMs);
+
+ void analyticsApi
+ .getSpendOverTime({
+ projectPath: params.projectPath ?? null,
+ granularity: params.granularity,
+ from: fromDate,
+ to: toDate,
+ })
+ .then((data) => {
+ if (ignore) {
+ return;
+ }
+ setState({ data, loading: false, error: null });
+ })
+ .catch((error: unknown) => {
+ if (ignore) {
+ return;
+ }
+ setState((previousState) => ({
+ data: previousState.data,
+ loading: false,
+ error: getErrorMessage(error),
+ }));
+ });
+
+ return () => {
+ ignore = true;
+ };
+ }, [api, params.projectPath, params.granularity, fromMs, toMs]);
+
+ return state;
+}
+
+export function useAnalyticsSpendByProject(
+ dateFilters?: DateFilterParams
+): AsyncState<SpendByProjectItem[]> {
+ const fromMs = dateFilters?.from?.getTime() ?? null;
+ const toMs = dateFilters?.to?.getTime() ?? null;
+
+ const { api } = useAPI();
+  const [state, setState] = useState<AsyncState<SpendByProjectItem[]>>({
+ data: null,
+ loading: true,
+ error: null,
+ });
+
+ useEffect(() => {
+ if (!api) {
+ setState((previousState) => ({
+ data: previousState.data,
+ loading: true,
+ error: null,
+ }));
+ return;
+ }
+
+ const analyticsApi = getAnalyticsNamespace(api);
+ if (!analyticsApi) {
+ setState({ data: null, loading: false, error: ANALYTICS_UNAVAILABLE_MESSAGE });
+ return;
+ }
+
+ let ignore = false;
+ setState((previousState) => ({
+ data: previousState.data,
+ loading: true,
+ error: null,
+ }));
+
+ const fromDate = fromMs == null ? null : new Date(fromMs);
+ const toDate = toMs == null ? null : new Date(toMs);
+
+ void analyticsApi
+ .getSpendByProject({ from: fromDate, to: toDate })
+ .then((data) => {
+ if (ignore) {
+ return;
+ }
+ setState({ data, loading: false, error: null });
+ })
+ .catch((error: unknown) => {
+ if (ignore) {
+ return;
+ }
+ setState((previousState) => ({
+ data: previousState.data,
+ loading: false,
+ error: getErrorMessage(error),
+ }));
+ });
+
+ return () => {
+ ignore = true;
+ };
+ }, [api, fromMs, toMs]);
+
+ return state;
+}
+
+export function useAnalyticsSpendByModel(
+ projectPath?: string | null,
+ dateFilters?: DateFilterParams
+): AsyncState<SpendByModelItem[]> {
+ const fromMs = dateFilters?.from?.getTime() ?? null;
+ const toMs = dateFilters?.to?.getTime() ?? null;
+
+ const { api } = useAPI();
+  const [state, setState] = useState<AsyncState<SpendByModelItem[]>>({
+ data: null,
+ loading: true,
+ error: null,
+ });
+
+ useEffect(() => {
+ if (!api) {
+ setState((previousState) => ({
+ data: previousState.data,
+ loading: true,
+ error: null,
+ }));
+ return;
+ }
+
+ const analyticsApi = getAnalyticsNamespace(api);
+ if (!analyticsApi) {
+ setState({ data: null, loading: false, error: ANALYTICS_UNAVAILABLE_MESSAGE });
+ return;
+ }
+
+ let ignore = false;
+ setState((previousState) => ({
+ data: previousState.data,
+ loading: true,
+ error: null,
+ }));
+
+ const fromDate = fromMs == null ? null : new Date(fromMs);
+ const toDate = toMs == null ? null : new Date(toMs);
+
+ void analyticsApi
+ .getSpendByModel({ projectPath: projectPath ?? null, from: fromDate, to: toDate })
+ .then((data) => {
+ if (ignore) {
+ return;
+ }
+ setState({ data, loading: false, error: null });
+ })
+ .catch((error: unknown) => {
+ if (ignore) {
+ return;
+ }
+ setState((previousState) => ({
+ data: previousState.data,
+ loading: false,
+ error: getErrorMessage(error),
+ }));
+ });
+
+ return () => {
+ ignore = true;
+ };
+ }, [api, projectPath, fromMs, toMs]);
+
+ return state;
+}
+
+export function useAnalyticsTimingDistribution(
+ metric: "ttft" | "duration" | "tps",
+ projectPath?: string | null,
+ dateFilters?: DateFilterParams
+): AsyncState<TimingDistribution> {
+ assert(
+ metric === "ttft" || metric === "duration" || metric === "tps",
+ "useAnalyticsTimingDistribution requires a valid metric"
+ );
+
+ const fromMs = dateFilters?.from?.getTime() ?? null;
+ const toMs = dateFilters?.to?.getTime() ?? null;
+
+ const { api } = useAPI();
+  const [state, setState] = useState<AsyncState<TimingDistribution>>({
+ data: null,
+ loading: true,
+ error: null,
+ });
+
+ useEffect(() => {
+ if (!api) {
+ setState((previousState) => ({
+ data: previousState.data,
+ loading: true,
+ error: null,
+ }));
+ return;
+ }
+
+ const analyticsApi = getAnalyticsNamespace(api);
+ if (!analyticsApi) {
+ setState({ data: null, loading: false, error: ANALYTICS_UNAVAILABLE_MESSAGE });
+ return;
+ }
+
+ let ignore = false;
+ setState((previousState) => ({
+ data: previousState.data,
+ loading: true,
+ error: null,
+ }));
+
+ const fromDate = fromMs == null ? null : new Date(fromMs);
+ const toDate = toMs == null ? null : new Date(toMs);
+
+ void analyticsApi
+ .getTimingDistribution({
+ metric,
+ projectPath: projectPath ?? null,
+ from: fromDate,
+ to: toDate,
+ })
+ .then((data) => {
+ if (ignore) {
+ return;
+ }
+ setState({ data, loading: false, error: null });
+ })
+ .catch((error: unknown) => {
+ if (ignore) {
+ return;
+ }
+ setState((previousState) => ({
+ data: previousState.data,
+ loading: false,
+ error: getErrorMessage(error),
+ }));
+ });
+
+ return () => {
+ ignore = true;
+ };
+ }, [api, metric, projectPath, fromMs, toMs]);
+
+ return state;
+}
+
+export function useAnalyticsProviderCacheHitRatio(
+ projectPath?: string | null,
+ dateFilters?: DateFilterParams
+): AsyncState<ProviderCacheHitRatioItem[]> {
+ const fromMs = dateFilters?.from?.getTime() ?? null;
+ const toMs = dateFilters?.to?.getTime() ?? null;
+
+ const { api } = useAPI();
+  const [state, setState] = useState<AsyncState<ProviderCacheHitRatioItem[]>>({
+ data: null,
+ loading: true,
+ error: null,
+ });
+
+ useEffect(() => {
+ if (!api) {
+ setState((previousState) => ({
+ data: previousState.data,
+ loading: true,
+ error: null,
+ }));
+ return;
+ }
+
+ const analyticsApi = getAnalyticsNamespace(api);
+ if (!analyticsApi) {
+ setState({ data: null, loading: false, error: ANALYTICS_UNAVAILABLE_MESSAGE });
+ return;
+ }
+
+ let ignore = false;
+ setState((previousState) => ({
+ data: previousState.data,
+ loading: true,
+ error: null,
+ }));
+
+ const fromDate = fromMs == null ? null : new Date(fromMs);
+ const toDate = toMs == null ? null : new Date(toMs);
+
+ void analyticsApi
+ .getCacheHitRatioByProvider({ projectPath: projectPath ?? null, from: fromDate, to: toDate })
+ .then((data) => {
+ if (ignore) {
+ return;
+ }
+ setState({ data, loading: false, error: null });
+ })
+ .catch((error: unknown) => {
+ if (ignore) {
+ return;
+ }
+ setState((previousState) => ({
+ data: previousState.data,
+ loading: false,
+ error: getErrorMessage(error),
+ }));
+ });
+
+ return () => {
+ ignore = true;
+ };
+ }, [api, projectPath, fromMs, toMs]);
+
+ return state;
+}
+
+export function useAnalyticsAgentCostBreakdown(
+ projectPath?: string | null,
+ dateFilters?: DateFilterParams
+): AsyncState<AgentCostItem[]> {
+ const fromMs = dateFilters?.from?.getTime() ?? null;
+ const toMs = dateFilters?.to?.getTime() ?? null;
+
+ const { api } = useAPI();
+  const [state, setState] = useState<AsyncState<AgentCostItem[]>>({
+ data: null,
+ loading: true,
+ error: null,
+ });
+
+ useEffect(() => {
+ if (!api) {
+ setState((previousState) => ({
+ data: previousState.data,
+ loading: true,
+ error: null,
+ }));
+ return;
+ }
+
+ const analyticsApi = getAnalyticsNamespace(api);
+ if (!analyticsApi) {
+ setState({ data: null, loading: false, error: ANALYTICS_UNAVAILABLE_MESSAGE });
+ return;
+ }
+
+ let ignore = false;
+ setState((previousState) => ({
+ data: previousState.data,
+ loading: true,
+ error: null,
+ }));
+
+ const fromDate = fromMs == null ? null : new Date(fromMs);
+ const toDate = toMs == null ? null : new Date(toMs);
+
+ void analyticsApi
+ .getAgentCostBreakdown({ projectPath: projectPath ?? null, from: fromDate, to: toDate })
+ .then((data) => {
+ if (ignore) {
+ return;
+ }
+ setState({ data, loading: false, error: null });
+ })
+ .catch((error: unknown) => {
+ if (ignore) {
+ return;
+ }
+ setState((previousState) => ({
+ data: previousState.data,
+ loading: false,
+ error: getErrorMessage(error),
+ }));
+ });
+
+ return () => {
+ ignore = true;
+ };
+ }, [api, projectPath, fromMs, toMs]);
+
+ return state;
+}
diff --git a/src/browser/stories/App.analytics.stories.tsx b/src/browser/stories/App.analytics.stories.tsx
new file mode 100644
index 0000000000..c0620dd337
--- /dev/null
+++ b/src/browser/stories/App.analytics.stories.tsx
@@ -0,0 +1,674 @@
+/**
+ * Analytics dashboard (stats page) story.
+ *
+ * Navigates through the titlebar analytics button so the story exercises
+ * the same route transition users hit in the real app.
+ */
+
+import type { APIClient } from "@/browser/contexts/API";
+import type {
+ AgentCostItem,
+ ProviderCacheHitRatioItem,
+ SpendByModelItem,
+ SpendByProjectItem,
+ SpendOverTimeItem,
+ Summary,
+ TimingDistribution,
+} from "@/browser/hooks/useAnalytics";
+import { createMockORPCClient } from "@/browser/stories/mocks/orpc";
+import assert from "@/common/utils/assert";
+import { userEvent, waitFor, within } from "@storybook/test";
+import { appMeta, AppWithMocks, type AppStory } from "./meta.js";
+import { createWorkspace, groupWorkspacesByProject } from "./mockFactory";
+import { selectWorkspace } from "./storyHelpers";
+
+export default {
+ ...appMeta,
+ title: "App/Analytics",
+};
+
+const PROJECT_PATHS = {
+ atlas: "/home/user/projects/atlas-api",
+ orbit: "/home/user/projects/orbit-web",
+ docs: "/home/user/projects/docs-site",
+} as const;
+
+type AnalyticsProjectPath = (typeof PROJECT_PATHS)[keyof typeof PROJECT_PATHS];
+type TimingMetric = "ttft" | "duration" | "tps";
+
+interface StoryAnalyticsNamespace {
+  getSummary: (input: { projectPath?: string | null }) => Promise<Summary>;
+ getSpendOverTime: (input: {
+ projectPath?: string | null;
+ granularity: "hour" | "day" | "week";
+ from?: Date | null;
+ to?: Date | null;
+  }) => Promise<SpendOverTimeItem[]>;
+  getSpendByProject: (_input: Record<string, never>) => Promise<SpendByProjectItem[]>;
+  getSpendByModel: (input: { projectPath?: string | null }) => Promise<SpendByModelItem[]>;
+ getTimingDistribution: (input: {
+ metric: TimingMetric;
+ projectPath?: string | null;
+  }) => Promise<TimingDistribution>;
+  getAgentCostBreakdown: (input: { projectPath?: string | null }) => Promise<AgentCostItem[]>;
+ getCacheHitRatioByProvider: (input: {
+ projectPath?: string | null;
+ from?: Date | null;
+ to?: Date | null;
+  }) => Promise<ProviderCacheHitRatioItem[]>;
+  rebuildDatabase: (_input: Record<string, never>) => Promise<{
+ success: boolean;
+ workspacesIngested: number;
+ }>;
+}
+
+interface ScopedSpendOverTimeRow extends SpendOverTimeItem {
+ projectPath: AnalyticsProjectPath;
+}
+
+interface ScopedSpendByModelRow extends SpendByModelItem {
+ projectPath: AnalyticsProjectPath;
+}
+
+const KNOWN_PROJECT_PATHS = new Set(Object.values(PROJECT_PATHS));
+
+const SUMMARY_BY_PROJECT = new Map<AnalyticsProjectPath | null, Summary>([
+ [
+ null,
+ {
+ totalSpendUsd: 184.73,
+ todaySpendUsd: 6.42,
+ avgDailySpendUsd: 4.11,
+ cacheHitRatio: 0.43,
+ totalTokens: 8_420_000,
+ totalResponses: 1_286,
+ },
+ ],
+ [
+ PROJECT_PATHS.atlas,
+ {
+ totalSpendUsd: 91.42,
+ todaySpendUsd: 3.24,
+ avgDailySpendUsd: 2.98,
+ cacheHitRatio: 0.47,
+ totalTokens: 4_120_000,
+ totalResponses: 602,
+ },
+ ],
+ [
+ PROJECT_PATHS.orbit,
+ {
+ totalSpendUsd: 63.18,
+ todaySpendUsd: 2.11,
+ avgDailySpendUsd: 2.14,
+ cacheHitRatio: 0.41,
+ totalTokens: 2_780_000,
+ totalResponses: 421,
+ },
+ ],
+ [
+ PROJECT_PATHS.docs,
+ {
+ totalSpendUsd: 30.13,
+ todaySpendUsd: 1.07,
+ avgDailySpendUsd: 1.05,
+ cacheHitRatio: 0.35,
+ totalTokens: 1_520_000,
+ totalResponses: 263,
+ },
+ ],
+]);
+
+const SPEND_BY_PROJECT: SpendByProjectItem[] = [
+ {
+ projectName: "atlas-api",
+ projectPath: PROJECT_PATHS.atlas,
+ costUsd: 91.42,
+ tokenCount: 4_120_000,
+ },
+ {
+ projectName: "orbit-web",
+ projectPath: PROJECT_PATHS.orbit,
+ costUsd: 63.18,
+ tokenCount: 2_780_000,
+ },
+ {
+ projectName: "docs-site",
+ projectPath: PROJECT_PATHS.docs,
+ costUsd: 30.13,
+ tokenCount: 1_520_000,
+ },
+];
+
+const SPEND_BY_MODEL_ROWS: ScopedSpendByModelRow[] = [
+ {
+ projectPath: PROJECT_PATHS.atlas,
+ model: "openai:gpt-5-mini",
+ costUsd: 39.6,
+ tokenCount: 1_940_000,
+ responseCount: 302,
+ },
+ {
+ projectPath: PROJECT_PATHS.atlas,
+ model: "anthropic:claude-sonnet-4-20250514",
+ costUsd: 29.2,
+ tokenCount: 1_300_000,
+ responseCount: 196,
+ },
+ {
+ projectPath: PROJECT_PATHS.atlas,
+ model: "openai:gpt-4.1",
+ costUsd: 22.62,
+ tokenCount: 880_000,
+ responseCount: 104,
+ },
+ {
+ projectPath: PROJECT_PATHS.orbit,
+ model: "anthropic:claude-sonnet-4-20250514",
+ costUsd: 26.8,
+ tokenCount: 1_140_000,
+ responseCount: 161,
+ },
+ {
+ projectPath: PROJECT_PATHS.orbit,
+ model: "openai:gpt-5-mini",
+ costUsd: 21.9,
+ tokenCount: 960_000,
+ responseCount: 145,
+ },
+ {
+ projectPath: PROJECT_PATHS.orbit,
+ model: "xai:grok-4-fast",
+ costUsd: 14.48,
+ tokenCount: 680_000,
+ responseCount: 115,
+ },
+ {
+ projectPath: PROJECT_PATHS.docs,
+ model: "openai:gpt-4.1",
+ costUsd: 8.58,
+ tokenCount: 620_000,
+ responseCount: 97,
+ },
+ {
+ projectPath: PROJECT_PATHS.docs,
+ model: "anthropic:claude-sonnet-4-20250514",
+ costUsd: 6.9,
+ tokenCount: 260_000,
+ responseCount: 38,
+ },
+ {
+ projectPath: PROJECT_PATHS.docs,
+ model: "xai:grok-4-fast",
+ costUsd: 6.05,
+ tokenCount: 340_000,
+ responseCount: 45,
+ },
+ {
+ projectPath: PROJECT_PATHS.docs,
+ model: "openai:gpt-5-mini",
+ costUsd: 8.6,
+ tokenCount: 300_000,
+ responseCount: 83,
+ },
+];
+
+const SPEND_OVER_TIME_ROWS: ScopedSpendOverTimeRow[] = [
+ {
+ projectPath: PROJECT_PATHS.atlas,
+ bucket: "2026-02-14",
+ model: "openai:gpt-5-mini",
+ costUsd: 6.2,
+ },
+ {
+ projectPath: PROJECT_PATHS.atlas,
+ bucket: "2026-02-14",
+ model: "anthropic:claude-sonnet-4-20250514",
+ costUsd: 3.8,
+ },
+ {
+ projectPath: PROJECT_PATHS.atlas,
+ bucket: "2026-02-15",
+ model: "openai:gpt-5-mini",
+ costUsd: 7.1,
+ },
+ {
+ projectPath: PROJECT_PATHS.atlas,
+ bucket: "2026-02-15",
+ model: "anthropic:claude-sonnet-4-20250514",
+ costUsd: 4.4,
+ },
+ { projectPath: PROJECT_PATHS.atlas, bucket: "2026-02-16", model: "openai:gpt-4.1", costUsd: 2.1 },
+ {
+ projectPath: PROJECT_PATHS.atlas,
+ bucket: "2026-02-17",
+ model: "openai:gpt-5-mini",
+ costUsd: 6.8,
+ },
+ {
+ projectPath: PROJECT_PATHS.orbit,
+ bucket: "2026-02-14",
+ model: "anthropic:claude-sonnet-4-20250514",
+ costUsd: 4.2,
+ },
+ {
+ projectPath: PROJECT_PATHS.orbit,
+ bucket: "2026-02-15",
+ model: "openai:gpt-5-mini",
+ costUsd: 3.5,
+ },
+ {
+ projectPath: PROJECT_PATHS.orbit,
+ bucket: "2026-02-16",
+ model: "xai:grok-4-fast",
+ costUsd: 2.8,
+ },
+ { projectPath: PROJECT_PATHS.orbit, bucket: "2026-02-18", model: "openai:gpt-4.1", costUsd: 2.4 },
+ {
+ projectPath: PROJECT_PATHS.orbit,
+ bucket: "2026-02-20",
+ model: "openai:gpt-5-mini",
+ costUsd: 3.9,
+ },
+ { projectPath: PROJECT_PATHS.docs, bucket: "2026-02-14", model: "openai:gpt-4.1", costUsd: 1.4 },
+ { projectPath: PROJECT_PATHS.docs, bucket: "2026-02-15", model: "xai:grok-4-fast", costUsd: 1.1 },
+ {
+ projectPath: PROJECT_PATHS.docs,
+ bucket: "2026-02-16",
+ model: "anthropic:claude-sonnet-4-20250514",
+ costUsd: 1.6,
+ },
+ {
+ projectPath: PROJECT_PATHS.docs,
+ bucket: "2026-02-17",
+ model: "openai:gpt-5-mini",
+ costUsd: 1.3,
+ },
+ { projectPath: PROJECT_PATHS.docs, bucket: "2026-02-18", model: "openai:gpt-4.1", costUsd: 1.2 },
+ { projectPath: PROJECT_PATHS.docs, bucket: "2026-02-19", model: "xai:grok-4-fast", costUsd: 1.0 },
+ {
+ projectPath: PROJECT_PATHS.docs,
+ bucket: "2026-02-20",
+ model: "anthropic:claude-sonnet-4-20250514",
+ costUsd: 1.5,
+ },
+];
+
+const BASE_TIMING_DISTRIBUTION: Record<TimingMetric, TimingDistribution> = {
+ ttft: {
+ p50: 390,
+ p90: 840,
+ p99: 1_450,
+ histogram: [
+ { bucket: 200, count: 101 },
+ { bucket: 350, count: 218 },
+ { bucket: 500, count: 179 },
+ { bucket: 700, count: 109 },
+ { bucket: 1_000, count: 44 },
+ { bucket: 1_400, count: 16 },
+ ],
+ },
+ duration: {
+ p50: 2_400,
+ p90: 6_100,
+ p99: 12_900,
+ histogram: [
+ { bucket: 1_000, count: 88 },
+ { bucket: 2_000, count: 196 },
+ { bucket: 3_000, count: 152 },
+ { bucket: 5_000, count: 104 },
+ { bucket: 8_000, count: 51 },
+ { bucket: 12_000, count: 19 },
+ ],
+ },
+ tps: {
+ p50: 38,
+ p90: 82,
+ p99: 118,
+ histogram: [
+ { bucket: 12, count: 23 },
+ { bucket: 24, count: 85 },
+ { bucket: 36, count: 173 },
+ { bucket: 48, count: 158 },
+ { bucket: 72, count: 94 },
+ { bucket: 108, count: 26 },
+ ],
+ },
+};
+
+const TIMING_SCALING: Record<
+ AnalyticsProjectPath,
+ { percentileScale: number; countScale: number }
+> = {
+ [PROJECT_PATHS.atlas]: { percentileScale: 0.92, countScale: 1.15 },
+ [PROJECT_PATHS.orbit]: { percentileScale: 1.08, countScale: 0.9 },
+ [PROJECT_PATHS.docs]: { percentileScale: 1.18, countScale: 0.58 },
+};
+
+const BASE_AGENT_COST_BREAKDOWN: AgentCostItem[] = [
+ { agentId: "exec", costUsd: 72.11, tokenCount: 3_010_000, responseCount: 426 },
+ { agentId: "plan", costUsd: 38.42, tokenCount: 1_540_000, responseCount: 219 },
+ { agentId: "explore", costUsd: 26.71, tokenCount: 1_190_000, responseCount: 178 },
+ { agentId: "compact", costUsd: 17.58, tokenCount: 970_000, responseCount: 126 },
+ { agentId: "docs", costUsd: 12.95, tokenCount: 710_000, responseCount: 101 },
+ { agentId: "research", costUsd: 9.04, tokenCount: 490_000, responseCount: 71 },
+ { agentId: "review", costUsd: 7.92, tokenCount: 390_000, responseCount: 57 },
+];
+
+const AGENT_SCALING: Record<AnalyticsProjectPath, { costScale: number; tokenScale: number }> = {
+ [PROJECT_PATHS.atlas]: { costScale: 0.52, tokenScale: 0.54 },
+ [PROJECT_PATHS.orbit]: { costScale: 0.36, tokenScale: 0.35 },
+ [PROJECT_PATHS.docs]: { costScale: 0.18, tokenScale: 0.19 },
+};
+
+const BASE_PROVIDER_CACHE_HIT_RATIOS: ProviderCacheHitRatioItem[] = [
+ { provider: "anthropic", cacheHitRatio: 0.56, responseCount: 512 },
+ { provider: "openai", cacheHitRatio: 0.43, responseCount: 463 },
+ { provider: "google", cacheHitRatio: 0.37, responseCount: 201 },
+ { provider: "unknown", cacheHitRatio: 0.21, responseCount: 110 },
+];
+
+const PROVIDER_CACHE_HIT_SCALING: Record<
+ AnalyticsProjectPath,
+ { ratioScale: number; responseScale: number }
+> = {
+ [PROJECT_PATHS.atlas]: { ratioScale: 1.06, responseScale: 0.55 },
+ [PROJECT_PATHS.orbit]: { ratioScale: 0.94, responseScale: 0.35 },
+ [PROJECT_PATHS.docs]: { ratioScale: 0.82, responseScale: 0.2 },
+};
+
+function normalizeProjectPath(projectPath: string | null | undefined): AnalyticsProjectPath | null {
+ if (projectPath == null) {
+ return null;
+ }
+
+ assert(
+ KNOWN_PROJECT_PATHS.has(projectPath as AnalyticsProjectPath),
+ `Unexpected analytics projectPath: ${projectPath}`
+ );
+
+ return projectPath as AnalyticsProjectPath;
+}
+
+function isBucketInRange(bucket: string, from: Date | null, to: Date | null): boolean {
+ const bucketDate = new Date(bucket);
+ if (!Number.isFinite(bucketDate.getTime())) {
+ return true;
+ }
+
+ if (from && bucketDate < from) {
+ return false;
+ }
+
+ if (to && bucketDate > to) {
+ return false;
+ }
+
+ return true;
+}
+
+function getSpendOverTimeRows(input: {
+ projectPath: AnalyticsProjectPath | null;
+ from: Date | null;
+ to: Date | null;
+}): SpendOverTimeItem[] {
+ const rows =
+ input.projectPath === null
+ ? SPEND_OVER_TIME_ROWS
+ : SPEND_OVER_TIME_ROWS.filter((row) => row.projectPath === input.projectPath);
+
+  const aggregatedRows = new Map<string, SpendOverTimeItem>();
+ for (const row of rows) {
+ if (!isBucketInRange(row.bucket, input.from, input.to)) {
+ continue;
+ }
+
+ const key = `${row.bucket}|${row.model}`;
+ const current = aggregatedRows.get(key);
+ if (current) {
+ current.costUsd += row.costUsd;
+ continue;
+ }
+
+ aggregatedRows.set(key, {
+ bucket: row.bucket,
+ model: row.model,
+ costUsd: row.costUsd,
+ });
+ }
+
+ return Array.from(aggregatedRows.values()).sort((left, right) => {
+ if (left.bucket === right.bucket) {
+ return left.model.localeCompare(right.model);
+ }
+
+ return left.bucket.localeCompare(right.bucket);
+ });
+}
+
+function getSpendByModelRows(projectPath: AnalyticsProjectPath | null): SpendByModelItem[] {
+ const rows =
+ projectPath === null
+ ? SPEND_BY_MODEL_ROWS
+ : SPEND_BY_MODEL_ROWS.filter((row) => row.projectPath === projectPath);
+
+  const byModel = new Map<string, SpendByModelItem>();
+ for (const row of rows) {
+ const current = byModel.get(row.model);
+ if (current) {
+ current.costUsd += row.costUsd;
+ current.tokenCount += row.tokenCount;
+ current.responseCount += row.responseCount;
+ continue;
+ }
+
+ byModel.set(row.model, {
+ model: row.model,
+ costUsd: row.costUsd,
+ tokenCount: row.tokenCount,
+ responseCount: row.responseCount,
+ });
+ }
+
+ return Array.from(byModel.values()).sort((left, right) => right.costUsd - left.costUsd);
+}
+
+function scaleTimingDistribution(
+ distribution: TimingDistribution,
+ percentileScale: number,
+ countScale: number
+): TimingDistribution {
+ return {
+ p50: Math.round(distribution.p50 * percentileScale),
+ p90: Math.round(distribution.p90 * percentileScale),
+ p99: Math.round(distribution.p99 * percentileScale),
+ histogram: distribution.histogram.map((bucket) => ({
+ bucket: bucket.bucket,
+ count: Math.max(1, Math.round(bucket.count * countScale)),
+ })),
+ };
+}
+
+function getTimingDistribution(
+ metric: TimingMetric,
+ projectPath: AnalyticsProjectPath | null
+): TimingDistribution {
+ const base = BASE_TIMING_DISTRIBUTION[metric];
+ if (projectPath === null) {
+ return base;
+ }
+
+ const scaling = TIMING_SCALING[projectPath];
+ return scaleTimingDistribution(base, scaling.percentileScale, scaling.countScale);
+}
+
+function getAgentCostBreakdown(projectPath: AnalyticsProjectPath | null): AgentCostItem[] {
+ if (projectPath === null) {
+ return BASE_AGENT_COST_BREAKDOWN;
+ }
+
+ const scaling = AGENT_SCALING[projectPath];
+ return BASE_AGENT_COST_BREAKDOWN.map((row) => ({
+ agentId: row.agentId,
+ costUsd: Number((row.costUsd * scaling.costScale).toFixed(2)),
+ tokenCount: Math.round(row.tokenCount * scaling.tokenScale),
+ responseCount: Math.max(1, Math.round(row.responseCount * scaling.costScale)),
+ })).filter((row) => row.costUsd > 1.5);
+}
+
+function getProviderCacheHitRatios(
+ projectPath: AnalyticsProjectPath | null
+): ProviderCacheHitRatioItem[] {
+ if (projectPath === null) {
+ return BASE_PROVIDER_CACHE_HIT_RATIOS;
+ }
+
+ const scaling = PROVIDER_CACHE_HIT_SCALING[projectPath];
+ return BASE_PROVIDER_CACHE_HIT_RATIOS.map((row) => ({
+ provider: row.provider,
+ cacheHitRatio: Math.max(
+ 0,
+ Math.min(0.98, Number((row.cacheHitRatio * scaling.ratioScale).toFixed(3)))
+ ),
+ responseCount: Math.max(1, Math.round(row.responseCount * scaling.responseScale)),
+ })).filter((row) => row.responseCount >= 12);
+}
+
+function setupAnalyticsStory(): APIClient {
+ const workspaces = [
+ createWorkspace({
+ id: "ws-analytics-atlas",
+ name: "feature/observability-rollup",
+ projectName: "atlas-api",
+ projectPath: PROJECT_PATHS.atlas,
+ }),
+ createWorkspace({
+ id: "ws-analytics-orbit",
+ name: "feature/checkout-funnel",
+ projectName: "orbit-web",
+ projectPath: PROJECT_PATHS.orbit,
+ }),
+ createWorkspace({
+ id: "ws-analytics-docs",
+ name: "docs/launch-playbook",
+ projectName: "docs-site",
+ projectPath: PROJECT_PATHS.docs,
+ }),
+ ];
+
+ selectWorkspace(workspaces[0]);
+
+ const baseClient = createMockORPCClient({
+ projects: groupWorkspacesByProject(workspaces),
+ workspaces,
+ });
+
+ const analytics: StoryAnalyticsNamespace = {
+ getSummary: (input) => {
+ const projectPath = normalizeProjectPath(input.projectPath ?? null);
+ const summary = SUMMARY_BY_PROJECT.get(projectPath);
+
+ assert(
+ summary != null,
+ `Missing analytics summary fixture for scope ${projectPath ?? "all"}`
+ );
+ return Promise.resolve(summary);
+ },
+ getSpendOverTime: (input) => {
+ assert(
+ input.granularity === "hour" || input.granularity === "day" || input.granularity === "week",
+ `Unsupported granularity for analytics story: ${input.granularity}`
+ );
+
+ const projectPath = normalizeProjectPath(input.projectPath ?? null);
+ return Promise.resolve(
+ getSpendOverTimeRows({
+ projectPath,
+ from: input.from ?? null,
+ to: input.to ?? null,
+ })
+ );
+ },
+ getSpendByProject: () => Promise.resolve(SPEND_BY_PROJECT),
+ getSpendByModel: (input) => {
+ const projectPath = normalizeProjectPath(input.projectPath ?? null);
+ return Promise.resolve(getSpendByModelRows(projectPath));
+ },
+ getTimingDistribution: (input) => {
+ const projectPath = normalizeProjectPath(input.projectPath ?? null);
+ return Promise.resolve(getTimingDistribution(input.metric, projectPath));
+ },
+ getAgentCostBreakdown: (input) => {
+ const projectPath = normalizeProjectPath(input.projectPath ?? null);
+ return Promise.resolve(getAgentCostBreakdown(projectPath));
+ },
+ getCacheHitRatioByProvider: (input) => {
+ const projectPath = normalizeProjectPath(input.projectPath ?? null);
+ return Promise.resolve(getProviderCacheHitRatios(projectPath));
+ },
+ rebuildDatabase: () =>
+ Promise.resolve({
+ success: true,
+ workspacesIngested: workspaces.length,
+ }),
+ };
+
+ const client = baseClient as Omit & { analytics: unknown };
+ client.analytics = analytics;
+
+ return client as APIClient;
+}
+
+async function openAnalyticsDashboard(canvasElement: HTMLElement): Promise<void> {
+ const canvas = within(canvasElement);
+
+ const analyticsButton = await canvas.findByTestId("analytics-button", {}, { timeout: 10_000 });
+ await userEvent.click(analyticsButton);
+
+ await canvas.findByRole("heading", { name: /^analytics$/i });
+}
+
+export const StatsDashboard: AppStory = {
+  render: () => <AppWithMocks client={setupAnalyticsStory()} />,
+ play: async ({ canvasElement }: { canvasElement: HTMLElement }) => {
+ const canvas = within(canvasElement);
+
+ await openAnalyticsDashboard(canvasElement);
+
+ await canvas.findByText("Total Spend");
+ await canvas.findByText("$184.73");
+
+ await canvas.findByRole("heading", { name: /spend over time/i });
+ await canvas.findByRole("heading", { name: /spend by project/i });
+ await canvas.findByRole("heading", { name: /spend by model/i });
+ await canvas.findByRole("heading", { name: /timing distribution/i });
+ await canvas.findByRole("heading", { name: /cache hit ratio by provider/i });
+ await canvas.findByRole("heading", { name: /agent cost breakdown/i });
+
+ await waitFor(() => {
+ if (canvas.queryByText(/No spend data for the selected filters/i)) {
+ throw new Error("Expected spend-over-time chart to render populated data");
+ }
+
+ if (canvas.queryByText(/No project spend data yet/i)) {
+ throw new Error("Expected spend-by-project chart to render populated data");
+ }
+
+ if (canvas.queryByText(/No model spend data yet/i)) {
+ throw new Error("Expected spend-by-model chart to render populated data");
+ }
+
+ if (canvas.queryByText(/No timing data available yet/i)) {
+ throw new Error("Expected timing distribution chart to render populated data");
+ }
+
+ if (canvas.queryByText(/No provider cache hit data available/i)) {
+ throw new Error("Expected provider cache-hit chart to render populated data");
+ }
+
+ if (canvas.queryByText(/No agent-level spend data available/i)) {
+ throw new Error("Expected agent-cost chart to render populated data");
+ }
+ });
+ },
+};
diff --git a/src/browser/utils/commandIds.ts b/src/browser/utils/commandIds.ts
index a8c2754d2d..12c9cee3b0 100644
--- a/src/browser/utils/commandIds.ts
+++ b/src/browser/utils/commandIds.ts
@@ -64,6 +64,9 @@ export const CommandIds = {
themeToggle: () => "appearance:theme:toggle" as const,
themeSet: (theme: string) => `appearance:theme:set:${theme}` as const,
+ // Analytics commands
+ analyticsRebuildDatabase: () => "analytics:rebuild-database" as const,
+
// Layout commands
layoutApplySlot: (slot: number) => `layout:apply-slot:${slot}` as const,
layoutCaptureSlot: (slot: number) => `layout:capture-slot:${slot}` as const,
diff --git a/src/browser/utils/commands/sources.test.ts b/src/browser/utils/commands/sources.test.ts
index 7d9928b005..e4e68e2cc7 100644
--- a/src/browser/utils/commands/sources.test.ts
+++ b/src/browser/utils/commands/sources.test.ts
@@ -61,6 +61,9 @@ const mk = (over: Partial<Parameters<typeof buildCoreSources>[0]> = {}) => {
truncateHistory: () => Promise.resolve({ success: true, data: undefined }),
interruptStream: () => Promise.resolve({ success: true, data: undefined }),
},
+ analytics: {
+ rebuildDatabase: () => Promise.resolve({ success: true, workspacesIngested: 2 }),
+ },
} as unknown as APIClient,
getBranchesForProject: () =>
Promise.resolve({
@@ -187,6 +190,121 @@ test("archive merged workspaces prompt submits selected project", async () => {
expect(onArchiveMergedWorkspacesInProject).toHaveBeenCalledWith("/repo/a");
});
+test("buildCoreSources includes rebuild analytics database action with discoverable keywords", () => {
+ const sources = mk();
+ const actions = sources.flatMap((s) => s());
+ const rebuildAction = actions.find((a) => a.id === "analytics:rebuild-database");
+
+ expect(rebuildAction).toBeDefined();
+ expect(rebuildAction?.title).toBe("Rebuild Analytics Database");
+ expect(rebuildAction?.keywords).toContain("analytics");
+ expect(rebuildAction?.keywords).toContain("rebuild");
+ expect(rebuildAction?.keywords).toContain("recompute");
+ expect(rebuildAction?.keywords).toContain("database");
+ expect(rebuildAction?.keywords).toContain("stats");
+});
+
+test("analytics rebuild command calls route and dispatches toast feedback", async () => {
+ const rebuildDatabase = mock(() => Promise.resolve({ success: true, workspacesIngested: 4 }));
+
+ const testWindow = new GlobalWindow();
+ const originalWindow = globalThis.window;
+ const originalDocument = globalThis.document;
+ const originalCustomEvent = globalThis.CustomEvent;
+
+ globalThis.window = testWindow as unknown as Window & typeof globalThis;
+ globalThis.document = testWindow.document as unknown as Document;
+ globalThis.CustomEvent = testWindow.CustomEvent as unknown as typeof CustomEvent;
+
+ const chatInputHost = document.createElement("div");
+ chatInputHost.setAttribute("data-component", "ChatInputSection");
+ document.body.appendChild(chatInputHost);
+
+ const receivedToasts: Array<{
+ type: "success" | "error";
+ message: string;
+ title?: string;
+ }> = [];
+ const handleToast = (event: Event) => {
+ receivedToasts.push(
+ (event as CustomEvent<{ type: "success" | "error"; message: string; title?: string }>).detail
+ );
+ };
+ window.addEventListener(CUSTOM_EVENTS.ANALYTICS_REBUILD_TOAST, handleToast);
+
+ try {
+ const sources = mk({
+ api: {
+ workspace: {
+ truncateHistory: () => Promise.resolve({ success: true, data: undefined }),
+ interruptStream: () => Promise.resolve({ success: true, data: undefined }),
+ },
+ analytics: { rebuildDatabase },
+ } as unknown as APIClient,
+ });
+ const actions = sources.flatMap((s) => s());
+ const rebuildAction = actions.find((a) => a.id === "analytics:rebuild-database");
+
+ expect(rebuildAction).toBeDefined();
+ await rebuildAction!.run();
+
+ expect(rebuildDatabase).toHaveBeenCalledWith({});
+ expect(receivedToasts).toEqual([
+ {
+ type: "success",
+ message: "Analytics database rebuilt successfully (4 workspaces ingested).",
+ },
+ ]);
+ } finally {
+ window.removeEventListener(CUSTOM_EVENTS.ANALYTICS_REBUILD_TOAST, handleToast);
+ globalThis.window = originalWindow;
+ globalThis.document = originalDocument;
+ globalThis.CustomEvent = originalCustomEvent;
+ }
+});
+
+test("analytics rebuild command falls back to alert when chat input toast host is unavailable", async () => {
+ const rebuildDatabase = mock(() => Promise.resolve({ success: true, workspacesIngested: 1 }));
+
+ const testWindow = new GlobalWindow();
+ const originalWindow = globalThis.window;
+ const originalDocument = globalThis.document;
+ const originalCustomEvent = globalThis.CustomEvent;
+
+ globalThis.window = testWindow as unknown as Window & typeof globalThis;
+ globalThis.document = testWindow.document as unknown as Document;
+ globalThis.CustomEvent = testWindow.CustomEvent as unknown as typeof CustomEvent;
+
+ const alertMock = mock(() => undefined);
+ window.alert = alertMock as unknown as typeof window.alert;
+
+ try {
+ const sources = mk({
+ api: {
+ workspace: {
+ truncateHistory: () => Promise.resolve({ success: true, data: undefined }),
+ interruptStream: () => Promise.resolve({ success: true, data: undefined }),
+ },
+ analytics: { rebuildDatabase },
+ } as unknown as APIClient,
+ });
+ const actions = sources.flatMap((s) => s());
+ const rebuildAction = actions.find((a) => a.id === "analytics:rebuild-database");
+
+ expect(rebuildAction).toBeDefined();
+ await rebuildAction!.run();
+
+ expect(rebuildDatabase).toHaveBeenCalledWith({});
+ expect(alertMock).toHaveBeenCalledWith(
+ "Analytics database rebuilt successfully (1 workspace ingested)."
+ );
+ } finally {
+ globalThis.window = originalWindow;
+ globalThis.document = originalDocument;
+ globalThis.CustomEvent = originalCustomEvent;
+ }
+});
+
test("workspace generate title command is hidden for Chat with Mux workspace", () => {
const sources = mk({
selectedWorkspace: {
diff --git a/src/browser/utils/commands/sources.ts b/src/browser/utils/commands/sources.ts
index 9cd1280302..2a10f3d6da 100644
--- a/src/browser/utils/commands/sources.ts
+++ b/src/browser/utils/commands/sources.ts
@@ -38,6 +38,7 @@ import type { FrontendWorkspaceMetadata } from "@/common/types/workspace";
import type { BranchListResult } from "@/common/orpc/types";
import type { WorkspaceState } from "@/browser/stores/WorkspaceStore";
import type { RuntimeConfig } from "@/common/types/runtime";
+import { getErrorMessage } from "@/common/utils/errors";
export interface BuildSourcesParams {
api: APIClient | null;
@@ -168,6 +169,52 @@ function toFileUrl(filePath: string): string {
return `file://${encodeURI(normalized)}`;
}
+interface AnalyticsRebuildNamespace {
+ rebuildDatabase?: (
+ input: Record<string, never>
+ ) => Promise<{ success: boolean; workspacesIngested: number }>;
+}
+
+const getAnalyticsRebuildDatabase = (
+ api: APIClient | null
+): AnalyticsRebuildNamespace["rebuildDatabase"] | null => {
+ const candidate = (api as { analytics?: unknown } | null)?.analytics;
+ if (!candidate || (typeof candidate !== "object" && typeof candidate !== "function")) {
+ return null;
+ }
+
+ const rebuildDatabase = (candidate as AnalyticsRebuildNamespace).rebuildDatabase;
+ return typeof rebuildDatabase === "function" ? rebuildDatabase : null;
+};
+
+const showCommandFeedbackToast = (feedback: {
+ type: "success" | "error";
+ message: string;
+ title?: string;
+}) => {
+ if (typeof window === "undefined") {
+ return;
+ }
+
+ // Analytics view does not mount ChatInput, so keep a basic alert fallback
+ // for command palette actions that need user feedback.
+ const hasChatInputToastHost =
+ typeof document !== "undefined" &&
+ document.querySelector('[data-component="ChatInputSection"]') !== null;
+
+ if (hasChatInputToastHost) {
+ window.dispatchEvent(createCustomEvent(CUSTOM_EVENTS.ANALYTICS_REBUILD_TOAST, feedback));
+ return;
+ }
+
+ const alertMessage = feedback.title
+ ? `${feedback.title}\n\n${feedback.message}`
+ : feedback.message;
+ if (typeof window.alert === "function") {
+ window.alert(alertMessage);
+ }
+};
+
const findFirstTerminalSessionTab = (
node: ReturnType["root"]
): { tabsetId: string; tab: TabType } | null => {
@@ -959,6 +1006,54 @@ export function buildCoreSources(p: BuildSourcesParams): Array<() => CommandActi
return list;
});
+ // Analytics maintenance
+ actions.push(() => [
+ {
+ id: CommandIds.analyticsRebuildDatabase(),
+ title: "Rebuild Analytics Database",
+ subtitle: "Recompute analytics from workspace history",
+ section: section.settings,
+ keywords: ["analytics", "rebuild", "recompute", "database", "stats"],
+ run: async () => {
+ const rebuildDatabase = getAnalyticsRebuildDatabase(p.api);
+ if (!rebuildDatabase) {
+ showCommandFeedbackToast({
+ type: "error",
+ title: "Analytics Unavailable",
+ message: "Analytics backend is not available in this build.",
+ });
+ return;
+ }
+
+ try {
+ const result = await rebuildDatabase({});
+ if (!result.success) {
+ showCommandFeedbackToast({
+ type: "error",
+ title: "Analytics Rebuild Failed",
+ message: "Analytics database rebuild did not complete successfully.",
+ });
+ return;
+ }
+
+ const workspaceLabel = `${result.workspacesIngested} workspace${
+ result.workspacesIngested === 1 ? "" : "s"
+ }`;
+ showCommandFeedbackToast({
+ type: "success",
+ message: `Analytics database rebuilt successfully (${workspaceLabel} ingested).`,
+ });
+ } catch (error) {
+ showCommandFeedbackToast({
+ type: "error",
+ title: "Analytics Rebuild Failed",
+ message: getErrorMessage(error),
+ });
+ }
+ },
+ },
+ ]);
+
// Settings
if (p.onOpenSettings) {
const openSettings = p.onOpenSettings;
diff --git a/src/browser/utils/ui/keybinds.ts b/src/browser/utils/ui/keybinds.ts
index 14ce64b9ae..d50e5254f0 100644
--- a/src/browser/utils/ui/keybinds.ts
+++ b/src/browser/utils/ui/keybinds.ts
@@ -404,6 +404,11 @@ export const KEYBINDS = {
// macOS: Cmd+, Win/Linux: Ctrl+,
OPEN_SETTINGS: { key: ",", ctrl: true },
+ /** Open analytics dashboard */
+ // macOS: Cmd+Shift+Y, Win/Linux: Ctrl+Shift+Y
+ // "Y" for analYtics — Ctrl+. is reserved for CYCLE_AGENT
+ OPEN_ANALYTICS: { key: "Y", ctrl: true, shift: true },
+
/** Toggle voice input (dictation) */
// macOS: Cmd+D, Win/Linux: Ctrl+D
// "D" for Dictate - intuitive and available
diff --git a/src/cli/cli.test.ts b/src/cli/cli.test.ts
index 05927d5f7d..dd6c003641 100644
--- a/src/cli/cli.test.ts
+++ b/src/cli/cli.test.ts
@@ -89,6 +89,7 @@ async function createTestServer(authToken?: string): Promise {
coderService: services.coderService,
serverAuthService: services.serverAuthService,
sshPromptService: services.sshPromptService,
+ analyticsService: services.analyticsService,
};
// Use the actual createOrpcServer function
diff --git a/src/cli/server.test.ts b/src/cli/server.test.ts
index 35c4b48362..e799bbaf40 100644
--- a/src/cli/server.test.ts
+++ b/src/cli/server.test.ts
@@ -92,6 +92,7 @@ async function createTestServer(): Promise {
coderService: services.coderService,
serverAuthService: services.serverAuthService,
sshPromptService: services.sshPromptService,
+ analyticsService: services.analyticsService,
};
// Use the actual createOrpcServer function
diff --git a/src/common/constants/events.ts b/src/common/constants/events.ts
index 46c1801126..6bd15f6ab7 100644
--- a/src/common/constants/events.ts
+++ b/src/common/constants/events.ts
@@ -81,6 +81,12 @@ export const CUSTOM_EVENTS = {
*/
TOGGLE_VOICE_INPUT: "mux:toggleVoiceInput",
+ /**
+ * Event to show toast feedback for analytics database rebuild commands.
+ * Detail: { type: "success" | "error", message: string, title?: string }
+ */
+ ANALYTICS_REBUILD_TOAST: "mux:analyticsRebuildToast",
+
/**
* Event to open the debug LLM request modal
* No detail
@@ -129,6 +135,11 @@ export interface CustomEventPayloads {
runtime?: string;
};
[CUSTOM_EVENTS.TOGGLE_VOICE_INPUT]: never; // No payload
+ [CUSTOM_EVENTS.ANALYTICS_REBUILD_TOAST]: {
+ type: "success" | "error";
+ message: string;
+ title?: string;
+ };
[CUSTOM_EVENTS.OPEN_DEBUG_LLM_REQUEST]: never; // No payload
}
diff --git a/src/common/orpc/schemas.ts b/src/common/orpc/schemas.ts
index 84e8bfd98a..2b877b4c6f 100644
--- a/src/common/orpc/schemas.ts
+++ b/src/common/orpc/schemas.ts
@@ -37,6 +37,28 @@ export {
WorkspaceStatsSnapshotSchema,
} from "./schemas/workspaceStats";
+// Analytics schemas
+export {
+ AgentCostRowSchema,
+ EventRowSchema,
+ HistogramBucketSchema,
+ SpendByModelRowSchema,
+ SpendByProjectRowSchema,
+ SpendOverTimeRowSchema,
+ SummaryRowSchema,
+ TimingPercentilesRowSchema,
+} from "./schemas/analytics";
+export type {
+ AgentCostRow,
+ EventRow,
+ HistogramBucket,
+ SpendByModelRow,
+ SpendByProjectRow,
+ SpendOverTimeRow,
+ SummaryRow,
+ TimingPercentilesRow,
+} from "./schemas/analytics";
+
// Chat stats schemas
export {
ChatStatsSchema,
@@ -174,6 +196,7 @@ export {
export {
ApiServerStatusSchema,
AWSCredentialStatusSchema,
+ analytics,
coder,
CoderInfoSchema,
CoderPresetSchema,
diff --git a/src/common/orpc/schemas/analytics.ts b/src/common/orpc/schemas/analytics.ts
new file mode 100644
index 0000000000..3349f0669c
--- /dev/null
+++ b/src/common/orpc/schemas/analytics.ts
@@ -0,0 +1,208 @@
+import { z } from "zod";
+
+// ── Reusable row schemas (used by both oRPC output AND worker query validation) ──
+
+/** Single row from DuckDB, validated before crossing worker→main boundary */
+export const SummaryRowSchema = z.object({
+ total_spend_usd: z.number(),
+ today_spend_usd: z.number(),
+ avg_daily_spend_usd: z.number(),
+ cache_hit_ratio: z.number(),
+ total_tokens: z.number(),
+ total_responses: z.number(),
+});
+export type SummaryRow = z.infer<typeof SummaryRowSchema>;
+
+export const SpendOverTimeRowSchema = z.object({
+ bucket: z.string(),
+ model: z.string(),
+ cost_usd: z.number(),
+});
+export type SpendOverTimeRow = z.infer<typeof SpendOverTimeRowSchema>;
+
+export const SpendByProjectRowSchema = z.object({
+ project_name: z.string(),
+ project_path: z.string(),
+ cost_usd: z.number(),
+ token_count: z.number(),
+});
+export type SpendByProjectRow = z.infer<typeof SpendByProjectRowSchema>;
+
+export const SpendByModelRowSchema = z.object({
+ model: z.string(),
+ cost_usd: z.number(),
+ token_count: z.number(),
+ response_count: z.number(),
+});
+export type SpendByModelRow = z.infer<typeof SpendByModelRowSchema>;
+
+export const TimingPercentilesRowSchema = z.object({
+ p50: z.number(),
+ p90: z.number(),
+ p99: z.number(),
+});
+export type TimingPercentilesRow = z.infer<typeof TimingPercentilesRowSchema>;
+
+export const HistogramBucketSchema = z.object({
+ bucket: z.number(),
+ count: z.number(),
+});
+export type HistogramBucket = z.infer<typeof HistogramBucketSchema>;
+
+export const AgentCostRowSchema = z.object({
+ agent_id: z.string(),
+ cost_usd: z.number(),
+ token_count: z.number(),
+ response_count: z.number(),
+});
+export type AgentCostRow = z.infer<typeof AgentCostRowSchema>;
+
+export const ProviderCacheHitModelRowSchema = z.object({
+ model: z.string(),
+ cached_tokens: z.number(),
+ total_prompt_tokens: z.number(),
+ response_count: z.number(),
+});
+export type ProviderCacheHitModelRow = z.infer<typeof ProviderCacheHitModelRowSchema>;
+
+/** ETL input validation â each row extracted from chat.jsonl is validated before insert */
+export const EventRowSchema = z.object({
+ workspace_id: z.string(),
+ project_path: z.string().nullable(),
+ project_name: z.string().nullable(),
+ workspace_name: z.string().nullable(),
+ parent_workspace_id: z.string().nullable(),
+ agent_id: z.string().nullable(),
+ timestamp: z.number().nullable(), // unix ms
+ model: z.string().nullable(),
+ thinking_level: z.string().nullable(),
+ input_tokens: z.number().default(0),
+ output_tokens: z.number().default(0),
+ reasoning_tokens: z.number().default(0),
+ cached_tokens: z.number().default(0),
+ cache_create_tokens: z.number().default(0),
+ input_cost_usd: z.number().default(0),
+ output_cost_usd: z.number().default(0),
+ reasoning_cost_usd: z.number().default(0),
+ cached_cost_usd: z.number().default(0),
+ total_cost_usd: z.number().default(0),
+ duration_ms: z.number().nullable(),
+ ttft_ms: z.number().nullable(),
+ streaming_ms: z.number().nullable(),
+ tool_execution_ms: z.number().nullable(),
+ output_tps: z.number().nullable(),
+ response_index: z.number().nullable(),
+ is_sub_agent: z.boolean().default(false),
+});
+export type EventRow = z.infer<typeof EventRowSchema>;
+
+// ── oRPC procedure schemas (camelCase for API contract) ──
+
+export const analytics = {
+ getSummary: {
+ input: z.object({
+ projectPath: z.string().nullish(),
+ from: z.coerce.date().nullish(),
+ to: z.coerce.date().nullish(),
+ }),
+ output: z.object({
+ totalSpendUsd: z.number(),
+ todaySpendUsd: z.number(),
+ avgDailySpendUsd: z.number(),
+ cacheHitRatio: z.number(),
+ totalTokens: z.number(),
+ totalResponses: z.number(),
+ }),
+ },
+ getSpendOverTime: {
+ input: z.object({
+ projectPath: z.string().nullish(),
+ granularity: z.enum(["hour", "day", "week"]),
+ from: z.coerce.date().nullish(),
+ to: z.coerce.date().nullish(),
+ }),
+ output: z.array(
+ z.object({
+ bucket: z.string(),
+ costUsd: z.number(),
+ model: z.string(),
+ })
+ ),
+ },
+ getSpendByProject: {
+ input: z.object({
+ from: z.coerce.date().nullish(),
+ to: z.coerce.date().nullish(),
+ }),
+ output: z.array(
+ z.object({
+ projectName: z.string(),
+ projectPath: z.string(),
+ costUsd: z.number(),
+ tokenCount: z.number(),
+ })
+ ),
+ },
+ getSpendByModel: {
+ input: z.object({
+ projectPath: z.string().nullish(),
+ from: z.coerce.date().nullish(),
+ to: z.coerce.date().nullish(),
+ }),
+ output: z.array(
+ z.object({
+ model: z.string(),
+ costUsd: z.number(),
+ tokenCount: z.number(),
+ responseCount: z.number(),
+ })
+ ),
+ },
+ getTimingDistribution: {
+ input: z.object({
+ metric: z.enum(["ttft", "duration", "tps"]),
+ projectPath: z.string().nullish(),
+ from: z.coerce.date().nullish(),
+ to: z.coerce.date().nullish(),
+ }),
+ output: z.object({
+ p50: z.number(),
+ p90: z.number(),
+ p99: z.number(),
+ histogram: z.array(z.object({ bucket: z.number(), count: z.number() })),
+ }),
+ },
+ getAgentCostBreakdown: {
+ input: z.object({
+ projectPath: z.string().nullish(),
+ from: z.coerce.date().nullish(),
+ to: z.coerce.date().nullish(),
+ }),
+ output: z.array(
+ z.object({
+ agentId: z.string(),
+ costUsd: z.number(),
+ tokenCount: z.number(),
+ responseCount: z.number(),
+ })
+ ),
+ },
+ getCacheHitRatioByProvider: {
+ input: z.object({
+ projectPath: z.string().nullish(),
+ from: z.coerce.date().nullish(),
+ to: z.coerce.date().nullish(),
+ }),
+ output: z.array(
+ z.object({
+ provider: z.string(),
+ cacheHitRatio: z.number(),
+ responseCount: z.number(),
+ })
+ ),
+ },
+ rebuildDatabase: {
+ input: z.object({}),
+ output: z.object({ success: z.boolean(), workspacesIngested: z.number() }),
+ },
+};
diff --git a/src/common/orpc/schemas/api.ts b/src/common/orpc/schemas/api.ts
index 1ebad12424..bcd96f6cdf 100644
--- a/src/common/orpc/schemas/api.ts
+++ b/src/common/orpc/schemas/api.ts
@@ -82,6 +82,9 @@ export { telemetry, TelemetryEventSchema } from "./telemetry";
// Re-export signing schemas
export { signing, type SigningCapabilities, type SignatureEnvelope } from "./signing";
+// Re-export analytics schemas
+export { analytics } from "./analytics";
+
// --- API Router Schemas ---
// Background process info (for UI display)
diff --git a/src/common/orpc/schemas/message.ts b/src/common/orpc/schemas/message.ts
index 59faf4df64..ca4aa604ec 100644
--- a/src/common/orpc/schemas/message.ts
+++ b/src/common/orpc/schemas/message.ts
@@ -119,6 +119,7 @@ export const MuxMessageSchema = z.object({
providerMetadata: z.record(z.string(), z.unknown()).optional(),
contextProviderMetadata: z.record(z.string(), z.unknown()).optional(),
duration: z.number().optional(),
+ ttftMs: z.number().optional(),
systemMessageTokens: z.number().optional(),
muxMetadata: z.any().optional(),
cmuxMetadata: z.any().optional(), // Legacy field for backward compatibility
diff --git a/src/common/orpc/schemas/stream.ts b/src/common/orpc/schemas/stream.ts
index de14742644..f2c6ea6852 100644
--- a/src/common/orpc/schemas/stream.ts
+++ b/src/common/orpc/schemas/stream.ts
@@ -228,6 +228,7 @@ export const StreamEndEventSchema = z.object({
// Last step's provider metadata (for context window cache display)
contextProviderMetadata: z.record(z.string(), z.unknown()).optional(),
duration: z.number().optional(),
+ ttftMs: z.number().optional(),
systemMessageTokens: z.number().optional(),
historySequence: z.number().optional().meta({
description: "Present when loading from history",
diff --git a/src/common/types/message.ts b/src/common/types/message.ts
index d1ee93cb59..306d21d731 100644
--- a/src/common/types/message.ts
+++ b/src/common/types/message.ts
@@ -387,6 +387,7 @@ export function isCompactionSummaryMetadata(
export interface MuxMetadata {
historySequence?: number; // Assigned by backend for global message ordering (required when writing to history)
duration?: number;
+ ttftMs?: number; // Time-to-first-token measured from stream start; omitted when unavailable
/** @deprecated Legacy base mode derived from agent definition. */
mode?: AgentMode;
timestamp?: number;
diff --git a/src/node/orpc/context.ts b/src/node/orpc/context.ts
index 6b616eca90..b42f7f9d6d 100644
--- a/src/node/orpc/context.ts
+++ b/src/node/orpc/context.ts
@@ -31,6 +31,7 @@ import type { PolicyService } from "@/node/services/policyService";
import type { CoderService } from "@/node/services/coderService";
import type { ServerAuthService } from "@/node/services/serverAuthService";
import type { SshPromptService } from "@/node/services/sshPromptService";
+import type { AnalyticsService } from "@/node/services/analytics/analyticsService";
export interface ORPCContext {
config: Config;
@@ -65,5 +66,6 @@ export interface ORPCContext {
coderService: CoderService;
serverAuthService: ServerAuthService;
sshPromptService: SshPromptService;
+ analyticsService: AnalyticsService;
headers?: IncomingHttpHeaders;
}
diff --git a/src/node/orpc/router.ts b/src/node/orpc/router.ts
index 497bf6de55..9ae532a4d3 100644
--- a/src/node/orpc/router.ts
+++ b/src/node/orpc/router.ts
@@ -4066,6 +4066,77 @@ export const router = (authToken?: string) => {
return { success: true };
}),
},
+ analytics: {
+ getSummary: t
+ .input(schemas.analytics.getSummary.input)
+ .output(schemas.analytics.getSummary.output)
+ .handler(async ({ context, input }) => {
+ return context.analyticsService.getSummary(
+ input.projectPath ?? null,
+ input.from ?? null,
+ input.to ?? null
+ );
+ }),
+ getSpendOverTime: t
+ .input(schemas.analytics.getSpendOverTime.input)
+ .output(schemas.analytics.getSpendOverTime.output)
+ .handler(async ({ context, input }) => {
+ return context.analyticsService.getSpendOverTime(input);
+ }),
+ getSpendByProject: t
+ .input(schemas.analytics.getSpendByProject.input)
+ .output(schemas.analytics.getSpendByProject.output)
+ .handler(async ({ context, input }) => {
+ return context.analyticsService.getSpendByProject(input.from ?? null, input.to ?? null);
+ }),
+ getSpendByModel: t
+ .input(schemas.analytics.getSpendByModel.input)
+ .output(schemas.analytics.getSpendByModel.output)
+ .handler(async ({ context, input }) => {
+ return context.analyticsService.getSpendByModel(
+ input.projectPath ?? null,
+ input.from ?? null,
+ input.to ?? null
+ );
+ }),
+ getTimingDistribution: t
+ .input(schemas.analytics.getTimingDistribution.input)
+ .output(schemas.analytics.getTimingDistribution.output)
+ .handler(async ({ context, input }) => {
+ return context.analyticsService.getTimingDistribution(
+ input.metric,
+ input.projectPath ?? null,
+ input.from ?? null,
+ input.to ?? null
+ );
+ }),
+ getAgentCostBreakdown: t
+ .input(schemas.analytics.getAgentCostBreakdown.input)
+ .output(schemas.analytics.getAgentCostBreakdown.output)
+ .handler(async ({ context, input }) => {
+ return context.analyticsService.getAgentCostBreakdown(
+ input.projectPath ?? null,
+ input.from ?? null,
+ input.to ?? null
+ );
+ }),
+ getCacheHitRatioByProvider: t
+ .input(schemas.analytics.getCacheHitRatioByProvider.input)
+ .output(schemas.analytics.getCacheHitRatioByProvider.output)
+ .handler(async ({ context, input }) => {
+ return context.analyticsService.getCacheHitRatioByProvider(
+ input.projectPath ?? null,
+ input.from ?? null,
+ input.to ?? null
+ );
+ }),
+ rebuildDatabase: t
+ .input(schemas.analytics.rebuildDatabase.input)
+ .output(schemas.analytics.rebuildDatabase.output)
+ .handler(async ({ context }) => {
+ return context.analyticsService.rebuildAll();
+ }),
+ },
ssh: {
prompt: {
subscribe: t
diff --git a/src/node/services/analytics/analyticsService.test.ts b/src/node/services/analytics/analyticsService.test.ts
new file mode 100644
index 0000000000..4d58902f65
--- /dev/null
+++ b/src/node/services/analytics/analyticsService.test.ts
@@ -0,0 +1,91 @@
+import { describe, expect, test } from "bun:test";
+import type { ProviderCacheHitModelRow } from "@/common/orpc/schemas/analytics";
+import { aggregateProviderCacheHitRows } from "./analyticsService";
+
+describe("aggregateProviderCacheHitRows", () => {
+ test("rolls model rows up to providers using weighted token ratios", () => {
+ const rows: ProviderCacheHitModelRow[] = [
+ {
+ model: "openai:gpt-4o",
+ cached_tokens: 20,
+ total_prompt_tokens: 100,
+ response_count: 4,
+ },
+ {
+ model: "openai:gpt-4.1",
+ cached_tokens: 30,
+ total_prompt_tokens: 60,
+ response_count: 3,
+ },
+ {
+ model: "anthropic:claude-sonnet-4-5",
+ cached_tokens: 24,
+ total_prompt_tokens: 40,
+ response_count: 2,
+ },
+ ];
+
+ expect(aggregateProviderCacheHitRows(rows)).toEqual([
+ {
+ provider: "anthropic",
+ cacheHitRatio: 0.6,
+ responseCount: 2,
+ },
+ {
+ provider: "openai",
+ cacheHitRatio: 0.3125,
+ responseCount: 7,
+ },
+ ]);
+ });
+
+ test("buckets missing or malformed model providers under unknown", () => {
+ const rows: ProviderCacheHitModelRow[] = [
+ {
+ model: "",
+ cached_tokens: 10,
+ total_prompt_tokens: 20,
+ response_count: 1,
+ },
+ {
+ model: "unknown",
+ cached_tokens: 10,
+ total_prompt_tokens: 0,
+ response_count: 2,
+ },
+ {
+ model: "custom-model-without-provider",
+ cached_tokens: 5,
+ total_prompt_tokens: 10,
+ response_count: 1,
+ },
+ ];
+
+ expect(aggregateProviderCacheHitRows(rows)).toEqual([
+ {
+ provider: "unknown",
+ cacheHitRatio: 25 / 30,
+ responseCount: 4,
+ },
+ ]);
+ });
+
+ test("normalizes mux-gateway model prefixes before provider grouping", () => {
+ const rows: ProviderCacheHitModelRow[] = [
+ {
+ model: "mux-gateway:openai/gpt-4.1",
+ cached_tokens: 12,
+ total_prompt_tokens: 30,
+ response_count: 2,
+ },
+ ];
+
+ expect(aggregateProviderCacheHitRows(rows)).toEqual([
+ {
+ provider: "openai",
+ cacheHitRatio: 0.4,
+ responseCount: 2,
+ },
+ ]);
+ });
+});
diff --git a/src/node/services/analytics/analyticsService.ts b/src/node/services/analytics/analyticsService.ts
new file mode 100644
index 0000000000..820e172cb6
--- /dev/null
+++ b/src/node/services/analytics/analyticsService.ts
@@ -0,0 +1,622 @@
+import assert from "node:assert/strict";
+import * as fs from "node:fs/promises";
+import * as path from "node:path";
+import { Worker } from "node:worker_threads";
+import type {
+ AgentCostRow,
+ HistogramBucket,
+ ProviderCacheHitModelRow,
+ SpendByModelRow,
+ SpendByProjectRow,
+ SpendOverTimeRow,
+ SummaryRow,
+ TimingPercentilesRow,
+} from "@/common/orpc/schemas/analytics";
+import { getModelProvider } from "@/common/utils/ai/models";
+import type { Config } from "@/node/config";
+import { getErrorMessage } from "@/common/utils/errors";
+import { PlatformPaths } from "@/common/utils/paths";
+import { log } from "@/node/services/log";
+
+// Request envelope sent from the service to the analytics worker thread.
+interface WorkerRequest {
+  messageId: number;
+  taskName: string;
+  data: unknown;
+}
+
+// Worker reply carrying a successful task result.
+interface WorkerSuccessResponse {
+  messageId: number;
+  result: unknown;
+}
+
+// Worker reply carrying a serialized error (Error objects do not cross the
+// thread boundary intact, so message/stack are copied into plain fields).
+interface WorkerErrorResponse {
+  messageId: number;
+  error: {
+    message: string;
+    stack?: string;
+  };
+}
+
+type WorkerResponse = WorkerSuccessResponse | WorkerErrorResponse;
+
+// Names of read-only queries the worker knows how to execute.
+type AnalyticsQueryName =
+  | "getSummary"
+  | "getSpendOverTime"
+  | "getSpendByProject"
+  | "getSpendByModel"
+  | "getTimingDistribution"
+  | "getAgentCostBreakdown"
+  | "getCacheHitRatioByProvider";
+
+// Optional workspace context attached to ingested events.
+interface IngestWorkspaceMeta {
+  projectPath?: string;
+  projectName?: string;
+  workspaceName?: string;
+  parentWorkspaceId?: string;
+}
+
+// Shape returned by the worker's getTimingDistribution query.
+interface TimingDistributionRow {
+  percentiles: TimingPercentilesRow;
+  histogram: HistogramBucket[];
+}
+
+interface RebuildAllResult {
+  workspacesIngested: number;
+}
+
+interface NeedsBackfillResult {
+  needsBackfill: boolean;
+}
+
+// Payload for the worker's rebuildAll task; meta is keyed by workspace ID
+// (see buildRebuildWorkspaceMetaById).
+interface RebuildAllData {
+  sessionsDir: string;
+  workspaceMetaById: Record<string, IngestWorkspaceMeta>;
+}
+
+interface NeedsBackfillData {
+  sessionsDir: string;
+}
+
+/**
+ * Trim a possibly-absent string; blank or nullish input collapses to
+ * undefined so callers can use simple truthiness on the result.
+ */
+function toOptionalNonEmptyString(value: string | undefined): string | undefined {
+  const trimmed = value?.trim();
+  return trimmed ? trimmed : undefined;
+}
+
+/**
+ * Convert an optional Date into a YYYY-MM-DD (UTC) filter string, or null
+ * when no bound was supplied. Invalid Dates fail the assertion.
+ */
+function toDateFilterString(value: Date | null | undefined): string | null {
+  if (value === null || value === undefined) {
+    return null;
+  }
+
+  const epochMs = value.getTime();
+  assert(Number.isFinite(epochMs), "Analytics date filter must be a valid Date");
+
+  const [isoDate] = value.toISOString().split("T");
+  return isoDate;
+}
+
+// Running sums accumulated per provider while rolling up model rows.
+interface ProviderCacheHitTotals {
+  cachedTokens: number;
+  totalPromptTokens: number;
+  responseCount: number;
+}
+
+/**
+ * Resolve a model string to a lowercase provider name, falling back to
+ * "unknown" when the shared parser yields a blank provider.
+ */
+function normalizeProviderName(model: string): string {
+  const normalized = getModelProvider(model).trim().toLowerCase();
+  return normalized === "" ? "unknown" : normalized;
+}
+
+/**
+ * Roll model-level cache metrics into provider buckets using the same provider
+ * parser as the rest of the app (handles mux-gateway prefixes and malformed
+ * model strings consistently).
+ *
+ * Rows are validated (non-negative finite counters), summed per provider, and
+ * returned sorted by cache hit ratio desc, response count desc, provider asc.
+ * A provider with zero prompt tokens reports a ratio of 0.
+ */
+export function aggregateProviderCacheHitRows(
+  rows: ProviderCacheHitModelRow[]
+): Array<{ provider: string; cacheHitRatio: number; responseCount: number }> {
+  const totalsByProvider = new Map<string, ProviderCacheHitTotals>();
+
+  for (const row of rows) {
+    assert(typeof row.model === "string", "Provider cache hit aggregation requires a string model");
+    assert(
+      Number.isFinite(row.cached_tokens) && row.cached_tokens >= 0,
+      "Provider cache hit aggregation requires non-negative cached_tokens"
+    );
+    assert(
+      Number.isFinite(row.total_prompt_tokens) && row.total_prompt_tokens >= 0,
+      "Provider cache hit aggregation requires non-negative total_prompt_tokens"
+    );
+    assert(
+      Number.isFinite(row.response_count) && row.response_count >= 0,
+      "Provider cache hit aggregation requires non-negative response_count"
+    );
+
+    const provider = normalizeProviderName(row.model);
+    const current = totalsByProvider.get(provider);
+
+    if (current) {
+      current.cachedTokens += row.cached_tokens;
+      current.totalPromptTokens += row.total_prompt_tokens;
+      current.responseCount += row.response_count;
+      continue;
+    }
+
+    totalsByProvider.set(provider, {
+      cachedTokens: row.cached_tokens,
+      totalPromptTokens: row.total_prompt_tokens,
+      responseCount: row.response_count,
+    });
+  }
+
+  return Array.from(totalsByProvider.entries())
+    .map(([provider, totals]) => ({
+      provider,
+      cacheHitRatio:
+        totals.totalPromptTokens > 0 ? totals.cachedTokens / totals.totalPromptTokens : 0,
+      responseCount: totals.responseCount,
+    }))
+    .sort((left, right) => {
+      if (right.cacheHitRatio !== left.cacheHitRatio) {
+        return right.cacheHitRatio - left.cacheHitRatio;
+      }
+
+      if (right.responseCount !== left.responseCount) {
+        return right.responseCount - left.responseCount;
+      }
+
+      return left.provider.localeCompare(right.provider);
+    });
+}
+
+export class AnalyticsService {
+ private worker: Worker | null = null;
+ private messageIdCounter = 0;
+ private readonly pendingPromises = new Map<
+ number,
+ { resolve: (value: unknown) => void; reject: (error: Error) => void }
+ >();
+ private workerError: Error | null = null;
+ private initPromise: Promise | null = null;
+
+ constructor(private readonly config: Config) {}
+
+ private rejectPending(error: Error): void {
+ for (const pending of this.pendingPromises.values()) {
+ pending.reject(error);
+ }
+ this.pendingPromises.clear();
+ }
+
+ private resolveWorkerPath(): string {
+ const currentDir = path.dirname(__filename);
+ const pathParts = currentDir.split(path.sep);
+ const hasDist = pathParts.includes("dist");
+ const srcIndex = pathParts.lastIndexOf("src");
+
+ let workerDir = currentDir;
+ let workerFile = "analyticsWorker.js";
+
+ const isBun = !!(process as unknown as { isBun?: boolean }).isBun;
+ if (isBun && path.extname(__filename) === ".ts") {
+ workerFile = "analyticsWorker.ts";
+ } else if (srcIndex !== -1 && !hasDist) {
+ pathParts[srcIndex] = "dist";
+ workerDir = pathParts.join(path.sep);
+ }
+
+ return path.join(workerDir, workerFile);
+ }
+
+ private buildRebuildWorkspaceMetaById(): Record {
+ const configSnapshot = this.config.loadConfigOrDefault();
+ const workspaceMetaById: Record = {};
+
+ for (const [projectPath, projectConfig] of configSnapshot.projects) {
+ const normalizedProjectPath = toOptionalNonEmptyString(projectPath);
+ if (!normalizedProjectPath) {
+ log.warn("[AnalyticsService] Skipping rebuild metadata for empty project path");
+ continue;
+ }
+
+ const projectName = PlatformPaths.getProjectName(normalizedProjectPath);
+
+ for (const workspace of projectConfig.workspaces) {
+ const workspaceId = toOptionalNonEmptyString(workspace.id);
+ if (!workspaceId) {
+ continue;
+ }
+
+ if (workspaceMetaById[workspaceId]) {
+ log.warn(
+ "[AnalyticsService] Duplicate workspace ID in config while building rebuild metadata",
+ {
+ workspaceId,
+ projectPath: normalizedProjectPath,
+ }
+ );
+ continue;
+ }
+
+ workspaceMetaById[workspaceId] = {
+ projectPath: normalizedProjectPath,
+ projectName,
+ workspaceName: toOptionalNonEmptyString(workspace.name),
+ parentWorkspaceId: toOptionalNonEmptyString(workspace.parentWorkspaceId),
+ };
+ }
+ }
+
+ return workspaceMetaById;
+ }
+
+ private buildRebuildAllData(): RebuildAllData {
+ assert(
+ this.config.sessionsDir.trim().length > 0,
+ "Analytics rebuild requires a non-empty sessionsDir"
+ );
+
+ return {
+ sessionsDir: this.config.sessionsDir,
+ workspaceMetaById: this.buildRebuildWorkspaceMetaById(),
+ };
+ }
+
+ private readonly onWorkerMessage = (response: WorkerResponse): void => {
+ const pending = this.pendingPromises.get(response.messageId);
+ if (!pending) {
+ log.error("[AnalyticsService] No pending promise for message", {
+ messageId: response.messageId,
+ });
+ return;
+ }
+
+ this.pendingPromises.delete(response.messageId);
+
+ if ("error" in response) {
+ const error = new Error(response.error.message);
+ error.stack = response.error.stack;
+ pending.reject(error);
+ return;
+ }
+
+ pending.resolve(response.result);
+ };
+
+ private readonly onWorkerError = (error: Error): void => {
+ this.workerError = error;
+ this.rejectPending(error);
+ log.error("[AnalyticsService] Worker error", { error: getErrorMessage(error) });
+ };
+
+ private readonly onWorkerExit = (code: number): void => {
+ if (code === 0) {
+ return;
+ }
+
+ const error = new Error(`Analytics worker exited with code ${code}`);
+ this.workerError = error;
+ this.rejectPending(error);
+ log.error("[AnalyticsService] Worker exited unexpectedly", { code });
+ };
+
+ private async startWorker(): Promise {
+ const dbDir = path.join(this.config.rootDir, "analytics");
+ await fs.mkdir(dbDir, { recursive: true });
+
+ const workerPath = this.resolveWorkerPath();
+ this.worker = new Worker(workerPath);
+ this.worker.unref();
+
+ this.worker.on("message", this.onWorkerMessage);
+ this.worker.on("error", this.onWorkerError);
+ this.worker.on("exit", this.onWorkerExit);
+
+ const dbPath = path.join(dbDir, "analytics.db");
+ await this.dispatch("init", { dbPath });
+
+ const backfillState = await this.dispatch("needsBackfill", {
+ sessionsDir: this.config.sessionsDir,
+ } satisfies NeedsBackfillData);
+ assert(
+ typeof backfillState.needsBackfill === "boolean",
+ "Analytics worker needsBackfill task must return a boolean"
+ );
+
+ if (!backfillState.needsBackfill) {
+ return;
+ }
+
+ // Backfill existing workspace history when analytics initialization is
+ // missing or appears partial (for example, when any session workspace lacks
+ // a matching watermark row, even if stale watermark rows keep counts equal).
+ // Once every session workspace has a watermark row, routine worker restarts
+ // skip full rebuilds, including zero-event histories. Awaited so the first
+ // query sees complete data instead of an empty/partially-rebuilt database.
+ try {
+ await this.dispatch("rebuildAll", this.buildRebuildAllData());
+ } catch (error) {
+ // Non-fatal: queries will work but may show partial historical data
+ // until incremental stream-end ingestion fills gaps.
+ log.warn("[AnalyticsService] Initial backfill failed (non-fatal)", {
+ error: getErrorMessage(error),
+ });
+ }
+ }
+
+ private ensureWorker(): Promise {
+ if (this.workerError) {
+ return Promise.reject(this.workerError);
+ }
+
+ this.initPromise ??= this.startWorker().catch((error: unknown) => {
+ const normalizedError = error instanceof Error ? error : new Error(getErrorMessage(error));
+ this.workerError = normalizedError;
+ this.initPromise = null;
+ throw normalizedError;
+ });
+
+ return this.initPromise;
+ }
+
+ private dispatch(taskName: string, data: unknown): Promise {
+ if (this.workerError) {
+ return Promise.reject(this.workerError);
+ }
+
+ const worker = this.worker;
+ assert(worker, `Analytics worker is unavailable for task '${taskName}'`);
+
+ const request: WorkerRequest = {
+ messageId: this.messageIdCounter,
+ taskName,
+ data,
+ };
+
+ this.messageIdCounter += 1;
+
+ return new Promise((resolve, reject) => {
+ this.pendingPromises.set(request.messageId, {
+ resolve: resolve as (value: unknown) => void,
+ reject,
+ });
+
+ try {
+ worker.postMessage(request);
+ } catch (error) {
+ this.pendingPromises.delete(request.messageId);
+ reject(error instanceof Error ? error : new Error(getErrorMessage(error)));
+ }
+ });
+ }
+
+ private async executeQuery(
+ queryName: AnalyticsQueryName,
+ params: Record
+ ): Promise {
+ await this.ensureWorker();
+ return this.dispatch("query", { queryName, params });
+ }
+
+ async getSummary(
+ projectPath: string | null,
+ from?: Date | null,
+ to?: Date | null
+ ): Promise<{
+ totalSpendUsd: number;
+ todaySpendUsd: number;
+ avgDailySpendUsd: number;
+ cacheHitRatio: number;
+ totalTokens: number;
+ totalResponses: number;
+ }> {
+ const row = await this.executeQuery("getSummary", {
+ projectPath,
+ from: toDateFilterString(from),
+ to: toDateFilterString(to),
+ });
+
+ return {
+ totalSpendUsd: row.total_spend_usd,
+ todaySpendUsd: row.today_spend_usd,
+ avgDailySpendUsd: row.avg_daily_spend_usd,
+ cacheHitRatio: row.cache_hit_ratio,
+ totalTokens: row.total_tokens,
+ totalResponses: row.total_responses,
+ };
+ }
+
+ async getSpendOverTime(params: {
+ granularity: "hour" | "day" | "week";
+ projectPath?: string | null;
+ from?: Date | null;
+ to?: Date | null;
+ }): Promise> {
+ const rows = await this.executeQuery("getSpendOverTime", {
+ granularity: params.granularity,
+ projectPath: params.projectPath ?? null,
+ from: toDateFilterString(params.from),
+ to: toDateFilterString(params.to),
+ });
+
+ return rows.map((row) => ({
+ bucket: row.bucket,
+ model: row.model,
+ costUsd: row.cost_usd,
+ }));
+ }
+
+ async getSpendByProject(
+ from?: Date | null,
+ to?: Date | null
+ ): Promise<
+ Array<{ projectName: string; projectPath: string; costUsd: number; tokenCount: number }>
+ > {
+ const rows = await this.executeQuery("getSpendByProject", {
+ from: toDateFilterString(from),
+ to: toDateFilterString(to),
+ });
+
+ return rows.map((row) => ({
+ projectName: row.project_name,
+ projectPath: row.project_path,
+ costUsd: row.cost_usd,
+ tokenCount: row.token_count,
+ }));
+ }
+
+ async getSpendByModel(
+ projectPath: string | null,
+ from?: Date | null,
+ to?: Date | null
+ ): Promise> {
+ const rows = await this.executeQuery("getSpendByModel", {
+ projectPath,
+ from: toDateFilterString(from),
+ to: toDateFilterString(to),
+ });
+
+ return rows.map((row) => ({
+ model: row.model,
+ costUsd: row.cost_usd,
+ tokenCount: row.token_count,
+ responseCount: row.response_count,
+ }));
+ }
+
+ async getTimingDistribution(
+ metric: "ttft" | "duration" | "tps",
+ projectPath: string | null,
+ from?: Date | null,
+ to?: Date | null
+ ): Promise<{
+ p50: number;
+ p90: number;
+ p99: number;
+ histogram: Array<{ bucket: number; count: number }>;
+ }> {
+ const row = await this.executeQuery("getTimingDistribution", {
+ metric,
+ projectPath,
+ from: toDateFilterString(from),
+ to: toDateFilterString(to),
+ });
+
+ return {
+ p50: row.percentiles.p50,
+ p90: row.percentiles.p90,
+ p99: row.percentiles.p99,
+ histogram: row.histogram.map((bucket) => ({
+ bucket: bucket.bucket,
+ count: bucket.count,
+ })),
+ };
+ }
+
+ async getAgentCostBreakdown(
+ projectPath: string | null,
+ from?: Date | null,
+ to?: Date | null
+ ): Promise<
+ Array<{ agentId: string; costUsd: number; tokenCount: number; responseCount: number }>
+ > {
+ const rows = await this.executeQuery("getAgentCostBreakdown", {
+ projectPath,
+ from: toDateFilterString(from),
+ to: toDateFilterString(to),
+ });
+
+ return rows.map((row) => ({
+ agentId: row.agent_id,
+ costUsd: row.cost_usd,
+ tokenCount: row.token_count,
+ responseCount: row.response_count,
+ }));
+ }
+
+ async getCacheHitRatioByProvider(
+ projectPath: string | null,
+ from?: Date | null,
+ to?: Date | null
+ ): Promise> {
+ const rows = await this.executeQuery("getCacheHitRatioByProvider", {
+ projectPath,
+ from: toDateFilterString(from),
+ to: toDateFilterString(to),
+ });
+
+ return aggregateProviderCacheHitRows(rows);
+ }
+
+ async rebuildAll(): Promise<{ success: boolean; workspacesIngested: number }> {
+ await this.ensureWorker();
+ const result = await this.dispatch("rebuildAll", this.buildRebuildAllData());
+
+ return {
+ success: true,
+ workspacesIngested: result.workspacesIngested,
+ };
+ }
+
+ clearWorkspace(workspaceId: string): void {
+ if (workspaceId.trim().length === 0) {
+ log.warn("[AnalyticsService] Skipping workspace clear due to missing workspaceId", {
+ workspaceId,
+ });
+ return;
+ }
+
+ const runClear = () => {
+ this.ensureWorker()
+ .then(() => this.dispatch("clearWorkspace", { workspaceId }))
+ .catch((error) => {
+ log.warn("[AnalyticsService] Failed to clear workspace analytics state", {
+ workspaceId,
+ error: getErrorMessage(error),
+ });
+ });
+ };
+
+ // Workspace-removal hooks can fire before analytics is ever opened in this
+ // process. If analytics DB does not exist yet, skip bootstrapping worker.
+ // If DB does exist (from prior runs), bootstrap and clear so stale rows are
+ // removed immediately after workspace deletion.
+ if (this.worker == null && this.initPromise == null && this.workerError == null) {
+ const dbPath = path.join(this.config.rootDir, "analytics", "analytics.db");
+ void fs
+ .access(dbPath)
+ .then(() => {
+ runClear();
+ })
+ .catch((error) => {
+ if ((error as NodeJS.ErrnoException).code === "ENOENT") {
+ return;
+ }
+
+ // For non-ENOENT access failures, attempt best-effort cleanup anyway.
+ runClear();
+ });
+ return;
+ }
+
+ runClear();
+ }
+
+ ingestWorkspace(workspaceId: string, sessionDir: string, meta: IngestWorkspaceMeta = {}): void {
+ if (workspaceId.trim().length === 0 || sessionDir.trim().length === 0) {
+ log.warn("[AnalyticsService] Skipping ingest due to missing workspace information", {
+ workspaceId,
+ sessionDir,
+ });
+ return;
+ }
+
+ this.ensureWorker()
+ .then(() => this.dispatch("ingest", { workspaceId, sessionDir, meta }))
+ .catch((error) => {
+ log.warn("[AnalyticsService] Failed to ingest workspace", {
+ workspaceId,
+ error: getErrorMessage(error),
+ });
+ });
+ }
+}
diff --git a/src/node/services/analytics/analyticsWorker.ts b/src/node/services/analytics/analyticsWorker.ts
new file mode 100644
index 0000000000..399b7d219e
--- /dev/null
+++ b/src/node/services/analytics/analyticsWorker.ts
@@ -0,0 +1,405 @@
+import assert from "node:assert/strict";
+import type { Dirent } from "node:fs";
+import * as fs from "node:fs/promises";
+import * as path from "node:path";
+import { parentPort } from "node:worker_threads";
+import { DuckDBInstance, type DuckDBConnection } from "@duckdb/node-api";
+import { getErrorMessage } from "@/common/utils/errors";
+import { shouldRunInitialBackfill } from "./backfillDecision";
+import { CHAT_FILE_NAME, clearWorkspaceAnalyticsState, ingestWorkspace, rebuildAll } from "./etl";
+import { executeNamedQuery } from "./queries";
+
+// Request/response envelopes mirroring the service-side protocol types.
+interface WorkerRequest {
+  messageId: number;
+  taskName: string;
+  data: unknown;
+}
+
+interface WorkerSuccessResponse {
+  messageId: number;
+  result: unknown;
+}
+
+interface WorkerErrorResponse {
+  messageId: number;
+  error: {
+    message: string;
+    stack?: string;
+  };
+}
+
+interface InitData {
+  dbPath: string;
+}
+
+// Optional workspace context attached to ingested events.
+interface WorkspaceMeta {
+  projectPath?: string;
+  projectName?: string;
+  workspaceName?: string;
+  parentWorkspaceId?: string;
+}
+
+interface IngestData {
+  workspaceId: string;
+  sessionDir: string;
+  meta?: WorkspaceMeta;
+}
+
+// Rebuild payload; metadata is keyed by workspace ID.
+interface RebuildAllData {
+  sessionsDir: string;
+  workspaceMetaById?: Record<string, WorkspaceMeta>;
+}
+
+interface NeedsBackfillData {
+  sessionsDir: string;
+}
+
+interface ClearWorkspaceData {
+  workspaceId: string;
+}
+
+interface QueryData {
+  queryName: string;
+  params: Record<string, unknown>;
+}
+
+// One row per assistant response: workspace/project identity, token and cost
+// breakdowns, and timing metrics.
+const CREATE_EVENTS_TABLE_SQL = `
+CREATE TABLE IF NOT EXISTS events (
+  workspace_id VARCHAR NOT NULL,
+  project_path VARCHAR,
+  project_name VARCHAR,
+  workspace_name VARCHAR,
+  parent_workspace_id VARCHAR,
+  agent_id VARCHAR,
+  timestamp BIGINT,
+  date DATE,
+  model VARCHAR,
+  thinking_level VARCHAR,
+  input_tokens INTEGER DEFAULT 0,
+  output_tokens INTEGER DEFAULT 0,
+  reasoning_tokens INTEGER DEFAULT 0,
+  cached_tokens INTEGER DEFAULT 0,
+  cache_create_tokens INTEGER DEFAULT 0,
+  input_cost_usd DOUBLE DEFAULT 0,
+  output_cost_usd DOUBLE DEFAULT 0,
+  reasoning_cost_usd DOUBLE DEFAULT 0,
+  cached_cost_usd DOUBLE DEFAULT 0,
+  total_cost_usd DOUBLE DEFAULT 0,
+  duration_ms DOUBLE,
+  ttft_ms DOUBLE,
+  streaming_ms DOUBLE,
+  tool_execution_ms DOUBLE,
+  output_tps DOUBLE,
+  response_index INTEGER,
+  is_sub_agent BOOLEAN DEFAULT false
+)
+`;
+
+// Per-workspace ingest progress so incremental ingestion can resume.
+const CREATE_WATERMARK_TABLE_SQL = `
+CREATE TABLE IF NOT EXISTS ingest_watermarks (
+  workspace_id VARCHAR PRIMARY KEY,
+  last_sequence BIGINT NOT NULL,
+  last_modified DOUBLE NOT NULL
+)
+`;
+
+// Single DuckDB connection owned by this worker; assigned once by handleInit.
+let conn: DuckDBConnection | null = null;
+
+// Returns the initialized connection; asserts if init has not run yet.
+function getConn(): DuckDBConnection {
+  assert(conn, "analytics worker has not been initialized");
+  return conn;
+}
+
+/** Open (or create) the DuckDB database and ensure both tables exist. */
+async function handleInit(data: InitData): Promise<void> {
+  assert(data.dbPath.trim().length > 0, "init requires a non-empty dbPath");
+
+  const instance = await DuckDBInstance.create(data.dbPath);
+  conn = await instance.connect();
+
+  const activeConn = getConn();
+  await activeConn.run(CREATE_EVENTS_TABLE_SQL);
+  await activeConn.run(CREATE_WATERMARK_TABLE_SQL);
+}
+
+/** Incrementally ingest one workspace's session history via the ETL module. */
+async function handleIngest(data: IngestData): Promise<void> {
+  assert(data.workspaceId.trim().length > 0, "ingest requires workspaceId");
+  assert(data.sessionDir.trim().length > 0, "ingest requires sessionDir");
+
+  await ingestWorkspace(getConn(), data.workspaceId, data.sessionDir, data.meta ?? {});
+}
+
+// Full rebuild of the analytics DB from every session directory. The explicit
+// Array.isArray guard exists because isRecord also accepts arrays.
+async function handleRebuildAll(data: RebuildAllData): Promise<{ workspacesIngested: number }> {
+  assert(data.sessionsDir.trim().length > 0, "rebuildAll requires sessionsDir");
+  if (data.workspaceMetaById != null) {
+    assert(
+      isRecord(data.workspaceMetaById) && !Array.isArray(data.workspaceMetaById),
+      "rebuildAll workspaceMetaById must be an object when provided"
+    );
+  }
+
+  return rebuildAll(getConn(), data.sessionsDir, data.workspaceMetaById ?? {});
+}
+
+/** Delete one workspace's analytics rows (events + watermark). */
+async function handleClearWorkspace(data: ClearWorkspaceData): Promise<void> {
+  assert(data.workspaceId.trim().length > 0, "clearWorkspace requires workspaceId");
+  await clearWorkspaceAnalyticsState(getConn(), data.workspaceId);
+}
+
+/** Execute a named read-only query; the result shape depends on queryName. */
+async function handleQuery(data: QueryData): Promise<unknown> {
+  assert(data.queryName.trim().length > 0, "query requires queryName");
+  return executeNamedQuery(getConn(), data.queryName, data.params);
+}
+
+/** True for any non-null object (note: arrays also pass this check). */
+function isRecord(value: unknown): value is Record<string, unknown> {
+  return typeof value === "object" && value !== null;
+}
+
+/**
+ * Coerce a DuckDB numeric cell (number or bigint) into a non-negative safe
+ * integer, or null when the value is negative, fractional, unsafe, or not
+ * numeric at all.
+ */
+function parseNonNegativeInteger(value: unknown): number | null {
+  if (typeof value === "bigint") {
+    const asNumber = Number(value);
+    return Number.isSafeInteger(asNumber) && asNumber >= 0 ? asNumber : null;
+  }
+
+  if (typeof value === "number" && Number.isInteger(value) && value >= 0) {
+    return value;
+  }
+
+  return null;
+}
+
+function parseBooleanLike(value: unknown): boolean | null {
+ if (typeof value === "boolean") {
+ return value;
+ }
+
+ const parsed = parseNonNegativeInteger(value);
+ if (parsed === 0) {
+ return false;
+ }
+
+ if (parsed === 1) {
+ return true;
+ }
+
+ return null;
+}
+
+/**
+ * Return the value unchanged when it is a string with non-whitespace content;
+ * otherwise null. Whitespace is preserved in the returned string.
+ */
+function parseNonEmptyString(value: unknown): string | null {
+  if (typeof value !== "string" || value.trim() === "") {
+    return null;
+  }
+
+  return value;
+}
+
+/**
+ * List workspace directory names under sessionsDir that contain a chat file
+ * (i.e. have real session history). A missing sessionsDir or a chat file
+ * deleted mid-scan is treated as "no history"; other fs errors propagate.
+ */
+async function listSessionWorkspaceIdsWithHistory(sessionsDir: string): Promise<string[]> {
+  let entries: Dirent[];
+
+  try {
+    entries = await fs.readdir(sessionsDir, { withFileTypes: true });
+  } catch (error) {
+    if (isRecord(error) && error.code === "ENOENT") {
+      return [];
+    }
+
+    throw error;
+  }
+
+  const sessionWorkspaceIds: string[] = [];
+
+  for (const entry of entries) {
+    if (!entry.isDirectory()) {
+      continue;
+    }
+
+    const chatPath = path.join(sessionsDir, entry.name, CHAT_FILE_NAME);
+
+    try {
+      const chatStat = await fs.stat(chatPath);
+      if (chatStat.isFile()) {
+        const workspaceId = parseNonEmptyString(entry.name);
+        assert(
+          workspaceId !== null,
+          "needsBackfill expected session workspace directory names to be non-empty"
+        );
+        sessionWorkspaceIds.push(workspaceId);
+      }
+    } catch (error) {
+      if (isRecord(error) && error.code === "ENOENT") {
+        continue;
+      }
+
+      throw error;
+    }
+  }
+
+  return sessionWorkspaceIds;
+}
+
+/** Collect the set of workspace IDs that have an ingest watermark row. */
+async function listWatermarkWorkspaceIds(): Promise<Set<string>> {
+  const result = await getConn().run("SELECT workspace_id FROM ingest_watermarks");
+  const rows = await result.getRowObjectsJS();
+
+  const watermarkWorkspaceIds = new Set<string>();
+  for (const row of rows) {
+    const workspaceId = parseNonEmptyString(row.workspace_id);
+    assert(
+      workspaceId !== null,
+      "needsBackfill expected ingest_watermarks rows to have non-empty workspace_id"
+    );
+    watermarkWorkspaceIds.add(workspaceId);
+  }
+
+  return watermarkWorkspaceIds;
+}
+
+// Gather DB counts and on-disk workspace coverage, then delegate the actual
+// decision to the pure shouldRunInitialBackfill() so it stays unit-testable.
+async function handleNeedsBackfill(data: NeedsBackfillData): Promise<{ needsBackfill: boolean }> {
+  assert(data.sessionsDir.trim().length > 0, "needsBackfill requires sessionsDir");
+
+  // Single round-trip for the three DB-side signals.
+  const result = await getConn().run(`
+    SELECT
+      (SELECT COUNT(*) FROM events) AS event_count,
+      (SELECT COUNT(*) FROM ingest_watermarks) AS watermark_count,
+      (SELECT EXISTS(SELECT 1 FROM ingest_watermarks WHERE last_sequence >= 0))
+        AS has_any_watermark_at_or_above_zero
+  `);
+  const rows = await result.getRowObjectsJS();
+  assert(rows.length === 1, "needsBackfill should return exactly one row");
+
+  const eventCount = parseNonNegativeInteger(rows[0].event_count);
+  assert(eventCount !== null, "needsBackfill expected a non-negative integer event_count");
+
+  const watermarkCount = parseNonNegativeInteger(rows[0].watermark_count);
+  assert(watermarkCount !== null, "needsBackfill expected a non-negative integer watermark_count");
+
+  const hasAnyWatermarkAtOrAboveZero = parseBooleanLike(rows[0].has_any_watermark_at_or_above_zero);
+  assert(
+    hasAnyWatermarkAtOrAboveZero !== null,
+    "needsBackfill expected boolean has_any_watermark_at_or_above_zero"
+  );
+
+  const sessionWorkspaceIds = await listSessionWorkspaceIdsWithHistory(data.sessionsDir);
+  const sessionWorkspaceCount = sessionWorkspaceIds.length;
+  const sessionWorkspaceIdSet = new Set(sessionWorkspaceIds);
+
+  const watermarkWorkspaceIds = await listWatermarkWorkspaceIds();
+  // Sanity check: the COUNT(*) above and the ID listing must agree.
+  assert(
+    watermarkWorkspaceIds.size === watermarkCount,
+    "needsBackfill expected watermark_count to match ingest_watermarks workspace IDs"
+  );
+
+  // Detect coverage gaps in both directions: sessions without watermarks and
+  // stale watermarks whose workspace no longer exists on disk.
+  const hasSessionWorkspaceMissingWatermark = sessionWorkspaceIds.some(
+    (workspaceId) => !watermarkWorkspaceIds.has(workspaceId)
+  );
+  const hasWatermarkMissingSessionWorkspace = [...watermarkWorkspaceIds].some(
+    (workspaceId) => !sessionWorkspaceIdSet.has(workspaceId)
+  );
+
+  return {
+    needsBackfill: shouldRunInitialBackfill({
+      eventCount,
+      watermarkCount,
+      sessionWorkspaceCount,
+      hasSessionWorkspaceMissingWatermark,
+      hasWatermarkMissingSessionWorkspace,
+      hasAnyWatermarkAtOrAboveZero,
+    }),
+  };
+}
+
+/** Route a task name to its handler; unknown names are a protocol error. */
+async function dispatchTask(taskName: string, data: unknown): Promise<unknown> {
+  switch (taskName) {
+    case "init":
+      return handleInit(data as InitData);
+    case "ingest":
+      return handleIngest(data as IngestData);
+    case "rebuildAll":
+      return handleRebuildAll(data as RebuildAllData);
+    case "clearWorkspace":
+      return handleClearWorkspace(data as ClearWorkspaceData);
+    case "query":
+      return handleQuery(data as QueryData);
+    case "needsBackfill":
+      return handleNeedsBackfill(data as NeedsBackfillData);
+    default:
+      throw new Error(`Unknown analytics worker task: ${taskName}`);
+  }
+}
+
+/** Narrow parentPort to non-null; this module must run inside a Worker. */
+function requireParentPort(): NonNullable<typeof parentPort> {
+  if (parentPort == null) {
+    throw new Error("analytics worker requires a parentPort");
+  }
+
+  return parentPort;
+}
+
+const workerParentPort = requireParentPort();
+
+/**
+ * Pick the messageId to echo back: the request's own ID when it is a valid
+ * non-negative integer, otherwise -1 as a sentinel for malformed requests.
+ */
+function toResponseMessageId(message: WorkerRequest): number {
+  const { messageId } = message;
+  return Number.isInteger(messageId) && messageId >= 0 ? messageId : -1;
+}
+
+// Best-effort postMessage back to the parent. Serialization failures are
+// written to stderr instead of thrown, so one bad payload cannot kill the
+// worker's message loop.
+function postWorkerResponse(response: WorkerSuccessResponse | WorkerErrorResponse): void {
+  try {
+    workerParentPort.postMessage(response);
+  } catch (error) {
+    process.stderr.write(
+      `[analytics-worker] Failed to post worker response: ${getErrorMessage(error)}\n`
+    );
+  }
+}
+
+// Validate and execute one request, always answering with a response that
+// carries either the result or a serialized error. The messageId is computed
+// up front so even a malformed request gets an error reply (ID -1).
+async function processMessage(message: WorkerRequest): Promise<void> {
+  const responseMessageId = toResponseMessageId(message);
+
+  try {
+    assert(
+      Number.isInteger(message.messageId) && message.messageId >= 0,
+      "analytics worker message must include a non-negative integer messageId"
+    );
+    assert(
+      typeof message.taskName === "string" && message.taskName.trim().length > 0,
+      "analytics worker message requires taskName"
+    );
+
+    const result = await dispatchTask(message.taskName, message.data);
+    const response: WorkerSuccessResponse = {
+      messageId: responseMessageId,
+      result,
+    };
+    postWorkerResponse(response);
+  } catch (error) {
+    // Error objects do not structured-clone across threads; copy the fields.
+    const response: WorkerErrorResponse = {
+      messageId: responseMessageId,
+      error: {
+        message: getErrorMessage(error),
+        stack: error instanceof Error ? error.stack : undefined,
+      },
+    };
+    postWorkerResponse(response);
+  }
+}
+
+// FIFO queue of in-flight work; each incoming message is chained onto it.
+let messageQueue: Promise<void> = Promise.resolve();
+
+workerParentPort.on("message", (message: WorkerRequest) => {
+  // Serialize ETL and query tasks to avoid races when ingest/rebuild requests
+  // arrive back-to-back from the parent process. The rejection handler keeps
+  // the chain alive even if a prior task somehow rejected.
+  messageQueue = messageQueue.then(
+    () => processMessage(message),
+    () => processMessage(message)
+  );
+});
diff --git a/src/node/services/analytics/backfillDecision.test.ts b/src/node/services/analytics/backfillDecision.test.ts
new file mode 100644
index 0000000000..6af0674729
--- /dev/null
+++ b/src/node/services/analytics/backfillDecision.test.ts
@@ -0,0 +1,130 @@
+import { describe, expect, test } from "bun:test";
+import { shouldRunInitialBackfill } from "./backfillDecision";
+
+// Table-driven coverage of the startup backfill decision matrix: watermark
+// coverage gaps, stale rows, and fully-initialized (including zero-event)
+// states.
+describe("shouldRunInitialBackfill", () => {
+  test("returns true when session workspaces exist but watermark coverage is missing", () => {
+    expect(
+      shouldRunInitialBackfill({
+        eventCount: 1,
+        watermarkCount: 0,
+        sessionWorkspaceCount: 2,
+        hasSessionWorkspaceMissingWatermark: true,
+        hasWatermarkMissingSessionWorkspace: false,
+        hasAnyWatermarkAtOrAboveZero: false,
+      })
+    ).toBe(true);
+
+    expect(
+      shouldRunInitialBackfill({
+        eventCount: 0,
+        watermarkCount: 0,
+        sessionWorkspaceCount: 1,
+        hasSessionWorkspaceMissingWatermark: true,
+        hasWatermarkMissingSessionWorkspace: false,
+        hasAnyWatermarkAtOrAboveZero: false,
+      })
+    ).toBe(true);
+  });
+
+  test("returns true when any session workspace is missing a watermark row", () => {
+    expect(
+      shouldRunInitialBackfill({
+        eventCount: 10,
+        watermarkCount: 1,
+        sessionWorkspaceCount: 2,
+        hasSessionWorkspaceMissingWatermark: true,
+        hasWatermarkMissingSessionWorkspace: false,
+        hasAnyWatermarkAtOrAboveZero: false,
+      })
+    ).toBe(true);
+  });
+
+  test("returns true when a watermark references a workspace missing on disk", () => {
+    expect(
+      shouldRunInitialBackfill({
+        eventCount: 3,
+        watermarkCount: 2,
+        sessionWorkspaceCount: 2,
+        hasSessionWorkspaceMissingWatermark: false,
+        hasWatermarkMissingSessionWorkspace: true,
+        hasAnyWatermarkAtOrAboveZero: false,
+      })
+    ).toBe(true);
+  });
+
+  test("returns true when events are missing but watermarks show prior assistant history", () => {
+    expect(
+      shouldRunInitialBackfill({
+        eventCount: 0,
+        watermarkCount: 2,
+        sessionWorkspaceCount: 2,
+        hasSessionWorkspaceMissingWatermark: false,
+        hasWatermarkMissingSessionWorkspace: false,
+        hasAnyWatermarkAtOrAboveZero: true,
+      })
+    ).toBe(true);
+  });
+
+  test("returns false for fully initialized zero-event histories", () => {
+    expect(
+      shouldRunInitialBackfill({
+        eventCount: 0,
+        watermarkCount: 2,
+        sessionWorkspaceCount: 2,
+        hasSessionWorkspaceMissingWatermark: false,
+        hasWatermarkMissingSessionWorkspace: false,
+        hasAnyWatermarkAtOrAboveZero: false,
+      })
+    ).toBe(false);
+  });
+
+  test("returns false when events already exist and watermark coverage is complete", () => {
+    expect(
+      shouldRunInitialBackfill({
+        eventCount: 3,
+        watermarkCount: 2,
+        sessionWorkspaceCount: 2,
+        hasSessionWorkspaceMissingWatermark: false,
+        hasWatermarkMissingSessionWorkspace: false,
+        hasAnyWatermarkAtOrAboveZero: true,
+      })
+    ).toBe(false);
+  });
+
+  test("returns false when there are no session workspaces and the DB is empty", () => {
+    expect(
+      shouldRunInitialBackfill({
+        eventCount: 0,
+        watermarkCount: 0,
+        sessionWorkspaceCount: 0,
+        hasSessionWorkspaceMissingWatermark: false,
+        hasWatermarkMissingSessionWorkspace: false,
+        hasAnyWatermarkAtOrAboveZero: false,
+      })
+    ).toBe(false);
+  });
+
+  test("returns true when there are no session workspaces but stale DB rows remain", () => {
+    expect(
+      shouldRunInitialBackfill({
+        eventCount: 5,
+        watermarkCount: 0,
+        sessionWorkspaceCount: 0,
+        hasSessionWorkspaceMissingWatermark: false,
+        hasWatermarkMissingSessionWorkspace: false,
+        hasAnyWatermarkAtOrAboveZero: true,
+      })
+    ).toBe(true);
+
+    expect(
+      shouldRunInitialBackfill({
+        eventCount: 0,
+        watermarkCount: 2,
+        sessionWorkspaceCount: 0,
+        hasSessionWorkspaceMissingWatermark: false,
+        hasWatermarkMissingSessionWorkspace: false,
+        hasAnyWatermarkAtOrAboveZero: false,
+      })
+    ).toBe(true);
+  });
+});
diff --git a/src/node/services/analytics/backfillDecision.ts b/src/node/services/analytics/backfillDecision.ts
new file mode 100644
index 0000000000..d4c98ee423
--- /dev/null
+++ b/src/node/services/analytics/backfillDecision.ts
@@ -0,0 +1,77 @@
+import assert from "node:assert/strict";
+
+export interface BackfillDecisionInput {
+ eventCount: number;
+ watermarkCount: number;
+ sessionWorkspaceCount: number;
+ hasSessionWorkspaceMissingWatermark: boolean;
+ hasWatermarkMissingSessionWorkspace: boolean;
+ hasAnyWatermarkAtOrAboveZero: boolean;
+}
+
+export function shouldRunInitialBackfill(input: BackfillDecisionInput): boolean {
+ assert(
+ Number.isInteger(input.eventCount) && input.eventCount >= 0,
+ "shouldRunInitialBackfill requires a non-negative integer eventCount"
+ );
+ assert(
+ Number.isInteger(input.watermarkCount) && input.watermarkCount >= 0,
+ "shouldRunInitialBackfill requires a non-negative integer watermarkCount"
+ );
+ assert(
+ Number.isInteger(input.sessionWorkspaceCount) && input.sessionWorkspaceCount >= 0,
+ "shouldRunInitialBackfill requires a non-negative integer sessionWorkspaceCount"
+ );
+ assert(
+ typeof input.hasSessionWorkspaceMissingWatermark === "boolean",
+ "shouldRunInitialBackfill requires boolean hasSessionWorkspaceMissingWatermark"
+ );
+ assert(
+ typeof input.hasWatermarkMissingSessionWorkspace === "boolean",
+ "shouldRunInitialBackfill requires boolean hasWatermarkMissingSessionWorkspace"
+ );
+ assert(
+ typeof input.hasAnyWatermarkAtOrAboveZero === "boolean",
+ "shouldRunInitialBackfill requires boolean hasAnyWatermarkAtOrAboveZero"
+ );
+
+ if (input.sessionWorkspaceCount === 0) {
+ // No live session workspaces means any persisted analytics rows are stale
+ // leftovers from deleted workspaces and should be purged via rebuild.
+ return input.watermarkCount > 0 || input.eventCount > 0;
+ }
+
+ if (input.watermarkCount === 0) {
+ // Event rows can exist without any watermark rows when ingestion is interrupted
+ // between writes. Treat missing watermarks as incomplete initialization so
+ // startup repairs the partial state on the next boot.
+ return true;
+ }
+
+ if (input.hasSessionWorkspaceMissingWatermark) {
+ // Count parity alone is not enough: stale watermark rows can keep the count
+ // equal while still leaving current session workspaces uncovered.
+ return true;
+ }
+
+ if (input.hasWatermarkMissingSessionWorkspace) {
+ // Complementary coverage check: if a watermark points to a workspace that no
+ // longer exists on disk, rebuild so stale watermark/event rows are purged.
+ return true;
+ }
+
+ // Keep this as a defensive fallback in case upstream workspace-id coverage
+ // checks regress and start reporting false negatives.
+ if (input.watermarkCount < input.sessionWorkspaceCount) {
+ return true;
+ }
+
+ if (input.eventCount > 0) {
+ return false;
+ }
+
+ // Empty events + complete watermark coverage is usually a legitimate zero-event
+ // history. Rebuild only if any watermark proves assistant events were ingested
+ // before (last_sequence >= 0), which indicates the events table was wiped.
+ return input.hasAnyWatermarkAtOrAboveZero;
+}
diff --git a/src/node/services/analytics/etl.test.ts b/src/node/services/analytics/etl.test.ts
new file mode 100644
index 0000000000..15fa6c88f8
--- /dev/null
+++ b/src/node/services/analytics/etl.test.ts
@@ -0,0 +1,78 @@
+import { randomUUID } from "node:crypto";
+import * as os from "node:os";
+import * as path from "node:path";
+import { describe, expect, mock, test } from "bun:test";
+import type { DuckDBConnection } from "@duckdb/node-api";
+import { rebuildAll } from "./etl";
+
+function createMissingSessionsDir(): string {
+ return path.join(os.tmpdir(), `mux-analytics-etl-${process.pid}-${randomUUID()}`);
+}
+
+function createMockConn(runImplementation: (sql: string, params?: unknown[]) => Promise<unknown>): {
+ conn: DuckDBConnection;
+ runMock: ReturnType<typeof mock>;
+} {
+ const runMock = mock(runImplementation);
+
+ return {
+ conn: { run: runMock } as unknown as DuckDBConnection,
+ runMock,
+ };
+}
+
+function getSqlStatements(runMock: ReturnType<typeof mock>): string[] {
+ const calls = runMock.mock.calls as unknown[][];
+
+ return calls.map((call) => {
+ const sql = call[0];
+ if (typeof sql !== "string") {
+ throw new TypeError("Expected SQL statement as the first run() argument");
+ }
+
+ return sql;
+ });
+}
+
+describe("rebuildAll", () => {
+ test("deletes events and watermarks inside a single transaction", async () => {
+ const { conn, runMock } = createMockConn(() => Promise.resolve(undefined));
+
+ const result = await rebuildAll(conn, createMissingSessionsDir());
+
+ expect(result).toEqual({ workspacesIngested: 0 });
+ expect(getSqlStatements(runMock)).toEqual([
+ "BEGIN TRANSACTION",
+ "DELETE FROM events",
+ "DELETE FROM ingest_watermarks",
+ "COMMIT",
+ ]);
+ });
+
+ test("rolls back when the reset cannot delete both tables", async () => {
+ const deleteWatermarksError = new Error("delete ingest_watermarks failed");
+ const { conn, runMock } = createMockConn((sql) => {
+ if (sql === "DELETE FROM ingest_watermarks") {
+ return Promise.reject(deleteWatermarksError);
+ }
+
+ return Promise.resolve(undefined);
+ });
+
+ await rebuildAll(conn, createMissingSessionsDir()).then(
+ () => {
+ throw new Error("Expected rebuildAll to reject when deleting ingest_watermarks fails");
+ },
+ (error: unknown) => {
+ expect(error).toBe(deleteWatermarksError);
+ }
+ );
+
+ expect(getSqlStatements(runMock)).toEqual([
+ "BEGIN TRANSACTION",
+ "DELETE FROM events",
+ "DELETE FROM ingest_watermarks",
+ "ROLLBACK",
+ ]);
+ });
+});
diff --git a/src/node/services/analytics/etl.ts b/src/node/services/analytics/etl.ts
new file mode 100644
index 0000000000..e64be22759
--- /dev/null
+++ b/src/node/services/analytics/etl.ts
@@ -0,0 +1,959 @@
+import assert from "node:assert/strict";
+import type { Dirent } from "node:fs";
+import * as fs from "node:fs/promises";
+import * as path from "node:path";
+import type { LanguageModelV2Usage } from "@ai-sdk/provider";
+import type { DuckDBConnection } from "@duckdb/node-api";
+import { EventRowSchema, type EventRow } from "@/common/orpc/schemas/analytics";
+import { getErrorMessage } from "@/common/utils/errors";
+import { createDisplayUsage } from "@/common/utils/tokens/displayUsage";
+import { log } from "@/node/services/log";
+
+export const CHAT_FILE_NAME = "chat.jsonl";
+const METADATA_FILE_NAME = "metadata.json";
+
+const INSERT_EVENT_SQL = `
+INSERT INTO events (
+ workspace_id,
+ project_path,
+ project_name,
+ workspace_name,
+ parent_workspace_id,
+ agent_id,
+ timestamp,
+ date,
+ model,
+ thinking_level,
+ input_tokens,
+ output_tokens,
+ reasoning_tokens,
+ cached_tokens,
+ cache_create_tokens,
+ input_cost_usd,
+ output_cost_usd,
+ reasoning_cost_usd,
+ cached_cost_usd,
+ total_cost_usd,
+ duration_ms,
+ ttft_ms,
+ streaming_ms,
+ tool_execution_ms,
+ output_tps,
+ response_index,
+ is_sub_agent
+) VALUES (
+ ?, ?, ?, ?, ?, ?, ?, ?, ?, ?,
+ ?, ?, ?, ?, ?, ?, ?, ?, ?, ?,
+ ?, ?, ?, ?, ?, ?, ?
+)
+`;
+
+interface WorkspaceMeta {
+ projectPath?: string;
+ projectName?: string;
+ workspaceName?: string;
+ parentWorkspaceId?: string;
+}
+
+type WorkspaceMetaById = Record<string, WorkspaceMeta>;
+
+interface IngestWatermark {
+ lastSequence: number;
+ lastModified: number;
+}
+
+interface IngestEvent {
+ row: EventRow;
+ sequence: number;
+ date: string | null;
+}
+
+interface EventHeadSignatureParts {
+ timestamp: number | null;
+ model: string | null;
+ totalCostUsd: number | null;
+}
+
+interface PersistedMessage {
+ role?: unknown;
+ createdAt?: unknown;
+ metadata?: unknown;
+}
+
+const TTFT_FIELD_CANDIDATES = [
+ "ttftMs",
+ "ttft_ms",
+ "timeToFirstTokenMs",
+ "time_to_first_token_ms",
+ "timeToFirstToken",
+ "time_to_first_token",
+ "firstTokenMs",
+ "first_token_ms",
+] as const;
+
+const TIMING_RECORD_CANDIDATES = [
+ "providerMetadata",
+ "timing",
+ "timings",
+ "metrics",
+ "latency",
+ "performance",
+] as const;
+
+function isRecord(value: unknown): value is Record<string, unknown> {
+ return typeof value === "object" && value !== null;
+}
+
+function toFiniteNumber(value: unknown): number | null {
+ // DuckDB returns BIGINT columns as JS bigint — coerce to number when safe.
+ if (typeof value === "bigint") {
+ const coerced = Number(value);
+ return Number.isFinite(coerced) ? coerced : null;
+ }
+
+ if (typeof value !== "number" || !Number.isFinite(value)) {
+ return null;
+ }
+
+ return value;
+}
+
+function toFiniteInteger(value: unknown): number | null {
+ const parsed = toFiniteNumber(value);
+ if (parsed === null || !Number.isInteger(parsed)) {
+ return null;
+ }
+
+ return parsed;
+}
+
+function toOptionalString(value: unknown): string | undefined {
+ if (typeof value !== "string") {
+ return undefined;
+ }
+
+ const trimmed = value.trim();
+ return trimmed.length > 0 ? trimmed : undefined;
+}
+
+function parseCreatedAtTimestamp(value: unknown): number | null {
+ if (value instanceof Date && Number.isFinite(value.getTime())) {
+ return value.getTime();
+ }
+
+ if (typeof value !== "string") {
+ return null;
+ }
+
+ const parsed = Date.parse(value);
+ return Number.isFinite(parsed) ? parsed : null;
+}
+
+function dateBucketFromTimestamp(timestampMs: number | null): string | null {
+ if (timestampMs === null) {
+ return null;
+ }
+
+ const date = new Date(timestampMs);
+ if (!Number.isFinite(date.getTime())) {
+ return null;
+ }
+
+ return date.toISOString().slice(0, 10);
+}
+
+function parseUsage(rawUsage: unknown): LanguageModelV2Usage | undefined {
+ if (!isRecord(rawUsage)) {
+ return undefined;
+ }
+
+ const inputTokens = toFiniteNumber(rawUsage.inputTokens) ?? undefined;
+ const outputTokens = toFiniteNumber(rawUsage.outputTokens) ?? undefined;
+ const totalTokens = toFiniteNumber(rawUsage.totalTokens) ?? undefined;
+ const reasoningTokens = toFiniteNumber(rawUsage.reasoningTokens) ?? undefined;
+ const cachedInputTokens = toFiniteNumber(rawUsage.cachedInputTokens) ?? undefined;
+
+ if (
+ inputTokens === undefined &&
+ outputTokens === undefined &&
+ totalTokens === undefined &&
+ reasoningTokens === undefined &&
+ cachedInputTokens === undefined
+ ) {
+ return undefined;
+ }
+
+ return {
+ inputTokens,
+ outputTokens,
+ totalTokens,
+ reasoningTokens,
+ cachedInputTokens,
+ };
+}
+
+function readFirstFiniteMetric(
+ source: Record<string, unknown>,
+ keys: readonly string[]
+): number | null {
+ for (const key of keys) {
+ const parsed = toFiniteNumber(source[key]);
+ if (parsed !== null) {
+ return parsed;
+ }
+ }
+
+ return null;
+}
+
+function collectTimingMetricSources(
+ metadata: Record<string, unknown>
+): Array<Record<string, unknown>> {
+ const visited = new Set<Record<string, unknown>>();
+ const sources: Array<Record<string, unknown>> = [];
+
+ const enqueueRecord = (value: unknown): void => {
+ if (!isRecord(value) || visited.has(value)) {
+ return;
+ }
+
+ visited.add(value);
+ sources.push(value);
+ };
+
+ const enqueueKnownTimingCandidates = (value: unknown): void => {
+ if (!isRecord(value)) {
+ return;
+ }
+
+ enqueueRecord(value);
+
+ for (const key of TIMING_RECORD_CANDIDATES) {
+ enqueueRecord(value[key]);
+ }
+ };
+
+ enqueueKnownTimingCandidates(metadata);
+
+ const providerMetadata = metadata.providerMetadata;
+ enqueueKnownTimingCandidates(providerMetadata);
+
+ if (isRecord(providerMetadata)) {
+ for (const nestedProviderMetadata of Object.values(providerMetadata)) {
+ enqueueKnownTimingCandidates(nestedProviderMetadata);
+ }
+ }
+
+ return sources;
+}
+
+function extractTtftMs(metadata: Record<string, unknown>): number | null {
+ const timingSources = collectTimingMetricSources(metadata);
+ assert(timingSources.length > 0, "extractTtftMs: expected at least one timing source");
+
+ for (const source of timingSources) {
+ const ttftMs = readFirstFiniteMetric(source, TTFT_FIELD_CANDIDATES);
+ if (ttftMs !== null) {
+ return ttftMs;
+ }
+ }
+
+ return null;
+}
+
+function deriveProjectName(projectPath: string | undefined): string | undefined {
+ if (!projectPath) {
+ return undefined;
+ }
+
+ const basename = path.basename(projectPath);
+ return basename.length > 0 ? basename : undefined;
+}
+
+function parseWorkspaceMetaFromUnknown(value: unknown): WorkspaceMeta {
+ if (!isRecord(value)) {
+ return {};
+ }
+
+ return {
+ projectPath: toOptionalString(value.projectPath),
+ projectName: toOptionalString(value.projectName),
+ workspaceName: toOptionalString(value.name),
+ parentWorkspaceId: toOptionalString(value.parentWorkspaceId),
+ };
+}
+
+async function readWorkspaceMetaFromDisk(sessionDir: string): Promise<WorkspaceMeta> {
+ const metadataPath = path.join(sessionDir, METADATA_FILE_NAME);
+
+ try {
+ const raw = await fs.readFile(metadataPath, "utf-8");
+ return parseWorkspaceMetaFromUnknown(JSON.parse(raw) as unknown);
+ } catch (error) {
+ if (isRecord(error) && error.code === "ENOENT") {
+ return {};
+ }
+
+ log.warn("[analytics-etl] Failed to read workspace metadata", {
+ metadataPath,
+ error: getErrorMessage(error),
+ });
+ return {};
+ }
+}
+
+function mergeWorkspaceMeta(
+ sessionMeta: WorkspaceMeta,
+ overrideMeta: WorkspaceMeta
+): WorkspaceMeta {
+ const projectPath = overrideMeta.projectPath ?? sessionMeta.projectPath;
+
+ return {
+ projectPath,
+ projectName:
+ overrideMeta.projectName ?? sessionMeta.projectName ?? deriveProjectName(projectPath),
+ workspaceName: overrideMeta.workspaceName ?? sessionMeta.workspaceName,
+ parentWorkspaceId: overrideMeta.parentWorkspaceId ?? sessionMeta.parentWorkspaceId,
+ };
+}
+
+function parsePersistedMessage(
+ line: string,
+ workspaceId: string,
+ lineNumber: number
+): PersistedMessage | null {
+ try {
+ const parsed = JSON.parse(line) as unknown;
+ return isRecord(parsed) ? (parsed as PersistedMessage) : null;
+ } catch (error) {
+ log.warn("[analytics-etl] Skipping malformed chat.jsonl line", {
+ workspaceId,
+ lineNumber,
+ error: getErrorMessage(error),
+ });
+ return null;
+ }
+}
+
+function extractIngestEvent(params: {
+ workspaceId: string;
+ workspaceMeta: WorkspaceMeta;
+ message: PersistedMessage;
+ lineNumber: number;
+ responseIndex: number;
+}): IngestEvent | null {
+ if (params.message.role !== "assistant") {
+ return null;
+ }
+
+ const metadata = isRecord(params.message.metadata) ? params.message.metadata : null;
+ if (!metadata) {
+ return null;
+ }
+
+ const usage = parseUsage(metadata.usage);
+ if (!usage) {
+ return null;
+ }
+
+ const sequence = toFiniteInteger(metadata.historySequence) ?? params.lineNumber;
+
+ const model = toOptionalString(metadata.model);
+ const providerMetadata = isRecord(metadata.providerMetadata)
+ ? metadata.providerMetadata
+ : undefined;
+
+ const displayUsage = createDisplayUsage(usage, model ?? "unknown", providerMetadata);
+ assert(displayUsage, "createDisplayUsage should return data for parsed usage payloads");
+
+ const timestamp =
+ toFiniteNumber(metadata.timestamp) ?? parseCreatedAtTimestamp(params.message.createdAt) ?? null;
+ const dateBucket = dateBucketFromTimestamp(timestamp);
+
+ const inputTokens = displayUsage.input.tokens;
+ const outputTokens = displayUsage.output.tokens;
+ const reasoningTokens = displayUsage.reasoning.tokens;
+ const cachedTokens = displayUsage.cached.tokens;
+ const cacheCreateTokens = displayUsage.cacheCreate.tokens;
+
+ const inputCostUsd = displayUsage.input.cost_usd ?? 0;
+ const outputCostUsd = displayUsage.output.cost_usd ?? 0;
+ const reasoningCostUsd = displayUsage.reasoning.cost_usd ?? 0;
+ const cachedCostUsd =
+ (displayUsage.cached.cost_usd ?? 0) + (displayUsage.cacheCreate.cost_usd ?? 0);
+
+ const durationMs = toFiniteNumber(metadata.duration);
+ const ttftMs = extractTtftMs(metadata);
+ const outputTps =
+ durationMs !== null && durationMs > 0 ? outputTokens / (durationMs / 1000) : null;
+
+ const maybeEvent = {
+ workspace_id: params.workspaceId,
+ project_path: params.workspaceMeta.projectPath ?? null,
+ project_name: params.workspaceMeta.projectName ?? null,
+ workspace_name: params.workspaceMeta.workspaceName ?? null,
+ parent_workspace_id: params.workspaceMeta.parentWorkspaceId ?? null,
+ agent_id: toOptionalString(metadata.agentId) ?? null,
+ timestamp,
+ model: model ?? null,
+ thinking_level: toOptionalString(metadata.thinkingLevel) ?? null,
+ input_tokens: inputTokens,
+ output_tokens: outputTokens,
+ reasoning_tokens: reasoningTokens,
+ cached_tokens: cachedTokens,
+ cache_create_tokens: cacheCreateTokens,
+ input_cost_usd: inputCostUsd,
+ output_cost_usd: outputCostUsd,
+ reasoning_cost_usd: reasoningCostUsd,
+ cached_cost_usd: cachedCostUsd,
+ total_cost_usd: inputCostUsd + outputCostUsd + reasoningCostUsd + cachedCostUsd,
+ duration_ms: durationMs,
+ ttft_ms: ttftMs,
+ streaming_ms: null,
+ tool_execution_ms: null,
+ output_tps: outputTps,
+ response_index: params.responseIndex,
+ is_sub_agent: (params.workspaceMeta.parentWorkspaceId ?? "").length > 0,
+ };
+
+ const parsedEvent = EventRowSchema.safeParse(maybeEvent);
+ if (!parsedEvent.success) {
+ log.warn("[analytics-etl] Skipping invalid analytics row", {
+ workspaceId: params.workspaceId,
+ lineNumber: params.lineNumber,
+ issues: parsedEvent.error.issues,
+ });
+ return null;
+ }
+
+ return {
+ row: parsedEvent.data,
+ sequence,
+ date: dateBucket,
+ };
+}
+
+async function readWatermark(
+ conn: DuckDBConnection,
+ workspaceId: string
+): Promise<IngestWatermark> {
+ const result = await conn.run(
+ `SELECT last_sequence, last_modified FROM ingest_watermarks WHERE workspace_id = ?`,
+ [workspaceId]
+ );
+ const rows = await result.getRowObjectsJS();
+
+ if (rows.length === 0) {
+ return { lastSequence: -1, lastModified: 0 };
+ }
+
+ const row = rows[0];
+ const lastSequence = toFiniteNumber(row.last_sequence) ?? -1;
+ const lastModified = toFiniteNumber(row.last_modified) ?? 0;
+
+ return {
+ lastSequence,
+ lastModified,
+ };
+}
+
+async function readWorkspaceEventRowCount(
+ conn: DuckDBConnection,
+ workspaceId: string
+): Promise<number> {
+ const result = await conn.run(`SELECT COUNT(*) AS row_count FROM events WHERE workspace_id = ?`, [
+ workspaceId,
+ ]);
+ const rows = await result.getRowObjectsJS();
+ assert(rows.length === 1, "readWorkspaceEventRowCount: expected exactly one COUNT(*) result row");
+
+ const rowCount = toFiniteInteger(rows[0].row_count);
+ assert(
+ rowCount !== null && rowCount >= 0,
+ "readWorkspaceEventRowCount: expected non-negative integer row_count"
+ );
+
+ return rowCount;
+}
+
+export async function clearWorkspaceAnalyticsState(
+ conn: DuckDBConnection,
+ workspaceId: string
+): Promise<void> {
+ assert(workspaceId.trim().length > 0, "clearWorkspaceAnalyticsState: workspaceId is required");
+
+ await conn.run("BEGIN TRANSACTION");
+ try {
+ await conn.run("DELETE FROM events WHERE workspace_id = ?", [workspaceId]);
+ await conn.run("DELETE FROM ingest_watermarks WHERE workspace_id = ?", [workspaceId]);
+ await conn.run("COMMIT");
+ } catch (error) {
+ await conn.run("ROLLBACK");
+ throw error;
+ }
+}
+
+function serializeHeadSignatureValue(value: string | number | null): string {
+ if (value === null) {
+ return "null";
+ }
+
+ return `${typeof value}:${String(value)}`;
+}
+
+function createEventHeadSignature(parts: EventHeadSignatureParts): string {
+ return [
+ serializeHeadSignatureValue(parts.timestamp),
+ serializeHeadSignatureValue(parts.model),
+ serializeHeadSignatureValue(parts.totalCostUsd),
+ ].join("|");
+}
+
+function createEventHeadSignatureFromParsedEvent(event: IngestEvent): string {
+ const row = event.row;
+ assert(
+ Number.isFinite(row.total_cost_usd),
+ "createEventHeadSignatureFromParsedEvent: expected finite total_cost_usd"
+ );
+
+ return createEventHeadSignature({
+ timestamp: row.timestamp,
+ model: row.model,
+ totalCostUsd: row.total_cost_usd,
+ });
+}
+
+async function readPersistedWorkspaceHeadSignature(
+ conn: DuckDBConnection,
+ workspaceId: string
+): Promise<string | null> {
+ const result = await conn.run(
+ `
+ SELECT timestamp, model, total_cost_usd
+ FROM events
+ WHERE workspace_id = ?
+ ORDER BY response_index ASC NULLS LAST
+ LIMIT 1
+ `,
+ [workspaceId]
+ );
+ const rows = await result.getRowObjectsJS();
+
+ if (rows.length === 0) {
+ return null;
+ }
+
+ assert(
+ rows.length === 1,
+ "readPersistedWorkspaceHeadSignature: expected zero or one persisted head row"
+ );
+
+ const row = rows[0] as Record<string, unknown>;
+ const timestamp = toFiniteNumber(row.timestamp);
+ assert(
+ timestamp !== null || row.timestamp === null,
+ "readPersistedWorkspaceHeadSignature: expected timestamp to be finite number or null"
+ );
+
+ const model = row.model;
+ assert(
+ model === null || typeof model === "string",
+ "readPersistedWorkspaceHeadSignature: expected model to be string or null"
+ );
+
+ const totalCostUsd = toFiniteNumber(row.total_cost_usd);
+ assert(
+ totalCostUsd !== null || row.total_cost_usd === null,
+ "readPersistedWorkspaceHeadSignature: expected total_cost_usd to be finite number or null"
+ );
+
+ return createEventHeadSignature({
+ timestamp,
+ model,
+ totalCostUsd,
+ });
+}
+
+function hasPersistedWatermark(watermark: IngestWatermark): boolean {
+ return watermark.lastSequence >= 0 || watermark.lastModified > 0;
+}
+
+async function writeWatermark(
+ conn: DuckDBConnection,
+ workspaceId: string,
+ watermark: IngestWatermark
+): Promise<void> {
+ await conn.run(
+ `
+ INSERT INTO ingest_watermarks (workspace_id, last_sequence, last_modified)
+ VALUES (?, ?, ?)
+ ON CONFLICT(workspace_id) DO UPDATE
+ SET last_sequence = excluded.last_sequence,
+ last_modified = excluded.last_modified
+ `,
+ [workspaceId, watermark.lastSequence, watermark.lastModified]
+ );
+}
+
+async function replaceEventsByResponseIndex(
+ conn: DuckDBConnection,
+ workspaceId: string,
+ events: IngestEvent[]
+): Promise<void> {
+ if (events.length === 0) {
+ return;
+ }
+
+ const responseIndexes: number[] = [];
+ const seenResponseIndexes = new Set<number>();
+
+ for (const event of events) {
+ const row = event.row;
+ assert(
+ row.workspace_id === workspaceId,
+ "replaceEventsByResponseIndex: all rows must belong to the target workspace"
+ );
+ const responseIndex = row.response_index;
+ assert(responseIndex !== null, "replaceEventsByResponseIndex: response_index must be present");
+ assert(
+ Number.isInteger(responseIndex),
+ "replaceEventsByResponseIndex: response_index must be an integer"
+ );
+ if (seenResponseIndexes.has(responseIndex)) {
+ continue;
+ }
+
+ seenResponseIndexes.add(responseIndex);
+ responseIndexes.push(responseIndex);
+ }
+
+ assert(
+ responseIndexes.length > 0,
+ "replaceEventsByResponseIndex: non-empty events must include response indexes"
+ );
+
+ const placeholders = responseIndexes.map(() => "?").join(", ");
+
+ await conn.run("BEGIN TRANSACTION");
+ try {
+ // response_index is stable for in-place rewrites, so delete before insert to
+ // ensure rewritten rows replace stale analytics entries instead of appending.
+ await conn.run(
+ `DELETE FROM events WHERE workspace_id = ? AND response_index IN (${placeholders})`,
+ [workspaceId, ...responseIndexes]
+ );
+
+ for (const event of events) {
+ const row = event.row;
+ await conn.run(INSERT_EVENT_SQL, [
+ row.workspace_id,
+ row.project_path,
+ row.project_name,
+ row.workspace_name,
+ row.parent_workspace_id,
+ row.agent_id,
+ row.timestamp,
+ event.date,
+ row.model,
+ row.thinking_level,
+ row.input_tokens,
+ row.output_tokens,
+ row.reasoning_tokens,
+ row.cached_tokens,
+ row.cache_create_tokens,
+ row.input_cost_usd,
+ row.output_cost_usd,
+ row.reasoning_cost_usd,
+ row.cached_cost_usd,
+ row.total_cost_usd,
+ row.duration_ms,
+ row.ttft_ms,
+ row.streaming_ms,
+ row.tool_execution_ms,
+ row.output_tps,
+ row.response_index,
+ row.is_sub_agent,
+ ]);
+ }
+
+ await conn.run("COMMIT");
+ } catch (error) {
+ await conn.run("ROLLBACK");
+ throw error;
+ }
+}
+
+async function replaceWorkspaceEvents(
+ conn: DuckDBConnection,
+ workspaceId: string,
+ events: IngestEvent[]
+): Promise<void> {
+ await conn.run("BEGIN TRANSACTION");
+ try {
+ await conn.run("DELETE FROM events WHERE workspace_id = ?", [workspaceId]);
+
+ for (const event of events) {
+ const row = event.row;
+ assert(
+ row.workspace_id === workspaceId,
+ "replaceWorkspaceEvents: all rows must belong to the target workspace"
+ );
+ await conn.run(INSERT_EVENT_SQL, [
+ row.workspace_id,
+ row.project_path,
+ row.project_name,
+ row.workspace_name,
+ row.parent_workspace_id,
+ row.agent_id,
+ row.timestamp,
+ event.date,
+ row.model,
+ row.thinking_level,
+ row.input_tokens,
+ row.output_tokens,
+ row.reasoning_tokens,
+ row.cached_tokens,
+ row.cache_create_tokens,
+ row.input_cost_usd,
+ row.output_cost_usd,
+ row.reasoning_cost_usd,
+ row.cached_cost_usd,
+ row.total_cost_usd,
+ row.duration_ms,
+ row.ttft_ms,
+ row.streaming_ms,
+ row.tool_execution_ms,
+ row.output_tps,
+ row.response_index,
+ row.is_sub_agent,
+ ]);
+ }
+
+ await conn.run("COMMIT");
+ } catch (error) {
+ await conn.run("ROLLBACK");
+ throw error;
+ }
+}
+
+function getMaxSequence(events: IngestEvent[]): number | null {
+ if (events.length === 0) {
+ return null;
+ }
+
+ let maxSequence = Number.NEGATIVE_INFINITY;
+ for (const event of events) {
+ maxSequence = Math.max(maxSequence, event.sequence);
+ }
+
+ assert(Number.isFinite(maxSequence), "getMaxSequence: expected finite max sequence");
+ return maxSequence;
+}
+
+function shouldRebuildWorkspaceForSequenceRegression(params: {
+ watermark: IngestWatermark;
+ parsedMaxSequence: number | null;
+ hasTruncation: boolean;
+ hasHeadMismatch: boolean;
+}): boolean {
+ if (params.hasTruncation || params.hasHeadMismatch) {
+ return true;
+ }
+
+ if (!hasPersistedWatermark(params.watermark)) {
+ return false;
+ }
+
+ if (params.parsedMaxSequence === null) {
+ return true;
+ }
+
+ return params.parsedMaxSequence < params.watermark.lastSequence;
+}
+
+export async function ingestWorkspace(
+ conn: DuckDBConnection,
+ workspaceId: string,
+ sessionDir: string,
+ meta: WorkspaceMeta
+): Promise<void> {
+ assert(workspaceId.trim().length > 0, "ingestWorkspace: workspaceId is required");
+ assert(sessionDir.trim().length > 0, "ingestWorkspace: sessionDir is required");
+
+ const chatPath = path.join(sessionDir, CHAT_FILE_NAME);
+
+ let stat: Awaited<ReturnType<typeof fs.stat>>;
+ try {
+ stat = await fs.stat(chatPath);
+ } catch (error) {
+ if (isRecord(error) && error.code === "ENOENT") {
+ // Remove stale analytics state when the workspace history file no longer exists.
+ await clearWorkspaceAnalyticsState(conn, workspaceId);
+ return;
+ }
+
+ throw error;
+ }
+
+ const watermark = await readWatermark(conn, workspaceId);
+ if (stat.mtimeMs <= watermark.lastModified) {
+ return;
+ }
+
+ const persistedMeta = await readWorkspaceMetaFromDisk(sessionDir);
+ const workspaceMeta = mergeWorkspaceMeta(persistedMeta, meta);
+
+ const chatContents = await fs.readFile(chatPath, "utf-8");
+ const lines = chatContents.split("\n").filter((line) => line.trim().length > 0);
+
+ let responseIndex = 0;
+ const parsedEvents: IngestEvent[] = [];
+
+ for (let i = 0; i < lines.length; i++) {
+ const lineNumber = i + 1;
+ const message = parsePersistedMessage(lines[i], workspaceId, lineNumber);
+ if (!message) {
+ continue;
+ }
+
+ const event = extractIngestEvent({
+ workspaceId,
+ workspaceMeta,
+ message,
+ lineNumber,
+ responseIndex,
+ });
+ if (!event) {
+ continue;
+ }
+
+ assert(
+ Number.isInteger(event.sequence),
+ "ingestWorkspace: expected assistant event sequence to be an integer"
+ );
+
+ responseIndex += 1;
+ parsedEvents.push(event);
+ }
+
+ const parsedMaxSequence = getMaxSequence(parsedEvents);
+ const hasExistingWatermark = hasPersistedWatermark(watermark);
+ const persistedEventRowCount = await readWorkspaceEventRowCount(conn, workspaceId);
+ // Sequence-only checks miss truncations when the tail keeps the previous max
+ // historySequence. If fewer assistant events are parsed than currently stored,
+ // stale deleted rows remain unless we force a full workspace rebuild.
+ const hasTruncation = hasExistingWatermark && parsedEvents.length < persistedEventRowCount;
+ const persistedHeadSignature = hasExistingWatermark
+ ? await readPersistedWorkspaceHeadSignature(conn, workspaceId)
+ : null;
+ const parsedHeadSignature =
+ parsedEvents.length > 0 ? createEventHeadSignatureFromParsedEvent(parsedEvents[0]) : null;
+ // Count checks can miss head truncation + append rewrites where assistant row
+ // totals recover. Head signature drift reveals shifted response indexes.
+ const hasHeadMismatch =
+ hasExistingWatermark &&
+ persistedHeadSignature !== null &&
+ parsedHeadSignature !== null &&
+ persistedHeadSignature !== parsedHeadSignature;
+
+ const shouldRebuild = shouldRebuildWorkspaceForSequenceRegression({
+ watermark,
+ parsedMaxSequence,
+ hasTruncation,
+ hasHeadMismatch,
+ });
+
+ if (shouldRebuild) {
+ // Rebuild on truncation, head mismatch, or max-sequence rewinds. This removes
+ // stale rows, including the zero-assistant-event truncation case.
+ await replaceWorkspaceEvents(conn, workspaceId, parsedEvents);
+
+ await writeWatermark(conn, workspaceId, {
+ lastSequence: parsedMaxSequence ?? -1,
+ lastModified: stat.mtimeMs,
+ });
+ return;
+ }
+
+ let maxSequence = watermark.lastSequence;
+ const eventsToInsert: IngestEvent[] = [];
+ for (const event of parsedEvents) {
+ maxSequence = Math.max(maxSequence, event.sequence);
+
+ // Include the current watermark sequence so in-place rewrites with the same
+ // historySequence refresh stale analytics rows instead of getting skipped forever.
+ if (event.sequence < watermark.lastSequence) {
+ continue;
+ }
+
+ eventsToInsert.push(event);
+ }
+
+ await replaceEventsByResponseIndex(conn, workspaceId, eventsToInsert);
+
+ await writeWatermark(conn, workspaceId, {
+ lastSequence: maxSequence,
+ lastModified: stat.mtimeMs,
+ });
+}
+
+export async function rebuildAll(
+ conn: DuckDBConnection,
+ sessionsDir: string,
+ workspaceMetaById: WorkspaceMetaById = {}
+): Promise<{ workspacesIngested: number }> {
+ assert(sessionsDir.trim().length > 0, "rebuildAll: sessionsDir is required");
+ assert(
+ isRecord(workspaceMetaById) && !Array.isArray(workspaceMetaById),
+ "rebuildAll: workspaceMetaById must be an object"
+ );
+
+ await conn.run("BEGIN TRANSACTION");
+ try {
+ // Reset both tables atomically so a crash cannot leave empty events with
+ // stale watermarks that would incorrectly suppress initial backfill.
+ await conn.run("DELETE FROM events");
+ await conn.run("DELETE FROM ingest_watermarks");
+ await conn.run("COMMIT");
+ } catch (error) {
+ await conn.run("ROLLBACK");
+ throw error;
+ }
+
+ let entries: Dirent[] | null = null;
+ try {
+ entries = await fs.readdir(sessionsDir, { withFileTypes: true });
+ } catch (error) {
+ if (isRecord(error) && error.code === "ENOENT") {
+ return { workspacesIngested: 0 };
+ }
+
+ throw error;
+ }
+
+ assert(entries, "rebuildAll expected a directory listing");
+
+ let workspacesIngested = 0;
+
+ for (const entry of entries) {
+ if (!entry.isDirectory()) {
+ continue;
+ }
+
+ const workspaceId = entry.name;
+ const sessionDir = path.join(sessionsDir, workspaceId);
+ const suppliedWorkspaceMeta = workspaceMetaById[workspaceId] ?? {};
+
+ try {
+ await ingestWorkspace(conn, workspaceId, sessionDir, suppliedWorkspaceMeta);
+ workspacesIngested += 1;
+ } catch (error) {
+ log.warn("[analytics-etl] Failed to ingest workspace during rebuild", {
+ workspaceId,
+ error: getErrorMessage(error),
+ });
+ }
+ }
+
+ return { workspacesIngested };
+}
diff --git a/src/node/services/analytics/queries.ts b/src/node/services/analytics/queries.ts
new file mode 100644
index 0000000000..9e7111d394
--- /dev/null
+++ b/src/node/services/analytics/queries.ts
@@ -0,0 +1,508 @@
+import assert from "node:assert/strict";
+import type { DuckDBConnection, DuckDBValue } from "@duckdb/node-api";
+import type { z } from "zod";
+import {
+ AgentCostRowSchema,
+ HistogramBucketSchema,
+ ProviderCacheHitModelRowSchema,
+ SpendByModelRowSchema,
+ SpendByProjectRowSchema,
+ SpendOverTimeRowSchema,
+ SummaryRowSchema,
+ TimingPercentilesRowSchema,
+ type AgentCostRow,
+ type HistogramBucket,
+ type ProviderCacheHitModelRow,
+ type SpendByModelRow,
+ type SpendByProjectRow,
+ type SpendOverTimeRow,
+ type SummaryRow,
+ type TimingPercentilesRow,
+} from "@/common/orpc/schemas/analytics";
+
+// DuckDB surfaces 64-bit integers as JS bigints; values outside this range
+// cannot be converted to a JS number losslessly.
+const MAX_SAFE_BIGINT = BigInt(Number.MAX_SAFE_INTEGER);
+const MIN_SAFE_BIGINT = BigInt(Number.MIN_SAFE_INTEGER);
+
+// Time-bucket sizes accepted by the spend-over-time query.
+type Granularity = "hour" | "day" | "week";
+// Timing metrics accepted by the timing-distribution query.
+type TimingMetric = "ttft" | "duration" | "tps";
+
+// Result of the timing-distribution query: summary percentiles plus a
+// fixed-width histogram computed over the same filtered rows.
+interface TimingDistributionResult {
+  percentiles: TimingPercentilesRow;
+  histogram: HistogramBucket[];
+}
+
+/**
+ * Convert one raw DuckDB cell value into a plain JSON-friendly value:
+ * bigints become numbers (asserting they fit the safe-integer range) and
+ * Dates become ISO-8601 strings; everything else passes through untouched.
+ */
+function normalizeDuckDbValue(value: unknown): unknown {
+  if (value instanceof Date) {
+    return value.toISOString();
+  }
+
+  if (typeof value !== "bigint") {
+    return value;
+  }
+
+  const withinSafeRange = value >= MIN_SAFE_BIGINT && value <= MAX_SAFE_BIGINT;
+  assert(withinSafeRange, `DuckDB bigint out of JS safe integer range: ${value}`);
+  return Number(value);
+}
+
+/**
+ * Normalize every column of a DuckDB row object via normalizeDuckDbValue so
+ * downstream zod schemas only ever see numbers/strings/plain values.
+ */
+function normalizeDuckDbRow(row: Record<string, unknown>): Record<string, unknown> {
+  const normalized: Record<string, unknown> = {};
+
+  for (const [key, value] of Object.entries(row)) {
+    normalized[key] = normalizeDuckDbValue(value);
+  }
+
+  return normalized;
+}
+
+async function typedQuery(
+ conn: DuckDBConnection,
+ sql: string,
+ params: DuckDBValue[],
+ schema: z.ZodType
+): Promise {
+ const result = await conn.run(sql, params);
+ const rows = await result.getRowObjectsJS();
+
+ return rows.map((row) => schema.parse(normalizeDuckDbRow(row)));
+}
+
+async function typedQueryOne(
+ conn: DuckDBConnection,
+ sql: string,
+ params: DuckDBValue[],
+ schema: z.ZodType
+): Promise {
+ const rows = await typedQuery(conn, sql, params, schema);
+ assert(rows.length === 1, `Expected one row, got ${rows.length}`);
+ return rows[0];
+}
+
+/** Return a trimmed non-empty string, or null for any other input. */
+function parseOptionalString(value: unknown): string | null {
+  if (typeof value === "string") {
+    const normalized = value.trim();
+    if (normalized.length > 0) {
+      return normalized;
+    }
+  }
+
+  return null;
+}
+
+/**
+ * Normalize an optional date filter (Date instance or date/timestamp string)
+ * to a `YYYY-MM-DD` string, or null when absent/blank.
+ *
+ * NOTE(review): string inputs are parsed with `new Date(...)` and rendered in
+ * UTC, so non-ISO or zoned timestamps may land on the adjacent UTC day
+ * relative to the caller's local calendar — presumed intentional since event
+ * dates are bucketed in UTC; confirm against callers.
+ *
+ * @throws AssertionError for unparseable date strings, Error for other types.
+ */
+function parseDateFilter(value: unknown): string | null {
+  if (value === null || value === undefined) {
+    return null;
+  }
+
+  if (value instanceof Date) {
+    assert(Number.isFinite(value.getTime()), "Invalid Date provided for analytics filter");
+    return value.toISOString().slice(0, 10);
+  }
+
+  if (typeof value === "string") {
+    const trimmed = value.trim();
+    if (!trimmed) {
+      return null;
+    }
+
+    // Accept either full ISO timestamps or YYYY-MM-DD and normalize to YYYY-MM-DD.
+    const parsed = new Date(trimmed);
+    assert(Number.isFinite(parsed.getTime()), `Invalid date filter value: ${trimmed}`);
+    return parsed.toISOString().slice(0, 10);
+  }
+
+  throw new Error("Unsupported analytics date filter type");
+}
+
+/** Narrow an untrusted value to a supported time-bucket granularity. */
+function parseGranularity(value: unknown): Granularity {
+  const isGranularity = value === "hour" || value === "day" || value === "week";
+  assert(isGranularity, `Invalid granularity: ${String(value)}`);
+  return value as Granularity;
+}
+
+/** Narrow an untrusted value to a supported timing metric name. */
+function parseTimingMetric(value: unknown): TimingMetric {
+  const isTimingMetric = value === "ttft" || value === "duration" || value === "tps";
+  assert(isTimingMetric, `Invalid timing metric: ${String(value)}`);
+  return value as TimingMetric;
+}
+
+/** Render the current (or supplied) instant as a UTC `YYYY-MM-DD` date key. */
+function getTodayUtcDateString(now: Date = new Date()): string {
+  const epochMs = now.getTime();
+  assert(Number.isFinite(epochMs), "Invalid Date while computing analytics summary date");
+  const iso = now.toISOString();
+  return iso.slice(0, 10);
+}
+
+async function querySummary(
+ conn: DuckDBConnection,
+ params: {
+ projectPath: string | null;
+ from: string | null;
+ to: string | null;
+ }
+): Promise {
+ // events.date is derived from message timestamps via UTC date buckets, so
+ // summary "today" must use a UTC date key instead of DuckDB local CURRENT_DATE.
+ const todayUtcDate = getTodayUtcDateString();
+
+ return typedQueryOne(
+ conn,
+ `
+ SELECT
+ COALESCE(SUM(total_cost_usd), 0) AS total_spend_usd,
+ COALESCE(SUM(CASE WHEN date = CAST(? AS DATE) THEN total_cost_usd ELSE 0 END), 0) AS today_spend_usd,
+ COALESCE(
+ COALESCE(SUM(total_cost_usd), 0) / NULLIF(COUNT(DISTINCT date), 0),
+ 0
+ ) AS avg_daily_spend_usd,
+ COALESCE(
+ SUM(cached_tokens)::DOUBLE / NULLIF(SUM(input_tokens + cached_tokens + cache_create_tokens), 0),
+ 0
+ ) AS cache_hit_ratio,
+ COALESCE(
+ SUM(input_tokens + output_tokens + reasoning_tokens + cached_tokens + cache_create_tokens),
+ 0
+ ) AS total_tokens,
+ COALESCE(COUNT(*), 0) AS total_responses
+ FROM events
+ WHERE (? IS NULL OR project_path = ?)
+ AND (? IS NULL OR date >= CAST(? AS DATE))
+ AND (? IS NULL OR date <= CAST(? AS DATE))
+ `,
+ [
+ todayUtcDate,
+ params.projectPath,
+ params.projectPath,
+ params.from,
+ params.from,
+ params.to,
+ params.to,
+ ],
+ SummaryRowSchema
+ );
+}
+
+async function querySpendOverTime(
+ conn: DuckDBConnection,
+ params: {
+ granularity: Granularity;
+ projectPath: string | null;
+ from: string | null;
+ to: string | null;
+ }
+): Promise {
+ const bucketExpression: Record = {
+ hour: "DATE_TRUNC('hour', to_timestamp(timestamp / 1000.0))",
+ day: "DATE_TRUNC('day', date)",
+ week: "DATE_TRUNC('week', date)",
+ };
+
+ const bucketExpr = bucketExpression[params.granularity];
+ const bucketNullFilter =
+ params.granularity === "hour" ? "AND timestamp IS NOT NULL" : "AND date IS NOT NULL";
+
+ return typedQuery(
+ conn,
+ `
+ SELECT
+ CAST(${bucketExpr} AS VARCHAR) AS bucket,
+ COALESCE(model, 'unknown') AS model,
+ COALESCE(SUM(total_cost_usd), 0) AS cost_usd
+ FROM events
+ WHERE
+ (? IS NULL OR project_path = ?)
+ AND (? IS NULL OR date >= CAST(? AS DATE))
+ AND (? IS NULL OR date <= CAST(? AS DATE))
+ ${bucketNullFilter}
+ GROUP BY 1, 2
+ ORDER BY 1 ASC, 2 ASC
+ `,
+ [params.projectPath, params.projectPath, params.from, params.from, params.to, params.to],
+ SpendOverTimeRowSchema
+ );
+}
+
+async function querySpendByProject(
+ conn: DuckDBConnection,
+ params: { from: string | null; to: string | null }
+): Promise {
+ return typedQuery(
+ conn,
+ `
+ SELECT
+ COALESCE(project_name, 'unknown') AS project_name,
+ COALESCE(project_path, 'unknown') AS project_path,
+ COALESCE(SUM(total_cost_usd), 0) AS cost_usd,
+ COALESCE(
+ SUM(input_tokens + output_tokens + reasoning_tokens + cached_tokens + cache_create_tokens),
+ 0
+ ) AS token_count
+ FROM events
+ WHERE (? IS NULL OR date >= CAST(? AS DATE))
+ AND (? IS NULL OR date <= CAST(? AS DATE))
+ GROUP BY 1, 2
+ ORDER BY cost_usd DESC
+ `,
+ [params.from, params.from, params.to, params.to],
+ SpendByProjectRowSchema
+ );
+}
+
+async function querySpendByModel(
+ conn: DuckDBConnection,
+ projectPath: string | null,
+ from: string | null,
+ to: string | null
+): Promise {
+ return typedQuery(
+ conn,
+ `
+ SELECT
+ COALESCE(model, 'unknown') AS model,
+ COALESCE(SUM(total_cost_usd), 0) AS cost_usd,
+ COALESCE(
+ SUM(input_tokens + output_tokens + reasoning_tokens + cached_tokens + cache_create_tokens),
+ 0
+ ) AS token_count,
+ COALESCE(COUNT(*), 0) AS response_count
+ FROM events
+ WHERE (? IS NULL OR project_path = ?)
+ AND (? IS NULL OR date >= CAST(? AS DATE))
+ AND (? IS NULL OR date <= CAST(? AS DATE))
+ GROUP BY 1
+ ORDER BY cost_usd DESC
+ `,
+ [projectPath, projectPath, from, from, to, to],
+ SpendByModelRowSchema
+ );
+}
+
+/**
+ * Percentiles (p50/p90/p99) and a 20-bucket histogram for one timing metric
+ * over the filtered events. Both queries apply identical filters so the
+ * percentile reference lines align with the histogram.
+ */
+async function queryTimingDistribution(
+  conn: DuckDBConnection,
+  metric: TimingMetric,
+  projectPath: string | null,
+  from: string | null,
+  to: string | null
+): Promise<TimingDistributionResult> {
+  const columnByMetric: Record<TimingMetric, string> = {
+    ttft: "ttft_ms",
+    duration: "duration_ms",
+    tps: "output_tps",
+  };
+
+  const column = columnByMetric[metric];
+
+  const percentiles = await typedQueryOne(
+    conn,
+    `
+    SELECT
+      COALESCE(PERCENTILE_CONT(0.5) WITHIN GROUP (ORDER BY ${column}), 0) AS p50,
+      COALESCE(PERCENTILE_CONT(0.9) WITHIN GROUP (ORDER BY ${column}), 0) AS p90,
+      COALESCE(PERCENTILE_CONT(0.99) WITHIN GROUP (ORDER BY ${column}), 0) AS p99
+    FROM events
+    WHERE ${column} IS NOT NULL
+      AND (? IS NULL OR project_path = ?)
+      AND (? IS NULL OR date >= CAST(? AS DATE))
+      AND (? IS NULL OR date <= CAST(? AS DATE))
+    `,
+    [projectPath, projectPath, from, from, to, to],
+    TimingPercentilesRowSchema
+  );
+
+  // Histogram emits real metric values (e.g. ms, tok/s) as bucket labels,
+  // not abstract 1..20 indices. This way the chart x-axis maps directly to
+  // meaningful units and percentile reference lines land correctly.
+  const histogram = await typedQuery(
+    conn,
+    `
+    WITH stats AS (
+      SELECT
+        MIN(${column}) AS min_value,
+        MAX(${column}) AS max_value
+      FROM events
+      WHERE ${column} IS NOT NULL
+        AND (? IS NULL OR project_path = ?)
+        AND (? IS NULL OR date >= CAST(? AS DATE))
+        AND (? IS NULL OR date <= CAST(? AS DATE))
+    ),
+    bucketed AS (
+      SELECT
+        CASE
+          WHEN stats.min_value IS NULL OR stats.max_value IS NULL THEN NULL
+          WHEN stats.max_value = stats.min_value THEN 1
+          ELSE LEAST(
+            20,
+            GREATEST(
+              1,
+              CAST(
+                FLOOR(
+                  ((events.${column} - stats.min_value) / NULLIF(stats.max_value - stats.min_value, 0)) * 20
+                ) AS INTEGER
+              ) + 1
+            )
+          )
+        END AS bucket_id
+      FROM events
+      CROSS JOIN stats
+      WHERE events.${column} IS NOT NULL
+        AND (? IS NULL OR events.project_path = ?)
+        AND (? IS NULL OR events.date >= CAST(? AS DATE))
+        AND (? IS NULL OR events.date <= CAST(? AS DATE))
+    )
+    SELECT
+      COALESCE(
+        ROUND(
+          (SELECT min_value FROM stats) +
+          (bucket_id - 0.5) * (
+            NULLIF((SELECT max_value FROM stats) - (SELECT min_value FROM stats), 0) / 20.0
+          ),
+          2
+        ),
+        -- When min == max (single distinct value), NULLIF produces NULL.
+        -- Fall back to the actual value so the bucket label is meaningful.
+        ROUND((SELECT min_value FROM stats), 2)
+      ) AS bucket,
+      COUNT(*) AS count
+    FROM bucketed
+    WHERE bucket_id IS NOT NULL
+    GROUP BY bucket_id
+    ORDER BY bucket_id
+    `,
+    [projectPath, projectPath, from, from, to, to, projectPath, projectPath, from, from, to, to],
+    HistogramBucketSchema
+  );
+
+  return {
+    percentiles,
+    histogram,
+  };
+}
+
+async function queryAgentCostBreakdown(
+ conn: DuckDBConnection,
+ projectPath: string | null,
+ from: string | null,
+ to: string | null
+): Promise {
+ return typedQuery(
+ conn,
+ `
+ SELECT
+ COALESCE(agent_id, 'unknown') AS agent_id,
+ COALESCE(SUM(total_cost_usd), 0) AS cost_usd,
+ COALESCE(
+ SUM(input_tokens + output_tokens + reasoning_tokens + cached_tokens + cache_create_tokens),
+ 0
+ ) AS token_count,
+ COALESCE(COUNT(*), 0) AS response_count
+ FROM events
+ WHERE (? IS NULL OR project_path = ?)
+ AND (? IS NULL OR date >= CAST(? AS DATE))
+ AND (? IS NULL OR date <= CAST(? AS DATE))
+ GROUP BY 1
+ ORDER BY cost_usd DESC
+ `,
+ [projectPath, projectPath, from, from, to, to],
+ AgentCostRowSchema
+ );
+}
+
+async function queryCacheHitRatioByProvider(
+ conn: DuckDBConnection,
+ projectPath: string | null,
+ from: string | null,
+ to: string | null
+): Promise {
+ return typedQuery(
+ conn,
+ `
+ SELECT
+ COALESCE(model, 'unknown') AS model,
+ COALESCE(SUM(cached_tokens), 0) AS cached_tokens,
+ COALESCE(SUM(input_tokens + cached_tokens + cache_create_tokens), 0) AS total_prompt_tokens,
+ COALESCE(COUNT(*), 0) AS response_count
+ FROM events
+ WHERE (? IS NULL OR project_path = ?)
+ AND (? IS NULL OR date >= CAST(? AS DATE))
+ AND (? IS NULL OR date <= CAST(? AS DATE))
+ GROUP BY 1
+ ORDER BY response_count DESC
+ `,
+ [projectPath, projectPath, from, from, to, to],
+ ProviderCacheHitModelRowSchema
+ );
+}
+
+/**
+ * Dispatch a named analytics query to its typed implementation. Loosely-typed
+ * `params` are validated/normalized here (dates to YYYY-MM-DD, strings
+ * trimmed, enums narrowed) before reaching SQL.
+ *
+ * @throws when `queryName` is unknown or a parameter fails validation.
+ */
+export async function executeNamedQuery(
+  conn: DuckDBConnection,
+  queryName: string,
+  params: Record<string, unknown>
+): Promise<unknown> {
+  switch (queryName) {
+    case "getSummary": {
+      return querySummary(conn, {
+        projectPath: parseOptionalString(params.projectPath),
+        from: parseDateFilter(params.from),
+        to: parseDateFilter(params.to),
+      });
+    }
+
+    case "getSpendOverTime": {
+      return querySpendOverTime(conn, {
+        granularity: parseGranularity(params.granularity),
+        projectPath: parseOptionalString(params.projectPath),
+        from: parseDateFilter(params.from),
+        to: parseDateFilter(params.to),
+      });
+    }
+
+    case "getSpendByProject": {
+      return querySpendByProject(conn, {
+        from: parseDateFilter(params.from),
+        to: parseDateFilter(params.to),
+      });
+    }
+
+    case "getSpendByModel": {
+      return querySpendByModel(
+        conn,
+        parseOptionalString(params.projectPath),
+        parseDateFilter(params.from),
+        parseDateFilter(params.to)
+      );
+    }
+
+    case "getTimingDistribution": {
+      return queryTimingDistribution(
+        conn,
+        parseTimingMetric(params.metric),
+        parseOptionalString(params.projectPath),
+        parseDateFilter(params.from),
+        parseDateFilter(params.to)
+      );
+    }
+
+    case "getAgentCostBreakdown": {
+      return queryAgentCostBreakdown(
+        conn,
+        parseOptionalString(params.projectPath),
+        parseDateFilter(params.from),
+        parseDateFilter(params.to)
+      );
+    }
+
+    case "getCacheHitRatioByProvider": {
+      return queryCacheHitRatioByProvider(
+        conn,
+        parseOptionalString(params.projectPath),
+        parseDateFilter(params.from),
+        parseDateFilter(params.to)
+      );
+    }
+
+    default:
+      throw new Error(`Unknown analytics query: ${queryName}`);
+  }
+}
diff --git a/src/node/services/serviceContainer.ts b/src/node/services/serviceContainer.ts
index dd828cb7d0..1bb192ab18 100644
--- a/src/node/services/serviceContainer.ts
+++ b/src/node/services/serviceContainer.ts
@@ -39,6 +39,7 @@ import type {
} from "@/common/types/stream";
import { FeatureFlagService } from "@/node/services/featureFlagService";
import { SessionTimingService } from "@/node/services/sessionTimingService";
+import { AnalyticsService } from "@/node/services/analytics/analyticsService";
import { ExperimentsService } from "@/node/services/experimentsService";
import { WorkspaceMcpOverridesService } from "@/node/services/workspaceMcpOverridesService";
import { McpOauthService } from "@/node/services/mcpOauthService";
@@ -112,6 +113,7 @@ export class ServiceContainer {
public readonly telemetryService: TelemetryService;
public readonly featureFlagService: FeatureFlagService;
public readonly sessionTimingService: SessionTimingService;
+ public readonly analyticsService: AnalyticsService;
public readonly experimentsService: ExperimentsService;
public readonly signingService: SigningService;
public readonly policyService: PolicyService;
@@ -133,6 +135,7 @@ export class ServiceContainer {
muxHome: config.rootDir,
});
this.sessionTimingService = new SessionTimingService(config, this.telemetryService);
+ this.analyticsService = new AnalyticsService(config);
// Desktop passes WorkspaceMcpOverridesService explicitly so AIService uses
// the persistent config rather than creating a default with an ephemeral one.
@@ -253,9 +256,29 @@ export class ServiceContainer {
this.aiService.on("tool-call-end", (data: ToolCallEndEvent) =>
this.sessionTimingService.handleToolCallEnd(data)
);
- this.aiService.on("stream-end", (data: StreamEndEvent) =>
- this.sessionTimingService.handleStreamEnd(data)
- );
+ this.aiService.on("stream-end", (data: StreamEndEvent) => {
+ this.sessionTimingService.handleStreamEnd(data);
+
+ const workspaceLookup = this.config.findWorkspace(data.workspaceId);
+ const sessionDir = this.config.getSessionDir(data.workspaceId);
+ this.analyticsService.ingestWorkspace(data.workspaceId, sessionDir, {
+ projectPath: workspaceLookup?.projectPath,
+ projectName: workspaceLookup?.projectPath
+ ? path.basename(workspaceLookup.projectPath)
+ : undefined,
+ });
+ });
+ // WorkspaceService emits metadata:null after successful remove().
+ // Clear analytics rows immediately so deleted workspaces disappear from stats
+ // without waiting for a future ingest pass.
+ this.workspaceService.on("metadata", (event) => {
+ if (event.metadata !== null) {
+ return;
+ }
+
+ this.analyticsService.clearWorkspace(event.workspaceId);
+ });
+
this.aiService.on("stream-abort", (data: StreamAbortEvent) =>
this.sessionTimingService.handleStreamAbort(data)
);
@@ -447,6 +470,7 @@ export class ServiceContainer {
coderService: this.coderService,
serverAuthService: this.serverAuthService,
sshPromptService: this.sshPromptService,
+ analyticsService: this.analyticsService,
};
}
diff --git a/src/node/services/streamManager.test.ts b/src/node/services/streamManager.test.ts
index 1ae77ba396..d8e3f71978 100644
--- a/src/node/services/streamManager.test.ts
+++ b/src/node/services/streamManager.test.ts
@@ -891,6 +891,151 @@ describe("StreamManager - Unavailable Tool Handling", () => {
});
});
+describe("StreamManager - TTFT metadata persistence", () => {
+  const runtime = createRuntime({ type: "local", srcBaseDir: "/tmp" });
+
+  // Drives a no-op stream through processStreamWithCleanup with pre-populated
+  // parts, then reads back the finalized assistant message from history.
+  async function finalizeStreamAndReadMessage(params: {
+    workspaceId: string;
+    messageId: string;
+    historySequence: number;
+    startTime: number;
+    parts: unknown[];
+  }) {
+    const streamManager = new StreamManager(historyService);
+    // Suppress error events from bubbling up as uncaught exceptions during tests
+    streamManager.on("error", () => undefined);
+
+    const replaceTokenTrackerResult = Reflect.set(streamManager, "tokenTracker", {
+      setModel: () => Promise.resolve(undefined),
+      countTokens: () => Promise.resolve(0),
+    });
+    if (!replaceTokenTrackerResult) {
+      throw new Error("Failed to mock StreamManager.tokenTracker");
+    }
+
+    const appendResult = await historyService.appendToHistory(params.workspaceId, {
+      id: params.messageId,
+      role: "assistant",
+      metadata: {
+        historySequence: params.historySequence,
+        partial: true,
+      },
+      parts: [],
+    });
+    expect(appendResult.success).toBe(true);
+    if (!appendResult.success) {
+      throw new Error(appendResult.error);
+    }
+
+    const processStreamWithCleanup = Reflect.get(streamManager, "processStreamWithCleanup") as (
+      workspaceId: string,
+      streamInfo: unknown,
+      historySequence: number
+    ) => Promise<void>;
+    expect(typeof processStreamWithCleanup).toBe("function");
+
+    const streamInfo = {
+      state: "streaming",
+      streamResult: {
+        fullStream: (async function* () {
+          // No-op stream: tests verify stream-end finalization behavior from pre-populated parts.
+        })(),
+        totalUsage: Promise.resolve({ inputTokens: 4, outputTokens: 6, totalTokens: 10 }),
+        usage: Promise.resolve({ inputTokens: 4, outputTokens: 6, totalTokens: 10 }),
+        providerMetadata: Promise.resolve(undefined),
+        steps: Promise.resolve([]),
+      },
+      abortController: new AbortController(),
+      messageId: params.messageId,
+      token: "test-token",
+      startTime: params.startTime,
+      lastPartTimestamp: params.startTime,
+      toolCompletionTimestamps: new Map(),
+      model: KNOWN_MODELS.SONNET.id,
+      historySequence: params.historySequence,
+      parts: params.parts,
+      lastPartialWriteTime: 0,
+      partialWriteTimer: undefined,
+      partialWritePromise: undefined,
+      processingPromise: Promise.resolve(),
+      softInterrupt: { pending: false as const },
+      runtimeTempDir: "",
+      runtime,
+      cumulativeUsage: { inputTokens: 0, outputTokens: 0, totalTokens: 0 },
+      cumulativeProviderMetadata: undefined,
+      didRetryPreviousResponseIdAtStep: false,
+      currentStepStartIndex: 0,
+      stepTracker: {},
+    };
+
+    await processStreamWithCleanup.call(
+      streamManager,
+      params.workspaceId,
+      streamInfo,
+      params.historySequence
+    );
+
+    const historyResult = await historyService.getHistoryFromLatestBoundary(params.workspaceId);
+    expect(historyResult.success).toBe(true);
+    if (!historyResult.success) {
+      throw new Error(historyResult.error);
+    }
+
+    const updatedMessage = historyResult.data.find((message) => message.id === params.messageId);
+    expect(updatedMessage).toBeDefined();
+    if (!updatedMessage) {
+      throw new Error(`Expected updated message ${params.messageId} in history`);
+    }
+
+    return updatedMessage;
+  }
+
+  test("persists ttftMs in final assistant metadata when first-token timing is available", async () => {
+    const startTime = Date.now() - 1000;
+    const updatedMessage = await finalizeStreamAndReadMessage({
+      workspaceId: "ttft-present-workspace",
+      messageId: "ttft-present-message",
+      historySequence: 1,
+      startTime,
+      parts: [
+        {
+          type: "text",
+          text: "hello",
+          timestamp: startTime + 250,
+        },
+      ],
+    });
+
+    expect(updatedMessage.metadata?.ttftMs).toBe(250);
+  });
+
+  test("omits ttftMs in final assistant metadata when first-token timing is unavailable", async () => {
+    const startTime = Date.now() - 1000;
+    const updatedMessage = await finalizeStreamAndReadMessage({
+      workspaceId: "ttft-missing-workspace",
+      messageId: "ttft-missing-message",
+      historySequence: 1,
+      startTime,
+      parts: [
+        {
+          type: "dynamic-tool",
+          toolCallId: "tool-1",
+          toolName: "bash",
+          state: "output-available",
+          input: { script: "echo hi" },
+          output: { ok: true },
+          timestamp: startTime + 100,
+        },
+      ],
+    });
+
+    expect(updatedMessage.metadata?.ttftMs).toBeUndefined();
+    expect(Object.prototype.hasOwnProperty.call(updatedMessage.metadata ?? {}, "ttftMs")).toBe(
+      false
+    );
+  });
+});
+
describe("StreamManager - previousResponseId recovery", () => {
test("isResponseIdLost returns false for unknown IDs", () => {
const streamManager = new StreamManager(historyService);
diff --git a/src/node/services/streamManager.ts b/src/node/services/streamManager.ts
index c534621675..064d94e467 100644
--- a/src/node/services/streamManager.ts
+++ b/src/node/services/streamManager.ts
@@ -677,6 +677,33 @@ export class StreamManager extends EventEmitter {
return totalUsage;
}
+  /**
+   * Compute time-to-first-token for a finished stream: the delta between the
+   * stream's startTime and the timestamp on the first non-empty text or
+   * reasoning part. Returns undefined when no token-bearing part exists or
+   * any timing input is missing/non-finite, so callers can omit ttftMs
+   * entirely instead of persisting a bogus value.
+   */
+  private resolveTtftMsForStreamEnd(streamInfo: WorkspaceStreamInfo): number | undefined {
+    // First part (in array order) that actually carried generated tokens.
+    const firstTokenPart = streamInfo.parts.find(
+      (
+        part
+      ): part is Extract<
+        CompletedMessagePart,
+        { type: "text" | "reasoning"; timestamp?: number }
+      > => (part.type === "text" || part.type === "reasoning") && part.text.length > 0
+    );
+
+    if (!firstTokenPart) {
+      return undefined;
+    }
+
+    if (!Number.isFinite(streamInfo.startTime)) {
+      return undefined;
+    }
+
+    const firstTokenTimestamp = firstTokenPart.timestamp;
+    if (typeof firstTokenTimestamp !== "number" || !Number.isFinite(firstTokenTimestamp)) {
+      return undefined;
+    }
+
+    // Clamp to zero so clock skew can never produce a negative TTFT.
+    const ttftMs = Math.max(0, firstTokenTimestamp - streamInfo.startTime);
+    return Number.isFinite(ttftMs) ? ttftMs : undefined;
+  }
+
/**
* Aggregate provider metadata across all steps.
*
@@ -1941,6 +1968,7 @@ export class StreamManager extends EventEmitter {
const contextProviderMetadata =
streamMeta.contextProviderMetadata ?? streamInfo.lastStepProviderMetadata;
const duration = streamMeta.duration;
+ const ttftMs = this.resolveTtftMsForStreamEnd(streamInfo);
// Aggregated provider metadata across all steps (for cost calculation with cache tokens)
const providerMetadata = markProviderMetadataCostsIncluded(
await this.getAggregatedProviderMetadata(streamInfo),
@@ -1968,6 +1996,7 @@ export class StreamManager extends EventEmitter {
providerMetadata, // Aggregated (for cost calculation)
contextProviderMetadata, // Last step (for context window display)
duration,
+ ...(ttftMs !== undefined && { ttftMs }),
},
parts: streamInfo.parts, // Parts array with temporal ordering (includes reasoning)
};
diff --git a/tests/ipc/setup.ts b/tests/ipc/setup.ts
index fc228dd9a5..4ae57f9c4c 100644
--- a/tests/ipc/setup.ts
+++ b/tests/ipc/setup.ts
@@ -119,6 +119,7 @@ export async function createTestEnvironment(): Promise {
serverAuthService: services.serverAuthService,
policyService: services.policyService,
sshPromptService: services.sshPromptService,
+ analyticsService: services.analyticsService,
};
const orpc = createOrpcTestClient(orpcContext);