Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
9 changes: 9 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,15 @@
## Unreleased

### Added (CLI)
- **Agent and subagent tracking coverage.** Gemini sessions now emit one
provider call per assistant message with token usage instead of one aggregate
call per session, preserving per-message tools, bash commands, timestamps,
and nearest user prompts. Existing cached aggregate Gemini entries are
reparsed so the new per-message shape takes effect, and per-tool counts may
increase because repeated tools are now attributed to the specific Gemini
message that used them. Claude discovery also scans direct project-level
`subagents/*.jsonl` files, and Codex agent tool normalization is covered by
regression tests. Addresses #336.
- **Multiple subscription plans can be tracked at the same time.**
`codeburn plan set` now stores plans in a provider-keyed `plans` map, so
setting a Codex custom plan no longer overwrites an existing Claude plan.
Expand Down
27 changes: 21 additions & 6 deletions src/parser.ts
Original file line number Diff line number Diff line change
Expand Up @@ -1321,18 +1321,24 @@ async function parseSessionFile(

/**
 * Collects every `.jsonl` session file under `dirPath`, including subagent
 * transcripts stored in a direct `subagents/` child directory and in
 * per-session `<entry>/subagents/` directories.
 *
 * A Set deduplicates paths in case both scan passes surface the same file.
 * Unreadable/missing directories are treated as empty rather than throwing.
 *
 * @param dirPath - Project directory to scan.
 * @returns Absolute paths of all discovered `.jsonl` files (order not guaranteed).
 */
async function collectJsonlFiles(dirPath: string): Promise<string[]> {
  const files = await readdir(dirPath).catch(() => [])
  const jsonlFiles = new Set(files.filter(f => f.endsWith('.jsonl')).map(f => join(dirPath, f)))

  // Project-level subagent transcripts live directly in `<dirPath>/subagents`.
  const directSubagentsPath = join(dirPath, 'subagents')
  const directSubFiles = await readdir(directSubagentsPath).catch(() => [])
  for (const sf of directSubFiles) {
    if (sf.endsWith('.jsonl')) jsonlFiles.add(join(directSubagentsPath, sf))
  }

  // Session-level subagent transcripts live in `<dirPath>/<entry>/subagents`.
  for (const entry of files) {
    if (entry.endsWith('.jsonl')) continue
    const subagentsPath = join(dirPath, entry, 'subagents')
    const subFiles = await readdir(subagentsPath).catch(() => [])
    for (const sf of subFiles) {
      if (sf.endsWith('.jsonl')) jsonlFiles.add(join(subagentsPath, sf))
    }
  }

  return [...jsonlFiles]
}

async function scanProjectDirs(
Expand Down Expand Up @@ -1639,6 +1645,14 @@ function getOrCreateProviderSection(cache: SessionCache, provider: string): Prov
return section
}

/**
 * Detects cached Gemini files written in the legacy aggregate-per-session
 * shape, which must be reparsed into per-message calls.
 *
 * Legacy entries used `gemini:<sessionId>` as the deduplication key; the
 * granular format appends a message key after a second colon, so an exact
 * match on the bare session key identifies a legacy entry.
 *
 * @param providerName - Provider section being reconciled; only 'gemini' applies.
 * @param cached - Cached file record whose turns/calls are inspected.
 * @returns true when at least one call carries a legacy aggregate dedup key.
 */
function cachedFileNeedsProviderReparse(providerName: string, cached: CachedFile): boolean {
  if (providerName !== 'gemini') return false

  for (const turn of cached.turns) {
    const legacyKey = `gemini:${turn.sessionId}`
    if (turn.calls.some(call => call.deduplicationKey === legacyKey)) {
      return true
    }
  }
  return false
}

const warnedProviderReadFailures = new Set<string>()

function warnProviderReadFailureOnce(providerName: string, err: unknown): void {
Expand Down Expand Up @@ -1674,9 +1688,10 @@ async function parseProviderSources(
const fp = await fingerprintFile(source.path)
if (!fp) continue

const action = reconcileFile(fp, section.files[source.path])
if (action.action === 'unchanged') {
unchangedSources.push({ source, cached: section.files[source.path]! })
const cached = section.files[source.path]
const action = reconcileFile(fp, cached)
if (action.action === 'unchanged' && cached && !cachedFileNeedsProviderReparse(providerName, cached)) {
unchangedSources.push({ source, cached })
} else {
changedSources.push({ source, fp })
}
Expand Down
129 changes: 63 additions & 66 deletions src/providers/gemini.ts
Original file line number Diff line number Diff line change
Expand Up @@ -66,84 +66,81 @@ type GeminiSession = {
function parseSession(data: GeminiSession, seenKeys: Set<string>): ParsedProviderCall[] {
const results: ParsedProviderCall[] = []

const geminiMessages = data.messages.filter(m => m.type === 'gemini' && m.tokens && m.model)
if (geminiMessages.length === 0) return results

const dedupKey = `gemini:${data.sessionId}`
if (seenKeys.has(dedupKey)) return results
seenKeys.add(dedupKey)

let totalInput = 0
let totalOutput = 0
let totalCached = 0
let totalThoughts = 0
const allTools: string[] = []
const bashCommands: string[] = []
let model = ''

for (const msg of geminiMessages) {
const t = msg.tokens!
totalInput += t.input ?? 0
totalOutput += t.output ?? 0
totalCached += t.cached ?? 0
totalThoughts += t.thoughts ?? 0
if (msg.model && !model) model = msg.model
let lastUserMessage = ''
let geminiOrdinal = 0

for (const msg of data.messages) {
if (msg.type === 'user') {
if (Array.isArray(msg.content)) {
lastUserMessage = msg.content.map(c => c.text).join(' ').slice(0, 500)
} else if (typeof msg.content === 'string') {
lastUserMessage = msg.content.slice(0, 500)
}
continue
}

if (msg.type !== 'gemini' || !msg.tokens || !msg.model) continue

const t = msg.tokens
const totalInput = t.input ?? 0
const totalOutput = t.output ?? 0
const totalCached = t.cached ?? 0
const totalThoughts = t.thoughts ?? 0
if (totalInput === 0 && totalOutput === 0 && totalCached === 0 && totalThoughts === 0) continue

const messageKey = msg.id || `idx-${geminiOrdinal}`
geminiOrdinal++
const dedupKey = `gemini:${data.sessionId}:${messageKey}`
if (seenKeys.has(dedupKey)) continue

const tools: string[] = []
const bashCommands: string[] = []

if (msg.toolCalls) {
for (const tc of msg.toolCalls) {
const mapped = toolNameMap[tc.displayName ?? ''] ?? toolNameMap[tc.name] ?? tc.displayName ?? tc.name
allTools.push(mapped)
tools.push(mapped)
if (mapped === 'Bash' && tc.args && typeof tc.args.command === 'string') {
bashCommands.push(...extractBashCommands(tc.args.command))
}
}
}
}

if (totalInput === 0 && totalOutput === 0) return results

// Gemini's `input` count includes `cached` tokens as a subset, so fresh input
// must subtract cached to avoid double-charging at both rates.
const freshInput = totalInput - totalCached

let userMessage = ''
const firstUser = data.messages.find(m => m.type === 'user')
if (firstUser) {
if (Array.isArray(firstUser.content)) {
userMessage = firstUser.content.map(c => c.text).join(' ').slice(0, 500)
} else if (typeof firstUser.content === 'string') {
userMessage = firstUser.content.slice(0, 500)
}
// Gemini's `input` count includes `cached` tokens as a subset, so fresh
// input must subtract cached to avoid double-charging at both rates.
const freshInput = Math.max(0, totalInput - totalCached)

const tsDate = new Date(msg.timestamp || data.startTime)
if (isNaN(tsDate.getTime()) || tsDate.getTime() < 1_000_000_000_000) continue

seenKeys.add(dedupKey)

// Gemini bills thoughts at the output token rate; calculateCost does not
// accept a reasoning parameter, so fold thoughts into the output count for
// pricing while keeping outputTokens / reasoningTokens reported separately.
const costUSD = calculateCost(msg.model, freshInput, totalOutput + totalThoughts, 0, totalCached, 0)

results.push({
provider: 'gemini',
model: msg.model,
inputTokens: freshInput,
outputTokens: totalOutput,
cacheCreationInputTokens: 0,
cacheReadInputTokens: totalCached,
cachedInputTokens: totalCached,
reasoningTokens: totalThoughts,
webSearchRequests: 0,
costUSD,
tools: [...new Set(tools)],
bashCommands: [...new Set(bashCommands)],
timestamp: tsDate.toISOString(),
speed: 'standard',
deduplicationKey: dedupKey,
userMessage: lastUserMessage,
sessionId: data.sessionId,
})
}

const tsDate = new Date(data.startTime)
if (isNaN(tsDate.getTime()) || tsDate.getTime() < 1_000_000_000_000) return results

// Gemini bills thoughts at the output token rate; calculateCost does not
// accept a reasoning parameter, so fold thoughts into the output count for
// pricing while keeping outputTokens / reasoningTokens reported separately.
const costUSD = calculateCost(model, freshInput, totalOutput + totalThoughts, 0, totalCached, 0)

results.push({
provider: 'gemini',
model,
inputTokens: freshInput,
outputTokens: totalOutput,
cacheCreationInputTokens: 0,
cacheReadInputTokens: totalCached,
cachedInputTokens: totalCached,
reasoningTokens: totalThoughts,
webSearchRequests: 0,
costUSD,
tools: [...new Set(allTools)],
bashCommands: [...new Set(bashCommands)],
timestamp: tsDate.toISOString(),
speed: 'standard',
deduplicationKey: dedupKey,
userMessage,
sessionId: data.sessionId,
})

return results
}

Expand Down
134 changes: 134 additions & 0 deletions tests/parser-gemini-cache.test.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,134 @@
import { mkdir, mkdtemp, readFile, rm, stat, writeFile } from 'fs/promises'
import { tmpdir } from 'os'
import { join } from 'path'

import { afterEach, beforeEach, describe, expect, it } from 'vitest'

import { clearSessionCache, parseAllSessions } from '../src/parser.js'
import { CACHE_VERSION, computeEnvFingerprint } from '../src/session-cache.js'
import type { DateRange } from '../src/types.js'

// Per-test isolated HOME and cache directories, plus the original env values
// so the real environment can be restored after each test.
let home: string
let cacheDir: string
let previousHome: string | undefined
let previousCacheDir: string | undefined

beforeEach(async () => {
  // Point the parser at throwaway directories so tests never read the real
  // user's Gemini sessions or write to the real codeburn cache.
  home = await mkdtemp(join(tmpdir(), 'codeburn-gemini-home-'))
  cacheDir = await mkdtemp(join(tmpdir(), 'codeburn-gemini-cache-'))
  previousHome = process.env['HOME']
  previousCacheDir = process.env['CODEBURN_CACHE_DIR']
  process.env['HOME'] = home
  process.env['CODEBURN_CACHE_DIR'] = cacheDir
})

afterEach(async () => {
  // Clear the in-memory session cache first so no state leaks between tests,
  // then restore (or delete, if previously unset) the env overrides and
  // remove the temp directories.
  clearSessionCache()
  if (previousHome === undefined) delete process.env['HOME']
  else process.env['HOME'] = previousHome
  if (previousCacheDir === undefined) delete process.env['CODEBURN_CACHE_DIR']
  else process.env['CODEBURN_CACHE_DIR'] = previousCacheDir
  await rm(home, { recursive: true, force: true })
  await rm(cacheDir, { recursive: true, force: true })
})

describe('Gemini session cache migration', () => {
  it('reparses cached legacy aggregate Gemini entries into granular calls', async () => {
    // On-disk Gemini session: one user prompt followed by two assistant
    // messages, each with its own token usage and message id.
    const chatsDir = join(home, '.gemini', 'tmp', 'project-a', 'chats')
    await mkdir(chatsDir, { recursive: true })
    const sessionPath = join(chatsDir, 'session-2026-05-16.json')
    await writeFile(sessionPath, JSON.stringify({
      sessionId: 'gemini-session-1',
      startTime: '2026-05-16T10:00:00.000Z',
      messages: [
        { id: 'u1', timestamp: '2026-05-16T10:00:00.000Z', type: 'user', content: 'work' },
        {
          id: 'g1',
          timestamp: '2026-05-16T10:00:05.000Z',
          type: 'gemini',
          content: 'first',
          model: 'gemini-3.1-pro-preview',
          tokens: { input: 10, output: 5 },
        },
        {
          id: 'g2',
          timestamp: '2026-05-16T10:00:10.000Z',
          type: 'gemini',
          content: 'second',
          model: 'gemini-3.1-pro-preview',
          tokens: { input: 12, output: 6 },
        },
      ],
    }))

    // Pre-seed the session cache with a legacy AGGREGATE entry for the same
    // file: a single call summing both messages, keyed `gemini:<sessionId>`
    // (no message suffix). The fingerprint matches the file on disk, so only
    // the legacy-key detection — not a file change — can trigger a reparse.
    const fileStat = await stat(sessionPath)
    await writeFile(join(cacheDir, 'session-cache.json'), JSON.stringify({
      version: CACHE_VERSION,
      providers: {
        gemini: {
          envFingerprint: computeEnvFingerprint('gemini'),
          files: {
            [sessionPath]: {
              fingerprint: {
                dev: fileStat.dev,
                ino: fileStat.ino,
                mtimeMs: fileStat.mtimeMs,
                sizeBytes: fileStat.size,
              },
              mcpInventory: [],
              turns: [{
                timestamp: '2026-05-16T10:00:00.000Z',
                sessionId: 'gemini-session-1',
                userMessage: 'work',
                calls: [{
                  provider: 'gemini',
                  model: 'gemini-3.1-pro-preview',
                  usage: {
                    // 22/11 = sums of the two per-message token counts above.
                    inputTokens: 22,
                    outputTokens: 11,
                    cacheCreationInputTokens: 0,
                    cacheReadInputTokens: 0,
                    cachedInputTokens: 0,
                    reasoningTokens: 0,
                    webSearchRequests: 0,
                    cacheCreationOneHourTokens: 0,
                  },
                  speed: 'standard',
                  timestamp: '2026-05-16T10:00:00.000Z',
                  tools: [],
                  bashCommands: [],
                  skills: [],
                  // Legacy aggregate key: session id only, no message key.
                  deduplicationKey: 'gemini:gemini-session-1',
                }],
              }],
            },
          },
        },
      },
    }))

    const range: DateRange = {
      start: new Date('2026-05-16T00:00:00.000Z'),
      end: new Date('2026-05-16T23:59:59.999Z'),
    }

    const projects = await parseAllSessions(range, 'gemini')
    const keys = projects.flatMap(project =>
      project.sessions.flatMap(session =>
        session.turns.flatMap(turn => turn.assistantCalls.map(call => call.deduplicationKey)),
      ),
    )

    // Despite the unchanged fingerprint, the legacy entry must be reparsed
    // into two per-message calls with granular `gemini:<session>:<msgId>` keys.
    expect(projects[0]!.totalApiCalls).toBe(2)
    expect(keys).toEqual([
      'gemini:gemini-session-1:g1',
      'gemini:gemini-session-1:g2',
    ])

    // The rewritten cache on disk must persist the granular shape so the
    // migration only runs once.
    const savedCache = JSON.parse(await readFile(join(cacheDir, 'session-cache.json'), 'utf-8'))
    const savedKeys = savedCache.providers.gemini.files[sessionPath].turns.flatMap((turn: { calls: Array<{ deduplicationKey: string }> }) =>
      turn.calls.map(call => call.deduplicationKey),
    )
    expect(savedKeys).toEqual(keys)
  })
})
Loading
Loading