fix: avoid logging stale anthropic usage (#1501) (thanks @parubets)

This commit is contained in:
Peter Steinberger 2026-01-24 06:18:40 +00:00
parent e85abaca2b
commit 012a83cdb2
3 changed files with 53 additions and 5 deletions

View File

@ -76,6 +76,43 @@ describe("createAnthropicPayloadLogger", () => {
expect(event.usage).toEqual({ input: 12 });
});
// Regression test (#1501): recordUsage must ignore assistant usage that was
// already present before the current prompt started. The third argument is
// the new baselineMessageCount; the lone assistant message sits at index 0,
// below the baseline of 1, so its usage is treated as stale and skipped.
it("skips usage when no new assistant message was added", () => {
const lines: string[] = [];
// In-memory writer: captures every log line so the test can assert on output.
const logger = createAnthropicPayloadLogger({
cfg: {
diagnostics: {
anthropicPayloadLog: {
enabled: true,
},
},
},
env: {},
modelApi: "anthropic-messages",
writer: {
filePath: "memory",
write: (line) => lines.push(line),
},
});
// One pre-existing assistant message; baselineMessageCount = 1 marks it as
// present before the prompt, so recordUsage must not log its usage.
logger?.recordUsage(
[
{
role: "assistant",
usage: {
input: 1,
},
} as unknown as {
role: string;
usage: { input: number };
},
],
undefined,
1,
);
// Stale usage ignored: nothing was written to the log.
expect(lines.length).toBe(0);
});
it("records request payloads and forwards onPayload", async () => {
const lines: string[] = [];
let forwarded: unknown;

View File

@ -110,8 +110,12 @@ function isAnthropicModel(model: Model<Api> | undefined | null): boolean {
return (model as { api?: unknown })?.api === "anthropic-messages";
}
function findLastAssistantUsage(messages: AgentMessage[]): Record<string, unknown> | null {
function findLastAssistantUsage(
messages: AgentMessage[],
minIndex = 0,
): Record<string, unknown> | null {
for (let i = messages.length - 1; i >= 0; i -= 1) {
if (i < minIndex) break;
const msg = messages[i] as { role?: unknown; usage?: unknown };
if (msg?.role === "assistant" && msg.usage && typeof msg.usage === "object") {
return msg.usage as Record<string, unknown>;
@ -124,7 +128,7 @@ export type AnthropicPayloadLogger = {
enabled: true;
filePath: string;
wrapStreamFn: (streamFn: StreamFn) => StreamFn;
recordUsage: (messages: AgentMessage[], error?: unknown) => void;
recordUsage: (messages: AgentMessage[], error?: unknown, baselineMessageCount?: number) => void;
};
export function createAnthropicPayloadLogger(params: {
@ -184,8 +188,12 @@ export function createAnthropicPayloadLogger(params: {
return wrapped;
};
const recordUsage: AnthropicPayloadLogger["recordUsage"] = (messages, error) => {
const usage = findLastAssistantUsage(messages);
const recordUsage: AnthropicPayloadLogger["recordUsage"] = (
messages,
error,
baselineMessageCount,
) => {
const usage = findLastAssistantUsage(messages, baselineMessageCount ?? 0);
if (!usage) {
if (error) {
record({

View File

@ -643,6 +643,7 @@ export async function runEmbeddedAttempt(
let messagesSnapshot: AgentMessage[] = [];
let sessionIdUsed = activeSession.sessionId;
let promptStartMessageCount = activeSession.messages.length;
const onAbort = () => {
const reason = params.abortSignal ? getAbortReason(params.abortSignal) : undefined;
const timeout = reason ? isTimeoutError(reason) : false;
@ -714,6 +715,8 @@ export async function runEmbeddedAttempt(
);
}
promptStartMessageCount = activeSession.messages.length;
try {
// Detect and load images referenced in the prompt for vision-capable models.
// This eliminates the need for an explicit "view" tool call by injecting
@ -789,7 +792,7 @@ export async function runEmbeddedAttempt(
messages: messagesSnapshot,
note: promptError ? "prompt error" : undefined,
});
anthropicPayloadLogger?.recordUsage(messagesSnapshot, promptError);
anthropicPayloadLogger?.recordUsage(messagesSnapshot, promptError, promptStartMessageCount);
// Run agent_end hooks to allow plugins to analyze the conversation
// This is fire-and-forget, so we don't await