
Commit e89a54a

fix: Add usage data integration to #760 feature addition (#785)
1 parent b1ca7c3 commit e89a54a

File tree

14 files changed: +287, -15 lines

.changeset/fiery-ravens-refuse.md

Lines changed: 6 additions & 0 deletions
@@ -0,0 +1,6 @@
+---
+'@openai/agents-openai': patch
+'@openai/agents-core': patch
+---
+
+fix: Add usage data integration to #760 feature addition

examples/memory/oai-compact.ts

Lines changed: 6 additions & 1 deletion
@@ -68,6 +68,10 @@ async function main() {
       console.log(`Assistant: ${event.item.content.trim()}`);
     }
   }
+  console.log(
+    'Usage for the turn:',
+    result.state.usage.requestUsageEntries,
+  );
 }
 
 const compactedHistory = await session.getItems();
@@ -77,7 +81,8 @@ async function main() {
 }
 
 // You can manually run compaction this way:
-await session.runCompaction({ force: true });
+const compactionResult = await session.runCompaction({ force: true });
+console.log('Manual compaction result:', compactionResult);
 
 const finalHistory = await session.getItems();
 console.log('\nStored history after final compaction:');
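With this change, each element of result.state.usage.requestUsageEntries is a RequestUsage record tagged with the endpoint that produced it. For a turn that also triggered compaction, the logged array might look roughly like the following (token counts are illustrative, not taken from an actual run of this example):

// Illustrative shape only; values will differ per run.
[
  {
    inputTokens: 1200,
    outputTokens: 45,
    totalTokens: 1245,
    inputTokensDetails: {},
    outputTokensDetails: {},
    endpoint: 'responses.create',
  },
  {
    inputTokens: 980,
    outputTokens: 120,
    totalTokens: 1100,
    inputTokensDetails: {},
    outputTokensDetails: {},
    endpoint: 'responses.compact',
  },
]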

packages/agents-core/src/index.ts

Lines changed: 1 addition & 0 deletions
@@ -183,6 +183,7 @@ export type {
   SessionInputCallback,
   OpenAIResponsesCompactionArgs,
   OpenAIResponsesCompactionAwareSession,
+  OpenAIResponsesCompactionResult,
 } from './memory/session';
 export { isOpenAIResponsesCompactionAwareSession } from './memory/session';
 export { MemorySession } from './memory/memorySession';
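Because the result type is now re-exported from the package root, downstream code can reference it without reaching into internal paths. A minimal type-only sketch (the helper name is hypothetical, not part of this commit):

import type { OpenAIResponsesCompactionResult } from '@openai/agents-core';

// Hypothetical helper: read the token cost of a compaction pass, treating a
// null result (no compaction performed) as zero.
function compactionTokens(
  result: OpenAIResponsesCompactionResult | null,
): number {
  return result?.usage.totalTokens ?? 0;
}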

packages/agents-core/src/memory/session.ts

Lines changed: 11 additions & 1 deletion
@@ -1,4 +1,5 @@
 import type { AgentInputItem } from '../types';
+import type { RequestUsage } from '../usage';
 
 /**
  * A function that combines session history with new input items before the model call.
@@ -60,6 +61,10 @@ export type OpenAIResponsesCompactionArgs = {
   force?: boolean;
 };
 
+export type OpenAIResponsesCompactionResult = {
+  usage: RequestUsage;
+};
+
 export interface OpenAIResponsesCompactionAwareSession extends Session {
   /**
    * Invoked by the runner after it persists a completed turn into the session.
@@ -70,7 +75,12 @@ export interface OpenAIResponsesCompactionAwareSession extends Session {
    * This hook is best-effort. Implementations should consider handling transient failures and
    * deciding whether to retry or skip compaction for the current turn.
    */
-  runCompaction(args?: OpenAIResponsesCompactionArgs): Promise<void> | void;
+  runCompaction(
+    args?: OpenAIResponsesCompactionArgs,
+  ):
+    | Promise<OpenAIResponsesCompactionResult | null>
+    | OpenAIResponsesCompactionResult
+    | null;
 }
 
 export function isOpenAIResponsesCompactionAwareSession(
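For session implementers, the widened signature means runCompaction can now hand the runner the usage of the compaction request, or null when compaction was skipped. A sketch of a compaction-aware session against these types, written as if it lived inside packages/agents-core/src; compactHistory and the size threshold are hypothetical stand-ins, not part of this commit:

import type { AgentInputItem } from './types';
import type {
  OpenAIResponsesCompactionArgs,
  OpenAIResponsesCompactionResult,
  Session,
} from './memory/session';
import { RequestUsage } from './usage';

// Hypothetical stand-in for the actual call to the compaction endpoint.
declare function compactHistory(
  items: AgentInputItem[],
  responseId?: string,
): Promise<{
  compactedItems: AgentInputItem[];
  inputTokens: number;
  outputTokens: number;
}>;

export class SketchCompactionSession implements Session {
  private items: AgentInputItem[] = [];

  async getSessionId(): Promise<string> {
    return 'sketch-session';
  }

  async getItems(): Promise<AgentInputItem[]> {
    return [...this.items];
  }

  async addItems(items: AgentInputItem[]): Promise<void> {
    this.items.push(...items);
  }

  async popItem(): Promise<AgentInputItem | undefined> {
    return this.items.pop();
  }

  async clearSession(): Promise<void> {
    this.items = [];
  }

  async runCompaction(
    args?: OpenAIResponsesCompactionArgs,
  ): Promise<OpenAIResponsesCompactionResult | null> {
    if (!args?.force && this.items.length < 50) {
      // Returning null tells the runner there is no compaction usage to merge.
      return null;
    }
    const { compactedItems, inputTokens, outputTokens } = await compactHistory(
      this.items,
      args?.responseId,
    );
    this.items = compactedItems;
    return {
      usage: new RequestUsage({
        inputTokens,
        outputTokens,
        totalTokens: inputTokens + outputTokens,
        endpoint: 'responses.compact',
      }),
    };
  }
}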

packages/agents-core/src/runImplementation.ts

Lines changed: 22 additions & 5 deletions
@@ -65,6 +65,7 @@ import {
   type Session,
   type SessionInputCallback,
 } from './memory/session';
+import { Usage } from './usage';
 
 // Represents a single handoff function call that still needs to be executed after the model turn.
 type ToolRunHandoff = {
@@ -2296,14 +2297,30 @@ function shouldStripIdForType(type: string): boolean {
 async function runCompactionOnSession(
   session: Session | undefined,
   responseId: string | undefined,
+  state: RunState<any, any>,
 ): Promise<void> {
   if (!isOpenAIResponsesCompactionAwareSession(session)) {
     return;
   }
   // Called after a completed turn is persisted so compaction can consider the latest stored state.
-  await session.runCompaction(
+  const compactionResult = await session.runCompaction(
     typeof responseId === 'undefined' ? undefined : { responseId },
   );
+  if (!compactionResult) {
+    return;
+  }
+  const usage = compactionResult.usage;
+  state._context.usage.add(
+    new Usage({
+      requests: 1,
+      inputTokens: usage.inputTokens,
+      outputTokens: usage.outputTokens,
+      totalTokens: usage.totalTokens,
+      inputTokensDetails: usage.inputTokensDetails,
+      outputTokensDetails: usage.outputTokensDetails,
+      requestUsageEntries: [usage],
+    }),
+  );
 }
 
 /**
@@ -2335,12 +2352,12 @@ export async function saveToSession(
   if (itemsToSave.length === 0) {
     state._currentTurnPersistedItemCount =
       alreadyPersisted + newRunItems.length;
-    await runCompactionOnSession(session, result.lastResponseId);
+    await runCompactionOnSession(session, result.lastResponseId, state);
     return;
   }
   const sanitizedItems = normalizeItemsForSessionPersistence(itemsToSave);
   await session.addItems(sanitizedItems);
-  await runCompactionOnSession(session, result.lastResponseId);
+  await runCompactionOnSession(session, result.lastResponseId, state);
   state._currentTurnPersistedItemCount = alreadyPersisted + newRunItems.length;
 }
 
@@ -2382,12 +2399,12 @@ export async function saveStreamResultToSession(
   if (itemsToSave.length === 0) {
     state._currentTurnPersistedItemCount =
       alreadyPersisted + newRunItems.length;
-    await runCompactionOnSession(session, result.lastResponseId);
+    await runCompactionOnSession(session, result.lastResponseId, state);
    return;
   }
   const sanitizedItems = normalizeItemsForSessionPersistence(itemsToSave);
   await session.addItems(sanitizedItems);
-  await runCompactionOnSession(session, result.lastResponseId);
+  await runCompactionOnSession(session, result.lastResponseId, state);
   state._currentTurnPersistedItemCount = alreadyPersisted + newRunItems.length;
 }
 
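Because the compaction usage now lands in the run's aggregated usage as its own entry, callers can attribute token spend per endpoint after the fact. A small hedged sketch that relies only on the entry fields introduced here; how the entries are obtained (e.g. from result.state.usage) is up to the caller:

// Sum totalTokens per endpoint so compaction overhead ('responses.compact')
// can be reported separately from regular model calls ('responses.create').
type UsageEntryLike = { endpoint?: string; totalTokens: number };

function tokensByEndpoint(entries: UsageEntryLike[] = []): Map<string, number> {
  const totals = new Map<string, number>();
  for (const entry of entries) {
    const key = entry.endpoint ?? 'unknown';
    totals.set(key, (totals.get(key) ?? 0) + entry.totalTokens);
  }
  return totals;
}

// Example call (names assumed):
// tokensByEndpoint(result.state.usage.requestUsageEntries);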

packages/agents-core/src/runState.ts

Lines changed: 2 additions & 0 deletions
@@ -78,6 +78,7 @@ const requestUsageSchema = z.object({
   totalTokens: z.number(),
   inputTokensDetails: z.record(z.string(), z.number()).optional(),
   outputTokensDetails: z.record(z.string(), z.number()).optional(),
+  endpoint: z.string().optional(),
 });
 
 const usageSchema = z.object({
@@ -470,6 +471,7 @@ export class RunState<TContext, TAgent extends Agent<any, any>> {
           totalTokens: entry.totalTokens,
           inputTokensDetails: entry.inputTokensDetails,
           outputTokensDetails: entry.outputTokensDetails,
+          ...(entry.endpoint ? { endpoint: entry.endpoint } : {}),
         }),
       ),
     }
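The optional endpoint field has to survive serialization of a run state while older payloads without it stay valid. A standalone sketch that mirrors the shape of requestUsageSchema after this change (the inputTokens/outputTokens fields above the hunk are assumed; this is not the actual module):

import { z } from 'zod';

// Sketch of the request usage schema with the new optional endpoint field.
const requestUsageSketch = z.object({
  inputTokens: z.number(),
  outputTokens: z.number(),
  totalTokens: z.number(),
  inputTokensDetails: z.record(z.string(), z.number()).optional(),
  outputTokensDetails: z.record(z.string(), z.number()).optional(),
  endpoint: z.string().optional(),
});

// An entry produced by a compaction request round-trips with its endpoint...
const tagged = requestUsageSketch.parse({
  inputTokens: 4,
  outputTokens: 6,
  totalTokens: 10,
  endpoint: 'responses.compact',
});
console.log(tagged.endpoint); // 'responses.compact'

// ...and entries serialized before this change still parse, just without it.
const legacy = requestUsageSketch.parse({
  inputTokens: 2,
  outputTokens: 3,
  totalTokens: 5,
});
console.log(legacy.endpoint); // undefined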

packages/agents-core/src/types/protocol.ts

Lines changed: 1 addition & 0 deletions
@@ -769,6 +769,7 @@ export const RequestUsageData = z.object({
   totalTokens: z.number(),
   inputTokensDetails: z.record(z.string(), z.number()).optional(),
   outputTokensDetails: z.record(z.string(), z.number()).optional(),
+  endpoint: z.string().optional(),
 });
 
 export type RequestUsageData = z.infer<typeof RequestUsageData>;

packages/agents-core/src/usage.ts

Lines changed: 9 additions & 0 deletions
@@ -7,6 +7,7 @@ type RequestUsageInput = Partial<
     total_tokens: number;
     input_tokens_details: object;
     output_tokens_details: object;
+    endpoint?: string;
   }
 >;
 
@@ -56,6 +57,11 @@ export class RequestUsage {
    */
   public outputTokensDetails: Record<string, number>;
 
+  /**
+   * The endpoint that produced this usage entry (e.g., responses.create, responses.compact).
+   */
+  public endpoint?: 'responses.create' | 'responses.compact' | (string & {});
+
   constructor(input?: RequestUsageInput) {
     this.inputTokens = input?.inputTokens ?? input?.input_tokens ?? 0;
     this.outputTokens = input?.outputTokens ?? input?.output_tokens ?? 0;
@@ -73,6 +79,9 @@ export class RequestUsage {
     this.outputTokensDetails = outputTokensDetails
       ? (outputTokensDetails as Record<string, number>)
       : {};
+    if (typeof input?.endpoint !== 'undefined') {
+      this.endpoint = input.endpoint;
+    }
   }
 }
 
packages/agents-core/test/runImplementation.test.ts

Lines changed: 121 additions & 4 deletions
@@ -50,7 +50,7 @@ import {
 import { handoff } from '../src/handoff';
 import { ModelBehaviorError, UserError } from '../src/errors';
 import { Computer } from '../src/computer';
-import { Usage } from '../src/usage';
+import { RequestUsage, Usage } from '../src/usage';
 import { setTracingDisabled, withTrace } from '../src';
 
 import {
@@ -70,7 +70,10 @@ import { RunContext } from '../src/runContext';
 import { setDefaultModelProvider } from '../src';
 import { Logger } from '../src/logger';
 import type { UnknownContext } from '../src/types';
-import type { Session } from '../src/memory/session';
+import type {
+  OpenAIResponsesCompactionResult,
+  Session,
+} from '../src/memory/session';
 import type { AgentInputItem } from '../src/types';
 
 beforeAll(() => {
@@ -580,8 +583,9 @@ describe('saveToSession', () => {
 
     async runCompaction(args: {
       responseId: string | undefined;
-    }): Promise<void> {
+    }): Promise<OpenAIResponsesCompactionResult | null> {
      this.events.push(`runCompaction:${args.responseId}`);
+      return null;
     }
   }
 
@@ -662,8 +666,11 @@ describe('saveToSession', () => {
       this.items = [];
     }
 
-    async runCompaction(args?: { responseId?: string }): Promise<void> {
+    async runCompaction(args?: {
+      responseId?: string;
+    }): Promise<OpenAIResponsesCompactionResult | null> {
       this.events.push(`runCompaction:${String(args?.responseId)}`);
+      return null;
     }
   }
 
@@ -713,6 +720,116 @@ describe('saveToSession', () => {
     expect(session.events).toEqual(['addItems:2', 'runCompaction:undefined']);
     expect(session.items).toHaveLength(2);
   });
+
+  it('aggregates compaction usage into the run usage', async () => {
+    class TrackingSession implements Session {
+      items: AgentInputItem[] = [];
+      events: string[] = [];
+
+      async getSessionId(): Promise<string> {
+        return 'session';
+      }
+
+      async getItems(): Promise<AgentInputItem[]> {
+        return [...this.items];
+      }
+
+      async addItems(items: AgentInputItem[]): Promise<void> {
+        this.events.push(`addItems:${items.length}`);
+        this.items.push(...items);
+      }
+
+      async popItem(): Promise<AgentInputItem | undefined> {
+        return undefined;
+      }
+
+      async clearSession(): Promise<void> {
+        this.items = [];
+      }
+
+      async runCompaction(): Promise<OpenAIResponsesCompactionResult | null> {
+        this.events.push('runCompaction:resp_123');
+        return {
+          usage: new RequestUsage({
+            inputTokens: 4,
+            outputTokens: 6,
+            totalTokens: 10,
+            endpoint: 'responses.compact',
+          }),
+        };
+      }
+    }
+
+    const textAgent = new Agent<UnknownContext, 'text'>({
+      name: 'Recorder',
+      outputType: 'text',
+      instructions: 'capture',
+    });
+    const agent = textAgent as unknown as Agent<
+      UnknownContext,
+      AgentOutputType
+    >;
+    const session = new TrackingSession();
+    const context = new RunContext<UnknownContext>(undefined as UnknownContext);
+    const state = new RunState<
+      UnknownContext,
+      Agent<UnknownContext, AgentOutputType>
+    >(context, 'hello', agent, 10);
+
+    const modelUsage = new Usage({
+      requests: 1,
+      inputTokens: 2,
+      outputTokens: 3,
+      totalTokens: 5,
+      requestUsageEntries: [
+        new RequestUsage({
+          inputTokens: 2,
+          outputTokens: 3,
+          totalTokens: 5,
+          endpoint: 'responses.create',
+        }),
+      ],
+    });
+    state._modelResponses.push({
+      output: [],
+      usage: modelUsage,
+      responseId: 'resp_123',
+    });
+    state._context.usage.add(modelUsage);
+    state._generatedItems = [
+      new MessageOutputItem(
+        {
+          type: 'message',
+          role: 'assistant',
+          id: 'msg_123',
+          status: 'completed',
+          content: [
+            {
+              type: 'output_text',
+              text: 'here is the reply',
+            },
+          ],
+          providerData: {},
+        },
+        textAgent,
+      ),
+    ];
+    state._currentStep = {
+      type: 'next_step_final_output',
+      output: 'here is the reply',
+    };
+
+    const result = new RunResult(state);
+    await saveToSession(session, toInputItemList(state._originalInput), result);
+
+    expect(session.events).toEqual(['addItems:2', 'runCompaction:resp_123']);
+    expect(state.usage.inputTokens).toBe(6);
+    expect(state.usage.outputTokens).toBe(9);
+    expect(state.usage.totalTokens).toBe(15);
+    expect(
+      state.usage.requestUsageEntries?.map((entry) => entry.endpoint),
+    ).toEqual(['responses.create', 'responses.compact']);
+  });
 });
 
 describe('prepareInputItemsWithSession', () => {

packages/agents-core/test/usage.test.ts

Lines changed: 32 additions & 0 deletions
@@ -163,4 +163,36 @@ describe('Usage', () => {
       },
     ]);
   });
+
+  it('preserves endpoint metadata on request usage entries', () => {
+    const aggregated = new Usage();
+
+    aggregated.add(
+      new Usage({
+        requests: 1,
+        inputTokens: 3,
+        outputTokens: 4,
+        totalTokens: 7,
+        requestUsageEntries: [
+          new RequestUsage({
+            inputTokens: 3,
+            outputTokens: 4,
+            totalTokens: 7,
+            endpoint: 'responses.create',
+          }),
+        ],
+      }),
+    );
+
+    expect(aggregated.requestUsageEntries).toEqual([
+      {
+        inputTokens: 3,
+        outputTokens: 4,
+        totalTokens: 7,
+        inputTokensDetails: {},
+        outputTokensDetails: {},
+        endpoint: 'responses.create',
+      },
+    ]);
+  });
 });
