mirror of
https://github.com/whekin/household-bot.git
synced 2026-03-31 14:04:04 +00:00
fix(bot): parse nested openai responses payloads
This commit is contained in:
@@ -1,3 +1,5 @@
|
||||
import { extractOpenAiResponseText, type OpenAiResponsePayload } from './openai-responses'
|
||||
|
||||
export interface AssistantUsage {
|
||||
inputTokens: number
|
||||
outputTokens: number
|
||||
@@ -22,15 +24,6 @@ export interface ConversationalAssistant {
|
||||
}): Promise<AssistantReply>
|
||||
}
|
||||
|
||||
interface OpenAiResponsePayload {
|
||||
output_text?: string
|
||||
usage?: {
|
||||
input_tokens?: number
|
||||
output_tokens?: number
|
||||
total_tokens?: number
|
||||
}
|
||||
}
|
||||
|
||||
const ASSISTANT_SYSTEM_PROMPT = [
|
||||
'You are Kojori, a household finance assistant for one specific household.',
|
||||
'Stay within the provided household context and recent conversation context.',
|
||||
@@ -99,8 +92,14 @@ export function createOpenAiChatAssistant(
|
||||
throw new Error(`Assistant request failed with status ${response.status}`)
|
||||
}
|
||||
|
||||
const payload = (await response.json()) as OpenAiResponsePayload
|
||||
const text = payload.output_text?.trim()
|
||||
const payload = (await response.json()) as OpenAiResponsePayload & {
|
||||
usage?: {
|
||||
input_tokens?: number
|
||||
output_tokens?: number
|
||||
total_tokens?: number
|
||||
}
|
||||
}
|
||||
const text = extractOpenAiResponseText(payload)
|
||||
if (!text) {
|
||||
throw new Error('Assistant response did not contain text')
|
||||
}
|
||||
|
||||
@@ -1,3 +1,5 @@
|
||||
import { extractOpenAiResponseText, parseJsonFromResponseText } from './openai-responses'
|
||||
|
||||
import type { PurchaseParserLlmFallback } from '@household/application'
|
||||
|
||||
interface OpenAiStructuredResult {
|
||||
@@ -84,17 +86,20 @@ export function createOpenAiParserFallback(
|
||||
}
|
||||
|
||||
const payload = (await response.json()) as {
|
||||
output_text?: string
|
||||
output_text?: string | null
|
||||
output?: Array<{
|
||||
content?: Array<{
|
||||
text?: string | { value?: string | null } | null
|
||||
}> | null
|
||||
}> | null
|
||||
}
|
||||
|
||||
if (!payload.output_text) {
|
||||
const responseText = extractOpenAiResponseText(payload)
|
||||
if (!responseText) {
|
||||
return null
|
||||
}
|
||||
|
||||
let parsedJson: OpenAiStructuredResult
|
||||
try {
|
||||
parsedJson = JSON.parse(payload.output_text) as OpenAiStructuredResult
|
||||
} catch {
|
||||
const parsedJson = parseJsonFromResponseText<OpenAiStructuredResult>(responseText)
|
||||
if (!parsedJson) {
|
||||
return null
|
||||
}
|
||||
|
||||
|
||||
98
apps/bot/src/openai-purchase-interpreter.test.ts
Normal file
98
apps/bot/src/openai-purchase-interpreter.test.ts
Normal file
@@ -0,0 +1,98 @@
|
||||
import { describe, expect, test } from 'bun:test'
|
||||
|
||||
import {
|
||||
createOpenAiPurchaseInterpreter,
|
||||
type PurchaseInterpretation
|
||||
} from './openai-purchase-interpreter'
|
||||
|
||||
function successfulResponse(payload: unknown): Response {
|
||||
return new Response(JSON.stringify(payload), {
|
||||
status: 200,
|
||||
headers: {
|
||||
'content-type': 'application/json'
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
describe('createOpenAiPurchaseInterpreter', () => {
  // Covers the Responses API shape where the model's JSON arrives nested in
  // `output[].content[].text` rather than the `output_text` convenience field.
  test('parses nested responses api content output', async () => {
    const interpreter = createOpenAiPurchaseInterpreter('test-key', 'gpt-5-mini')
    expect(interpreter).toBeDefined()

    // Stub global fetch so the interpreter's OpenAI call returns our fixture.
    const originalFetch = globalThis.fetch
    globalThis.fetch = (async () =>
      successfulResponse({
        output: [
          {
            content: [
              {
                text: JSON.stringify({
                  decision: 'purchase',
                  amountMinor: '100000',
                  currency: 'GEL',
                  itemDescription: 'армянская золотая швабра',
                  confidence: 93,
                  clarificationQuestion: null
                })
              }
            ]
          }
        ]
      })) as unknown as typeof fetch

    try {
      const result = await interpreter!('Купил швабру, Армянскую, золотую. 1000 лари', {
        defaultCurrency: 'GEL'
      })

      // The stringified minor amount from the fixture must come back as a
      // bigint, and parserMode must be tagged 'llm'.
      expect(result).toEqual<PurchaseInterpretation>({
        decision: 'purchase',
        amountMinor: 100000n,
        currency: 'GEL',
        itemDescription: 'армянская золотая швабра',
        confidence: 93,
        parserMode: 'llm',
        clarificationQuestion: null
      })
    } finally {
      // Always restore the real fetch, even when assertions throw.
      globalThis.fetch = originalFetch
    }
  })

  // Covers the case where the model wraps its JSON answer in a Markdown
  // ```json fence inside the nested content text.
  test('parses fenced json responses', async () => {
    const interpreter = createOpenAiPurchaseInterpreter('test-key', 'gpt-5-mini')
    expect(interpreter).toBeDefined()

    // Stub global fetch with a fenced-JSON fixture.
    const originalFetch = globalThis.fetch
    globalThis.fetch = (async () =>
      successfulResponse({
        output: [
          {
            content: [
              {
                text: '```json\n{"decision":"purchase","amountMinor":"1000","currency":"GEL","itemDescription":"сухари","confidence":88,"clarificationQuestion":null}\n```'
              }
            ]
          }
        ]
      })) as unknown as typeof fetch

    try {
      const result = await interpreter!('Купил сухари на стол за 10 лари', {
        defaultCurrency: 'GEL'
      })

      expect(result).toEqual<PurchaseInterpretation>({
        decision: 'purchase',
        amountMinor: 1000n,
        currency: 'GEL',
        itemDescription: 'сухари',
        confidence: 88,
        parserMode: 'llm',
        clarificationQuestion: null
      })
    } finally {
      // Always restore the real fetch, even when assertions throw.
      globalThis.fetch = originalFetch
    }
  })
})
|
||||
@@ -1,3 +1,5 @@
|
||||
import { extractOpenAiResponseText, parseJsonFromResponseText } from './openai-responses'
|
||||
|
||||
export type PurchaseInterpretationDecision = 'purchase' | 'clarification' | 'not_purchase'
|
||||
|
||||
export interface PurchaseInterpretation {
|
||||
@@ -133,17 +135,20 @@ export function createOpenAiPurchaseInterpreter(
|
||||
}
|
||||
|
||||
const payload = (await response.json()) as {
|
||||
output_text?: string
|
||||
output_text?: string | null
|
||||
output?: Array<{
|
||||
content?: Array<{
|
||||
text?: string | { value?: string | null } | null
|
||||
}> | null
|
||||
}> | null
|
||||
}
|
||||
|
||||
if (!payload.output_text) {
|
||||
const responseText = extractOpenAiResponseText(payload)
|
||||
if (!responseText) {
|
||||
return null
|
||||
}
|
||||
|
||||
let parsedJson: OpenAiStructuredResult
|
||||
try {
|
||||
parsedJson = JSON.parse(payload.output_text) as OpenAiStructuredResult
|
||||
} catch {
|
||||
const parsedJson = parseJsonFromResponseText<OpenAiStructuredResult>(responseText)
|
||||
if (!parsedJson) {
|
||||
return null
|
||||
}
|
||||
|
||||
|
||||
48
apps/bot/src/openai-responses.test.ts
Normal file
48
apps/bot/src/openai-responses.test.ts
Normal file
@@ -0,0 +1,48 @@
|
||||
import { describe, expect, test } from 'bun:test'
|
||||
|
||||
import { extractOpenAiResponseText, parseJsonFromResponseText } from './openai-responses'
|
||||
|
||||
describe('extractOpenAiResponseText', () => {
|
||||
test('returns top-level output_text when present', () => {
|
||||
expect(
|
||||
extractOpenAiResponseText({
|
||||
output_text: 'hello'
|
||||
})
|
||||
).toBe('hello')
|
||||
})
|
||||
|
||||
test('falls back to nested output content text', () => {
|
||||
expect(
|
||||
extractOpenAiResponseText({
|
||||
output: [
|
||||
{
|
||||
content: [
|
||||
{
|
||||
text: 'first'
|
||||
},
|
||||
{
|
||||
text: {
|
||||
value: 'second'
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
})
|
||||
).toBe('first\nsecond')
|
||||
})
|
||||
})
|
||||
|
||||
describe('parseJsonFromResponseText', () => {
|
||||
test('parses plain json', () => {
|
||||
expect(parseJsonFromResponseText<{ ok: boolean }>('{"ok":true}')).toEqual({
|
||||
ok: true
|
||||
})
|
||||
})
|
||||
|
||||
test('parses fenced json', () => {
|
||||
expect(parseJsonFromResponseText<{ ok: boolean }>('```json\n{"ok":true}\n```')).toEqual({
|
||||
ok: true
|
||||
})
|
||||
})
|
||||
})
|
||||
66
apps/bot/src/openai-responses.ts
Normal file
66
apps/bot/src/openai-responses.ts
Normal file
@@ -0,0 +1,66 @@
|
||||
// One content entry inside a Responses API output item. `text` arrives either
// as a plain string or wrapped in an object carrying a `value` field.
interface OpenAiResponseContentItem {
  text?: string | { value?: string | null } | null
}

// One entry of the top-level `output` array in a Responses API payload.
interface OpenAiResponseOutputItem {
  content?: OpenAiResponseContentItem[] | null
}

/**
 * Minimal view of an OpenAI Responses API payload: the aggregated
 * `output_text` convenience field and/or the nested `output` item list.
 * Only the fields this module reads are modelled here.
 */
export interface OpenAiResponsePayload {
  output_text?: string | null
  output?: OpenAiResponseOutputItem[] | null
}
|
||||
|
||||
function normalizeResponseText(value: string | null | undefined): string | null {
|
||||
const trimmed = value?.trim()
|
||||
return trimmed && trimmed.length > 0 ? trimmed : null
|
||||
}
|
||||
|
||||
function contentItemText(contentItem: OpenAiResponseContentItem): string | null {
|
||||
if (typeof contentItem.text === 'string') {
|
||||
return normalizeResponseText(contentItem.text)
|
||||
}
|
||||
|
||||
if (contentItem.text && typeof contentItem.text.value === 'string') {
|
||||
return normalizeResponseText(contentItem.text.value)
|
||||
}
|
||||
|
||||
return null
|
||||
}
|
||||
|
||||
export function extractOpenAiResponseText(payload: OpenAiResponsePayload): string | null {
|
||||
const directOutputText = normalizeResponseText(payload.output_text)
|
||||
if (directOutputText) {
|
||||
return directOutputText
|
||||
}
|
||||
|
||||
const nestedOutputText = payload.output
|
||||
?.flatMap((outputItem) => outputItem.content ?? [])
|
||||
.map(contentItemText)
|
||||
.filter((value): value is string => value !== null)
|
||||
.join('\n')
|
||||
|
||||
return normalizeResponseText(nestedOutputText)
|
||||
}
|
||||
|
||||
export function parseJsonFromResponseText<T>(text: string): T | null {
|
||||
const normalizedText = normalizeResponseText(text)
|
||||
if (!normalizedText) {
|
||||
return null
|
||||
}
|
||||
|
||||
try {
|
||||
return JSON.parse(normalizedText) as T
|
||||
} catch {
|
||||
const fencedMatch = normalizedText.match(/```(?:json)?\s*([\s\S]*?)\s*```/i)
|
||||
if (!fencedMatch?.[1]) {
|
||||
return null
|
||||
}
|
||||
|
||||
try {
|
||||
return JSON.parse(fencedMatch[1]) as T
|
||||
} catch {
|
||||
return null
|
||||
}
|
||||
}
|
||||
}
|
||||
Reference in New Issue
Block a user