diff --git a/bin/chat.js b/bin/chat.js
index 65d5f0a..fe8049d 100644
--- a/bin/chat.js
+++ b/bin/chat.js
@@ -21,13 +21,13 @@ const colors = {
 }
 
 /**
- * @import { ResponsesInput, ResponseInputItem } from './types.d.ts'
- * @param {ResponsesInput} chatInput
- * @returns {Promise}
+ * @import { ChatInput, Message } from './types.d.ts'
+ * @param {ChatInput} chatInput
+ * @returns {Promise}
  */
 async function sendToServer(chatInput) {
   // Send the request to the server
-  const response = await fetch('https://hyperparam.app/api/functions/openai/responses', {
+  const response = await fetch('https://hyperparam.app/api/functions/chat', {
     method: 'POST',
     headers: { 'Content-Type': 'application/json' },
     body: JSON.stringify(chatInput),
@@ -38,7 +38,7 @@ async function sendToServer(chatInput) {
   }
 
   // Process the streaming response
-  /** @type {ResponseInputItem[]} */
+  /** @type {Message[]} */
   const incoming = []
   const reader = response.body?.getReader()
   if (!reader) throw new Error('No response body')
@@ -89,11 +89,13 @@ async function sendToServer(chatInput) {
           summary: chunk.item.summary,
         }
         incoming.push(reasoningItem)
-      } else if (!chunk.key) {
-        console.log('Unknown chunk', chunk)
+      } else if (chunk.key || chunk.type === 'response.completed') {
+        // ignore
+      } else {
+        console.log('\nUnknown chunk', chunk)
       }
     } catch (err) {
-      console.error('Error parsing chunk', err)
+      console.error('\nError parsing chunk', err)
     }
   }
 }
@@ -105,15 +107,15 @@ async function sendToServer(chatInput) {
  * Will mutate the messages array!
  *
  * @import { ResponseFunctionToolCall, ToolHandler } from './types.d.ts'
- * @param {ResponseInputItem[]} input
+ * @param {Message[][]} messages
  * @returns {Promise}
  */
-async function sendMessages(input) {
-  /** @type {ResponsesInput} */
+async function sendMessages(messages) {
+  /** @type {ChatInput} */
   const chatInput = {
     model: 'gpt-5',
     instructions,
-    input,
+    messages,
     reasoning: {
       effort: 'low',
     },
@@ -174,13 +176,13 @@ async function sendMessages(input) {
      }
    }
 
-    input.push(...incoming)
+    messages.push(incoming)
 
    // send messages with tool results
-    await sendMessages(input)
+    await sendMessages(messages)
  } else {
    // no tool calls, just append incoming messages
-    input.push(...incoming)
+    messages.push(incoming)
  }
 }
 
@@ -231,7 +233,7 @@ function writeWithColor() {
 }
 
 export function chat() {
-  /** @type {ResponseInputItem[]} */
+  /** @type {Message[][]} */
   const messages = []
 
   process.stdin.setEncoding('utf-8')
@@ -245,7 +247,7 @@ export function chat() {
    try {
      write(colors.user, 'answer: ', colors.normal)
      outputMode = 'text' // switch to text output mode
-      messages.push({ role: 'user', content: input.trim() })
+      messages.push([{ role: 'user', content: input.trim() }])
      await sendMessages(messages)
    } catch (error) {
      console.error(colors.error, '\n' + error)
diff --git a/bin/types.d.ts b/bin/types.d.ts
index e474c6d..88c6b9b 100644
--- a/bin/types.d.ts
+++ b/bin/types.d.ts
@@ -1,30 +1,17 @@
 // Model Input
-// Based on ResponseCreateParamsStreaming from openai client
-export interface ResponsesInput {
+export interface ChatInput {
   model: string
-  input: ResponseInputItem[] // or string but we always use stateless messages
   instructions?: string // system prompt
-  background?: boolean
-  include?: string[]
+  messages: Message[][]
+  tools?: ResponseTool[]
   reasoning?: {
-    effort?: 'low' | 'medium' | 'high'
+    effort?: 'minimal' | 'low' | 'medium' | 'high'
     summary?: 'auto' | 'concise' | 'detailed'
   }
-  tools?: ResponseTool[]
-  max_output_tokens?: number
   parallel_tool_calls?: boolean
-  previous_response_id?: string
-  // service_tier?: 'auto' | 'default' | 'flex'
-  // store?: boolean // store response for later retrieval
-  temperature?: number // 0..2
-  text?: unknown
-  tool_choice?: 'auto' | 'none' | 'required'
-  top_p?: number // 0..1
-  truncation?: 'auto' | 'disabled'
-  user?: string
 }
 
-export type ResponseInputItem =
+export type Message =
   | EasyInputMessage
   | ResponseFunctionToolCall
   | FunctionCallOutput
diff --git a/package.json b/package.json
index 54943dd..167a95b 100644
--- a/package.json
+++ b/package.json
@@ -56,35 +56,35 @@
   },
   "dependencies": {
     "hightable": "0.20.4",
-    "hyparquet": "1.20.1",
+    "hyparquet": "1.20.2",
     "hyparquet-compressors": "1.1.1",
     "icebird": "0.3.1"
   },
   "devDependencies": {
-    "@eslint/js": "9.38.0",
-    "@storybook/react-vite": "10.0.1",
+    "@eslint/js": "9.39.1",
+    "@storybook/react-vite": "10.0.4",
     "@testing-library/react": "16.3.0",
-    "@types/node": "24.9.2",
+    "@types/node": "24.10.0",
     "@types/react": "19.2.2",
     "@types/react-dom": "19.2.2",
     "@vitejs/plugin-react": "5.1.0",
-    "@vitest/coverage-v8": "4.0.5",
-    "eslint": "9.38.0",
+    "@vitest/coverage-v8": "4.0.7",
+    "eslint": "9.39.1",
     "eslint-plugin-react": "7.37.5",
     "eslint-plugin-react-hooks": "7.0.1",
     "eslint-plugin-react-refresh": "0.4.24",
-    "eslint-plugin-storybook": "10.0.1",
-    "globals": "16.4.0",
-    "jsdom": "27.0.1",
+    "eslint-plugin-storybook": "10.0.4",
+    "globals": "16.5.0",
+    "jsdom": "27.1.0",
     "nodemon": "3.1.10",
     "npm-run-all": "4.1.5",
     "react": "19.2.0",
     "react-dom": "19.2.0",
-    "storybook": "10.0.1",
+    "storybook": "10.0.4",
     "typescript": "5.9.3",
-    "typescript-eslint": "8.46.2",
-    "vite": "7.1.12",
-    "vitest": "4.0.5"
+    "typescript-eslint": "8.46.3",
+    "vite": "7.2.0",
+    "vitest": "4.0.7"
   },
   "peerDependencies": {
     "react": "^18.3.1 || ^19",