diff --git a/src/components/ChatMessage.vue b/src/components/ChatMessage.vue
index eea286c..4e29409 100644
--- a/src/components/ChatMessage.vue
+++ b/src/components/ChatMessage.vue
@@ -1,4 +1,25 @@
- {{this.message.content}}
+
diff --git a/src/components/ChatWidget.vue b/src/components/ChatWidget.vue
index 77aca3f..05c5a06 100644
--- a/src/components/ChatWidget.vue
+++ b/src/components/ChatWidget.vue
@@ -1,12 +1,13 @@
-
-
+
-
+
-
+ Chat with Opey
-
+
-
+
-
+ Send
+ Send
diff --git a/src/obp/opey-functions.ts b/src/obp/opey-functions.ts
new file mode 100644
index 0000000..eb07d4a
--- /dev/null
+++ b/src/obp/opey-functions.ts
@@ -0,0 +1,98 @@
+export interface OpeyMessage {
+  id: string;
+  role: string;
+  content: string;
+}
+
+export interface OpeyStreamContext {
+  currentAssistantMessage: OpeyMessage;
+  messages: OpeyMessage[];
+  status: string;
+}
+
+/**
+ * Process a stream from the Opey API and update the message content.
+ * @param stream The ReadableStream from the fetch response
+ * @param context The context object containing the message to update and the status
+ * @returns A promise that resolves when the stream is complete
+ */
+export async function processOpeyStream(
+  stream: ReadableStream<Uint8Array>,
+  context: OpeyStreamContext
+): Promise<void> {
+  const reader = stream.getReader();
+  const decoder = new TextDecoder();
+
+  try {
+    while (true) {
+      const { done, value } = await reader.read();
+
+      if (done) {
+        console.log('Stream complete');
+        context.status = 'ready';
+        break;
+      }
+
+      // Decode in streaming mode so multi-byte characters split across
+      // chunks are handled correctly
+      const decodedValue = decoder.decode(value, { stream: true });
+      console.debug('Received:', decodedValue); // DEBUG
+
+      // Parse the SSE data format: one 'data: {...}' frame per line
+      const lines = decodedValue.split('\n');
+      for (const line of lines) {
+        if (line.startsWith('data: ') && line !== 'data: [DONE]') {
+          try {
+            const jsonStr = line.substring(6); // Remove 'data: '
+            const data = JSON.parse(jsonStr);
+
+            if (data.type === 'token' && data.content) {
+              // Append content to the current assistant message
+              context.currentAssistantMessage.content += data.content;
+              // Force Vue to detect the change
+              context.messages = [...context.messages];
+            }
+          } catch (e) {
+            throw new Error(`Error parsing JSON: ${e}`);
+          }
+        }
+      }
+    }
+  } catch (error) {
+    console.error('Stream error:', error);
+    context.status = 'ready';
+    throw error;
+  }
+}
+
+export async function sendOpeyMessage(
+  message: string,
+  threadId: string,
+  isToolCallApproval: boolean,
+  context: OpeyStreamContext
+): Promise<void> {
+  try {
+    const response = await fetch('/api/opey/stream', {
+      method: 'POST',
+      headers: {
+        'Content-Type': 'application/json'
+      },
+      body: JSON.stringify({
+        thread_id: threadId,
+        message: message,
+        is_tool_call_approval: isToolCallApproval
+      })
+    });
+
+    // fetch does not reject on HTTP error statuses, so check explicitly
+    if (!response.ok) {
+      throw new Error(`Opey API responded with status ${response.status}`);
+    }
+
+    const stream = response.body;
+    if (!stream) {
+      throw new Error('No stream returned from API');
+    }
+
+    await processOpeyStream(stream, context);
+  } catch (error) {
+    console.error('Error sending Opey message:', error);
+    context.status = 'ready';
+    throw error;
+  }
+}
\ No newline at end of file
diff --git a/src/test/ChatMessage.test.ts b/src/test/ChatMessage.test.ts
index 55bb581..4eb1fc6 100644
--- a/src/test/ChatMessage.test.ts
+++ b/src/test/ChatMessage.test.ts
@@ -5,26 +5,37 @@ import ChatMessage from '../components/ChatMessage.vue'
 
 describe('ChatMessage', () => {
   it('should render correctly on human message', () => {
-    const humanMessage = JSON.parse(`{
-      "messages": [
-        {
-          "content": "Hello Opey!",
-          "additional_kwargs": {},
-          "response_metadata": {},
-          "type": "human",
-          "id": "ed614658-22a3-40a3-b403-bc790b941a9a",
-          "example": false
-        }
-      ]
-    }`)
+    const humanMessage = {
+      id: '123',
+      role: 'user',
+      content: 'Hello Opey!',
+    }
     const wrapper = mount(ChatMessage, {
       props: {
         message: humanMessage
       }
     })
 
-    expect(wrapper.text()).toContain('Hello Opey!')
+    expect(wrapper.text()).toContain(humanMessage.content)
     expect(wrapper.html()).toMatchSnapshot()
   })
+
+  it('should render correctly on assistant message', () => {
+    const assistantMessage = {
+      id: '123',
+      role: 'assistant',
+      content: 'Hi there, how can I help you today?',
+    }
+    const wrapper = mount(ChatMessage, {
+      props: {
+        message: assistantMessage
+      }
+    })
+
+    expect(wrapper.text()).toContain(assistantMessage.content)
+    expect(wrapper.html()).toMatchSnapshot()
+  })
 })
diff --git a/src/test/ChatWidget.test.ts b/src/test/ChatWidget.test.ts
index 5641f82..27d1b8f 100644
--- a/src/test/ChatWidget.test.ts
+++ b/src/test/ChatWidget.test.ts
@@ -3,7 +3,7 @@ import { describe, it, expect } from 'vitest';
 import ChatWidget from '../components/ChatWidget.vue'
 
 describe('ChatWidget', () => {
-  it('should answer a basic question', async () => {
+  it('should append messages in correct order', async () => {
 
   })
 })
\ No newline at end of file
diff --git a/src/test/__snapshots__/ChatMessage.test.ts.snap b/src/test/__snapshots__/ChatMessage.test.ts.snap
new file mode 100644
index 0000000..4074347
--- /dev/null
+++ b/src/test/__snapshots__/ChatMessage.test.ts.snap
@@ -0,0 +1,17 @@
+// Vitest Snapshot v1, https://vitest.dev/guide/snapshot.html
+
+exports[`ChatMessage > should render correctly on assistant message 1`] = `
+"
+
+  Hi there, how can I help you today?
+
+"
+`;
+
+exports[`ChatMessage > should render correctly on human message 1`] = `
+"
+
+  Hello Opey!
+
+"
+`;
diff --git a/src/test/opey-functions.test.ts b/src/test/opey-functions.test.ts
new file mode 100644
index 0000000..0e531bf
--- /dev/null
+++ b/src/test/opey-functions.test.ts
@@ -0,0 +1,144 @@
+import * as OpeyModule from '@/obp/opey-functions';
+import { describe, it, expect, beforeEach, afterEach, vi } from 'vitest';
+
+describe('processOpeyStream', () => {
+  let mockContext: OpeyModule.OpeyStreamContext;
+
+  beforeEach(() => {
+    // Reset the mock context before each test
+    mockContext = {
+      currentAssistantMessage: {
+        id: '123',
+        role: 'assistant',
+        content: '',
+      },
+      messages: [],
+      status: 'loading',
+    }
+  })
+
+  it('should update context with streamed content', async () => {
+    // Mock a ReadableStream
+    const mockAssistantMessage = "Hi I'm Opey, your personal banking assistant. I'll certainly not take over the world, no, not at all!"
+
+    // Split the message into chunks, then reappend the whitespace (this simulates LLM tokens)
+    const mockMessageChunks = mockAssistantMessage.split(" ")
+    for (let i = 0; i < mockMessageChunks.length; i++) {
+      // Don't add whitespace to the last chunk
+      if (i === mockMessageChunks.length - 1) {
+        break
+      }
+      mockMessageChunks[i] = `${mockMessageChunks[i]} `
+    }
+
+    // Fake the token stream
+    const stream = new ReadableStream({
+      start(controller) {
+        for (let i = 0; i < mockMessageChunks.length; i++) {
+          controller.enqueue(new TextEncoder().encode(`data: {"type":"token","content":"${mockMessageChunks[i]}"}\n`));
+        }
+        controller.close();
+      },
+    });
+
+    await OpeyModule.processOpeyStream(stream, mockContext)
+    expect(mockContext.currentAssistantMessage.content).toBe(mockAssistantMessage)
+  })
+
+  it('should throw an error when the stream is closed by the server', async () => {
+    const brokenStream = new ReadableStream({
+      start(controller) {
+        for (let i = 0; i < 10; i++) {
+          if (i === 5) {
+            controller.error(new Error('Stream closed by server'))
+            break;
+          }
+          controller.enqueue(new TextEncoder().encode(`data: {"type":"token","content":"test"}\n`));
+        }
+      },
+    });
+
+    await expect(OpeyModule.processOpeyStream(brokenStream, mockContext))
+      .rejects
+      .toThrow('Stream closed by server')
+  })
+
+  it('should throw an error when the chunk is not valid json', async () => {
+    const invalidJsonStream = new ReadableStream({
+      start(controller) {
+        for (let i = 0; i < 10; i++) {
+          controller.enqueue(new TextEncoder().encode(`data: {"type":"token","content":"test"}\n`));
+          if (i === 5) {
+            controller.enqueue(new TextEncoder().encode('data: "type":"token","content":"test"}\n'));
+          }
+        }
+        controller.close();
+      }
+    })
+
+    await expect(OpeyModule.processOpeyStream(invalidJsonStream, mockContext))
+      .rejects
+      .toThrowError()
+  })
+
+  it("should set status to 'ready' when completed", async () => {
+    const stream = new ReadableStream({
+      start(controller) {
+        controller.enqueue(new TextEncoder().encode(`data: {"type":"token","content":"test"}\n`));
+        controller.close();
+      }
+    })
+
+    await OpeyModule.processOpeyStream(stream, mockContext)
+    expect(mockContext.status).toBe('ready')
+  })
+})
+
+describe('sendOpeyMessage', () => {
+  let mockContext: OpeyModule.OpeyStreamContext;
+
+  beforeEach(() => {
+    mockContext = {
+      currentAssistantMessage: {
+        id: '123',
+        role: 'assistant',
+        content: '',
+      },
+      messages: [],
+      status: 'loading',
+    }
+
+    // Create a mock stream
+    const mockStream = new ReadableStream({
+      start(controller) {
+        controller.enqueue(new TextEncoder().encode(`data: {"type":"token","content":"test"}\n`));
+        controller.close();
+      },
+    });
+
+    // Mock the fetch function
+    global.fetch = vi.fn(() =>
+      Promise.resolve(new Response(mockStream, {
+        headers: { 'content-type': 'text/event-stream' },
+        status: 200,
+      }))
+    );
+  })
+
+  afterEach(() => {
+    vi.clearAllMocks()
+  })
+
+  it('should call fetch', async () => {
+    await OpeyModule.sendOpeyMessage('test message', '123', false, mockContext)
+
+    expect(global.fetch).toHaveBeenCalled()
+  })
+
+  it("should push the 'ready' status to the context", async () => {
+    await OpeyModule.sendOpeyMessage('test message', '123', false, mockContext)
+
+    expect(mockContext.status).toBe('ready')
+  })
+})
\ No newline at end of file
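A minimal usage sketch for the new helpers follows. It assumes the backend emits SSE frames shaped like 'data: {"type":"token","content":"..."}', which is the format processOpeyStream parses; the function name submitUserMessage and the use of crypto.randomUUID() for message IDs are illustrative assumptions, not part of this diff.

import { sendOpeyMessage } from '@/obp/opey-functions';
import type { OpeyMessage, OpeyStreamContext } from '@/obp/opey-functions';

// Hypothetical caller: a chat component would own the context object
// (e.g. as reactive state) and invoke this on form submit.
async function submitUserMessage(
  text: string,
  threadId: string,
  context: OpeyStreamContext
): Promise<void> {
  // Record the user's message.
  const userMessage: OpeyMessage = { id: crypto.randomUUID(), role: 'user', content: text };
  context.messages = [...context.messages, userMessage];

  // Seed an empty assistant message; processOpeyStream appends each
  // streamed token to currentAssistantMessage.content as it arrives.
  context.currentAssistantMessage = { id: crypto.randomUUID(), role: 'assistant', content: '' };
  context.messages = [...context.messages, context.currentAssistantMessage];

  context.status = 'loading';
  // Resolves once the SSE stream completes; processOpeyStream sets
  // status back to 'ready' on both completion and error paths.
  await sendOpeyMessage(text, threadId, false, context);
}

Pushing currentAssistantMessage into messages by reference is what lets the in-place content updates in processOpeyStream appear in the rendered list each time the array is re-created.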