tests and syntax highlighting

This commit is contained in:
Nemo Godebski-Pedersen 2025-02-27 14:58:34 +00:00
parent fa8288a476
commit e97ee622d2
7 changed files with 376 additions and 94 deletions

View File

@ -1,4 +1,25 @@
<script>
import MarkdownIt from "markdown-it";
// Imports for syntax highlighting
import Prism from 'prismjs';
import 'prismjs/themes/prism.css'
import 'prismjs/components/prism-bash'
import 'prismjs/components/prism-javascript'
import 'prismjs/components/prism-python'
import 'prismjs/components/prism-go'
import 'prismjs/components/prism-json'
import 'prismjs/components/prism-liquid'
import 'prismjs/components/prism-markdown'
import 'prismjs/components/prism-markup-templating'
import 'prismjs/components/prism-php'
import 'prismjs/components/prism-scss'
import 'prismjs/components/prism-yaml'
import 'prismjs/components/prism-markup';
import 'prismjs/components/prism-http';
import 'prismjs/themes/prism-okaidia.css';
export default {
props: {
message: {
@ -12,12 +33,46 @@
content: ''
}
},
methods: {
highlightCode(content, language) {
if (Prism.languages[language]) {
return Prism.highlight(content, Prism.languages[language], language);
} else {
console.log(`could not highlight ${language} code block, add language to dependencies`)
// If the language is not recognized, return the content as is
return content;
}
},
renderMarkdown(content) {
const markdown = new MarkdownIt({
highlight: (str, lang) => {
console.log(`highlighting ${lang} code block`)
if (lang && Prism.languages[lang]) {
try {
return `<pre class="language-${lang}"><code>${this.highlightCode(str, lang)}</code></pre>`;
} catch (error) {
console.log(`error hilighting ${lang} code block: ${error}`)
}
} else if (!lang) {
console.warn('No language specified for code block')
} else if (!Prism.languages[lang]) {
console.warn(`Language ${lang} not recognized or not installed`)
}
// If the language is not specified or not recognized, use a default language
return `<pre class="language-"><code>${markdown.utils.escapeHtml(str)}</code></pre>`;
}
});
return markdown.render(content);
},
}
}
</script>
<template>
<div class="message-container" :class="this.message.role">
<div class="content">{{this.message.content}}</div>
<div class="content" v-html="renderMarkdown(this.message.content)"></div>
</div>
</template>

View File

@ -1,12 +1,13 @@
<!--
placeholder for Opey II Chat widget
-->
<script>
<script lang="ts">
import { ref, reactive } from 'vue'
import { Close } from '@element-plus/icons-vue'
import ChatMessage from './ChatMessage.vue';
import { v4 as uuidv4 } from 'uuid';
import { OpeyStreamContext, OpeyMessage, sendOpeyMessage } from '@/obp/opey-functions';
export default {
setup () {
@ -18,10 +19,16 @@ export default {
return {
chatOpen: false,
thread_id: uuidv4(),
messages: ref([]),
status: 'ready',
input: '',
currentAssistantMessage: null,
opeyContext: reactive({
currentAssistantMessage: {
id: '',
role: 'assistant',
content: ''
},
messages: new Array<OpeyMessage>(),
status: 'ready'
} as OpeyStreamContext),
}
},
components: {
@ -33,92 +40,42 @@ export default {
},
async onSubmit() {
// Add user message to the messages array
const userMessage = {
const userMessage: OpeyMessage = {
id: uuidv4(),
role: 'user',
content: this.input
};
this.messages.push(userMessage);
this.opeyContext.messages.push(userMessage);
// Create a placeholder for the assistant's response
this.currentAssistantMessage = {
this.opeyContext.currentAssistantMessage = {
id: uuidv4(),
role: 'assistant',
content: ''
};
this.messages.push(this.currentAssistantMessage);
this.opeyContext.messages.push(this.opeyContext.currentAssistantMessage);
// Set status to loading
this.status = 'loading';
this.opeyContext.status = 'loading';
const userInput = this.input;
// Clear input field after sending
this.input = '';
try {
const response = await fetch('/api/opey/stream', {
method: 'POST',
headers: {
'Content-Type': 'application/json',
},
body: JSON.stringify({
thread_id: this.thread_id,
message: userInput,
is_tool_call_approval: false,
}),
});
const stream = response.body;
const decoder = new TextDecoder();
const reader = stream.getReader();
// Use an arrow function to preserve 'this' context
const processStream = async () => {
try {
while (true) {
const { done, value } = await reader.read();
if (done) {
console.log('Stream complete');
this.status = 'ready';
break;
}
const decodedValue = decoder.decode(value);
console.debug('Received:', decodedValue); //DEBUG
// Parse the SSE data format
const lines = decodedValue.split('\n');
for (const line of lines) {
if (line.startsWith('data: ') && line !== 'data: [DONE]') {
try {
const jsonStr = line.substring(6); // Remove 'data: '
const data = JSON.parse(jsonStr);
if (data.type === 'token' && data.content) {
// Append content to the current assistant message
this.currentAssistantMessage.content += data.content;
// Force Vue to detect the change
this.messages = [...this.messages];
}
} catch (e) {
console.error('Error parsing JSON:', e);
}
}
}
}
} catch (error) {
console.error('Stream error:', error);
this.status = 'ready';
}
};
// Start processing the stream
processStream();
await sendOpeyMessage(
userMessage.content,
this.thread_id,
false,
this.opeyContext
)
console.log('Opey Status: ', this.opeyContext.status)
} catch (error) {
console.error('Error:', error);
this.status = 'ready';
console.error('Error in chat:', error);
this.opeyContext.status = 'ready';
}
},
},
@ -127,34 +84,34 @@ export default {
</script>
<template>
<div v-if="!this.chatOpen" class="chat-widget-button-container">
<div v-if="!chatOpen" class="chat-widget-button-container">
<el-tooltip content="Chat with our AI, Opey" placement="left" effect="light">
<el-button class="chat-widget-button" type="primary" size="large" @click="this.toggleChat" circle >
<el-button class="chat-widget-button" type="primary" size="large" @click="toggleChat" circle >
<img alt="AI Help" src="@/assets/opey-icon-white.png" />
</el-button>
</el-tooltip>
</div>
<div v-if="this.chatOpen" class="chat-container">
<div v-if="chatOpen" class="chat-container">
<div class="chat-container-inner">
<el-container direction="vertical">
<el-header>
<img alt="Opey Logo" src="@/assets/opey-logo-inv.png">
Chat with Opey
<el-button type="danger" :icon="this.Close" @click="this.toggleChat" size="small" circle></el-button>
<el-button type="danger" :icon="Close" @click="toggleChat" size="small" circle></el-button>
</el-header>
<el-main>
<div class="messages-container">
<ChatMessage v-for="message in messages" :key="message.id" :message="message" />
<ChatMessage v-for="message in opeyContext.messages" :key="message.id" :message="message" />
</div>
</el-main>
<el-footer>
<el-form :inline="true" @submit.prevent>
<el-form-item label="Message">
<el-input v-model="input" placeholder="Type your message..." :disabled="status !== 'ready'" @keypress.enter="this.onSubmit" clearable />
<el-input v-model="input" placeholder="Type your message..." :disabled="opeyContext.status !== 'ready'" @keypress.enter="onSubmit" clearable />
</el-form-item>
<el-form-item>
<el-button type="primary" @click="this.onSubmit">Send</el-button>
<el-button type="primary" @click="onSubmit">Send</el-button>
</el-form-item>
</el-form>
</el-footer>

98
src/obp/opey-functions.ts Normal file
View File

@ -0,0 +1,98 @@
/**
 * A single chat message exchanged with the Opey assistant.
 */
export interface OpeyMessage {
  id: string;      // unique message id (callers generate a uuid v4)
  role: string;    // observed values at call sites: 'user' and 'assistant'
  content: string; // message body; assistant content is appended token-by-token while streaming
}
/**
 * Mutable context shared between the chat UI and the streaming helpers.
 * The stream processor appends tokens to `currentAssistantMessage.content`
 * and flips `status` back to 'ready' when the stream finishes or errors.
 */
export interface OpeyStreamContext {
  currentAssistantMessage: OpeyMessage; // the in-progress assistant reply being streamed into
  messages: OpeyMessage[];              // full conversation history rendered by the UI
  status: string;                       // observed values: 'loading' while streaming, 'ready' otherwise
}
/**
 * Process a Server-Sent-Events stream from the Opey API and append each
 * `token` payload to `context.currentAssistantMessage.content`.
 *
 * Robustness over a naive per-chunk parse:
 * - decodes with `{ stream: true }` so a multi-byte UTF-8 character split
 *   across two network chunks is not corrupted;
 * - buffers a trailing partial line between reads so an SSE event split
 *   across chunks is parsed once complete (and flushed at end of stream).
 *
 * @param stream The ReadableStream from the fetch response
 * @param context The context object containing the message to update and status
 * @returns A promise that resolves when the stream is complete
 * @throws Rethrows reader errors and malformed-event JSON errors after
 *         resetting `context.status` to 'ready'
 */
export async function processOpeyStream(
  stream: ReadableStream<Uint8Array>,
  context: OpeyStreamContext
): Promise<void> {
  const reader = stream.getReader();
  const decoder = new TextDecoder();
  let buffer = ''; // trailing partial SSE line carried over between chunks

  // Parse one complete SSE line; ignores non-data lines and the [DONE] sentinel.
  const handleLine = (line: string): void => {
    if (!line.startsWith('data: ') || line === 'data: [DONE]') {
      return;
    }
    const jsonStr = line.substring(6); // Remove 'data: '
    let data: unknown;
    try {
      data = JSON.parse(jsonStr);
    } catch (e) {
      throw new Error(`Error parsing JSON: ${e}`);
    }
    const event = data as { type?: string; content?: string };
    if (event.type === 'token' && event.content) {
      // Append content to the current assistant message
      context.currentAssistantMessage.content += event.content;
      // Force Vue to detect the change
      context.messages = [...context.messages];
    }
  };

  try {
    while (true) {
      const { done, value } = await reader.read();
      if (done) {
        // Flush any bytes held by the streaming decoder, then the final
        // (possibly newline-less) buffered line.
        buffer += decoder.decode();
        if (buffer) {
          handleLine(buffer);
        }
        context.status = 'ready';
        break;
      }
      // stream: true keeps partial multi-byte sequences inside the decoder
      buffer += decoder.decode(value, { stream: true });
      const lines = buffer.split('\n');
      buffer = lines.pop() ?? ''; // keep the unterminated tail for next read
      for (const line of lines) {
        handleLine(line);
      }
    }
  } catch (error) {
    console.error('Stream error:', error);
    context.status = 'ready';
    throw error;
  }
}
/**
 * Send a user message to the Opey streaming endpoint and pipe the response
 * body through `processOpeyStream`, mutating `context` in place.
 *
 * @param message The user's message text
 * @param threadId Conversation thread identifier sent to the API
 * @param isToolCallApproval Whether this message approves a pending tool call
 * @param context Stream context to receive tokens and status transitions
 * @throws On network failure, a non-2xx response, a missing response body,
 *         or any stream-processing error; `context.status` is reset to
 *         'ready' before rethrowing
 */
export async function sendOpeyMessage(
  message: string,
  threadId: string,
  isToolCallApproval: boolean,
  context: OpeyStreamContext
): Promise<void> {
  try {
    const response = await fetch('/api/opey/stream', {
      method: 'POST',
      headers: {
        'Content-Type': 'application/json'
      },
      body: JSON.stringify({
        thread_id: threadId,
        message: message,
        is_tool_call_approval: isToolCallApproval
      })
    })
    // Surface HTTP errors explicitly rather than feeding an error page
    // into the SSE parser.
    if (!response.ok) {
      throw new Error(`Opey API responded with status ${response.status}`)
    }
    const stream = response.body;
    if (!stream) {
      throw new Error('No stream returned from API')
    }
    await processOpeyStream(stream, context);
  } catch (error) {
    console.error('Error sending Opey message:', error);
    context.status = 'ready';
    throw error;
  }
}

View File

@ -5,26 +5,37 @@ import ChatMessage from '../components/ChatMessage.vue'
describe('ChatMessage', () => {
it('should render correctly on human message', () => {
const humanMessage = JSON.parse(`{
"messages": [
{
"content": "Hello Opey!",
"additional_kwargs": {},
"response_metadata": {},
"type": "human",
"id": "ed614658-22a3-40a3-b403-bc790b941a9a",
"example": false
}
]
}`)
const humanMessage = {
id: 123,
role: 'user',
content: 'Hello Opey!',
}
const wrapper = mount(ChatMessage, {
props: {
message: humanMessage
}
})
expect(wrapper.text()).toContain('Hello Opey!')
expect(wrapper.text()).toContain(humanMessage.content)
expect(wrapper.html()).toMatchSnapshot()
})
it('should render correctly on assistant message', () => {
const assistantMessage = {
id: 123,
role: 'assistant',
content: 'Hi there, how can I help you today?',
}
const wrapper = mount(ChatMessage, {
props: {
message: assistantMessage
}
})
expect(wrapper.text()).toContain(assistantMessage.content)
expect(wrapper.html()).toMatchSnapshot()
})
})

View File

@ -3,7 +3,7 @@ import { describe, it, expect } from 'vitest';
import ChatWidget from '../components/ChatWidget.vue'
describe('ChatWidget', () => {
it('should answer a basic question', async () => {
it('should append messages in correct order', async () => {
})
})

View File

@ -0,0 +1,17 @@
// Vitest Snapshot v1, https://vitest.dev/guide/snapshot.html
exports[`ChatMessage > should render correctly on assistant message 1`] = `
"<div class=\\"message-container assistant\\">
<div class=\\"content\\">
<p>Hi there, how can I help you today?</p>
</div>
</div>"
`;
exports[`ChatMessage > should render correctly on human message 1`] = `
"<div class=\\"message-container user\\">
<div class=\\"content\\">
<p>Hello Opey!</p>
</div>
</div>"
`;

View File

@ -0,0 +1,144 @@
import * as OpeyModule from '@/obp/opey-functions';
import { describe, it, expect, beforeEach, afterEach, vi } from 'vitest';
// Note: the describe callback must be synchronous — vitest collects suites
// eagerly and does not await a returned promise.
describe('processOpeyStream', () => {
    let mockContext: OpeyModule.OpeyStreamContext;
    beforeEach(() => {
        // Fresh context per test so streamed content/status cannot leak
        // between cases.
        mockContext = {
            currentAssistantMessage: {
                id: '123',
                role: 'assistant',
                content: '',
            },
            messages: [],
            status: 'loading',
        }
    })
    it('should update context with streamed content', async () => {
        const mockAssistantMessage = "Hi I'm Opey, your personal banking assistant. I'll certainly not take over the world, no, not at all!"
        // Split the message into word chunks and re-append the separator
        // space to all but the last chunk (simulates LLM token streaming).
        const mockMessageChunks = mockAssistantMessage.split(" ")
        for (let i = 0; i < mockMessageChunks.length - 1; i++) {
            mockMessageChunks[i] = `${mockMessageChunks[i]} `
        }
        // Fake the token stream: one SSE data line per chunk.
        const stream = new ReadableStream<Uint8Array>({
            start(controller) {
                for (const chunk of mockMessageChunks) {
                    controller.enqueue(new TextEncoder().encode(`data: {"type":"token","content":"${chunk}"}\n`));
                }
                controller.close();
            },
        });
        await OpeyModule.processOpeyStream(stream, mockContext)
        expect(mockContext.currentAssistantMessage.content).toBe(mockAssistantMessage)
    })
    it('should throw an error when the stream is closed by the server', async () => {
        // Errors the stream mid-way through emitting tokens.
        const brokenStream = new ReadableStream<Uint8Array>({
            start(controller) {
                for (let i = 0; i < 10; i++) {
                    if (i === 5) {
                        controller.error(new Error('Stream closed by server'))
                        break;
                    }
                    controller.enqueue(new TextEncoder().encode(`data: {"type":"token","content":"test"}\n`));
                }
            },
        });
        await expect(OpeyModule.processOpeyStream(brokenStream, mockContext))
            .rejects
            .toThrow('Stream closed by server')
    })
    it('should throw an error when the chunk is not valid json', async () => {
        // Injects one malformed data line among otherwise valid events.
        const invalidJsonStream = new ReadableStream<Uint8Array>({
            start(controller) {
                for (let i = 0; i < 10; i++) {
                    controller.enqueue(new TextEncoder().encode(`data: {"type":"token","content":"test"}\n`));
                    if (i === 5) {
                        controller.enqueue(new TextEncoder().encode('data: "type":"token","content":"test"}\n'));
                    }
                }
                controller.close();
            }
        })
        await expect(OpeyModule.processOpeyStream(invalidJsonStream, mockContext))
            .rejects
            .toThrowError()
    })
    it("should set status to 'ready' when completed", async () => {
        const stream = new ReadableStream<Uint8Array>({
            start(controller) {
                controller.enqueue(new TextEncoder().encode(`data: {"type":"token","content":"test"}\n`));
                controller.close();
            }
        })
        await OpeyModule.processOpeyStream(stream, mockContext)
        expect(mockContext.status).toBe('ready')
    })
})
describe('sendOpeyMessage', () => {
    let mockContext: OpeyModule.OpeyStreamContext;
    beforeEach(() => {
        mockContext = {
            currentAssistantMessage: {
                id: '123',
                role: 'assistant',
                content: '',
            },
            messages: [],
            status: 'loading',
        }
        // Single-token SSE body; recreated per test because a ReadableStream
        // can only be consumed once.
        const mockStream = new ReadableStream<Uint8Array>({
            start(controller) {
                controller.enqueue(new TextEncoder().encode(`data: {"type":"token","content":"test"}\n`));
                controller.close();
            },
        });
        // vi.stubGlobal (rather than assigning to global.fetch) so the stub
        // is restored in afterEach and cannot leak into other test files.
        vi.stubGlobal('fetch', vi.fn(() =>
            Promise.resolve(new Response(mockStream, {
                headers: { 'content-type': 'text/event-stream' },
                status: 200,
            }))
        ))
    })
    afterEach(() => {
        vi.unstubAllGlobals()
        vi.clearAllMocks()
    })
    it('should call fetch', async () => {
        await OpeyModule.sendOpeyMessage('test message', '123', false, mockContext)
        expect(global.fetch).toHaveBeenCalled()
    })
    it("should push the 'ready' status to the context", async () => {
        await OpeyModule.sendOpeyMessage('test message', '123', false, mockContext)
        expect(mockContext.status).toBe('ready')
    })
})