Changes from all commits
34 commits
d79b3c5
add: stream output
quge009 Sep 12, 2025
4278f23
change: use frontend to assign turnId
quge009 Sep 12, 2025
7e01649
modify: state update help message
quge009 Sep 12, 2025
434d8ec
improve: auto scroll
quge009 Sep 12, 2025
b68c194
add more status update messages
quge009 Sep 12, 2025
1adf126
minor: add new status update message
quge009 Sep 17, 2025
92d2869
Add feature: multi user concurrency
quge009 Oct 10, 2025
a0c50a0
fix: minor bug for non-streaming api
quge009 Oct 11, 2025
f4c3f1b
improve user experience: post unauthorized access information to unau…
quge009 Oct 11, 2025
7776476
code cleanup: remove unnecessary llmsession instances
quge009 Oct 14, 2025
1e1285c
code refactor: stage 0
quge009 Oct 14, 2025
99c280a
code refactor: stage 1
quge009 Oct 15, 2025
7f989e7
code refactor: stage 2
quge009 Oct 15, 2025
856f861
code refactor: stage 3, use the same llmsession for each conversation…
quge009 Oct 16, 2025
9f8a8b0
code refactor: stage 4, change ltp into a class for easier state mana…
quge009 Oct 17, 2025
798d738
code refactor: stage 5, fix chunk accumulation bug
quge009 Oct 17, 2025
173a9b2
code refactor: stage 6, smart help into a class
quge009 Oct 22, 2025
01384ec
minor bug fix
quge009 Oct 22, 2025
16f4c63
improve: response latency by merging small llm chat calls into one clas…
quge009 Oct 23, 2025
27d8f64
improve: response latency, by reusing llmsession for requests from the …
quge009 Oct 24, 2025
138ba06
fix bug: missing import, changed prompt file name
quge009 Oct 28, 2025
7ba1374
debugging: log the question parsing output into the dev log
quge009 Oct 28, 2025
b9f3d52
resolve review comment: add appropriate null handling
quge009 Oct 28, 2025
e42bea9
update: nginx configuration to add the new /copilot/api/stream endpoi…
quge009 Oct 28, 2025
372cfe8
Update src/copilot-chat/src/copilot_agent/copilot_conversation.py
quge009 Oct 28, 2025
3c41e32
remove unnecessary comment
quge009 Oct 28, 2025
32a2906
Update src/copilot-chat/src/copilot_agent/copilot_turn.py
quge009 Oct 28, 2025
4019f8a
resolve review comment: remove console log
quge009 Oct 28, 2025
f8a5211
Update src/copilot-chat/src/copilot_agent/copilot_service.py
quge009 Oct 28, 2025
66d3a90
Update src/copilot-chat/src/copilot_agent/ltp/ltp.py
quge009 Oct 28, 2025
735ec38
Update src/copilot-chat/src/copilot_agent/copilot_conversation.py
quge009 Oct 28, 2025
53ddf4d
update: remove unused function
quge009 Oct 28, 2025
34b95ae
improve: robustness, gracefully handle classification failures
quge009 Oct 28, 2025
cb34b54
change classifier version for deployment
quge009 Oct 28, 2025
553 changes: 553 additions & 0 deletions contrib/copilot-plugin/package-lock.json

Large diffs are not rendered by default.

2 changes: 2 additions & 0 deletions contrib/copilot-plugin/package.json
@@ -51,6 +51,7 @@
"react-dom": "^18.3.1",
"react-icons": "^5.5.0",
"react-markdown": "^10.1.0",
"rehype-raw": "^6.1.1",
"react-refresh": "^0.11.0",
"remark-gfm": "^4.0.1",
"resolve": "^1.20.0",
@@ -77,6 +78,7 @@
"start": "node scripts/start.js",
"start:user1": "REACT_APP_USER=dev.eva PORT=3000 node scripts/start.js",
"start:user2": "REACT_APP_USER=dev.ben PORT=3001 node scripts/start.js",
"start:user3": "REACT_APP_USER=dev.unknown PORT=3002 node scripts/start.js",
"build": "node scripts/build.js",
"test": "node scripts/test.js",
"clean": "rimraf build"
123 changes: 104 additions & 19 deletions contrib/copilot-plugin/src/app/ChatBox.tsx
@@ -21,8 +21,8 @@ export default function ChatBox() {
// Use local backend when running the dev server (npm start),
// and use the relative path for production builds (npm run build).
const REMOTE_SERVER_URL = process.env.NODE_ENV === 'development'
? 'http://127.0.0.1:60000/copilot/api/operation'
: '/copilot/api/operation';
? 'http://127.0.0.1:60000/copilot/api/stream'
: '/copilot/api/stream';

const makeChatRequest = async (e: React.FormEvent) => {
e.preventDefault();
@@ -38,6 +38,17 @@
setPrompt("");
setLoading(true);
try {
// create a stable turnId and include it in the payload so server will echo/use it
const turnId = uuidv4();
const messageInfo = {
userId: paiuser,
convId: currentConversationId,
turnId: turnId,
timestamp: Math.floor(Date.now()),
timestampUnit: "ms",
type: "question",
};

const payload = {
async_: false,
stream: false,
@@ -48,18 +59,14 @@
username: paiuser,
restToken: restServerToken,
jobToken: jobServerToken,
currentJob: null // currentJob ? { id: currentJob.id, name: currentJob.name, username: currentJob.username, status: currentJob.status, ip: currentJob.ip, port: currentJob.port } : null
currentJob: null
},
messageInfo: {
userId: paiuser,
convId: currentConversationId,
turnId: uuidv4(),
timestamp: Math.floor(Date.now()),
timestampUnit: "ms",
type: "question",
}
messageInfo: messageInfo
}
};

// Create assistant placeholder and attach the same messageInfo (turnId) so feedback maps to this response
useChatStore.getState().addChat({ role: "assistant", message: "", timestamp: new Date(), messageInfo });
const response = await fetch(REMOTE_SERVER_URL, {
method: "POST",
headers: {
@@ -69,15 +76,93 @@
body: JSON.stringify(payload),
});
if (!response.ok) throw new Error("Remote server error");
const data = await response.json();
if (data?.data?.answer !== "skip") {
useChatStore.getState().addChat({
role: "assistant",
message: data?.data?.answer ?? "No answer found",
timestamp: new Date(),
messageInfo: data?.data?.message_info, // Store the message_info from response
});

const reader = response.body?.getReader();
if (!reader) throw new Error('No response body for streaming');
const decoder = new TextDecoder();
// Buffer incoming bytes and parse SSE-style messages (separated by '\n\n')
let buffer = '';
while (true) {
const { value, done: readerDone } = await reader.read();
if (value) {
buffer += decoder.decode(value, { stream: true });
}

// Process all complete SSE messages in buffer
let sepIndex;
while ((sepIndex = buffer.indexOf('\n\n')) !== -1) {
Contributor review comment: I am not sure whether this could cause an infinite loop when buffer.indexOf('\n\n') !== -1; please make sure the loop can be exited no matter which branch executes.

const rawEvent = buffer.slice(0, sepIndex);
buffer = buffer.slice(sepIndex + 2);

// Extract data: lines and join with newline to preserve original formatting
const lines = rawEvent.split(/\n/);
const dataParts: string[] = [];
let isDoneEvent = false;
for (const line of lines) {
if (line.startsWith('data:')) {
dataParts.push(line.slice(5));
} else if (line.startsWith('event:')) {
const ev = line.slice(6).trim();
if (ev === 'done') isDoneEvent = true;
}
}

if (dataParts.length > 0) {
const dataStr = dataParts.join('\n');
// If the server sent a JSON 'append' event, append to last assistant message
let handled = false;
const trimmed = dataStr.trim();
if (trimmed.startsWith('{')) {
try {
const parsed = JSON.parse(trimmed);
if (parsed && parsed.type === 'append' && typeof parsed.text === 'string') {
useChatStore.getState().appendToLastAssistant(parsed.text);
handled = true;
}
else if (parsed && parsed.type === 'meta' && parsed.messageInfo) {
// attach backend-generated messageInfo (turnId etc.) to the last assistant message
useChatStore.getState().setLastAssistantMessageInfo(parsed.messageInfo);
handled = true;
}
} catch (e) {
// not JSON, fall through to full replace
}
}

if (!handled) {
// If server sent a full snapshot repeatedly (common when backend doesn't send structured append events),
// detect the already-displayed prefix and append only the new suffix. This avoids blinking and missing lines
// during rapid streaming of many list items.
const store = useChatStore.getState();
const msgs = store.chatMsgs;
let lastAssistant = "";
for (let i = msgs.length - 1; i >= 0; i--) {
if (msgs[i].role === 'assistant') {
lastAssistant = msgs[i].message || '';
break;
}
}

if (lastAssistant && dataStr.startsWith(lastAssistant)) {
const suffix = dataStr.slice(lastAssistant.length);
if (suffix.length > 0) store.appendToLastAssistant(suffix);
} else {
// Fallback: replace the last assistant message with the full reconstructed text
store.replaceLastAssistant(dataStr);
}
}
}

if (isDoneEvent) {
// stream finished
break;
}
}

if (readerDone) break;
}

// After the streaming loop, do not alter the assembled markdown so newlines are preserved
} catch (err) {
toast.error("Failed to get response from remote server");
}
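The streaming handler above does three things: it buffers raw bytes, splits on the blank-line frame delimiter used by SSE, and treats each data: payload either as a structured append/meta event or as a full snapshot to diff against the text already displayed. A condensed sketch of that loop, assuming the same frame format; applyAppend and applyReplace are hypothetical callbacks standing in for the store actions appendToLastAssistant and replaceLastAssistant:

```typescript
// Sketch: consume an SSE body, appending only new suffixes of each snapshot.
async function consumeSseStream(
  response: Response,
  applyAppend: (chunk: string) => void,  // hypothetical, e.g. appendToLastAssistant
  applyReplace: (text: string) => void,  // hypothetical, e.g. replaceLastAssistant
): Promise<void> {
  const reader = response.body?.getReader();
  if (!reader) throw new Error("No response body for streaming");
  const decoder = new TextDecoder();
  let buffer = "";
  let displayed = ""; // text the UI currently shows for the last assistant message

  while (true) {
    const { value, done: readerDone } = await reader.read();
    if (value) buffer += decoder.decode(value, { stream: true });

    // Each complete frame ends with '\n\n'. This inner loop always terminates:
    // every iteration removes sepIndex + 2 >= 2 characters from the buffer.
    let sepIndex: number;
    let sawDone = false;
    while ((sepIndex = buffer.indexOf("\n\n")) !== -1) {
      const rawEvent = buffer.slice(0, sepIndex);
      buffer = buffer.slice(sepIndex + 2);

      const dataParts: string[] = [];
      for (const line of rawEvent.split("\n")) {
        if (line.startsWith("data:")) dataParts.push(line.slice(5));
        else if (line.startsWith("event:") && line.slice(6).trim() === "done") sawDone = true;
      }
      if (dataParts.length === 0) continue;

      const snapshot = dataParts.join("\n");
      if (snapshot.startsWith(displayed)) {
        // Snapshot extends what is shown: append only the new suffix.
        applyAppend(snapshot.slice(displayed.length));
      } else {
        // Snapshot diverged (e.g. a rewrite): replace wholesale.
        applyReplace(snapshot);
      }
      displayed = snapshot;
    }

    if (sawDone || readerDone) break;
  }
}
```

The prefix diff is what prevents the flicker the code comments mention: replacing the full snapshot on every frame repaints the whole message, while appending only the suffix keeps earlier list items stable during rapid streaming.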
106 changes: 99 additions & 7 deletions contrib/copilot-plugin/src/app/ChatHistory.tsx
@@ -11,6 +11,7 @@ import { Bot, User, ThumbsUp, ThumbsDown } from "lucide-react";
import Markdown, { Components } from "react-markdown";

import remarkGfm from "remark-gfm";
import rehypeRaw from 'rehype-raw';
import { ChatMessage, useChatStore } from "../libs/state";
import { Pane } from "../components/pane";

@@ -101,6 +102,7 @@ const CustomMarkdown: React.FC<{ content: string }> = ({ content }) => {
<div className={`prose-sm text-base break-words word-wrap`}>
<Markdown
remarkPlugins={[remarkGfm]}
rehypePlugins={[rehypeRaw as any]}
components={{
pre({ node, ...props }: any) {
return <PreWithLineNumbers>{props.children}</PreWithLineNumbers>;
@@ -160,7 +162,7 @@ const Message: React.FC<{ message: ChatMessage, expand?: boolean, isAssistant?:
messageInfo: {
userId: paiuser,
convId: currentConversationId,
turnId: uuidv4(), // Use message's turnId or fallback to "0"
turnId: message.messageInfo?.turnId || "0",
timestamp: Math.floor(Date.now()),
timestampUnit: "ms",
type: "feedback",
@@ -298,13 +300,103 @@ const GroupedChatMessages: React.FC = () => {
const messages = useChatStore((state) => state.chatMsgs);
const scrollRef = useRef<HTMLDivElement>(null);

useEffect(() => {
if (scrollRef.current) {
scrollRef.current.scrollTop = scrollRef.current.scrollHeight;
}
}, [messages]);

// compute grouped messages and helper values early so effects can reference them
const groupedMessages = groupMessages(messages);
const lastText = groupedMessages.length
? groupedMessages[groupedMessages.length - 1].messages.map((m) => m.message).join('\n')
: ''
const NEAR_BOTTOM_THRESHOLD = 120

// Reliable auto-scroll for both new messages and streaming updates.
const prevCountRef = React.useRef<number>(0);
const lastTextRef = React.useRef<string>('');

// Scroll helper: find nearest scrollable element that actually overflows, otherwise fallback to window
const scrollToBottom = (startTarget?: HTMLElement | null) => {
const startEl = startTarget || scrollRef.current
if (!startEl) return
let cur: HTMLElement | null = startEl
while (cur && cur !== document.body) {
try {
const style = window.getComputedStyle(cur)
const overflowY = style.overflowY
if ((overflowY === 'auto' || overflowY === 'scroll' || overflowY === 'overlay') && cur.scrollHeight > cur.clientHeight) {
cur.scrollTop = cur.scrollHeight
return
}
} catch (e) {
// ignore
}
cur = cur.parentElement
}

// fallback to window/document
try {
window.scrollTo(0, document.documentElement.scrollHeight)
} catch (e) {
// ignore
}
}

useEffect(() => {
const el = scrollRef.current
if (!el) return

const distanceFromBottom = el.scrollHeight - el.scrollTop - el.clientHeight
const shouldScroll =
distanceFromBottom < NEAR_BOTTOM_THRESHOLD &&
(prevCountRef.current != groupedMessages.length || lastTextRef.current != lastText)

if (shouldScroll) {
// try smooth scrolling in next frame
requestAnimationFrame(() => {
try {
scrollToBottom(el)
} catch (e) {
// ignore
}
})

// fallback: ensure scroll after a short delay
setTimeout(() => {
try {
scrollToBottom(el)
} catch (e) {
// ignore
}
}, 120)
}

prevCountRef.current = groupedMessages.length
lastTextRef.current = lastText
}, [groupedMessages.length, lastText])

// observe DOM changes to catch streaming incremental updates
useEffect(() => {
const el = scrollRef.current
if (!el) return

const observer = new MutationObserver((mutations) => {
try {
const distanceFromBottom = el.scrollHeight - el.scrollTop - el.clientHeight
if (distanceFromBottom < NEAR_BOTTOM_THRESHOLD) {
requestAnimationFrame(() => {
try {
scrollToBottom(el)
} catch (e) {
// ignore
}
})
}
} catch (e) {
// ignore
}
})

observer.observe(el, { childList: true, subtree: true, characterData: true })
return () => observer.disconnect()
}, [scrollRef]);

return (
<Pane className="p-0">
<div className="bg-white top-0 sticky p-2 px-4 pb-2 border-b text-sm flex items-center gap-1">
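The auto-scroll logic above has three cooperating parts: a helper that walks up to the element that actually overflows (the styled Pane, not an inner wrapper), a near-bottom threshold so output only follows the stream when the user has not scrolled away, and a MutationObserver that catches DOM growth between React renders. A stripped-down sketch of the first two parts, with illustrative names:

```typescript
// Sketch: pin the nearest genuinely scrollable ancestor to its bottom.
function scrollNearestScrollableToBottom(start: HTMLElement | null): void {
  let cur: HTMLElement | null = start;
  while (cur && cur !== document.body) {
    const { overflowY } = window.getComputedStyle(cur);
    const scrollable =
      overflowY === "auto" || overflowY === "scroll" || overflowY === "overlay";
    if (scrollable && cur.scrollHeight > cur.clientHeight) {
      cur.scrollTop = cur.scrollHeight; // found the real scroller
      return;
    }
    cur = cur.parentElement;
  }
  // Nothing overflowed on the way up: fall back to the window.
  window.scrollTo(0, document.documentElement.scrollHeight);
}

// Sketch: follow the stream only when the user is already near the bottom,
// so scrolling back through history is not hijacked by incoming chunks.
function shouldFollow(el: HTMLElement, thresholdPx = 120): boolean {
  return el.scrollHeight - el.scrollTop - el.clientHeight < thresholdPx;
}
```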
29 changes: 17 additions & 12 deletions contrib/copilot-plugin/src/components/pane.tsx
@@ -1,19 +1,24 @@
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT License.

import React from "react";
import { cn } from "../libs/utils";

interface PaneProps extends React.HTMLAttributes<HTMLDivElement> {}

export const Pane: React.FC<PaneProps> = ({ children, className }) => {
return (
<div
className={cn(
"bg-background flex-1 p-4 border-2 border-gray-300 rounded-md overflow-y-auto flex flex-col",
className
)}
>
{children}
</div>
);
};
export const Pane = React.forwardRef<HTMLDivElement, PaneProps>(
({ children, className }, ref) => {
return (
<div
ref={ref}
className={cn(
"bg-background flex-1 p-4 border-2 border-gray-300 rounded-md overflow-y-auto flex flex-col",
className
)}
>
{children}
</div>
);
}
);
Pane.displayName = "Pane";
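Converting Pane to React.forwardRef is what makes the scroll logic above workable: a parent can now attach a ref directly to Pane's underlying scrollable div rather than wrapping it in an extra element. An illustrative usage, assuming a hypothetical parent component:

```tsx
// Illustrative: with forwardRef, the ref lands on Pane's scrollable <div>.
const MessagesPane: React.FC = () => {
  const scrollRef = React.useRef<HTMLDivElement>(null);
  React.useEffect(() => {
    // e.g. pin to bottom on mount
    if (scrollRef.current) scrollRef.current.scrollTop = scrollRef.current.scrollHeight;
  }, []);
  return <Pane ref={scrollRef}>{/* grouped chat messages */}</Pane>;
};
```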
35 changes: 34 additions & 1 deletion contrib/copilot-plugin/src/libs/state.ts
@@ -55,6 +55,9 @@
setAllModelsInCurrentJob: (models: string[]) => void;
setCurrentModel: (model: string | null) => void;
addChat: (chat: ChatMessage) => void;
appendToLastAssistant: (chunk: string) => void;
replaceLastAssistant: (text: string) => void;
setLastAssistantMessageInfo: (info: any) => void;

// Conversation management actions
generateNewConversationId: () => void;
@@ -98,7 +101,37 @@ export const useChatStore = create<State>((set) => ({
setCurrentModel: (model) => set({ currentModel: model }),

addChat: (log) => set((state) => ({ chatMsgs: [...state.chatMsgs, log] })),

appendToLastAssistant: (chunk: string) => set((state) => {
const msgs = [...state.chatMsgs];
for (let i = msgs.length - 1; i >= 0; i--) {
if (msgs[i].role === 'assistant') {
msgs[i] = { ...msgs[i], message: (msgs[i].message || '') + chunk };
break;
}
}
return { chatMsgs: msgs };
}),
replaceLastAssistant: (text: string) => set((state) => {
const msgs = [...state.chatMsgs];
for (let i = msgs.length - 1; i >= 0; i--) {
if (msgs[i].role === 'assistant') {
msgs[i] = { ...msgs[i], message: text };
break;
}
}
return { chatMsgs: msgs };
}),
setLastAssistantMessageInfo: (info: any) => set((state) => {
const msgs = [...state.chatMsgs];
for (let i = msgs.length - 1; i >= 0; i--) {
if (msgs[i].role === 'assistant') {
msgs[i] = { ...msgs[i], messageInfo: info };
break;
}
}
return { chatMsgs: msgs };
}),

// Generate a new conversation ID (useful for starting a new conversation)
generateNewConversationId: () => set((state) => ({
currentConversationId: uuidv4(),
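Together with addChat, the three new actions give streaming a simple contract: create an empty assistant placeholder, grow it chunk by chunk, and attach the backend's messageInfo when the meta event arrives so thumbs-up/down feedback can reference the right turnId. A hedged usage sketch against the store defined above (message content and turnId are illustrative):

```typescript
// Illustrative streaming driver against the store defined above.
const store = useChatStore.getState();

// 1. Placeholder the stream will write into.
store.addChat({ role: "assistant", message: "", timestamp: new Date() } as ChatMessage);

// 2. Incremental chunks from 'append' events.
store.appendToLastAssistant("Here are your jobs:\n");
store.appendToLastAssistant("- job-1 (running)\n");

// 3. 'meta' event: attach the turnId that feedback messages will echo.
store.setLastAssistantMessageInfo({ turnId: "abc-123", type: "answer" });
```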