Enhance chat UI and add compatibility with Anthropic thinking messages (#1018)

This commit is contained in:
JeffJiang
2026-03-08 20:19:31 +08:00
committed by GitHub
parent 3512279ce3
commit cf9af1fe75
9 changed files with 213 additions and 129 deletions

View File

@@ -57,6 +57,7 @@ export function ArtifactsProvider({ children }: ArtifactsProviderProps) {
const deselect = useCallback(() => {
setSelectedArtifact(null);
setAutoSelect(true);
setOpen(false);
}, []);
const value: ArtifactsContextType = {

View File

@@ -27,7 +27,9 @@ const ChatBox: React.FC<{ children: React.ReactNode; threadId: string }> = ({
threadId,
}) => {
const { thread } = useThread();
const threadIdRef = useRef(threadId);
const layoutRef = useRef<GroupImperativeHandle>(null);
const {
artifacts,
open: artifactsOpen,
@@ -40,13 +42,22 @@ const ChatBox: React.FC<{ children: React.ReactNode; threadId: string }> = ({
const [autoSelectFirstArtifact, setAutoSelectFirstArtifact] = useState(true);
useEffect(() => {
if (threadIdRef.current !== threadId) {
threadIdRef.current = threadId;
deselect();
}
// Update artifacts from the current thread
setArtifacts(thread.values.artifacts);
// Deselect if the currently selected artifact no longer exists
if (
thread.values.artifacts?.length === 0 ||
(selectedArtifact && !thread.values.artifacts?.includes(selectedArtifact))
selectedArtifact &&
!thread.values.artifacts?.includes(selectedArtifact)
) {
deselect();
}
if (
env.NEXT_PUBLIC_STATIC_WEBSITE_ONLY === "true" &&
autoSelectFirstArtifact
@@ -57,6 +68,7 @@ const ChatBox: React.FC<{ children: React.ReactNode; threadId: string }> = ({
}
}
}, [
threadId,
autoSelectFirstArtifact,
deselect,
selectArtifact,

View File

@@ -54,13 +54,15 @@ export function MessageList({
<ConversationContent className="mx-auto w-full max-w-(--container-width-md) gap-8 pt-12">
{groupMessages(messages, (group) => {
if (group.type === "human" || group.type === "assistant") {
return (
<MessageListItem
key={group.id}
message={group.messages[0]!}
isLoading={thread.isLoading}
/>
);
return group.messages.map((msg) => {
return (
<MessageListItem
key={`${group.id}/${msg.id}`}
message={msg}
isLoading={thread.isLoading}
/>
);
});
} else if (group.type === "assistant:clarification") {
const message = group.messages[0];
if (message && hasContent(message)) {

View File

@@ -18,15 +18,17 @@ export function ThreadTitle({
const { t } = useI18n();
const { isNewThread } = useThreadChat();
useEffect(() => {
const pageTitle = isNewThread
? t.pages.newChat
: thread.values?.title && thread.values.title !== "Untitled"
? thread.values.title
: t.pages.untitled;
let _title = t.pages.untitled;
if (thread.values?.title) {
_title = thread.values.title;
} else if (isNewThread) {
_title = t.pages.newChat;
}
if (thread.isThreadLoading) {
document.title = `Loading... - ${t.pages.appName}`;
} else {
document.title = `${pageTitle} - ${t.pages.appName}`;
document.title = `${_title} - ${t.pages.appName}`;
}
}, [
isNewThread,

View File

@@ -33,96 +33,92 @@ export function groupMessages<T>(
if (messages.length === 0) {
return [];
}
const groups: MessageGroup[] = [];
// Returns the last group if it can still accept tool messages
// (i.e. it's an in-flight processing group, not a terminal human/assistant group).
function lastOpenGroup() {
const last = groups[groups.length - 1];
if (
last &&
last.type !== "human" &&
last.type !== "assistant" &&
last.type !== "assistant:clarification"
) {
return last;
}
return null;
}
for (const message of messages) {
const lastGroup = groups[groups.length - 1];
if (message.type === "human") {
groups.push({
id: message.id,
type: "human",
messages: [message],
});
} else if (message.type === "tool") {
// Check if this is a clarification tool message
groups.push({ id: message.id, type: "human", messages: [message] });
continue;
}
if (message.type === "tool") {
if (isClarificationToolMessage(message)) {
// Add to processing group if available (to maintain tool call association)
if (
lastGroup &&
lastGroup.type !== "human" &&
lastGroup.type !== "assistant" &&
lastGroup.type !== "assistant:clarification"
) {
lastGroup.messages.push(message);
}
// Also create a separate clarification group for prominent display
// Add to the preceding processing group to preserve tool-call association,
// then also open a standalone clarification group for prominent display.
lastOpenGroup()?.messages.push(message);
groups.push({
id: message.id,
type: "assistant:clarification",
messages: [message],
});
} else if (
lastGroup &&
lastGroup.type !== "human" &&
lastGroup.type !== "assistant" &&
lastGroup.type !== "assistant:clarification"
) {
lastGroup.messages.push(message);
} else {
throw new Error(
"Tool message must be matched with a previous assistant message with tool calls",
);
const open = lastOpenGroup();
if (open) {
open.messages.push(message);
} else {
console.error(
"Unexpected tool message outside a processing group",
message,
);
}
}
} else if (message.type === "ai") {
if (hasReasoning(message) || hasToolCalls(message)) {
if (hasPresentFiles(message)) {
continue;
}
if (message.type === "ai") {
if (hasPresentFiles(message)) {
groups.push({
id: message.id,
type: "assistant:present-files",
messages: [message],
});
} else if (hasSubagent(message)) {
groups.push({
id: message.id,
type: "assistant:subagent",
messages: [message],
});
} else if (hasReasoning(message) || hasToolCalls(message)) {
const lastGroup = groups[groups.length - 1];
// Accumulate consecutive intermediate AI messages into one processing group.
if (lastGroup?.type !== "assistant:processing") {
groups.push({
id: message.id,
type: "assistant:present-files",
messages: [message],
});
} else if (hasSubagent(message)) {
groups.push({
id: message.id,
type: "assistant:subagent",
type: "assistant:processing",
messages: [message],
});
} else {
if (lastGroup?.type !== "assistant:processing") {
groups.push({
id: message.id,
type: "assistant:processing",
messages: [],
});
}
const currentGroup = groups[groups.length - 1];
if (currentGroup?.type === "assistant:processing") {
currentGroup.messages.push(message);
} else {
throw new Error(
"Assistant message with reasoning or tool calls must be preceded by a processing group",
);
}
lastGroup.messages.push(message);
}
}
// Not an else-if: a message with reasoning + content (but no tool calls) goes
// into the processing group above AND gets its own assistant bubble here.
if (hasContent(message) && !hasToolCalls(message)) {
groups.push({
id: message.id,
type: "assistant",
messages: [message],
});
groups.push({ id: message.id, type: "assistant", messages: [message] });
}
}
}
const resultsOfGroups: T[] = [];
for (const group of groups) {
const resultOfGroup = mapper(group);
if (resultOfGroup !== undefined && resultOfGroup !== null) {
resultsOfGroups.push(resultOfGroup);
}
}
return resultsOfGroups;
return groups
.map(mapper)
.filter((result) => result !== undefined && result !== null) as T[];
}
export function extractTextFromMessage(message: Message) {
@@ -162,12 +158,21 @@ export function extractContentFromMessage(message: Message) {
}
export function extractReasoningContentFromMessage(message: Message) {
if (message.type !== "ai" || !message.additional_kwargs) {
if (message.type !== "ai") {
return null;
}
if ("reasoning_content" in message.additional_kwargs) {
if (
message.additional_kwargs &&
"reasoning_content" in message.additional_kwargs
) {
return message.additional_kwargs.reasoning_content as string | null;
}
if (Array.isArray(message.content)) {
const part = message.content[0];
if (part && "thinking" in part) {
return part.thinking as string;
}
}
return null;
}
@@ -202,10 +207,18 @@ export function hasContent(message: Message) {
}
export function hasReasoning(message: Message) {
return (
message.type === "ai" &&
typeof message.additional_kwargs?.reasoning_content === "string"
);
if (message.type !== "ai") {
return false;
}
if (typeof message.additional_kwargs?.reasoning_content === "string") {
return true;
}
if (Array.isArray(message.content)) {
const part = message.content[0];
// Compatible with the Anthropic gateway
return (part as unknown as { type: "thinking" })?.type === "thinking";
}
return false;
}
export function hasToolCalls(message: Message) {

View File

@@ -40,39 +40,83 @@ export function useThreadStream({
onToolEnd,
}: ThreadStreamOptions) {
const { t } = useI18n();
const [_threadId, setThreadId] = useState<string | null>(threadId ?? null);
const threadIdRef = useRef<string | null>(threadId ?? null);
const startedRef = useRef(false);
const listeners = useRef({
onStart,
onFinish,
onToolEnd,
});
// Keep listeners ref updated with latest callbacks
useEffect(() => {
if (_threadId && _threadId !== threadId) {
setThreadId(threadId ?? null);
listeners.current = { onStart, onFinish, onToolEnd };
}, [onStart, onFinish, onToolEnd]);
useEffect(() => {
if (threadIdRef.current && threadIdRef.current !== threadId) {
threadIdRef.current = threadId ?? null;
startedRef.current = false; // Reset for new thread
}
}, [threadId, _threadId]);
}, [threadId]);
const _handleStart = useCallback((id: string) => {
if (!startedRef.current) {
listeners.current.onStart?.(id);
startedRef.current = true;
}
}, []);
const queryClient = useQueryClient();
const updateSubtask = useUpdateSubtask();
const thread = useStream<AgentThreadState>({
client: getAPIClient(isMock),
assistantId: "lead_agent",
threadId: _threadId,
threadId: threadIdRef.current,
reconnectOnMount: true,
fetchStateHistory: { limit: 1 },
onCreated(meta) {
setThreadId(meta.thread_id);
if (!startedRef.current) {
onStart?.(meta.thread_id);
startedRef.current = true;
}
threadIdRef.current = meta.thread_id;
_handleStart(meta.thread_id);
},
onLangChainEvent(event) {
if (event.event === "on_tool_end") {
onToolEnd?.({
listeners.current.onToolEnd?.({
name: event.name,
data: event.data,
});
}
},
onUpdateEvent(data) {
const updates: Array<Partial<AgentThreadState> | null> = Object.values(
data || {},
);
for (const update of updates) {
if (update && "title" in update && update.title) {
void queryClient.setQueriesData(
{
queryKey: ["threads", "search"],
exact: false,
},
(oldData: Array<AgentThread> | undefined) => {
return oldData?.map((t) => {
if (t.thread_id === threadIdRef.current) {
return {
...t,
values: {
...t.values,
title: update.title,
},
};
}
return t;
});
},
);
}
}
},
onCustomEvent(event: unknown) {
if (
typeof event === "object" &&
@@ -89,7 +133,7 @@ export function useThreadStream({
}
},
onFinish(state) {
onFinish?.(state.values);
listeners.current.onFinish?.(state.values);
void queryClient.invalidateQueries({ queryKey: ["threads", "search"] });
},
});
@@ -150,10 +194,7 @@ export function useThreadStream({
}
setOptimisticMessages(newOptimistic);
if (!startedRef.current) {
onStart?.(threadId);
startedRef.current = true;
}
_handleStart(threadId);
let uploadedFileInfo: UploadedFileInfo[] = [];
@@ -289,7 +330,7 @@ export function useThreadStream({
throw error;
}
},
[thread, t.uploads.uploadingFiles, onStart, context, queryClient],
[thread, _handleStart, t.uploads.uploadingFiles, context, queryClient],
);
// Merge thread with optimistic messages for display