diff --git a/frontend/AGENTS.md b/frontend/AGENTS.md
index 618d215..0aad01b 100644
--- a/frontend/AGENTS.md
+++ b/frontend/AGENTS.md
@@ -76,6 +76,12 @@ src/
- **MagicUI** - Magic UI components
- **React Bits** - React bits components
+### Interaction Ownership
+
+- `src/app/workspace/chats/[thread_id]/page.tsx` owns composer busy-state wiring.
+- `src/core/threads/hooks.ts` owns pre-submit upload state and thread submission.
+- `src/hooks/usePoseStream.ts` is a passive store selector; global WebSocket lifecycle stays in `App.tsx`.
+
## Resources
- [LangGraph Documentation](https://langchain-ai.github.io/langgraph/)
diff --git a/frontend/src/app/workspace/chats/[thread_id]/page.tsx b/frontend/src/app/workspace/chats/[thread_id]/page.tsx
index 3147358..d47853c 100644
--- a/frontend/src/app/workspace/chats/[thread_id]/page.tsx
+++ b/frontend/src/app/workspace/chats/[thread_id]/page.tsx
@@ -32,7 +32,7 @@ export default function ChatPage() {
const { showNotification } = useNotification();
- const [thread, sendMessage] = useThreadStream({
+ const [thread, sendMessage, isUploading] = useThreadStream({
threadId: isNewThread ? undefined : threadId,
context: settings.context,
isMock,
@@ -127,7 +127,7 @@ export default function ChatPage() {
extraHeader={
isNewThread &&
}
- disabled={env.NEXT_PUBLIC_STATIC_WEBSITE_ONLY === "true"}
+ disabled={env.NEXT_PUBLIC_STATIC_WEBSITE_ONLY === "true" || isUploading}
onContextChange={(context) => setSettings("context", context)}
onSubmit={handleSubmit}
onStop={handleStop}
diff --git a/frontend/src/core/threads/hooks.ts b/frontend/src/core/threads/hooks.ts
index 21ae54b..722bc9b 100644
--- a/frontend/src/core/threads/hooks.ts
+++ b/frontend/src/core/threads/hooks.ts
@@ -156,6 +156,8 @@ export function useThreadStream({
// Optimistic messages shown before the server stream responds
const [optimisticMessages, setOptimisticMessages] = useState([]);
+ const [isUploading, setIsUploading] = useState(false);
+ const sendInFlightRef = useRef(false);
// Track message count before sending so we know when server has responded
const prevMsgCountRef = useRef(thread.messages.length);
@@ -175,6 +177,11 @@ export function useThreadStream({
message: PromptInputMessage,
extraContext?: Record<string, unknown>,
) => {
+ if (sendInFlightRef.current) {
+ return;
+ }
+ sendInFlightRef.current = true;
+
const text = message.text.trim();
// Capture current count before showing optimistic messages
@@ -217,6 +224,7 @@ export function useThreadStream({
try {
// Upload files first if any
if (message.files && message.files.length > 0) {
+ setIsUploading(true);
try {
// Convert FileUIPart to File objects by fetching blob URLs
const filePromises = message.files.map(async (fileUIPart) => {
@@ -293,6 +301,8 @@ export function useThreadStream({
toast.error(errorMessage);
setOptimisticMessages([]);
throw error;
+ } finally {
+ setIsUploading(false);
}
}
@@ -342,7 +352,10 @@ export function useThreadStream({
void queryClient.invalidateQueries({ queryKey: ["threads", "search"] });
} catch (error) {
setOptimisticMessages([]);
+ setIsUploading(false);
throw error;
+ } finally {
+ sendInFlightRef.current = false;
}
},
[thread, _handleOnStart, t.uploads.uploadingFiles, context, queryClient],
@@ -357,7 +370,7 @@ export function useThreadStream({
} as typeof thread)
: thread;
- return [mergedThread, sendMessage] as const;
+ return [mergedThread, sendMessage, isUploading] as const;
}
export function useThreads(