Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
31 changes: 11 additions & 20 deletions frontend/app/aipanel/aimessage.tsx
Original file line number Diff line number Diff line change
@@ -1,11 +1,13 @@
// Copyright 2025, Command Line Inc.
// SPDX-License-Identifier: Apache-2.0

import { WaveStreamdown } from "@/app/element/streamdown";
import { cn } from "@/util/util";
import { useAtomValue } from "jotai";
import { memo } from "react";
import { Streamdown } from "streamdown";
import { getFileIcon } from "./ai-utils";
import { WaveUIMessage, WaveUIMessagePart } from "./aitypes";
import { WaveAIModel } from "./waveai-model";

const AIThinking = memo(() => (
<div className="flex items-center gap-2">
Expand Down Expand Up @@ -72,30 +74,21 @@ interface AIMessagePartProps {
}

const AIMessagePart = memo(({ part, role, isStreaming }: AIMessagePartProps) => {
const model = WaveAIModel.getInstance();

if (part.type === "text") {
const content = part.text ?? "";

if (role === "user") {
return <div className="whitespace-pre-wrap break-words">{content}</div>;
} else {
return (
<Streamdown
<WaveStreamdown
text={content}
parseIncompleteMarkdown={isStreaming}
className="markdown-content text-gray-100"
shikiTheme={["github-dark", "github-dark"]}
controls={{
code: true,
table: true,
mermaid: true,
}}
mermaidConfig={{
theme: "dark",
darkMode: true,
}}
defaultOrigin="http://localhost"
>
{content}
</Streamdown>
className="text-gray-100"
codeBlockMaxWidthAtom={model.codeBlockMaxWidth}
/>
);
}
}
Expand Down Expand Up @@ -139,9 +132,7 @@ export const AIMessage = memo(({ message, isStreaming }: AIMessageProps) => {
<div
className={cn(
"px-2 py-2 rounded-lg",
message.role === "user"
? "bg-accent-800 text-white max-w-[calc(100%-20px)]"
: "bg-gray-800 text-gray-100"
message.role === "user" ? "bg-accent-800 text-white max-w-[calc(100%-20px)]" : null
)}
>
{showThinkingOnly ? (
Expand Down
24 changes: 24 additions & 0 deletions frontend/app/aipanel/aipanel.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -34,6 +34,7 @@ const AIPanelComponentInner = memo(({ className, onClose }: AIPanelProps) => {
const [isDragOver, setIsDragOver] = useState(false);
const [isLoadingChat, setIsLoadingChat] = useState(true);
const model = WaveAIModel.getInstance();
const containerRef = useRef<HTMLDivElement>(null);
const errorMessage = jotai.useAtomValue(model.errorMessage);
const realMessageRef = useRef<AIMessage>(null);
const inputRef = useRef<AIPanelInputRef>(null);
Expand Down Expand Up @@ -104,10 +105,32 @@ const AIPanelComponentInner = memo(({ className, onClose }: AIPanelProps) => {
const messages = await model.loadChat();
setMessages(messages as any);
setIsLoadingChat(false);
setTimeout(() => {
model.scrollToBottom();
}, 100);
Comment on lines +108 to +110
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

⚠️ Potential issue | 🟡 Minor

Clean up setTimeout on unmount.

If the component unmounts during the 100ms delay, the timeout will still fire and attempt to call model.scrollToBottom(). Hoist the timeout ID to the effect scope and clear it in the effect's own cleanup. Note that returning a cleanup from the async `loadMessages` helper would not work — React only honors a cleanup function returned synchronously from the effect callback itself; the return value of an async helper is discarded:

     useEffect(() => {
+        let timeoutId: ReturnType<typeof setTimeout> | undefined;
         const loadMessages = async () => {
             const messages = await model.loadChat();
             setMessages(messages as any);
             setIsLoadingChat(false);
-            setTimeout(() => {
+            timeoutId = setTimeout(() => {
                 model.scrollToBottom();
             }, 100);
         };
         loadMessages();
+        return () => clearTimeout(timeoutId);
     }, [model, setMessages]);

Committable suggestion skipped: line range outside the PR's diff.

🤖 Prompt for AI Agents
In frontend/app/aipanel/aipanel.tsx around lines 107 to 109, the setTimeout used
to call model.scrollToBottom() is not cleaned up on unmount; store the timeout
ID (e.g., in a ref or local variable captured by the effect) when calling
setTimeout and in the effect's cleanup call clearTimeout(timeoutId) so the
callback won't run after unmount; ensure the timeoutId type is compatible with
TS (number | NodeJS.Timeout depending on environment) and clear it in the
returned cleanup function.

};
loadMessages();
}, [model, setMessages]);

// Track the panel's rendered width in the shared store so derived atoms
// (e.g. codeBlockMaxWidth in WaveAIModel) can size content to the container.
useEffect(() => {
// Push the container's current pixel width into the model's containerWidth atom.
const updateWidth = () => {
if (containerRef.current) {
globalStore.set(model.containerWidth, containerRef.current.offsetWidth);
}
};

// Seed the width immediately on mount, before any resize event fires.
updateWidth();

// Keep the stored width in sync with subsequent layout changes.
const resizeObserver = new ResizeObserver(updateWidth);
if (containerRef.current) {
resizeObserver.observe(containerRef.current);
}

// Stop observing on unmount so the observer doesn't retain the node.
return () => {
resizeObserver.disconnect();
};
}, [model]);

useEffect(() => {
model.ensureRateLimitSet();
}, [model]);
Expand Down Expand Up @@ -279,6 +302,7 @@ const AIPanelComponentInner = memo(({ className, onClose }: AIPanelProps) => {

return (
<div
ref={containerRef}
data-waveai-panel="true"
className={cn(
"bg-gray-900 flex flex-col relative h-[calc(100%-4px)] mt-1",
Expand Down
6 changes: 6 additions & 0 deletions frontend/app/aipanel/aipanelmessages.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@ import { WorkspaceLayoutModel } from "@/app/workspace/workspace-layout-model";
import { useAtomValue } from "jotai";
import { memo, useEffect, useRef } from "react";
import { AIMessage } from "./aimessage";
import { WaveAIModel } from "./waveai-model";

const AIWelcomeMessage = memo(() => {
return (
Expand All @@ -25,6 +26,7 @@ interface AIPanelMessagesProps {
}

export const AIPanelMessages = memo(({ messages, status, isLoadingChat }: AIPanelMessagesProps) => {
const model = WaveAIModel.getInstance();
const isPanelOpen = useAtomValue(WorkspaceLayoutModel.getInstance().panelVisibleAtom);
const messagesEndRef = useRef<HTMLDivElement>(null);
const messagesContainerRef = useRef<HTMLDivElement>(null);
Expand All @@ -37,6 +39,10 @@ export const AIPanelMessages = memo(({ messages, status, isLoadingChat }: AIPane
}
};

// Hand this component's scrollToBottom to the model so code outside the
// messages list (e.g. after chat load) can trigger a scroll without a ref.
// NOTE(review): the callback is never unregistered on unmount — presumably
// harmless for a singleton panel, but verify if this component can remount.
useEffect(() => {
model.registerScrollToBottom(scrollToBottom);
}, [model]);

useEffect(() => {
scrollToBottom();
}, [messages]);
Expand Down
16 changes: 16 additions & 0 deletions frontend/app/aipanel/waveai-model.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -24,12 +24,15 @@ export interface DroppedFile {
export class WaveAIModel {
private static instance: WaveAIModel | null = null;
private inputRef: React.RefObject<AIPanelInputRef> | null = null;
private scrollToBottomCallback: (() => void) | null = null;

widgetAccessAtom!: jotai.Atom<boolean>;
droppedFiles: jotai.PrimitiveAtom<DroppedFile[]> = jotai.atom([]);
chatId!: jotai.PrimitiveAtom<string>;
errorMessage: jotai.PrimitiveAtom<string> = jotai.atom(null) as jotai.PrimitiveAtom<string>;
modelAtom!: jotai.Atom<string>;
containerWidth: jotai.PrimitiveAtom<number> = jotai.atom(0);
codeBlockMaxWidth!: jotai.Atom<number>;

private constructor() {
const tabId = globalStore.get(atoms.staticTabId);
Expand Down Expand Up @@ -58,6 +61,11 @@ export class WaveAIModel {
const value = get(widgetAccessMetaAtom);
return value ?? true;
});

// Derived (read-only) atom: maximum pixel width for code blocks rendered
// inside the AI panel, recomputed whenever containerWidth changes.
// The 35px offset presumably accounts for panel padding/scrollbar —
// TODO confirm against the aimessage styles. Returns 0 until the
// container has been measured (containerWidth defaults to 0).
this.codeBlockMaxWidth = jotai.atom((get) => {
const width = get(this.containerWidth);
return width > 0 ? width - 35 : 0;
});
}

static getInstance(): WaveAIModel {
Expand Down Expand Up @@ -140,6 +148,14 @@ export class WaveAIModel {
this.inputRef = ref;
}

// Stores the scroll callback supplied by AIPanelMessages; replaces any
// previously registered callback.
registerScrollToBottom(callback: () => void) {
this.scrollToBottomCallback = callback;
}

// Invokes the registered scroll callback. Safe no-op if nothing has
// registered yet (callback starts as null).
scrollToBottom() {
this.scrollToBottomCallback?.();
}

focusInput() {
if (!WorkspaceLayoutModel.getInstance().getAIPanelVisible()) {
WorkspaceLayoutModel.getInstance().setAIPanelVisible(true);
Expand Down
Loading
Loading