From 68ca2b70a1fe30cec01c64b77ece73aca88943a6 Mon Sep 17 00:00:00 2001
From: Abdullah Asaad
Date: Fri, 19 Apr 2024 22:54:26 -0700
Subject: [PATCH] Disable Keip Assistant chat panel if LLM server is unavailable

---
 .../assistant/AssistantChatPanel.tsx     | 37 ++++++++++++++++++-
 ui/src/components/assistant/llmClient.ts | 14 ++++++-
 2 files changed, 48 insertions(+), 3 deletions(-)

diff --git a/ui/src/components/assistant/AssistantChatPanel.tsx b/ui/src/components/assistant/AssistantChatPanel.tsx
index 0e59d3e..06b2310 100644
--- a/ui/src/components/assistant/AssistantChatPanel.tsx
+++ b/ui/src/components/assistant/AssistantChatPanel.tsx
@@ -33,6 +33,39 @@ interface ChatInputProps {
 
 const llmClient = new LlmClient()
 
+const logKeipAssistantStatus = (available: boolean) => {
+  if (available) {
+    console.log(
+      `Enable Keip Assistant: An LLM server is available at ${llmClient.serverBaseUrl}`
+    )
+  } else {
+    console.log(
+      `Disable Keip Assistant: Did not find an LLM server at ${llmClient.serverBaseUrl}`
+    )
+  }
+}
+
+const useLlmServerStatus = () => {
+  const [isAvailable, setIsAvailable] = useState(false)
+
+  useEffect(() => {
+    let abortPing: () => void
+    void (async () => {
+      const result = llmClient.ping()
+      abortPing = result.abort
+      const success = await result.success
+      logKeipAssistantStatus(success)
+      setIsAvailable(success)
+    })()
+
+    return () => {
+      abortPing && abortPing()
+    }
+  }, [])
+
+  return isAvailable
+}
+
 const ChatInput = ({ handleInput }: ChatInputProps) => {
   const [content, setContent] = useState("")
   const [isWaiting, setWaiting] = useState(false)
@@ -78,7 +111,7 @@ const ChatInput = ({ handleInput }: ChatInputProps) => {
           hasIconOnly
           iconDescription="cancel"
           renderIcon={() => }
-          onClick={() => llmClient.abort()}
+          onClick={() => llmClient.abortPrompt()}
         />
       ) : (
@@ -139,6 +172,7 @@ const AssistantChatPanel = () => {
   const [isOpen, setOpen] = useState(false)
   const [chatEntries, setChatEntries] = useState([])
   const [streamingResponse, setStreamingResponse] = useState("")
+  const isLlmServerAvailable = useLlmServerStatus()
 
   const handleStreamUpdate = (chunk: string) =>
     setStreamingResponse((prev) => prev + chunk)
@@ -174,6 +208,7 @@ const AssistantChatPanel = () => {
         kind="secondary"
         size="lg"
         onClick={() => setOpen((prev) => !prev)}
+        disabled={!isLlmServerAvailable}
       >
diff --git a/ui/src/components/assistant/llmClient.ts b/ui/src/components/assistant/llmClient.ts
index a32e92a..d30de67 100644
--- a/ui/src/components/assistant/llmClient.ts
+++ b/ui/src/components/assistant/llmClient.ts
@@ -21,10 +21,11 @@ interface PromptResponse {
 class LlmClient {
   private llm
   private abortCtrl
+  public serverBaseUrl = "http://localhost:11434"
 
   constructor() {
     this.llm = new Ollama({
-      baseUrl: "http://localhost:11434",
+      baseUrl: this.serverBaseUrl,
       maxRetries: 3,
       model: "mistral",
       format: "json",
@@ -68,10 +69,19 @@ class LlmClient {
   }
 
-  public abort(): void {
+  public abortPrompt(): void {
     this.abortCtrl.abort()
   }
 
+  public ping(): { success: Promise<boolean>; abort: () => void } {
+    const ctrl = new AbortController()
+    setTimeout(() => ctrl.abort(), 5000)
+    const success = fetch(this.serverBaseUrl, { signal: ctrl.signal })
+      .then((res) => res.ok)
+      .catch(() => false)
+    return { success: success, abort: () => ctrl.abort() }
+  }
+
   private async generatePrompt() {
     const nodes = getNodesView()
     if (nodes && nodes.length > 0) {
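
Note (not part of the patch): below is a minimal standalone sketch of the availability-check pattern the diff introduces in LlmClient.ping(): a fetch that resolves to a boolean, a timeout that aborts the request, and a caller-controlled abort handle. The helper name checkServer and the timeout parameter are hypothetical; it assumes an environment where fetch and AbortController are globals (modern browsers or Node 18+).

// Sketch of the ping-style availability check, assuming global fetch/AbortController.
const checkServer = (baseUrl: string, timeoutMs = 5000) => {
  const ctrl = new AbortController()
  // Abort the request if the server does not answer within the timeout window.
  const timer = setTimeout(() => ctrl.abort(), timeoutMs)

  const success = fetch(baseUrl, { signal: ctrl.signal })
    .then((res) => res.ok)           // server reachable and responding
    .catch(() => false)              // network error, refused connection, or abort
    .finally(() => clearTimeout(timer))

  return { success, abort: () => ctrl.abort() }
}

// Usage: await the boolean, or abort early (the patch's React hook aborts on unmount).
const check = checkServer("http://localhost:11434")
void check.success.then((ok) =>
  console.log(ok ? "LLM server reachable" : "LLM server unreachable")
)
// check.abort() would cancel the pending request before the timeout fires.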