Skip to content

Commit

Permalink
Disable Keip Assistant chat panel if LLM server is unavailable
Browse files · Browse the repository at this point in the history
  • Loading branch information
a-asaad committed Apr 20, 2024
1 parent 41e7665 commit 68ca2b7
Show file tree
Hide file tree
Showing 2 changed files with 48 additions and 3 deletions.
37 changes: 36 additions & 1 deletion ui/src/components/assistant/AssistantChatPanel.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -33,6 +33,39 @@ interface ChatInputProps {

const llmClient = new LlmClient()

/**
 * Logs whether the Keip Assistant is being enabled or disabled, based on
 * whether an LLM server was reachable at the client's configured base URL.
 */
const logKeipAssistantStatus = (available: boolean) => {
  const message = available
    ? `Enable Keip Assistant: An LLM server is available at ${llmClient.serverBaseUrl}`
    : `Disable Keip Assistant: Did not find an LLM server at ${llmClient.serverBaseUrl}`
  console.log(message)
}

/**
 * React hook that pings the LLM server once on mount and reports whether it
 * responded. Returns false until (and unless) the ping succeeds.
 *
 * The in-flight ping is aborted when the component unmounts, and the
 * `cancelled` flag guarantees no logging or state update happens after
 * unmount (the original resolved the awaited `success` on abort and then
 * called `setIsAvailable` on an unmounted component).
 */
const useLlmServerStatus = () => {
  const [isAvailable, setIsAvailable] = useState(false)

  useEffect(() => {
    let cancelled = false
    const { success, abort } = llmClient.ping()

    void success.then((ok) => {
      if (cancelled) return
      logKeipAssistantStatus(ok)
      setIsAvailable(ok)
    })

    return () => {
      cancelled = true
      abort()
    }
  }, [])

  return isAvailable
}

const ChatInput = ({ handleInput }: ChatInputProps) => {
const [content, setContent] = useState("")
const [isWaiting, setWaiting] = useState(false)
Expand Down Expand Up @@ -78,7 +111,7 @@ const ChatInput = ({ handleInput }: ChatInputProps) => {
hasIconOnly
iconDescription="cancel"
renderIcon={() => <CloseOutline size={24} />}
onClick={() => llmClient.abort()}
onClick={() => llmClient.abortPrompt()}
/>
</>
) : (
Expand Down Expand Up @@ -139,6 +172,7 @@ const AssistantChatPanel = () => {
const [isOpen, setOpen] = useState(false)
const [chatEntries, setChatEntries] = useState<ChatEntry[]>([])
const [streamingResponse, setStreamingResponse] = useState("")
const isLlmServerAvailable = useLlmServerStatus()

const handleStreamUpdate = (chunk: string) =>
setStreamingResponse((prev) => prev + chunk)
Expand Down Expand Up @@ -174,6 +208,7 @@ const AssistantChatPanel = () => {
kind="secondary"
size="lg"
onClick={() => setOpen((prev) => !prev)}
disabled={!isLlmServerAvailable}
>
<MachineLearning />
</IconButton>
Expand Down
14 changes: 12 additions & 2 deletions ui/src/components/assistant/llmClient.ts
Original file line number Diff line number Diff line change
Expand Up @@ -21,10 +21,11 @@ interface PromptResponse {
class LlmClient {
private llm
private abortCtrl
public serverBaseUrl = "http://localhost:11434"

constructor() {
this.llm = new Ollama({
baseUrl: "http://localhost:11434",
baseUrl: this.serverBaseUrl,
maxRetries: 3,
model: "mistral",
format: "json",
Expand Down Expand Up @@ -68,10 +69,19 @@ class LlmClient {
}
}

public abort(): void {
public abortPrompt(): void {
this.abortCtrl.abort()
}

/**
 * Checks whether an LLM server is reachable at `serverBaseUrl`.
 *
 * `success` resolves to true only for an HTTP-ok response; network errors,
 * aborts, and the 5-second timeout all resolve it to false (it never
 * rejects). The returned `abort` cancels the in-flight request.
 */
public ping(): { success: Promise<boolean>; abort: () => void } {
  const ctrl = new AbortController()
  // Give the server at most 5s to answer before giving up.
  const timer = setTimeout(() => ctrl.abort(), 5000)
  const success = fetch(this.serverBaseUrl, { signal: ctrl.signal })
    .then((res) => res.ok)
    .catch(() => false)
    // Clear the abort timer once the request settles, instead of leaving
    // it pending to fire a pointless abort() on a finished request.
    .finally(() => clearTimeout(timer))
  return { success, abort: () => ctrl.abort() }
}

private async generatePrompt() {
const nodes = getNodesView()
if (nodes && nodes.length > 0) {
Expand Down

0 comments on commit 68ca2b7

Please sign in to comment.