From 78a12d07ef43518bbc7cd8cb9edcaaf01ec4e2c7 Mon Sep 17 00:00:00 2001
From: marijnvg-tng
Date: Thu, 19 Dec 2024 14:20:02 +0100
Subject: [PATCH] Ensure that RAG state is disabled when the LLM inference
 model changes

Mimic the "switchTab" logic to ensure the correct RAG state: when the
backend switches to LLAMA.CPP, RAG is disabled; otherwise the previous
RAG state is restored.

Signed-off-by: marijnvg-tng
---
 WebUI/src/App.vue | 10 ++++++++++
 1 file changed, 10 insertions(+)

diff --git a/WebUI/src/App.vue b/WebUI/src/App.vue
index efd2cba3..184b344d 100644
--- a/WebUI/src/App.vue
+++ b/WebUI/src/App.vue
@@ -142,10 +142,12 @@ import {useTheme} from "./assets/js/store/theme.ts";
 import AddLLMDialog from "@/components/AddLLMDialog.vue";
 import WarningDialog from "@/components/WarningDialog.vue";
 import {useBackendServices} from "./assets/js/store/backendServices.ts";
+import {useTextInference} from "@/assets/js/store/textInference.ts";
 
 const backendServices = useBackendServices();
 const theme = useTheme();
 const globalSetup = useGlobalSetup();
+const textInference = useTextInference()
 
 const enhanceCompt = ref<InstanceType<typeof Enhance>>();
 const answer = ref<InstanceType<typeof Answer>>();
@@ -254,6 +256,14 @@ function switchTab(index: number) {
   }
 }
 
+watch(textInference, (newSetting, oldSetting) => {
+  if (newSetting.backend === 'LLAMA.CPP') {
+    answer.value!.disableRag();
+  } else {
+    answer.value!.restoreRagState();
+  }
+})
+
 function miniWindow() {
   window.electronAPI.miniWindow();
 }
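
For context, a minimal sketch (not part of the patch) of how a component such as
Answer.vue could expose the disableRag() and restoreRagState() methods that the new
watcher calls. The ragEnabled and savedRagState names below are illustrative
assumptions, not identifiers taken from the actual codebase:

    import { ref } from "vue";

    // ASSUMPTION: current RAG toggle state shown in the UI.
    const ragEnabled = ref(false);
    // ASSUMPTION: last user-chosen state, remembered so it can be restored later.
    let savedRagState = false;

    function disableRag() {
      // Remember the user's choice, then force RAG off.
      savedRagState = ragEnabled.value;
      ragEnabled.value = false;
    }

    function restoreRagState() {
      // Bring back whatever the user had selected before RAG was forced off.
      ragEnabled.value = savedRagState;
    }

    // Inside <script setup>, the parent (App.vue) would reach these through a
    // template ref after: defineExpose({ disableRag, restoreRagState });

With this shape, switching to LLAMA.CPP (presumably because RAG is not available for
that backend) turns RAG off without losing the user's previous choice, mirroring what
switchTab already does when tabs change.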