Skip to content

Commit

Permalink
Merge pull request #3 from TNG/Feat/26--LLM_download_from_dropdown_ba…
Browse files Browse the repository at this point in the history
…ckup

Feat/26  llm download from dropdown backup
  • Loading branch information
mschuettlerTNG authored Nov 26, 2024
2 parents 96c6c9f + 2916814 commit f7a6873
Show file tree
Hide file tree
Showing 12 changed files with 267 additions and 29 deletions.
40 changes: 37 additions & 3 deletions WebUI/src/App.vue
Original file line number Diff line number Diff line change
Expand Up @@ -66,17 +66,19 @@
@show-download-model-confirm="showDownloadModelConfirm"></create>
<enhance v-show="activeTabIdx == 1" ref="enhanceCompt" @show-download-model-confirm="showDownloadModelConfirm">
</enhance>
<answer v-show="activeTabIdx == 2" @show-download-model-confirm="showDownloadModelConfirm"></answer>
<answer v-show="activeTabIdx == 2" ref = "answer" @show-download-model-confirm="showDownloadModelConfirm" @show-model-request="showModelRequest"></answer>
<learn-more v-show="activeTabIdx == 3"></learn-more>
<app-settings v-if="showSetting" @close="hideAppSettings" @show-download-model-confirm="showDownloadModelConfirm"></app-settings>
</div>
<download-dialog v-show="showDowloadDlg" ref="downloadDigCompt" @close="showDowloadDlg = false"></download-dialog>
<add-l-l-m-dialog v-show="showModelRequestDialog" ref="addLLMCompt" @close="showModelRequestDialog = false" @call-check-model="callCheckModel" @show-warning="showWarning"></add-l-l-m-dialog>
<warning-dialog v-show="showWarningDialog" ref="warningCompt" @close="showWarningDialog = false"></warning-dialog>
</main>
<footer class="flex-none px-4 flex justify-between items-center select-none" :class="{'bg-black bg-opacity-50': theme.active === 'lnl', 'bg-black bg-opacity-80': theme.active === 'bmg', 'border-t border-color-spilter': theme.active === 'dark'}">
<div>
<p>AI Playground from Intel Corporation <a href="https://github.com/intel/ai-playground" target="_blank"
class="text-blue-500">https://github.com/intel/ai-playground</a></p>
<p>AI Playground version: v{{ productVersion }}
<p>AI Playground version: v{{ productVersion }}
<a href="https://github.com/intel/ai-playground/blob/main/AI%20Playground%20Users%20Guide.pdf" target="_blank"
class="text-blue-500"> User Guide</a>

Expand Down Expand Up @@ -115,6 +117,8 @@ import { useGlobalSetup } from "./assets/js/store/globalSetup";
import DownloadDialog from '@/components/DownloadDialog.vue';
import { Collapsible, CollapsibleContent, CollapsibleTrigger } from '@/components/ui/collapsible'
import { useTheme } from "./assets/js/store/theme.ts";
import AddLLMDialog from "@/components/AddLLMDialog.vue";
import WarningDialog from "@/components/WarningDialog.vue";
const isOpen = ref(false);
Expand All @@ -127,12 +131,22 @@ const showSetting = ref(false);
const enhanceCompt = ref<InstanceType<typeof Enhance>>();
const answer = ref<InstanceType<typeof Answer>>();
const showSettingBtn = ref<HTMLButtonElement>();
const showDowloadDlg = ref(false);
const showModelRequestDialog = ref(false);
const showWarningDialog = ref(false);
const downloadDigCompt = ref<InstanceType<typeof DownloadDialog>>();
const addLLMCompt = ref<InstanceType<typeof AddLLMDialog>>();
const warningCompt = ref<InstanceType<typeof WarningDialog>>();
const fullscreen = ref(false);
const platformTitle = window.envVars.platformTitle;
Expand Down Expand Up @@ -161,7 +175,6 @@ onBeforeMount(async () => {
})
})
function showAppSettings() {
if (showSetting.value === false) {
showSetting.value = true;
Expand Down Expand Up @@ -223,4 +236,25 @@ function showDownloadModelConfirm(downList: DownloadModelParam[], success?: () =
downloadDigCompt.value!.showConfirm(downList, success, fail);
});
}
// Opens the "add LLM" dialog and triggers its entry animation once it is visible.
function showModelRequest() {
  showModelRequestDialog.value = true;
  // Wait for the v-show update to flush before starting the scale-in animation.
  nextTick(() => addLLMCompt.value!.onShow());
}
// Delegates the model-availability check to the Answer tab component.
function callCheckModel() {
  const answerComponent = answer.value!;
  answerComponent.checkModel();
}
// Shows the warning dialog with `message`; `func` runs only if the user confirms.
function showWarning(message: string, func: () => void) {
  const warning = warningCompt.value!;
  warning.warningMessage = message;
  warning.confirmFunction = func;
  showWarningDialog.value = true;
  // Start the scale-in animation after the dialog has become visible.
  nextTick(() => warning.onShow());
}
</script>
8 changes: 7 additions & 1 deletion WebUI/src/assets/i18n/en-US.json
Original file line number Diff line number Diff line change
Expand Up @@ -3,9 +3,11 @@
"COM_SETTINGS": "Settings",
"COM_MINI": "Minimize",
"COM_CLOSE": "Close",
"COM_ADD": "Add",
"COM_RESTORE": "Restore Defaults",
"COM_SD_PROMPT": "Input prompt to generate image",
"COM_LLM_PROMPT": "Input prompt to generate answer",
"COM_LLM_HF_PROMPT": "<namespace>/<repo_name>",
"COM_CLICK_UPLOAD": "- Click To Upload Image -",
"COM_GENERATE": "Generate",
"COM_GENERATING": "Generating",
Expand Down Expand Up @@ -123,6 +125,7 @@
"DECREASE_FONT_SIZE": "Shrink Text",
"ANSWER_RAG_ENABLE":"Enable File Query",
"ANSWER_RAG_OPEN_DIALOG":"Open File Uploader",
"REQUEST_LLM_MODEL_NAME":"Add a model of your choice from huggingface.co,<br />for example: <i>meta-llama/Llama-3.2-1B </i>",
"DOWNLOADER_CONFRIM_TIP":"You are missing one or more models needed to run. Would you like to download the model(s) listed below?",
"DOWNLOADER_MODEL":"Model",
"DOWNLOADER_INFO":"Info",
Expand Down Expand Up @@ -159,10 +162,13 @@
"ERROR_RUNTIME_ERROR": "If a critical failure occurs, please restart the program and try again",
"ERROR_GENERATE_UNKONW_EXCEPTION": "An unknown error occurred. Failed to generate from model",
"ERROR_FOLDER_NOT_EXISTS": "The specified directory does not exist",
"ERROR_REPO_NOT_EXISTS": "The specified repository-ID could not be found",
"ERROR_ALREADY_IN_MODELS": "The specified model is already in the list",
"ERROR_ENHANCE_IMAGE_NOT_SET": "Please configure the input image to be generated",
"ERROR_UNFOUND_GRAPHICS": "If the necessary hardware for program execution is not detected on the computer device, the program will terminate upon clicking OK.",
"ERROR_PYTHON_BACKEND_INIT": "Backend initialization failed",
"ERROR_PYTHON_BACKEND_INIT_DETAILS_TEXT": "The AI inference backend failed to initialize. Please try restarting the application. If the problem persists, you can check the Details for additional information about the error.",
"ERROR_PYTHON_BACKEND_INIT_DETAILS": "Details",
"ERROR_PYTHON_BACKEND_INIT_OPEN_LOG": "Open Log"
"ERROR_PYTHON_BACKEND_INIT_OPEN_LOG": "Open Log",
"WARNING_MODEL_TYPE_WRONG": "The model type doesn't seem to fit the requirements. Are you sure you want to continue?"
}
9 changes: 6 additions & 3 deletions WebUI/src/assets/js/store/models.ts
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
import { defineStore } from "pinia";

type ModelType = "llm" | "embedding" | "stableDiffusion" | "inpaint" | "lora" | "vae";
export type ModelType = "llm" | "embedding" | "stableDiffusion" | "inpaint" | "lora" | "vae" | "undefined";

export type Model = {
name: string;
Expand All @@ -17,6 +17,9 @@ const predefinedModels: Model[] = [
// { name: 'THUDM/chatglm3-6b', type: 'llm', downloaded: false },
]

// Models added at runtime by the user (via the "add LLM" dialog).
// Kept outside the store state so entries survive refreshModels(), which
// rebuilds the model list from downloaded + user + predefined models.
export const userModels: Model[] = [
]

export const useModels = defineStore("models", () => {

const hfToken = ref<string | undefined>(undefined);
Expand All @@ -39,10 +42,10 @@ export const useModels = defineStore("models", () => {
...inpaintModels.map<Model>(name => ({ name, type: 'inpaint', downloaded: true })),
...embeddingModels.map<Model>(name => ({ name, type: 'embedding', downloaded: true })),
];

const notYetDownloaded = (model: Model) => !downloadedModels.map(m => m.name).includes(model.name);

models.value = [...downloadedModels, ...predefinedModels.filter(notYetDownloaded)];
console.log(models);
models.value = [...downloadedModels, ...userModels, ...predefinedModels.filter(notYetDownloaded)];

}

Expand Down
106 changes: 106 additions & 0 deletions WebUI/src/components/AddLLMDialog.vue
Original file line number Diff line number Diff line change
@@ -0,0 +1,106 @@
<template>
<div class="dialog-container z-10">
<div class="dialog-mask absolute left-0 top-0 w-full h-full bg-black/55 flex justify-center items-center">
<div
class="py-10 px-20 w-500px flex flex-col items-center justify-center bg-gray-600 rounded-3xl gap-6 text-white"
:class="{ 'animate-scale-in': animate }">
<p v-html="i18nState.REQUEST_LLM_MODEL_NAME"></p>
<Input :placeholder="languages.COM_LLM_HF_PROMPT" v-model="modelRequest" @keyup.enter="addModel"></Input>
<p v-show="addModelError" style="color: #F44336;">{{ addModelErrorMessage }}</p>
<div class="flex justify-center items-center gap-9">
<button @click="closeAdd" class="bg-color-control-bg py-1 px-4 rounded">{{ i18nState.COM_CLOSE }}</button>
<button @click="addModel" class="bg-color-control-bg py-1 px-4 rounded">{{ i18nState.COM_ADD }}</button>
</div>
</div>
</div>
</div>
</template>
<script setup lang="ts">
import { Input } from '@/components/ui/input'
import { useGlobalSetup } from '@/assets/js/store/globalSetup';
import { useI18N } from '@/assets/js/store/i18n';
import { useModels, userModels } from '@/assets/js/store/models';
const i18nState = useI18N().state;
const globalSetup = useGlobalSetup();
const models = useModels();
const modelRequest = ref("");
const addModelErrorMessage = ref("")
const addModelError = ref(false);
const animate = ref(false);
const emits = defineEmits<{
(e: "close"): void,
(e: "callCheckModel"): void,
(e: "showWarning", warning: string, func: () => void): void
}>();
// Called by the parent once the dialog becomes visible; starts the scale-in animation.
function onShow() {
animate.value = true;
}
// Validates the user-entered HuggingFace repo id and, if valid, registers it as
// a new user LLM and kicks off the model check/download flow. Shows an inline
// error for duplicates or unknown repos, and asks for confirmation when the
// repo does not look like an LLM.
async function addModel() {
  const previousModel = globalSetup.modelSettings.llm_model
  const isInModels = models.models.some((model) => model.name === modelRequest.value)
  // Restore the previously selected model and surface an inline error message.
  const cancelAndShowWarning = (text: string) => {
    globalSetup.modelSettings.llm_model = previousModel;
    addModelErrorMessage.value = text;
    addModelError.value = true;
  }
  if (isInModels) {
    cancelAndShowWarning(i18nState.ERROR_ALREADY_IN_MODELS);
    return;
  }
  const urlExists = await checkIfUrlExists(modelRequest.value);
  if (!urlExists) {
    cancelAndShowWarning(i18nState.ERROR_REPO_NOT_EXISTS);
    return;
  }
  addModelError.value = false;
  const isLlm = await isLLM(modelRequest.value);
  const downloadNewModel = async () => {
    await registerModel();
    emits("callCheckModel");
    closeAdd();
  };
  if (!isLlm) {
    // Non-LLM repos need an explicit user confirmation before downloading.
    emits("showWarning", i18nState.WARNING_MODEL_TYPE_WRONG, downloadNewModel);
  } else {
    // Fix: await the async chain so rejections propagate instead of floating.
    await downloadNewModel();
  }
}
// Adds the requested repo to the user model list, refreshes the model store,
// and selects it as the active LLM.
async function registerModel() {
  const repoId = modelRequest.value;
  userModels.push({ name: repoId, type: 'llm', downloaded: false });
  await models.refreshModels();
  globalSetup.modelSettings.llm_model = repoId;
}
// Asks the backend whether the given HuggingFace repo id exists.
// Returns the backend's `exists` flag.
async function checkIfUrlExists(repo_id: string) {
  // Encode the repo id so special characters (e.g. '+', '#', '&') cannot
  // corrupt the query string; '/' in "<namespace>/<repo>" is encoded too.
  const response = await fetch(`${globalSetup.apiHost}/api/checkHFRepoExists?repo_id=${encodeURIComponent(repo_id)}`)
  const data = await response.json()
  return data.exists;
}
// Asks the backend whether the given HuggingFace repo looks like an LLM.
// Returns the backend's `isllm` flag.
async function isLLM(repo_id: string) {
  // Encode the repo id so special characters cannot corrupt the query string.
  const response = await fetch(`${globalSetup.apiHost}/api/isLLM?repo_id=${encodeURIComponent(repo_id)}`)
  const data = await response.json()
  return data.isllm
}
// Resets the dialog state (error flags, input field) and asks the parent to hide it.
function closeAdd() {
  addModelError.value = false;
  addModelErrorMessage.value = "";
  modelRequest.value = "";
  emits("close");
}
defineExpose({ onShow });
</script>
4 changes: 0 additions & 4 deletions WebUI/src/components/DownloadDialog.vue
Original file line number Diff line number Diff line change
Expand Up @@ -111,10 +111,6 @@ const readTerms = ref(false);
const downloadList = ref<DownloadModelRender[]>([]);
onDeactivated(() => {
animate.value = false;
})
function dataProcess(line: string) {
console.log(line);
const dataJson = line.slice(5);
Expand Down
1 change: 0 additions & 1 deletion WebUI/src/components/InpaintMask.vue
Original file line number Diff line number Diff line change
Expand Up @@ -108,7 +108,6 @@ function moveWithShadow(e: MouseEvent) {
function showShadow() {
shadowPos.show = true;
console.log("showShadow");
}
defineExpose({ clearMaskImage, getMaskImage })
Expand Down
41 changes: 41 additions & 0 deletions WebUI/src/components/WarningDialog.vue
Original file line number Diff line number Diff line change
@@ -0,0 +1,41 @@
<template>
<div class="dialog-container z-10">
<div class="dialog-mask absolute left-0 top-0 w-full h-full bg-black/55 flex justify-center items-center">
<div class="py-10 px-20 w-500px flex flex-col items-center justify-center bg-gray-600 rounded-3xl gap-6 text-white"
:class="{ 'animate-scale-in': animate }">
<p v-html= "warningMessage"></p>
<div class="flex justify-center items-center gap-9">
<button @click="cancelConfirm" class="bg-color-control-bg py-1 px-4 rounded">{{i18nState.COM_CANCEL}}</button>
<button @click="confirmAdd" class="bg-color-control-bg py-1 px-4 rounded">{{i18nState.COM_CONFIRM}}</button>
</div>
</div>
</div>
</div>
</template>
<script setup lang="ts">
import { useI18N } from '@/assets/js/store/i18n.ts';
const i18nState = useI18N().state;
const confirmFunction = ref(() => {})
const warningMessage = ref("")
const animate = ref(false);
const emits = defineEmits<{
(e: "close"): void
}>();
// Runs the stored confirmation callback, then closes the dialog.
async function confirmAdd() {
  const callback = confirmFunction.value;
  callback();
  emits("close");
}
// Dismisses the dialog without invoking the confirmation callback.
function cancelConfirm() {
emits("close");
}
// Called by the parent once the dialog becomes visible; starts the scale-in animation.
function onShow(){
animate.value = true
}
defineExpose({warningMessage, confirmFunction, onShow });
</script>
2 changes: 1 addition & 1 deletion WebUI/src/components/ui/input/Input.vue
Original file line number Diff line number Diff line change
Expand Up @@ -20,5 +20,5 @@ const modelValue = useVModel(props, 'modelValue', emits, {
</script>

<template>
<input type="password" v-model="modelValue" :class="cn('flex h-9 w-full rounded-md border bg-[var(--textbox-bg)] px-3 py-1 text-sm shadow-sm transition-colors file:border-0 file:bg-transparent file:text-sm file:font-medium placeholder:text-slate-500 focus-visible:outline-none focus-visible:border-[var(--color-active)] disabled:cursor-not-allowed disabled:opacity-50 ', props.class)">
<input v-model="modelValue" :class="cn('flex h-9 w-full rounded-md border bg-[var(--textbox-bg)] px-3 py-1 text-sm shadow-sm transition-colors file:border-0 file:bg-transparent file:text-sm file:font-medium placeholder:text-slate-500 focus-visible:outline-none focus-visible:border-[var(--color-active)] disabled:cursor-not-allowed disabled:opacity-50 ', props.class)">
</template>
Loading

0 comments on commit f7a6873

Please sign in to comment.