Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Feat/26 llm download from dropdown backup #3

Merged
merged 10 commits into from
Nov 26, 2024
40 changes: 37 additions & 3 deletions WebUI/src/App.vue
Original file line number Diff line number Diff line change
Expand Up @@ -66,17 +66,19 @@
@show-download-model-confirm="showDownloadModelConfirm"></create>
<enhance v-show="activeTabIdx == 1" ref="enhanceCompt" @show-download-model-confirm="showDownloadModelConfirm">
</enhance>
<answer v-show="activeTabIdx == 2" @show-download-model-confirm="showDownloadModelConfirm"></answer>
<answer v-show="activeTabIdx == 2" ref = "answer" @show-download-model-confirm="showDownloadModelConfirm" @show-model-request="showModelRequest"></answer>
<learn-more v-show="activeTabIdx == 3"></learn-more>
<app-settings v-if="showSetting" @close="hideAppSettings" @show-download-model-confirm="showDownloadModelConfirm"></app-settings>
</div>
<download-dialog v-show="showDowloadDlg" ref="downloadDigCompt" @close="showDowloadDlg = false"></download-dialog>
<add-l-l-m-dialog v-show="showModelRequestDialog" ref="addLLMCompt" @close="showModelRequestDialog = false" @call-check-model="callCheckModel" @show-warning="showWarning"></add-l-l-m-dialog>
<warning-dialog v-show="showWarningDialog" ref="warningCompt" @close="showWarningDialog = false"></warning-dialog>

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I am not yet convinced, that the llmDialog and the warning dialog should be on this level. My feeling is: keep app.vue as slim as possible, otherwise we will not be able to mentally control it in the long run. We already have some "noise" of events, that simply flip booleans to display some popup/dialogs.

my gut feeling: introduce the add$ModelType as a component on its own , where type is probably (on the long run) corresponding to the view $answer/create/enhance -> llm/stablediff/??? . Probably, in considering the correct hierarchies, one arrives at a cleaner communication between components overall, especially when it comes to delegating events to the download component.

I would also be willing to go with this design for now and get feedback from Intel. But IMO we need to be very careful not to let this degrade.

I am, however, happy to hear if you have already put thought into this.

</main>
<footer class="flex-none px-4 flex justify-between items-center select-none" :class="{'bg-black bg-opacity-50': theme.active === 'lnl', 'bg-black bg-opacity-80': theme.active === 'bmg', 'border-t border-color-spilter': theme.active === 'dark'}">
<div>
<p>Al Playground from Intel Corporation <a href="https://github.com/intel/ai-playground" target="_blank"
class="text-blue-500">https://github.com/intel/ai-playground</a></p>
<p>AI Playground version: v{{ productVersion }}
<p>AI Playground version: v{{ productVersion }}
<a href="https://github.com/intel/ai-playground/blob/main/AI%20Playground%20Users%20Guide.pdf" target="_blank"
class="text-blue-500"> User Guide</a>

Expand Down Expand Up @@ -115,6 +117,8 @@ import { useGlobalSetup } from "./assets/js/store/globalSetup";
import DownloadDialog from '@/components/DownloadDialog.vue';
import { Collapsible, CollapsibleContent, CollapsibleTrigger } from '@/components/ui/collapsible'
import { useTheme } from "./assets/js/store/theme.ts";
import AddLLMDialog from "@/components/AddLLMDialog.vue";
import WarningDialog from "@/components/WarningDialog.vue";


const isOpen = ref(false);
Expand All @@ -127,12 +131,22 @@ const showSetting = ref(false);

const enhanceCompt = ref<InstanceType<typeof Enhance>>();

const answer = ref<InstanceType<typeof Answer>>();

const showSettingBtn = ref<HTMLButtonElement>();

const showDowloadDlg = ref(false);

const showModelRequestDialog = ref(false);

const showWarningDialog = ref(false);

const downloadDigCompt = ref<InstanceType<typeof DownloadDialog>>();

const addLLMCompt = ref<InstanceType<typeof AddLLMDialog>>();

const warningCompt = ref<InstanceType<typeof WarningDialog>>();

const fullscreen = ref(false);

const platformTitle = window.envVars.platformTitle;
Expand Down Expand Up @@ -161,7 +175,6 @@ onBeforeMount(async () => {
})
})


function showAppSettings() {
if (showSetting.value === false) {
showSetting.value = true;
Expand Down Expand Up @@ -223,4 +236,25 @@ function showDownloadModelConfirm(downList: DownloadModelParam[], success?: () =
downloadDigCompt.value!.showConfirm(downList, success, fail);
});
}

// Shows the "add LLM model" dialog (AddLLMDialog), then triggers its entry
// animation after the next DOM update — the dialog is toggled with v-show,
// so onShow() must run once it is actually visible.
function showModelRequest() {
showModelRequestDialog.value = true;
nextTick(() => {
// Non-null: the dialog component is always mounted (v-show, not v-if).
addLLMCompt.value!.onShow();
});
}

// Relays AddLLMDialog's "call-check-model" event to the Answer tab so it can
// re-validate / download the newly registered LLM.
function callCheckModel(){
answer.value!.checkModel();
}

// Shows the shared warning dialog with the given message; `func` is stored on
// the dialog and executed only if the user confirms.
// NOTE(review): state is pushed imperatively into the child via its exposed
// refs (warningMessage / confirmFunction) rather than via props — works, but
// consider props + events for cleaner data flow.
function showWarning(message : string, func : () => void) {
warningCompt.value!.warningMessage = message;
showWarningDialog.value = true;
warningCompt.value!.confirmFunction = func;
nextTick(() => {
// Run the entry animation once the dialog is visible in the DOM.
warningCompt.value!.onShow();
});
}

</script>
8 changes: 7 additions & 1 deletion WebUI/src/assets/i18n/en-US.json
Original file line number Diff line number Diff line change
Expand Up @@ -3,9 +3,11 @@
"COM_SETTINGS": "Settings",
"COM_MINI": "Minimize",
"COM_CLOSE": "Close",
"COM_ADD": "Add",
"COM_RESTORE": "Restore Defaults",
"COM_SD_PROMPT": "Input prompt to generate image",
"COM_LLM_PROMPT": "Input prompt to generate answer",
"COM_LLM_HF_PROMPT": "<namespace>/<repo_name>",
"COM_CLICK_UPLOAD": "- Click To Upload Image -",
"COM_GENERATE": "Generate",
"COM_GENERATING": "Generating",
Expand Down Expand Up @@ -123,6 +125,7 @@
"DECREASE_FONT_SIZE": "Shrink Text",
"ANSWER_RAG_ENABLE":"Enable File Query",
"ANSWER_RAG_OPEN_DIALOG":"Open File Uploader",
"REQUEST_LLM_MODEL_NAME":"Add a model of your choice from huggingface.co,<br />for example: <i>meta-llama/Llama-3.2-1B </i>",
"DOWNLOADER_CONFRIM_TIP":"You are missing one or more models needed to run. Would you like to download the model(s) listed below?",
"DOWNLOADER_MODEL":"Model",
"DOWNLOADER_INFO":"Info",
Expand Down Expand Up @@ -159,10 +162,13 @@
"ERROR_RUNTIME_ERROR": "If a critical failure occurs, please restart the program and try again",
"ERROR_GENERATE_UNKONW_EXCEPTION": "An unknown error occurred. failed to generate from model",
"ERROR_FOLDER_NOT_EXISTS": "The specified directory does not exist",
"ERROR_REPO_NOT_EXISTS": "The specified repository-ID could not be found",
"ERROR_ALREADY_IN_MODELS": "The specified model is already in the list",
"ERROR_ENHANCE_IMAGE_NOT_SET": "Please configure the input image to be generated",
"ERROR_UNFOUND_GRAPHICS": "If the necessary hardware for program execution is not detected on the computer device, the program will terminate upon clicking OK.",
"ERROR_PYTHON_BACKEND_INIT": "Backend initialization failed",
"ERROR_PYTHON_BACKEND_INIT_DETAILS_TEXT": "The AI inference backend failed to initialize. Please try restarting the application. If the problem persists, you can check the Details for additional information about the error.",
"ERROR_PYTHON_BACKEND_INIT_DETAILS": "Details",
"ERROR_PYTHON_BACKEND_INIT_OPEN_LOG": "Open Log"
"ERROR_PYTHON_BACKEND_INIT_OPEN_LOG": "Open Log",
"WARNING_MODEL_TYPE_WRONG": "The model type doesn't seem to fit the requirements. Are you sure, you want to continue?"
}
9 changes: 6 additions & 3 deletions WebUI/src/assets/js/store/models.ts
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
import { defineStore } from "pinia";

type ModelType = "llm" | "embedding" | "stableDiffusion" | "inpaint" | "lora" | "vae";
export type ModelType = "llm" | "embedding" | "stableDiffusion" | "inpaint" | "lora" | "vae" | "undefined";

export type Model = {
name: string;
Expand All @@ -17,6 +17,9 @@ const predefinedModels: Model[] = [
// { name: 'THUDM/chatglm3-6b', type: 'llm', downloaded: false },
]

export const userModels: Model[] = [
]

export const useModels = defineStore("models", () => {

const hfToken = ref<string | undefined>(undefined);
Expand All @@ -39,10 +42,10 @@ export const useModels = defineStore("models", () => {
...inpaintModels.map<Model>(name => ({ name, type: 'inpaint', downloaded: true })),
...embeddingModels.map<Model>(name => ({ name, type: 'embedding', downloaded: true })),
];

const notYetDownloaded = (model: Model) => !downloadedModels.map(m => m.name).includes(model.name);

models.value = [...downloadedModels, ...predefinedModels.filter(notYetDownloaded)];
console.log(models);
models.value = [...downloadedModels, ...userModels, ...predefinedModels.filter(notYetDownloaded)];

}

Expand Down
106 changes: 106 additions & 0 deletions WebUI/src/components/AddLLMDialog.vue
Original file line number Diff line number Diff line change
@@ -0,0 +1,106 @@
<template>
  <div class="dialog-container z-10">
    <div class="dialog-mask absolute left-0 top-0 w-full h-full bg-black/55 flex justify-center items-center">
      <div
        class="py-10 px-20 w-500px flex flex-col items-center justify-center bg-gray-600 rounded-3xl gap-6 text-white"
        :class="{ 'animate-scale-in': animate }">
        <p v-html="i18nState.REQUEST_LLM_MODEL_NAME"></p>
        <!-- NOTE(review): placeholder uses `languages` while the rest of this
             component uses `i18nState` — presumably a global alias for the same
             i18n table; confirm and unify. -->
        <Input :placeholder="languages.COM_LLM_HF_PROMPT" v-model="modelRequest" @keyup.enter="addModel"></Input>
        <p v-show="addModelError" style="color: #F44336;">{{ addModelErrorMessage }}</p>
        <div class="flex justify-center items-center gap-9">
          <button @click="closeAdd" class="bg-color-control-bg py-1 px-4 rounded">{{ i18nState.COM_CLOSE }}</button>
          <button @click="addModel" class="bg-color-control-bg py-1 px-4 rounded">{{ i18nState.COM_ADD }}</button>
        </div>
      </div>
    </div>
  </div>
</template>
<script setup lang="ts">
// Dialog that lets the user add an LLM of their choice from huggingface.co
// (e.g. "meta-llama/Llama-3.2-1B"). The repo id is validated against the
// backend before the model is registered in the model store.
import { Input } from '@/components/ui/input'
import { useGlobalSetup } from '@/assets/js/store/globalSetup';
import { useI18N } from '@/assets/js/store/i18n';
import { useModels, userModels } from '@/assets/js/store/models';


const i18nState = useI18N().state;
const globalSetup = useGlobalSetup();
const models = useModels();
// User input: huggingface repo id in "<namespace>/<repo_name>" form.
const modelRequest = ref("");
// Validation error text shown under the input field.
const addModelErrorMessage = ref("")
const addModelError = ref(false);
// Drives the scale-in entry animation; set when the dialog becomes visible.
const animate = ref(false);
const emits = defineEmits<{
  (e: "close"): void,
  (e: "callCheckModel"): void,
  (e: "showWarning", warning: string, func: () => void): void
}>();

// Called by the parent (App.vue) right after the dialog is made visible.
function onShow() {
  animate.value = true;
}

// Validates the requested repo id and, on success, registers the model.
// Order of checks: duplicate -> repo existence -> "is it an LLM?". A repo
// that does not look like an LLM asks the user to confirm via the parent's
// warning dialog before downloading.
async function addModel() {
  const previousModel = globalSetup.modelSettings.llm_model
  const isInModels = models.models.some((model) => model.name === modelRequest.value)

  // Roll back the model selection and surface a validation error.
  const cancelAndShowWarning = (text: string) => {
    globalSetup.modelSettings.llm_model = previousModel;
    addModelErrorMessage.value = text;
    addModelError.value = true;
  }

  if (isInModels) {
    cancelAndShowWarning(i18nState.ERROR_ALREADY_IN_MODELS);
    return;
  }

  const urlExists = await checkIfUrlExists(modelRequest.value);
  if (!urlExists) {
    cancelAndShowWarning(i18nState.ERROR_REPO_NOT_EXISTS);
    return;
  }

  addModelError.value = false;

  const isLlm = await isLLM(modelRequest.value);
  const downloadNewModel = async () => {
    await registerModel();
    emits("callCheckModel");
    closeAdd();
  };

  if (!isLlm) {
    // Not recognized as an LLM: let the user decide whether to continue.
    emits("showWarning", i18nState.WARNING_MODEL_TYPE_WRONG, downloadNewModel);
  } else {
    downloadNewModel();
  }
}

// Adds the model to the user-defined list, refreshes the store, and selects
// it as the current LLM.
async function registerModel() {
  userModels.push({ name: modelRequest.value, type: 'llm', downloaded: false })
  await models.refreshModels()
  globalSetup.modelSettings.llm_model = modelRequest.value;
}

// Asks the backend whether the huggingface repo exists. The repo id is
// URL-encoded (it contains "/" and may contain other reserved characters).
// Network failures are treated as "does not exist" so the user sees a
// normal validation error instead of an unhandled rejection.
async function checkIfUrlExists(repo_id: string) {
  try {
    const response = await fetch(`${globalSetup.apiHost}/api/checkHFRepoExists?repo_id=${encodeURIComponent(repo_id)}`)
    const data = await response.json()
    return data.exists;
  } catch {
    return false;
  }
}

// Asks the backend whether the repo looks like an LLM. On failure we fall
// back to "false", which routes the user through the confirmation warning.
async function isLLM(repo_id: string) {
  try {
    const response = await fetch(`${globalSetup.apiHost}/api/isLLM?repo_id=${encodeURIComponent(repo_id)}`)
    const data = await response.json()
    return data.isllm
  } catch {
    return false;
  }
}

// Resets the dialog state and notifies the parent to hide it.
function closeAdd() {
  addModelErrorMessage.value = "";
  addModelError.value = false;
  modelRequest.value = "";
  emits("close");
}

defineExpose({ onShow });

</script>
4 changes: 0 additions & 4 deletions WebUI/src/components/DownloadDialog.vue
Original file line number Diff line number Diff line change
Expand Up @@ -111,10 +111,6 @@ const readTerms = ref(false);
const downloadList = ref<DownloadModelRender[]>([]);


onDeactivated(() => {
animate.value = false;
})
julianbollig marked this conversation as resolved.
Show resolved Hide resolved

function dataProcess(line: string) {
console.log(line);
const dataJson = line.slice(5);
Expand Down
1 change: 0 additions & 1 deletion WebUI/src/components/InpaintMask.vue
Original file line number Diff line number Diff line change
Expand Up @@ -108,7 +108,6 @@ function moveWithShadow(e: MouseEvent) {

function showShadow() {
shadowPos.show = true;
console.log("showShadow");
}

defineExpose({ clearMaskImage, getMaskImage })
Expand Down
41 changes: 41 additions & 0 deletions WebUI/src/components/WarningDialog.vue
Original file line number Diff line number Diff line change
@@ -0,0 +1,41 @@
<template>
  <div class="dialog-container z-10">
    <div class="dialog-mask absolute left-0 top-0 w-full h-full bg-black/55 flex justify-center items-center">
      <div class="py-10 px-20 w-500px flex flex-col items-center justify-center bg-gray-600 rounded-3xl gap-6 text-white"
        :class="{ 'animate-scale-in': animate }">
        <p v-html="warningMessage"></p>
        <div class="flex justify-center items-center gap-9">
          <button @click="cancelConfirm" class="bg-color-control-bg py-1 px-4 rounded">{{i18nState.COM_CANCEL}}</button>
          <button @click="confirmAdd" class="bg-color-control-bg py-1 px-4 rounded">{{i18nState.COM_CONFIRM}}</button>
        </div>
      </div>
    </div>
  </div>
</template>
<script setup lang="ts">
// Generic confirm/cancel warning dialog. The parent (App.vue) writes
// `warningMessage` and `confirmFunction` through the exposed refs before
// showing the dialog; confirming runs the stored callback, cancelling just
// closes.
import { useI18N } from '@/assets/js/store/i18n.ts';
const i18nState = useI18N().state;
// Callback to run when the user confirms; set by the parent via defineExpose.
const confirmFunction = ref(() => {})
// HTML message rendered via v-html; set by the parent via defineExpose.
// NOTE(review): v-html — ensure callers never pass untrusted content here.
const warningMessage = ref("")
// Drives the scale-in entry animation; set when the dialog becomes visible.
const animate = ref(false);
const emits = defineEmits<{
  (e: "close"): void
}>();


// Run the stored callback, then close. (Was `async` with no await — the
// implicit promise wrapping served no purpose and has been removed.)
function confirmAdd() {
  confirmFunction.value()
  emits("close");
}

// Dismiss without running the callback.
function cancelConfirm() {
  emits("close");
}

// Called by the parent right after the dialog is made visible.
function onShow(){
  animate.value = true
}

defineExpose({warningMessage, confirmFunction, onShow });

</script>
2 changes: 1 addition & 1 deletion WebUI/src/components/ui/input/Input.vue
Original file line number Diff line number Diff line change
Expand Up @@ -20,5 +20,5 @@ const modelValue = useVModel(props, 'modelValue', emits, {
</script>

<template>
<input type="password" v-model="modelValue" :class="cn('flex h-9 w-full rounded-md border bg-[var(--textbox-bg)] px-3 py-1 text-sm shadow-sm transition-colors file:border-0 file:bg-transparent file:text-sm file:font-medium placeholder:text-slate-500 focus-visible:outline-none focus-visible:border-[var(--color-active)] disabled:cursor-not-allowed disabled:opacity-50 ', props.class)">
<input v-model="modelValue" :class="cn('flex h-9 w-full rounded-md border bg-[var(--textbox-bg)] px-3 py-1 text-sm shadow-sm transition-colors file:border-0 file:bg-transparent file:text-sm file:font-medium placeholder:text-slate-500 focus-visible:outline-none focus-visible:border-[var(--color-active)] disabled:cursor-not-allowed disabled:opacity-50 ', props.class)">
</template>
Loading
Loading