AI Playground from Intel Corporation https://github.com/intel/ai-playground
-
AI Playground version: v{{ productVersion }}
+
AI Playground version: v{{ productVersion }}
User Guide
@@ -115,6 +117,8 @@ import { useGlobalSetup } from "./assets/js/store/globalSetup";
import DownloadDialog from '@/components/DownloadDialog.vue';
import { Collapsible, CollapsibleContent, CollapsibleTrigger } from '@/components/ui/collapsible'
import { useTheme } from "./assets/js/store/theme.ts";
+import AddLLMDialog from "@/components/AddLLMDialog.vue";
+import WarningDialog from "@/components/WarningDialog.vue";
const isOpen = ref(false);
@@ -127,12 +131,22 @@ const showSetting = ref(false);
const enhanceCompt = ref>();
+const answer = ref>();
+
const showSettingBtn = ref();
const showDowloadDlg = ref(false);
+const showModelRequestDialog = ref(false);
+
+const showWarningDialog = ref(false);
+
const downloadDigCompt = ref>();
+const addLLMCompt = ref>();
+
+const warningCompt = ref>();
+
const fullscreen = ref(false);
const platformTitle = window.envVars.platformTitle;
@@ -161,7 +175,6 @@ onBeforeMount(async () => {
})
})
-
function showAppSettings() {
if (showSetting.value === false) {
showSetting.value = true;
@@ -223,4 +236,25 @@ function showDownloadModelConfirm(downList: DownloadModelParam[], success?: () =
downloadDigCompt.value!.showConfirm(downList, success, fail);
});
}
+
+function showModelRequest() {
+ showModelRequestDialog.value = true;
+ nextTick(() => {
+ addLLMCompt.value!.onShow();
+ });
+}
+
+function callCheckModel(){
+ answer.value!.checkModel();
+}
+
+function showWarning(message : string, func : () => void) {
+ warningCompt.value!.warningMessage = message;
+ showWarningDialog.value = true;
+ warningCompt.value!.confirmFunction = func;
+ nextTick(() => {
+ warningCompt.value!.onShow();
+ });
+}
+
diff --git a/WebUI/src/assets/i18n/en-US.json b/WebUI/src/assets/i18n/en-US.json
index 06315107..892d8ab3 100644
--- a/WebUI/src/assets/i18n/en-US.json
+++ b/WebUI/src/assets/i18n/en-US.json
@@ -3,9 +3,11 @@
"COM_SETTINGS": "Settings",
"COM_MINI": "Minimize",
"COM_CLOSE": "Close",
+ "COM_ADD": "Add",
"COM_RESTORE": "Restore Defaults",
"COM_SD_PROMPT": "Input prompt to generate image",
"COM_LLM_PROMPT": "Input prompt to generate answer",
+ "COM_LLM_HF_PROMPT": "/",
"COM_CLICK_UPLOAD": "- Click To Upload Image -",
"COM_GENERATE": "Generate",
"COM_GENERATING": "Generating",
@@ -123,6 +125,7 @@
"DECREASE_FONT_SIZE": "Shrink Text",
"ANSWER_RAG_ENABLE":"Enable File Query",
"ANSWER_RAG_OPEN_DIALOG":"Open File Uploader",
+ "REQUEST_LLM_MODEL_NAME":"Add a model of your choice from huggingface.co,
for example: meta-llama/Llama-3.2-1B ",
"DOWNLOADER_CONFRIM_TIP":"You are missing one or more models needed to run. Would you like to download the model(s) listed below?",
"DOWNLOADER_MODEL":"Model",
"DOWNLOADER_INFO":"Info",
@@ -159,10 +162,13 @@
"ERROR_RUNTIME_ERROR": "If a critical failure occurs, please restart the program and try again",
"ERROR_GENERATE_UNKONW_EXCEPTION": "An unknown error occurred. failed to generate from model",
"ERROR_FOLDER_NOT_EXISTS": "The specified directory does not exist",
+ "ERROR_REPO_NOT_EXISTS": "The specified repository-ID could not be found",
+ "ERROR_ALREADY_IN_MODELS": "The specified model is already in the list",
"ERROR_ENHANCE_IMAGE_NOT_SET": "Please configure the input image to be generated",
"ERROR_UNFOUND_GRAPHICS": "If the necessary hardware for program execution is not detected on the computer device, the program will terminate upon clicking OK.",
"ERROR_PYTHON_BACKEND_INIT": "Backend initialization failed",
"ERROR_PYTHON_BACKEND_INIT_DETAILS_TEXT": "The AI inference backend failed to initialize. Please try restarting the application. If the problem persists, you can check the Details for additional information about the error.",
"ERROR_PYTHON_BACKEND_INIT_DETAILS": "Details",
- "ERROR_PYTHON_BACKEND_INIT_OPEN_LOG": "Open Log"
+ "ERROR_PYTHON_BACKEND_INIT_OPEN_LOG": "Open Log",
+ "WARNING_MODEL_TYPE_WRONG": "The model type doesn't seem to fit the requirements. Are you sure, you want to continue?"
}
diff --git a/WebUI/src/assets/js/store/models.ts b/WebUI/src/assets/js/store/models.ts
index 7782acc1..4f96b650 100644
--- a/WebUI/src/assets/js/store/models.ts
+++ b/WebUI/src/assets/js/store/models.ts
@@ -1,6 +1,6 @@
import { defineStore } from "pinia";
-type ModelType = "llm" | "embedding" | "stableDiffusion" | "inpaint" | "lora" | "vae";
+export type ModelType = "llm" | "embedding" | "stableDiffusion" | "inpaint" | "lora" | "vae" | "undefined";
export type Model = {
name: string;
@@ -17,6 +17,9 @@ const predefinedModels: Model[] = [
// { name: 'THUDM/chatglm3-6b', type: 'llm', downloaded: false },
]
+export const userModels: Model[] = [
+]
+
export const useModels = defineStore("models", () => {
const hfToken = ref(undefined);
@@ -39,10 +42,10 @@ export const useModels = defineStore("models", () => {
...inpaintModels.map(name => ({ name, type: 'inpaint', downloaded: true })),
...embeddingModels.map(name => ({ name, type: 'embedding', downloaded: true })),
];
+
const notYetDownloaded = (model: Model) => !downloadedModels.map(m => m.name).includes(model.name);
- models.value = [...downloadedModels, ...predefinedModels.filter(notYetDownloaded)];
- console.log(models);
+ models.value = [...downloadedModels, ...userModels, ...predefinedModels.filter(notYetDownloaded)];
}
diff --git a/WebUI/src/components/AddLLMDialog.vue b/WebUI/src/components/AddLLMDialog.vue
new file mode 100644
index 00000000..a80d68d2
--- /dev/null
+++ b/WebUI/src/components/AddLLMDialog.vue
@@ -0,0 +1,106 @@
+
+
+
+
+
+
+
{{ addModelErrorMessage }}
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/WebUI/src/components/DownloadDialog.vue b/WebUI/src/components/DownloadDialog.vue
index 7488bcf5..81a9c3ce 100644
--- a/WebUI/src/components/DownloadDialog.vue
+++ b/WebUI/src/components/DownloadDialog.vue
@@ -111,10 +111,6 @@ const readTerms = ref(false);
const downloadList = ref([]);
-onDeactivated(() => {
- animate.value = false;
-})
-
function dataProcess(line: string) {
console.log(line);
const dataJson = line.slice(5);
diff --git a/WebUI/src/components/InpaintMask.vue b/WebUI/src/components/InpaintMask.vue
index c660d3da..d8b07ec5 100644
--- a/WebUI/src/components/InpaintMask.vue
+++ b/WebUI/src/components/InpaintMask.vue
@@ -108,7 +108,6 @@ function moveWithShadow(e: MouseEvent) {
function showShadow() {
shadowPos.show = true;
- console.log("showShadow");
}
defineExpose({ clearMaskImage, getMaskImage })
diff --git a/WebUI/src/components/WarningDialog.vue b/WebUI/src/components/WarningDialog.vue
new file mode 100644
index 00000000..7cfe91a9
--- /dev/null
+++ b/WebUI/src/components/WarningDialog.vue
@@ -0,0 +1,41 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/WebUI/src/components/ui/input/Input.vue b/WebUI/src/components/ui/input/Input.vue
index a7d9ed70..a16a1ef9 100644
--- a/WebUI/src/components/ui/input/Input.vue
+++ b/WebUI/src/components/ui/input/Input.vue
@@ -20,5 +20,5 @@ const modelValue = useVModel(props, 'modelValue', emits, {
-
+
diff --git a/WebUI/src/views/Answer.vue b/WebUI/src/views/Answer.vue
index 89523f4b..56c60963 100644
--- a/WebUI/src/views/Answer.vue
+++ b/WebUI/src/views/Answer.vue
@@ -104,6 +104,7 @@
+