Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Provide arc docs #37

Closed
wants to merge 28 commits into from
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
28 commits
Select commit Hold shift + click to select a range
744475c
Fix status updates for backend installation when switching windows
julianbollig Dec 18, 2024
bf49b78
Fix git ref provided by workflows
florianesser-tng Dec 18, 2024
44596b3
Final Changes
julianbollig Dec 19, 2024
e074eda
Disable RAG for Llama-cpp backend
julianbollig Dec 18, 2024
7bc2351
Allow ComfyUI workflows to be sorted via displayPriority
mschuettlerTNG Dec 19, 2024
78a12d0
Ensure that RAG state is disabled at change of LLM inference-model
marijnvg-tng Dec 19, 2024
35b2f17
Move default value to zod schema
mschuettlerTNG Dec 19, 2024
dc20cfe
Merge pull request #32 from TNG/fix_installation_status_updates
florianesser-tng Dec 19, 2024
7e69bff
Merge pull request #33 from TNG/fix/FluxWorkflowMissingGitRef
florianesser-tng Dec 19, 2024
37b27f1
Merge pull request #34 from TNG/DisableRag
florianesser-tng Dec 19, 2024
c396117
Merge pull request #35 from TNG/feat/allow-sorting-of-workflows
florianesser-tng Dec 19, 2024
13078c1
Fix RAG panel
julianbollig Dec 19, 2024
e60cd70
provide additional license check in downloader
florianesser-tng Dec 19, 2024
b960068
Fix additional licensed download
florianesser-tng Dec 19, 2024
6ef9539
Remove "implicit" downloads
florianesser-tng Dec 19, 2024
5da89ab
Fix UI of extended downloadDialog
marijnvg-tng Dec 19, 2024
d63cb2c
Diverse backend services statuses fix
florianesser-tng Dec 19, 2024
6f08ac0
Respect removal of uv in python backend
florianesser-tng Dec 19, 2024
97762b4
Fix evil bugs
florianesser-tng Dec 19, 2024
a2fdd10
Merge pull request #36 from TNG/feature/licencesForWorkflowInDownload…
mschuettlerTNG Dec 19, 2024
23959a0
Fix enabled checkbox for required ai backend
mschuettlerTNG Dec 19, 2024
469f4cc
Correctly select workflow on first selection
mschuettlerTNG Dec 19, 2024
161e308
Properly disable rag for llama.cpp
mschuettlerTNG Dec 19, 2024
b650f50
Disable git ref checking for now due to windows + subprocess file per…
mschuettlerTNG Dec 19, 2024
947394b
Revert ls level zero changes to avoid issues due to mismatching oneAP…
mschuettlerTNG Dec 19, 2024
be68aec
Increase reserve-vram to 5.0 after testing
mschuettlerTNG Dec 19, 2024
1d92696
Provide arc docs
florianesser-tng Dec 20, 2024
1272adf
Fix typos
mschuettlerTNG Dec 20, 2024
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion WebUI/build/settings.json
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@
"--disable-ipex-optimize",
"--bf16-unet",
"--reserve-vram",
"4.0"
"5.0"
],
"availableThemes": ["dark","lnl","bmg"],
"currentTheme": "bmg"
Expand Down
2 changes: 1 addition & 1 deletion WebUI/electron/subprocesses/aiBackendService.ts
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,7 @@ export class AiBackendService extends LongLivedPythonApiService {
readonly lsLevelZeroDir = this.pythonEnvDir
readonly lsLevelZeroExe = getLsLevelZeroPath(this.lsLevelZeroDir)
healthEndpointUrl = `${this.baseUrl}/healthy`
serviceIsSetUp = () => filesystem.existsSync(this.pythonExe) && filesystem.existsSync(this.lsLevelZeroExe);
serviceIsSetUp = () => filesystem.existsSync(this.pythonExe);
isSetUp = this.serviceIsSetUp();

async *set_up(): AsyncIterable<SetupProgress> {
Expand Down
3 changes: 2 additions & 1 deletion WebUI/electron/subprocesses/apiService.ts
Original file line number Diff line number Diff line change
Expand Up @@ -53,8 +53,9 @@ export abstract class LongLivedPythonApiService implements ApiService {
readonly baseDir = app.isPackaged ? process.resourcesPath : path.join(__dirname, "../../../");
readonly prototypicalPythonEnv = path.join(this.baseDir, "prototype-python-env")
readonly customIntelExtensionForPytorch = path.join(app.isPackaged ? this.baseDir : path.join(__dirname, "../../external/"), ipexWheel)
abstract readonly serviceDir: string
abstract readonly pythonEnvDir: string
abstract readonly lsLevelZeroDir: string
abstract readonly serviceDir: string
abstract readonly pythonExe: string
abstract isSetUp: boolean;

Expand Down
6 changes: 3 additions & 3 deletions WebUI/electron/subprocesses/comfyUIBackendService.ts
Original file line number Diff line number Diff line change
Expand Up @@ -13,21 +13,21 @@ export class ComfyUiBackendService extends LongLivedPythonApiService {
readonly isRequired = false
readonly serviceDir = path.resolve(path.join(this.baseDir, "ComfyUI"));
readonly pythonEnvDir = path.resolve(path.join(this.baseDir, `comfyui-backend-env`));
readonly pythonExe = getPythonPath(this.pythonEnvDir)
readonly lsLevelZeroDir = this.pythonEnvDir
readonly lsLevelZeroExe = getLsLevelZeroPath(this.lsLevelZeroDir)
readonly pythonExe = getPythonPath(this.pythonEnvDir)
healthEndpointUrl = `${this.baseUrl}/queue`

private readonly comfyUIStartupParameters = this.settings.comfyUiParameters ? this.settings.comfyUiParameters : [
"--lowvram",
"--disable-ipex-optimize",
"--bf16-unet",
"--reserve-vram",
"4.0"
"5.0"
]

serviceIsSetUp(): boolean {
return filesystem.existsSync(this.pythonEnvDir) && filesystem.existsSync(this.serviceDir) && filesystem.existsSync(this.lsLevelZeroExe)
return filesystem.existsSync(this.pythonEnvDir) && filesystem.existsSync(this.serviceDir)
}

isSetUp = this.serviceIsSetUp();
Expand Down
2 changes: 1 addition & 1 deletion WebUI/electron/subprocesses/llamaCppBackendService.ts
Original file line number Diff line number Diff line change
Expand Up @@ -8,9 +8,9 @@ export class LlamaCppBackendService extends LongLivedPythonApiService {
readonly serviceDir = path.resolve(path.join(this.baseDir, "LlamaCPP"));
readonly pythonEnvDir = path.resolve(path.join(this.baseDir, `llama-cpp-env`));
readonly pythonExe = this.getPythonPath(this.pythonEnvDir)
readonly isRequired = false;
readonly lsLevelZeroDir = path.resolve(path.join(this.baseDir, "ai-backend-env"));
readonly lsLevelZeroExe = getLsLevelZeroPath(this.lsLevelZeroDir)
readonly isRequired = false;

healthEndpointUrl = `${this.baseUrl}/health`

Expand Down
2 changes: 1 addition & 1 deletion WebUI/external/settings-dev.json
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@
"--disable-ipex-optimize",
"--bf16-unet",
"--reserve-vram",
"4.0"
"5.0"
],
"availableThemes": ["dark","lnl","bmg"],
"currentTheme": "bmg"
Expand Down
38 changes: 30 additions & 8 deletions WebUI/external/workflows/FaceSwapHD.json
Original file line number Diff line number Diff line change
Expand Up @@ -7,16 +7,38 @@
"onnxruntime"
],
"customNodes": [
"Gourieff/comfyui-reactor-node/be1c60bd62d1fb35511153533032c5a6811c8fab"
"Gourieff/comfyui-reactor-node@be1c60bd62d1fb35511153533032c5a6811c8fab"
],
"requiredModels": [
"defaultCheckpoint:RunDiffusion/Juggernaut-XL-v9/unet/diffusion_pytorch_model.fp16.safetensors",
"defaultCheckpoint:RunDiffusion/Juggernaut-XL-v9/text_encoder/model.fp16.safetensors",
"defaultCheckpoint:RunDiffusion/Juggernaut-XL-v9/text_encoder_2/model.fp16.safetensors",
"defaultCheckpoint:RunDiffusion/Juggernaut-XL-v9/vae/diffusion_pytorch_model.fp16.safetensors",
"defaultLora:latent-consistency/lcm-lora-sdxl/pytorch_lora_weights.safetensors",
"faceswap:Aitrepreneur/insightface/inswapper_128.onnx",
"facerestore:gmk123/GFPGAN/GFPGANv1.4.pth"
{
"type": "defaultCheckpoint",
"model": "RunDiffusion/Juggernaut-XL-v9/unet/diffusion_pytorch_model.fp16.safetensors"
},
{
"type": "defaultCheckpoint",
"model": "RunDiffusion/Juggernaut-XL-v9/text_encoder/model.fp16.safetensors"
},
{
"type": "defaultCheckpoint",
"model": "RunDiffusion/Juggernaut-XL-v9/text_encoder_2/model.fp16.safetensors"
},
{
"type": "defaultCheckpoint",
"model": "RunDiffusion/Juggernaut-XL-v9/vae/diffusion_pytorch_model.fp16.safetensors"
},
{
"type": "defaultLora",
"model": "latent-consistency/lcm-lora-sdxl/pytorch_lora_weights.safetensors"
},
{
"type": "faceswap",
"model": "Aitrepreneur/insightface/inswapper_128.onnx",
"additionalLicenceLink": "https://huggingface.co/datasets/Gourieff/ReActor"
},
{
"type": "facerestore",
"model": "gmk123/GFPGAN/GFPGANv1.4.pth"
}
]
},
"tags": [
Expand Down
31 changes: 25 additions & 6 deletions WebUI/external/workflows/Line2ImageHD-Fast.json
Original file line number Diff line number Diff line change
@@ -1,16 +1,35 @@
{
"name": "Line2Image-HD-Fast",
"displayPriority": 200,
"backend": "comfyui",
"comfyUIRequirements": {
"customNodes": [
],
"requiredModels": [
"defaultCheckpoint:RunDiffusion/Juggernaut-XL-v9/unet/diffusion_pytorch_model.fp16.safetensors",
"defaultCheckpoint:RunDiffusion/Juggernaut-XL-v9/text_encoder/model.fp16.safetensors",
"defaultCheckpoint:RunDiffusion/Juggernaut-XL-v9/text_encoder_2/model.fp16.safetensors",
"defaultCheckpoint:RunDiffusion/Juggernaut-XL-v9/vae/diffusion_pytorch_model.fp16.safetensors",
"defaultLora:latent-consistency/lcm-lora-sdxl/pytorch_lora_weights.safetensors",
"controlNet:stabilityai/control-lora/control-LoRAs-rank128/control-lora-canny-rank128.safetensors"
{
"type": "defaultCheckpoint",
"model": "RunDiffusion/Juggernaut-XL-v9/unet/diffusion_pytorch_model.fp16.safetensors"
},
{
"type": "defaultCheckpoint",
"model": "RunDiffusion/Juggernaut-XL-v9/text_encoder/model.fp16.safetensors"
},
{
"type": "defaultCheckpoint",
"model": "RunDiffusion/Juggernaut-XL-v9/text_encoder_2/model.fp16.safetensors"
},
{
"type": "defaultCheckpoint",
"model": "RunDiffusion/Juggernaut-XL-v9/vae/diffusion_pytorch_model.fp16.safetensors"
},
{
"type": "defaultLora",
"model": "latent-consistency/lcm-lora-sdxl/pytorch_lora_weights.safetensors"
},
{
"type": "controlNet",
"model": "stabilityai/control-lora/control-LoRAs-rank128/control-lora-canny-rank128.safetensors"
}
]
},
"tags": [
Expand Down
26 changes: 21 additions & 5 deletions WebUI/external/workflows/Line2ImageHD-Quality.json
Original file line number Diff line number Diff line change
@@ -1,15 +1,31 @@
{
"name": "Line2Image-HD-Quality",
"displayPriority": 150,
"backend": "comfyui",
"comfyUIRequirements": {
"customNodes": [
],
"requiredModels": [
"defaultCheckpoint:RunDiffusion/Juggernaut-XL-v9/unet/diffusion_pytorch_model.fp16.safetensors",
"defaultCheckpoint:RunDiffusion/Juggernaut-XL-v9/text_encoder/model.fp16.safetensors",
"defaultCheckpoint:RunDiffusion/Juggernaut-XL-v9/text_encoder_2/model.fp16.safetensors",
"defaultCheckpoint:RunDiffusion/Juggernaut-XL-v9/vae/diffusion_pytorch_model.fp16.safetensors",
"controlNet:stabilityai/control-lora/control-LoRAs-rank128/control-lora-canny-rank128.safetensors"
{
"type": "defaultCheckpoint",
"model": "RunDiffusion/Juggernaut-XL-v9/unet/diffusion_pytorch_model.fp16.safetensors"
},
{
"type": "defaultCheckpoint",
"model": "RunDiffusion/Juggernaut-XL-v9/text_encoder/model.fp16.safetensors"
},
{
"type": "defaultCheckpoint",
"model": "RunDiffusion/Juggernaut-XL-v9/text_encoder_2/model.fp16.safetensors"
},
{
"type": "defaultCheckpoint",
"model": "RunDiffusion/Juggernaut-XL-v9/vae/diffusion_pytorch_model.fp16.safetensors"
},
{
"type": "controlNet",
"model": "stabilityai/control-lora/control-LoRAs-rank128/control-lora-canny-rank128.safetensors"
}
]
},
"tags": [
Expand Down
23 changes: 18 additions & 5 deletions WebUI/external/workflows/fluxQ4.json
Original file line number Diff line number Diff line change
@@ -1,19 +1,32 @@
{
"name": "Flux.1-Schnell Med Quality",
"displayPriority": 500,
"tags": [
"Q4",
"Fast"
],
"backend": "comfyui",
"comfyUIRequirements": {
"customNodes": [
"city96/ComfyUI-GGUF/65a7c895bb0ac9547ba2f89d55fbdb609aa2bfe7"
"city96/ComfyUI-GGUF@65a7c895bb0ac9547ba2f89d55fbdb609aa2bfe7"
],
"requiredModels": [
"unet:city96/FLUX.1-schnell-gguf/flux1-schnell-Q4_K_S.gguf",
"clip:city96/t5-v1_1-xxl-encoder-gguf/t5-v1_1-xxl-encoder-Q3_K_M.gguf",
"clip:comfyanonymous/flux_text_encoders/clip_l.safetensors",
"vae:black-forest-labs/FLUX.1-schnell/ae.safetensors"
{
"type": "unet",
"model": "city96/FLUX.1-schnell-gguf/flux1-schnell-Q4_K_S.gguf"
},
{
"type": "clip",
"model": "city96/t5-v1_1-xxl-encoder-gguf/t5-v1_1-xxl-encoder-Q3_K_M.gguf"
},
{
"type": "clip",
"model": "comfyanonymous/flux_text_encoders/clip_l.safetensors"
},
{
"type": "vae",
"model": "black-forest-labs/FLUX.1-schnell/ae.safetensors"
}
]
},

Expand Down
24 changes: 18 additions & 6 deletions WebUI/external/workflows/fluxQ8.json
Original file line number Diff line number Diff line change
@@ -1,16 +1,28 @@
{
"name": "Flux.1-Schnell High Quality",

"displayPriority": 450,
"backend": "comfyui",
"comfyUIRequirements": {
"customNodes": [
"city96/ComfyUI-GGUF"
"city96/ComfyUI-GGUF@65a7c895bb0ac9547ba2f89d55fbdb609aa2bfe7"
],
"requiredModels": [
"unet:city96/FLUX.1-schnell-gguf/flux1-schnell-Q8_0.gguf",
"clip:city96/t5-v1_1-xxl-encoder-gguf/t5-v1_1-xxl-encoder-Q3_K_M.gguf",
"clip:comfyanonymous/flux_text_encoders/clip_l.safetensors",
"vae:black-forest-labs/FLUX.1-schnell/ae.safetensors"
{
"type": "unet",
"model": "city96/FLUX.1-schnell-gguf/flux1-schnell-Q8_0.gguf"
},
{
"type": "clip",
"model": "city96/t5-v1_1-xxl-encoder-gguf/t5-v1_1-xxl-encoder-Q3_K_M.gguf"
},
{
"type": "clip",
"model": "comfyanonymous/flux_text_encoders/clip_l.safetensors"
},
{
"type": "vae",
"model": "black-forest-labs/FLUX.1-schnell/ae.safetensors"
}
]
},
"tags": [
Expand Down
20 changes: 15 additions & 5 deletions WebUI/src/App.vue
Original file line number Diff line number Diff line change
Expand Up @@ -21,17 +21,17 @@
<button :title="languages.COM_CLOSE" @click="closeWindow" class="svg-icon i-close w-6 h-6"></button>
</div>
</header>
<main v-if="globalSetup.loadingState === 'verifyBackend'" class="flex-auto flex items-center justify-center">
<main v-show="globalSetup.loadingState === 'verifyBackend'" class="flex-auto flex items-center justify-center">
<loading-bar :text="'Verifying backends'" class="w-3/5" style="word-spacing: 8px;"></loading-bar>
</main>
<main v-else-if="globalSetup.loadingState === 'manageInstallations'"
<main v-show="globalSetup.loadingState === 'manageInstallations'"
class="flex-auto flex items-center justify-center">
<installation-management @close="concludeLoadingStateAfterManagedInstallationDialog"></installation-management>
</main>
<main v-else-if="globalSetup.loadingState === 'loading'" class="flex-auto flex items-center justify-center">
<main v-show="globalSetup.loadingState === 'loading'" class="flex-auto flex items-center justify-center">
<loading-bar :text="'AI Playground Loading'" class="w-3/5" style="word-spacing: 8px;"></loading-bar>
</main>
<main v-else-if="globalSetup.loadingState === 'failed'" class="flex-auto flex items-start mt-[10vh] justify-center">
<main v-show="globalSetup.loadingState === 'failed'" class="flex-auto flex items-start mt-[10vh] justify-center">
<div
class="dialog-container z-10 text-white w-[60vw] align-top bg-black bg-opacity-50 p-4 rounded-lg border border-gray-400">
<Collapsible v-model:open="isOpen" class=" space-y-2">
Expand Down Expand Up @@ -59,7 +59,7 @@
</Collapsible>
</div>
</main>
<main v-else class="flex-auto flex flex-col relative">
<main v-show="globalSetup.loadingState === 'running'" class="flex-auto flex flex-col relative">
<div class="main-tabs flex-none pt-2 px-3 flex items-end justify-start gap-1 text-gray-400">
<button class="tab" :class="{ 'active': activeTabIdx == 0 }" @click="switchTab(0)">{{
languages.TAB_CREATE
Expand Down Expand Up @@ -142,10 +142,12 @@ import {useTheme} from "./assets/js/store/theme.ts";
import AddLLMDialog from "@/components/AddLLMDialog.vue";
import WarningDialog from "@/components/WarningDialog.vue";
import {useBackendServices} from "./assets/js/store/backendServices.ts";
import {useTextInference} from "@/assets/js/store/textInference.ts";

const backendServices = useBackendServices();
const theme = useTheme();
const globalSetup = useGlobalSetup();
const textInference = useTextInference()

const enhanceCompt = ref<InstanceType<typeof Enhance>>();
const answer = ref<InstanceType<typeof Answer>>();
Expand Down Expand Up @@ -254,6 +256,14 @@ function switchTab(index: number) {
}
}

watch(textInference, (newSetting, oldSetting) => {
if (newSetting.backend === 'LLAMA.CPP') {
answer.value!.disableRag();
} else {
answer.value!.restoreRagState();
}
})

function miniWindow() {
window.electronAPI.miniWindow();
}
Expand Down
3 changes: 2 additions & 1 deletion WebUI/src/assets/i18n/en-US.json
Original file line number Diff line number Diff line change
Expand Up @@ -159,8 +159,9 @@
"DOWNLOADER_ACCESS_INFO": "You don't have access to some models you want to download",
"DOWNLOADER_GATED_ACCEPT": "Some of the models are gated. Please make sure to visit the model info page and request access. ",
"DOWNLOADER_ACCESS_ACCEPT": "Inaccessible models will not be downloaded.",
"DOWNLOADER_REASON":"Reason",
"DOWNLOADER_REASON":"Type",
"DOWNLOADER_TERMS":"Visit",
"DOWNLOADER_LICENSE":"Terms",
"DOWNLOADER_CONFLICT":"Another download task is currently in progress, and a new task cannot be started. You can cancel the current download task and start a new download task",
"DOWNLOADER_TERMS_TIP":"I have reviewed the model card(s) and license(s). I agree to all terms and conditions and would like to download the third-party model(s).",
"DOWNLOADER_FOR_ANSWER_GENERATE":"Answer Model",
Expand Down
3 changes: 2 additions & 1 deletion WebUI/src/assets/js/store/backendServices.ts
Original file line number Diff line number Diff line change
Expand Up @@ -82,6 +82,7 @@ export const useBackendServices = defineStore("backendServices", () => {
setUpService,
startService,
stopService,

}
}, {
persist: {
Expand Down Expand Up @@ -125,14 +126,14 @@ class BackendServiceSetupProgressListener {
}, 1000)
})
}
return this.collectedSetupProgress
}

async awaitFinalizationAndResetData(): Promise<{success: boolean, logs: SetupProgress[]}> {
return this.awaitFinalization().then( collectedSetupProgress => {
console.log(`server startup complete for ${this.associatedServiceName}`)
const clonedSetupProgress = collectedSetupProgress.slice()
this.collectedSetupProgress = []
this.terminalUpdateReceived = false
return { success: this.installationSuccess, logs: clonedSetupProgress}
})
}
Expand Down
Loading
Loading