fix: update packages and remove errors
danielclough committed Jul 31, 2024
1 parent 403f950 commit 782e817
Showing 32 changed files with 13,193 additions and 161 deletions.
2 changes: 1 addition & 1 deletion Makefile
@@ -14,7 +14,7 @@ ifeq ($(shell uname),Darwin)
@brew update && brew install openssl pkg-config
else
@sudo apt-get update
@sudo apt-get install -y libwebkit2gtk-4.1-dev build-essential curl wget file libssl-dev libgtk-3-dev libayatana-appindicator3-dev librsvg2-dev
@sudo apt-get install -y libwebkit2gtk-4.1-dev build-essential curl wget file libssl-dev libxdo-dev libgtk-3-dev libayatana-appindicator3-dev librsvg2-dev
@sudo apt-get install -y gcc pkg-config nvidia-cuda-toolkit
endif

2 changes: 1 addition & 1 deletion backend/Cargo.toml
@@ -24,7 +24,7 @@ intel-mkl-src = { version = "0.8.1", features = ["mkl-static-lp64-iomp"] }
tower-http = { version = "0.5", features = ["cors"] }
http = "0.2.11"
pdf-extract = "0.7.2"
tauri = "1.5.4"
tauri = "2.0.0-beta.25"

common = {path = "../common" }
glob = "0.3.1"
1 change: 0 additions & 1 deletion backend/src/lib.rs
@@ -1,3 +1,2 @@
pub mod llm;
pub mod server;
pub mod utilities;
8 changes: 4 additions & 4 deletions backend/src/llm/infer.rs
@@ -1,8 +1,8 @@
#[cfg(feature = "mkl")]
extern crate intel_mkl_src;
// #[cfg(feature = "mkl")]
// extern crate intel_mkl_src;

#[cfg(feature = "accelerate")]
extern crate accelerate_src;
// #[cfg(feature = "accelerate")]
// extern crate accelerate_src;

use crate::llm::{inference_args::InferenceArgs, load_model::ModelTokenizerDevice};
use anyhow::Result;
3 changes: 1 addition & 2 deletions backend/src/llm/inference_args.rs
@@ -1,7 +1,6 @@
use common::utilities::config_path::{config_file_path, context_file_dir};
use serde::{Deserialize, Serialize};

use crate::utilities::config_path::{config_file_path, context_file_dir};

#[derive(Serialize, Deserialize, PartialEq, Debug, Clone)]
pub struct InferenceArgs {
/// The temperature used to generate samples.
16 changes: 9 additions & 7 deletions backend/src/llm/load_model.rs
@@ -1,16 +1,16 @@
#[cfg(feature = "mkl")]
extern crate intel_mkl_src;
// #[cfg(feature = "mkl")]
// extern crate intel_mkl_src;

#[cfg(feature = "accelerate")]
extern crate accelerate_src;
// #[cfg(feature = "accelerate")]
// extern crate accelerate_src;

impl LoadModel {
/// Default config to prevent failure.
/// Will load "$HOME/.config/fireside-chat/config_model.yaml" if available.
pub fn load_current_args() -> LoadModel {
// Tauri config dir
let config_path = config_file_path("config_model.yaml");
println!("{:#?}",config_path);
println!("{:#?}", config_path);
let config_model_string = std::fs::read_to_string(config_path);

if config_model_string.is_ok() {
@@ -266,7 +266,9 @@ impl LoadModel {
let model_config_str = Some(model_config.as_str());

let config = match model_config_str {
Some("ChatML") | Some("Teknium") => MistralConfig::config_chat_ml(args.use_flash_attn),
Some("ChatML") | Some("Teknium") => {
MistralConfig::config_chat_ml(args.use_flash_attn)
}
Some("Amazon") => {
MistralConfig::config_amazon_mistral_lite(args.use_flash_attn)
}
@@ -471,8 +473,8 @@ pub struct NoModel {
}

use crate::server::rest::model_list::get_default_list;
use crate::utilities::config_path::config_file_path;
use common::llm::model_list::{ModelList, ModelListEntry};
use common::utilities::config_path::config_file_path;

use anyhow::{Error as E, Result};
use serde::{Deserialize, Serialize};
2 changes: 1 addition & 1 deletion backend/src/llm/prompt_template.rs
@@ -294,7 +294,7 @@ use std::io::Read;
use common::llm::role_list::RoleListEntry;

use crate::server::rest::role_list::get_default_list;
use crate::utilities::config_path::context_file_dir;
use common::utilities::config_path::context_file_dir;

fn get_context() -> String {
let directory = context_file_dir();
8 changes: 4 additions & 4 deletions backend/src/llm/text_generation.rs
@@ -1,8 +1,8 @@
#[cfg(feature = "accelerate")]
extern crate accelerate_src;
// #[cfg(feature = "accelerate")]
// extern crate accelerate_src;

#[cfg(feature = "mkl")]
extern crate intel_mkl_src;
// #[cfg(feature = "mkl")]
// extern crate intel_mkl_src;

use std::borrow::BorrowMut;

3 changes: 1 addition & 2 deletions backend/src/server/rest/model_list.rs
@@ -1,6 +1,5 @@
use crate::utilities::cache_path::cache_file_path;
use axum::{extract::Path, http::StatusCode, Json};
use common::llm::model_list::{ModelArgs, ModelDLList, ModelDLListEntry, ModelList};
use common::{llm::model_list::{ModelArgs, ModelDLList, ModelDLListEntry, ModelList}, utilities::cache_path::cache_file_path};
use glob::glob;

// use serde::{Deserialize, Serialize};
1 change: 1 addition & 0 deletions common/src/lib.rs
@@ -1,2 +1,3 @@
pub mod database;
pub mod llm;
pub mod utilities;
8 changes: 0 additions & 8 deletions common/src/llm/model_list.rs
@@ -92,11 +92,3 @@ pub struct Tags {
pub safetensors: bool,
pub bin: bool,
}

#[derive(Clone, Debug)]
pub enum GPU {
CUDA,
Mac,
Intel,
AMD,
}
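
The GPU enum deleted above (CUDA, Mac, Intel, AMD) is replaced in the frontend hunks further down by a plain string flag that is either "GPU" or "None". A minimal sketch of that convention, with helper names that are illustrative rather than taken from the commit:

// Illustrative helpers only; the commit inlines this logic directly in the
// Leptos components shown later in this diff.
fn gpu_enabled(gpu_type: &str) -> bool {
    gpu_type == "GPU"
}

fn toggled_gpu(gpu_type: &str) -> &'static str {
    if gpu_type == "GPU" { "None" } else { "GPU" }
}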
@@ -1,11 +1,8 @@
use std::{env, path::PathBuf};

use tauri::{api::path::app_cache_dir, Config};

pub fn cache_file_path(slug: &str) -> PathBuf {
let cache_dir = match env::var_os("USER") {
Some(value) => {

if cfg!(target_os = "macos") {
PathBuf::from(format!("/Users/{}/.cache", value.to_string_lossy()))
} else if cfg!(target_os = "linux") {
@@ -14,8 +11,8 @@ pub fn cache_file_path(slug: &str) -> PathBuf {
println!("\n\tUnspported OS!!!\n");
PathBuf::from(format!("/home/{}/.cache", value.to_string_lossy()))
}
},
None => app_cache_dir(&Config::default()).expect("load huggingface/hub cache dir"),
}
None => PathBuf::from("/root/.cache"),
};

let path = format!("huggingface/hub/{}", slug);
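
With tauri's app_cache_dir gone, the cache helper can be reconstructed from the hunks above roughly as follows; the final join of the cache root with the huggingface/hub slug is truncated in this view, so that last step is an assumption:

use std::{env, path::PathBuf};

pub fn cache_file_path(slug: &str) -> PathBuf {
    // Derive the per-user cache root from $USER instead of tauri's app_cache_dir.
    let cache_dir = match env::var_os("USER") {
        Some(value) => {
            if cfg!(target_os = "macos") {
                PathBuf::from(format!("/Users/{}/.cache", value.to_string_lossy()))
            } else if cfg!(target_os = "linux") {
                PathBuf::from(format!("/home/{}/.cache", value.to_string_lossy()))
            } else {
                println!("\n\tUnsupported OS!!!\n");
                PathBuf::from(format!("/home/{}/.cache", value.to_string_lossy()))
            }
        }
        None => PathBuf::from("/root/.cache"),
    };

    // Assumption: append the Hugging Face hub slug under the cache root.
    cache_dir.join(format!("huggingface/hub/{}", slug))
}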
@@ -1,7 +1,5 @@
use std::{env, path::PathBuf};

use tauri::{api::path::app_config_dir, Config};

pub fn config_file_path(slug: &str) -> PathBuf {
let config_dir = app_config_file_path();
let fireside = "fireside-chat".to_string();
@@ -37,9 +35,9 @@ pub fn app_config_file_path() -> PathBuf {
PathBuf::from(format!("/home/{}/.config", value.to_string_lossy()))
} else {
println!("\n\tUnspported OS!!!\n");
app_config_dir(&Config::default()).expect("load huggingface/hub cache dir")
PathBuf::from("/root/.cache")
}
},
None => app_config_dir(&Config::default()).expect("load huggingface/hub cache dir"),
None => PathBuf::from("/root/.cache"),
}
}
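
The slug-joining wrapper at the top of this file is truncated here; a sketch of how it plausibly continues, assuming it mirrors the directory handling seen in db_file_path below (the create_dir_all call and final join are assumptions):

use std::path::PathBuf;

// Relies on the app_config_file_path() helper from the hunk above.
pub fn config_file_path(slug: &str) -> PathBuf {
    let config_dir = app_config_file_path();
    let config_dir_path = config_dir.join("fireside-chat");
    // Assumption: ensure the app's config directory exists before returning a path.
    let _ = std::fs::create_dir_all(&config_dir_path);
    config_dir_path.join(slug)
}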
File renamed without changes.
2 changes: 1 addition & 1 deletion database/Cargo.toml
@@ -12,6 +12,6 @@ tokio = { version = "1", features = ["full"] }
sqlx = { version = "0.7", features = ["runtime-tokio-native-tls", "json", "sqlite", "chrono"] }
serde_json = "1"
tower-http = { version = "0.5", features = ["trace", "cors"] }
tauri = "1.5.4"
tauri = "2.0.0-beta.25"

common = {path = "../common" }
4 changes: 2 additions & 2 deletions database/src/server.rs
@@ -1,5 +1,6 @@
use crate::controllers;

use common::utilities::config_path::config_file_dir;
use tokio::net::TcpListener;
use tower_http::cors::{Any, CorsLayer};

@@ -134,10 +135,9 @@ pub async fn db() {
axum::serve(listener, app).await.unwrap();
}

use tauri::{api::path::app_config_dir, Config};

pub fn db_file_path() -> String {
let config_dir = app_config_dir(&Config::default()).expect("load tauri config");
let config_dir = config_file_dir();
let fireside = "fireside-chat".to_string();
let config_dir_path = config_dir.join(fireside);
_ = std::fs::create_dir_all(&config_dir_path);
4 changes: 2 additions & 2 deletions frontend/src/app.rs
@@ -23,9 +23,9 @@ pub fn App() -> impl IntoView {
//
let (database_url, set_database_url, _) =
use_local_storage::<String, JsonCodec>("database_url");
let (database_error, set_database_error) = create_signal(false);
let (_database_error, set_database_error) = create_signal(false);
let (backend_url, set_backend_url, _) = use_local_storage::<String, JsonCodec>("backend_url");
let (backend_error, set_backend_error) = create_signal(false);
let (_backend_error, set_backend_error) = create_signal(false);

// GPU
//
2 changes: 1 addition & 1 deletion frontend/src/components/home/model_config/index.rs
@@ -63,7 +63,7 @@ pub fn ModelConfig(
view! {
<Show
when=move || {
gpu_type.get() == "Mac" || gpu_type.get() == "CUDA"
gpu_type.get() == "GPU"
}

fallback=move || {
7 changes: 4 additions & 3 deletions frontend/src/components/home/model_config/init_model.rs
@@ -1,6 +1,6 @@
use common::llm::model_list::ModelArgs;
use leptonic::components::{
button::{Button, ButtonColor, ButtonWrapper},
button::ButtonWrapper,
modal::{Modal, ModalBody, ModalFooter, ModalHeader, ModalTitle},
};
use leptos::{component, view, IntoView, Show, Signal, SignalGet, WriteSignal};
@@ -37,9 +37,10 @@ where
<Show when=move || show_when.get()>
<ModalFooter>
<ButtonWrapper>
<Button on_press=move |_| (on_accept)() color=ButtonColor::Danger>
<button on:click=move |_| (on_accept)()>
// color=ButtonColor::Danger
"Confirm"
</Button>
</button>
</ButtonWrapper>
</ModalFooter>
</Show>
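
The same swap from Leptonic's Button/on_press to a plain HTML button with a Leptos on:click handler appears in the next two components as well; in isolation the pattern looks roughly like this (component and prop names are illustrative, not from the repo):

use leptos::*;

// Illustrative confirm button wired with a plain on:click handler, mirroring
// the Leptonic Button -> <button> change made throughout this commit.
#[component]
pub fn ConfirmButton<F>(on_accept: F) -> impl IntoView
where
    F: Fn() + 'static,
{
    view! {
        <button on:click=move |_| on_accept()>
            "Confirm"
        </button>
    }
}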
9 changes: 3 additions & 6 deletions frontend/src/components/home/model_config/model_list_chip.rs
@@ -1,7 +1,4 @@
use leptonic::components::{
button::Button,
chip::{Chip, ChipColor},
};
use leptonic::components::chip::{Chip, ChipColor};
use leptos::*;

#[component]
@@ -27,7 +24,7 @@ pub fn ModelListChip(

fallback=move || {
view! {
<Button on_press=move |_| {
<button on:click=move |_| {
if catch_all {
set_tags_enabled.update(|x| *x = vec![String::new()]);
} else {
@@ -41,7 +38,7 @@
"DISABLE ALL".to_string()
}}

</Button>
</button>
}
}
>
34 changes: 11 additions & 23 deletions frontend/src/components/home/model_config/model_list_container.rs
@@ -1,7 +1,7 @@
use crate::components::home::model_config::model_list_grid::ModelListGrid;

use common::llm::model_list::{ModelArgs, ModelDLList};
use leptonic::components::{button::Button, prelude::Box, typography::H1};
use leptonic::components::{prelude::Box, typography::H1};
use leptos::*;

#[component]
@@ -56,9 +56,9 @@ pub fn ModelListContainer(
// set_selected=move |v| set_quantized_str.set(v)
// />

<Button
<button
style="padding:1rem;"
on_press=move |_| {
on:click=move |_| {
if quantized_str.get().as_str() == "Safetensors" {
set_quantized_str.set("Quantized".to_string());
} else {
@@ -73,7 +73,7 @@
"Use Safetensors"
}}

</Button>
</button>

<Show
when=move || model_args.get().repo_id.clone() != *"NoModel"
@@ -91,32 +91,20 @@
</Show>
// <P>"Revision: "{model_args.get().revision}</P>

<Button
<button
style="padding:1rem;"
on_press=move |_| {
if cfg!(target_os = "macos") {
if gpu_type.get().as_str() == "Mac" {
set_gpu_type.set("None".to_string());
} else {
set_gpu_type.set("Mac".to_string());
}
on:click=move |_| {
if gpu_type.get().as_str() == "GPU" {
set_gpu_type.set("None".to_string());
} else {
if gpu_type.get().as_str() == "CUDA" {
set_gpu_type.set("None".to_string());
} else {
set_gpu_type.set("CUDA".to_string());
}
set_gpu_type.set("GPU".to_string());
}
}
>

{if cfg!(target_os = "macos") {
if gpu_type.get().as_str() != "Mac" { "Use Metal" } else { "Use CPU" }
} else {
if gpu_type.get().as_str() != "CUDA" { "Use CUDA" } else { "Use CPU" }
}}
{if gpu_type.get().as_str() != "GPU" { "Use GPU" } else { "Use CPU" }}

</Button>
</button>
</Box>
</Box>
// <GpuSelect gpu_type=gpu_type set_gpu_type=set_gpu_type/>
@@ -33,7 +33,7 @@ pub fn ModelListItem(
&& (&template_signal.get() != "NoModel")
&& gpu_type.get() == init_gpu.get(),
);
let check_cuda_or_mac = gpu_type.get() == "Mac" || gpu_type.get() == "CUDA";
let check_cuda_or_mac = gpu_type.get() == "GPU";
let (cpu, _set_cpu) = create_signal(!check_cuda_or_mac);

let (name_signal, _set_name_signal) = create_signal(item.clone().name);