feat: add context_window_size to model config
Neet-Nestor committed Sep 29, 2024
1 parent 713b227 · commit d7488ac
Showing 5 changed files with 49 additions and 5 deletions.
app/client/api.ts: 2 changes (2 additions, 0 deletions)
@@ -30,6 +30,7 @@ export interface LLMConfig {
model: string;
cache: CacheType;
temperature?: number;
context_window_size?: number;
top_p?: number;
stream?: boolean;
presence_penalty?: number;
@@ -63,6 +64,7 @@ export interface ModelRecord {
family?: string;
recommended_config?: {
temperature?: number;
context_window_size?: number;
top_p?: number;
presence_penalty?: number;
frequency_penalty?: number;
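For context on how the two updated interfaces relate, here is a minimal sketch of a helper that seeds a request-level LLMConfig from a model's recommended_config. The helper, its name, and the assumption that CacheType is exported from api.ts are illustrative only; none of this is part of the commit.

// Hypothetical helper: prefer the model's recommended context window,
// falling back to a 4K window when none is recommended (assumed fallback).
import type { CacheType, LLMConfig, ModelRecord } from "./api";

export function toLLMConfig(
  record: ModelRecord,
  modelName: string,
  cache: CacheType,
): LLMConfig {
  return {
    model: modelName,
    cache,
    temperature: record.recommended_config?.temperature,
    context_window_size:
      record.recommended_config?.context_window_size ?? 4096,
    top_p: record.recommended_config?.top_p,
  };
}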
app/client/webllm.ts: 1 change (1 addition, 0 deletions)
@@ -155,6 +155,7 @@ export class WebLLMApi implements LLMApi {
// Compare optional fields
const optionalFields: (keyof LLMConfig)[] = [
"temperature",
"context_window_size",
"top_p",
"stream",
"presence_penalty",
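The optionalFields list presumably drives a field-by-field comparison between the cached and requested configs, so that a changed context window is detected and the engine is reconfigured. A minimal sketch of such a check, assuming a helper of roughly this shape rather than quoting the actual implementation:

// Sketch only: two configs count as "different" when any optional field disagrees.
import type { LLMConfig } from "./api";

function isDifferentConfig(a: LLMConfig, b: LLMConfig): boolean {
  const optionalFields: (keyof LLMConfig)[] = [
    "temperature",
    "context_window_size",
    "top_p",
    "stream",
    "presence_penalty",
  ];
  // Strict inequality also treats "set on one side, undefined on the other" as a difference.
  return optionalFields.some((field) => a[field] !== b[field]);
}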
app/components/model-config.tsx: 29 changes (29 additions, 0 deletions)
@@ -74,6 +74,35 @@ export function ModelConfigList() {
))}
</Select>
</ListItem>

{/* New setting item for LLM model context window length */}
<ListItem
title={Locale.Settings.ContextWindowLength.Title}
subTitle={Locale.Settings.ContextWindowLength.SubTitle}
>
<Select
value={config.modelConfig.context_window_size}
onChange={(e) => {
updateModelConfig(
(config) =>
(config.context_window_size =
ModalConfigValidator.context_window_size(
parseInt(e.currentTarget.value),
)),
);
}}
>
<option value="1024">1K</option>
<option value="2048">2K</option>
<option value="4096">4K</option>
<option value="8192">8K</option>
<option value="16384">16K</option>
<option value="32768">32K</option>
<option value="65536">64K</option>
<option value="131072">128K</option>
</Select>
</ListItem>

<ListItem
title={Locale.Settings.Temperature.Title}
subTitle={Locale.Settings.Temperature.SubTitle}
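Each option carries the raw token count as a string, so the onChange handler parses it and runs it through ModalConfigValidator.context_window_size (added in app/store/config.ts below) before writing it to the store. Expected results, assuming limitNumber clamps into [0, 131072] and falls back to 1024 for non-numeric input:

// Illustration of the assumed clamping behavior; these values are not asserted by the commit.
import { ModalConfigValidator } from "../store/config";

ModalConfigValidator.context_window_size(parseInt("8192"));   // 8192  ("8K" is stored as-is)
ModalConfigValidator.context_window_size(parseInt("999999")); // 131072 (clamped to the 128K ceiling)
ModalConfigValidator.context_window_size(parseInt("abc"));    // 1024  (NaN falls back to the default)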
app/locales/en.ts: 5 changes (4 additions, 1 deletion)
@@ -230,7 +230,10 @@ const en = {
SubTitle: "Connect to the API",
},
},

ContextWindowLength: {
Title: "Context Window Length",
SubTitle: "The maximum number of tokens for the context window",
},
Temperature: {
Title: "Temperature",
SubTitle: "A larger value makes the more random output",
app/store/config.ts: 17 changes (13 additions, 4 deletions)
@@ -1,4 +1,4 @@
-import { LogLevel } from "@neet-nestor/web-llm";
+import { LogLevel, prebuiltAppConfig } from "@neet-nestor/web-llm";
import { ModelRecord } from "../client/api";
import {
DEFAULT_INPUT_TEMPLATE,
@@ -39,6 +39,7 @@ export type ModelConfig = {

// Chat configs
temperature: number;
context_window_size?: number;
top_p: number;
max_tokens: number;
presence_penalty: number;
@@ -83,6 +84,10 @@ const DEFAULT_MODEL_CONFIG: ModelConfig = {
// Chat configs
temperature: 1.0,
top_p: 1,
context_window_size:
prebuiltAppConfig.model_list.find(
(m) => m.model_id === DEFAULT_MODELS[0].name,
)?.overrides?.context_window_size ?? 4096,
max_tokens: 4000,
presence_penalty: 0,
frequency_penalty: 0,
@@ -142,7 +147,10 @@ export const ModalConfigValidator = {
return x as Model;
},
max_tokens(x: number) {
-return limitNumber(x, 0, 512000, 1024);
+return limitNumber(x, 0, 131072, 1024);
},
context_window_size(x: number) {
return limitNumber(x, 0, 131072, 1024);
},
presence_penalty(x: number) {
return limitNumber(x, -2, 2, 0);
@@ -208,9 +216,9 @@ export const useAppConfig = createPersistStore(
}),
{
name: StoreKey.Config,
-version: 0.54,
+version: 0.55,
migrate: (persistedState, version) => {
-if (version < 0.54) {
+if (version < 0.55) {
return {
...DEFAULT_CONFIG,
...(persistedState as any),
@@ -222,6 +230,7 @@
// Chat configs
temperature: 1.0,
top_p: 1,
context_window_size: 4096,
max_tokens: 4000,
presence_penalty: 0,
frequency_penalty: 0,
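limitNumber is an existing helper in the repository and is not shown in this diff; from its call sites it appears to take (value, min, max, default) and clamp the value into the given range. A sketch consistent with that usage, offered as an assumption rather than the actual implementation:

// Assumed contract, inferred from calls like limitNumber(x, 0, 131072, 1024)
// and limitNumber(x, -2, 2, 0) elsewhere in this file.
function limitNumber(x: number, min: number, max: number, defaultValue: number): number {
  if (typeof x !== "number" || isNaN(x)) return defaultValue;
  return Math.min(max, Math.max(min, x));
}

limitNumber(200000, 0, 131072, 1024); // 131072: the new validator caps the window at 128K tokens

Note also the store version bump from 0.54 to 0.55: configs persisted under the old version go through the migration again, and the migrated chat config now includes context_window_size: 4096 alongside the other defaults, while DEFAULT_MODEL_CONFIG derives its default from the prebuilt model list via prebuiltAppConfig.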
