Skip to content

Commit

Permalink
🦺 Use readonly
Browse files Browse the repository at this point in the history
  • Loading branch information
NatoBoram committed Mar 25, 2024
1 parent 48d3504 commit 75fc306
Show file tree
Hide file tree
Showing 4 changed files with 26 additions and 31 deletions.
1 change: 1 addition & 0 deletions .gitignore
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@
docs/
package-lock.json

# Created by https://www.toptal.com/developers/gitignore/api/node,linux,macos,windows,visualstudiocode
Expand Down
14 changes: 7 additions & 7 deletions src/functions.ts
Original file line number Diff line number Diff line change
Expand Up @@ -7,19 +7,19 @@ type OpenAIFunction = OpenAI.Chat.ChatCompletionCreateParams.Function
* Record<string, unknown>, which isn't very useful for type checking this
* formatting code. */
export interface FunctionDef extends Omit<OpenAIFunction, "parameters"> {
  /** Name of the function, as sent to the OpenAI API. */
  readonly name: string

  /** Optional natural-language description of what the function does. */
  readonly description?: string

  /** JSON-Schema-style description of the function's parameters, typed as
   * `ObjectProp` instead of OpenAI's loose `Record<string, unknown>` so the
   * formatting code can be type-checked. */
  readonly parameters: ObjectProp
}

/** A JSON-Schema `object` node: the literal `"object"` type tag, its named
 * member schemas, and the list of member names that are required. */
export interface ObjectProp {
  readonly type: "object"
  readonly properties?: Record<string, Prop>
  readonly required?: string[]
}

/** A JSON-Schema `anyOf` node: a value matching any one of the listed
 * schemas is accepted. */
export interface AnyOfProp {
  readonly anyOf: Prop[]
}

export type Prop = {
Expand Down
17 changes: 5 additions & 12 deletions src/token-counts.test.ts
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
import OpenAI from "openai"
import { loadEnv } from "vite"
import { describe, test } from "vitest"
import { promptTokensEstimate } from "./token-counts.js"
import { promptTokensEstimate, type Prompt } from "./token-counts.js"

const mode = process.env["NODE_ENV"] ?? "development"
Object.assign(process.env, loadEnv(mode, process.cwd(), ""))
Expand All @@ -19,20 +19,13 @@ declare module "openai" {
}
}

type Message = OpenAI.Chat.ChatCompletionMessageParam
type Function = OpenAI.Chat.ChatCompletionCreateParams.Function
type FunctionCall = OpenAI.Chat.ChatCompletionFunctionCallOption
interface Example {
messages: Message[]
// eslint-disable-next-line @typescript-eslint/ban-types
functions?: Function[]
function_call?: FunctionCall | "auto" | "none"
tokens: number
validate?: boolean
interface Example extends Prompt {
readonly tokens: number
readonly validate?: boolean
}

/** These match <https://github.com/openai/openai-cookbook/blob/main/examples/How_to_count_tokens_with_tiktoken.ipynb>. */
const TEST_CASES: Example[] = [
// these match https://github.com/openai/openai-cookbook/blob/main/examples/How_to_count_tokens_with_tiktoken.ipynb
{
messages: [
{
Expand Down
25 changes: 13 additions & 12 deletions src/token-counts.ts
Original file line number Diff line number Diff line change
Expand Up @@ -5,28 +5,29 @@ import type { FunctionDef } from "./functions.js"
import { formatFunctionDefinitions } from "./functions.js"

// Short aliases for OpenAI's chat-completion parameter types. Named
// `OpenAIFunction` (not `Function`) to avoid shadowing the banned global
// `Function` type.
type Message = OpenAI.Chat.ChatCompletionMessageParam
type OpenAIFunction = OpenAI.Chat.ChatCompletionCreateParams.Function
type FunctionCall = OpenAI.Chat.ChatCompletionFunctionCallOption

let encoder: Tiktoken | undefined

/** OpenAI prompt data */
/** OpenAI prompt data */
export interface Prompt {
  /** OpenAI chat messages */
  readonly messages: Message[]
  /** Function-calling mode: a specific function to invoke, `"auto"` to let
   * the model choose, or `"none"` to disable function calls. */
  readonly function_call?: FunctionCall | "auto" | "none"
  /** OpenAI function definitions */
  readonly functions?: OpenAIFunction[]
}

/**
* Estimate the number of tokens a prompt will use.
* @param prompt OpenAI prompt data
* @param prompt.messages OpenAI chat messages
* @param prompt.functions OpenAI function definitions
* @returns An estimate for the number of tokens the prompt will use
* Estimates the number of tokens a prompt will use.
* @returns An estimate for the number of tokens the prompt will use.
*/
export function promptTokensEstimate({
messages,
functions,
function_call,
}: {
messages: Message[]
// eslint-disable-next-line @typescript-eslint/ban-types
functions?: Function[]
function_call?: FunctionCall | "auto" | "none"
}): number {
}: Prompt): number {
// It appears that if functions are present, the first system message is padded with a trailing newline. This
// was inferred by trying lots of combinations of messages and functions and seeing what the token counts were.
let paddedSystem = false
Expand Down

0 comments on commit 75fc306

Please sign in to comment.