diff --git a/packages/agent/src/server.ts b/packages/agent/src/server.ts index 31c014290bc..bb77e1303e9 100644 --- a/packages/agent/src/server.ts +++ b/packages/agent/src/server.ts @@ -6,7 +6,7 @@ import { generateMessageResponse, generateObject, messageCompletionFooter, - ModelClass, + AsyncHandlerType, stringToUuid, type Content, type Media, @@ -166,7 +166,7 @@ export class CharacterServer { return; } - const transcription = await runtime.call(ModelClass.TRANSCRIPTION, { + const transcription = await runtime.call(AsyncHandlerType.TRANSCRIPTION, { file: fs.createReadStream(audioFile.path), model: "whisper-1", }); @@ -276,7 +276,7 @@ export class CharacterServer { const response = await generateMessageResponse({ runtime: runtime, context, - modelClass: ModelClass.TEXT_LARGE, + handlerType: AsyncHandlerType.TEXT_LARGE, }); if (!response) { @@ -487,7 +487,7 @@ export class CharacterServer { const response = await generateObject({ runtime, context, - modelClass: ModelClass.TEXT_SMALL, + handlerType: AsyncHandlerType.TEXT_SMALL, schema: hyperfiOutSchema, }); @@ -790,7 +790,7 @@ export class CharacterServer { const response = await generateMessageResponse({ runtime: runtime, context, - modelClass: ModelClass.TEXT_LARGE, + handlerType: AsyncHandlerType.TEXT_LARGE, }); // save response to memory @@ -823,7 +823,7 @@ export class CharacterServer { // Get the text to convert to speech const textToSpeak = response.text; - const speechResponse = await runtime.call(ModelClass.TRANSCRIPTION, { + const speechResponse = await runtime.call(AsyncHandlerType.TEXT_TO_SPEECH, { text: textToSpeak, runtime, }); diff --git a/packages/core/src/generation.ts b/packages/core/src/generation.ts index 3b3cf2164e0..c8241bfe32b 100644 --- a/packages/core/src/generation.ts +++ b/packages/core/src/generation.ts @@ -5,13 +5,13 @@ import { parseJSONObjectFromText } from "./parsing.ts"; import { type Content, type IAgentRuntime, - ModelClass + AsyncHandlerType } from "./types.ts"; interface
GenerateObjectOptions { runtime: IAgentRuntime; context: string; - modelClass: ModelClass; + handlerType: AsyncHandlerType; output?: "object" | "array" | "enum" | "no-schema" | undefined; schema?: ZodSchema; schemaName?: string; @@ -99,7 +99,7 @@ export async function trimTokens( if (maxTokens <= 0) throw new Error("maxTokens must be positive"); try { - const tokens = await runtime.call(ModelClass.TEXT_TOKENIZER_ENCODE, { context }); + const tokens = await runtime.call(AsyncHandlerType.TEXT_TOKENIZER_ENCODE, { context }); // If already within limits, return unchanged if (tokens.length <= maxTokens) { @@ -110,7 +110,7 @@ export async function trimTokens( const truncatedTokens = tokens.slice(-maxTokens); // Decode back to text - js-tiktoken decode() returns a string directly - return await runtime.call(ModelClass.TEXT_TOKENIZER_DECODE, { tokens: truncatedTokens }); + return await runtime.call(AsyncHandlerType.TEXT_TOKENIZER_DECODE, { tokens: truncatedTokens }); } catch (error) { logger.error("Error in trimTokens:", error); // Return truncated string if tokenization fails @@ -122,16 +122,16 @@ export async function trimTokens( export async function generateText({ runtime, context, - modelClass = ModelClass.TEXT_SMALL, + handlerType = AsyncHandlerType.TEXT_SMALL, stopSequences = [], }: { runtime: IAgentRuntime; context: string; - modelClass: ModelClass; + handlerType: AsyncHandlerType; stopSequences?: string[]; customSystemPrompt?: string; }): Promise { - const text = await runtime.call(modelClass, { + const text = await runtime.call(handlerType, { runtime, context, stopSequences, @@ -143,19 +143,19 @@ export async function generateText({ export async function generateTextArray({ runtime, context, - modelClass = ModelClass.TEXT_SMALL, + handlerType = AsyncHandlerType.TEXT_SMALL, stopSequences, }: { runtime: IAgentRuntime; context: string; - modelClass: ModelClass; + handlerType: AsyncHandlerType; stopSequences?: string[]; }): Promise { const result = await 
withRetry(async () => { const result = await generateObject({ runtime, context, - modelClass, + handlerType, schema: z.array(z.string()), stopSequences, }); @@ -169,14 +169,14 @@ export async function generateTextArray({ async function generateEnum({ runtime, context, - modelClass = ModelClass.TEXT_SMALL, + handlerType = AsyncHandlerType.TEXT_SMALL, enumValues, functionName, stopSequences, }: { runtime: IAgentRuntime; context: string; - modelClass: ModelClass; + handlerType: AsyncHandlerType; enumValues: Array; functionName: string; stopSequences?: string[]; @@ -189,7 +189,7 @@ async function generateEnum({ const result = await generateObject({ runtime, context, - modelClass, + handlerType, output: "enum", enum: enumValues, mode: "json", @@ -206,12 +206,12 @@ async function generateEnum({ export async function generateShouldRespond({ runtime, context, - modelClass = ModelClass.TEXT_SMALL, + handlerType = AsyncHandlerType.TEXT_SMALL, stopSequences, }: { runtime: IAgentRuntime; context: string; - modelClass: ModelClass; + handlerType: AsyncHandlerType; stopSequences?: string[]; }): Promise<"RESPOND" | "IGNORE" | "STOP" | null> { const RESPONSE_VALUES = ["RESPOND", "IGNORE", "STOP"] as string[]; @@ -219,7 +219,7 @@ export async function generateShouldRespond({ const result = await generateEnum({ runtime, context, - modelClass, + handlerType, enumValues: RESPONSE_VALUES, functionName: "generateShouldRespond", stopSequences, @@ -231,12 +231,12 @@ export async function generateShouldRespond({ export async function generateTrueOrFalse({ runtime, context = "", - modelClass = ModelClass.TEXT_SMALL, + handlerType = AsyncHandlerType.TEXT_SMALL, stopSequences, }: { runtime: IAgentRuntime; context: string; - modelClass: ModelClass; + handlerType: AsyncHandlerType; stopSequences?: string[]; }): Promise { const BOOL_VALUES = ["true", "false"]; @@ -244,7 +244,7 @@ export async function generateTrueOrFalse({ const result = await generateEnum({ runtime, context, - modelClass, + 
handlerType, enumValues: BOOL_VALUES, functionName: "generateTrueOrFalse", stopSequences, @@ -257,7 +257,7 @@ export async function generateTrueOrFalse({ export const generateObject = async ({ runtime, context, - modelClass = ModelClass.TEXT_SMALL, + handlerType = AsyncHandlerType.TEXT_SMALL, stopSequences, }: GenerateObjectOptions): Promise => { if (!context) { @@ -266,29 +266,29 @@ export const generateObject = async ({ throw new Error(errorMessage); } - const { object } = await runtime.call(modelClass, { + const { object } = await runtime.call(handlerType, { runtime, context, - modelClass, + handlerType, stopSequences, object: true, }); - logger.debug(`Received Object response from ${modelClass} model.`); + logger.debug(`Received Object response from ${handlerType} model.`); return object; }; export async function generateObjectArray({ runtime, context, - modelClass = ModelClass.TEXT_SMALL, + handlerType = AsyncHandlerType.TEXT_SMALL, schema, schemaName, schemaDescription, }: { runtime: IAgentRuntime; context: string; - modelClass: ModelClass; + handlerType: AsyncHandlerType; schema?: ZodSchema; schemaName?: string; schemaDescription?: string; @@ -300,7 +300,7 @@ export async function generateObjectArray({ const result = await generateObject({ runtime, context, - modelClass, + handlerType, output: "array", schema, schemaName, @@ -313,18 +313,18 @@ export async function generateObjectArray({ export async function generateMessageResponse({ runtime, context, - modelClass = ModelClass.TEXT_SMALL, + handlerType = AsyncHandlerType.TEXT_SMALL, stopSequences, }: { runtime: IAgentRuntime; context: string; - modelClass: ModelClass; + handlerType: AsyncHandlerType; stopSequences?: string[]; }): Promise { logger.debug("Context:", context); return await withRetry(async () => { - const text = await runtime.call(modelClass, { + const text = await runtime.call(handlerType, { runtime, context, stop: stopSequences, @@ -369,7 +369,7 @@ export const generateImage = async ( }> => { 
return await withRetry( async () => { - const result = await runtime.call(ModelClass.IMAGE, data); + const result = await runtime.call(AsyncHandlerType.IMAGE, data); return { success: true, data: result.images, @@ -391,7 +391,7 @@ export const generateCaption = async ( description: string; }> => { const { imageUrl } = data; - const resp = await runtime.call(ModelClass.IMAGE_DESCRIPTION, imageUrl); + const resp = await runtime.call(AsyncHandlerType.IMAGE_DESCRIPTION, imageUrl); return { title: resp.title.trim(), diff --git a/packages/core/src/knowledge.ts b/packages/core/src/knowledge.ts index e7df1c797e0..bca059e871f 100644 --- a/packages/core/src/knowledge.ts +++ b/packages/core/src/knowledge.ts @@ -1,7 +1,7 @@ import { splitChunks } from "./parsing.ts"; import logger from "./logger.ts"; import type { AgentRuntime } from "./runtime.ts"; -import { type KnowledgeItem, type Memory, ModelClass, type UUID } from "./types.ts"; +import { type KnowledgeItem, type Memory, AsyncHandlerType, type UUID } from "./types.ts"; import { stringToUuid } from "./uuid.ts"; async function get( @@ -31,7 +31,7 @@ async function get( return []; } - const embedding = await runtime.call(ModelClass.TEXT_EMBEDDING, processed); + const embedding = await runtime.call(AsyncHandlerType.TEXT_EMBEDDING, processed); const fragments = await runtime.knowledgeManager.searchMemories( { embedding, @@ -69,7 +69,7 @@ async function set( chunkSize = 512, bleed = 20 ) { - const embedding = await runtime.call(ModelClass.TEXT_EMBEDDING, null); + const embedding = await runtime.call(AsyncHandlerType.TEXT_EMBEDDING, null); await runtime.documentsManager.createMemory({ id: item.id, agentId: runtime.agentId, @@ -84,7 +84,7 @@ async function set( const fragments = await splitChunks(preprocessed, chunkSize, bleed); for (const fragment of fragments) { - const embedding = await runtime.call(ModelClass.TEXT_EMBEDDING, fragment); + const embedding = await runtime.call(AsyncHandlerType.TEXT_EMBEDDING, fragment); await 
runtime.knowledgeManager.createMemory({ // We namespace the knowledge base uuid to avoid id // collision with the document above. diff --git a/packages/core/src/memory.ts b/packages/core/src/memory.ts index d85fe471917..cd240223793 100644 --- a/packages/core/src/memory.ts +++ b/packages/core/src/memory.ts @@ -1,6 +1,6 @@ import logger from "./logger.ts"; import { - ModelClass, + AsyncHandlerType, type IAgentRuntime, type IMemoryManager, type Memory, @@ -66,11 +66,11 @@ export class MemoryManager implements IMemoryManager { try { // Generate embedding from text content - memory.embedding = await this.runtime.call(ModelClass.TEXT_EMBEDDING, memoryText); + memory.embedding = await this.runtime.call(AsyncHandlerType.TEXT_EMBEDDING, memoryText); } catch (error) { logger.error("Failed to generate embedding:", error); // Fallback to zero vector if embedding fails - memory.embedding = await this.runtime.call(ModelClass.TEXT_EMBEDDING, null); + memory.embedding = await this.runtime.call(AsyncHandlerType.TEXT_EMBEDDING, null); } return memory; @@ -185,7 +185,7 @@ export class MemoryManager implements IMemoryManager { logger.log("Creating Memory", memory.id, memory.content.text); if(!memory.embedding){ - const embedding = await this.runtime.call(ModelClass.TEXT_EMBEDDING, null); + const embedding = await this.runtime.call(AsyncHandlerType.TEXT_EMBEDDING, null); memory.embedding = embedding; } diff --git a/packages/core/src/runtime.ts b/packages/core/src/runtime.ts index 14d317d5edc..02b6ffea624 100644 --- a/packages/core/src/runtime.ts +++ b/packages/core/src/runtime.ts @@ -39,7 +39,7 @@ import { type IMemoryManager, type KnowledgeItem, type Memory, - ModelClass, + AsyncHandlerType, type Plugin, type Provider, type State, @@ -248,7 +248,7 @@ export class AgentRuntime implements IAgentRuntime { private readonly knowledgeRoot: string; private readonly memoryManagerService: MemoryManagerService; - handlers = new Map Promise)[]>(); + handlers = new Map Promise)[]>(); 
constructor(opts: { conversationLength?: number; @@ -348,8 +348,8 @@ export class AgentRuntime implements IAgentRuntime { } } if (plugin.handlers) { - for (const [modelClass, handler] of Object.entries(plugin.handlers)) { - this.registerHandler(modelClass as ModelClass, handler as (params: any) => Promise); + for (const [handlerType, handler] of Object.entries(plugin.handlers)) { + this.registerHandler(handlerType as AsyncHandlerType, handler as (params: any) => Promise); } } if (plugin.services) { @@ -588,7 +588,7 @@ export class AgentRuntime implements IAgentRuntime { const result = await generateText({ runtime: this, context, - modelClass: ModelClass.TEXT_SMALL, + handlerType: AsyncHandlerType.TEXT_SMALL, }); console.log("***** result", result); @@ -1271,14 +1271,14 @@ Text: ${attachment.text} return this.memoryManagerService.getKnowledgeManager(); } - registerHandler(handlerType: ModelClass, handler: (params: any) => Promise) { + registerHandler(handlerType: AsyncHandlerType, handler: (params: any) => Promise) { if (!this.handlers.has(handlerType)) { this.handlers.set(handlerType, []); } this.handlers.get(handlerType)?.push(handler); } - getHandler(handlerType: ModelClass): ((params: any) => Promise) | undefined { + getHandler(handlerType: AsyncHandlerType): ((params: any) => Promise) | undefined { const handlers = this.handlers.get(handlerType); if (!handlers?.length) { return undefined; @@ -1286,7 +1286,7 @@ Text: ${attachment.text} return handlers[0]; } - async call(handlerType: ModelClass, params: any): Promise { + async call(handlerType: AsyncHandlerType, params: any): Promise { const handler = this.getHandler(handlerType); if (!handler) { throw new Error(`No handler found for delegate type: ${handlerType}`); diff --git a/packages/core/src/types.ts b/packages/core/src/types.ts index 9b4af94bf89..b9603f079e7 100644 --- a/packages/core/src/types.ts +++ b/packages/core/src/types.ts @@ -116,7 +116,7 @@ export interface Goal { /** * Model size/type 
classification */ -export enum ModelClass { +export enum AsyncHandlerType { SMALL = "text_small", // for backwards compatibility MEDIUM = "text_large", // for backwards compatibility LARGE = "text_large", // for backwards compatibility @@ -1023,9 +1023,9 @@ export interface IAgentRuntime { updateRecentMessageState(state: State): Promise; - call(modelClass: ModelClass, params: T): Promise; - registerHandler(modelClass: ModelClass, handler: (params: any) => Promise): void; - getHandler(modelClass: ModelClass): ((params: any) => Promise) | undefined; + call(handlerType: AsyncHandlerType, params: T): Promise; + registerHandler(handlerType: AsyncHandlerType, handler: (params: any) => Promise): void; + getHandler(handlerType: AsyncHandlerType): ((params: any) => Promise) | undefined; } export enum LoggingLevel { @@ -1061,18 +1061,18 @@ export interface ChunkRow { export type GenerateTextParams = { runtime: IAgentRuntime; context: string; - modelClass: ModelClass; + handlerType: AsyncHandlerType; stopSequences?: string[]; }; export interface TokenizeTextParams { context: string; - modelClass: ModelClass; + handlerType: AsyncHandlerType; } export interface DetokenizeTextParams { tokens: number[]; - modelClass: ModelClass; + handlerType: AsyncHandlerType; } // Inventory diff --git a/packages/plugin-anthropic/src/index.ts b/packages/plugin-anthropic/src/index.ts index a619d7ccf0c..6b7a67499a2 100644 --- a/packages/plugin-anthropic/src/index.ts +++ b/packages/plugin-anthropic/src/index.ts @@ -2,7 +2,7 @@ import { anthropic } from "@ai-sdk/anthropic"; import type { Plugin } from "@elizaos/core"; import { GenerateTextParams, - ModelClass + AsyncHandlerType } from "@elizaos/core"; import { generateText } from "ai"; import { z } from "zod"; @@ -38,7 +38,7 @@ export const anthropicPlugin: Plugin = { } }, handlers: { - [ModelClass.TEXT_SMALL]: async ({ + [AsyncHandlerType.TEXT_SMALL]: async ({ runtime, context, stopSequences = [], @@ -60,7 +60,7 @@ export const anthropicPlugin: 
Plugin = { }, // TEXT_LARGE generation using Anthropics (e.g. using a "claude-3" model). - [ModelClass.TEXT_LARGE]: async ({ + [AsyncHandlerType.TEXT_LARGE]: async ({ runtime, context, stopSequences = [], diff --git a/packages/plugin-bootstrap/__tests__/actions/continue.test.ts b/packages/plugin-bootstrap/__tests__/actions/continue.test.ts index f94981c8bfd..f8f8d684e9c 100644 --- a/packages/plugin-bootstrap/__tests__/actions/continue.test.ts +++ b/packages/plugin-bootstrap/__tests__/actions/continue.test.ts @@ -1,6 +1,6 @@ import { describe, expect, it, vi, beforeEach } from 'vitest'; import { continueAction } from '../../src/actions/continue'; -import { composeContext, generateMessageResponse, generateTrueOrFalse, ModelClass } from '@elizaos/core'; +import { composeContext, generateMessageResponse, generateTrueOrFalse, AsyncHandlerType } from '@elizaos/core'; vi.mock('@elizaos/core', () => ({ composeContext: vi.fn(), @@ -14,7 +14,7 @@ vi.mock('@elizaos/core', () => ({ }, messageCompletionFooter: '\nResponse format:\n```\n{"content": {"text": string}}\n```', booleanFooter: '\nResponse format: YES or NO', - ModelClass: { + AsyncHandlerType: { SMALL: 'small', LARGE: 'large' } diff --git a/packages/plugin-bootstrap/__tests__/evaluators/fact.test.ts b/packages/plugin-bootstrap/__tests__/evaluators/fact.test.ts index 70c11bf1da7..99d9dab095e 100644 --- a/packages/plugin-bootstrap/__tests__/evaluators/fact.test.ts +++ b/packages/plugin-bootstrap/__tests__/evaluators/fact.test.ts @@ -21,7 +21,7 @@ vi.mock('@elizaos/core', () => ({ } }) })), - ModelClass: { + AsyncHandlerType: { SMALL: 'small' } })); diff --git a/packages/plugin-bootstrap/__tests__/evaluators/goal.test.ts b/packages/plugin-bootstrap/__tests__/evaluators/goal.test.ts index 2b278321f9d..7c39c4e33bd 100644 --- a/packages/plugin-bootstrap/__tests__/evaluators/goal.test.ts +++ b/packages/plugin-bootstrap/__tests__/evaluators/goal.test.ts @@ -7,7 +7,7 @@ vi.mock('@elizaos/core', () => ({ generateText: vi.fn(), 
getGoals: vi.fn(), parseJsonArrayFromText: vi.fn(), - ModelClass: { + AsyncHandlerType: { SMALL: 'small' } })); diff --git a/packages/plugin-bootstrap/src/actions/continue.ts b/packages/plugin-bootstrap/src/actions/continue.ts index c2635322f37..c3874cff299 100644 --- a/packages/plugin-bootstrap/src/actions/continue.ts +++ b/packages/plugin-bootstrap/src/actions/continue.ts @@ -8,7 +8,7 @@ import { type HandlerCallback, type IAgentRuntime, type Memory, - ModelClass, + AsyncHandlerType, type State, } from "@elizaos/core"; @@ -167,7 +167,7 @@ export const continueAction: Action = { const response = await generateTrueOrFalse({ context: shouldRespondContext, - modelClass: ModelClass.TEXT_SMALL, + handlerType: AsyncHandlerType.TEXT_SMALL, runtime, }); @@ -194,7 +194,7 @@ export const continueAction: Action = { const response = await generateMessageResponse({ runtime, context, - modelClass: ModelClass.TEXT_LARGE, + handlerType: AsyncHandlerType.TEXT_LARGE, }); response.inReplyTo = message.id; diff --git a/packages/plugin-bootstrap/src/actions/followRoom.ts b/packages/plugin-bootstrap/src/actions/followRoom.ts index cdac3ad7226..f67e3eda3a7 100644 --- a/packages/plugin-bootstrap/src/actions/followRoom.ts +++ b/packages/plugin-bootstrap/src/actions/followRoom.ts @@ -6,7 +6,7 @@ import { type ActionExample, type IAgentRuntime, type Memory, - ModelClass, + AsyncHandlerType, type State, } from "@elizaos/core"; @@ -67,7 +67,7 @@ export const followRoomAction: Action = { const response = await generateTrueOrFalse({ runtime, context: shouldFollowContext, - modelClass: ModelClass.TEXT_LARGE, + handlerType: AsyncHandlerType.TEXT_LARGE, }); return response; diff --git a/packages/plugin-bootstrap/src/actions/muteRoom.ts b/packages/plugin-bootstrap/src/actions/muteRoom.ts index d1ae5e5d4f6..04549abebc9 100644 --- a/packages/plugin-bootstrap/src/actions/muteRoom.ts +++ b/packages/plugin-bootstrap/src/actions/muteRoom.ts @@ -6,7 +6,7 @@ import { type ActionExample, type IAgentRuntime, 
type Memory, - ModelClass, + AsyncHandlerType, type State, } from "@elizaos/core"; @@ -54,7 +54,7 @@ export const muteRoomAction: Action = { const response = await generateTrueOrFalse({ runtime, context: shouldMuteContext, - modelClass: ModelClass.TEXT_LARGE, + handlerType: AsyncHandlerType.TEXT_LARGE, }); return response; diff --git a/packages/plugin-bootstrap/src/actions/unfollowRoom.ts b/packages/plugin-bootstrap/src/actions/unfollowRoom.ts index 0598445aecb..c999a60c382 100644 --- a/packages/plugin-bootstrap/src/actions/unfollowRoom.ts +++ b/packages/plugin-bootstrap/src/actions/unfollowRoom.ts @@ -6,7 +6,7 @@ import { type ActionExample, type IAgentRuntime, type Memory, - ModelClass, + AsyncHandlerType, type State, } from "@elizaos/core"; @@ -52,7 +52,7 @@ export const unfollowRoomAction: Action = { const response = await generateTrueOrFalse({ runtime, context: shouldUnfollowContext, - modelClass: ModelClass.TEXT_LARGE, + handlerType: AsyncHandlerType.TEXT_LARGE, }); return response; diff --git a/packages/plugin-bootstrap/src/actions/unmuteRoom.ts b/packages/plugin-bootstrap/src/actions/unmuteRoom.ts index 308cec076da..b111202e8eb 100644 --- a/packages/plugin-bootstrap/src/actions/unmuteRoom.ts +++ b/packages/plugin-bootstrap/src/actions/unmuteRoom.ts @@ -6,7 +6,7 @@ import { type ActionExample, type IAgentRuntime, type Memory, - ModelClass, + AsyncHandlerType, type State, } from "@elizaos/core"; @@ -52,7 +52,7 @@ export const unmuteRoomAction: Action = { const response = generateTrueOrFalse({ context: shouldUnmuteContext, runtime, - modelClass: ModelClass.TEXT_LARGE, + handlerType: AsyncHandlerType.TEXT_LARGE, }); return response; diff --git a/packages/plugin-bootstrap/src/evaluators/fact.ts b/packages/plugin-bootstrap/src/evaluators/fact.ts index 4ed418569de..0118806749e 100644 --- a/packages/plugin-bootstrap/src/evaluators/fact.ts +++ b/packages/plugin-bootstrap/src/evaluators/fact.ts @@ -6,7 +6,7 @@ import { type ActionExample, type IAgentRuntime, type 
Memory, - ModelClass, + AsyncHandlerType, type Evaluator, } from "@elizaos/core"; @@ -75,7 +75,7 @@ async function handler(runtime: IAgentRuntime, message: Memory) { const facts = await generateObjectArray({ runtime, context, - modelClass: ModelClass.TEXT_LARGE, + handlerType: AsyncHandlerType.TEXT_LARGE, schema: claimSchema, schemaName: "Fact", schemaDescription: "A fact about the user or the world", diff --git a/packages/plugin-bootstrap/src/evaluators/goal.ts b/packages/plugin-bootstrap/src/evaluators/goal.ts index be7a1f91f11..7ab9c175b78 100644 --- a/packages/plugin-bootstrap/src/evaluators/goal.ts +++ b/packages/plugin-bootstrap/src/evaluators/goal.ts @@ -5,7 +5,7 @@ import { parseJsonArrayFromText } from "@elizaos/core"; import { type IAgentRuntime, type Memory, - ModelClass, + AsyncHandlerType, type Goal, type State, type Evaluator, @@ -64,7 +64,7 @@ async function handler( const response = await generateText({ runtime, context, - modelClass: ModelClass.TEXT_LARGE, + handlerType: AsyncHandlerType.TEXT_LARGE, }); // Parse the JSON response to extract goal updates diff --git a/packages/plugin-bootstrap/src/providers/facts.ts b/packages/plugin-bootstrap/src/providers/facts.ts index cb4d120a477..df2aa133ef4 100644 --- a/packages/plugin-bootstrap/src/providers/facts.ts +++ b/packages/plugin-bootstrap/src/providers/facts.ts @@ -3,7 +3,7 @@ import { formatMessages, IAgentRuntime, MemoryManager, - ModelClass + AsyncHandlerType } from "@elizaos/core"; import { formatFacts } from "../evaluators/fact.ts"; @@ -16,7 +16,7 @@ const factsProvider: Provider = { actors: state?.actorsData, }); - const embedding = await runtime.call(ModelClass.TEXT_EMBEDDING, recentMessages); + const embedding = await runtime.call(AsyncHandlerType.TEXT_EMBEDDING, recentMessages); const memoryManager = new MemoryManager({ runtime, diff --git a/packages/plugin-discord/src/actions/chat_with_attachments.ts b/packages/plugin-discord/src/actions/chat_with_attachments.ts index 
899bd495f1c..3855709e387 100644 --- a/packages/plugin-discord/src/actions/chat_with_attachments.ts +++ b/packages/plugin-discord/src/actions/chat_with_attachments.ts @@ -3,7 +3,7 @@ import { type ActionExample, composeContext, type Content, generateText, type HandlerCallback, type IAgentRuntime, type Memory, - ModelClass, parseJSONObjectFromText, type State, trimTokens + AsyncHandlerType, parseJSONObjectFromText, type State, trimTokens } from "@elizaos/core"; import * as fs from "fs"; @@ -49,7 +49,7 @@ const getAttachmentIds = async ( const response = await generateText({ runtime, context, - modelClass: ModelClass.TEXT_SMALL, + handlerType: AsyncHandlerType.TEXT_SMALL, }); console.log("response", response); // try parsing to a json object @@ -197,7 +197,7 @@ const summarizeAction = { const summary = await generateText({ runtime, context, - modelClass: ModelClass.TEXT_SMALL, + handlerType: AsyncHandlerType.TEXT_SMALL, }); currentSummary = currentSummary + "\n" + summary; diff --git a/packages/plugin-discord/src/actions/download_media.ts b/packages/plugin-discord/src/actions/download_media.ts index 68c6b3c9539..28af3057431 100644 --- a/packages/plugin-discord/src/actions/download_media.ts +++ b/packages/plugin-discord/src/actions/download_media.ts @@ -9,7 +9,7 @@ import { type IAgentRuntime, type IVideoService, type Memory, - ModelClass, + AsyncHandlerType, ServiceType, type State, } from "@elizaos/core"; @@ -47,7 +47,7 @@ const getMediaUrl = async ( const response = await generateText({ runtime, context, - modelClass: ModelClass.TEXT_SMALL, + handlerType: AsyncHandlerType.TEXT_SMALL, }); const parsedResponse = parseJSONObjectFromText(response) as { diff --git a/packages/plugin-discord/src/actions/joinvoice.ts b/packages/plugin-discord/src/actions/joinvoice.ts index 7d9d9a699fd..e4521d85cbe 100644 --- a/packages/plugin-discord/src/actions/joinvoice.ts +++ b/packages/plugin-discord/src/actions/joinvoice.ts @@ -9,7 +9,7 @@ import { type Memory, type State, 
generateText, - ModelClass, + AsyncHandlerType, } from "@elizaos/core"; import { type Channel, @@ -170,7 +170,7 @@ You should only respond with the name of the voice channel or none, no commentar const responseContent = await generateText({ runtime, context, - modelClass: ModelClass.TEXT_SMALL, + handlerType: AsyncHandlerType.TEXT_SMALL, }); runtime.databaseAdapter.log({ diff --git a/packages/plugin-discord/src/actions/summarize_conversation.ts b/packages/plugin-discord/src/actions/summarize_conversation.ts index 7dc086bc0a1..6a13d7ee9c3 100644 --- a/packages/plugin-discord/src/actions/summarize_conversation.ts +++ b/packages/plugin-discord/src/actions/summarize_conversation.ts @@ -10,7 +10,7 @@ import { type IAgentRuntime, type Media, type Memory, - ModelClass, + AsyncHandlerType, type State, } from "@elizaos/core"; export const summarizationTemplate = `# Summarized so far (we are adding to this) @@ -58,7 +58,7 @@ const getDateRange = async ( const response = await generateText({ runtime, context, - modelClass: ModelClass.TEXT_SMALL, + handlerType: AsyncHandlerType.TEXT_SMALL, }); console.log("response", response); // try parsing to a json object @@ -273,7 +273,7 @@ const summarizeAction = { const summary = await generateText({ runtime, context, - modelClass: ModelClass.TEXT_SMALL, + handlerType: AsyncHandlerType.TEXT_SMALL, }); currentSummary = currentSummary + "\n" + summary; diff --git a/packages/plugin-discord/src/actions/transcribe_media.ts b/packages/plugin-discord/src/actions/transcribe_media.ts index 2693367fae8..0f59b53999f 100644 --- a/packages/plugin-discord/src/actions/transcribe_media.ts +++ b/packages/plugin-discord/src/actions/transcribe_media.ts @@ -8,7 +8,7 @@ import { type HandlerCallback, type IAgentRuntime, type Memory, - ModelClass, + AsyncHandlerType, type State, } from "@elizaos/core"; @@ -47,7 +47,7 @@ const getMediaAttachmentId = async ( const response = await generateText({ runtime, context, - modelClass: ModelClass.TEXT_SMALL, + 
handlerType: AsyncHandlerType.TEXT_SMALL, }); console.log("response", response); diff --git a/packages/plugin-discord/src/attachments.ts b/packages/plugin-discord/src/attachments.ts index c120da98bfe..b0ca201c042 100644 --- a/packages/plugin-discord/src/attachments.ts +++ b/packages/plugin-discord/src/attachments.ts @@ -5,7 +5,7 @@ import { type IPdfService, type IVideoService, type Media, - ModelClass, + AsyncHandlerType, ServiceType, } from "@elizaos/core"; import { type Attachment, Collection } from "discord.js"; @@ -36,7 +36,7 @@ async function generateSummary( const response = await generateText({ runtime, context: prompt, - modelClass: ModelClass.TEXT_SMALL, + handlerType: AsyncHandlerType.TEXT_SMALL, }); const parsedResponse = parseJSONObjectFromText(response); @@ -133,7 +133,7 @@ export class AttachmentManager { throw new Error("Unsupported audio/video format"); } - const transcription = await this.runtime.call(ModelClass.TRANSCRIPTION, audioBuffer); + const transcription = await this.runtime.call(AsyncHandlerType.TRANSCRIPTION, audioBuffer); const { title, description } = await generateSummary( this.runtime, transcription diff --git a/packages/plugin-discord/src/index.ts b/packages/plugin-discord/src/index.ts index f82fae50f33..b4fdcd92a31 100644 --- a/packages/plugin-discord/src/index.ts +++ b/packages/plugin-discord/src/index.ts @@ -1,6 +1,6 @@ import { logger, - ModelClass, + AsyncHandlerType, stringToUuid, type Character, type Client as ElizaClient, diff --git a/packages/plugin-discord/src/messages.ts b/packages/plugin-discord/src/messages.ts index 687f6ea29a6..efe047799be 100644 --- a/packages/plugin-discord/src/messages.ts +++ b/packages/plugin-discord/src/messages.ts @@ -2,7 +2,7 @@ import { composeContext, composeRandomUser, type Content, generateMessageResponse, generateShouldRespond, type HandlerCallback, type IAgentRuntime, type IBrowserService, type IVideoService, logger, type Media, - type Memory, ModelClass, ServiceType, + type Memory, 
AsyncHandlerType, ServiceType, type State, stringToUuid, type UUID } from "@elizaos/core"; import { @@ -386,7 +386,7 @@ export class MessageManager { // For voice channels, use text-to-speech for the error message const errorMessage = "Sorry, I had a glitch. What was that?"; - const audioStream = await this.runtime.call(ModelClass.TEXT_TO_SPEECH, errorMessage) + const audioStream = await this.runtime.call(AsyncHandlerType.TEXT_TO_SPEECH, errorMessage) await this.voiceManager.playAudioStream(userId, audioStream); } else { @@ -856,7 +856,7 @@ export class MessageManager { const response = await generateShouldRespond({ runtime: this.runtime, context: shouldRespondContext, - modelClass: ModelClass.TEXT_SMALL, + handlerType: AsyncHandlerType.TEXT_SMALL, }); if (response === "RESPOND") { @@ -892,7 +892,7 @@ export class MessageManager { const response = await generateMessageResponse({ runtime: this.runtime, context, - modelClass: ModelClass.TEXT_LARGE, + handlerType: AsyncHandlerType.TEXT_LARGE, }); if (!response) { diff --git a/packages/plugin-discord/src/utils.ts b/packages/plugin-discord/src/utils.ts index 69dac38ff43..df14a9516f5 100644 --- a/packages/plugin-discord/src/utils.ts +++ b/packages/plugin-discord/src/utils.ts @@ -1,6 +1,6 @@ import { type IAgentRuntime, - ModelClass, + AsyncHandlerType, logger, generateText, trimTokens, @@ -66,7 +66,7 @@ export async function generateSummary( const response = await generateText({ runtime, context: prompt, - modelClass: ModelClass.TEXT_SMALL, + handlerType: AsyncHandlerType.TEXT_SMALL, }); const parsedResponse = parseJSONObjectFromText(response); diff --git a/packages/plugin-discord/src/voice.ts b/packages/plugin-discord/src/voice.ts index 4383a471d5c..465b224782f 100644 --- a/packages/plugin-discord/src/voice.ts +++ b/packages/plugin-discord/src/voice.ts @@ -16,7 +16,7 @@ import { type HandlerCallback, type IAgentRuntime, type Memory, - ModelClass, + AsyncHandlerType, type State, type UUID, composeContext, @@ -587,7 
+587,7 @@ export class VoiceManager extends EventEmitter { const wavBuffer = await this.convertOpusToWav(inputBuffer); console.log("Starting transcription..."); - const transcriptionText = await this.runtime.call(ModelClass.TRANSCRIPTION, wavBuffer) + const transcriptionText = await this.runtime.call(AsyncHandlerType.TRANSCRIPTION, wavBuffer) function isValidTranscription(text: string): boolean { if (!text || text.includes("[BLANK_AUDIO]")) return false; return true; @@ -733,7 +733,7 @@ export class VoiceManager extends EventEmitter { ); state = await this.runtime.updateRecentMessageState(state); - const responseStream = await this.runtime.call(ModelClass.TEXT_TO_SPEECH, content.text) + const responseStream = await this.runtime.call(AsyncHandlerType.TEXT_TO_SPEECH, content.text) if (responseStream) { await this.playAudioStream( @@ -834,7 +834,7 @@ export class VoiceManager extends EventEmitter { const response = await generateShouldRespond({ runtime: this.runtime, context: shouldRespondContext, - modelClass: ModelClass.TEXT_SMALL, + handlerType: AsyncHandlerType.TEXT_SMALL, }); if (response === "RESPOND") { @@ -862,7 +862,7 @@ export class VoiceManager extends EventEmitter { const response = await generateMessageResponse({ runtime: this.runtime, context, - modelClass: ModelClass.TEXT_SMALL, + handlerType: AsyncHandlerType.TEXT_SMALL, }); response.source = "discord"; diff --git a/packages/plugin-local-ai/src/index.ts b/packages/plugin-local-ai/src/index.ts index 8456221863f..280d4c2beb3 100644 --- a/packages/plugin-local-ai/src/index.ts +++ b/packages/plugin-local-ai/src/index.ts @@ -1,4 +1,4 @@ -import { ModelClass, Plugin, logger } from "@elizaos/core"; +import { AsyncHandlerType, Plugin, logger } from "@elizaos/core"; import { AutoTokenizer } from "@huggingface/transformers"; import { EmbeddingModel, FlagEmbedding } from "fastembed"; import path from "node:path"; @@ -136,7 +136,7 @@ export const localAIPlugin: Plugin = { handlers: { // Text generation for small 
tasks - [ModelClass.TEXT_SMALL]: async ({ + [AsyncHandlerType.TEXT_SMALL]: async ({ context, stopSequences = [], runtime, @@ -155,7 +155,7 @@ export const localAIPlugin: Plugin = { }, // Text generation for larger tasks - [ModelClass.TEXT_LARGE]: async ({ + [AsyncHandlerType.TEXT_LARGE]: async ({ context, stopSequences = [], runtime, @@ -172,7 +172,7 @@ export const localAIPlugin: Plugin = { }, // Text embedding using FastEmbed - [ModelClass.TEXT_EMBEDDING]: async ({ text }) => { + [AsyncHandlerType.TEXT_EMBEDDING]: async ({ text }) => { try { return await localAIManager.generateEmbedding(text); } catch (error) { @@ -182,7 +182,7 @@ export const localAIPlugin: Plugin = { }, // Text tokenization using AutoTokenizer - [ModelClass.TEXT_TOKENIZER_ENCODE]: async ({ text }) => { + [AsyncHandlerType.TEXT_TOKENIZER_ENCODE]: async ({ text }) => { try { if (!localAIManager.tokenizer) { throw new Error("Tokenizer not initialized"); @@ -195,7 +195,7 @@ export const localAIPlugin: Plugin = { }, // Text detokenization using AutoTokenizer - [ModelClass.TEXT_TOKENIZER_DECODE]: async ({ tokens }) => { + [AsyncHandlerType.TEXT_TOKENIZER_DECODE]: async ({ tokens }) => { try { if (!localAIManager.tokenizer) { throw new Error("Tokenizer not initialized"); @@ -208,7 +208,7 @@ export const localAIPlugin: Plugin = { }, // Image description using local Florence model - [ModelClass.IMAGE_DESCRIPTION]: async (imageUrlw) => { + [AsyncHandlerType.IMAGE_DESCRIPTION]: async (imageUrlw) => { try { // TODO: Add florence diff --git a/packages/plugin-node/src/services/browser.ts b/packages/plugin-node/src/services/browser.ts index 407585fe98a..dfdc9681dc4 100644 --- a/packages/plugin-node/src/services/browser.ts +++ b/packages/plugin-node/src/services/browser.ts @@ -2,7 +2,7 @@ import { generateText, type IBrowserService, trimTokens } from "@elizaos/core"; import { parseJSONObjectFromText } from "@elizaos/core"; import { Service } from "@elizaos/core"; import { settings } from "@elizaos/core"; 
-import { type IAgentRuntime, ModelClass, ServiceType } from "@elizaos/core"; +import { type IAgentRuntime, AsyncHandlerType, ServiceType } from "@elizaos/core"; import { stringToUuid } from "@elizaos/core"; import { PlaywrightBlocker } from "@cliqz/adblocker-playwright"; import CaptchaSolver from "capsolver-npm"; @@ -38,7 +38,7 @@ async function generateSummary( const response = await generateText({ runtime, context: prompt, - modelClass: ModelClass.SMALL, + handlerType: AsyncHandlerType.TEXT_SMALL, }); const parsedResponse = parseJSONObjectFromText(response); diff --git a/packages/plugin-node/src/services/video.ts b/packages/plugin-node/src/services/video.ts index c4879b9ed1c..f8d8e7bad36 100644 --- a/packages/plugin-node/src/services/video.ts +++ b/packages/plugin-node/src/services/video.ts @@ -6,7 +6,7 @@ import { ServiceType, stringToUuid, logger, - ModelClass, + AsyncHandlerType, } from "@elizaos/core"; import ffmpeg from "fluent-ffmpeg"; import fs from "fs"; @@ -339,7 +339,7 @@ export class VideoService extends Service implements IVideoService { logger.log("Starting transcription..."); const startTime = Date.now(); - const transcript = await runtime.call(ModelClass.TRANSCRIPTION, audioBuffer); + const transcript = await runtime.call(AsyncHandlerType.TRANSCRIPTION, audioBuffer); const endTime = Date.now(); logger.log( diff --git a/packages/plugin-openai/src/index.ts b/packages/plugin-openai/src/index.ts index 1b924a32c9e..d1a5ef227f8 100644 --- a/packages/plugin-openai/src/index.ts +++ b/packages/plugin-openai/src/index.ts @@ -3,16 +3,16 @@ import type { Plugin } from "@elizaos/core"; import { DetokenizeTextParams, GenerateTextParams, - ModelClass, + AsyncHandlerType, TokenizeTextParams, } from "@elizaos/core"; import { generateText as aiGenerateText } from "ai"; import { encodingForModel, type TiktokenModel } from "js-tiktoken"; import { z } from "zod"; -async function tokenizeText(model: ModelClass, context: string) { +async function tokenizeText(model: 
AsyncHandlerType, context: string) { const modelName = - model === ModelClass.TEXT_SMALL + model === AsyncHandlerType.TEXT_SMALL ? process.env.OPENAI_SMALL_MODEL ?? process.env.SMALL_MODEL ?? "gpt-4o-mini" @@ -22,9 +22,9 @@ async function tokenizeText(model: ModelClass, context: string) { return tokens; } -async function detokenizeText(model: ModelClass, tokens: number[]) { +async function detokenizeText(model: AsyncHandlerType, tokens: number[]) { const modelName = - model === ModelClass.TEXT_SMALL + model === AsyncHandlerType.TEXT_SMALL ? process.env.OPENAI_SMALL_MODEL ?? process.env.SMALL_MODEL ?? "gpt-4o-mini" @@ -78,7 +78,7 @@ export const openaiPlugin: Plugin = { } }, handlers: { - [ModelClass.TEXT_EMBEDDING]: async (text: string | null) => { + [AsyncHandlerType.TEXT_EMBEDDING]: async (text: string | null) => { if (!text) { // Return zero vector of appropriate length for model return new Array(1536).fill(0); @@ -106,19 +106,19 @@ export const openaiPlugin: Plugin = { const data = await response.json() as { data: [{ embedding: number[] }] }; return data.data[0].embedding; }, - [ModelClass.TEXT_TOKENIZER_ENCODE]: async ({ + [AsyncHandlerType.TEXT_TOKENIZER_ENCODE]: async ({ context, - modelClass = ModelClass.TEXT_LARGE, + handlerType = AsyncHandlerType.TEXT_LARGE, }: TokenizeTextParams) => { - return await tokenizeText(modelClass ?? ModelClass.TEXT_LARGE, context); + return await tokenizeText(handlerType ?? AsyncHandlerType.TEXT_LARGE, context); }, - [ModelClass.TEXT_TOKENIZER_DECODE]: async ({ + [AsyncHandlerType.TEXT_TOKENIZER_DECODE]: async ({ tokens, - modelClass = ModelClass.TEXT_LARGE, + handlerType = AsyncHandlerType.TEXT_LARGE, }: DetokenizeTextParams) => { - return await detokenizeText(modelClass ?? ModelClass.TEXT_LARGE, tokens); + return await detokenizeText(handlerType ?? 
AsyncHandlerType.TEXT_LARGE, tokens); }, - [ModelClass.TEXT_SMALL]: async ({ + [AsyncHandlerType.TEXT_SMALL]: async ({ runtime, context, stopSequences = [], @@ -154,7 +154,7 @@ export const openaiPlugin: Plugin = { return openaiResponse; }, - [ModelClass.TEXT_LARGE]: async ({ + [AsyncHandlerType.TEXT_LARGE]: async ({ runtime, context, stopSequences = [], @@ -192,7 +192,7 @@ export const openaiPlugin: Plugin = { return openaiResponse; }, - [ModelClass.IMAGE]: async (params: { + [AsyncHandlerType.IMAGE]: async (params: { prompt: string; n?: number; size?: string; @@ -218,7 +218,7 @@ export const openaiPlugin: Plugin = { const typedData = data as { data: { url: string }[] }; return typedData.data; }, - [ModelClass.IMAGE_DESCRIPTION]: async (imageUrl: string) => { + [AsyncHandlerType.IMAGE_DESCRIPTION]: async (imageUrl: string) => { console.log("IMAGE_DESCRIPTION") const baseURL = process.env.OPENAI_BASE_URL ?? "https://api.openai.com/v1"; @@ -264,7 +264,7 @@ export const openaiPlugin: Plugin = { description: descriptionMatch[1] }; }, - [ModelClass.TRANSCRIPTION]: async (params: { + [AsyncHandlerType.TRANSCRIPTION]: async (params: { audioFile: any; language?: string; }) => { diff --git a/packages/plugin-telegram/src/messageManager.ts b/packages/plugin-telegram/src/messageManager.ts index f3b33b64fc6..fc1df591c34 100644 --- a/packages/plugin-telegram/src/messageManager.ts +++ b/packages/plugin-telegram/src/messageManager.ts @@ -9,7 +9,7 @@ import { type IAgentRuntime, type Media, type Memory, - ModelClass, + AsyncHandlerType, type State, stringToUuid, type UUID, @@ -453,7 +453,7 @@ export class MessageManager { if (imageUrl) { const { title, description } = - await this.runtime.call(ModelClass.IMAGE_DESCRIPTION, imageUrl) + await this.runtime.call(AsyncHandlerType.IMAGE_DESCRIPTION, imageUrl) return { description: `[Image: ${title}\n${description}]` }; } } catch (error) { @@ -505,7 +505,7 @@ export class MessageManager { const response = await generateShouldRespond({ 
runtime: this.runtime, context: shouldRespondContext, - modelClass: ModelClass.SMALL, + handlerType: AsyncHandlerType.TEXT_SMALL, }); return response === "RESPOND"; @@ -669,7 +669,7 @@ export class MessageManager { const response = await generateMessageResponse({ runtime: this.runtime, context, - modelClass: ModelClass.LARGE, + handlerType: AsyncHandlerType.TEXT_LARGE, }); if (!response) { diff --git a/packages/plugin-twitter/src/SttTtsSpacesPlugin.ts b/packages/plugin-twitter/src/SttTtsSpacesPlugin.ts index 8e26359bb42..5b8ad7f2589 100644 --- a/packages/plugin-twitter/src/SttTtsSpacesPlugin.ts +++ b/packages/plugin-twitter/src/SttTtsSpacesPlugin.ts @@ -11,7 +11,7 @@ import { logger, generateMessageResponse, generateShouldRespond, - ModelClass, + AsyncHandlerType, stringToUuid } from "@elizaos/core"; import type { @@ -288,7 +288,7 @@ export class SttTtsPlugin implements Plugin { const wavBuffer = await this.convertPcmToWavInMemory(merged, 48000); // Whisper STT - const sttText = await this.runtime.call(ModelClass.TRANSCRIPTION, wavBuffer); + const sttText = await this.runtime.call(AsyncHandlerType.TRANSCRIPTION, wavBuffer); logger.log( `[SttTtsPlugin] Transcription result: "${sttText}"`, @@ -487,7 +487,7 @@ export class SttTtsPlugin implements Plugin { const response = await generateMessageResponse({ runtime: this.runtime, context, - modelClass: ModelClass.SMALL, + handlerType: AsyncHandlerType.TEXT_SMALL, }); response.source = "discord"; @@ -585,7 +585,7 @@ export class SttTtsPlugin implements Plugin { const response = await generateShouldRespond({ runtime: this.runtime, context: shouldRespondContext, - modelClass: ModelClass.SMALL, + handlerType: AsyncHandlerType.TEXT_SMALL, }); if (response === "RESPOND") { diff --git a/packages/plugin-twitter/src/interactions.ts b/packages/plugin-twitter/src/interactions.ts index 2c287a1e339..82e2100cf5b 100644 --- a/packages/plugin-twitter/src/interactions.ts +++ b/packages/plugin-twitter/src/interactions.ts @@ -9,7 +9,7 @@ 
import { type HandlerCallback, type IAgentRuntime, type Memory, - ModelClass, + AsyncHandlerType, type State, stringToUuid, logger, @@ -348,7 +348,7 @@ export class TwitterInteractionClient { const imageDescriptionsArray = []; try{ for (const photo of tweet.photos) { - const description = await this.runtime.call(ModelClass.IMAGE_DESCRIPTION, photo.url) + const description = await this.runtime.call(AsyncHandlerType.IMAGE_DESCRIPTION, photo.url) imageDescriptionsArray.push(description); } } catch (error) { @@ -414,7 +414,7 @@ export class TwitterInteractionClient { const shouldRespond = await generateShouldRespond({ runtime: this.runtime, context: shouldRespondContext, - modelClass: ModelClass.MEDIUM, + handlerType: AsyncHandlerType.TEXT_SMALL, }); // Promise<"RESPOND" | "IGNORE" | "STOP" | null> { @@ -453,7 +453,7 @@ export class TwitterInteractionClient { const response = await generateMessageResponse({ runtime: this.runtime, context, - modelClass: ModelClass.LARGE, + handlerType: AsyncHandlerType.TEXT_LARGE, }); const removeQuotes = (str: string) => diff --git a/packages/plugin-twitter/src/post.ts b/packages/plugin-twitter/src/post.ts index 3386b3085b2..74199a106cc 100644 --- a/packages/plugin-twitter/src/post.ts +++ b/packages/plugin-twitter/src/post.ts @@ -6,7 +6,7 @@ import { generateText, type IAgentRuntime, logger, - ModelClass, + AsyncHandlerType, parseJSONObjectFromText, postActionResponseFooter, stringToUuid, @@ -449,7 +449,7 @@ export class TwitterPostClient { const response = await generateText({ runtime: this.runtime, context, - modelClass: ModelClass.SMALL, + handlerType: AsyncHandlerType.TEXT_SMALL, }); const rawTweetContent = cleanJsonResponse(response); @@ -556,7 +556,7 @@ export class TwitterPostClient { const response = await generateText({ runtime: this.runtime, context: options?.context || context, - modelClass: ModelClass.SMALL, + handlerType: AsyncHandlerType.TEXT_SMALL, }); logger.log("generate tweet content response:\n" + response); @@ -679,7
+679,7 @@ export class TwitterPostClient { const actionResponse = await generateTweetActions({ runtime: this.runtime, context: actionContext, - modelClass: ModelClass.SMALL, + handlerType: AsyncHandlerType.TEXT_SMALL, }); if (!actionResponse) { @@ -832,7 +832,7 @@ export class TwitterPostClient { "Processing images in tweet for context" ); for (const photo of tweet.photos) { - const description = await this.runtime.call(ModelClass.IMAGE_DESCRIPTION, photo.url); + const description = await this.runtime.call(AsyncHandlerType.IMAGE_DESCRIPTION, photo.url); imageDescriptions.push(description); } } @@ -1040,7 +1040,7 @@ export class TwitterPostClient { if (tweet.photos?.length > 0) { logger.log("Processing images in tweet for context"); for (const photo of tweet.photos) { - const description = await this.runtime.call(ModelClass.IMAGE_DESCRIPTION, photo.url) + const description = await this.runtime.call(AsyncHandlerType.IMAGE_DESCRIPTION, photo.url) imageDescriptions.push(description); } } diff --git a/packages/plugin-twitter/src/search.ts b/packages/plugin-twitter/src/search.ts index 5cf170830ab..7bc43f0d44a 100644 --- a/packages/plugin-twitter/src/search.ts +++ b/packages/plugin-twitter/src/search.ts @@ -1,6 +1,6 @@ import { composeContext, type Content, generateMessageResponse, generateText, type HandlerCallback, - type IAgentRuntime, logger, messageCompletionFooter, ModelClass, type State, stringToUuid + type IAgentRuntime, logger, messageCompletionFooter, AsyncHandlerType, type State, stringToUuid } from "@elizaos/core"; import type { ClientBase } from "./base.ts"; import { SearchMode } from "./client"; @@ -135,7 +135,7 @@ export class TwitterSearchClient { const mostInterestingTweetResponse = await generateText({ runtime: this.runtime, context: prompt, - modelClass: ModelClass.SMALL, + handlerType: AsyncHandlerType.TEXT_SMALL, }); const tweetId = mostInterestingTweetResponse.trim(); @@ -219,7 +219,7 @@ export class TwitterSearchClient { // Generate image 
descriptions using GPT-4 vision API const imageDescriptions = []; for (const photo of selectedTweet.photos) { - const description = await this.runtime.call(ModelClass.IMAGE_DESCRIPTION, photo.url) + const description = await this.runtime.call(AsyncHandlerType.IMAGE_DESCRIPTION, photo.url) imageDescriptions.push(description); } @@ -249,7 +249,7 @@ export class TwitterSearchClient { const responseContent = await generateMessageResponse({ runtime: this.runtime, context, - modelClass: ModelClass.LARGE, + handlerType: AsyncHandlerType.TEXT_LARGE, }); responseContent.inReplyTo = message.id; diff --git a/packages/plugin-twitter/src/spaces.ts b/packages/plugin-twitter/src/spaces.ts index dc3a6a41765..073e82a06b5 100644 --- a/packages/plugin-twitter/src/spaces.ts +++ b/packages/plugin-twitter/src/spaces.ts @@ -3,7 +3,7 @@ import { type IAgentRuntime, composeContext, generateText, - ModelClass, + AsyncHandlerType, type TwitterSpaceDecisionOptions, State, } from "@elizaos/core"; @@ -47,7 +47,7 @@ Only return the text, no additional formatting. 
const output = await generateText({ runtime, context, - modelClass: ModelClass.SMALL, + handlerType: AsyncHandlerType.TEXT_SMALL, }); return output.trim(); } catch (err) { @@ -99,7 +99,7 @@ Example: const response = await generateText({ runtime, context, - modelClass: ModelClass.SMALL, + handlerType: AsyncHandlerType.TEXT_SMALL, }); const topics = response .split(",") diff --git a/packages/plugin-twitter/src/utils.ts b/packages/plugin-twitter/src/utils.ts index d35f5ba4457..dcd35fab2e0 100644 --- a/packages/plugin-twitter/src/utils.ts +++ b/packages/plugin-twitter/src/utils.ts @@ -1,5 +1,5 @@ import type { Tweet } from "./client"; -import type { Content, IAgentRuntime, Memory, ModelClass, UUID } from "@elizaos/core"; +import type { Content, IAgentRuntime, Memory, AsyncHandlerType, UUID } from "@elizaos/core"; import { generateText, stringToUuid } from "@elizaos/core"; import type { ClientBase } from "./base"; import { logger } from "@elizaos/core"; @@ -497,11 +497,11 @@ export const parseActionResponseFromText = ( export async function generateTweetActions({ runtime, context, - modelClass, + handlerType, }: { runtime: IAgentRuntime; context: string; - modelClass: ModelClass; + handlerType: AsyncHandlerType; }): Promise { let retryDelay = 1000; while (true) { @@ -509,7 +509,7 @@ export async function generateTweetActions({ const response = await generateText({ runtime, context, - modelClass, + handlerType, }); logger.debug( "Received response from generateText for tweet actions:",