Skip to content

Commit

Permalink
AsyncHandlerType
Browse files Browse the repository at this point in the history
  • Loading branch information
lalalune committed Feb 9, 2025
1 parent 26629f8 commit 125d3b8
Show file tree
Hide file tree
Showing 39 changed files with 168 additions and 168 deletions.
12 changes: 6 additions & 6 deletions packages/agent/src/server.ts
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@ import {
generateMessageResponse,
generateObject,
messageCompletionFooter,
ModelClass,
AsyncHandlerType,
stringToUuid,
type Content,
type Media,
Expand Down Expand Up @@ -166,7 +166,7 @@ export class CharacterServer {
return;
}

const transcription = await runtime.call(ModelClass.TRANSCRIPTION, {
const transcription = await runtime.call(AsyncHandlerType.TRANSCRIPTION, {
file: fs.createReadStream(audioFile.path),

Check failure

Code scanning / CodeQL

Uncontrolled data used in path expression High

This path depends on a user-provided value.
model: "whisper-1",
});
Expand Down Expand Up @@ -276,7 +276,7 @@ export class CharacterServer {
const response = await generateMessageResponse({
runtime: runtime,
context,
modelClass: ModelClass.TEXT_LARGE,
handlerType: AsyncHandlerType.TEXT_LARGE,
});

if (!response) {
Expand Down Expand Up @@ -487,7 +487,7 @@ export class CharacterServer {
const response = await generateObject({
runtime,
context,
modelClass: ModelClass.TEXT_SMALL,
handlerType: AsyncHandlerType.TEXT_SMALL,
schema: hyperfiOutSchema,
});

Expand Down Expand Up @@ -790,7 +790,7 @@ export class CharacterServer {
const response = await generateMessageResponse({
runtime: runtime,
context,
modelClass: ModelClass.TEXT_LARGE,
handlerType: AsyncHandlerType.TEXT_LARGE,
});

// save response to memory
Expand Down Expand Up @@ -823,7 +823,7 @@ export class CharacterServer {
// Get the text to convert to speech
const textToSpeak = response.text;

const speechResponse = await runtime.call(ModelClass.TRANSCRIPTION, {
const speechResponse = await runtime.call(AsyncHandlerType.TRANSCRIPTION, {
text: textToSpeak,
runtime,
});
Expand Down
62 changes: 31 additions & 31 deletions packages/core/src/generation.ts
Original file line number Diff line number Diff line change
Expand Up @@ -5,13 +5,13 @@ import { parseJSONObjectFromText } from "./parsing.ts";
import {
type Content,
type IAgentRuntime,
ModelClass
AsyncHandlerType
} from "./types.ts";

interface GenerateObjectOptions {
runtime: IAgentRuntime;
context: string;
modelClass: ModelClass;
handlerType: AsyncHandlerType;
output?: "object" | "array" | "enum" | "no-schema" | undefined;
schema?: ZodSchema;
schemaName?: string;
Expand Down Expand Up @@ -99,7 +99,7 @@ export async function trimTokens(
if (maxTokens <= 0) throw new Error("maxTokens must be positive");

try {
const tokens = await runtime.call(ModelClass.TEXT_TOKENIZER_ENCODE, { context });
const tokens = await runtime.call(AsyncHandlerType.TEXT_TOKENIZER_ENCODE, { context });

// If already within limits, return unchanged
if (tokens.length <= maxTokens) {
Expand All @@ -110,7 +110,7 @@ export async function trimTokens(
const truncatedTokens = tokens.slice(-maxTokens);

// Decode back to text - js-tiktoken decode() returns a string directly
return await runtime.call(ModelClass.TEXT_TOKENIZER_DECODE, { tokens: truncatedTokens });
return await runtime.call(AsyncHandlerType.TEXT_TOKENIZER_DECODE, { tokens: truncatedTokens });
} catch (error) {
logger.error("Error in trimTokens:", error);
// Return truncated string if tokenization fails
Expand All @@ -122,16 +122,16 @@ export async function trimTokens(
export async function generateText({
runtime,
context,
modelClass = ModelClass.TEXT_SMALL,
handlerType = AsyncHandlerType.TEXT_SMALL,
stopSequences = [],
}: {
runtime: IAgentRuntime;
context: string;
modelClass: ModelClass;
handlerType: AsyncHandlerType;
stopSequences?: string[];
customSystemPrompt?: string;
}): Promise<string> {
const text = await runtime.call(modelClass, {
const text = await runtime.call(handlerType, {
runtime,
context,
stopSequences,
Expand All @@ -143,19 +143,19 @@ export async function generateText({
export async function generateTextArray({
runtime,
context,
modelClass = ModelClass.TEXT_SMALL,
handlerType = AsyncHandlerType.TEXT_SMALL,
stopSequences,
}: {
runtime: IAgentRuntime;
context: string;
modelClass: ModelClass;
handlerType: AsyncHandlerType;
stopSequences?: string[];
}): Promise<string[]> {
const result = await withRetry(async () => {
const result = await generateObject({
runtime,
context,
modelClass,
handlerType,
schema: z.array(z.string()),
stopSequences,
});
Expand All @@ -169,14 +169,14 @@ export async function generateTextArray({
async function generateEnum<T extends string>({
runtime,
context,
modelClass = ModelClass.TEXT_SMALL,
handlerType = AsyncHandlerType.TEXT_SMALL,
enumValues,
functionName,
stopSequences,
}: {
runtime: IAgentRuntime;
context: string;
modelClass: ModelClass;
handlerType: AsyncHandlerType;
enumValues: Array<T>;
functionName: string;
stopSequences?: string[];
Expand All @@ -189,7 +189,7 @@ async function generateEnum<T extends string>({
const result = await generateObject({
runtime,
context,
modelClass,
handlerType,
output: "enum",
enum: enumValues,
mode: "json",
Expand All @@ -206,20 +206,20 @@ async function generateEnum<T extends string>({
export async function generateShouldRespond({
runtime,
context,
modelClass = ModelClass.TEXT_SMALL,
handlerType = AsyncHandlerType.TEXT_SMALL,
stopSequences,
}: {
runtime: IAgentRuntime;
context: string;
modelClass: ModelClass;
handlerType: AsyncHandlerType;
stopSequences?: string[];
}): Promise<"RESPOND" | "IGNORE" | "STOP" | null> {
const RESPONSE_VALUES = ["RESPOND", "IGNORE", "STOP"] as string[];

const result = await generateEnum({
runtime,
context,
modelClass,
handlerType,
enumValues: RESPONSE_VALUES,
functionName: "generateShouldRespond",
stopSequences,
Expand All @@ -231,20 +231,20 @@ export async function generateShouldRespond({
export async function generateTrueOrFalse({
runtime,
context = "",
modelClass = ModelClass.TEXT_SMALL,
handlerType = AsyncHandlerType.TEXT_SMALL,
stopSequences,
}: {
runtime: IAgentRuntime;
context: string;
modelClass: ModelClass;
handlerType: AsyncHandlerType;
stopSequences?: string[];
}): Promise<boolean> {
const BOOL_VALUES = ["true", "false"];

const result = await generateEnum({
runtime,
context,
modelClass,
handlerType,
enumValues: BOOL_VALUES,
functionName: "generateTrueOrFalse",
stopSequences,
Expand All @@ -257,7 +257,7 @@ export async function generateTrueOrFalse({
export const generateObject = async ({
runtime,
context,
modelClass = ModelClass.TEXT_SMALL,
handlerType = AsyncHandlerType.TEXT_SMALL,
stopSequences,
}: GenerateObjectOptions): Promise<any> => {
if (!context) {
Expand All @@ -266,29 +266,29 @@ export const generateObject = async ({
throw new Error(errorMessage);
}

const { object } = await runtime.call(modelClass, {
const { object } = await runtime.call(handlerType, {
runtime,
context,
modelClass,
handlerType,
stopSequences,
object: true,
});

logger.debug(`Received Object response from ${modelClass} model.`);
logger.debug(`Received Object response from ${handlerType} model.`);
return object;
};

export async function generateObjectArray({
runtime,
context,
modelClass = ModelClass.TEXT_SMALL,
handlerType = AsyncHandlerType.TEXT_SMALL,
schema,
schemaName,
schemaDescription,
}: {
runtime: IAgentRuntime;
context: string;
modelClass: ModelClass;
handlerType: AsyncHandlerType;
schema?: ZodSchema;
schemaName?: string;
schemaDescription?: string;
Expand All @@ -300,7 +300,7 @@ export async function generateObjectArray({
const result = await generateObject({
runtime,
context,
modelClass,
handlerType,
output: "array",
schema,
schemaName,
Expand All @@ -313,18 +313,18 @@ export async function generateObjectArray({
export async function generateMessageResponse({
runtime,
context,
modelClass = ModelClass.TEXT_SMALL,
handlerType = AsyncHandlerType.TEXT_SMALL,
stopSequences,
}: {
runtime: IAgentRuntime;
context: string;
modelClass: ModelClass;
handlerType: AsyncHandlerType;
stopSequences?: string[];
}): Promise<Content> {
logger.debug("Context:", context);

return await withRetry(async () => {
const text = await runtime.call(modelClass, {
const text = await runtime.call(handlerType, {
runtime,
context,
stop: stopSequences,
Expand Down Expand Up @@ -369,7 +369,7 @@ export const generateImage = async (
}> => {
return await withRetry(
async () => {
const result = await runtime.call(ModelClass.IMAGE, data);
const result = await runtime.call(AsyncHandlerType.IMAGE, data);
return {
success: true,
data: result.images,
Expand All @@ -391,7 +391,7 @@ export const generateCaption = async (
description: string;
}> => {
const { imageUrl } = data;
const resp = await runtime.call(ModelClass.IMAGE_DESCRIPTION, imageUrl);
const resp = await runtime.call(AsyncHandlerType.IMAGE_DESCRIPTION, imageUrl);

return {
title: resp.title.trim(),
Expand Down
8 changes: 4 additions & 4 deletions packages/core/src/knowledge.ts
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
import { splitChunks } from "./parsing.ts";
import logger from "./logger.ts";
import type { AgentRuntime } from "./runtime.ts";
import { type KnowledgeItem, type Memory, ModelClass, type UUID } from "./types.ts";
import { type KnowledgeItem, type Memory, AsyncHandlerType, type UUID } from "./types.ts";
import { stringToUuid } from "./uuid.ts";

async function get(
Expand Down Expand Up @@ -31,7 +31,7 @@ async function get(
return [];
}

const embedding = await runtime.call(ModelClass.TEXT_EMBEDDING, processed);
const embedding = await runtime.call(AsyncHandlerType.TEXT_EMBEDDING, processed);
const fragments = await runtime.knowledgeManager.searchMemories(
{
embedding,
Expand Down Expand Up @@ -69,7 +69,7 @@ async function set(
chunkSize = 512,
bleed = 20
) {
const embedding = await runtime.call(ModelClass.TEXT_EMBEDDING, null);
const embedding = await runtime.call(AsyncHandlerType.TEXT_EMBEDDING, null);
await runtime.documentsManager.createMemory({
id: item.id,
agentId: runtime.agentId,
Expand All @@ -84,7 +84,7 @@ async function set(
const fragments = await splitChunks(preprocessed, chunkSize, bleed);

for (const fragment of fragments) {
const embedding = await runtime.call(ModelClass.TEXT_EMBEDDING, fragment);
const embedding = await runtime.call(AsyncHandlerType.TEXT_EMBEDDING, fragment);
await runtime.knowledgeManager.createMemory({
// We namespace the knowledge base uuid to avoid id
// collision with the document above.
Expand Down
8 changes: 4 additions & 4 deletions packages/core/src/memory.ts
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
import logger from "./logger.ts";
import {
ModelClass,
AsyncHandlerType,
type IAgentRuntime,
type IMemoryManager,
type Memory,
Expand Down Expand Up @@ -66,11 +66,11 @@ export class MemoryManager implements IMemoryManager {

try {
// Generate embedding from text content
memory.embedding = await this.runtime.call(ModelClass.TEXT_EMBEDDING, memoryText);
memory.embedding = await this.runtime.call(AsyncHandlerType.TEXT_EMBEDDING, memoryText);
} catch (error) {
logger.error("Failed to generate embedding:", error);
// Fallback to zero vector if embedding fails
memory.embedding = await this.runtime.call(ModelClass.TEXT_EMBEDDING, null);
memory.embedding = await this.runtime.call(AsyncHandlerType.TEXT_EMBEDDING, null);
}

return memory;
Expand Down Expand Up @@ -185,7 +185,7 @@ export class MemoryManager implements IMemoryManager {
logger.log("Creating Memory", memory.id, memory.content.text);

if(!memory.embedding){
const embedding = await this.runtime.call(ModelClass.TEXT_EMBEDDING, null);
const embedding = await this.runtime.call(AsyncHandlerType.TEXT_EMBEDDING, null);
memory.embedding = embedding;
}

Expand Down
Loading

0 comments on commit 125d3b8

Please sign in to comment.