
Commit

Merge branch 'v2-develop' into v2-add-character-methods
wtfsayo authored Feb 10, 2025
2 parents ae6c4c5 + 20a7b47 commit 2adae43
Showing 72 changed files with 2,708 additions and 4,276 deletions.
Binary file added bun.lockb
2 changes: 1 addition & 1 deletion package.json
@@ -11,7 +11,7 @@
"build:cli": "turbo run build --filter=./packages/cli && cd packages/cli && bun link",
"start": "turbo run start --filter=!./packages/docs",
"agent": "turbo run start --filter=@elizaos/agent",
"dev": "turbo run dev --filter=!./packages/docs",
"dev": "turbo run dev --filter=!./packages/docs --concurrency=20",
"release": "bun run build && bun format && npx lerna publish --no-private --force-publish",
"docker:build": "bash ./scripts/docker.sh build",
"docker:run": "bash ./scripts/docker.sh run",
14 changes: 7 additions & 7 deletions packages/agent/src/api.ts
@@ -1,11 +1,11 @@
import {
type AgentRuntime,
type Character,
logger,
getEnvVariable,
IAgentRuntime,
logger,
type UUID,
validateCharacterConfig,
validateUuid,
validateUuid
} from "@elizaos/core";
import bodyParser from "body-parser";
import cors from "cors";
@@ -46,7 +46,7 @@ function validateUUIDParams(
}

export function createApiRouter(
agents: Map<string, AgentRuntime>,
agents: Map<string, IAgentRuntime>,
directClient: CharacterServer
): express.Router {
const router = express.Router();
@@ -117,7 +117,7 @@ export function createApiRouter(
};
if (!agentId) return;

const agent: AgentRuntime = agents.get(agentId);
const agent: IAgentRuntime = agents.get(agentId);

if (agent) {
agent.stop();
@@ -134,7 +134,7 @@
};
if (!agentId) return;

let agent: AgentRuntime = agents.get(agentId);
let agent: IAgentRuntime = agents.get(agentId);

// update character
if (agent) {
@@ -338,7 +338,7 @@
router.post("/agents/:agentId/stop", async (req, res) => {
const agentId = req.params.agentId;
console.log("agentId", agentId);
const agent: AgentRuntime = agents.get(agentId);
const agent: IAgentRuntime = agents.get(agentId);

// update character
if (agent) {
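The API router is now typed against the IAgentRuntime interface rather than the concrete AgentRuntime class, so anything that satisfies the interface can be registered. A minimal sketch of the resulting lookup-and-stop pattern; the delete step is a hypothetical cleanup and is not shown in the hunks above:

import type { IAgentRuntime, UUID } from "@elizaos/core";

// Registry keyed by agent id; any object implementing IAgentRuntime will do,
// which also makes it easy to register lightweight fakes in tests.
const agents = new Map<string, IAgentRuntime>();

function stopAgent(agentId: UUID): boolean {
    const agent: IAgentRuntime | undefined = agents.get(agentId);
    if (!agent) return false;
    agent.stop();           // shut the runtime down, as in the /agents/:agentId/stop route
    agents.delete(agentId); // hypothetical cleanup step
    return true;
}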
6 changes: 3 additions & 3 deletions packages/agent/src/defaultCharacter.ts
@@ -4,11 +4,11 @@ export const defaultCharacter: Character = {
name: "Eliza",
username: "eliza",
plugins: [
"@elizaos/plugin-node",
"@elizaos/plugin-bootstrap",
"@elizaos/plugin-anthropic",
"@elizaos/plugin-openai",
"@elizaos/plugin-local-ai",
"@elizaos/plugin-discord",
"@elizaos/plugin-node",
"elizaos/plugin-telegram",
],
settings: {
secrets: {},
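The default character now lists its model-provider and client plugins explicitly. A compact sketch of a character using the same plugin names, assuming Character is exported from @elizaos/core as in the rest of this file; the name and username are placeholders and the remaining required Character fields are omitted:

import type { Character } from "@elizaos/core";

export const exampleCharacter: Character = {
    name: "Eliza",
    username: "eliza",
    plugins: [
        "@elizaos/plugin-anthropic", // model providers
        "@elizaos/plugin-openai",
        "@elizaos/plugin-local-ai",
        "@elizaos/plugin-discord",   // clients
        "@elizaos/plugin-telegram",
        "@elizaos/plugin-node",      // platform services
        "@elizaos/plugin-bootstrap",
    ],
    settings: { secrets: {} },
    // ...remaining Character fields omitted for brevity
};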
20 changes: 4 additions & 16 deletions packages/agent/src/server.ts
@@ -1,8 +1,6 @@
import {
composeContext,
logger,
generateCaption,
generateImage,
generateMessageResponse,
generateObject,
messageCompletionFooter,
@@ -166,10 +164,7 @@ export class CharacterServer {
return;
}

const transcription = await runtime.call(ModelClass.TRANSCRIPTION, {
file: fs.createReadStream(audioFile.path),
model: "whisper-1",
});
const transcription = await runtime.useModel(ModelClass.TRANSCRIPTION, fs.createReadStream(audioFile.path));

res.json(transcription);
}
@@ -591,15 +586,11 @@ export class CharacterServer {
res.status(404).send("Agent not found");
return;
}

const images = await generateImage({ ...req.body }, agent);
const images = await agent.useModel(ModelClass.IMAGE, { ...req.body });
const imagesRes: { image: string; caption: string }[] = [];
if (images.data && images.data.length > 0) {
for (let i = 0; i < images.data.length; i++) {
const caption = await generateCaption(
{ imageUrl: images.data[i] },
agent
);
const caption = await agent.useModel(ModelClass.IMAGE_DESCRIPTION, images.data[i]);
imagesRes.push({
image: images.data[i],
caption: caption.title,
@@ -823,10 +814,7 @@ export class CharacterServer {
// Get the text to convert to speech
const textToSpeak = response.text;

const speechResponse = await runtime.call(ModelClass.TRANSCRIPTION, {
text: textToSpeak,
runtime,
});
const speechResponse = await runtime.useModel(ModelClass.TEXT_TO_SPEECH, textToSpeak);

if (!speechResponse.ok) {
throw new Error(
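Taken together, these hunks replace the old runtime.call / generateImage / generateCaption helpers with direct runtime.useModel calls. A condensed sketch of the new call shapes, copied from the hunks above; ModelClass and IAgentRuntime are assumed to come from @elizaos/core, and error handling is omitted:

import fs from "node:fs";
import { ModelClass, type IAgentRuntime } from "@elizaos/core";

async function mediaRoundTrip(runtime: IAgentRuntime, audioPath: string, imageRequest: Record<string, unknown>, text: string) {
    // Speech-to-text: hand the audio stream straight to the transcription model.
    const transcription = await runtime.useModel(ModelClass.TRANSCRIPTION, fs.createReadStream(audioPath));

    // Image generation: the request body is forwarded as-is; the result exposes a data array of images.
    const images = await runtime.useModel(ModelClass.IMAGE, { ...imageRequest });

    // Captioning: describe the first generated image; the response carries at least a title.
    const caption = images.data?.length ? await runtime.useModel(ModelClass.IMAGE_DESCRIPTION, images.data[0]) : null;

    // Text-to-speech: plain text in, audio response out.
    const speech = await runtime.useModel(ModelClass.TEXT_TO_SPEECH, text);

    return { transcription, caption: caption?.title, speech };
}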
2 changes: 1 addition & 1 deletion packages/core/src/context.ts
@@ -39,7 +39,7 @@ export const composeContext = ({
template: TemplateType;
}) => {
const templateStr =
typeof template === "function" ? template({ state }) : template;
composeRandomUser(typeof template === "function" ? template({ state }) : template, 10);

const templateFunction = handlebars.compile(templateStr);
return templateFunction(state);
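composeContext now pipes the resolved template through composeRandomUser before compiling it with Handlebars, so callers no longer fill user placeholders themselves. A small usage sketch, assuming composeRandomUser substitutes {{user1}}-style placeholders (up to the ten passed above) with generated names:

import { composeContext } from "@elizaos/core";

// template may be a string or a function of state; either way the resolved
// string is passed through composeRandomUser and then through Handlebars.
const context = composeContext({
    state: { agentName: "Eliza" } as any, // minimal state object, for illustration only
    template: "{{agentName}} is talking to {{user1}} and {{user2}}.",
});
// context => "Eliza is talking to <generated name> and <generated name>."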
74 changes: 11 additions & 63 deletions packages/core/src/generation.ts
@@ -91,11 +91,15 @@ export async function trimTokens(
maxTokens: number,
runtime: IAgentRuntime
) {
if (!context) return "";
if (!context) throw new Error("Trim tokens received a null context");

// If the context is shorter than maxTokens / 5 characters, skip trimming
if (context.length < (maxTokens / 5)) return context;

if (maxTokens <= 0) throw new Error("maxTokens must be positive");

try {
const tokens = await runtime.call(ModelClass.TEXT_TOKENIZER_ENCODE, context);
const tokens = await runtime.useModel(ModelClass.TEXT_TOKENIZER_ENCODE, { context });

// If already within limits, return unchanged
if (tokens.length <= maxTokens) {
@@ -106,7 +110,7 @@
const truncatedTokens = tokens.slice(-maxTokens);

// Decode back to text - js-tiktoken decode() returns a string directly
return await runtime.call(ModelClass.TEXT_TOKENIZER_DECODE, truncatedTokens);
return await runtime.useModel(ModelClass.TEXT_TOKENIZER_DECODE, { tokens: truncatedTokens });
} catch (error) {
logger.error("Error in trimTokens:", error);
// Return truncated string if tokenization fails
@@ -127,7 +131,7 @@ export async function generateText({
stopSequences?: string[];
customSystemPrompt?: string;
}): Promise<string> {
const text = await runtime.call(modelClass, {
const text = await runtime.useModel(modelClass, {
runtime,
context,
stopSequences,
@@ -262,7 +266,7 @@ export const generateObject = async ({
throw new Error(errorMessage);
}

const { object } = await runtime.call(modelClass, {
const { object } = await runtime.useModel(modelClass, {
runtime,
context,
modelClass,
@@ -320,7 +324,7 @@ export async function generateMessageResponse({
logger.debug("Context:", context);

return await withRetry(async () => {
const text = await runtime.call(modelClass, {
const text = await runtime.useModel(modelClass, {
runtime,
context,
stop: stopSequences,
@@ -337,60 +341,4 @@

return parsedContent;
});
}

// ================ IMAGE-RELATED FUNCTIONS ================
export const generateImage = async (
data: {
prompt: string;
width: number;
height: number;
count?: number;
negativePrompt?: string;
numIterations?: number;
guidanceScale?: number;
seed?: number;
modelId?: string;
jobId?: string;
stylePreset?: string;
hideWatermark?: boolean;
safeMode?: boolean;
cfgScale?: number;
},
runtime: IAgentRuntime
): Promise<{
success: boolean;
data?: string[];
error?: any;
}> => {
return await withRetry(
async () => {
const result = await runtime.call(ModelClass.IMAGE, data);
return {
success: true,
data: result.images,
error: undefined,
};
},
{
maxRetries: 2,
initialDelay: 2000,
}
);
};

export const generateCaption = async (
data: { imageUrl: string },
runtime: IAgentRuntime
): Promise<{
title: string;
description: string;
}> => {
const { imageUrl } = data;
const resp = await runtime.call(ModelClass.IMAGE_DESCRIPTION, imageUrl);

return {
title: resp.title.trim(),
description: resp.description.trim(),
};
};
}
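trimTokens now refuses empty input and non-positive budgets, skips tokenization for obviously short strings, and routes encode/decode through runtime.useModel; the free-standing generateImage and generateCaption helpers are removed in favour of calling the model classes directly, as server.ts does above. A sketch of the same tokenizer round-trip, with argument shapes taken from the hunk:

import { ModelClass, type IAgentRuntime } from "@elizaos/core";

async function clampToBudget(runtime: IAgentRuntime, context: string, maxTokens: number): Promise<string> {
    if (!context) throw new Error("context must be a non-empty string");
    if (maxTokens <= 0) throw new Error("maxTokens must be positive");

    // Shortcut mirrored from trimTokens: even at one token per character,
    // a string shorter than maxTokens / 5 cannot exceed the budget.
    if (context.length < maxTokens / 5) return context;

    const tokens = await runtime.useModel(ModelClass.TEXT_TOKENIZER_ENCODE, { context });
    if (tokens.length <= maxTokens) return context;

    // Keep the most recent tokens and decode them back into text.
    const truncated = tokens.slice(-maxTokens);
    return await runtime.useModel(ModelClass.TEXT_TOKENIZER_DECODE, { tokens: truncated });
}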
6 changes: 3 additions & 3 deletions packages/core/src/knowledge.ts
@@ -31,7 +31,7 @@ async function get(
return [];
}

const embedding = await runtime.call(ModelClass.TEXT_EMBEDDING, processed);
const embedding = await runtime.useModel(ModelClass.TEXT_EMBEDDING, processed);
const fragments = await runtime.knowledgeManager.searchMemories(
{
embedding,
@@ -69,7 +69,7 @@
chunkSize = 512,
bleed = 20
) {
const embedding = await runtime.call(ModelClass.TEXT_EMBEDDING, null);
const embedding = await runtime.useModel(ModelClass.TEXT_EMBEDDING, null);
await runtime.documentsManager.createMemory({
id: item.id,
agentId: runtime.agentId,
@@ -84,7 +84,7 @@
const fragments = await splitChunks(preprocessed, chunkSize, bleed);

for (const fragment of fragments) {
const embedding = await runtime.call(ModelClass.TEXT_EMBEDDING, fragment);
const embedding = await runtime.useModel(ModelClass.TEXT_EMBEDDING, fragment);
await runtime.knowledgeManager.createMemory({
// We namespace the knowledge base uuid to avoid id
// collision with the document above.
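Both the retrieval and indexing paths in knowledge.ts now obtain embeddings from runtime.useModel(ModelClass.TEXT_EMBEDDING, ...). A sketch of the retrieval side; the search options beyond the embedding are cut off in the hunk above, so they are left as a placeholder here:

import { ModelClass, type IAgentRuntime } from "@elizaos/core";

async function searchKnowledge(runtime: IAgentRuntime, processedQuery: string) {
    // Embed the preprocessed query text with the runtime's embedding model.
    const embedding = await runtime.useModel(ModelClass.TEXT_EMBEDDING, processedQuery);

    // Find the closest knowledge fragments for that embedding.
    const fragments = await runtime.knowledgeManager.searchMemories({
        embedding,
        // ...remaining search options (count, thresholds, room scoping) omitted
    } as any);

    return fragments;
}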
6 changes: 3 additions & 3 deletions packages/core/src/memory.ts
@@ -66,11 +66,11 @@ export class MemoryManager implements IMemoryManager {

try {
// Generate embedding from text content
memory.embedding = await this.runtime.call(ModelClass.TEXT_EMBEDDING, memoryText);
memory.embedding = await this.runtime.useModel(ModelClass.TEXT_EMBEDDING, memoryText);
} catch (error) {
logger.error("Failed to generate embedding:", error);
// Fallback to zero vector if embedding fails
memory.embedding = await this.runtime.call(ModelClass.TEXT_EMBEDDING, null);
memory.embedding = await this.runtime.useModel(ModelClass.TEXT_EMBEDDING, null);
}

return memory;
@@ -185,7 +185,7 @@ export class MemoryManager implements IMemoryManager {
logger.log("Creating Memory", memory.id, memory.content.text);

if(!memory.embedding){
const embedding = await this.runtime.call(ModelClass.TEXT_EMBEDDING, null);
const embedding = await this.runtime.useModel(ModelClass.TEXT_EMBEDDING, null);
memory.embedding = embedding;
}

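MemoryManager now asks the runtime for embeddings and, when generation fails or no text is available, falls back to the null-input response (a zero vector per the comment above) so the memory can still be stored. A compact sketch of that fallback, using the same two call shapes as the hunk; the Memory type is assumed to be exported from @elizaos/core:

import { ModelClass, logger, type IAgentRuntime, type Memory } from "@elizaos/core";

async function ensureEmbedding(runtime: IAgentRuntime, memory: Memory, memoryText: string): Promise<Memory> {
    if (memory.embedding) return memory; // already embedded, nothing to do
    try {
        // Normal path: embed the memory's text content.
        memory.embedding = await runtime.useModel(ModelClass.TEXT_EMBEDDING, memoryText);
    } catch (error) {
        logger.error("Failed to generate embedding:", error);
        // Fallback mirrored from the hunk: a null input yields a zero vector.
        memory.embedding = await runtime.useModel(ModelClass.TEXT_EMBEDDING, null);
    }
    return memory;
}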