
Commit c05c93c

openai is working
lalalune committed Feb 8, 2025
2 parents: f379568 + 84702dd
Showing 14 changed files with 37 additions and 33 deletions.
Binary file modified: bun.lockb
7 changes: 7 additions & 0 deletions packages/agent/package.json
@@ -20,12 +20,19 @@
"@elizaos/plugin-bootstrap": "workspace:*",
"@elizaos/plugin-openai": "workspace:*",
"@elizaos/core": "workspace:*",
"@types/body-parser": "1.19.5",
"@types/cors": "2.8.17",
"@types/express": "5.0.0",
"cors": "2.8.5",
"express": "4.21.1",
"multer": "1.4.5-lts.1",
"readline": "1.3.0",
"ws": "8.18.0",
"yargs": "17.7.2",
"minipass": "7.1.2"
},
"devDependencies": {
"@types/multer": "^1.4.12",
"ts-node": "10.9.2",
"tsup": "8.3.5"
}
4 changes: 2 additions & 2 deletions packages/agent/src/defaultCharacter.ts
@@ -1,11 +1,11 @@
import { type Character } from "@elizaos/core";
- import { OpenAIPlugin } from "@elizaos/plugin-openai";
+ import { openaiPlugin } from "@elizaos/plugin-openai";

export const defaultCharacter: Character = {
name: "Eliza",
username: "eliza",
plugins: [
- OpenAIPlugin
+ openaiPlugin
],
settings: {
secrets: {},
3 changes: 2 additions & 1 deletion packages/agent/src/index.ts
@@ -46,7 +46,7 @@ export function parseArguments(): {
characters?: string;
} {
try {
- return yargs(process.argv.slice(3))
+ return yargs(process.argv.slice(2))
.option("character", {
type: "string",
description: "Path to the character JSON file",
@@ -184,6 +184,7 @@ async function loadCharacterTryPath(characterPath: string): Promise<Character> {
// Try different path resolutions in order
const pathsToTry = [
characterPath, // exact path as specified
+ path.resolve(process.cwd(), '..', '..', characterPath), // relative to root directory
path.resolve(process.cwd(), characterPath), // relative to cwd
path.resolve(process.cwd(), "agent", characterPath), // Add this
path.resolve(__dirname, characterPath), // relative to current script
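
For context on the slice(2) fix: process.argv[0] is the Node executable and process.argv[1] is the script path, so slicing at index 2 hands yargs exactly the user-supplied flags, while the old slice(3) silently dropped the first flag. A minimal sketch; the invocation and file names are illustrative, not taken from the repo:

import yargs from "yargs";

// Hypothetical invocation:
//   node dist/index.js --character ./characters/eliza.character.json
//
// process.argv          => ["/usr/bin/node", "dist/index.js", "--character", "./characters/eliza.character.json"]
// process.argv.slice(2) => ["--character", "./characters/eliza.character.json"]   // what yargs should see
// process.argv.slice(3) => ["./characters/eliza.character.json"]                  // loses the --character flag
const args = yargs(process.argv.slice(2))
    .option("character", { type: "string", description: "Path to the character JSON file" })
    .parseSync();
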
8 changes: 3 additions & 5 deletions packages/agent/src/server.ts
@@ -166,7 +166,7 @@ export class CharacterServer {
return;
}

- const transcription = await runtime.getModelProviderManager().call(ModelType.AUDIO_TRANSCRIPTION, {
+ const transcription = await runtime.call(ModelType.TRANSCRIPTION, {
file: fs.createReadStream(audioFile.path),
model: "whisper-1",
});
@@ -286,9 +286,7 @@ export class CharacterServer {
return;
}

- const zeroVector = runtime.getModelProviderManager().call(ModelType.EMBEDDING, {
-     text: null,
- });
+ const zeroVector = await runtime.call(ModelType.TEXT_EMBEDDING, text);

// save response to memory
const responseMessage: Memory = {
@@ -828,7 +826,7 @@ export class CharacterServer {
// Get the text to convert to speech
const textToSpeak = response.text;

- const speechResponse = await runtime.getModelProviderManager().call(ModelType.AUDIO_TRANSCRIPTION, {
+ const speechResponse = await runtime.call(ModelType.TRANSCRIPTION, {
text: textToSpeak,
runtime,
});
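
The pattern in these server.ts hunks, and in the core and plugin hunks below, is that model access moves from runtime.getModelProviderManager().call(...) to calling the runtime directly with a ModelType and a payload. A minimal sketch of the new call sites, assuming ModelType and IAgentRuntime are exported from @elizaos/core as the diff implies; the exact payload types are inferred, not confirmed by this commit:

import fs from "node:fs";
import { ModelType, type IAgentRuntime } from "@elizaos/core";

// Sketch only: audioFilePath and messageText stand in for the request data handled in server.ts.
async function exampleModelCalls(runtime: IAgentRuntime, audioFilePath: string, messageText: string) {
    // Transcription takes a file stream plus provider options (shape taken from the hunk above).
    const transcription = await runtime.call(ModelType.TRANSCRIPTION, {
        file: fs.createReadStream(audioFilePath),
        model: "whisper-1",
    });

    // Embeddings now take the raw text as the second argument (see knowledge.ts / memory.ts below).
    const embedding = await runtime.call(ModelType.TEXT_EMBEDDING, messageText);

    return { transcription, embedding };
}
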
7 changes: 5 additions & 2 deletions packages/core/src/generation.ts
@@ -80,7 +80,8 @@ export async function generateText({
}): Promise<string> {
logFunctionCall("generateText", runtime);

- const { text } = await runtime.call(modelType, {
+ const text = await runtime.call(modelType, {
+ runtime,
context,
stopSequences,
});
@@ -228,6 +229,7 @@ export const generateObject = async ({
}

const { object } = await runtime.call(modelType, {
+ runtime,
context,
stop: stopSequences,
});
@@ -285,7 +287,8 @@ export async function generateMessageResponse({
logger.debug("Context:", context);

return await withRetry(async () => {
- const { text } = await runtime.call(modelType, {
+ const text = await runtime.call(modelType, {
+ runtime,
context,
stop: stopSequences,
});
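
The destructuring change in these hunks means text-model handlers are now expected to resolve to a plain string rather than a { text } object. A sketch of a handler that satisfies that contract; the parameter type is a local stand-in, since this diff does not show the real registration types:

// Local stand-ins so the sketch is self-contained; the real types live in @elizaos/core.
type TextParams = { context: string; stopSequences?: string[] };

// Stand-in for a real provider request; only here so the sketch runs.
async function callProvider(context: string, stopSequences: string[]): Promise<string> {
    return `echo: ${context} (stops: ${stopSequences.join(",")})`;
}

// Callers can then write: const text = await runtime.call(modelType, { runtime, context, stopSequences });
const textHandler = async ({ context, stopSequences = [] }: TextParams): Promise<string> => {
    const completion = await callProvider(context, stopSequences);
    return completion; // plain string, no { text } wrapper — matching the new call sites above
};
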
12 changes: 3 additions & 9 deletions packages/core/src/knowledge.ts
@@ -31,9 +31,7 @@ async function get(
return [];
}

- const embedding = await runtime.call(ModelType.TEXT_EMBEDDING, {
-     text: processed,
- });
+ const embedding = await runtime.call(ModelType.TEXT_EMBEDDING, processed);
const fragments = await runtime.knowledgeManager.searchMemoriesByEmbedding(
embedding,
{
@@ -71,9 +69,7 @@ async function set(
chunkSize = 512,
bleed = 20
) {
- const embedding = await runtime.call(ModelType.TEXT_EMBEDDING, {
-     text: null,
- });
+ const embedding = await runtime.call(ModelType.TEXT_EMBEDDING, null);
await runtime.documentsManager.createMemory({
id: item.id,
agentId: runtime.agentId,
@@ -88,9 +84,7 @@
const fragments = await splitChunks(preprocessed, chunkSize, bleed);

for (const fragment of fragments) {
- const embedding = await runtime.call(ModelType.TEXT_EMBEDDING, {
-     text: fragment,
- });
+ const embedding = await runtime.call(ModelType.TEXT_EMBEDDING, fragment);
await runtime.knowledgeManager.createMemory({
// We namespace the knowledge base uuid to avoid id
// collision with the document above.
8 changes: 2 additions & 6 deletions packages/core/src/memory.ts
@@ -66,15 +66,11 @@ export class MemoryManager implements IMemoryManager {

try {
// Generate embedding from text content
- memory.embedding = await this.runtime.call(ModelType.TEXT_EMBEDDING, {
-     text: memoryText,
- });
+ memory.embedding = await this.runtime.call(ModelType.TEXT_EMBEDDING, memoryText);
} catch (error) {
logger.error("Failed to generate embedding:", error);
// Fallback to zero vector if embedding fails
- memory.embedding = await this.runtime.call(ModelType.TEXT_EMBEDDING, {
-     text: null,
- });
+ memory.embedding = await this.runtime.call(ModelType.TEXT_EMBEDDING, null);
}

return memory;
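
Across knowledge.ts, memory.ts, and facts.ts the TEXT_EMBEDDING payload drops the { text } wrapper: the second argument is now the raw string, and null requests a zero vector (the OpenAI plugin below returns new Array(1536).fill(0) for empty input). A condensed sketch of the retrieval flow under the new shape; the search options are illustrative, since the hunks above only show the (embedding, { ... }) call pattern:

import { ModelType, type IAgentRuntime } from "@elizaos/core";

async function searchKnowledge(runtime: IAgentRuntime, queryText: string, roomId: string) {
    // Embed the query directly — no { text: ... } wrapper anymore.
    const embedding = await runtime.call(ModelType.TEXT_EMBEDDING, queryText);

    // Search options (roomId, count) are illustrative assumptions.
    const fragments = await runtime.knowledgeManager.searchMemoriesByEmbedding(embedding, {
        roomId,
        count: 5,
    });

    return fragments;
}

// Fallback used by memory.ts: a null payload yields an all-zero vector instead of throwing.
// const zeroVector = await runtime.call(ModelType.TEXT_EMBEDDING, null);
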
4 changes: 3 additions & 1 deletion packages/core/src/parsing.ts
@@ -85,7 +85,7 @@ export function parseJsonArrayFromText(text: string) {
let jsonData = null;

// First try to parse with the original JSON format
- const jsonBlockMatch = text.match(jsonBlockPattern);
+ const jsonBlockMatch = text?.match(jsonBlockPattern);

Check failure — Code scanning / CodeQL: Polynomial regular expression used on uncontrolled data (High). This regular expression, which depends on library input, may run slowly on strings starting with 'json\n' followed by many repetitions of 'json\na'. (A possible guard is sketched after this hunk.)

if (jsonBlockMatch) {
try {
@@ -144,6 +144,8 @@ export function parseJSONObjectFromText(
let jsonData = null;
const jsonBlockMatch = text.match(jsonBlockPattern);

console.log("parseJsonArrayFromText", text);

try {
if (jsonBlockMatch) {
// Parse the JSON from inside the code block
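
On the CodeQL alert flagged above: a common mitigation for a backtracking-prone pattern applied to uncontrolled input is to bound the input before matching (or rewrite the pattern to avoid ambiguous repetition). The exact definition of jsonBlockPattern is not shown in this diff, so the following is only a hypothetical guard with an arbitrary size cap:

// Hypothetical guard around the existing match; 100 kB is an arbitrary example cap.
const MAX_PARSEABLE_LENGTH = 100_000;

function safeJsonBlockMatch(text: string | undefined, jsonBlockPattern: RegExp): RegExpMatchArray | null {
    if (!text || text.length > MAX_PARSEABLE_LENGTH) {
        return null; // refuse pathological inputs instead of letting the regex backtrack on them
    }
    return text.match(jsonBlockPattern);
}
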
2 changes: 2 additions & 0 deletions packages/core/src/runtime.ts
@@ -567,6 +567,8 @@ export class AgentRuntime implements IAgentRuntime {
modelType: ModelType.TEXT_SMALL,
});

console.log("***** result", result);

const evaluators = parseJsonArrayFromText(
result,
) as unknown as string[];
2 changes: 1 addition & 1 deletion packages/core/tsup.config.ts
@@ -7,7 +7,7 @@ export default defineConfig({
clean: true,
format: ["esm"], // Ensure you're targeting CommonJS
platform: "node",
target: "node18",
target: "node23",
bundle: true,
splitting: true, // Add this for better code splitting
dts: true, // Generate declaration files
4 changes: 1 addition & 3 deletions packages/plugin-bootstrap/src/providers/facts.ts
@@ -16,9 +16,7 @@ const factsProvider: Provider = {
actors: state?.actorsData,
});

- const embedding = await runtime.call(ModelType.TEXT_EMBEDDING, {
-     text: recentMessages,
- });
+ const embedding = await runtime.call(ModelType.TEXT_EMBEDDING, recentMessages);

const memoryManager = new MemoryManager({
runtime,
6 changes: 4 additions & 2 deletions packages/plugin-openai/src/index.ts
@@ -13,6 +13,8 @@ export const openaiPlugin: Plugin = {
return new Array(1536).fill(0);
}

console.log("text", text)

const baseURL = process.env.OPENAI_BASE_URL ?? "https://api.openai.com/v1";

// use fetch to call embedding endpoint
@@ -29,13 +31,14 @@
});

const data = await response.json();
console.log("data", data);
return data.data[0].embedding;
},
[ModelType.TEXT_LARGE]: async ({
runtime,
context,
modelType,
stopSequences = ["\n"],
stopSequences = [],
}: GenerateTextParams
) => {
// TODO: pull variables from json
@@ -51,7 +54,6 @@
const openai = createOpenAI({
apiKey: process.env.OPENAI_API_KEY,
baseURL,
- fetch: runtime.fetch,
});

const smallModel = process.env.OPENAI_SMALL_MODEL ?? process.env.SMALL_MODEL ?? "gpt-4o-mini";
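
For context on the fetch-based TEXT_EMBEDDING handler above, here is a minimal self-contained sketch of the request/response shape it relies on; the default model name and the error handling are assumptions, not taken from this commit:

// Sketch of the OpenAI embeddings call behind data.data[0].embedding above.
async function embed(text: string): Promise<number[]> {
    const baseURL = process.env.OPENAI_BASE_URL ?? "https://api.openai.com/v1";

    const response = await fetch(`${baseURL}/embeddings`, {
        method: "POST",
        headers: {
            Authorization: `Bearer ${process.env.OPENAI_API_KEY}`,
            "Content-Type": "application/json",
        },
        // "text-embedding-3-small" (1536 dimensions, matching the zero-vector length above)
        // is an assumed default; the plugin may read the model name from env instead.
        body: JSON.stringify({ model: "text-embedding-3-small", input: text }),
    });

    if (!response.ok) {
        throw new Error(`Embedding request failed: ${response.status} ${response.statusText}`);
    }

    const data = (await response.json()) as { data: Array<{ embedding: number[] }> };
    return data.data[0].embedding;
}
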
3 changes: 2 additions & 1 deletion turbo.json
@@ -11,7 +11,8 @@
},
"dev": {
"persistent": true,
"dependsOn": ["build"]
"dependsOn": ["build"],
"cache": false
}
}
}
