Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Show current LLM model used #2001

Merged
merged 13 commits into from
Jan 10, 2025
16 changes: 15 additions & 1 deletion packages/ai-bot/helpers.ts
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,7 @@ import type {
CommandEvent,
Tool,
SkillsConfigEvent,
ActiveLLMEvent,
CommandResultEvent,
} from 'https://cardstack.com/base/matrix-event';
import { MatrixEvent, type IRoomEvent } from 'matrix-js-sdk';
Expand All @@ -25,6 +26,8 @@ import {
APP_BOXEL_MESSAGE_MSGTYPE,
APP_BOXEL_COMMAND_MSGTYPE,
APP_BOXEL_ROOM_SKILLS_EVENT_TYPE,
DEFAULT_LLM,
APP_BOXEL_ACTIVE_LLM,
} from '@cardstack/runtime-common/matrix-constants';

let log = logger('ai-bot');
Expand Down Expand Up @@ -86,10 +89,11 @@ export function getPromptParts(
let tools = getTools(history, aiBotUserId);
let toolChoice = getToolChoice(history, aiBotUserId);
let messages = getModifyPrompt(history, aiBotUserId, tools, skills);
let model = getModel(eventList);
return {
tools,
messages,
model: 'openai/gpt-4o',
model,
history,
toolChoice: toolChoice,
};
Expand Down Expand Up @@ -570,6 +574,16 @@ export function isCommandEvent(
);
}

function getModel(eventlist: DiscreteMatrixEvent[]): string {
let activeLLMEvent = eventlist.findLast(
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Should we be reading this from room state instead?

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

We could read this from the room state, but I chose to read it from the event list to make testing easier. Additionally, this approach is consistent with how we read skills from the room.

(event) => event.type === APP_BOXEL_ACTIVE_LLM,
) as ActiveLLMEvent;
if (!activeLLMEvent) {
return DEFAULT_LLM;
}
return activeLLMEvent.content.model;
}

export function isCommandResultEvent(
event?: DiscreteMatrixEvent,
): event is CommandResultEvent {
Expand Down
16 changes: 16 additions & 0 deletions packages/ai-bot/lib/matrix.ts
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,13 @@ export interface MatrixClient {
content: IContent,
): Promise<{ event_id: string }>;

/**
 * Sends a state event into the given room.
 *
 * @param roomId - the room to set state in
 * @param eventType - the state event type (e.g. an app-specific type constant)
 * @param content - the event content payload
 * @param stateKey - the state key; use '' for the room-wide singleton state
 * @returns the id of the created state event
 */
sendStateEvent(
roomId: string,
eventType: string,
content: IContent,
stateKey: string,
): Promise<{ event_id: string }>;

setRoomName(roomId: string, title: string): Promise<{ event_id: string }>;
}

Expand All @@ -37,6 +44,15 @@ export async function sendEvent(
return await client.sendEvent(roomId, eventType, content);
}

/**
 * Updates room-wide state by sending a state event with an empty state key.
 *
 * @param client - the Matrix client used to send the event
 * @param roomId - the room whose state is being updated
 * @param eventType - the state event type
 * @param content - the state event content
 * @returns the id of the created state event
 */
export async function updateStateEvent(
  client: MatrixClient,
  roomId: string,
  eventType: string,
  content: IContent,
) {
  // An empty state key targets the single room-wide instance of this event type.
  const roomWideStateKey = '';
  return await client.sendStateEvent(roomId, eventType, content, roomWideStateKey);
}

export async function sendMessage(
client: MatrixClient,
roomId: string,
Expand Down
17 changes: 14 additions & 3 deletions packages/ai-bot/main.ts
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,11 @@ import {
getPromptParts,
extractCardFragmentsFromEvents,
} from './helpers';
import { APP_BOXEL_CARDFRAGMENT_MSGTYPE } from '@cardstack/runtime-common/matrix-constants';
import {
APP_BOXEL_CARDFRAGMENT_MSGTYPE,
APP_BOXEL_ACTIVE_LLM,
DEFAULT_LLM,
} from '@cardstack/runtime-common/matrix-constants';

import {
shouldSetRoomTitle,
Expand All @@ -24,7 +28,7 @@ import {
} from './lib/set-title';
import { Responder } from './lib/send-response';
import { handleDebugCommands } from './lib/debug';
import { MatrixClient } from './lib/matrix';
import { MatrixClient, updateStateEvent } from './lib/matrix';
import type { MatrixEvent as DiscreteMatrixEvent } from 'https://cardstack.com/base/matrix-event';
import * as Sentry from '@sentry/node';

Expand Down Expand Up @@ -100,6 +104,12 @@ class Assistant {
) {
return setTitle(this.openai, this.client, roomId, history, this.id, event);
}

/**
 * Resets the room's active LLM to the default model by writing the
 * APP_BOXEL_ACTIVE_LLM state event.
 *
 * @param roomId - the room whose active LLM should be reset
 */
async setDefaultLLM(roomId: string) {
  let content = { model: DEFAULT_LLM };
  await updateStateEvent(this.client, roomId, APP_BOXEL_ACTIVE_LLM, content);
}
}

let startTime = Date.now();
Expand Down Expand Up @@ -139,8 +149,9 @@ Common issues are:
if (member.membership === 'invite' && member.userId === aiBotUserId) {
client
.joinRoom(member.roomId)
.then(function () {
.then(async function () {
log.info('%s auto-joined %s', member.name, member.roomId);
await assistant.setDefaultLLM(member.roomId);
})
.catch(function (err) {
log.info(
Expand Down
26 changes: 26 additions & 0 deletions packages/ai-bot/tests/prompt-construction-test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,7 @@ import {
APP_BOXEL_COMMAND_MSGTYPE,
APP_BOXEL_COMMAND_RESULT_EVENT_TYPE,
APP_BOXEL_COMMAND_RESULT_WITH_OUTPUT_MSGTYPE,
DEFAULT_LLM,
} from '@cardstack/runtime-common/matrix-constants';

import type {
Expand Down Expand Up @@ -1611,3 +1612,28 @@ test('Tools can be required to be called if done so in the last message', () =>
},
});
});

module('set model in prompt', () => {
  test('default active LLM must be equal to `DEFAULT_LLM`', () => {
    // Pass an encoding so readFileSync returns a string: JSON.parse is typed
    // to accept string, not Buffer, under strict TypeScript.
    const eventList: DiscreteMatrixEvent[] = JSON.parse(
      readFileSync(
        path.join(
          __dirname,
          'resources/chats/required-tool-call-in-last-message.json',
        ),
        'utf-8',
      ),
    );

    // This fixture contains no APP_BOXEL_ACTIVE_LLM event, so the prompt
    // falls back to the default model.
    const { model } = getPromptParts(eventList, '@aibot:localhost');
    assert.strictEqual(model, DEFAULT_LLM);
  });

  test('use latest active llm', () => {
    const eventList: DiscreteMatrixEvent[] = JSON.parse(
      readFileSync(
        path.join(__dirname, 'resources/chats/set-active-llm.json'),
        'utf-8',
      ),
    );

    // The fixture's most recent active-LLM event selects this model.
    const { model } = getPromptParts(eventList, '@aibot:localhost');
    assert.strictEqual(model, 'google/gemini-pro-1.5');
  });
});
Loading
Loading