AI Agents
Messages that start with /agent trigger the GPT-powered agent response.
cmd
/agent send 1 eth to alix
/agent lets play a game
/agent tell me a joke
/agent help me swap 1 eth to usdc
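Exactly how the /agent command is registered depends on your message-kit version. The sketch below assumes the CommandGroup configuration shape used by the message-kit templates; the name, description, and parameter fields are illustrative and may differ in your setup.

// src/commands.ts (hypothetical sketch; follows the message-kit template
// CommandGroup shape — check your version's types before copying)
import type { CommandGroup } from "@xmtp/message-kit";
import { handler as agent } from "./handler/agent.js";

export const commands: CommandGroup[] = [
  {
    name: "Agent",
    description: "Ask the AI agent to interpret requests and run commands.",
    triggers: ["/agent"],
    commands: [
      {
        command: "/agent [prompt]",
        handler: agent,
        description: "Send a free-form prompt to the agent.",
        params: {
          prompt: {
            default: "",
            type: "prompt",
          },
        },
      },
    ],
  },
];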
Main code
In this case the handler builds a system prompt from group context, such as the members and available commands, so the model can answer with the right command.
src/handler/agent.ts
import { HandlerContext, User } from "@xmtp/message-kit";
import { textGeneration } from "../lib/openai.js";

export async function handler(context: HandlerContext) {
  if (!process?.env?.OPEN_AI_API_KEY) {
    console.log("No OPEN_AI_API_KEY found in .env");
    return;
  }

  const {
    message: {
      content: { content, params },
    },
  } = context;

  // Build a system prompt from the group context (members, commands, sender).
  const systemPrompt = generateSystemPrompt(context);
  try {
    // Use the parsed prompt parameter if present; otherwise fall back to the raw message text.
    let userPrompt = params?.prompt ?? content;
    console.log("userPrompt", userPrompt);

    const { reply } = await textGeneration(userPrompt, systemPrompt);
    console.log("intent:", reply);

    // Pass the model's reply back to message-kit as an intent.
    context.intent(reply);
  } catch (error) {
    console.error("Error during OpenAI call:", error);
    await context.reply("An error occurred while processing your request.");
  }
}
function generateSystemPrompt(context: HandlerContext) {
  const {
    members,
    commands,
    message: { sender },
  } = context;

  const systemPrompt = `
### Context
You are a helpful bot agent that lives inside a web3 messaging group and helps interpret user requests and execute commands.
#### Users
${JSON.stringify(members?.map((member: User) => ({ ...member, username: `@${member.username}` })))}\n
#### Commands
${JSON.stringify(commands)}\n
The message was sent by @${sender?.username}
### Examples
prompt /agent tip alix and bo
reply /tip @alix @bo 10
Important:
- If a user asks for jokes, make jokes about web3 devs\n
- If the user asks about performing an action and you can think of a command that would help, answer directly with the command and nothing else.
- Populate the command with the correct or random values. Always return commands with real values only, using usernames with @ and excluding addresses.\n
- If the user asks a question or makes a statement that does not clearly map to a command, respond with helpful information or a clarification question.\n
- If the user is grateful, respond asking for a tip in a playful manner.
`;
  return systemPrompt;
}
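Putting it together: when a user sends /agent tip alix and bo, the model should answer with /tip @alix @bo 10 (following the example in the prompt), and context.intent(reply) hands that text back to message-kit to be executed as the corresponding command.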
OpenAI middleware
Install dependencies
cmd
yarn add openai
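The middleware reads the key from the OPEN_AI_API_KEY environment variable (the handler above returns early when it is missing), so make sure it is set in your .env file.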
Copy the following code into your src/lib/openai.ts file.
src/lib/openai.ts
import dotenv from "dotenv";
dotenv.config();

import OpenAI from "openai";

const openai = new OpenAI({
  apiKey: process.env.OPEN_AI_API_KEY,
});

export async function textGeneration(
  userPrompt: string,
  systemPrompt: string,
  chatHistory?: any[],
) {
  let messages = chatHistory ? [...chatHistory] : []; // Start with existing chat history
  if (messages.length === 0) {
    messages.push({
      role: "system",
      content: systemPrompt,
    });
  }
  messages.push({
    role: "user",
    content: userPrompt,
  });
  try {
    const response = await openai.chat.completions.create({
      model: "gpt-4o",
      messages: messages as any,
    });
    const reply = response.choices[0].message.content;
    messages.push({
      role: "assistant",
      content: reply || "No response from OpenAI.",
    });
    // Strip common Markdown so the reply reads as plain chat text:
    // remove bold/underline markers, replace links with their URL,
    // drop heading markers, and unwrap inline code backticks.
    const cleanedReply = reply
      ?.replace(/(\*\*|__)(.*?)\1/g, "$2")
      ?.replace(/\[([^\]]+)\]\(([^)]+)\)/g, "$2")
      ?.replace(/^#+\s*(.*)$/gm, "$1")
      ?.replace(/`([^`]+)`/g, "$1")
      ?.replace(/^`|`$/g, "");
    return { reply: cleanedReply as string, history: messages };
  } catch (error) {
    console.error("Failed to fetch from OpenAI:", error);
    throw error;
  }
}
// Interpret an image with the vision capabilities of the model
export async function vision(imageData: Uint8Array, systemPrompt: string) {
  const base64Image = Buffer.from(imageData).toString("base64");
  const dataUrl = `data:image/jpeg;base64,${base64Image}`;

  // Build a fresh message list for each vision request
  const visionMessages = [
    {
      role: "system",
      content: systemPrompt,
    },
    {
      role: "user",
      content: [
        { type: "text", text: systemPrompt },
        {
          type: "image_url",
          image_url: {
            url: dataUrl,
          },
        },
      ],
    },
  ];

  try {
    const response = await openai.chat.completions.create({
      model: "gpt-4o",
      messages: visionMessages as any,
    });
    return response.choices[0].message.content;
  } catch (error) {
    console.error("Failed to interpret image with OpenAI:", error);
    throw error;
  }
}
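Both helpers can then be called from your handlers. The following is a minimal usage sketch (the prompts, file paths, and image source are illustrative); note that textGeneration returns the updated message history so you can pass it back in on the next turn.

// Hypothetical usage of the helpers above; values are illustrative.
import { readFile } from "fs/promises";
import { textGeneration, vision } from "./lib/openai.js";

const systemPrompt = "You are a helpful bot agent in a web3 messaging group.";

// Multi-turn text generation: feed the returned history back in to keep context.
const first = await textGeneration("tell me a joke", systemPrompt);
console.log(first.reply);

const second = await textGeneration("another one", systemPrompt, first.history);
console.log(second.reply);

// Image interpretation: pass raw bytes (e.g. a received attachment) as a Uint8Array.
const imageBytes = new Uint8Array(await readFile("./photo.jpg"));
const description = await vision(imageBytes, "Describe this image in one sentence.");
console.log(description);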