This enables us to do something like this:
import { openai } from '@ai-sdk/openai';
import { streamText } from 'ai';
import { llamacloud } from '@ai-sdk/llamaindex';

export async function POST(req: Request) {
  const { messages } = await req.json();

  const result = streamText({
    model: openai('gpt-4o'),
    messages,
    system: `You are a helpful assistant. Check your knowledge base before answering any questions.
    Only respond to questions using information from tool calls.
    If no relevant information is found in the tool calls, respond, "Sorry, I don't know."`,
    tools: {
      // Proposed LlamaCloud retriever exposed as an AI SDK tool
      getInformation: llamacloud({
        name: 'test',
        project_name: 'Default',
        api_key: 'llx-...',
      }),
    },
  });

  return result.toDataStreamResponse();
}
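
Until a first-party `@ai-sdk/llamaindex` package exists, here is a rough sketch of what the `llamacloud` factory could look like on top of the AI SDK's existing `tool()` helper. The option names mirror the example above; the LlamaCloud retrieval endpoint and request/response shapes used here are assumptions, not a confirmed API:

```ts
import { tool } from 'ai';
import { z } from 'zod';

interface LlamaCloudToolOptions {
  name: string;
  project_name: string;
  api_key: string;
}

// Rough approximation of the proposed `llamacloud` factory, built on the
// AI SDK's `tool()` helper. The retrieval endpoint and payload below are
// assumptions; adjust them to match your LlamaCloud pipeline setup.
export function llamacloud(options: LlamaCloudToolOptions) {
  return tool({
    description: `Search the "${options.name}" LlamaCloud index for relevant information.`,
    parameters: z.object({
      query: z.string().describe('The question to look up in the knowledge base.'),
    }),
    execute: async ({ query }) => {
      // Hypothetical retrieval call against LlamaCloud's REST API.
      const response = await fetch(
        'https://api.cloud.llamaindex.ai/api/v1/retrieval/retrieve',
        {
          method: 'POST',
          headers: {
            Authorization: `Bearer ${options.api_key}`,
            'Content-Type': 'application/json',
          },
          body: JSON.stringify({
            query,
            index_name: options.name,
            project_name: options.project_name,
          }),
        },
      );
      return response.json();
    },
  });
}
```

Returning the raw retrieval response keeps the sketch simple; a real integration would likely map it to a compact list of text chunks before handing it back to the model.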