// Construct a provider-agnostic LLM client backed by Anthropic.
const provider = "anthropic";
const client = createLLMClient({ provider });
// Standard (non-streaming) completion: one request, one full response.
const completionRequest = {
  model: "claude-3-opus-20240229",
  messages: [{ role: "user", content: "Hello!" }],
};
const response = await client.chat.completions.create(completionRequest);
// Streaming completion: stream:true returns an async iterable of chunks;
// each chunk carries an incremental delta rather than the full message.
const stream = await client.chat.completions.create({
  model: "claude-3-opus-20240229",
  messages: [{ role: "user", content: "Hello!" }],
  stream: true,
});
for await (const chunk of stream) {
  // A chunk may arrive without content (e.g. role-only or final chunk).
  const delta = chunk.choices[0]?.delta?.content;
  process.stdout.write(delta ?? "");
}
// Tool/function calling: declare a JSON-schema tool the model may choose
// to invoke in its response.
const analyzeTool = {
  type: "function",
  function: {
    name: "analyze",
    parameters: {
      type: "object",
      properties: {
        sentiment: { type: "string" },
      },
    },
  },
};
const result = await client.chat.completions.create({
  model: "claude-3-opus-20240229",
  messages: [{ role: "user", content: "Analyze this data" }],
  tools: [analyzeTool],
});