import { createGateway } from '@ai-sdk/gateway';
import { generateText } from 'ai';

// Initialize AI Gateway with Helicone
const gateway = createGateway({
  apiKey: process.env.VERCEL_AI_GATEWAY_API_KEY,
  baseURL: 'https://vercel.helicone.ai/v1/ai',
  headers: {
    'Helicone-Auth': `Bearer ${process.env.HELICONE_API_KEY}`,
  },
});

interface SupportTicket {
  id: string;
  customerId: string;
  query: string;
  priority: 'low' | 'medium' | 'high';
}
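
// NOTE: classifyQueryComplexity is called below but was not defined in the original
// snippet. This is a minimal, hypothetical sketch assuming a simple keyword heuristic;
// in practice you might replace it with a call to a cheap classifier model.
type Complexity = 'simple' | 'complex' | 'technical';

async function classifyQueryComplexity(query: string): Promise<Complexity> {
  const technicalTerms = ['api', 'error code', 'integration', 'webhook', 'ssl'];
  const lower = query.toLowerCase();
  if (technicalTerms.some((term) => lower.includes(term))) return 'technical';
  // Treat long, multi-part questions as complex; everything else as simple
  return query.length > 200 ? 'complex' : 'simple';
}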

async function processSupportTicket(ticket: SupportTicket) {
  const complexity = await classifyQueryComplexity(ticket.query);

  // Model selection based on complexity and priority
  let model;
  if (ticket.priority === 'high' || complexity === 'technical') {
    model = gateway('anthropic/claude-3-5-sonnet');
  } else if (complexity === 'complex') {
    model = gateway('openai/gpt-4o');
  } else {
    model = gateway('openai/gpt-4o-mini');
  }

  try {
    const response = await generateText({
      model,
      messages: [
        {
          role: 'system',
          content: `You are a customer support agent. Priority: ${ticket.priority}. Be helpful and professional.`,
        },
        {
          role: 'user',
          content: ticket.query,
        },
      ],
      headers: {
        'Helicone-User-Id': ticket.customerId,
        'Helicone-Property-TicketId': ticket.id,
        'Helicone-Property-Priority': ticket.priority,
        'Helicone-Property-Complexity': complexity,
        // Enable caching for all queries
        'Helicone-Cache-Enabled': 'true',
        'Helicone-Cache-Bucket-Max-Size': '20',
        'Helicone-Cache-Seed': 'support-v1',
      },
      temperature: 0, // Zero temperature for consistent cache hits
      maxOutputTokens: 250,
    });

    return {
      ticketId: ticket.id,
      response: response.text,
      model: model.modelId,
      usage: response.usage, // Token counts; cost is tracked in the Helicone dashboard
    };
  } catch (error) {
    console.error('Support ticket processing failed:', error);
    throw error;
  }
}

// Example usage
const ticket: SupportTicket = {
  id: 'TICKET-12345',
  customerId: 'CUST-789',
  query: 'How do I reset my password?',
  priority: 'low',
};

const result = await processSupportTicket(ticket);
console.log(`Response sent to customer: ${result.response}`);