Build advanced AI agents with AI SDK. Connect 600+ integrations, automate workflows, and deploy with ease using Metorial.
Guidelines for deploying Metorial-powered applications to production environments.
Never hardcode API keys:
// ❌ Bad — the key is committed to source control; it leaks with the repo
// and cannot be rotated without a code change and redeploy.
let metorial = new Metorial({
apiKey: 'met_12345abcde'
});
// ✅ Good — the key is injected at runtime from the environment
// (e.g. your platform's secret manager), so it never lives in the codebase.
let metorial = new Metorial({
apiKey: process.env.METORIAL_API_KEY
});
Use different keys and deployments for each environment:
// One client per process; the key comes from the environment so every
// deployment environment supplies its own credentials.
let metorial = new Metorial({
  apiKey: process.env.METORIAL_API_KEY
});

// Select the server deployment that matches the current environment.
let isProduction = process.env.NODE_ENV === 'production';
let deploymentId = isProduction
  ? process.env.PROD_DEPLOYMENT_ID
  : process.env.DEV_DEPLOYMENT_ID;
Only enable integrations you actually need in your server deployments.
metorial.withProviderSession(
  metorialAiSdk,
  // Expose only the deployments this service actually needs.
  { serverDeployments: [deploymentId] },
  async (session) => {
    try {
      let completion = await generateText({
        model: openai('gpt-4o'),
        prompt: userInput,
        maxSteps: 10,
        tools: session.tools
      });
      return { success: true, data: completion.text };
    } catch (error) {
      // Full details stay server-side for monitoring/alerting.
      console.error('Agent error:', {
        message: error.message,
        stack: error.stack,
        timestamp: new Date().toISOString()
      });
      // The caller only ever sees a generic, user-friendly message.
      return {
        success: false,
        error: 'Unable to process request. Please try again.'
      };
    }
  }
);
Set timeouts to prevent hanging requests:
/**
 * Run generateText with a hard deadline so a hung provider call cannot
 * stall the request forever.
 *
 * @param {object} options - Options forwarded verbatim to generateText.
 * @param {number} [timeoutMs=30000] - Deadline in milliseconds.
 * @returns {Promise<object>} The generateText result if it beats the deadline.
 * @throws {Error} 'Request timeout' when the deadline elapses first.
 */
async function generateWithTimeout(options, timeoutMs = 30000) {
  let timer;
  let deadline = new Promise((_, reject) => {
    timer = setTimeout(() => reject(new Error('Request timeout')), timeoutMs);
  });
  try {
    return await Promise.race([generateText(options), deadline]);
  } finally {
    // Fix: the original never cleared this timer, leaking it (and keeping
    // the Node event loop alive) for up to timeoutMs after every request.
    clearTimeout(timer);
  }
}
Don't set maxSteps too high unnecessarily:
// Match maxSteps to task complexity — every step is a potential model +
// tool round-trip, so a generous cap on a trivial task wastes tokens.
let simple = await generateText({
  model: openai('gpt-4o'),
  prompt: 'Quick fact check',
  tools: session.tools,
  maxSteps: 3 // a couple of tool calls is plenty for a lookup
});
// Multi-stage workflows legitimately need room to plan, act, and observe.
let complex = await generateText({
  model: openai('gpt-4o'),
  prompt: 'Multi-step analysis',
  tools: session.tools,
  maxSteps: 15 // allow several tool-calling iterations
});
Cache responses when appropriate:
// In-memory response cache, keyed by prompt. NOTE: unbounded — for real
// production traffic, swap in an LRU or TTL cache so memory cannot grow
// without limit.
let cache = new Map();

/**
 * Return the model's text response for `prompt`, reusing a cached result
 * when available.
 *
 * Fix: the cache now stores the in-flight Promise instead of writing the
 * text only after the request completes, so concurrent calls with the same
 * prompt share one generateText request rather than each firing their own.
 * Failed requests are evicted so errors are never cached.
 *
 * @param {string} prompt - The user prompt to generate (and cache) for.
 * @returns {Promise<string>} The generated text.
 */
async function getCachedResponse(prompt) {
  if (cache.has(prompt)) {
    return cache.get(prompt);
  }
  // Cache the Promise immediately so overlapping callers join this request.
  let pending = generateText({
    model: openai('gpt-4o'),
    prompt,
    maxSteps: 10,
    tools: session.tools
  }).then(result => result.text);
  cache.set(prompt, pending);
  try {
    return await pending;
  } catch (error) {
    cache.delete(prompt); // don't cache failures
    throw error;
  }
}
// Structured request log: timestamp, user, and a truncated prompt preview.
// Never log the full prompt — it may contain sensitive user data.
console.log('Agent request:', {
timestamp: new Date().toISOString(),
userId: user.id,
prompt: prompt.substring(0, 100), // Log preview only
deployment: deploymentId
});
let result = await generateText({
model: openai('gpt-4o'),
prompt,
maxSteps: 10,
tools: session.tools
});
// Matching response log; stepsUsed helps spot prompts that exhaust maxSteps.
console.log('Agent response:', {
timestamp: new Date().toISOString(),
userId: user.id,
success: true,
stepsUsed: result.steps?.length
});
Track your Metorial and AI provider usage so you can spot cost spikes and quota issues early.
Implement rate limiting to prevent abuse:
// Per-user request timestamps for a sliding-window rate limiter.
// NOTE: in-memory only — use a shared store (e.g. Redis) when running
// multiple server instances.
let userRequests = new Map();

/**
 * Sliding-window rate limiter.
 *
 * @param {string} userId - Identifier of the caller being limited.
 * @param {number} [maxRequests=100] - Max requests allowed per window.
 * @param {number} [windowMs=3600000] - Window length in ms (default 1 hour).
 * @throws {Error} 'Rate limit exceeded' when the user is over the limit.
 */
function checkRateLimit(userId, maxRequests = 100, windowMs = 3600000) {
  let now = Date.now();
  let userHistory = userRequests.get(userId) || [];
  // Drop timestamps that have aged out of the window
  userHistory = userHistory.filter(time => now - time < windowMs);
  if (userHistory.length >= maxRequests) {
    // Fix: persist the pruned history even when rejecting; the original
    // left stale timestamps in the Map, re-filtering them on every
    // subsequent call and holding memory for traffic long outside the window.
    userRequests.set(userId, userHistory);
    throw new Error('Rate limit exceeded');
  }
  userHistory.push(now);
  userRequests.set(userId, userHistory);
}
Always validate and sanitize user input:
/**
 * Validate and normalize untrusted user input before sending it to the model.
 *
 * @param {string} prompt - Raw user-supplied prompt.
 * @returns {string} The trimmed prompt.
 * @throws {Error} 'Invalid prompt' for non-string, empty, or whitespace-only input.
 * @throws {Error} 'Prompt too long' when the trimmed prompt exceeds 10000 chars.
 */
function validatePrompt(prompt) {
  if (!prompt || typeof prompt !== 'string') {
    throw new Error('Invalid prompt');
  }
  // Normalize first so the length limit applies to meaningful characters.
  let trimmed = prompt.trim();
  if (trimmed.length === 0) {
    // Fix: whitespace-only input previously passed validation and was
    // returned as an empty prompt.
    throw new Error('Invalid prompt');
  }
  if (trimmed.length > 10000) {
    throw new Error('Prompt too long');
  }
  return trimmed;
}
let sanitizedPrompt = validatePrompt(userInput);
By following these best practices, you'll build reliable, secure, and efficient applications with Metorial and the AI SDK.
Connect Vercel AI SDK to Metorial and unlock instant access to over 600 integrations for your AI-powered applications. Our open-source, MCP-powered platform makes it effortless to add tools, APIs, and services to your AI SDK projects without writing complex integration code. With Metorial's TypeScript SDK, you can integrate calendars, databases, communication tools, and hundreds of other services in just a couple of lines of code. Whether you're building chatbots, AI assistants, or intelligent workflows with Vercel's AI SDK, Metorial eliminates integration headaches so you can focus on creating exceptional user experiences. Our developer-friendly approach means less time wrestling with authentication, API documentation, and maintenance—and more time innovating. Join developers who are shipping AI applications faster by letting Metorial handle the integration layer while you concentrate on what makes your app unique.
Let's take your AI-powered applications to the next level, together.
Metorial provides developers with instant access to 600+ MCP servers for building AI agents that can interact with real-world tools and services. Built on MCP, Metorial simplifies agent tool integration by offering pre-configured connections to popular platforms like Google Drive, Slack, GitHub, Notion, and hundreds of other APIs. Our platform supports all major AI agent frameworks—including LangChain, AutoGen, CrewAI, and LangGraph—enabling developers to add tool calling capabilities to their agents in just a few lines of code. By eliminating the need for custom integration code, Metorial helps AI developers move from prototype to production faster while maintaining security and reliability. Whether you're building autonomous research agents, customer service bots, or workflow automation tools, Metorial's MCP server library provides the integrations you need to connect your agents to the real world.