MCP Design Patterns
Proven patterns for building reliable, scalable MCP tools
API Integration: external service integration with error handling
Data Processing: transform, validate, and analyze data safely
System Integration: connect with databases, files, and services
🌐 API Integration Patterns
REST API with Retry Logic
Handle API failures gracefully with exponential backoff and meaningful error messages.
export const apiTool = {
name: "fetch_user_data",
description: "Fetch user profile data from external API with retry logic",
inputSchema: {
type: "object",
properties: {
userId: {
type: "string",
description: "Unique user identifier",
pattern: "^[a-zA-Z0-9-_]+$"
}
},
required: ["userId"]
},
handler: async (args) => {
const maxRetries = 3;
const baseDelay = 1000; // 1 second
for (let attempt = 1; attempt <= maxRetries; attempt++) {
try {
const response = await fetch(
`${process.env.API_BASE_URL}/users/${args.userId}`,
{
headers: {
'Authorization': `Bearer ${process.env.API_TOKEN}`,
'User-Agent': 'MCP-Tool/1.0'
},
signal: AbortSignal.timeout(10000) // 10s timeout; native fetch has no "timeout" option
}
);
if (response.ok) {
const data = await response.json();
return {
success: true,
user: data,
metadata: {
source: 'external_api',
attempt: attempt,
timestamp: new Date().toISOString()
}
};
}
// Handle specific HTTP errors
if (response.status === 404) {
return {
success: false,
error: `User with ID "${args.userId}" not found`,
code: "USER_NOT_FOUND"
};
}
if (response.status === 401) {
return {
success: false,
error: "API authentication failed. Please check credentials.",
code: "AUTH_ERROR"
};
}
// Retry on server errors (5xx)
if (response.status >= 500 && attempt < maxRetries) {
const delay = baseDelay * Math.pow(2, attempt - 1);
await new Promise(resolve => setTimeout(resolve, delay));
continue;
}
throw new Error(`API error: ${response.status}`);
} catch (error) {
if (attempt === maxRetries) {
return {
success: false,
error: "API service temporarily unavailable. Please try again later.",
code: "API_UNAVAILABLE",
details: {
attempts: maxRetries,
lastError: error.message
}
};
}
// Wait before retry
const delay = baseDelay * Math.pow(2, attempt - 1);
await new Promise(resolve => setTimeout(resolve, delay));
}
}
}
};
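The backoff delay is computed twice in the handler above (once in the 5xx branch, once in the catch). If several tools need retries, that logic can be factored into a shared helper. A minimal sketch, with withRetry as an illustrative name rather than an MCP API:
// Hypothetical helper: run an async function with exponential backoff.
async function withRetry(fn, { maxRetries = 3, baseDelay = 1000 } = {}) {
  let lastError;
  for (let attempt = 1; attempt <= maxRetries; attempt++) {
    try {
      return await fn(attempt);
    } catch (error) {
      lastError = error;
      if (attempt < maxRetries) {
        // Backoff doubles each attempt: 1s, 2s, 4s, ...
        await new Promise(resolve =>
          setTimeout(resolve, baseDelay * Math.pow(2, attempt - 1)));
      }
    }
  }
  throw lastError;
}
// Usage inside a handler (fetchUserFromApi stands in for your request function):
// const user = await withRetry(() => fetchUserFromApi(args.userId));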
API with Intelligent Caching
Cache responses to improve performance and reduce API costs.
// In-memory cache (use Redis for production)
const cache = new Map();
export const cachedApiTool = {
name: "get_stock_price",
description: "Get current stock price with 5-minute caching",
inputSchema: {
type: "object",
properties: {
symbol: {
type: "string",
description: "Stock symbol (e.g., AAPL, GOOGL)",
pattern: "^[A-Z]{1,5}$"
}
},
required: ["symbol"]
},
handler: async (args) => {
const cacheKey = `stock_${args.symbol}`;
const cacheTimeout = 5 * 60 * 1000; // 5 minutes
// Check cache first
const cached = cache.get(cacheKey);
if (cached && Date.now() - cached.timestamp < cacheTimeout) {
return {
success: true,
...cached.data,
metadata: {
source: 'cache',
cachedAt: new Date(cached.timestamp).toISOString()
}
};
}
try {
const response = await fetch(
`https://api.stockprice.com/quote/${args.symbol}?key=${process.env.STOCK_API_KEY}`
);
if (!response.ok) {
throw new Error(`Stock API error: ${response.status}`);
}
const data = await response.json();
const result = {
symbol: data.symbol,
price: data.currentPrice,
change: data.change,
changePercent: data.changePercent,
lastUpdated: data.lastUpdated
};
// Cache the result
cache.set(cacheKey, {
data: result,
timestamp: Date.now()
});
// Clean old cache entries periodically
if (cache.size > 1000) {
const oldEntries = Array.from(cache.entries())
.filter(([_, value]) => Date.now() - value.timestamp > cacheTimeout);
oldEntries.forEach(([key]) => cache.delete(key));
}
return {
success: true,
...result,
metadata: {
source: 'api',
timestamp: new Date().toISOString()
}
};
} catch (error) {
return {
success: false,
error: "Unable to fetch stock price. Please try again later.",
code: "STOCK_API_ERROR"
};
}
}
};
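The comment above suggests Redis for production, and the swap is mostly mechanical: a shared store survives restarts, is visible to every server instance, and its TTLs replace the manual cleanup pass. A sketch using the node-redis v4 client; the REDIS_URL variable and key naming are assumptions:
import { createClient } from 'redis';
// Assumed: REDIS_URL points at your Redis instance.
const redis = createClient({ url: process.env.REDIS_URL });
await redis.connect();
async function getCachedQuote(symbol) {
  const cached = await redis.get(`stock_${symbol}`);
  return cached ? JSON.parse(cached) : null;
}
async function setCachedQuote(symbol, quote) {
  // EX gives the key a TTL in seconds, so Redis expires stale
  // entries itself and no size-based cleanup loop is needed.
  await redis.set(`stock_${symbol}`, JSON.stringify(quote), { EX: 300 });
}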
📊 Data Processing Patterns
Safe File Processing
Process uploaded files with size limits, type validation, and error recovery.
export const csvProcessorTool = {
name: "process_csv_file",
description: "Parse and validate CSV data with configurable processing options",
inputSchema: {
type: "object",
properties: {
csvData: {
type: "string",
description: "Raw CSV content to process",
maxLength: 1048576 // 1MB limit
},
options: {
type: "object",
properties: {
hasHeaders: {
type: "boolean",
description: "Whether first row contains column headers",
default: true
},
delimiter: {
type: "string",
description: "Column delimiter character",
enum: [",", ";", "\t", "|"],
default: ","
},
skipRows: {
type: "integer",
description: "Number of rows to skip from beginning",
minimum: 0,
maximum: 100,
default: 0
},
maxRows: {
type: "integer",
description: "Maximum number of rows to process",
minimum: 1,
maximum: 10000,
default: 1000
}
}
}
},
required: ["csvData"]
},
handler: async (args) => {
const options = { hasHeaders: true, delimiter: ",", skipRows: 0, maxRows: 1000, ...args.options };
try {
// Basic validation
if (!args.csvData.trim()) {
return {
success: false,
error: "CSV data cannot be empty",
code: "EMPTY_DATA"
};
}
// Size check
const sizeInBytes = new TextEncoder().encode(args.csvData).length;
if (sizeInBytes > 1048576) {
return {
success: false,
error: "File size exceeds 1MB limit",
code: "FILE_TOO_LARGE"
};
}
// Parse CSV
const lines = args.csvData.trim().split(/\r?\n/); // tolerate Windows CRLF line endings
if (lines.length <= options.skipRows) {
return {
success: false,
error: "Not enough data rows after skipping specified rows",
code: "INSUFFICIENT_DATA"
};
}
// Skip specified rows
const dataLines = lines.slice(options.skipRows);
// Extract headers
let headers = [];
let dataRows = dataLines;
if (options.hasHeaders) {
headers = dataLines[0].split(options.delimiter).map(h => h.trim().replace(/"/g, ''));
dataRows = dataLines.slice(1);
} else {
// Generate default headers
const firstRow = dataLines[0].split(options.delimiter);
headers = firstRow.map((_, index) => `Column${index + 1}`);
}
// Validate headers
if (headers.length === 0) {
return {
success: false,
error: "No columns detected in CSV",
code: "NO_COLUMNS"
};
}
// Process data rows
const processedRows = [];
const errors = [];
for (let i = 0; i < Math.min(dataRows.length, options.maxRows); i++) {
const line = dataRows[i].trim();
if (!line) continue; // Skip empty lines
try {
const values = line.split(options.delimiter).map(v => v.trim().replace(/"/g, ''));
if (values.length !== headers.length) {
errors.push({
row: i + 1,
error: `Expected ${headers.length} columns, got ${values.length}`,
data: line
});
continue;
}
const rowData = headers.reduce((obj, header, index) => {
obj[header] = values[index];
return obj;
}, {});
processedRows.push(rowData);
} catch (error) {
errors.push({
row: i + 1,
error: error.message,
data: line
});
}
}
return {
success: true,
data: {
headers: headers,
rows: processedRows,
totalRows: processedRows.length,
originalRowCount: dataRows.length,
skippedRows: options.skipRows,
hasHeaders: options.hasHeaders
},
errors: errors.length > 0 ? errors.slice(0, 10) : undefined, // Show first 10 errors
metadata: {
timestamp: new Date().toISOString(),
options: options
}
};
} catch (error) {
return {
success: false,
error: "Failed to process CSV data",
code: "PROCESSING_ERROR",
details: error.message
};
}
}
};
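One caveat with the split-based parsing above: it breaks on quoted fields that contain the delimiter or embedded newlines (e.g. "Doe, Jane"). If your inputs may contain those, a dedicated parser is safer. A rough sketch of the same options mapped onto the csv-parse package, assuming it is installed:
import { parse } from 'csv-parse/sync';
// Correctly handles quoted fields, escaped quotes, and embedded newlines.
const records = parse(args.csvData, {
  columns: options.hasHeaders,      // true: use the first row as object keys
  delimiter: options.delimiter,
  from_line: options.skipRows + 1,  // csv-parse line numbers are 1-based
  skip_empty_lines: true,
  trim: true
}).slice(0, options.maxRows);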
🔧 System Integration Patterns
Database Query Tool
Safe database queries with connection pooling and SQL injection prevention.
import { Pool } from 'pg'; // PostgreSQL example
// Connection pool (initialize once)
const pool = new Pool({
connectionString: process.env.DATABASE_URL,
max: 10, // Maximum number of connections
idleTimeoutMillis: 30000,
connectionTimeoutMillis: 2000,
});
export const databaseQueryTool = {
name: "search_products",
description: "Search products in database with filters and pagination",
inputSchema: {
type: "object",
properties: {
searchTerm: {
type: "string",
description: "Product name or description to search for",
maxLength: 100
},
category: {
type: "string",
description: "Product category filter",
enum: ["electronics", "clothing", "books", "home", "sports"]
},
priceRange: {
type: "object",
properties: {
min: { type: "number", minimum: 0 },
max: { type: "number", minimum: 0 }
}
},
page: {
type: "integer",
minimum: 1,
default: 1
},
limit: {
type: "integer",
minimum: 1,
maximum: 100,
default: 20
}
}
},
handler: async (args) => {
const client = await pool.connect();
try {
// Build query with parameterized statements (prevents SQL injection)
let query = `
SELECT
id, name, description, price, category,
created_at, updated_at
FROM products
WHERE 1=1
`;
const params = [];
let paramCount = 0;
// Add search term filter
if (args.searchTerm) {
paramCount++;
query += ` AND (name ILIKE $${paramCount} OR description ILIKE $${paramCount})`;
params.push(`%${args.searchTerm}%`);
}
// Add category filter
if (args.category) {
paramCount++;
query += ` AND category = $${paramCount}`;
params.push(args.category);
}
// Add price range filter
if (args.priceRange) {
if (args.priceRange.min !== undefined) {
paramCount++;
query += ` AND price >= $${paramCount}`;
params.push(args.priceRange.min);
}
if (args.priceRange.max !== undefined) {
paramCount++;
query += ` AND price <= $${paramCount}`;
params.push(args.priceRange.max);
}
}
// Add ordering and pagination
query += ` ORDER BY created_at DESC`;
const limit = args.limit || 20;
const offset = ((args.page || 1) - 1) * limit;
paramCount++;
query += ` LIMIT $${paramCount}`;
params.push(limit);
paramCount++;
query += ` OFFSET $${paramCount}`;
params.push(offset);
// Execute query with timeout
const result = await client.query({
text: query,
values: params,
query_timeout: 10000 // 10 second timeout (pg's option is query_timeout, not "timeout")
});
// Get total count for pagination
let countQuery = `
SELECT COUNT(*) as total
FROM products
WHERE 1=1
`;
const countParams = params.slice(0, -2); // Remove LIMIT and OFFSET
let countParamCount = 0;
if (args.searchTerm) {
countParamCount++;
countQuery += ` AND (name ILIKE $${countParamCount} OR description ILIKE $${countParamCount})`;
}
if (args.category) {
countParamCount++;
countQuery += ` AND category = $${countParamCount}`;
}
if (args.priceRange?.min !== undefined) {
countParamCount++;
countQuery += ` AND price >= $${countParamCount}`;
}
if (args.priceRange?.max !== undefined) {
countParamCount++;
countQuery += ` AND price <= $${countParamCount}`;
}
const countResult = await client.query(countQuery, countParams);
const total = parseInt(countResult.rows[0].total);
return {
success: true,
data: {
products: result.rows,
pagination: {
page: args.page || 1,
limit: limit,
total: total,
totalPages: Math.ceil(total / limit),
hasNext: ((args.page || 1) * limit) < total,
hasPrev: (args.page || 1) > 1
}
},
metadata: {
query: query,
timestamp: new Date().toISOString(),
filters: {
searchTerm: args.searchTerm,
category: args.category,
priceRange: args.priceRange
}
}
};
} catch (error) {
console.error('Database query error:', error);
if (error.code === '23505') { // Unique violation
return {
success: false,
error: "Duplicate entry found",
code: "DUPLICATE_ENTRY"
};
}
if (error.code === 'ECONNREFUSED') {
return {
success: false,
error: "Database connection failed",
code: "DATABASE_UNAVAILABLE"
};
}
return {
success: false,
error: "Database query failed. Please try again.",
code: "QUERY_ERROR"
};
} finally {
client.release(); // Always release the connection
}
}
};
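One companion detail to pooling: drain the pool when the process shuts down, so Postgres is not left holding connections from a dead server. A small sketch:
// Graceful shutdown: wait for checked-out clients, then close all connections.
process.on('SIGTERM', async () => {
  await pool.end();
  process.exit(0);
});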
⚠️ Error Handling Patterns
Consistent Error Format
Always return errors in a consistent format that the AI can interpret and users can act on.
// Good error response structure
return {
success: false,
error: "User-friendly error message",
code: "MACHINE_READABLE_CODE",
details: {
field: "email",
value: "invalid-email",
suggestion: "Please provide a valid email address"
},
metadata: {
timestamp: new Date().toISOString(),
requestId: randomUUID() // e.g. from node:crypto
}
};
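When many tools share this envelope, a small factory keeps the shape from drifting between handlers. A minimal sketch; toolError is an illustrative helper, not part of MCP:
import { randomUUID } from 'node:crypto';
// Hypothetical helper: builds the standard error envelope shown above.
function toolError(code, message, details) {
  return {
    success: false,
    error: message,
    code,
    ...(details && { details }),
    metadata: {
      timestamp: new Date().toISOString(),
      requestId: randomUUID()
    }
  };
}
// Usage:
// return toolError("USER_NOT_FOUND", `User with ID "${args.userId}" not found`);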
Input Validation
Validate inputs early and provide specific feedback about what's wrong.
// Validate before processing
if (!args.email || !args.email.includes('@')) {
return {
success: false,
error: "Please provide a valid email address",
code: "INVALID_EMAIL",
details: {
field: "email",
value: args.email,
requirement: "Must contain @ symbol and valid domain"
}
};
}
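Field-by-field checks like this get tedious, and every tool already declares an inputSchema in JSON Schema form. One option is to reuse that schema for validation with a library such as Ajv; a sketch, assuming Ajv is installed and the schema is compiled once per tool:
import Ajv from 'ajv';
const ajv = new Ajv({ allErrors: true });
const validate = ajv.compile(apiTool.inputSchema);
if (!validate(args)) {
  return {
    success: false,
    error: "Invalid input",
    code: "VALIDATION_ERROR",
    // Ajv reports every failing constraint with its JSON path.
    details: validate.errors.map(e => ({
      field: e.instancePath || e.params?.missingProperty,
      message: e.message
    }))
  };
}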
🚀 Performance Patterns
⏱️ Timeout Management
// Set reasonable timeouts
const controller = new AbortController();
const timeoutId = setTimeout(() => controller.abort(), 30000);
try {
  const response = await fetch(url, {
    signal: controller.signal
  });
  // Process response...
} catch (error) {
  if (error.name === 'AbortError') {
    return {
      success: false,
      error: "Request timed out",
      code: "TIMEOUT"
    };
  }
  throw error; // don't silently swallow non-timeout failures
} finally {
  clearTimeout(timeoutId); // clear the timer on every path
}
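On newer runtimes (Node 17.3+ and current browsers), AbortSignal.timeout() replaces the controller-and-setTimeout pairing with a single line:
// Note: the rejection's error.name is 'TimeoutError' here, not 'AbortError'.
const response = await fetch(url, { signal: AbortSignal.timeout(30000) });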
📊 Result Pagination
// Always paginate large results
const limit = Math.min(args.limit || 20, 100);
const offset = ((args.page || 1) - 1) * limit;
return {
success: true,
data: results.slice(offset, offset + limit),
pagination: {
page: args.page || 1,
limit,
total: results.length,
hasMore: offset + limit < results.length
}
};