Batch Sending
Learn how to send emails and SMS messages in bulk efficiently.
Batch Sending
Batch sending allows you to send multiple messages in a single API call, improving efficiency and reducing latency. This guide covers batch sending for both email and SMS.
Overview
- Email batch limit: Up to 500 messages per request
- SMS batch limit: Up to 500 messages per request
- Individual responses: Each message gets its own success/failure status
- Partial success: Some messages may succeed while others fail
Email Batch Sending
Basic Batch Send
import { Transactional } from 'transactional-sdk';

const client = new Transactional({
  apiKey: process.env.TRANSACTIONAL_API_KEY,
});

// Build one personalized message per recipient; the whole array goes
// out in a single API call.
const messages = [1, 2, 3].map((n) => ({
  from: 'hello@yourapp.com',
  to: `user${n}@example.com`,
  subject: `Hello User ${n}`,
  html: `<p>This is a personalized message for User ${n}.</p>`,
}));

const results = await client.emails.sendBatch(messages);

// A batch can partially fail, so inspect every result individually.
results.forEach((result, index) => {
  if (result.errorCode === 0) {
    console.log(`Message ${index + 1} sent: ${result.messageId}`);
  } else {
    console.error(`Message ${index + 1} failed: ${result.message}`);
  }
});
With Templates
// Send the weekly newsletter to every subscribed user.
const users = await db.users.findMany({ where: { subscribed: true } });

const messages = users.map((user) => ({
  from: 'hello@yourapp.com',
  to: user.email,
  templateAlias: 'weekly-newsletter',
  templateModel: {
    firstName: user.firstName,
    featuredArticles: getArticlesForUser(user),
    unsubscribeUrl: `https://yourapp.com/unsubscribe?token=${user.unsubscribeToken}`,
  },
  tag: 'newsletter',
  metadata: { userId: user.id },
}));

// The API accepts at most 500 messages per request, so send in chunks.
for (const batch of chunk(messages, 500)) {
  const results = await client.emails.sendBatch(batch);
  await processResults(results);
}
Handling Large Batches
For large recipient lists, split into manageable chunks:
/**
 * Splits an array into consecutive sub-arrays of at most `size` elements.
 * The final chunk may be shorter when the length is not a multiple of `size`.
 */
function chunk<T>(array: T[], size: number): T[][] {
  const result: T[][] = [];
  let start = 0;
  while (start < array.length) {
    result.push(array.slice(start, start + size));
    start += size;
  }
  return result;
}
/**
 * Sends the 'newsletter' template to every recipient, batching requests
 * to stay within the 500-message-per-call API limit.
 * Returns one SendResult per recipient, in send order.
 */
async function sendNewsletterToAll(recipients: User[]) {
  const messages = recipients.map((user) => ({
    from: 'hello@yourapp.com',
    to: user.email,
    templateAlias: 'newsletter',
    templateModel: { name: user.name },
  }));
  const batches = chunk(messages, 500);
  const allResults: SendResult[] = [];
  for (const [i, batch] of batches.entries()) {
    console.log(`Sending batch ${i + 1} of ${batches.length}`);
    const results = await client.emails.sendBatch(batch);
    allResults.push(...results);
    // Optional: pause between batches to stay under rate limits.
    if (i < batches.length - 1) {
      await sleep(1000);
    }
  }
  return allResults;
}
SMS Batch Sending
Basic SMS Batch
// One templated SMS per shipped order, all submitted in a single batch call.
const shippedOrders = [
  { to: '+14155551001', orderId: 'ORD-001', trackingUrl: 'https://track.co/abc' },
  { to: '+14155551002', orderId: 'ORD-002', trackingUrl: 'https://track.co/def' },
  { to: '+14155551003', orderId: 'ORD-003', trackingUrl: 'https://track.co/ghi' },
];
const smsMessages = shippedOrders.map(({ to, orderId, trackingUrl }) => ({
  to,
  templateAlias: 'order-shipped',
  templateModel: { orderId, trackingUrl },
}));

const results = await client.sms.sendBatch(smsMessages);

// Check every delivery individually — the batch may partially fail.
results.forEach((result) => {
  if (result.errorCode === 0) {
    console.log(`SMS sent to ${result.to}: ${result.messageId}`);
  } else {
    console.error(`SMS failed to ${result.to}: ${result.message}`);
  }
});
OTP Batch (Multiple Verifications)
const verificationRequests = [
{ phone: '+14155551001', userId: 'user_1' },
{ phone: '+14155551002', userId: 'user_2' },
{ phone: '+14155551003', userId: 'user_3' },
];
// Generate codes and prepare messages
const messages = verificationRequests.map((req) => {
const code = Math.floor(100000 + Math.random() * 900000).toString();
// Store code in Redis
redis.setex(`otp:${req.userId}`, 600, code);
return {
to: req.phone,
templateAlias: 'otp-verification',
templateModel: { code },
metadata: { userId: req.userId },
};
});
await client.sms.sendBatch(messages);
Error Handling
Partial Failures
Batch requests can have partial failures. Always check each result:
const results = await client.emails.sendBatch(messages);

// Split the batch response into successes and failures.
const successful = results.filter((r) => r.errorCode === 0);
const failed = results.filter((r) => r.errorCode !== 0);
console.log(`Sent: ${successful.length}, Failed: ${failed.length}`);

// Dispatch each failure according to its error code.
for (const failure of failed) {
  if (failure.errorCode === 406) {
    // Inactive recipient
    await markEmailAsInactive(failure.to);
  } else if (failure.errorCode === 300) {
    // Invalid email
    await logInvalidEmail(failure.to);
  } else {
    await retryLater(failure);
  }
}
Retry Logic
/**
 * Sends a batch of emails, retrying messages that failed with a transient
 * error code (see isRetryable) for up to `maxRetries` attempts, with a
 * linearly increasing delay between attempts.
 *
 * Bug fix: the original rebuilt the retry set with `messages.filter(...)`,
 * but after the first attempt `results` are indexed against `remaining`,
 * not `messages`, so the wrong messages were retried. Results are now
 * paired with `remaining` directly. Additionally, retryable messages that
 * are still failing when retries run out are recorded in the returned
 * results instead of being silently dropped.
 */
async function sendBatchWithRetry(
  messages: EmailMessage[],
  maxRetries: number = 3
): Promise<SendResult[]> {
  let attempts = 0;
  let remaining = messages;
  const allResults: SendResult[] = [];
  while (remaining.length > 0 && attempts < maxRetries) {
    attempts++;
    try {
      const results = await client.emails.sendBatch(remaining);
      const retryNext: EmailMessage[] = [];
      results.forEach((result, index) => {
        const transient = result.errorCode !== 0 && isRetryable(result.errorCode);
        if (transient && attempts < maxRetries) {
          // results[index] corresponds to remaining[index] on this attempt.
          retryNext.push(remaining[index]);
        } else {
          // Success, non-retryable failure, or out of retry budget.
          allResults.push(result);
        }
      });
      remaining = retryNext;
      if (remaining.length > 0) {
        await sleep(1000 * attempts); // backoff grows with each attempt
      }
    } catch (error) {
      // Whole-request failure (e.g. network error before any per-message
      // results were returned): retry the entire remaining set.
      if (attempts < maxRetries) {
        await sleep(1000 * attempts);
      } else {
        throw error;
      }
    }
  }
  return allResults;
}
/**
 * True for transient error codes worth retrying: rate limiting (429)
 * and temporary server-side failures (500, 503).
 */
function isRetryable(errorCode: number): boolean {
  return errorCode === 429 || errorCode === 500 || errorCode === 503;
}
Performance Optimization
Parallel Batch Processing
For very large sends, process batches in parallel:
/**
 * Sends a templated email to every recipient, dispatching up to
 * `concurrency` batches of 500 messages in parallel at a time.
 * Returns the flattened list of per-message results.
 */
async function sendMassEmail(
  recipients: User[],
  templateAlias: string,
  concurrency: number = 5
) {
  const messages = recipients.map((user) => ({
    from: 'hello@yourapp.com',
    to: user.email,
    templateAlias,
    templateModel: { name: user.name },
  }));
  const batches = chunk(messages, 500);
  const results: SendResult[][] = [];
  // Walk the batch list in groups of `concurrency`; each group is sent
  // in parallel, and groups run one after another.
  for (const group of chunk(batches, concurrency)) {
    const groupResults = await Promise.all(
      group.map((batch) => client.emails.sendBatch(batch))
    );
    results.push(...groupResults);
  }
  return results.flat();
}
Queue-Based Processing
For large campaigns, use a job queue:
import { Queue, Worker } from 'bullmq';

const emailQueue = new Queue('email-batch');

/**
 * Splits a campaign into 500-message batches and enqueues one job per
 * batch; each job is retried up to 3 times with exponential backoff.
 */
async function queueCampaign(recipients: User[], templateAlias: string) {
  const messages = recipients.map((user) => ({
    from: 'hello@yourapp.com',
    to: user.email,
    templateAlias,
    templateModel: { name: user.name },
  }));
  const batches = chunk(messages, 500);
  let batchIndex = 0;
  for (const batch of batches) {
    await emailQueue.add(
      'send-batch',
      { batchIndex, messages: batch },
      { attempts: 3, backoff: { type: 'exponential', delay: 5000 } }
    );
    batchIndex++;
  }
  console.log(`Queued ${batches.length} batches`);
}
// Worker: pulls queued batches off the queue and reports per-batch outcomes.
const worker = new Worker('email-batch', async (job) => {
  const { batchIndex, messages } = job.data;
  console.log(`Processing batch ${batchIndex}`);
  const results = await client.emails.sendBatch(messages);
  let successful = 0;
  let failed = 0;
  for (const r of results) {
    if (r.errorCode === 0) successful++;
    else failed++;
  }
  console.log(`Batch ${batchIndex}: ${successful} sent, ${failed} failed`);
  return { successful, failed };
});
Monitoring and Analytics
Track Batch Results
/**
 * Sends a batch and records the outcome for a campaign: one per-batch row
 * holding the raw results, plus incremented campaign-level counters.
 * Returns the raw per-message results.
 */
async function sendAndTrack(messages: EmailMessage[], campaignId: string) {
  const results = await client.emails.sendBatch(messages);
  const successCount = results.filter((r) => r.errorCode === 0).length;
  const failCount = results.length - successCount;

  // Persist this batch's raw results for later inspection.
  await db.campaignBatches.create({
    data: {
      campaignId,
      totalSent: messages.length,
      successful: successCount,
      failed: failCount,
      results: JSON.stringify(results),
      sentAt: new Date(),
    },
  });

  // Roll the counts up into the campaign aggregates.
  await db.campaigns.update({
    where: { id: campaignId },
    data: {
      totalSent: { increment: messages.length },
      successCount: { increment: successCount },
      failCount: { increment: failCount },
    },
  });

  return results;
}
Progress Updates
/**
 * Sends a 'notification' template to every recipient in 500-message
 * batches, invoking `onProgress(sentSoFar, total)` after each batch.
 * Returns the total number of messages submitted.
 */
async function sendWithProgress(
  recipients: User[],
  onProgress: (sent: number, total: number) => void
) {
  const messages = recipients.map((user) => ({
    from: 'hello@yourapp.com',
    to: user.email,
    templateAlias: 'notification',
    templateModel: { name: user.name },
  }));
  let totalSent = 0;
  for (const batch of chunk(messages, 500)) {
    await client.emails.sendBatch(batch);
    totalSent += batch.length;
    onProgress(totalSent, messages.length);
  }
  return totalSent;
}
// Usage: log a running percentage as each batch completes.
await sendWithProgress(users, (sent, total) => {
  const percent = Math.round((sent / total) * 100);
  console.log(`Progress: ${sent}/${total} (${percent}%)`);
});
Best Practices
- Always handle partial failures: Check each result individually
- Use templates for personalization: More efficient than building HTML per message
- Add metadata: Tag messages for easier tracking and debugging
- Implement retries: Use exponential backoff for transient failures
- Monitor rate limits: Respect the 429 response and back off
- Use queues for large campaigns: Don't block your main application
- Track results: Store batch results for analytics and debugging
Next Steps
- Sending Emails - Basic email sending
- Email Templates - Using templates
- SMS API Reference - SMS batch endpoint details
On This Page
- Batch Sending
- Overview
- Email Batch Sending
- Basic Batch Send
- With Templates
- Handling Large Batches
- SMS Batch Sending
- Basic SMS Batch
- OTP Batch (Multiple Verifications)
- Error Handling
- Partial Failures
- Retry Logic
- Performance Optimization
- Parallel Batch Processing
- Queue-Based Processing
- Monitoring and Analytics
- Track Batch Results
- Progress Updates
- Best Practices
- Next Steps