Best Practices
Learn how to use Sales Webhooks effectively, reliably, and at scale. These practices are based on real-world usage patterns from successful customers.
Subscription Management
🎯 Use LinkedIn URLs Instead of IDs
Canonical LinkedIn profile URLs are more stable than usernames or custom IDs: people can change their custom vanity URL at any time, but the canonical profile URL remains constant.
✅ Recommended
{
"linkedin_url": "https://linkedin.com/in/john-doe-123456"
}
❌ Avoid
{
"linkedin_id": "john-doe" // Can change
}
📦 Batch Your Operations
Use bulk endpoints when creating or deleting multiple subscriptions. This reduces API calls and improves performance.
// Anti-pattern: one API call per profile
for (const url of linkedinUrls) {
  await client.subscriptions.create({ linkedin_url: url });
}

// Preferred: a single bulk call covering the whole list
const payload = linkedinUrls.map((url) => ({
  entity_type: 'contact',
  linkedin_url: url
}));
const result = await client.subscriptions.bulkCreate({ subscriptions: payload });
console.log(`Created ${result.created} subscriptions`);
🔄 Sync with Your Database
Maintain a local record of your subscriptions to avoid hitting API rate limits and enable faster operations.
// Store subscription data locally
async function createSubscriptionWithSync(linkedinUrl) {
  // Register the subscription remotely first so we get its ID.
  const remote = await client.subscriptions.create({
    entity_type: 'contact',
    linkedin_url: linkedinUrl
  });

  // Mirror the record in our own database for fast local lookups.
  await db.subscriptions.insert({
    saleswebhooks_id: remote.id,
    linkedin_url: linkedinUrl,
    status: 'active',
    created_at: new Date()
  });

  return remote;
}
// Periodic sync to catch any discrepancies
async function syncSubscriptions() {
  // Pull the remote view first, then our local mirror.
  const remote = await client.subscriptions.list({ limit: 100 });
  const local = await db.subscriptions.findAll();
  // Reconcile differences...
}
🏷️ Organize with Metadata
Tag subscriptions in your system with metadata like account owner, priority, or segment for better organization.
// Track additional metadata locally
const record = {
  saleswebhooks_id: subscription.id,
  linkedin_url: contact.linkedin_url,
  // Your custom metadata
  account_id: contact.account_id,
  owner_id: contact.owner_id,
  segment: contact.segment,
  priority: calculatePriority(contact),
  tags: ['enterprise', 'decision-maker', 'target-account']
};
await db.subscriptions.insert(record);
Webhook Handling
⚡ Process Webhooks Asynchronously
Return 200 OK immediately and process webhooks in the background to avoid timeouts.
app.post('/webhook', async (req, res) => {
  // 1. Reject anything that fails signature verification.
  const signature = req.headers['x-webhook-signature'];
  if (!verifySignature(req.body, signature)) {
    return res.status(401).send('Unauthorized');
  }

  // 2. Acknowledge right away so the sender never times out.
  res.status(200).send('OK');

  // 3. Hand off to background processing without awaiting the result.
  processWebhookAsync(req.body).catch((error) => {
    console.error('Webhook processing failed:', error);
    // Log to error tracking service
  });
});
// Enqueue the event for background workers; up to 3 delivery attempts,
// failed jobs are kept for inspection.
async function processWebhookAsync(event) {
  const jobOptions = {
    removeOnComplete: true,
    removeOnFail: false,
    attempts: 3
  };
  await queue.add('webhook', event, jobOptions);
}
🔁 Implement Idempotency
Webhooks may be delivered more than once. Use the event ID to ensure idempotent processing.
const PROCESSED_EVENTS_TTL = 7 * 24 * 60 * 60; // 7 days

// Idempotent webhook handler: a Redis key per event ID ensures each
// delivery is handled at most once within the TTL window.
async function processWebhook(event) {
  const dedupeKey = `webhook:processed:${event.id}`;

  // Skip events we have already handled.
  if (await redis.get(dedupeKey)) {
    console.log(`Already processed event ${event.id}`);
    return;
  }

  try {
    await handleEvent(event);
    // Only mark as processed once handling succeeded.
    await redis.setex(dedupeKey, PROCESSED_EVENTS_TTL, '1');
  } catch (error) {
    // Leave the key unset so a redelivery can retry this event.
    throw error;
  }
}
📊 Track Webhook Metrics
Monitor webhook processing to identify issues and optimize performance.
/**
 * Collects counters and processing-time samples for incoming webhooks.
 *
 * The original version called `this.checkDuplicate` and
 * `this.calculateAverage`, neither of which was defined, so both
 * `trackWebhook` and `getStats` crashed at runtime. Both are now
 * implemented; `checkDuplicate` keeps an in-memory Set and can be
 * overridden (e.g. with a Redis-backed check) for multi-process use.
 */
class WebhookMetrics {
  constructor() {
    this.counters = {
      received: 0,
      processed: 0,
      failed: 0,
      duplicates: 0
    };
    this.processingTimes = [];
    // Event IDs seen so far, for in-memory duplicate detection.
    this.seenEventIds = new Set();
  }

  /**
   * Returns true if this event ID was already tracked; records it otherwise.
   * Override for deployments where a shared store is required.
   */
  async checkDuplicate(eventId) {
    if (this.seenEventIds.has(eventId)) {
      return true;
    }
    this.seenEventIds.add(eventId);
    return false;
  }

  /**
   * Runs `processFn(event)` while updating counters and timing stats.
   * Rethrows processing errors after counting the failure.
   */
  async trackWebhook(event, processFn) {
    this.counters.received++;
    const startTime = Date.now();
    try {
      const wasDuplicate = await this.checkDuplicate(event.id);
      if (wasDuplicate) {
        this.counters.duplicates++;
        return;
      }
      await processFn(event);
      this.counters.processed++;
      // Compute the elapsed time once (the original recomputed it).
      const elapsed = Date.now() - startTime;
      this.processingTimes.push(elapsed);
      // Alert if processing is slow
      if (elapsed > 5000) {
        console.warn(`Slow webhook processing: ${event.id}`);
      }
    } catch (error) {
      this.counters.failed++;
      throw error;
    }
  }

  /** Arithmetic mean of the samples; 0 for an empty sample (avoids NaN). */
  calculateAverage(values) {
    if (values.length === 0) {
      return 0;
    }
    return values.reduce((sum, v) => sum + v, 0) / values.length;
  }

  /** Snapshot of counters plus derived rates. */
  getStats() {
    return {
      ...this.counters,
      avgProcessingTime: this.calculateAverage(this.processingTimes),
      // Guard the division before any events have arrived.
      successRate: this.counters.received === 0
        ? 0
        : this.counters.processed / this.counters.received
    };
  }
}
API Usage Optimization
🚦 Implement Smart Rate Limiting
Respect rate limits and implement backoff to avoid hitting limits.
/**
 * Serializes API calls through a FIFO queue and backs off on HTTP 429.
 *
 * Fixes two defects in the original: `this.sleep` was called but never
 * defined (every request crashed), and `error.headers['retry-after']`
 * threw when the error carried no `headers` object.
 */
class RateLimitedClient {
  constructor(client) {
    this.client = client;
    this.queue = [];       // pending { fn, resolve, reject } entries
    this.processing = false; // true while the drain loop is running
  }

  /**
   * Enqueue a request factory; the returned promise settles with the
   * factory's eventual result.
   */
  async request(fn) {
    return new Promise((resolve, reject) => {
      this.queue.push({ fn, resolve, reject });
      this.processQueue();
    });
  }

  /** Drains the queue one request at a time, honoring 429 backoff. */
  async processQueue() {
    if (this.processing || this.queue.length === 0) return;
    this.processing = true;
    while (this.queue.length > 0) {
      const { fn, resolve, reject } = this.queue.shift();
      try {
        const result = await fn();
        resolve(result);
      } catch (error) {
        if (error.status === 429) {
          // Rate limited - requeue at the front and wait per Retry-After.
          this.queue.unshift({ fn, resolve, reject });
          const retryAfter = error.headers?.['retry-after'] || 60;
          await this.sleep(retryAfter * 1000);
        } else {
          reject(error);
        }
      }
      // Small delay between requests
      await this.sleep(100);
    }
    this.processing = false;
  }

  /** Promise-based delay; referenced but missing in the original. */
  sleep(ms) {
    return new Promise((resolve) => setTimeout(resolve, ms));
  }
}
📄 Use Pagination Efficiently
When fetching large datasets, use pagination and stream results with an async generator so you can start processing before all pages have arrived.
// Fetch all subscriptions efficiently
// Async generator: yields each active subscription as its page arrives,
// walking the offset-based pagination until the API reports no more.
async function* getAllSubscriptions() {
  const limit = 100; // Maximum allowed
  let offset = 0;
  for (;;) {
    const page = await client.subscriptions.list({
      limit,
      offset,
      status: 'active'
    });
    // Stream each subscription to the caller as soon as we have it.
    yield* page.data;
    // Stop once the API reports no further pages.
    if (!page.pagination.has_more) {
      return;
    }
    offset += limit;
  }
}
// Process subscriptions in batches of 50 as they stream in
const pending = [];
for await (const subscription of getAllSubscriptions()) {
  pending.push(subscription);
  if (pending.length >= 50) {
    await processBatch(pending);
    pending.length = 0; // reuse the same array for the next batch
  }
}
// Flush the final partial batch
if (pending.length > 0) {
  await processBatch(pending);
}
💾 Cache When Appropriate
Cache data that doesn't change frequently to reduce API calls.
// Caches slow-changing API data in Redis to reduce API call volume.
class CachedClient {
  constructor(client, redis) {
    this.client = client;
    this.redis = redis;
  }

  // Returns the webhook configuration, serving from cache when possible.
  async getWebhookConfig() {
    const cached = await this.redis.get('webhook:config');
    if (cached) {
      return JSON.parse(cached);
    }

    const config = await this.client.webhooks.getConfiguration();
    // Cache for 1 hour
    await this.redis.setex('webhook:config', 3600, JSON.stringify(config));
    return config;
  }

  // Drops the cached configuration so the next read refetches from the API.
  async invalidateWebhookConfig() {
    await this.redis.del('webhook:config');
  }
}
Sales Engagement Strategies
🎯 Prioritize High-Value Changes
Not all changes are equally valuable. Focus on the ones that matter most for your sales process.
// Priority routing table: maps each event type to how (and why)
// the sales team should react to it.
const CHANGE_PRIORITIES = {
  // High priority - immediate action
  'contact.company_changed': {
    priority: 'high',
    action: 'immediate_outreach',
    reason: 'New budget authority'
  },
  'contact.position_changed': {
    priority: 'high',
    action: 'congratulations_sequence',
    reason: 'Perfect timing for outreach'
  },
  // Medium priority - timely follow-up
  'contact.post_created': {
    priority: 'medium',
    action: 'content_engagement',
    reason: 'Shows activity and interests'
  },
  // Low priority - update records
  'contact.about_changed': {
    priority: 'low',
    action: 'crm_update',
    reason: 'Data hygiene'
  }
};

/**
 * Routes a webhook event according to its configured priority.
 * Unrecognized event types fall back to a plain CRM update — the
 * original crashed reading `.priority` of `undefined` for any type
 * missing from CHANGE_PRIORITIES.
 */
function triageWebhook(event) {
  const config = CHANGE_PRIORITIES[event.type];
  if (!config) {
    // Unknown event type: safest default is to update the CRM record.
    updateCRM(event);
    return;
  }
  if (config.priority === 'high') {
    // Route to sales team immediately
    notifySalesTeam(event, config);
  } else if (config.priority === 'medium') {
    // Queue for daily batch
    queueForBatchProcessing(event, config);
  } else {
    // Update CRM only
    updateCRM(event);
  }
}
📈 Track ROI Metrics
Measure the effectiveness of your LinkedIn monitoring to optimize your strategy.
/**
 * Records outreach actions triggered by webhooks and the outcomes they
 * produce, so the ROI of LinkedIn monitoring can be measured.
 */
class ROITracker {
  /** Logs that an action was taken in response to a webhook event. */
  async trackOutreach(webhook, action) {
    await db.outreach_tracking.insert({
      webhook_id: webhook.id,
      event_type: webhook.type,
      entity_id: webhook.data.entity.id,
      action_taken: action,
      action_date: new Date(),
      sales_rep: action.assignedTo
    });
  }

  /** Records the eventual outcome of a previously tracked outreach. */
  async trackOutcome(webhookId, outcome) {
    await db.outreach_tracking.update({
      where: { webhook_id: webhookId },
      data: {
        outcome: outcome, // 'meeting_booked', 'opportunity_created', etc.
        outcome_date: new Date(),
        revenue_impact: calculateRevenue(outcome)
      }
    });
  }

  /**
   * Aggregates per-event-type conversion metrics over the last
   * `timeframe` days.
   * @param {number} timeframe - look-back window in days (positive integer)
   * @throws {RangeError} if timeframe is not a positive integer
   */
  async getMetrics(timeframe = 30) {
    // Validate before interpolation: the original spliced `timeframe`
    // directly into SQL, an injection risk if it ever comes from user input.
    const days = Number.parseInt(timeframe, 10);
    if (!Number.isInteger(days) || days <= 0) {
      throw new RangeError(`timeframe must be a positive integer, got ${timeframe}`);
    }
    const results = await db.query(`
      SELECT
        event_type,
        COUNT(*) as total_events,
        COUNT(outcome) as responded,
        COUNT(CASE WHEN outcome = 'meeting_booked' THEN 1 END) as meetings,
        COUNT(CASE WHEN outcome = 'opportunity_created' THEN 1 END) as opportunities,
        SUM(revenue_impact) as pipeline_value
      FROM outreach_tracking
      WHERE action_date > NOW() - INTERVAL '${days} days'
      GROUP BY event_type
    `);
    return results.map((r) => ({
      ...r,
      response_rate: r.responded / r.total_events,
      meeting_rate: r.meetings / r.total_events,
      opportunity_rate: r.opportunities / r.total_events,
      // Avoid Infinity/NaN when no opportunities were created.
      avg_deal_size: r.opportunities > 0 ? r.pipeline_value / r.opportunities : null
    }));
  }
}
Error Handling & Resilience
🔄 Implement Retry Logic
Build resilient systems that handle transient failures gracefully.
/**
 * Runs `fn`, retrying transient failures with configurable backoff.
 *
 * The original called a global `sleep` that was never defined, so the
 * first retry crashed; the helper is now defined locally. The trailing
 * `throw lastError` was unreachable (the loop always returns or throws)
 * and has been removed.
 *
 * @param {() => Promise<any>} fn - operation to attempt
 * @param {object} [options]
 * @param {number} [options.maxAttempts=3] - total attempts including the first
 * @param {'exponential'|string} [options.backoff='exponential'] - delay growth
 * @param {number} [options.initialDelay=1000] - first retry delay in ms
 * @param {number} [options.maxDelay=30000] - cap on exponential delay in ms
 * @param {(error: Error) => boolean} [options.shouldRetry] - retry predicate
 * @returns the result of the first successful `fn()` call
 * @throws the last error when retries are exhausted or not retryable
 */
async function withRetry(fn, options = {}) {
  const {
    maxAttempts = 3,
    backoff = 'exponential',
    initialDelay = 1000,
    maxDelay = 30000,
    shouldRetry = (error) => error.status >= 500
  } = options;

  // Local delay helper (the original relied on an undefined global).
  const sleep = (ms) => new Promise((resolve) => setTimeout(resolve, ms));

  for (let attempt = 0; attempt < maxAttempts; attempt++) {
    try {
      return await fn();
    } catch (error) {
      // Give up on non-retryable errors or after the final attempt.
      if (!shouldRetry(error) || attempt === maxAttempts - 1) {
        throw error;
      }
      // Exponential delays double each attempt, capped at maxDelay;
      // any other backoff value means a fixed delay.
      const delay = backoff === 'exponential'
        ? Math.min(initialDelay * Math.pow(2, attempt), maxDelay)
        : initialDelay;
      console.log(`Retry attempt ${attempt + 1} after ${delay}ms`);
      await sleep(delay);
    }
  }
}
// Usage: retry on server errors and dropped connections
const retryOnServerOrConnectionError = (error) =>
  error.status >= 500 || error.code === 'ECONNRESET';

const subscription = await withRetry(
  () => client.subscriptions.create(data),
  { shouldRetry: retryOnServerOrConnectionError }
);
📝 Comprehensive Logging
Log enough context to debug issues without exposing sensitive data.
/**
 * Structured logger that redacts sensitive fields before output.
 *
 * Fixes a defect in `sanitize`: arrays were fed through
 * `Object.entries(...).reduce(..., {})`, which collapsed them into
 * plain objects keyed by index (e.g. `['a','b']` became `{0:'a',1:'b'}`).
 * Arrays are now preserved as arrays.
 */
class SafeLogger {
  /**
   * Emits one structured log record with sanitized context.
   * Stack traces are included only in development builds.
   */
  log(level, message, context = {}) {
    // Sanitize sensitive data before anything reaches the console.
    const safeContext = this.sanitize(context);
    console.log({
      timestamp: new Date().toISOString(),
      level,
      message,
      ...safeContext,
      // Add request context
      requestId: context.requestId,
      userId: context.userId,
      // Performance metrics
      duration: context.duration,
      // Error details
      error: context.error ? {
        message: context.error.message,
        stack: process.env.NODE_ENV === 'development'
          ? context.error.stack
          : undefined,
        code: context.error.code
      } : undefined
    });
  }

  /**
   * Returns a deep copy of `obj` with values under sensitive-looking
   * keys replaced by '[REDACTED]'. Arrays keep their array shape.
   */
  sanitize(obj) {
    const sensitive = ['api_key', 'secret', 'password', 'linkedin_url'];
    if (Array.isArray(obj)) {
      // Recurse into object elements; copy primitives as-is.
      return obj.map((item) =>
        typeof item === 'object' && item !== null ? this.sanitize(item) : item
      );
    }
    return Object.entries(obj).reduce((acc, [key, value]) => {
      if (sensitive.some((s) => key.toLowerCase().includes(s))) {
        acc[key] = '[REDACTED]';
      } else if (typeof value === 'object' && value !== null) {
        acc[key] = this.sanitize(value);
      } else {
        acc[key] = value;
      }
      return acc;
    }, {});
  }
}
Scaling Considerations
🚀 Preparing for Scale
Design your integration to handle growth from hundreds to thousands of monitored entities.
Database Optimization
- Index webhook event IDs for duplicate checking
- Partition large tables by date
- Archive old webhook data regularly
- Use read replicas for analytics queries
Queue Management
- Use separate queues for different priority levels
- Implement circuit breakers for failing endpoints
- Monitor queue depth and processing times
- Scale workers based on queue metrics
Monitoring & Alerting
- Alert on webhook processing failures >5%
- Monitor API rate limit usage
- Track subscription creation/deletion patterns
- Set up dashboards for key metrics
Implementation Checklist
✅ Production Readiness Checklist
Next Steps
Now that you understand the best practices:
- Review our webhook security tutorial
- Check the troubleshooting guide for common issues
- Join our Discord community to share your experiences