Performance Architecture Fundamentals
Performance optimization in Salesforce requires a deep understanding of platform limits, architecture patterns, and optimization techniques. This guide provides technical leaders with advanced strategies for building scalable, high-performance Salesforce applications.
Poor performance can cripple user adoption and business processes. By implementing the optimization techniques in this guide, you can ensure your Salesforce instance performs optimally even under heavy load and with millions of records.
Query Optimization and SOQL Performance
## Advanced SOQL Optimization
### Query Performance Analysis
```apex
public class QueryOptimizer {
// BAD: Non-selective query with multiple OR conditions.
// OR across different fields prevents index use and forces a full scan once
// the object exceeds the selectivity threshold.
public static List<Account> getAccountsInefficient(Set<String> industries) {
return [
SELECT Id, Name, Industry, AnnualRevenue,
(SELECT Id, Name FROM Contacts),
(SELECT Id, Amount FROM Opportunities)
FROM Account
WHERE Industry IN :industries
OR AnnualRevenue > 1000000
OR CreatedDate = THIS_YEAR
];
}
// GOOD: Optimized query with selective filters.
// Bind variables (:industries, :minRevenue) keep the dynamic query safe from
// SOQL injection and let the optimizer reuse the plan.
public static List<Account> getAccountsOptimized(
Set<String> industries,
Decimal minRevenue
) {
// Use indexed fields and avoid OR conditions
String query = 'SELECT Id, Name, Industry, AnnualRevenue ' +
'FROM Account ' +
'WHERE Industry IN :industries ' +
'AND AnnualRevenue >= :minRevenue';
return Database.query(query);
}
// BEST: Fully selective query (Id IN :accountIds) with related data fetched
// via subqueries in the same statement.
// FIX: child relationship lists (acc.Contacts, acc.Opportunities) are
// read-only in Apex and cannot be assigned after the query, so the previous
// "combine results" step did not compile; with an Id filter the subqueries
// are cheap and return the children directly.
public static List<Account> getAccountsHighPerformance(
Set<Id> accountIds,
Set<String> industries
) {
return [
SELECT Id, Name, Industry, AnnualRevenue,
LastModifiedDate, OwnerId,
(SELECT Id, Name FROM Contacts),
(SELECT Id, Amount FROM Opportunities)
FROM Account
WHERE Id IN :accountIds
AND Industry IN :industries
WITH SECURITY_ENFORCED
LIMIT 10000
];
}
}
```
### Query Plan Analysis
```apex
// Use Query Plan to analyze performance
// Smoke-test query latency. NOTE(review): wall-clock assertions are
// inherently flaky on shared infrastructure; treat the threshold as a smoke
// check and use the Developer Console Query Plan tool for real selectivity
// analysis.
@isTest
private class QueryPerformanceTest {
@isTest
static void analyzeQueryPerformance() {
// Create test data
List<Account> testAccounts = TestDataFactory.createAccounts(10000);
insert testAccounts;
// FIX: Test.startTest()/stopTest() gives the measured query a fresh set of
// governor limits so the setup insert does not skew the measurement.
Test.startTest();
Long startTime = System.currentTimeMillis();
List<Account> results = [
SELECT Id, Name
FROM Account
WHERE Industry = 'Technology'
AND AnnualRevenue > 1000000
ORDER BY AnnualRevenue DESC
LIMIT 100
];
Long queryTime = System.currentTimeMillis() - startTime;
Test.stopTest();
System.debug('Query execution time: ' + queryTime + 'ms');
// Assert performance threshold
System.assert(queryTime < 1000, 'Query too slow: ' + queryTime + 'ms');
}
}
```
Governor Limits Management
## Strategic Governor Limits Management
### Heap Size Optimization
```apex
public class HeapSizeOptimizer {
// BAD: builds the full industry map up front, holding every record on the
// heap simultaneously.
public static void processLargeDataSetInefficient(List<Account> accounts) {
Map<String, List<Account>> accountsByIndustry = new Map<String, List<Account>>();
for (Account acc : accounts) {
if (!accountsByIndustry.containsKey(acc.Industry)) {
accountsByIndustry.put(acc.Industry, new List<Account>());
}
accountsByIndustry.get(acc.Industry).add(acc);
}
// This holds all data in memory
for (String industry : accountsByIndustry.keySet()) {
processIndustryAccounts(accountsByIndustry.get(industry));
}
}
// GOOD: process ids in fixed-size chunks to bound heap growth.
// FIX: the previous version re-iterated the entire Set for every chunk
// (O(n^2) overall); copying the ids to a List once makes chunk extraction
// a single O(n) pass.
public static void processLargeDataSetOptimized(Set<Id> accountIds) {
Integer chunkSize = 200;
List<Id> orderedIds = new List<Id>(accountIds);
for (Integer offset = 0; offset < orderedIds.size(); offset += chunkSize) {
Set<Id> chunkIds = new Set<Id>();
Integer endIndex = Math.min(offset + chunkSize, orderedIds.size());
for (Integer i = offset; i < endIndex; i++) {
chunkIds.add(orderedIds[i]);
}
// Process chunk
processAccountChunk(chunkIds);
// Clear references to free memory
chunkIds.clear();
// Monitor heap usage and bail out before hitting the hard limit
Integer heapUsed = Limits.getHeapSize();
Integer heapLimit = Limits.getLimitHeapSize();
System.debug('Heap usage: ' + heapUsed + '/' + heapLimit);
if (heapUsed > heapLimit * 0.8) {
// Implement defensive strategy
System.debug('Heap usage critical, implementing defensive measures');
break;
}
}
}
}
```
### CPU Time Optimization
```apex
public class CPUTimeOptimizer {
// Processes each item, checking consumed CPU time every 100 iterations and
// deferring the unprocessed remainder to an async job when the limit nears.
public static void optimizeCPUIntensiveOperation(List<String> dataToProcess) {
Long startCPU = Limits.getCpuTime();
// FIX: derive the cutoff (90% of budget) from the actual context limit —
// 10s synchronous, 60s asynchronous — instead of a hard-coded 9000ms that
// wastes most of the budget in batch/queueable contexts.
Integer cpuBudget = (Limits.getLimitCpuTime() * 9) / 10;
Integer processedCount = 0;
for (String data : dataToProcess) {
// Check CPU time periodically to keep the check itself cheap
if (Math.mod(processedCount, 100) == 0) {
Long cpuUsed = Limits.getCpuTime() - startCPU;
// If approaching limit, defer processing
if (cpuUsed > cpuBudget) {
System.debug('CPU limit approaching, deferring remaining work');
deferRemainingWork(dataToProcess, processedCount);
break;
}
}
// Process data
performComplexCalculation(data);
processedCount++;
}
}
// Re-enqueue the unprocessed tail of allData as a Queueable job.
private static void deferRemainingWork(List<String> allData, Integer startIndex) {
List<String> remainingData = new List<String>();
for (Integer i = startIndex; i < allData.size(); i++) {
remainingData.add(allData[i]);
}
// Enqueue for asynchronous processing
System.enqueueJob(new ProcessRemainingDataQueueable(remainingData));
}
}
```
Bulk Processing Patterns
## High-Performance Bulk Processing
### Batch Apex Optimization
```apex
global class OptimizedBatchProcessor implements
Database.Batchable<sObject>,
Database.Stateful,
Database.AllowsCallouts {
// Database.Stateful keeps these fields alive across execute() invocations.
private BatchMetrics metrics;
private List<String> errors;
global OptimizedBatchProcessor() {
this.metrics = new BatchMetrics();
this.errors = new List<String>();
}
global Database.QueryLocator start(Database.BatchableContext bc) {
metrics.startTime = System.now();
// Selective filter plus stable ordering so chunks are deterministic
String query = 'SELECT Id, Name, Status__c, Data__c ' +
'FROM Custom_Object__c ' +
'WHERE Status__c = \'Pending\' ' +
'AND CreatedDate >= LAST_N_DAYS:30 ' +
'ORDER BY CreatedDate ASC';
return Database.getQueryLocator(query);
}
global void execute(Database.BatchableContext bc, List<Custom_Object__c> scope) {
// Track execution metrics
metrics.batchesProcessed++;
metrics.recordsProcessed += scope.size();
try {
// Savepoint lets a failing chunk roll back its partial DML
Savepoint sp = Database.setSavepoint();
try {
List<Custom_Object__c> toUpdate = new List<Custom_Object__c>();
List<Integration_Request__c> integrationRequests = new List<Integration_Request__c>();
for (Custom_Object__c obj : scope) {
ProcessResult result = processRecord(obj);
if (result.isSuccess) {
obj.Status__c = 'Processed';
obj.Processed_Date__c = System.now();
toUpdate.add(obj);
// Create integration request if needed
if (result.requiresIntegration) {
integrationRequests.add(createIntegrationRequest(obj));
}
} else {
errors.add('Failed to process: ' + obj.Id + ' - ' + result.errorMessage);
}
}
// allOrNone=false: partial success; per-row failures handled below
if (!toUpdate.isEmpty()) {
Database.SaveResult[] updateResults = Database.update(toUpdate, false);
handleDMLResults(updateResults, toUpdate);
}
if (!integrationRequests.isEmpty()) {
insert integrationRequests;
}
} catch (Exception e) {
Database.rollback(sp);
errors.add('Batch execution failed: ' + e.getMessage());
throw e;
}
} catch (Exception e) {
// Log but don't fail entire batch run
System.debug(LoggingLevel.ERROR, 'Batch error: ' + e);
metrics.failedBatches++;
}
}
global void finish(Database.BatchableContext bc) {
metrics.endTime = System.now();
metrics.duration = metrics.endTime.getTime() - metrics.startTime.getTime();
// Send summary email to the running user
Messaging.SingleEmailMessage email = new Messaging.SingleEmailMessage();
email.setToAddresses(new String[] { UserInfo.getUserEmail() });
email.setSubject('Batch Process Completed - ' + bc.getJobId());
String body = 'Batch processing completed with the following metrics:\n\n';
body += 'Total Records: ' + metrics.recordsProcessed + '\n';
body += 'Batches Processed: ' + metrics.batchesProcessed + '\n';
body += 'Failed Batches: ' + metrics.failedBatches + '\n';
body += 'Duration: ' + (metrics.duration / 1000) + ' seconds\n';
// FIX: the old expression recordsProcessed / (duration / 1000) threw a
// divide-by-zero for any run shorter than one second (integer division
// truncated duration/1000 to 0).
Decimal recordsPerSecond = metrics.duration > 0
? (metrics.recordsProcessed * 1000.0) / metrics.duration
: Decimal.valueOf(metrics.recordsProcessed);
body += 'Average Records/Second: ' + recordsPerSecond.setScale(2) + '\n\n';
if (!errors.isEmpty()) {
body += 'Errors:\n' + String.join(errors, '\n');
}
email.setPlainTextBody(body);
Messaging.sendEmail(new Messaging.SingleEmailMessage[] { email });
}
// Execution metrics accumulated across batches via Database.Stateful
public class BatchMetrics {
public DateTime startTime { get; set; }
public DateTime endTime { get; set; }
public Long duration { get; set; }
public Integer batchesProcessed { get; set; }
public Integer recordsProcessed { get; set; }
public Integer failedBatches { get; set; }
public BatchMetrics() {
this.batchesProcessed = 0;
this.recordsProcessed = 0;
this.failedBatches = 0;
}
}
}
```
### Queueable Chain Pattern
```apex
public class QueueableChainProcessor implements Queueable, Database.AllowsCallouts {
private List<Id> recordIds;
private Integer currentIndex;
private Integer batchSize;
private ProcessingContext context;
public QueueableChainProcessor(List<Id> recordIds, ProcessingContext context) {
this.recordIds = recordIds;
this.currentIndex = 0;
this.batchSize = 100;
this.context = context != null ? context : new ProcessingContext();
}
public void execute(QueueableContext qc) {
try {
// Extract the current batch window
List<Id> currentBatch = new List<Id>();
Integer endIndex = Math.min(currentIndex + batchSize, recordIds.size());
for (Integer i = currentIndex; i < endIndex; i++) {
currentBatch.add(recordIds[i]);
}
// Process with governor limits checking
ProcessingResult result = processBatchWithLimitsCheck(currentBatch);
context.addResult(result);
currentIndex = endIndex;
// Chain if more records to process
if (currentIndex < recordIds.size() && !Test.isRunningTest()) {
// Check if we can chain (max 5 depth)
if (context.chainDepth < 5) {
context.chainDepth++;
// FIX: Apex List has no subList() method — the previous code did not
// compile. Copy the unprocessed tail manually before re-enqueueing.
List<Id> remainingIds = new List<Id>();
for (Integer i = currentIndex; i < recordIds.size(); i++) {
remainingIds.add(recordIds[i]);
}
System.enqueueJob(new QueueableChainProcessor(remainingIds, context));
} else {
// Schedule for later processing
scheduleRemainingWork(recordIds, currentIndex, context);
}
} else {
// All processing complete
finalizeProcessing(context);
}
} catch (Exception e) {
handleProcessingError(e, context);
}
}
private ProcessingResult processBatchWithLimitsCheck(List<Id> batchIds) {
ProcessingResult result = new ProcessingResult();
for (Id recordId : batchIds) {
// FIX: derive thresholds from the actual context limits instead of the
// hard-coded synchronous-context numbers (90 / 9000), which were wrong
// for async contexts.
if (Limits.getQueries() > Limits.getLimitQueries() - 10) {
result.addWarning('Approaching SOQL limit');
break;
}
if (Limits.getDMLRows() > Limits.getLimitDMLRows() - 1000) {
result.addWarning('Approaching DML row limit');
break;
}
// Process record
try {
processIndividualRecord(recordId);
result.successCount++;
} catch (Exception e) {
result.addError(recordId, e.getMessage());
}
}
return result;
}
}
```
Lightning Component Performance
## Lightning Web Component Optimization
### Data Loading Strategies
```javascript
import { LightningElement, wire, track } from 'lwc';
import { refreshApex } from '@salesforce/apex';
import getAccountData from '@salesforce/apex/AccountController.getAccountData';
import getAccountDataLazy from '@salesforce/apex/AccountController.getAccountDataLazy';
export default class PerformantDataTable extends LightningElement {
@track accounts = [];
@track isLoading = true;
@track loadedRecords = 0;
// Pagination parameters
pageSize = 50;
currentPage = 1;
totalRecords = 0;
// Wire with caching
@wire(getAccountData, {
pageSize: '$pageSize',
pageNumber: '$currentPage'
})
wiredAccounts(result) {
this.wiredAccountResult = result;
if (result.data) {
// Process data efficiently
this.processAccountData(result.data);
this.isLoading = false;
} else if (result.error) {
this.handleError(result.error);
}
}
processAccountData(data) {
// Use requestAnimationFrame for smooth rendering
requestAnimationFrame(() => {
this.accounts = data.accounts.map(account => ({
...account,
// Computed properties
formattedRevenue: this.formatCurrency(account.AnnualRevenue),
industryClass: this.getIndustryClass(account.Industry)
}));
this.totalRecords = data.totalCount;
this.loadedRecords = this.accounts.length;
});
}
// Lazy loading implementation
loadMoreData() {
if (this.isLoading || this.loadedRecords >= this.totalRecords) {
return;
}
this.isLoading = true;
getAccountDataLazy({
offset: this.loadedRecords,
limitSize: this.pageSize
})
.then(result => {
// Append new data
const newAccounts = result.accounts.map(account => ({
...account,
formattedRevenue: this.formatCurrency(account.AnnualRevenue),
industryClass: this.getIndustryClass(account.Industry)
}));
// Use spread operator for immutability
this.accounts = [...this.accounts, ...newAccounts];
this.loadedRecords = this.accounts.length;
})
.catch(error => {
this.handleError(error);
})
.finally(() => {
this.isLoading = false;
});
}
// Debounced search
handleSearch(event) {
// Clear existing timeout
clearTimeout(this.searchTimeout);
const searchTerm = event.target.value;
// Debounce search calls
this.searchTimeout = setTimeout(() => {
this.performSearch(searchTerm);
}, 300);
}
// Virtual scrolling for large datasets
renderedCallback() {
if (this.template.querySelector('.data-table-container')) {
this.setupVirtualScrolling();
}
}
setupVirtualScrolling() {
const container = this.template.querySelector('.data-table-container');
const scrollHandler = this.handleScroll.bind(this);
// Remove existing listener
container.removeEventListener('scroll', scrollHandler);
// Add optimized scroll listener
container.addEventListener('scroll',
this.throttle(scrollHandler, 100),
{ passive: true }
);
}
// Utility: Throttle function calls
throttle(func, limit) {
let inThrottle;
return function() {
const args = arguments;
const context = this;
if (!inThrottle) {
func.apply(context, args);
inThrottle = true;
setTimeout(() => inThrottle = false, limit);
}
}
}
}
```
### Apex Controller Optimization
```apex
public with sharing class AccountController {
// Cacheable read for @wire-based pagination.
// FIX (consistency): enforce field-level security here the same way
// getAccountDataLazy does, so both paths return only fields the running
// user can read.
@AuraEnabled(cacheable=true)
public static AccountDataWrapper getAccountData(
Integer pageSize,
Integer pageNumber
) {
try {
// Calculate offset. NOTE: platform caps SOQL OFFSET at 2000 rows;
// deeper pages need keyset (Id/Name cursor) pagination.
Integer offset = (pageNumber - 1) * pageSize;
// Get total count with efficient aggregate query
Integer totalCount = [
SELECT COUNT()
FROM Account
WHERE IsActive__c = true
];
// Paginated data with selective fields, FLS-stripped
List<Account> accounts = (List<Account>) Security.stripInaccessible(
AccessType.READABLE,
[SELECT Id, Name, Industry, AnnualRevenue,
Rating, NumberOfEmployees
FROM Account
WHERE IsActive__c = true
ORDER BY Name
LIMIT :pageSize
OFFSET :offset]
).getRecords();
return new AccountDataWrapper(accounts, totalCount);
} catch (Exception e) {
throw new AuraHandledException(e.getMessage());
}
}
// Imperative lazy-load path; skips the COUNT() query (totalCount = null)
@AuraEnabled
public static AccountDataWrapper getAccountDataLazy(
Integer offset,
Integer limitSize
) {
try {
// Query with field-level security enforced
List<Account> accounts = (List<Account>) Security.stripInaccessible(
AccessType.READABLE,
[SELECT Id, Name, Industry, AnnualRevenue,
Rating, NumberOfEmployees
FROM Account
WHERE IsActive__c = true
ORDER BY Name
LIMIT :limitSize
OFFSET :offset]
).getRecords();
return new AccountDataWrapper(accounts, null);
} catch (Exception e) {
throw new AuraHandledException(e.getMessage());
}
}
// Wrapper class for structured response
public class AccountDataWrapper {
@AuraEnabled public List<Account> accounts { get; set; }
@AuraEnabled public Integer totalCount { get; set; }
public AccountDataWrapper(List<Account> accounts, Integer totalCount) {
this.accounts = accounts;
this.totalCount = totalCount;
}
}
}
```
Database Performance Patterns
## Advanced Database Optimization
### Skinny Tables Implementation
```apex
// Work with Salesforce Support to implement skinny tables
// for frequently accessed objects with many fields
public class SkinnyTableOptimization {
// Query only the narrow set of frequently used fields so the skinny table
// (provisioned via Salesforce Support) can serve the request.
public static List<Account> getAccountsSkinnyOptimized() {
return [
SELECT Id, Name, Industry, AnnualRevenue, OwnerId
FROM Account
WHERE Industry = 'Technology'
AND AnnualRevenue > 1000000
ORDER BY AnnualRevenue DESC
LIMIT 1000
];
}
// Fetch cases without heavyweight long-text fields, then backfill
// Description only for the escalated subset.
public static List<Case> getCasesOptimized(Set<Id> caseIds) {
// First pass: lightweight fields only
Map<Id, Case> casesById = new Map<Id, Case>([
SELECT Id, Subject, Status, Priority, OwnerId
FROM Case
WHERE Id IN :caseIds
]);
// Identify the subset that actually needs the large field
Set<Id> escalatedIds = new Set<Id>();
for (Case currentCase : casesById.values()) {
if (currentCase.Status == 'Escalated') {
escalatedIds.add(currentCase.Id);
}
}
// Second pass: pull Description only for escalated cases
if (!escalatedIds.isEmpty()) {
for (Case detailed : [
SELECT Id, Description
FROM Case
WHERE Id IN :escalatedIds
]) {
casesById.get(detailed.Id).Description = detailed.Description;
}
}
return casesById.values();
}
}
```
### Custom Indexing Strategies
```apex
public class IndexOptimization {
// External ID fields are auto-indexed, making this lookup selective.
// FIX: a single-row SOQL assignment throws System.QueryException
// ('List has no rows for assignment') when nothing matches; query into a
// list and return null for "not found" instead.
public static Account getAccountByExternalId(String externalId) {
List<Account> matches = [
SELECT Id, Name, Industry
FROM Account
WHERE External_ID__c = :externalId
LIMIT 1
];
return matches.isEmpty() ? null : matches[0];
}
// Composite index usage.
// NOTE(review): assumes a custom composite index exists on
// CloseDate + StageName — confirm with Salesforce Support.
public static List<Opportunity> getOpportunitiesOptimized(
Date startDate,
Date endDate,
Set<String> stages
) {
return [
SELECT Id, Name, Amount, CloseDate, StageName
FROM Opportunity
WHERE CloseDate >= :startDate
AND CloseDate <= :endDate
AND StageName IN :stages
AND IsDeleted = false
ORDER BY CloseDate DESC
];
}
// Two-step filtering for non-selective queries.
// NOTE(review): a leading-wildcard LIKE ('%@domain') cannot use an index;
// step 1 is only "selective" relative to the second filter. Consider a
// formula/External ID field holding the domain for true selectivity.
public static List<Contact> getContactsWithComplexCriteria(
String emailDomain,
String title
) {
// Step 1: Get IDs with the narrower filter, capped at 10k rows
Set<Id> contactIds = new Map<Id, Contact>([
SELECT Id
FROM Contact
WHERE Email LIKE :('%@' + emailDomain)
LIMIT 10000
]).keySet();
// Step 2: Filter with non-selective criteria over the small Id set
return [
SELECT Id, Name, Email, Title, AccountId
FROM Contact
WHERE Id IN :contactIds
AND Title LIKE :('%' + title + '%')
];
}
}
```
Monitoring and Diagnostics
## Performance Monitoring Framework
### Custom Performance Monitoring
```apex
public class PerformanceMonitor {
// Baseline governor-limit consumption captured at construction; all report
// figures are deltas against these values.
private Long startTime;
private Long startCPU;
private Integer startQueries;
private Integer startDMLRows;
private Integer startHeap;
private Map<String, OperationMetrics> operations;
public PerformanceMonitor() {
this.operations = new Map<String, OperationMetrics>();
captureStartMetrics();
}
private void captureStartMetrics() {
this.startTime = System.currentTimeMillis();
this.startCPU = Limits.getCpuTime();
this.startQueries = Limits.getQueries();
this.startDMLRows = Limits.getDMLRows();
this.startHeap = Limits.getHeapSize();
}
// Begin timing a named operation; re-using a name overwrites the previous
// measurement for that name.
public void startOperation(String operationName) {
OperationMetrics metrics = new OperationMetrics();
metrics.startTime = System.currentTimeMillis();
metrics.startCPU = Limits.getCpuTime();
operations.put(operationName, metrics);
}
// Close out a named operation; unknown names are silently ignored.
public void endOperation(String operationName) {
if (operations.containsKey(operationName)) {
OperationMetrics metrics = operations.get(operationName);
metrics.endTime = System.currentTimeMillis();
metrics.endCPU = Limits.getCpuTime();
metrics.duration = metrics.endTime - metrics.startTime;
metrics.cpuTime = metrics.endCPU - metrics.startCPU;
}
}
public PerformanceReport generateReport() {
PerformanceReport report = new PerformanceReport();
// Overall metrics (delta vs. construction-time baseline)
report.totalDuration = System.currentTimeMillis() - startTime;
report.totalCPUTime = Limits.getCpuTime() - startCPU;
report.totalQueries = Limits.getQueries() - startQueries;
report.totalDMLRows = Limits.getDMLRows() - startDMLRows;
report.peakHeapSize = Limits.getHeapSize();
// Operation breakdown
report.operations = new List<OperationReport>();
for (String opName : operations.keySet()) {
OperationMetrics metrics = operations.get(opName);
// FIX: skip operations that were started but never ended — their null
// duration previously caused a NullPointerException below.
if (metrics.duration == null) {
continue;
}
OperationReport opReport = new OperationReport();
opReport.name = opName;
opReport.duration = metrics.duration;
opReport.cpuTime = metrics.cpuTime;
// FIX: guard against divide-by-zero for sub-millisecond totals
opReport.percentOfTotal = report.totalDuration > 0
? (metrics.duration * 100.0) / report.totalDuration
: 0;
report.operations.add(opReport);
}
// Limits usage snapshot
report.limitsUsage = new Map<String, LimitUsage>{
'SOQL Queries' => new LimitUsage(
Limits.getQueries(),
Limits.getLimitQueries()
),
'DML Rows' => new LimitUsage(
Limits.getDMLRows(),
Limits.getLimitDMLRows()
),
'CPU Time' => new LimitUsage(
Limits.getCpuTime(),
Limits.getLimitCpuTime()
),
'Heap Size' => new LimitUsage(
Limits.getHeapSize(),
Limits.getLimitHeapSize()
)
};
return report;
}
public void logPerformanceMetrics() {
PerformanceReport report = generateReport();
// Create performance log record.
// NOTE(review): assumes getClassName()/getMethodName() helpers are
// defined elsewhere in the project — confirm they exist.
Performance_Log__c log = new Performance_Log__c(
Class_Name__c = getClassName(),
Method_Name__c = getMethodName(),
Duration_ms__c = report.totalDuration,
CPU_Time_ms__c = report.totalCPUTime,
SOQL_Queries__c = report.totalQueries,
DML_Rows__c = report.totalDMLRows,
Heap_Size__c = report.peakHeapSize,
Timestamp__c = System.now()
);
// Store operation details in JSON
log.Operation_Details__c = JSON.serialize(report.operations);
// Async insert to avoid impacting the transaction being measured
System.enqueueJob(new LogPerformanceQueueable(log));
}
// Raw per-operation timing data
public class OperationMetrics {
public Long startTime;
public Long endTime;
public Long startCPU;
public Long endCPU;
public Long duration;
public Long cpuTime;
}
// Aggregated report returned by generateReport()
public class PerformanceReport {
public Long totalDuration;
public Long totalCPUTime;
public Integer totalQueries;
public Integer totalDMLRows;
public Integer peakHeapSize;
public List<OperationReport> operations;
public Map<String, LimitUsage> limitsUsage;
}
public class OperationReport {
public String name;
public Long duration;
public Long cpuTime;
public Decimal percentOfTotal;
}
// Usage-vs-cap snapshot for one governor limit.
// FIX: 'limit' is a reserved word in Apex and cannot be used as an
// identifier — the original field/parameter did not compile; renamed to
// limitValue.
public class LimitUsage {
public Integer used;
public Integer limitValue;
public Decimal percentUsed;
public LimitUsage(Integer used, Integer limitValue) {
this.used = used;
this.limitValue = limitValue;
this.percentUsed = limitValue > 0 ? (used * 100.0) / limitValue : 0;
}
}
}
// Usage example
// Example of instrumenting a multi-step business process with
// PerformanceMonitor.
public class BusinessLogicClass {
public void complexBusinessProcess() {
PerformanceMonitor perf = new PerformanceMonitor();
try {
// Time each phase separately so the report shows a per-phase breakdown
perf.startOperation('Data Query');
List<Account> accounts = queryAccounts();
perf.endOperation('Data Query');
perf.startOperation('Processing');
processAccounts(accounts);
perf.endOperation('Processing');
perf.startOperation('Integration');
callExternalSystem(accounts);
perf.endOperation('Integration');
} finally {
// Metrics are persisted even when a phase throws
perf.logPerformanceMetrics();
}
}
}
```
Certified Partner
Salesforce certified consultants
5-Star Rated
Consistently high client satisfaction
200+ Projects
Successfully delivered
Enterprise Ready
Fortune 500 trusted