# Performance Optimization

Advanced techniques for optimizing EvoBin storage performance, reducing costs, and improving user experience.
## Understanding Performance Metrics

### Key Metrics to Monitor

```typescript
interface PerformanceMetrics {
// Upload performance
uploadTime: number // ms
uploadSpeed: number // bytes/ms
chunkUploadTimes: number[]
// Download performance
downloadTime: number // ms
downloadSpeed: number // bytes/ms
// Blockchain metrics
confirmationTime: number // ms
gasUsed: number // credits
transactionCount: number
// Storage metrics
storageUsed: number // bytes
chunkDistribution: Record<string, number> // node -> chunk count
redundancyLevel: number
}
class PerformanceMonitor {
private metrics: Map<string, PerformanceMetrics> = new Map()
startUpload(fileId: string): void {
this.metrics.set(fileId, {
uploadTime: 0,
uploadSpeed: 0,
chunkUploadTimes: [],
downloadTime: 0,
downloadSpeed: 0,
confirmationTime: 0,
gasUsed: 0,
transactionCount: 0,
storageUsed: 0,
chunkDistribution: {},
redundancyLevel: 0
})
}
recordChunkUpload(fileId: string, chunkSize: number, uploadTime: number): void {
const metrics = this.metrics.get(fileId)
if (metrics) {
metrics.chunkUploadTimes.push(uploadTime)
metrics.uploadTime += uploadTime
metrics.storageUsed += chunkSize
// Cumulative throughput, not just the most recent chunk's speed
metrics.uploadSpeed = metrics.storageUsed / metrics.uploadTime
}
}
getMetrics(fileId: string): PerformanceMetrics | undefined {
return this.metrics.get(fileId)
}
getAverageUploadSpeed(): number {
const speeds = Array.from(this.metrics.values())
.map(m => m.uploadSpeed)
.filter(s => s > 0)
if (speeds.length === 0) return 0
return speeds.reduce((sum, speed) => sum + speed, 0) / speeds.length
}
}
```
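A minimal usage sketch follows; `uploadChunk` here is a hypothetical stand-in for your real upload call:

```typescript
// Hypothetical stand-in for the real chunk upload call
async function uploadChunk(chunk: ArrayBuffer): Promise<void> {
  await new Promise(resolve => setTimeout(resolve, 10))
}

const monitor = new PerformanceMonitor()

async function uploadWithMetrics(fileId: string, chunks: ArrayBuffer[]): Promise<void> {
  monitor.startUpload(fileId)
  for (const chunk of chunks) {
    const start = performance.now()
    await uploadChunk(chunk)
    monitor.recordChunkUpload(fileId, chunk.byteLength, performance.now() - start)
  }
  console.log('Metrics:', monitor.getMetrics(fileId))
  console.log('Average upload speed (bytes/ms):', monitor.getAverageUploadSpeed())
}
```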
## Optimization Strategies

### 1. Chunk Size Optimization

```typescript
class ChunkOptimizer {
private optimalChunkSize = 1024 * 1024 // Start with 1MB
async determineOptimalChunkSize(networkConditions: NetworkConditions): Promise<number> {
const testSizes = [
256 * 1024, // 256KB
512 * 1024, // 512KB
1024 * 1024, // 1MB
2 * 1024 * 1024, // 2MB
4 * 1024 * 1024 // 4MB
]
const results = await Promise.all(
testSizes.map(size => this.testChunkSize(size, networkConditions))
)
this.optimalChunkSize = this.findOptimalSize(results)
return this.optimalChunkSize
}
private async testChunkSize(
chunkSize: number,
conditions: NetworkConditions
): Promise<ChunkTestResult> {
const testData = new ArrayBuffer(chunkSize)
const startTime = performance.now()
try {
await this.uploadChunk(testData, conditions)
const uploadTime = performance.now() - startTime
return {
chunkSize,
uploadTime,
success: true,
speed: chunkSize / uploadTime
}
} catch (error) {
return {
chunkSize,
uploadTime: performance.now() - startTime,
success: false,
error: error instanceof Error ? error.message : String(error)
}
}
}
private findOptimalSize(results: ChunkTestResult[]): number {
const successful = results.filter(r => r.success)
if (successful.length === 0) {
return 1024 * 1024 // Default to 1MB
}
// Prefer the largest chunk size whose throughput stays within ~90% of the best so far
let optimal = successful[0]
for (const result of successful) {
if ((result.speed ?? 0) > (optimal.speed ?? 0) * 0.9 && result.chunkSize > optimal.chunkSize) {
optimal = result
}
}
return optimal.chunkSize
}
private async uploadChunk(data: ArrayBuffer, conditions: NetworkConditions): Promise<void> {
// Placeholder so the example is self-contained: simulates a transfer from the
// measured conditions. Replace with a real test upload to your storage endpoint.
const transferMs = conditions.latency + data.byteLength / conditions.bandwidth
await new Promise(resolve => setTimeout(resolve, transferMs))
}
}
interface NetworkConditions {
latency: number // ms
bandwidth: number // bytes/ms
reliability: number // 0-1
}
interface ChunkTestResult {
chunkSize: number
uploadTime: number
success: boolean
speed?: number
error?: string
}
```
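A usage sketch, assuming network conditions measured elsewhere (the numbers below are illustrative, not recommendations):

```typescript
const chunkOptimizer = new ChunkOptimizer()

const conditions: NetworkConditions = {
  latency: 50,      // ms, e.g. measured via a HEAD request
  bandwidth: 1250,  // bytes/ms (~10 Mbps), e.g. measured via a test transfer
  reliability: 0.98 // fraction of recent requests that succeeded
}

const chunkSize = await chunkOptimizer.determineOptimalChunkSize(conditions)
console.log(`Using ${chunkSize / 1024} KB chunks`)
```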
### 2. Parallel Upload Optimization

```typescript
class ParallelUploadManager {
private maxParallelUploads: number
private activeUploads = 0
constructor(maxParallelUploads = 3) {
this.maxParallelUploads = maxParallelUploads
}
async uploadFileInParallel(
file: File,
chunkSize: number,
onProgress?: (progress: number) => void
): Promise<string[]> {
const chunks = await this.splitFile(file, chunkSize)
const totalChunks = chunks.length
const chunkIds: string[] = []
let completed = 0
const uploadTasks = chunks.map((chunk, index) => async () => {
try {
const chunkId = await this.uploadChunk(chunk, index)
chunkIds[index] = chunkId
completed++
onProgress?.(Math.round((completed / totalChunks) * 100))
} catch (error) {
console.error(`Chunk ${index} upload failed:`, error)
throw error
}
})
await this.executeWithConcurrency(uploadTasks, this.maxParallelUploads)
return chunkIds
}
private async executeWithConcurrency(
tasks: Array<() => Promise<any>>,
concurrency: number
): Promise<void> {
const executing: Promise<any>[] = []
for (const task of tasks) {
const promise = task().finally(() => {
executing.splice(executing.indexOf(promise), 1)
})
executing.push(promise)
if (executing.length >= concurrency) {
await Promise.race(executing)
}
}
await Promise.all(executing)
}
private async splitFile(file: File, chunkSize: number): Promise<ArrayBuffer[]> {
const chunks: ArrayBuffer[] = []
const fileSize = file.size
let offset = 0
while (offset < fileSize) {
const chunk = file.slice(offset, offset + chunkSize)
const arrayBuffer = await chunk.arrayBuffer()
chunks.push(arrayBuffer)
offset += chunkSize
}
return chunks
}
private async uploadChunk(chunk: ArrayBuffer, index: number): Promise<string> {
// Placeholder: replace with a real upload that returns the stored chunk's ID
return `chunk_${index}_${Date.now()}`
}
}
// Usage
const uploadManager = new ParallelUploadManager(4) // 4 parallel uploads
const file = new File(['your file content'], 'large-file.zip')
const chunkIds = await uploadManager.uploadFileInParallel(
file,
1024 * 1024, // 1MB chunks
(progress) => {
console.log(`Upload progress: ${progress}%`)
}
)
```
### 3. Cache Optimization

```typescript
class EvoBinCache {
private cache = new Map<string, CacheEntry>()
private maxSize = 100 * 1024 * 1024 // 100MB cache
private currentSize = 0
async getOrFetch<T>(
key: string,
fetchFn: () => Promise<T>,
ttl: number = 5 * 60 * 1000 // 5 minutes default
): Promise<T> {
const entry = this.cache.get(key)
if (entry && entry.expiresAt > Date.now()) {
entry.lastAccessed = Date.now()
return entry.data as T
}
const data = await fetchFn()
this.set(key, data, ttl)
return data
}
set(key: string, data: any, ttl: number): void {
const size = this.estimateSize(data)
// Subtract an existing entry's size first so overwriting a key doesn't inflate currentSize
const existing = this.cache.get(key)
if (existing) {
this.currentSize -= existing.size
}
while (this.currentSize + size > this.maxSize && this.cache.size > 0) {
this.evictOldest()
}
const entry: CacheEntry = {
data,
size,
createdAt: Date.now(),
lastAccessed: Date.now(),
expiresAt: Date.now() + ttl
}
this.cache.set(key, entry)
this.currentSize += size
}
// Evicts the least-recently-used entry
private evictOldest(): void {
let oldestKey: string | null = null
let oldestTime = Infinity
for (const [key, entry] of this.cache.entries()) {
if (entry.lastAccessed < oldestTime) {
oldestTime = entry.lastAccessed
oldestKey = key
}
}
if (oldestKey) {
const entry = this.cache.get(oldestKey)!
this.currentSize -= entry.size
this.cache.delete(oldestKey)
}
}
private estimateSize(data: any): number {
if (typeof data === 'string') {
return data.length * 2 // UTF-16 code units
} else if (typeof data === 'number') {
return 8 // 64-bit float
} else if (typeof data === 'boolean') {
return 1
} else if (data === null || data === undefined) {
return 0
} else if (data instanceof ArrayBuffer) {
// Must precede the generic object branch, which would otherwise match first
return data.byteLength
} else if (Array.isArray(data)) {
return data.reduce((sum, item) => sum + this.estimateSize(item), 0)
} else if (typeof data === 'object') {
return Object.entries(data).reduce(
(sum, [key, value]) => sum + key.length * 2 + this.estimateSize(value),
0
)
}
return 0
}
// Specialized cache methods for EvoBin
async getFileMetadata(fileId: string): Promise<FileMetadata> {
return this.getOrFetch(
`metadata:${fileId}`,
async () => this.fetchFileMetadata(fileId),
60 * 60 * 1000 // 1 hour TTL
)
}
async getDocument<T>(contractId: string, documentId: string): Promise<T> {
return this.getOrFetch(
`document:${contractId}:${documentId}`,
async () => this.fetchDocument<T>(contractId, documentId),
5 * 60 * 1000 // 5 minute TTL
)
}
async preloadRelatedData(fileId: string): Promise<void> {
const metadata = await this.getFileMetadata(fileId)
const chunkPromises = metadata.chunkIds.map(chunkId =>
this.getOrFetch(
`chunk:${chunkId}`,
async () => this.fetchChunkMetadata(chunkId),
10 * 60 * 1000 // 10 minute TTL
).catch(() => null)
)
await Promise.all(chunkPromises)
}
// Placeholder fetchers so the example compiles; wire these to your real
// DAPI/backend lookups in an actual application
private async fetchFileMetadata(fileId: string): Promise<FileMetadata> {
throw new Error('fetchFileMetadata not implemented')
}
private async fetchDocument<T>(contractId: string, documentId: string): Promise<T> {
throw new Error('fetchDocument not implemented')
}
private async fetchChunkMetadata(chunkId: string): Promise<unknown> {
throw new Error('fetchChunkMetadata not implemented')
}
}
interface CacheEntry {
data: any
size: number
createdAt: number
lastAccessed: number
expiresAt: number
}
interface FileMetadata {
fileId: string
fileName: string
fileSize: number
chunkIds: string[]
encryption: string
createdAt: number
}
```
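A brief usage sketch: repeated reads inside the TTL are served from memory, and `getOrFetch` works for arbitrary keyed data. The IDs and URL below are placeholders:

```typescript
const cache = new EvoBinCache()

// First call fetches and caches; a second call within the 5 minute TTL is a cache hit
const doc1 = await cache.getDocument('contractId123', 'docId456')
const doc2 = await cache.getDocument('contractId123', 'docId456')

// Arbitrary data works too, via getOrFetch with a custom key and TTL
const peers = await cache.getOrFetch(
  'network:peers',
  async () => fetch('https://example.com/peers').then(r => r.json()), // illustrative URL
  30 * 1000 // 30 second TTL
)
```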
### 4. Gas Optimization

```typescript
class GasOptimizer {
private readonly GAS_PRICE_MULTIPLIER = 1.1 // 10% buffer
private readonly MAX_GAS_LIMIT = 1000000
async estimateGasForOperation(operation: string, params: any): Promise<GasEstimate> {
const baseEstimates: Record<string, number> = {
'document.create': 50000,
'document.update': 30000,
'document.delete': 20000,
'identity.create': 100000,
'identity.topup': 80000,
'contract.create': 200000
}
let baseGas = baseEstimates[operation] || 100000
// Adjust based on parameters
if (operation === 'document.create') {
const dataSize = JSON.stringify(params.data).length
baseGas += Math.ceil(dataSize / 1024) * 1000 // 1000 gas per KB
}
const estimatedGas = Math.ceil(baseGas * this.GAS_PRICE_MULTIPLIER)
return {
estimated: estimatedGas,
min: baseGas,
max: Math.min(estimatedGas * 2, this.MAX_GAS_LIMIT)
}
}
async batchOperations<T>(
operations: Array<() => Promise<T>>,
batchSize: number = 10,
delayBetweenBatches: number = 100
): Promise<T[]> {
const results: T[] = []
for (let i = 0; i < operations.length; i += batchSize) {
const batch = operations.slice(i, i + batchSize)
try {
const gasEstimate = await this.estimateBatchGas(batch)
if (gasEstimate > this.MAX_GAS_LIMIT) {
const splitResults = await this.batchOperations(batch, Math.ceil(batchSize / 2))
results.push(...splitResults)
} else {
const batchResults = await Promise.all(batch.map(op => op()))
results.push(...batchResults)
}
} catch (error) {
console.error(`Batch ${Math.floor(i / batchSize)} failed:`, error)
// Fallback to individual execution
for (const operation of batch) {
try {
const result = await operation()
results.push(result)
} catch (individualError) {
console.error('Individual operation failed:', individualError)
throw individualError
}
}
}
// Throttle between batches to avoid flooding the network
if (i + batchSize < operations.length) {
await new Promise(resolve => setTimeout(resolve, delayBetweenBatches))
}
}
return results
}
private async estimateBatchGas(operations: Array<() => Promise<any>>): Promise<number> {
return operations.length * 50000 // Simplified estimate
}
optimizeDocumentForStorage(document: any): OptimizedDocument {
const optimized = { ...document }
if (optimized.content && typeof optimized.content === 'string') {
optimized.content = this.compressString(optimized.content)
}
if (optimized.createdAt instanceof Date) {
optimized.createdAt = optimized.createdAt.getTime()
}
Object.keys(optimized).forEach(key => {
if (optimized[key] === undefined || optimized[key] === null) {
delete optimized[key]
}
})
return optimized
}
private compressString(str: string): string {
// Lightweight whitespace/HTML-comment stripping only; use a real compression
// library for binary-safe compression
return str
.replace(/\s+/g, ' ')
.replace(/<!--.*?-->/g, '')
.trim()
}
}
interface GasEstimate {
estimated: number
min: number
max: number
}
interface OptimizedDocument {
[key: string]: any
}
// Usage example
const gasOptimizer = new GasOptimizer()
async function uploadDocumentsOptimized(documents: any[]): Promise<string[]> {
const operations = documents.map(doc => async () => {
const optimized = gasOptimizer.optimizeDocumentForStorage(doc)
// Gas estimate for this document
const gasEstimate = await gasOptimizer.estimateGasForOperation(
'document.create',
{ data: optimized }
)
console.log(`Estimated gas: ${gasEstimate.estimated}`)
return uploadDocument(optimized, gasEstimate.estimated) // uploadDocument: your storage call
})
return gasOptimizer.batchOperations(operations, 5)
}
```
### 5. Network Optimization

```typescript
class NetworkOptimizer {
private dapiEndpoints: string[] = []
private currentEndpointIndex = 0
private endpointLatencies: Map<string, number> = new Map()
private endpointErrors: Map<string, number> = new Map()
constructor(endpoints: string[]) {
this.dapiEndpoints = endpoints
this.measureLatencies()
}
async getOptimalEndpoint(): Promise<string> {
const sortedEndpoints = this.dapiEndpoints
.map(endpoint => ({
endpoint,
latency: this.endpointLatencies.get(endpoint) || Infinity,
errors: this.endpointErrors.get(endpoint) || 0
}))
.sort((a, b) => {
const scoreA = a.latency * (1 + a.errors * 0.1)
const scoreB = b.latency * (1 + b.errors * 0.1)
return scoreA - scoreB
})
return sortedEndpoints[0].endpoint
}
async executeWithFallback<T>(
operation: (endpoint: string) => Promise<T>,
maxRetries: number = 3
): Promise<T> {
const errors: Array<{ endpoint: string; error: Error }> = []
for (let attempt = 0; attempt < maxRetries; attempt++) {
const endpoint = await this.getOptimalEndpoint()
try {
const startTime = Date.now()
const result = await operation(endpoint)
const latency = Date.now() - startTime
// Keep a simple rolling average of the endpoint's observed latency
const previous = this.endpointLatencies.get(endpoint) ?? latency
this.endpointLatencies.set(endpoint, (previous + latency) / 2)
return result
} catch (error) {
errors.push({ endpoint, error: error instanceof Error ? error : new Error(String(error)) })
const errorCount = this.endpointErrors.get(endpoint) || 0
this.endpointErrors.set(endpoint, errorCount + 1)
if (attempt < maxRetries - 1) {
await this.sleep(Math.pow(2, attempt) * 100)
}
}
}
throw new AggregateError(
errors.map(e => e.error),
`All endpoints failed after ${maxRetries} attempts`
)
}
private async measureLatencies(): Promise<void> {
const measurements = await Promise.all(
this.dapiEndpoints.map(async endpoint => {
try {
const startTime = Date.now()
await fetch(`${endpoint}/status`, {
method: 'HEAD',
signal: AbortSignal.timeout(5000) // fetch has no `timeout` option
})
const latency = Date.now() - startTime
return { endpoint, latency }
} catch {
return { endpoint, latency: Infinity }
}
})
)
measurements.forEach(({ endpoint, latency }) => {
this.endpointLatencies.set(endpoint, latency)
})
// Re-measure every minute so routing adapts to changing conditions
setTimeout(() => this.measureLatencies(), 60000)
}
private sleep(ms: number): Promise<void> {
return new Promise(resolve => setTimeout(resolve, ms))
}
// Smart batching for network requests
async batchNetworkRequests<T, R>(
items: T[],
processor: (item: T) => Promise<R>,
batchSize: number = 10,
delayBetweenBatches: number = 100
): Promise<R[]> {
const results: R[] = []
for (let i = 0; i < items.length; i += batchSize) {
const batch = items.slice(i, i + batchSize)
const batchPromises = batch.map(item =>
this.executeWithFallback(() => processor(item))
)
const batchResults = await Promise.all(batchPromises)
results.push(...batchResults)
if (i + batchSize < items.length) {
await this.sleep(delayBetweenBatches)
}
}
return results
}
}
// Usage
const optimizer = new NetworkOptimizer([
'https://api.dash.org',
'https://dapi1.dash.org',
'https://dapi2.dash.org',
'https://testnet-api.dash.org'
])
async function fetchIdentityData(identityId: string) {
return optimizer.executeWithFallback(async (endpoint) => {
const sdk = new DashPlatformSDK({
network: 'testnet',
dapiAddresses: [endpoint]
})
return sdk.identities.getIdentityByIdentifier(identityId)
})
}
// Batch multiple identity fetches
async function fetchMultipleIdentities(identityIds: string[]) {
return optimizer.batchNetworkRequests(
identityIds,
fetchIdentityData,
5, // 5 identities per batch
200 // 200ms between batches
)
}
```
## Monitoring & Analytics

```typescript
interface OperationMetric {
timestamp: number
operation: string
duration: number
success: boolean
error?: string
dataSize?: number
gasUsed?: number
endpoint?: string
}
class PerformanceTracker {
private metrics: OperationMetric[] = []
private readonly MAX_METRICS = 1000
record(metric: Omit<OperationMetric, 'timestamp'>): void {
const fullMetric: OperationMetric = {
timestamp: Date.now(),
...metric
}
this.metrics.push(fullMetric)
if (this.metrics.length > this.MAX_METRICS) {
this.metrics = this.metrics.slice(-this.MAX_METRICS)
}
}
getStatistics(operation?: string): PerformanceStatistics {
const filtered = operation
? this.metrics.filter(m => m.operation === operation)
: this.metrics
if (filtered.length === 0) {
return {
count: 0,
successRate: 0,
averageDuration: 0,
p95Duration: 0,
totalGas: 0,
averageDataSize: 0
}
}
const successful = filtered.filter(m => m.success)
const durations = successful.map(m => m.duration).sort((a, b) => a - b)
const p95Index = Math.floor(durations.length * 0.95)
return {
count: filtered.length,
successRate: successful.length / filtered.length,
averageDuration: durations.length ? durations.reduce((a, b) => a + b, 0) / durations.length : 0,
p95Duration: durations[p95Index] || 0,
totalGas: filtered.reduce((sum, m) => sum + (m.gasUsed || 0), 0),
averageDataSize: filtered.reduce((sum, m) => sum + (m.dataSize || 0), 0) / filtered.length
}
}
generateReport(): PerformanceReport {
const operations = Array.from(new Set(this.metrics.map(m => m.operation)))
return {
period: {
start: this.metrics[0]?.timestamp || Date.now(),
end: Date.now()
},
operations: operations.map(op => ({
operation: op,
...this.getStatistics(op)
})),
summary: this.getStatistics(),
recommendations: this.generateRecommendations()
}
}
private generateRecommendations(): string[] {
const recommendations: string[] = []
const stats = this.getStatistics()
if (stats.successRate < 0.95) {
recommendations.push('Consider implementing retry logic with exponential backoff')
}
if (stats.p95Duration > 5000) {
recommendations.push('Optimize network calls - consider batching or compression')
}
if (stats.averageDataSize > 1024 * 1024) {
recommendations.push('Large data transfers detected - implement chunking')
}
return recommendations
}
}
interface PerformanceStatistics {
count: number
successRate: number
averageDuration: number
p95Duration: number
totalGas: number
averageDataSize: number
}
interface PerformanceReport {
period: {
start: number
end: number
}
operations: Array<{
operation: string
} & PerformanceStatistics>
summary: PerformanceStatistics
recommendations: string[]
}
// Usage in your application
const tracker = new PerformanceTracker()
async function monitoredOperation<T>(
operationName: string,
executor: () => Promise<T>,
metadata?: { dataSize?: number }
): Promise<T> {
const startTime = performance.now()
try {
const result = await executor()
const duration = performance.now() - startTime
tracker.record({
operation: operationName,
duration,
success: true,
dataSize: metadata?.dataSize
})
return result
} catch (error) {
const duration = performance.now() - startTime
tracker.record({
operation: operationName,
duration,
success: false,
error: error instanceof Error ? error.message : String(error),
dataSize: metadata?.dataSize
})
throw error
}
}
```
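Wrapping calls and periodically surfacing the report might look like this; `fetchDocumentFromPlatform` is a hypothetical stand-in for whatever read operation you want to measure:

```typescript
// Hypothetical fetcher used only to illustrate the wrapper
declare function fetchDocumentFromPlatform(contractId: string, documentId: string): Promise<unknown>

const doc = await monitoredOperation(
  'document.fetch',
  () => fetchDocumentFromPlatform('contractId123', 'docId456'),
  { dataSize: 2048 }
)

// Periodically generate the aggregated report and log its recommendations
setInterval(() => {
  const report = tracker.generateReport()
  console.log(`Success rate: ${(report.summary.successRate * 100).toFixed(1)}%`)
  report.recommendations.forEach(r => console.warn('Recommendation:', r))
}, 5 * 60 * 1000) // every 5 minutes
```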
## Best Practices Summary

### Do's

```typescript
const RECOMMENDED_SETTINGS = {
// File upload settings
chunkSize: {
smallFiles: 256 * 1024, // 256KB
mediumFiles: 1024 * 1024, // 1MB
largeFiles: 4 * 1024 * 1024 // 4MB
},
// Network settings
maxParallelUploads: 3,
connectionTimeout: 30000, // 30 seconds
requestTimeout: 10000, // 10 seconds
// Cache settings
metadataTTL: 60 * 60 * 1000, // 1 hour (metadata changes rarely)
documentTTL: 5 * 60 * 1000, // 5 minutes (documents change more often)
maxCacheSize: 100 * 1024 * 1024, // 100MB
// Retry settings
maxRetries: 3,
initialRetryDelay: 1000, // 1 second
maxRetryDelay: 10000, // 10 seconds
// Gas settings
gasMultiplier: 1.1, // 10% buffer
maxGasPerBatch: 1000000,
// Monitoring
metricsSampleRate: 0.1, // 10% of operations
reportInterval: 5 * 60 * 1000 // 5 minutes
}
```
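The small/medium/large chunk sizes above imply file-size thresholds that the snippet leaves implicit. One reasonable mapping, where the 10 MB and 100 MB cut-offs are assumptions rather than EvoBin requirements, is:

```typescript
// Assumed thresholds: <10 MB = small, <100 MB = medium, otherwise large
function chunkSizeFor(fileSize: number): number {
  if (fileSize < 10 * 1024 * 1024) return RECOMMENDED_SETTINGS.chunkSize.smallFiles
  if (fileSize < 100 * 1024 * 1024) return RECOMMENDED_SETTINGS.chunkSize.mediumFiles
  return RECOMMENDED_SETTINGS.chunkSize.largeFiles
}
```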
## Next Steps
- Implement monitoring to track real-world performance
- A/B test different settings to find optimal configuration
- Set up alerts for performance degradation
- Regularly review and update optimization strategies
- Contribute improvements back to the EvoBin community

Remember: Performance optimization is an ongoing process. Continuously monitor, measure, and improve based on real usage data.