Thanks to visit codestin.com
Credit goes to www.scribd.com

0% found this document useful (0 votes)
11 views5 pages

Measure Function Performance

The document describes a utility library called PerfMetrics that measures and stores performance data for synchronous and asynchronous functions using the Browser Performance API. It includes methods for measuring function performance, storing metrics, and retrieving performance statistics, while also handling errors and ensuring proper cleanup of performance marks. The library supports both synchronous and asynchronous functions, providing detailed metrics such as duration, success/failure counts, and average durations.

Uploaded by

vib_gup
Copyright
© All Rights Reserved
We take content rights seriously. If you suspect this is your content, claim it here.
Available Formats
Download as DOCX, PDF, TXT or read online on Scribd
0% found this document useful (0 votes)
11 views5 pages

Measure Function Performance

The document describes a utility library called PerfMetrics that measures and stores performance data for synchronous and asynchronous functions using the Browser Performance API. It includes methods for measuring function performance, storing metrics, and retrieving performance statistics, while also handling errors and ensuring proper cleanup of performance marks. The library supports both synchronous and asynchronous functions, providing detailed metrics such as duration, success/failure counts, and average durations.

Uploaded by

vib_gup
Copyright
© All Rights Reserved
We take content rights seriously. If you suspect this is your content, claim it here.
Available Formats
Download as DOCX, PDF, TXT or read online on Scribd
You are on page 1/ 5

Performance Measurement Utility for Browser

Functions
Here's a comprehensive utility library that measures and stores performance
data for both synchronous and asynchronous functions using the Browser
Performance API:
/**
 * Performance measurement utility for browser functions.
 *
 * Wraps the User Timing API (performance.mark / performance.measure) to time
 * synchronous and asynchronous functions, keeping a per-name history of
 * measurements that can be queried individually or summarized as statistics.
 */
class PerfMetrics {
  constructor() {
    /** @type {Map<string, Array<Object>>} metric name -> recorded measurements */
    this.metrics = new Map();

    // Use globalThis so construction does not throw a ReferenceError outside a
    // window context (web workers, Node); prefixed fallbacks cover legacy browsers.
    const g = typeof globalThis !== 'undefined' ? globalThis : window;
    this.performance =
      g.performance || g.webkitPerformance || g.msPerformance || g.mozPerformance;

    if (!this.performance) {
      console.warn('Performance API not supported in this browser');
    }
  }

  /**
   * Measure performance of a synchronous function.
   * @param {Function} fn - The function to measure
   * @param {string} name - Unique identifier for this measurement
   * @param {...any} args - Arguments to pass to the function
   * @returns {any} The result of the function execution
   * @throws {Error} If fn is not a function; rethrows anything fn throws
   */
  measureSync(fn, name, ...args) {
    if (typeof fn !== 'function') {
      throw new Error('First argument must be a function');
    }

    // Graceful degradation: still run the function when timing is unavailable.
    if (!this.performance) return fn(...args);

    const startMark = `${name}-start`;
    const endMark = `${name}-end`;
    const measureName = name;

    this.performance.mark(startMark);

    try {
      const result = fn(...args);

      this.performance.mark(endMark);
      this.performance.measure(measureName, startMark, endMark);
      this._storeMeasurement(name);
      return result;
    } catch (error) {
      // Record the failed run (duration up to the throw) before rethrowing.
      this.performance.mark(endMark);
      this.performance.measure(measureName, startMark, endMark);
      this._storeMeasurement(name, error);
      throw error;
    } finally {
      // Always clear marks/measures so repeated runs don't leak timeline entries.
      this._cleanupMarks(startMark, endMark, measureName);
    }
  }

  /**
   * Measure performance of an asynchronous function.
   * @param {Function} asyncFn - The async function to measure
   * @param {string} name - Unique identifier for this measurement
   * @param {...any} args - Arguments to pass to the function
   * @returns {Promise<any>} Promise that resolves with the function's result
   * @throws {Error} If asyncFn is not a function; rejects with whatever asyncFn rejects
   */
  async measureAsync(asyncFn, name, ...args) {
    if (typeof asyncFn !== 'function') {
      throw new Error('First argument must be a function');
    }

    // Graceful degradation: still run the function when timing is unavailable.
    if (!this.performance) return asyncFn(...args);

    const startMark = `${name}-start`;
    const endMark = `${name}-end`;
    const measureName = name;

    this.performance.mark(startMark);

    try {
      const result = await asyncFn(...args);

      this.performance.mark(endMark);
      this.performance.measure(measureName, startMark, endMark);
      this._storeMeasurement(name);
      return result;
    } catch (error) {
      // Record the failed run (duration up to the rejection) before rethrowing.
      this.performance.mark(endMark);
      this.performance.measure(measureName, startMark, endMark);
      this._storeMeasurement(name, error);
      throw error;
    } finally {
      this._cleanupMarks(startMark, endMark, measureName);
    }
  }

  /**
   * Store measurement data for the most recent measure entry with this name.
   * @private
   * @param {string} name - Measurement name
   * @param {Error} [error] - Optional error object when the measured call failed
   */
  _storeMeasurement(name, error) {
    // Filter by entry type so a mark or resource entry sharing this name
    // cannot be mistaken for our measure.
    const measures = this.performance.getEntriesByName(name, 'measure');
    if (measures.length === 0) return;

    const lastMeasure = measures[measures.length - 1];

    const metric = {
      name,
      duration: lastMeasure.duration,
      startTime: lastMeasure.startTime,
      entryType: lastMeasure.entryType,
      timestamp: Date.now(),
      // null marks success; a string (possibly empty) marks failure.
      error: error ? error.message : null
    };

    if (!this.metrics.has(name)) {
      this.metrics.set(name, []);
    }

    this.metrics.get(name).push(metric);
  }

  /**
   * Clean up performance marks and measures to avoid timeline-buffer growth.
   * @private
   * @param {string} startMark - Start mark name
   * @param {string} endMark - End mark name
   * @param {string} measureName - Measure name
   */
  _cleanupMarks(startMark, endMark, measureName) {
    this.performance.clearMarks(startMark);
    this.performance.clearMarks(endMark);
    this.performance.clearMeasures(measureName);
  }

  /**
   * Get all stored metrics across every measurement name.
   * @returns {Array} Array of all stored metric records
   */
  getAllMetrics() {
    return Array.from(this.metrics.values()).flat();
  }

  /**
   * Get metrics by name.
   * @param {string} name - The metric name to retrieve
   * @returns {Array|null} Array of metrics or null if not found
   */
  getMetricsByName(name) {
    return this.metrics.get(name) || null;
  }

  /**
   * Clear all stored metrics.
   */
  clearMetrics() {
    this.metrics.clear();
  }

  /**
   * Get performance statistics for a named metric.
   * @param {string} name - The metric name
   * @returns {Object|null} Statistics object, or null when no metrics exist
   */
  getStats(name) {
    const metrics = this.getMetricsByName(name);
    if (!metrics || metrics.length === 0) return null;

    const durations = metrics.map((m) => m.duration);
    // error === null is the success sentinel set by _storeMeasurement; checking
    // it explicitly (rather than truthiness) keeps successful + failed === count
    // even when an error had an empty message.
    const successful = metrics.filter((m) => m.error === null).length;
    const totalDuration = durations.reduce((sum, d) => sum + d, 0);

    return {
      count: metrics.length,
      successful,
      failed: metrics.length - successful,
      avgDuration: totalDuration / durations.length,
      minDuration: Math.min(...durations),
      maxDuration: Math.max(...durations),
      totalDuration,
      lastDuration: durations[durations.length - 1]
    };
  }
}

// Export as module
export default PerfMetrics;

Features
1. Supports both synchronous and asynchronous functions
   - measureSync() for synchronous functions
   - measureAsync() for async functions/Promises
2. Comprehensive Performance API usage
   - Uses performance.mark() and performance.measure()
   - Proper cleanup of marks and measures to prevent memory leaks
3. Storage and retrieval of metrics
   - Stores all measurements with timestamps
   - Provides methods to retrieve metrics by name or get all metrics
4. Error handling
   - Captures errors during function execution
   - Includes error information in metrics
5. Performance statistics
   - Calculates min, max, and average durations
   - Tracks success/failure counts

Usage Examples
// Shared collector instance; metrics accumulate on it across calls.
const perf = new PerfMetrics();

// Measure a synchronous function
const result = perf.measureSync(() => {
  // Some heavy computation
  let sum = 0;
  for (let i = 0; i < 1000000; i++) {
    sum += Math.random();
  }
  return sum;
}, 'random-sum');

// Measure an asynchronous function
await perf.measureAsync(async () => {
  await fetch('https://api.example.com/data');
}, 'api-fetch');

// Retrieve aggregated statistics for a named measurement
const stats = perf.getStats('api-fetch');
console.log('Average duration:', stats.avgDuration);

This utility provides a robust way to track performance metrics in your application while maintaining clean usage of the Performance API.

You might also like