Dive into practical patterns for using 'datadog-metrics' in JavaScript and TypeScript with this curated list of the top 10 examples drawn from open source projects. The snippets below cover initializing the client, recording gauges, counters, and histograms, tagging metrics, and flushing buffered data, both from long-running Node.js services and from short-lived AWS Lambda handlers.
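Before the project-specific snippets, here is a minimal sketch of the library's core calls (init, increment, gauge, histogram, and flush) as described in the node-datadog-metrics README; the prefix and metric names below are placeholders and are not taken from any of the examples that follow.

// Minimal sketch of the datadog-metrics API; metric names and prefix are placeholders.
const metrics = require('datadog-metrics');

metrics.init({
  apiKey: process.env.DATADOG_API_KEY, // read from the environment, never hard-coded
  prefix: 'myapp.',                    // prepended to every metric name
  flushIntervalSeconds: 15             // buffer locally and send in bulk every 15 seconds
});

metrics.increment('requests.count');        // counter
metrics.gauge('queue.depth', 42);           // point-in-time value
metrics.histogram('response.time_ms', 120); // distribution of observed values

// Flush explicitly, e.g. before a short-lived process exits.
metrics.flush(
  () => console.log('metrics flushed'),
  (err) => console.error('flush failed', err)
);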
// Logging fallback used when no Datadog API key is configured.
let statsd = {
  histogram: (key: string, value: number, tags?: Tags, timestamp?: number) => {
    log('histogram', key, value, tags, timestamp);
  },
  gauge: (key: string, value: number, tags?: Tags, timestamp?: number) => {
    log('gauge', key, value, tags, timestamp);
  },
};
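// Assumption: this excerpt relies on a `Tags` type and a `log` debug helper defined
// earlier in the original file, e.g. type Tags = { [key: string]: string | number }
// and a small logger with the signature log(type, key, value, tags, timestamp).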
if (
  !process.env.DATADOG_API_KEY ||
  process.env.DATADOG_API_KEY === 'undefined'
) {
  console.warn('No DATADOG_API_KEY provided, not tracking metrics.');
} else {
  console.warn('Tracking metrics to DataDog.');
  const metrics = require('datadog-metrics');
  metrics.init({
    defaultTags: [
      `server:${process.env.SENTRY_NAME || 'unknown_server'}`,
      `hostname:${os.hostname() || 'unknown_instance_hostname'}`,
    ],
  });
  // This is necessary for express-hot-shots to work
  const handleObjectTags = method => {
    const original = metrics[method];
    metrics[method] = (key, val, tags, timestamp) => {
      return original.call(
        metrics,
        key,
        val,
        Array.isArray(tags) ? tags : stringify(tags),
        timestamp
      );
    };
  };
// "target": "https://shop.nordstrom.com",
// "mobile": false
// }
console.log(event)
// Flags for launching Lighthouse
// ref: https://github.com/GoogleChrome/lighthouse/blob/HEAD/docs/configuration.md
const flags = {
  // Emulate a device only when a mobile audit was requested
  disableDeviceEmulation: !event.mobile,
  disableCpuThrottling: true,
  disableNetworkThrottling: true
}
// Initialize datadog metrics collection
// ref: https://github.com/dbader/node-datadog-metrics#initialization
metrics.init({
  host: 'host',
  prefix: 'prefix.',
  // 0 disables auto-flushing, so metrics.flush() must be called manually before the Lambda exits
  flushIntervalSeconds: 0,
  apiKey: process.env.DATADOG_API_KEY,
  appKey: process.env.DATADOG_APP_KEY,
  defaultTags: [ `audit-target:${event.target}` ]
})
// Attach lighthouse to chrome and run an audit.
// ref: https://github.com/GoogleChrome/lighthouse/blob/master/docs/readme.md#using-programmatically
lighthouse(event.target, flags).then(function(results) {
  // Increment the lighthouse counter
  metrics.increment('lighthouse.invoke')
  // Gauges for page load time and scores are pushed here; see the fuller example below
  // ref: https://github.com/dbader/node-datadog-metrics#gauges
  return results
})
.then(() => {
  // Flush metrics to Datadog
  // ref: https://github.com/dbader/node-datadog-metrics#flushing
  console.log('flushing metrics')
  return metrics.flush()
})
.then(() => callback())
import { BufferedMetricsLogger } from "datadog-metrics";
import { AkairoClient } from "discord-akairo";
import { countMembers, logger } from "../utils";

const { HIFUMI_DATADOG_API_KEY } = process.env;

const _dd = new BufferedMetricsLogger({
  apiKey: HIFUMI_DATADOG_API_KEY || "disabled in development",
  host: "hifumi",
  prefix: "hifumi.",
  // allows us to buffer stats and send them in bulk
  flushIntervalSeconds: 15,
});

/**
 * Wrapper function that prevents calling metrics in development mode
 * @param func callback that receives the shared BufferedMetricsLogger
 */
export const withDatadog = (func: (client: BufferedMetricsLogger) => void): void => {
  if (HIFUMI_DATADOG_API_KEY) {
    func(_dd);
  }
};
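As a usage sketch (the metric name here is illustrative, not taken from the original project), the wrapper is called with a callback that receives the shared logger, so reporting silently becomes a no-op whenever HIFUMI_DATADOG_API_KEY is unset:

// Hypothetical call site: increments a counter only when an API key is configured.
withDatadog(client => client.increment("messages.received"));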
lighthouse(event.target, flags).then(function(results) {
  // Increment the lighthouse counter
  metrics.increment('lighthouse.invoke')
  // Get total page load time metric
  // push metric to datadog, ref: https://github.com/dbader/node-datadog-metrics#gauges
  metrics.gauge("total", results.timing.total)
  // Get the Lighthouse score for the website
  // ref: https://developers.google.com/web/tools/lighthouse/scoring
  metrics.gauge("score", results.score)
  // Parse report blob to extract performance metrics
  results.reportCategories.filter(function(v){
    return v["id"] == "performance"
  })[0].audits.filter(function(v){
    return v["group"] == "perf-metric"
  }).forEach(function(chunk){
    // Push each metric to datadog
    metrics.gauge(chunk.id, chunk.result.rawValue)
  })
  return results
})
.then(() => {
  // Flush metrics to Datadog
  // ref: https://github.com/dbader/node-datadog-metrics#flushing
  console.log('flushing metrics')
  return metrics.flush()
})
.then(() => callback())
  const handleObjectTags = method => {
    const original = metrics[method];
    metrics[method] = (key, val, tags, timestamp) => {
      return original.call(
        metrics,
        key,
        val,
        Array.isArray(tags) ? tags : stringify(tags),
        timestamp
      );
    };
  };
  handleObjectTags('histogram');
  handleObjectTags('gauge');
  handleObjectTags('increment');
  metrics.timing = (...args) => metrics.histogram.call(metrics, ...args);
  statsd = metrics;
}
function collectMemoryStats() {
  var memory = process.memoryUsage();
  statsd.gauge('memory.rss', memory.rss);
  statsd.gauge('memory.heapTotal', memory.heapTotal);
  statsd.gauge('memory.heapUsed', memory.heapUsed);
}
// Report memory usage every second
setInterval(collectMemoryStats, 1000);
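If the process also needs to shut down cleanly, one option (a sketch, not part of the original snippet) is to keep a handle on the interval and unref() it so the reporting loop does not hold the Node.js event loop open:

// Sketch: report memory usage every second without keeping the process alive on shutdown.
const memoryStatsTimer = setInterval(collectMemoryStats, 1000);
memoryStatsTimer.unref();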
function buildDataDog(pkg, env) {
  const dd = new datadog.BufferedMetricsLogger({
    apiKey: env.METRICS_API_KEY,
    host: env.METRICS_HOST || require('os').hostname(),
    prefix: utils.buildMetricPrefix(env, pkg),
    flushIntervalSeconds: env.METRICS_FLUSH_INTERVAL || 15
  });
  return new DatadogClientToStatsdAdapter(dd);
}
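DatadogClientToStatsdAdapter is not shown in this excerpt; as a rough sketch under that assumption, such an adapter typically exposes a StatsD-style surface (gauge, increment, timing) and forwards each call to the BufferedMetricsLogger, mapping timing onto histogram since datadog-metrics has no timing method:

// Hypothetical adapter shape; the project's actual implementation may differ.
class DatadogClientToStatsdAdapter {
  constructor(logger) {
    this.logger = logger; // a datadog-metrics BufferedMetricsLogger
  }
  gauge(name, value, tags) {
    this.logger.gauge(name, value, tags);
  }
  increment(name, value = 1, tags) {
    this.logger.increment(name, value, tags);
  }
  timing(name, durationMs, tags) {
    // datadog-metrics exposes no timing(); report durations as a histogram instead.
    this.logger.histogram(name, durationMs, tags);
  }
  flush(onSuccess, onError) {
    this.logger.flush(onSuccess, onError);
  }
}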