mirror of https://github.com/lukechilds/node.git
An initial implementation of the Performance Timing API for Node.js. This is
the same Performance Timing API implemented by modern browsers, with a number
of Node.js-specific properties. It implements the User Timing mark() and
measure() APIs, garbage collection timing, and Node.js startup milestone
timing.

```js
const { performance } = require('perf_hooks');

performance.mark('A');
setTimeout(() => {
  performance.mark('B');
  performance.measure('A to B', 'A', 'B');
  const entry = performance.getEntriesByName('A to B', 'measure')[0];
  console.log(entry.duration);
}, 10000);
```

The implementation is at the native layer and makes use of uv_hrtime(). This
should enable *eventual* integration with things like Tracing and Inspection.
The implementation is extensible and should allow us to add new performance
entry types as we go (e.g. for measuring i/o perf, etc).

Documentation and a test are provided.

PR-URL: https://github.com/nodejs/node/pull/14680
Reviewed-By: Matteo Collina <matteo.collina@gmail.com>

21 changed files with 2381 additions and 8 deletions
@ -0,0 +1,656 @@
# Performance Timing API
<!-- YAML
added: REPLACEME
-->

The Performance Timing API provides an implementation of the
[W3C Performance Timeline][] specification. The purpose of the API
is to support collection of high resolution performance metrics.
This is the same Performance API as implemented in modern Web browsers.

```js
const { performance } = require('perf_hooks');
performance.mark('A');
doSomeLongRunningProcess(() => {
  performance.mark('B');
  performance.measure('A to B', 'A', 'B');
  const measure = performance.getEntriesByName('A to B')[0];
  console.log(measure.duration);
  // Prints the number of milliseconds between Mark 'A' and Mark 'B'
});
```

## Class: Performance
<!-- YAML
added: REPLACEME
-->

The `Performance` class provides access to performance metric data. A single
instance of this class is provided via the `performance` property.

### performance.clearFunctions([name])
<!-- YAML
added: REPLACEME
-->

* `name` {string}

If `name` is not provided, removes all `PerformanceFunction` objects from the
Performance Timeline. If `name` is provided, removes only the named entries.

### performance.clearMarks([name])
<!-- YAML
added: REPLACEME
-->

* `name` {string}

If `name` is not provided, removes all `PerformanceMark` objects from the
Performance Timeline. If `name` is provided, removes only the named mark.

### performance.clearMeasures([name])
<!-- YAML
added: REPLACEME
-->

* `name` {string}

If `name` is not provided, removes all `PerformanceMeasure` objects from the
Performance Timeline. If `name` is provided, removes only objects whose
`performanceEntry.name` matches `name`.

### performance.getEntries()
<!-- YAML
added: REPLACEME
-->

* Returns: {Array}

Returns a list of all `PerformanceEntry` objects in chronological order
with respect to `performanceEntry.startTime`.

### performance.getEntriesByName(name[, type])
<!-- YAML
added: REPLACEME
-->

* `name` {string}
* `type` {string}
* Returns: {Array}

Returns a list of all `PerformanceEntry` objects in chronological order
with respect to `performanceEntry.startTime` whose `performanceEntry.name` is
equal to `name`, and optionally, whose `performanceEntry.entryType` is equal to
`type`.

### performance.getEntriesByType(type)
<!-- YAML
added: REPLACEME
-->

* `type` {string}
* Returns: {Array}

Returns a list of all `PerformanceEntry` objects in chronological order
with respect to `performanceEntry.startTime` whose `performanceEntry.entryType`
is equal to `type`.

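For example, a minimal sketch of filtering the timeline by type and by name
(the mark names used here are illustrative):

```js
const { performance } = require('perf_hooks');

performance.mark('A');
performance.mark('A');
performance.mark('B');

// All marks, regardless of name.
console.log(performance.getEntriesByType('mark').length);  // expected: 3
// Only the marks named 'A'.
console.log(performance.getEntriesByName('A', 'mark').length);  // expected: 2
```
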
### performance.mark([name])
<!-- YAML
added: REPLACEME
-->

* `name` {string}

Creates a new `PerformanceMark` entry in the Performance Timeline. A
`PerformanceMark` is a subclass of `PerformanceEntry` whose
`performanceEntry.entryType` is always `'mark'`, and whose
`performanceEntry.duration` is always `0`. Performance marks are used
to mark specific significant moments in the Performance Timeline.

### performance.measure(name, startMark, endMark)
<!-- YAML
added: REPLACEME
-->

* `name` {string}
* `startMark` {string}
* `endMark` {string}

Creates a new `PerformanceMeasure` entry in the Performance Timeline. A
`PerformanceMeasure` is a subclass of `PerformanceEntry` whose
`performanceEntry.entryType` is always `'measure'`, and whose
`performanceEntry.duration` measures the number of milliseconds elapsed
between `startMark` and `endMark`.

The `startMark` argument may identify any *existing* `PerformanceMark` in the
Performance Timeline, or *may* identify any of the timestamp properties
provided by the `PerformanceNodeTiming` class. If the named `startMark` does
not exist, then `startMark` is set to [`timeOrigin`][] by default.

The `endMark` argument must identify any *existing* `PerformanceMark` in the
Performance Timeline or any of the timestamp properties provided by the
`PerformanceNodeTiming` class. If the named `endMark` does not exist, an
error will be thrown.

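For example, a minimal sketch that measures from the `bootstrapComplete`
timestamp provided by `PerformanceNodeTiming` to a user-defined mark (the mark
and measure names are illustrative):

```js
const { performance } = require('perf_hooks');

performance.mark('init-done');
// 'bootstrapComplete' is resolved against the PerformanceNodeTiming
// timestamp properties because no mark with that name exists.
performance.measure('bootstrap to init', 'bootstrapComplete', 'init-done');

const [entry] = performance.getEntriesByName('bootstrap to init');
console.log(entry.duration);
```
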
### performance.nodeFrame
<!-- YAML
added: REPLACEME
-->

* {PerformanceNodeFrame}

An instance of the `PerformanceNodeFrame` class that provides performance
metrics for the event loop.

### performance.nodeTiming
<!-- YAML
added: REPLACEME
-->

* {PerformanceNodeTiming}

An instance of the `PerformanceNodeTiming` class that provides performance
metrics for specific Node.js operational milestones.

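As a minimal sketch, the milestone timestamps documented under the
`PerformanceNodeTiming` class below can be read directly from this property:

```js
const { performance } = require('perf_hooks');

const timing = performance.nodeTiming;
// Approximate time spent bootstrapping, derived from the documented
// milestone timestamps (both are high resolution millisecond values).
console.log(timing.bootstrapComplete - timing.nodeStart);
```
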
### performance.now()
<!-- YAML
added: REPLACEME
-->

* Returns: {number}

Returns the current high resolution millisecond timestamp.

### performance.timeOrigin
<!-- YAML
added: REPLACEME
-->

* {number}

The [`timeOrigin`][] specifies the high resolution millisecond timestamp from
which all performance metric durations are measured.

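As a minimal sketch, and assuming (as in this implementation) that
`performance.now()` and `performance.timeOrigin` are taken from the same
monotonic clock, their difference approximates the time elapsed since the
time origin:

```js
const { performance } = require('perf_hooks');

// Milliseconds elapsed since the time origin.
console.log(performance.now() - performance.timeOrigin);
```
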
### performance.timerify(fn)
<!-- YAML
added: REPLACEME
-->

* `fn` {Function}

Wraps a function within a new function that measures the running time of the
wrapped function. A `PerformanceObserver` must be subscribed to the `'function'`
event type in order for the timing details to be accessed.

```js
const {
  performance,
  PerformanceObserver
} = require('perf_hooks');

function someFunction() {
  console.log('hello world');
}

const wrapped = performance.timerify(someFunction);

const obs = new PerformanceObserver((list) => {
  console.log(list.getEntries()[0].duration);
  obs.disconnect();
  performance.clearFunctions();
});
obs.observe({ entryTypes: ['function'] });

// A performance timeline entry will be created
wrapped();
```

## Class: PerformanceEntry
<!-- YAML
added: REPLACEME
-->

### performanceEntry.duration
<!-- YAML
added: REPLACEME
-->

* {number}

The total number of milliseconds elapsed for this entry. This value will not
be meaningful for all Performance Entry types.

### performanceEntry.name
<!-- YAML
added: REPLACEME
-->

* {string}

The name of the performance entry.

### performanceEntry.startTime
<!-- YAML
added: REPLACEME
-->

* {number}

The high resolution millisecond timestamp marking the starting time of the
Performance Entry.

### performanceEntry.entryType
<!-- YAML
added: REPLACEME
-->

* {string}

The type of the performance entry. Currently it may be one of: `'node'`,
`'mark'`, `'measure'`, `'gc'`, or `'function'`.

### performanceEntry.kind
<!-- YAML
added: REPLACEME
-->

* {number}

When `performanceEntry.entryType` is equal to `'gc'`, the
`performanceEntry.kind` property identifies the type of garbage collection
operation that occurred. The value may be one of:

* `perf_hooks.constants.NODE_PERFORMANCE_GC_MAJOR`
* `perf_hooks.constants.NODE_PERFORMANCE_GC_MINOR`
* `perf_hooks.constants.NODE_PERFORMANCE_GC_INCREMENTAL`
* `perf_hooks.constants.NODE_PERFORMANCE_GC_WEAKCB`

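For example, a minimal sketch of observing garbage collection entries and
inspecting their kind (forcing a collection via the `--expose-gc` flag and
`global.gc()` is an assumption made purely for demonstration):

```js
// Run with: node --expose-gc example.js
const {
  performance,
  PerformanceObserver,
  constants
} = require('perf_hooks');

const obs = new PerformanceObserver((list, observer) => {
  const [entry] = list.getEntries();
  console.log(entry.entryType);  // 'gc'
  // Compare the kind against the documented constants.
  console.log(entry.kind === constants.NODE_PERFORMANCE_GC_MAJOR);
  observer.disconnect();
});
obs.observe({ entryTypes: ['gc'] });

global.gc();
```
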
## Class: PerformanceNodeFrame extends PerformanceEntry
<!-- YAML
added: REPLACEME
-->

Provides timing details for the Node.js event loop.

### performanceNodeFrame.frameCheck

The high resolution timestamp when `uv_check_t` processing occurred on the
current loop.

### performanceNodeFrame.frameCount

The total number of event loop iterations (iterated when `uv_idle_t`
processing occurs).

### performanceNodeFrame.frameIdle

The high resolution timestamp when `uv_idle_t` processing occurred on the
current loop.

### performanceNodeFrame.framesPerSecond

The number of event loop iterations per second.

### performanceNodeFrame.framePrepare

The high resolution timestamp when `uv_prepare_t` processing occurred on the
current loop.

## Class: PerformanceNodeTiming extends PerformanceEntry
<!-- YAML
added: REPLACEME
-->

Provides timing details for Node.js itself.

### performanceNodeTiming.bootstrapComplete
<!-- YAML
added: REPLACEME
-->

* {number}

The high resolution millisecond timestamp at which the Node.js process
completed bootstrap.

### performanceNodeTiming.clusterSetupEnd
<!-- YAML
added: REPLACEME
-->

* {number}

The high resolution millisecond timestamp at which cluster processing ended.

### performanceNodeTiming.clusterSetupStart
<!-- YAML
added: REPLACEME
-->

* {number}

The high resolution millisecond timestamp at which cluster processing started.

### performanceNodeTiming.loopExit
<!-- YAML
added: REPLACEME
-->

* {number}

The high resolution millisecond timestamp at which the Node.js event loop
exited.

### performanceNodeTiming.loopStart
<!-- YAML
added: REPLACEME
-->

* {number}

The high resolution millisecond timestamp at which the Node.js event loop
started.

### performanceNodeTiming.moduleLoadEnd
<!-- YAML
added: REPLACEME
-->

* {number}

The high resolution millisecond timestamp at which main module load ended.

### performanceNodeTiming.moduleLoadStart
<!-- YAML
added: REPLACEME
-->

* {number}

The high resolution millisecond timestamp at which main module load started.

### performanceNodeTiming.nodeStart
<!-- YAML
added: REPLACEME
-->

* {number}

The high resolution millisecond timestamp at which the Node.js process was
initialized.

### performanceNodeTiming.preloadModuleLoadEnd
<!-- YAML
added: REPLACEME
-->

* {number}

The high resolution millisecond timestamp at which preload module load ended.

### performanceNodeTiming.preloadModuleLoadStart
<!-- YAML
added: REPLACEME
-->

* {number}

The high resolution millisecond timestamp at which preload module load started.

### performanceNodeTiming.thirdPartyMainEnd
<!-- YAML
added: REPLACEME
-->

* {number}

The high resolution millisecond timestamp at which third_party_main processing
ended.

### performanceNodeTiming.thirdPartyMainStart
<!-- YAML
added: REPLACEME
-->

* {number}

The high resolution millisecond timestamp at which third_party_main processing
started.

### performanceNodeTiming.v8Start
<!-- YAML
added: REPLACEME
-->

* {number}

The high resolution millisecond timestamp at which the V8 platform was
initialized.

## Class: PerformanceObserver(callback)
<!-- YAML
added: REPLACEME
-->

* `callback` {Function} A `PerformanceObserverCallback` callback function.

`PerformanceObserver` objects provide notifications when new
`PerformanceEntry` instances have been added to the Performance Timeline.

```js
const {
  performance,
  PerformanceObserver
} = require('perf_hooks');

const obs = new PerformanceObserver((list, observer) => {
  console.log(list.getEntries());
  observer.disconnect();
});
obs.observe({ entryTypes: ['mark'], buffered: true });

performance.mark('test');
```

Because `PerformanceObserver` instances introduce their own additional
performance overhead, instances should not be left subscribed to notifications
indefinitely. Users should disconnect observers as soon as they are no
longer needed.

### Callback: PerformanceObserverCallback(list, observer)
<!-- YAML
added: REPLACEME
-->

* `list` {PerformanceObserverEntryList}
* `observer` {PerformanceObserver}

The `PerformanceObserverCallback` is invoked when a `PerformanceObserver` is
notified about new `PerformanceEntry` instances. The callback receives a
`PerformanceObserverEntryList` instance and a reference to the
`PerformanceObserver`.

### Class: PerformanceObserverEntryList
<!-- YAML
added: REPLACEME
-->

The `PerformanceObserverEntryList` class is used to provide access to the
`PerformanceEntry` instances passed to a `PerformanceObserver`.

#### performanceObserverEntryList.getEntries()
<!-- YAML
added: REPLACEME
-->

* Returns: {Array}

Returns a list of `PerformanceEntry` objects in chronological order
with respect to `performanceEntry.startTime`.

#### performanceObserverEntryList.getEntriesByName(name[, type])
<!-- YAML
added: REPLACEME
-->

* `name` {string}
* `type` {string}
* Returns: {Array}

Returns a list of `PerformanceEntry` objects in chronological order
with respect to `performanceEntry.startTime` whose `performanceEntry.name` is
equal to `name`, and optionally, whose `performanceEntry.entryType` is equal to
`type`.

#### performanceObserverEntryList.getEntriesByType(type)
<!-- YAML
added: REPLACEME
-->

* `type` {string}
* Returns: {Array}

Returns a list of `PerformanceEntry` objects in chronological order
with respect to `performanceEntry.startTime` whose `performanceEntry.entryType`
is equal to `type`.

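For example, a minimal sketch of filtering inside an observer callback (the
mark names are illustrative):

```js
const {
  performance,
  PerformanceObserver
} = require('perf_hooks');

const obs = new PerformanceObserver((list, observer) => {
  // Filter the notified entries by type and by name.
  console.log(list.getEntriesByType('mark').length);     // expected: 2
  console.log(list.getEntriesByName('stage-1').length);  // expected: 1
  observer.disconnect();
});
obs.observe({ entryTypes: ['mark'], buffered: true });

performance.mark('stage-1');
performance.mark('stage-2');
```
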
### performanceObserver.disconnect()
<!-- YAML
added: REPLACEME
-->

Disconnects the `PerformanceObserver` instance from all notifications.

### performanceObserver.observe(options)
<!-- YAML
added: REPLACEME
-->

* `options` {Object}
  * `entryTypes` {Array} An array of strings identifying the types of
    `PerformanceEntry` instances the observer is interested in. If not
    provided, an error will be thrown.
  * `buffered` {boolean} If `true`, the notification callback will be
    called using `setImmediate()` and multiple `PerformanceEntry` instance
    notifications will be buffered internally. If `false`, notifications will
    be immediate and synchronous. Defaults to `false`.

Subscribes the `PerformanceObserver` instance to notifications of new
`PerformanceEntry` instances identified by `options.entryTypes`.

When `options.buffered` is `false`, the `callback` will be invoked once for
every `PerformanceEntry` instance:

```js
const {
  performance,
  PerformanceObserver
} = require('perf_hooks');

const obs = new PerformanceObserver((list, observer) => {
  // called three times synchronously. list contains one item
});
obs.observe({ entryTypes: ['mark'] });

for (let n = 0; n < 3; n++)
  performance.mark(`test${n}`);
```

```js
const {
  performance,
  PerformanceObserver
} = require('perf_hooks');

const obs = new PerformanceObserver((list, observer) => {
  // called once. list contains three items
});
obs.observe({ entryTypes: ['mark'], buffered: true });

for (let n = 0; n < 3; n++)
  performance.mark(`test${n}`);
```

## Examples

### Measuring the duration of async operations

The following example uses the [Async Hooks][] and Performance APIs to measure
the actual duration of a Timeout operation (including the amount of time it
took to execute the callback).

```js
'use strict';
const async_hooks = require('async_hooks');
const {
  performance,
  PerformanceObserver
} = require('perf_hooks');

const set = new Set();
const hook = async_hooks.createHook({
  init(id, type) {
    if (type === 'Timeout') {
      performance.mark(`Timeout-${id}-Init`);
      set.add(id);
    }
  },
  destroy(id) {
    if (set.has(id)) {
      set.delete(id);
      performance.mark(`Timeout-${id}-Destroy`);
      performance.measure(`Timeout-${id}`,
                          `Timeout-${id}-Init`,
                          `Timeout-${id}-Destroy`);
    }
  }
});
hook.enable();

const obs = new PerformanceObserver((list, observer) => {
  console.log(list.getEntries()[0]);
  performance.clearMarks();
  performance.clearMeasures();
  observer.disconnect();
});
obs.observe({ entryTypes: ['measure'], buffered: true });

setTimeout(() => {}, 1000);
```

### Measuring how long it takes to load dependencies

The following example measures the duration of `require()` operations to load
dependencies:

<!-- eslint-disable no-global-assign -->
```js
'use strict';
const {
  performance,
  PerformanceObserver
} = require('perf_hooks');
const mod = require('module');

// Monkey patch the require function
mod.Module.prototype.require =
  performance.timerify(mod.Module.prototype.require);
require = performance.timerify(require);

// Activate the observer
const obs = new PerformanceObserver((list) => {
  const entries = list.getEntries();
  entries.forEach((entry) => {
    console.log(`require('${entry[0]}')`, entry.duration);
  });
  obs.disconnect();
  // Free memory
  performance.clearFunctions();
});
obs.observe({ entryTypes: ['function'], buffered: true });

require('some-module');
```

[Async Hooks]: async_hooks.html
[`timeOrigin`]: https://w3c.github.io/hr-time/#dom-performance-timeorigin
[W3C Performance Timeline]: https://w3c.github.io/performance-timeline/
@ -0,0 +1,553 @@ |
|||
'use strict'; |
|||
|
|||
const { |
|||
PerformanceEntry, |
|||
mark: _mark, |
|||
measure: _measure, |
|||
milestones, |
|||
observerCounts, |
|||
setupObservers, |
|||
timeOrigin, |
|||
timerify, |
|||
constants |
|||
} = process.binding('performance'); |
|||
|
|||
const { |
|||
NODE_PERFORMANCE_ENTRY_TYPE_NODE, |
|||
NODE_PERFORMANCE_ENTRY_TYPE_MARK, |
|||
NODE_PERFORMANCE_ENTRY_TYPE_MEASURE, |
|||
NODE_PERFORMANCE_ENTRY_TYPE_GC, |
|||
NODE_PERFORMANCE_ENTRY_TYPE_FUNCTION, |
|||
|
|||
NODE_PERFORMANCE_MILESTONE_NODE_START, |
|||
NODE_PERFORMANCE_MILESTONE_V8_START, |
|||
NODE_PERFORMANCE_MILESTONE_LOOP_START, |
|||
NODE_PERFORMANCE_MILESTONE_LOOP_EXIT, |
|||
NODE_PERFORMANCE_MILESTONE_BOOTSTRAP_COMPLETE, |
|||
NODE_PERFORMANCE_MILESTONE_ENVIRONMENT, |
|||
NODE_PERFORMANCE_MILESTONE_THIRD_PARTY_MAIN_START, |
|||
NODE_PERFORMANCE_MILESTONE_THIRD_PARTY_MAIN_END, |
|||
NODE_PERFORMANCE_MILESTONE_CLUSTER_SETUP_START, |
|||
NODE_PERFORMANCE_MILESTONE_CLUSTER_SETUP_END, |
|||
NODE_PERFORMANCE_MILESTONE_MODULE_LOAD_START, |
|||
NODE_PERFORMANCE_MILESTONE_MODULE_LOAD_END, |
|||
NODE_PERFORMANCE_MILESTONE_PRELOAD_MODULE_LOAD_START, |
|||
NODE_PERFORMANCE_MILESTONE_PRELOAD_MODULE_LOAD_END |
|||
} = constants; |
|||
|
|||
const L = require('internal/linkedlist'); |
|||
const kInspect = require('internal/util').customInspectSymbol; |
|||
const { inherits } = require('util'); |
|||
|
|||
const kCallback = Symbol('callback'); |
|||
const kTypes = Symbol('types'); |
|||
const kEntries = Symbol('entries'); |
|||
const kBuffer = Symbol('buffer'); |
|||
const kBuffering = Symbol('buffering'); |
|||
const kQueued = Symbol('queued'); |
|||
const kTimerified = Symbol('timerified'); |
|||
const kInsertEntry = Symbol('insert-entry'); |
|||
const kIndexEntry = Symbol('index-entry'); |
|||
const kClearEntry = Symbol('clear-entry'); |
|||
const kGetEntries = Symbol('get-entries'); |
|||
const kIndex = Symbol('index'); |
|||
const kMarks = Symbol('marks'); |
|||
|
|||
observerCounts[NODE_PERFORMANCE_ENTRY_TYPE_MARK] = 1; |
|||
observerCounts[NODE_PERFORMANCE_ENTRY_TYPE_MEASURE] = 1; |
|||
const observers = {}; |
|||
const observerableTypes = [ |
|||
'node', |
|||
'mark', |
|||
'measure', |
|||
'gc', |
|||
'function' |
|||
]; |
|||
|
|||
let errors; |
|||
function lazyErrors() { |
|||
if (errors === undefined) |
|||
errors = require('internal/errors'); |
|||
return errors; |
|||
} |
|||
|
|||
function now() { |
|||
const hr = process.hrtime(); |
|||
return hr[0] * 1000 + hr[1] / 1e6; |
|||
} |
|||
|
|||
class PerformanceNodeTiming { |
|||
constructor() {} |
|||
|
|||
get name() { |
|||
return 'node'; |
|||
} |
|||
|
|||
get entryType() { |
|||
return 'node'; |
|||
} |
|||
|
|||
get startTime() { |
|||
return timeOrigin; |
|||
} |
|||
|
|||
get duration() { |
|||
return now() - timeOrigin; |
|||
} |
|||
|
|||
get nodeStart() { |
|||
return milestones[NODE_PERFORMANCE_MILESTONE_NODE_START]; |
|||
} |
|||
|
|||
get v8Start() { |
|||
return milestones[NODE_PERFORMANCE_MILESTONE_V8_START]; |
|||
} |
|||
|
|||
get environment() { |
|||
return milestones[NODE_PERFORMANCE_MILESTONE_ENVIRONMENT]; |
|||
} |
|||
|
|||
get loopStart() { |
|||
return milestones[NODE_PERFORMANCE_MILESTONE_LOOP_START]; |
|||
} |
|||
|
|||
get loopExit() { |
|||
return milestones[NODE_PERFORMANCE_MILESTONE_LOOP_EXIT]; |
|||
} |
|||
|
|||
get bootstrapComplete() { |
|||
return milestones[NODE_PERFORMANCE_MILESTONE_BOOTSTRAP_COMPLETE]; |
|||
} |
|||
|
|||
get thirdPartyMainStart() { |
|||
return milestones[NODE_PERFORMANCE_MILESTONE_THIRD_PARTY_MAIN_START]; |
|||
} |
|||
|
|||
get thirdPartyMainEnd() { |
|||
return milestones[NODE_PERFORMANCE_MILESTONE_THIRD_PARTY_MAIN_END]; |
|||
} |
|||
|
|||
get clusterSetupStart() { |
|||
return milestones[NODE_PERFORMANCE_MILESTONE_CLUSTER_SETUP_START]; |
|||
} |
|||
|
|||
get clusterSetupEnd() { |
|||
return milestones[NODE_PERFORMANCE_MILESTONE_CLUSTER_SETUP_END]; |
|||
} |
|||
|
|||
get moduleLoadStart() { |
|||
return milestones[NODE_PERFORMANCE_MILESTONE_MODULE_LOAD_START]; |
|||
} |
|||
|
|||
get moduleLoadEnd() { |
|||
return milestones[NODE_PERFORMANCE_MILESTONE_MODULE_LOAD_END]; |
|||
} |
|||
|
|||
get preloadModuleLoadStart() { |
|||
return milestones[NODE_PERFORMANCE_MILESTONE_PRELOAD_MODULE_LOAD_START]; |
|||
} |
|||
|
|||
get preloadModuleLoadEnd() { |
|||
return milestones[NODE_PERFORMANCE_MILESTONE_PRELOAD_MODULE_LOAD_END]; |
|||
} |
|||
|
|||
[kInspect]() { |
|||
return { |
|||
name: 'node', |
|||
entryType: 'node', |
|||
startTime: this.startTime, |
|||
duration: this.duration, |
|||
nodeStart: this.nodeStart, |
|||
v8Start: this.v8Start, |
|||
bootstrapComplete: this.bootstrapComplete, |
|||
environment: this.environment, |
|||
loopStart: this.loopStart, |
|||
loopExit: this.loopExit, |
|||
thirdPartyMainStart: this.thirdPartyMainStart, |
|||
thirdPartyMainEnd: this.thirdPartyMainEnd, |
|||
clusterSetupStart: this.clusterSetupStart, |
|||
clusterSetupEnd: this.clusterSetupEnd, |
|||
moduleLoadStart: this.moduleLoadStart, |
|||
moduleLoadEnd: this.moduleLoadEnd, |
|||
preloadModuleLoadStart: this.preloadModuleLoadStart, |
|||
preloadModuleLoadEnd: this.preloadModuleLoadEnd |
|||
}; |
|||
} |
|||
} |
|||
// Use this instead of Extends because we want PerformanceEntry in the
|
|||
// prototype chain but we do not want to use the PerformanceEntry
|
|||
// constructor for this.
|
|||
inherits(PerformanceNodeTiming, PerformanceEntry); |
|||
|
|||
const nodeTiming = new PerformanceNodeTiming(); |
|||
|
|||
// Maintains a list of entries as a linked list stored in insertion order.
|
|||
class PerformanceObserverEntryList { |
|||
constructor() { |
|||
Object.defineProperty(this, kEntries, { |
|||
writable: true, |
|||
enumerable: false, |
|||
value: {} |
|||
}); |
|||
L.init(this[kEntries]); |
|||
} |
|||
|
|||
[kInsertEntry](entry) { |
|||
const item = { entry }; |
|||
L.append(this[kEntries], item); |
|||
this[kIndexEntry](item); |
|||
} |
|||
|
|||
[kIndexEntry](entry) { |
|||
// Default implementation does nothing
|
|||
} |
|||
|
|||
[kGetEntries](name, type) { |
|||
const ret = []; |
|||
const list = this[kEntries]; |
|||
if (!L.isEmpty(list)) { |
|||
let item = L.peek(list); |
|||
while (item && item !== list) { |
|||
const entry = item.entry; |
|||
if ((name && entry.name !== name) || |
|||
(type && entry.entryType !== type)) { |
|||
item = item._idlePrev; |
|||
continue; |
|||
} |
|||
sortedInsert(ret, entry); |
|||
item = item._idlePrev; |
|||
} |
|||
} |
|||
return ret; |
|||
} |
|||
|
|||
// While the items are stored in insertion order, getEntries() is
|
|||
// required to return items sorted by startTime.
|
|||
getEntries() { |
|||
return this[kGetEntries](); |
|||
} |
|||
|
|||
getEntriesByType(type) { |
|||
return this[kGetEntries](undefined, `${type}`); |
|||
} |
|||
|
|||
getEntriesByName(name, type) { |
|||
return this[kGetEntries](`${name}`, type !== undefined ? `${type}` : type); |
|||
} |
|||
} |
|||
|
|||
class PerformanceObserver { |
|||
constructor(callback) { |
|||
if (typeof callback !== 'function') { |
|||
const errors = lazyErrors(); |
|||
throw new errors.TypeError('ERR_INVALID_CALLBACK'); |
|||
} |
|||
Object.defineProperties(this, { |
|||
[kTypes]: { |
|||
enumerable: false, |
|||
writable: true, |
|||
value: {} |
|||
}, |
|||
[kCallback]: { |
|||
enumerable: false, |
|||
writable: true, |
|||
value: callback |
|||
}, |
|||
[kBuffer]: { |
|||
enumerable: false, |
|||
writable: true, |
|||
value: new PerformanceObserverEntryList() |
|||
}, |
|||
[kBuffering]: { |
|||
enumerable: false, |
|||
writable: true, |
|||
value: false |
|||
}, |
|||
[kQueued]: { |
|||
enumerable: false, |
|||
writable: true, |
|||
value: false |
|||
} |
|||
}); |
|||
} |
|||
|
|||
disconnect() { |
|||
const types = this[kTypes]; |
|||
const keys = Object.keys(types); |
|||
for (var n = 0; n < keys.length; n++) { |
|||
const item = types[keys[n]]; |
|||
if (item) { |
|||
L.remove(item); |
|||
observerCounts[keys[n]]--; |
|||
} |
|||
} |
|||
this[kTypes] = {}; |
|||
} |
|||
|
|||
observe(options) { |
|||
const errors = lazyErrors(); |
|||
if (typeof options !== 'object' || options == null) { |
|||
throw new errors.TypeError('ERR_INVALID_ARG_TYPE', 'options', 'Object'); |
|||
} |
|||
if (!Array.isArray(options.entryTypes)) { |
|||
throw new errors.TypeError('ERR_INVALID_OPT_VALUE', |
|||
'entryTypes', options); |
|||
} |
|||
const entryTypes = options.entryTypes.filter(filterTypes).map(mapTypes); |
|||
if (entryTypes.length === 0) { |
|||
throw new errors.Error('ERR_VALID_PERFORMANCE_ENTRY_TYPE'); |
|||
} |
|||
this.disconnect(); |
|||
this[kBuffer][kEntries] = []; |
|||
L.init(this[kBuffer][kEntries]); |
|||
this[kBuffering] = Boolean(options.buffered); |
|||
for (var n = 0; n < entryTypes.length; n++) { |
|||
const entryType = entryTypes[n]; |
|||
const list = getObserversList(entryType); |
|||
const item = { obs: this }; |
|||
this[kTypes][entryType] = item; |
|||
L.append(list, item); |
|||
observerCounts[entryType]++; |
|||
} |
|||
} |
|||
} |
|||
|
|||
class Performance extends PerformanceObserverEntryList { |
|||
constructor() { |
|||
super(); |
|||
this[kIndex] = { |
|||
[kMarks]: new Set() |
|||
}; |
|||
this[kInsertEntry](nodeTiming); |
|||
} |
|||
|
|||
[kIndexEntry](item) { |
|||
const index = this[kIndex]; |
|||
const type = item.entry.entryType; |
|||
let items = index[type]; |
|||
if (!items) { |
|||
items = index[type] = {}; |
|||
L.init(items); |
|||
} |
|||
const entry = item.entry; |
|||
L.append(items, { entry, item }); |
|||
} |
|||
|
|||
[kClearEntry](type, name) { |
|||
const index = this[kIndex]; |
|||
const items = index[type]; |
|||
if (!items) return; |
|||
let item = L.peek(items); |
|||
while (item && item !== items) { |
|||
const entry = item.entry; |
|||
const next = item._idlePrev; |
|||
if (name !== undefined) { |
|||
if (entry.name === `${name}`) { |
|||
L.remove(item); // remove from the index
|
|||
L.remove(item.item); // remove from the master
|
|||
} |
|||
} else { |
|||
L.remove(item); // remove from the index
|
|||
L.remove(item.item); // remove from the master
|
|||
} |
|||
item = next; |
|||
} |
|||
} |
|||
|
|||
get nodeTiming() { |
|||
return nodeTiming; |
|||
} |
|||
|
|||
get timeOrigin() { |
|||
return timeOrigin; |
|||
} |
|||
|
|||
now() { |
|||
return now(); |
|||
} |
|||
|
|||
mark(name) { |
|||
name = `${name}`; |
|||
_mark(name); |
|||
this[kIndex][kMarks].add(name); |
|||
} |
|||
|
|||
measure(name, startMark, endMark) { |
|||
name = `${name}`; |
|||
endMark = `${endMark}`; |
|||
startMark = startMark !== undefined ? `${startMark}` : ''; |
|||
const marks = this[kIndex][kMarks]; |
|||
if (!marks.has(endMark) && !(endMark in nodeTiming)) { |
|||
const errors = lazyErrors(); |
|||
throw new errors.Error('ERR_INVALID_PERFORMANCE_MARK', endMark); |
|||
} |
|||
_measure(name, startMark, endMark); |
|||
} |
|||
|
|||
clearMarks(name) { |
|||
name = name !== undefined ? `${name}` : name; |
|||
this[kClearEntry]('mark', name); |
|||
if (name !== undefined) |
|||
this[kIndex][kMarks].delete(name); |
|||
else |
|||
this[kIndex][kMarks].clear(); |
|||
} |
|||
|
|||
clearMeasures(name) { |
|||
this[kClearEntry]('measure', name); |
|||
} |
|||
|
|||
clearGC() { |
|||
this[kClearEntry]('gc'); |
|||
} |
|||
|
|||
clearFunctions(name) { |
|||
this[kClearEntry]('function', name); |
|||
} |
|||
|
|||
timerify(fn) { |
|||
if (typeof fn !== 'function') { |
|||
const errors = lazyErrors(); |
|||
throw new errors.TypeError('ERR_INVALID_ARG_TYPE', 'fn', 'Function'); |
|||
} |
|||
if (fn[kTimerified]) |
|||
return fn[kTimerified]; |
|||
const ret = timerify(fn, fn.length); |
|||
Object.defineProperty(fn, kTimerified, { |
|||
enumerable: false, |
|||
configurable: true, |
|||
writable: false, |
|||
value: ret |
|||
}); |
|||
Object.defineProperties(ret, { |
|||
[kTimerified]: { |
|||
enumerable: false, |
|||
configurable: true, |
|||
writable: false, |
|||
value: ret |
|||
}, |
|||
name: { |
|||
enumerable: false, |
|||
configurable: true, |
|||
writable: false, |
|||
value: `timerified ${fn.name}` |
|||
} |
|||
}); |
|||
return ret; |
|||
} |
|||
|
|||
[kInspect]() { |
|||
return { |
|||
timeOrigin, |
|||
nodeTiming |
|||
}; |
|||
} |
|||
} |
|||
|
|||
const performance = new Performance(); |
|||
|
|||
function getObserversList(type) { |
|||
let list = observers[type]; |
|||
if (list === undefined) { |
|||
list = observers[type] = {}; |
|||
L.init(list); |
|||
} |
|||
return list; |
|||
} |
|||
|
|||
function doNotify() { |
|||
this[kQueued] = false; |
|||
this[kCallback](this[kBuffer], this); |
|||
this[kBuffer][kEntries] = []; |
|||
L.init(this[kBuffer][kEntries]); |
|||
} |
|||
|
|||
// Set up the callback used to receive PerformanceObserver notifications
|
|||
function observersCallback(entry) { |
|||
const type = mapTypes(entry.entryType); |
|||
performance[kInsertEntry](entry); |
|||
const list = getObserversList(type); |
|||
|
|||
let current = L.peek(list); |
|||
|
|||
while (current && current.obs) { |
|||
const observer = current.obs; |
|||
// First, add the item to the observers buffer
|
|||
const buffer = observer[kBuffer]; |
|||
buffer[kInsertEntry](entry); |
|||
// Second, check to see if we're buffering
|
|||
if (observer[kBuffering]) { |
|||
// If we are, schedule a setImmediate call if one hasn't already
|
|||
if (!observer[kQueued]) { |
|||
observer[kQueued] = true; |
|||
// Use setImmediate instead of nextTick to give more time
|
|||
// for multiple entries to collect.
|
|||
setImmediate(doNotify.bind(observer)); |
|||
} |
|||
} else { |
|||
// If not buffering, notify immediately
|
|||
doNotify.call(observer); |
|||
} |
|||
current = current._idlePrev; |
|||
} |
|||
} |
|||
setupObservers(observersCallback); |
|||
|
|||
function filterTypes(i) { |
|||
return observerableTypes.indexOf(`${i}`) >= 0; |
|||
} |
|||
|
|||
function mapTypes(i) { |
|||
switch (i) { |
|||
case 'node': return NODE_PERFORMANCE_ENTRY_TYPE_NODE; |
|||
case 'mark': return NODE_PERFORMANCE_ENTRY_TYPE_MARK; |
|||
case 'measure': return NODE_PERFORMANCE_ENTRY_TYPE_MEASURE; |
|||
case 'gc': return NODE_PERFORMANCE_ENTRY_TYPE_GC; |
|||
case 'function': return NODE_PERFORMANCE_ENTRY_TYPE_FUNCTION; |
|||
} |
|||
} |
|||
|
|||
// The specification requires that PerformanceEntry instances are sorted
|
|||
// according to startTime. Unfortunately, they are not necessarily created
|
|||
// in that same order, and can be reported to the JS layer in any order,
|
|||
// which means we need to keep the list sorted as we insert.
|
|||
function getInsertLocation(list, entryStartTime) { |
|||
let start = 0; |
|||
let end = list.length; |
|||
while (start < end) { |
|||
const pivot = (end + start) >>> 1; |
|||
if (list[pivot].startTime === entryStartTime) |
|||
return pivot; |
|||
if (list[pivot].startTime < entryStartTime) |
|||
start = pivot + 1; |
|||
else |
|||
end = pivot; |
|||
} |
|||
return start; |
|||
} |
|||
|
|||
function sortedInsert(list, entry) { |
|||
const entryStartTime = entry.startTime; |
|||
if (list.length === 0 || |
|||
(list[list.length - 1].startTime < entryStartTime)) { |
|||
list.push(entry); |
|||
return; |
|||
} |
|||
if (list[0] && (list[0].startTime > entryStartTime)) { |
|||
list.unshift(entry); |
|||
return; |
|||
} |
|||
const location = getInsertLocation(list, entryStartTime); |
|||
list.splice(location, 0, entry); |
|||
} |
|||
|
|||
module.exports = { |
|||
performance, |
|||
PerformanceObserver |
|||
}; |
|||
|
|||
Object.defineProperty(module.exports, 'constants', { |
|||
configurable: false, |
|||
enumerable: true, |
|||
value: constants |
|||
}); |
@ -0,0 +1,389 @@ |
|||
#include "node.h" |
|||
#include "v8.h" |
|||
#include "env.h" |
|||
#include "env-inl.h" |
|||
#include "node_perf.h" |
|||
#include "uv.h" |
|||
|
|||
#include <vector> |
|||
|
|||
namespace node { |
|||
namespace performance { |
|||
|
|||
using v8::Array; |
|||
using v8::ArrayBuffer; |
|||
using v8::Context; |
|||
using v8::Function; |
|||
using v8::FunctionCallbackInfo; |
|||
using v8::FunctionTemplate; |
|||
using v8::HandleScope; |
|||
using v8::Integer; |
|||
using v8::Isolate; |
|||
using v8::Local; |
|||
using v8::Name; |
|||
using v8::Object; |
|||
using v8::ObjectTemplate; |
|||
using v8::PropertyCallbackInfo; |
|||
using v8::String; |
|||
using v8::Value; |
|||
|
|||
const uint64_t timeOrigin = PERFORMANCE_NOW(); |
|||
uint64_t performance_node_start; |
|||
uint64_t performance_v8_start; |
|||
|
|||
uint64_t performance_last_gc_start_mark_ = 0; |
|||
v8::GCType performance_last_gc_type_ = v8::GCType::kGCTypeAll; |
|||
|
|||
void PerformanceEntry::New(const FunctionCallbackInfo<Value>& args) { |
|||
Environment* env = Environment::GetCurrent(args); |
|||
Isolate* isolate = env->isolate(); |
|||
Utf8Value name(isolate, args[0]); |
|||
Utf8Value type(isolate, args[1]); |
|||
uint64_t now = PERFORMANCE_NOW(); |
|||
new PerformanceEntry(env, args.This(), *name, *type, now, now); |
|||
} |
|||
|
|||
void PerformanceEntry::NotifyObservers(Environment* env, |
|||
PerformanceEntry* entry) { |
|||
uint32_t* observers = env->performance_state()->observers; |
|||
PerformanceEntryType type = ToPerformanceEntryTypeEnum(entry->type().c_str()); |
|||
if (observers == nullptr || |
|||
type == NODE_PERFORMANCE_ENTRY_TYPE_INVALID || |
|||
!observers[type]) { |
|||
return; |
|||
} |
|||
Local<Context> context = env->context(); |
|||
Isolate* isolate = env->isolate(); |
|||
Local<Value> argv = entry->object(); |
|||
env->performance_entry_callback()->Call(context, |
|||
v8::Undefined(isolate), |
|||
1, &argv); |
|||
} |
|||
|
|||
void Mark(const FunctionCallbackInfo<Value>& args) { |
|||
Environment* env = Environment::GetCurrent(args); |
|||
Local<Context> context = env->context(); |
|||
Isolate* isolate = env->isolate(); |
|||
Utf8Value name(isolate, args[0]); |
|||
uint64_t now = PERFORMANCE_NOW(); |
|||
auto marks = env->performance_marks(); |
|||
(*marks)[*name] = now; |
|||
|
|||
// TODO(jasnell): Once Tracing API is fully implemented, this should
|
|||
// record a trace event also.
|
|||
|
|||
Local<Function> fn = env->performance_entry_template(); |
|||
Local<Object> obj = fn->NewInstance(context).ToLocalChecked(); |
|||
new PerformanceEntry(env, obj, *name, "mark", now, now); |
|||
args.GetReturnValue().Set(obj); |
|||
} |
|||
|
|||
inline uint64_t GetPerformanceMark(Environment* env, std::string name) { |
|||
auto marks = env->performance_marks(); |
|||
auto res = marks->find(name); |
|||
return res != marks->end() ? res->second : 0; |
|||
} |
|||
|
|||
void Measure(const FunctionCallbackInfo<Value>& args) { |
|||
Environment* env = Environment::GetCurrent(args); |
|||
Local<Context> context = env->context(); |
|||
Isolate* isolate = env->isolate(); |
|||
Utf8Value name(isolate, args[0]); |
|||
Utf8Value startMark(isolate, args[1]); |
|||
Utf8Value endMark(isolate, args[2]); |
|||
|
|||
double* milestones = env->performance_state()->milestones; |
|||
|
|||
uint64_t startTimestamp = timeOrigin; |
|||
uint64_t start = GetPerformanceMark(env, *startMark); |
|||
if (start != 0) { |
|||
startTimestamp = start; |
|||
} else { |
|||
PerformanceMilestone milestone = ToPerformanceMilestoneEnum(*startMark); |
|||
if (milestone != NODE_PERFORMANCE_MILESTONE_INVALID) |
|||
startTimestamp = milestones[milestone]; |
|||
} |
|||
|
|||
uint64_t endTimestamp = GetPerformanceMark(env, *endMark); |
|||
if (endTimestamp == 0) { |
|||
PerformanceMilestone milestone = ToPerformanceMilestoneEnum(*endMark); |
|||
if (milestone != NODE_PERFORMANCE_MILESTONE_INVALID) |
|||
endTimestamp = milestones[milestone]; |
|||
} |
|||
|
|||
if (endTimestamp < startTimestamp) |
|||
endTimestamp = startTimestamp; |
|||
|
|||
// TODO(jasnell): Once Tracing API is fully implemented, this should
|
|||
// record a trace event also.
|
|||
|
|||
Local<Function> fn = env->performance_entry_template(); |
|||
Local<Object> obj = fn->NewInstance(context).ToLocalChecked(); |
|||
new PerformanceEntry(env, obj, *name, "measure", |
|||
startTimestamp, endTimestamp); |
|||
args.GetReturnValue().Set(obj); |
|||
} |
|||
|
|||
void GetPerformanceEntryName(const Local<String> prop, |
|||
const PropertyCallbackInfo<Value>& info) { |
|||
Isolate* isolate = info.GetIsolate(); |
|||
PerformanceEntry* entry; |
|||
ASSIGN_OR_RETURN_UNWRAP(&entry, info.Holder()); |
|||
info.GetReturnValue().Set( |
|||
String::NewFromUtf8(isolate, entry->name().c_str(), String::kNormalString)); |
|||
} |
|||
|
|||
void GetPerformanceEntryType(const Local<String> prop, |
|||
const PropertyCallbackInfo<Value>& info) { |
|||
Isolate* isolate = info.GetIsolate(); |
|||
PerformanceEntry* entry; |
|||
ASSIGN_OR_RETURN_UNWRAP(&entry, info.Holder()); |
|||
info.GetReturnValue().Set( |
|||
String::NewFromUtf8(isolate, entry->type().c_str(), String::kNormalString)); |
|||
} |
|||
|
|||
void GetPerformanceEntryStartTime(const Local<String> prop, |
|||
const PropertyCallbackInfo<Value>& info) { |
|||
PerformanceEntry* entry; |
|||
ASSIGN_OR_RETURN_UNWRAP(&entry, info.Holder()); |
|||
info.GetReturnValue().Set(entry->startTime()); |
|||
} |
|||
|
|||
void GetPerformanceEntryDuration(const Local<String> prop, |
|||
const PropertyCallbackInfo<Value>& info) { |
|||
PerformanceEntry* entry; |
|||
ASSIGN_OR_RETURN_UNWRAP(&entry, info.Holder()); |
|||
info.GetReturnValue().Set(entry->duration()); |
|||
} |
|||
|
|||
void MarkMilestone(const FunctionCallbackInfo<Value>& args) { |
|||
Environment* env = Environment::GetCurrent(args); |
|||
Local<Context> context = env->context(); |
|||
double* milestones = env->performance_state()->milestones; |
|||
PerformanceMilestone milestone = |
|||
static_cast<PerformanceMilestone>( |
|||
args[0]->Int32Value(context).ToChecked()); |
|||
if (milestone != NODE_PERFORMANCE_MILESTONE_INVALID) { |
|||
milestones[milestone] = PERFORMANCE_NOW(); |
|||
} |
|||
} |
|||
|
|||
void SetupPerformanceObservers(const FunctionCallbackInfo<Value>& args) { |
|||
Environment* env = Environment::GetCurrent(args); |
|||
CHECK(args[0]->IsFunction()); |
|||
env->set_performance_entry_callback(args[0].As<Function>()); |
|||
} |
|||
|
|||
inline void PerformanceGCCallback(uv_async_t* handle) { |
|||
PerformanceEntry::Data* data = |
|||
static_cast<PerformanceEntry::Data*>(handle->data); |
|||
Isolate* isolate = Isolate::GetCurrent(); |
|||
HandleScope scope(isolate); |
|||
Environment* env = Environment::GetCurrent(isolate); |
|||
Local<Context> context = env->context(); |
|||
Local<Function> fn; |
|||
Local<Object> obj; |
|||
PerformanceGCKind kind = static_cast<PerformanceGCKind>(data->data()); |
|||
|
|||
uint32_t* observers = env->performance_state()->observers; |
|||
if (!observers[NODE_PERFORMANCE_ENTRY_TYPE_GC]) { |
|||
goto cleanup; |
|||
} |
|||
|
|||
fn = env->performance_entry_template(); |
|||
obj = fn->NewInstance(context).ToLocalChecked(); |
|||
obj->Set(context, |
|||
FIXED_ONE_BYTE_STRING(isolate, "kind"), |
|||
Integer::New(isolate, kind)).FromJust(); |
|||
new PerformanceEntry(env, obj, data); |
|||
|
|||
cleanup: |
|||
delete data; |
|||
auto closeCB = [](uv_handle_t* handle) { delete handle; }; |
|||
uv_close(reinterpret_cast<uv_handle_t*>(handle), closeCB); |
|||
} |
|||
|
|||
inline void MarkGarbageCollectionStart(Isolate* isolate, |
|||
v8::GCType type, |
|||
v8::GCCallbackFlags flags) { |
|||
performance_last_gc_start_mark_ = PERFORMANCE_NOW(); |
|||
performance_last_gc_type_ = type; |
|||
} |
|||
|
|||
inline void MarkGarbageCollectionEnd(Isolate* isolate, |
|||
v8::GCType type, |
|||
v8::GCCallbackFlags flags) { |
|||
uv_async_t *async = new uv_async_t; |
|||
async->data = |
|||
new PerformanceEntry::Data("gc", "gc", |
|||
performance_last_gc_start_mark_, |
|||
PERFORMANCE_NOW(), type); |
|||
uv_async_init(uv_default_loop(), async, PerformanceGCCallback); |
|||
uv_async_send(async); |
|||
} |
|||
|
|||
inline void SetupGarbageCollectionTracking(Isolate* isolate) {
  // The start mark must be taken in the GC prologue and the end mark in the
  // GC epilogue; otherwise the measured GC duration would always be zero.
  isolate->AddGCPrologueCallback(MarkGarbageCollectionStart);
  isolate->AddGCEpilogueCallback(MarkGarbageCollectionEnd);
}
|||
|
|||
inline Local<Value> GetName(Local<Function> fn) { |
|||
Local<Value> val = fn->GetDebugName(); |
|||
if (val.IsEmpty() || val->IsUndefined()) { |
|||
Local<Value> boundFunction = fn->GetBoundFunction(); |
|||
if (!boundFunction.IsEmpty() && !boundFunction->IsUndefined()) { |
|||
val = GetName(boundFunction.As<Function>()); |
|||
} |
|||
} |
|||
return val; |
|||
} |
|||
|
|||
void TimerFunctionCall(const FunctionCallbackInfo<Value>& args) { |
|||
Isolate* isolate = args.GetIsolate(); |
|||
HandleScope scope(isolate); |
|||
Environment* env = Environment::GetCurrent(isolate); |
|||
Local<Context> context = env->context(); |
|||
Local<Function> fn = args.Data().As<Function>(); |
|||
size_t count = args.Length(); |
|||
size_t idx; |
|||
std::vector<Local<Value>> call_args; |
|||
for (size_t i = 0; i < count; ++i) { |
|||
call_args.push_back(args[i]); |
|||
} |
|||
|
|||
Utf8Value name(isolate, GetName(fn)); |
|||
|
|||
uint64_t start; |
|||
uint64_t end; |
|||
v8::TryCatch try_catch(isolate); |
|||
if (args.IsConstructCall()) { |
|||
start = PERFORMANCE_NOW(); |
|||
v8::MaybeLocal<Object> ret = fn->NewInstance(context, |
|||
call_args.size(), |
|||
call_args.data()); |
|||
end = PERFORMANCE_NOW(); |
|||
if (ret.IsEmpty()) { |
|||
try_catch.ReThrow(); |
|||
return; |
|||
} |
|||
args.GetReturnValue().Set(ret.ToLocalChecked()); |
|||
} else { |
|||
start = PERFORMANCE_NOW(); |
|||
v8::MaybeLocal<Value> ret = fn->Call(context, |
|||
args.This(), |
|||
call_args.size(), |
|||
call_args.data()); |
|||
end = PERFORMANCE_NOW(); |
|||
if (ret.IsEmpty()) { |
|||
try_catch.ReThrow(); |
|||
return; |
|||
} |
|||
args.GetReturnValue().Set(ret.ToLocalChecked()); |
|||
} |
|||
|
|||
|
|||
uint32_t* observers = env->performance_state()->observers; |
|||
if (!observers[NODE_PERFORMANCE_ENTRY_TYPE_FUNCTION]) |
|||
return; |
|||
|
|||
Local<Function> ctor = env->performance_entry_template(); |
|||
v8::MaybeLocal<Object> instance = ctor->NewInstance(context); |
|||
Local<Object> obj = instance.ToLocalChecked(); |
|||
for (idx = 0; idx < count; idx++) { |
|||
obj->Set(context, idx, args[idx]); |
|||
} |
|||
new PerformanceEntry(env, obj, *name, "function", start, end); |
|||
} |
|||
|
|||
void Timerify(const FunctionCallbackInfo<Value>& args) { |
|||
Environment* env = Environment::GetCurrent(args); |
|||
Local<Context> context = env->context(); |
|||
CHECK(args[0]->IsFunction()); |
|||
CHECK(args[1]->IsNumber()); |
|||
Local<Function> fn = args[0].As<Function>(); |
|||
int length = args[1]->IntegerValue(context).ToChecked(); |
|||
Local<Function> wrap = |
|||
Function::New(context, TimerFunctionCall, fn, length).ToLocalChecked(); |
|||
args.GetReturnValue().Set(wrap); |
|||
} |
|||
|
|||
void Init(Local<Object> target, |
|||
Local<Value> unused, |
|||
Local<Context> context) { |
|||
Environment* env = Environment::GetCurrent(context); |
|||
Isolate* isolate = env->isolate(); |
|||
performance_state* state = env->performance_state(); |
|||
auto state_ab = ArrayBuffer::New(isolate, state, sizeof(*state)); |
|||
|
|||
#define SET_STATE_TYPEDARRAY(name, type, field) \ |
|||
target->Set(context, \ |
|||
FIXED_ONE_BYTE_STRING(isolate, (name)), \ |
|||
type::New(state_ab, \ |
|||
offsetof(performance_state, field), \ |
|||
arraysize(state->field))) \ |
|||
.FromJust() |
|||
SET_STATE_TYPEDARRAY("observerCounts", v8::Uint32Array, observers); |
|||
SET_STATE_TYPEDARRAY("milestones", v8::Float64Array, milestones); |
|||
#undef SET_STATE_TYPEDARRAY |
|||
|
|||
Local<String> performanceEntryString = |
|||
FIXED_ONE_BYTE_STRING(isolate, "PerformanceEntry"); |
|||
|
|||
Local<FunctionTemplate> pe = env->NewFunctionTemplate(PerformanceEntry::New); |
|||
pe->InstanceTemplate()->SetInternalFieldCount(1); |
|||
pe->SetClassName(performanceEntryString); |
|||
Local<ObjectTemplate> ot = pe->InstanceTemplate(); |
|||
ot->SetAccessor(env->name_string(), GetPerformanceEntryName); |
|||
ot->SetAccessor(FIXED_ONE_BYTE_STRING(isolate, "entryType"), |
|||
GetPerformanceEntryType); |
|||
ot->SetAccessor(FIXED_ONE_BYTE_STRING(isolate, "startTime"), |
|||
GetPerformanceEntryStartTime); |
|||
ot->SetAccessor(FIXED_ONE_BYTE_STRING(isolate, "duration"), |
|||
GetPerformanceEntryDuration); |
|||
Local<Function> fn = pe->GetFunction(); |
|||
target->Set(performanceEntryString, fn); |
|||
env->set_performance_entry_template(fn); |
|||
|
|||
env->SetMethod(target, "mark", Mark); |
|||
env->SetMethod(target, "measure", Measure); |
|||
env->SetMethod(target, "markMilestone", MarkMilestone); |
|||
env->SetMethod(target, "setupObservers", SetupPerformanceObservers); |
|||
env->SetMethod(target, "timerify", Timerify); |
|||
|
|||
Local<Object> constants = Object::New(isolate); |
|||
|
|||
NODE_DEFINE_CONSTANT(constants, NODE_PERFORMANCE_GC_MAJOR); |
|||
NODE_DEFINE_CONSTANT(constants, NODE_PERFORMANCE_GC_MINOR); |
|||
NODE_DEFINE_CONSTANT(constants, NODE_PERFORMANCE_GC_INCREMENTAL); |
|||
NODE_DEFINE_CONSTANT(constants, NODE_PERFORMANCE_GC_WEAKCB); |
|||
|
|||
#define V(name, _) \ |
|||
NODE_DEFINE_HIDDEN_CONSTANT(constants, NODE_PERFORMANCE_ENTRY_TYPE_##name); |
|||
NODE_PERFORMANCE_ENTRY_TYPES(V) |
|||
#undef V |
|||
|
|||
#define V(name, _) \ |
|||
NODE_DEFINE_HIDDEN_CONSTANT(constants, NODE_PERFORMANCE_MILESTONE_##name); |
|||
NODE_PERFORMANCE_MILESTONES(V) |
|||
#undef V |
|||
|
|||
v8::PropertyAttribute attr = |
|||
static_cast<v8::PropertyAttribute>(v8::ReadOnly | v8::DontDelete); |
|||
|
|||
target->DefineOwnProperty(context, |
|||
FIXED_ONE_BYTE_STRING(isolate, "timeOrigin"), |
|||
v8::Number::New(isolate, timeOrigin / 1e6), |
|||
attr); |
|||
|
|||
target->DefineOwnProperty(context, |
|||
env->constants_string(), |
|||
constants, |
|||
attr); |
|||
|
|||
SetupGarbageCollectionTracking(isolate); |
|||
} |
|||
|
|||
} // namespace performance
|
|||
} // namespace node
|
|||
|
|||
NODE_MODULE_CONTEXT_AWARE_BUILTIN(performance, node::performance::Init) |
@ -0,0 +1,174 @@ |
|||
#ifndef SRC_NODE_PERF_H_ |
|||
#define SRC_NODE_PERF_H_ |
|||
|
|||
#if defined(NODE_WANT_INTERNALS) && NODE_WANT_INTERNALS |
|||
|
|||
#include "node.h" |
|||
#include "node_perf_common.h" |
|||
#include "env.h" |
|||
#include "base-object.h" |
|||
#include "base-object-inl.h" |
|||
|
|||
#include "v8.h" |
|||
#include "uv.h" |
|||
|
|||
#include <limits> |
|||
#include <string> |
|||
|
|||
namespace node { |
|||
namespace performance { |
|||
|
|||
using v8::FunctionCallbackInfo; |
|||
using v8::GCType; |
|||
using v8::Local; |
|||
using v8::Object; |
|||
using v8::Value; |
|||
|
|||
static inline PerformanceMilestone ToPerformanceMilestoneEnum(const char* str) { |
|||
#define V(name, label) \ |
|||
if (strcmp(str, label) == 0) return NODE_PERFORMANCE_MILESTONE_##name; |
|||
NODE_PERFORMANCE_MILESTONES(V) |
|||
#undef V |
|||
return NODE_PERFORMANCE_MILESTONE_INVALID; |
|||
} |
|||
|
|||
static inline PerformanceEntryType ToPerformanceEntryTypeEnum( |
|||
const char* type) { |
|||
#define V(name, label) \ |
|||
if (strcmp(type, label) == 0) return NODE_PERFORMANCE_ENTRY_TYPE_##name; |
|||
NODE_PERFORMANCE_ENTRY_TYPES(V) |
|||
#undef V |
|||
return NODE_PERFORMANCE_ENTRY_TYPE_INVALID; |
|||
} |
|||
|
|||
const double MAX_DOUBLE = std::numeric_limits<double>::max(); |
|||
|
|||
NODE_EXTERN inline void MarkPerformanceMilestone( |
|||
Environment* env, |
|||
PerformanceMilestone milestone) { |
|||
env->performance_state()->milestones[milestone] = PERFORMANCE_NOW(); |
|||
} |
|||
|
|||
class PerformanceEntry : public BaseObject { |
|||
public: |
|||
// Used for temporary storage of performance entry details when the
|
|||
// object cannot be created immediately.
|
|||
class Data { |
|||
public: |
|||
Data( |
|||
const char* name, |
|||
const char* type, |
|||
uint64_t startTime, |
|||
uint64_t endTime, |
|||
int data = 0) : |
|||
name_(name), |
|||
type_(type), |
|||
startTime_(startTime), |
|||
endTime_(endTime), |
|||
data_(data) {} |
|||
|
|||
std::string name() const { |
|||
return name_; |
|||
} |
|||
|
|||
std::string type() const { |
|||
return type_; |
|||
} |
|||
|
|||
uint64_t startTime() const { |
|||
return startTime_; |
|||
} |
|||
|
|||
uint64_t endTime() const { |
|||
return endTime_; |
|||
} |
|||
|
|||
int data() const { |
|||
return data_; |
|||
} |
|||
|
|||
private: |
|||
std::string name_; |
|||
std::string type_; |
|||
uint64_t startTime_; |
|||
uint64_t endTime_; |
|||
int data_; |
|||
}; |
|||
|
|||
static void NotifyObservers(Environment* env, PerformanceEntry* entry); |
|||
|
|||
static void New(const FunctionCallbackInfo<Value>& args); |
|||
|
|||
PerformanceEntry(Environment* env, |
|||
Local<Object> wrap, |
|||
const char* name, |
|||
const char* type, |
|||
uint64_t startTime, |
|||
uint64_t endTime) : |
|||
BaseObject(env, wrap), |
|||
name_(name), |
|||
type_(type), |
|||
startTime_(startTime), |
|||
endTime_(endTime) { |
|||
MakeWeak<PerformanceEntry>(this); |
|||
NotifyObservers(env, this); |
|||
} |
|||
|
|||
PerformanceEntry(Environment* env, |
|||
Local<Object> wrap, |
|||
Data* data) : |
|||
BaseObject(env, wrap), |
|||
name_(data->name()), |
|||
type_(data->type()), |
|||
startTime_(data->startTime()), |
|||
endTime_(data->endTime()) { |
|||
MakeWeak<PerformanceEntry>(this); |
|||
NotifyObservers(env, this); |
|||
} |
|||
|
|||
~PerformanceEntry() {} |
|||
|
|||
std::string name() const { |
|||
return name_; |
|||
} |
|||
|
|||
std::string type() const { |
|||
return type_; |
|||
} |
|||
|
|||
double startTime() const { |
|||
return startTime_ / 1e6; |
|||
} |
|||
|
|||
double duration() const { |
|||
return durationNano() / 1e6; |
|||
} |
|||
|
|||
uint64_t startTimeNano() const { |
|||
return startTime_; |
|||
} |
|||
|
|||
uint64_t durationNano() const { |
|||
return endTime_ - startTime_; |
|||
} |
|||
|
|||
private: |
|||
std::string name_; |
|||
std::string type_; |
|||
uint64_t startTime_; |
|||
uint64_t endTime_; |
|||
}; |
|||
|
|||
enum PerformanceGCKind { |
|||
NODE_PERFORMANCE_GC_MAJOR = GCType::kGCTypeMarkSweepCompact, |
|||
NODE_PERFORMANCE_GC_MINOR = GCType::kGCTypeScavenge, |
|||
NODE_PERFORMANCE_GC_INCREMENTAL = GCType::kGCTypeIncrementalMarking, |
|||
NODE_PERFORMANCE_GC_WEAKCB = GCType::kGCTypeProcessWeakCallbacks |
|||
}; |
|||
|
|||
} // namespace performance
|
|||
} // namespace node
|
|||
|
|||
#endif // defined(NODE_WANT_INTERNALS) && NODE_WANT_INTERNALS
|
|||
|
|||
#endif // SRC_NODE_PERF_H_
|
@ -0,0 +1,71 @@ |
|||
#ifndef SRC_NODE_PERF_COMMON_H_ |
|||
#define SRC_NODE_PERF_COMMON_H_ |
|||
|
|||
#include "node.h" |
|||
#include "v8.h" |
|||
|
|||
#include <map> |
|||
#include <string> |
|||
|
|||
namespace node { |
|||
namespace performance { |
|||
|
|||
#define PERFORMANCE_NOW() uv_hrtime() |
|||
|
|||
// These occur before the environment is created. Cache them
|
|||
// here and add them to the milestones when the env is init'd.
|
|||
extern uint64_t performance_node_start; |
|||
extern uint64_t performance_v8_start; |
|||
|
|||
#define NODE_PERFORMANCE_MILESTONES(V) \ |
|||
V(ENVIRONMENT, "environment") \ |
|||
V(NODE_START, "nodeStart") \ |
|||
V(V8_START, "v8Start") \ |
|||
V(LOOP_START, "loopStart") \ |
|||
V(LOOP_EXIT, "loopExit") \ |
|||
V(BOOTSTRAP_COMPLETE, "bootstrapComplete") \ |
|||
V(THIRD_PARTY_MAIN_START, "thirdPartyMainStart") \ |
|||
V(THIRD_PARTY_MAIN_END, "thirdPartyMainEnd") \ |
|||
V(CLUSTER_SETUP_START, "clusterSetupStart") \ |
|||
V(CLUSTER_SETUP_END, "clusterSetupEnd") \ |
|||
V(MODULE_LOAD_START, "moduleLoadStart") \ |
|||
V(MODULE_LOAD_END, "moduleLoadEnd") \ |
|||
V(PRELOAD_MODULE_LOAD_START, "preloadModulesLoadStart") \ |
|||
V(PRELOAD_MODULE_LOAD_END, "preloadModulesLoadEnd") |
|||
|
|||
#define NODE_PERFORMANCE_ENTRY_TYPES(V) \ |
|||
V(NODE, "node") \ |
|||
V(MARK, "mark") \ |
|||
V(MEASURE, "measure") \ |
|||
V(GC, "gc") \ |
|||
V(FUNCTION, "function") |
|||
|
|||
enum PerformanceMilestone { |
|||
#define V(name, _) NODE_PERFORMANCE_MILESTONE_##name, |
|||
NODE_PERFORMANCE_MILESTONES(V) |
|||
#undef V |
|||
NODE_PERFORMANCE_MILESTONE_INVALID |
|||
}; |
|||
|
|||
enum PerformanceEntryType { |
|||
#define V(name, _) NODE_PERFORMANCE_ENTRY_TYPE_##name, |
|||
NODE_PERFORMANCE_ENTRY_TYPES(V) |
|||
#undef V |
|||
NODE_PERFORMANCE_ENTRY_TYPE_INVALID |
|||
}; |
|||

// Illustrative usage: PERFORMANCE_MARK(env, BOOTSTRAP_COMPLETE) expands to a
// MarkPerformanceMilestone(env, NODE_PERFORMANCE_MILESTONE_BOOTSTRAP_COMPLETE)
// call.
#define PERFORMANCE_MARK(env, n) \
  do { \
    node::performance::MarkPerformanceMilestone(env, \
      node::performance::NODE_PERFORMANCE_MILESTONE_##n); \
  } while (0);

// observers[] counts active observers per entry type (the observer test below
// reads these as observerCounts); milestones[] holds the startup milestone
// timestamps.
struct performance_state {
  uint32_t observers[NODE_PERFORMANCE_ENTRY_TYPE_INVALID];
  double milestones[NODE_PERFORMANCE_MILESTONE_INVALID];
};

}  // namespace performance
}  // namespace node

#endif  // SRC_NODE_PERF_COMMON_H_
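The milestones enumerated above end up as numeric properties on `performance.nodeTiming`; the performance test further down asserts the full property list. A minimal sketch of reading a few of them (property names are the ones that test checks):

```js
const { performance } = require('perf_hooks');

// Startup milestone timestamps; the test below only asserts that these
// are numbers.
const {
  nodeStart,
  v8Start,
  environment,
  loopStart,
  bootstrapComplete
} = performance.nodeTiming;

console.log({ nodeStart, v8Start, environment, loopStart, bootstrapComplete });
```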
@ -0,0 +1,93 @@
'use strict';

const common = require('../common');
const assert = require('assert');

const {
  performance,
  PerformanceObserver
} = require('perf_hooks');

{
  // Intentional no-op function. Do not wrap in common.mustCall();
  const n = performance.timerify(() => {});
  n();
  const entries = performance.getEntriesByType('function');
  assert.strictEqual(entries.length, 0);

  const obs = new PerformanceObserver(common.mustCall((list) => {
    const entries = list.getEntries();
    const entry = entries[0];
    assert(entry);
    assert.strictEqual(entry.name, 'performance.timerify');
    assert.strictEqual(entry.entryType, 'function');
    assert.strictEqual(typeof entry.duration, 'number');
    assert.strictEqual(typeof entry.startTime, 'number');
    obs.disconnect();
    performance.clearFunctions();
  }));
  obs.observe({ entryTypes: ['function'] });
  n();
}

{
  // If the wrapped function throws, the error should just bubble up and the
  // performance timeline entry will not be reported.
  const obs = new PerformanceObserver(common.mustNotCall());
  obs.observe({ entryTypes: ['function'] });
  const n = performance.timerify(() => {
    throw new Error('test');
  });
  assert.throws(() => n(), /^Error: test$/);
  const entries = performance.getEntriesByType('function');
  assert.strictEqual(entries.length, 0);
  obs.disconnect();
}

{
  class N {}
  const n = performance.timerify(N);
  new n();
  const entries = performance.getEntriesByType('function');
  assert.strictEqual(entries.length, 0);

  const obs = new PerformanceObserver(common.mustCall((list) => {
    const entries = list.getEntries();
    const entry = entries[0];
    assert(entry);
    // The arguments passed to the timerified constructor are exposed as
    // indexed properties on the entry.
    assert.strictEqual(entry[0], 1);
    assert.strictEqual(entry[1], 'abc');
    assert.strictEqual(entry.name, 'N');
    assert.strictEqual(entry.entryType, 'function');
    assert.strictEqual(typeof entry.duration, 'number');
    assert.strictEqual(typeof entry.startTime, 'number');
    obs.disconnect();
    performance.clearFunctions();
  }));
  obs.observe({ entryTypes: ['function'] });

  new n(1, 'abc');
}

{
  [1, {}, [], null, undefined, Infinity].forEach((i) => {
    assert.throws(() => performance.timerify(i),
                  common.expectsError({
                    code: 'ERR_INVALID_ARG_TYPE',
                    type: TypeError,
                    message: 'The "fn" argument must be of type Function'
                  }));
  });
}

// A function can only be wrapped once; also check the length and name.
{
  const m = (a, b = 1) => {};
  const n = performance.timerify(m);
  const o = performance.timerify(m);
  const p = performance.timerify(n);
  assert.strictEqual(n, o);
  assert.strictEqual(n, p);
  assert.strictEqual(n.length, m.length);
  assert.strictEqual(n.name, 'timerified m');
}
@ -0,0 +1,51 @@
// Flags: --expose-gc
'use strict';

const common = require('../common');
const assert = require('assert');
const {
  performance,
  PerformanceObserver
} = require('perf_hooks');

const {
  NODE_PERFORMANCE_GC_MAJOR,
  NODE_PERFORMANCE_GC_MINOR,
  NODE_PERFORMANCE_GC_INCREMENTAL,
  NODE_PERFORMANCE_GC_WEAKCB
} = process.binding('performance').constants;

const kinds = [
  NODE_PERFORMANCE_GC_MAJOR,
  NODE_PERFORMANCE_GC_MINOR,
  NODE_PERFORMANCE_GC_INCREMENTAL,
  NODE_PERFORMANCE_GC_WEAKCB
];

// No observers for GC events, no entries should appear
{
  global.gc();
  const entries = performance.getEntriesByType('gc');
  assert.strictEqual(entries.length, 0);
}

// Adding an observer should force at least one gc to appear
{
  const obs = new PerformanceObserver(common.mustCallAtLeast((list) => {
    const entry = list.getEntries()[0];
    assert(entry);
    assert.strictEqual(entry.name, 'gc');
    assert.strictEqual(entry.entryType, 'gc');
    assert(kinds.includes(entry.kind));
    assert.strictEqual(typeof entry.startTime, 'number');
    assert.strictEqual(typeof entry.duration, 'number');

    performance.clearGC();

    const entries = performance.getEntriesByType('gc');
    assert.strictEqual(entries.length, 0);
    obs.disconnect();
  }));
  obs.observe({ entryTypes: ['gc'] });
  global.gc();
}
@ -0,0 +1,124 @@
'use strict';

const common = require('../common');
const assert = require('assert');
const { performance } = require('perf_hooks');

assert(performance);
assert(performance.nodeTiming);
assert.strictEqual(typeof performance.timeOrigin, 'number');

{
  const entries = performance.getEntries();
  assert(Array.isArray(entries));
  assert.strictEqual(entries.length, 1);
  assert.strictEqual(entries[0], performance.nodeTiming);
}

{
  const entries = performance.getEntriesByName('node');
  assert(Array.isArray(entries));
  assert.strictEqual(entries.length, 1);
  assert.strictEqual(entries[0], performance.nodeTiming);
}

{
  let n;
  let entries = performance.getEntries();
  for (n = 0; n < entries.length; n++) {
    const entry = entries[n];
    assert.notStrictEqual(entry.name, 'A');
    assert.notStrictEqual(entry.entryType, 'mark');
  }
  performance.mark('A');
  entries = performance.getEntries();
  const markA = entries[1];
  assert.strictEqual(markA.name, 'A');
  assert.strictEqual(markA.entryType, 'mark');
  performance.clearMarks('A');
  entries = performance.getEntries();
  for (n = 0; n < entries.length; n++) {
    const entry = entries[n];
    assert.notStrictEqual(entry.name, 'A');
    assert.notStrictEqual(entry.entryType, 'mark');
  }
}

{
  let entries = performance.getEntries();
  for (let n = 0; n < entries.length; n++) {
    const entry = entries[n];
    assert.notStrictEqual(entry.name, 'B');
    assert.notStrictEqual(entry.entryType, 'mark');
  }
  performance.mark('B');
  entries = performance.getEntries();
  const markB = entries[1];
  assert.strictEqual(markB.name, 'B');
  assert.strictEqual(markB.entryType, 'mark');
  performance.clearMarks();
  entries = performance.getEntries();
  assert.strictEqual(entries.length, 1);
}

{
  performance.mark('A');
  [undefined, null, 'foo', 'initialize', 1].forEach((i) => {
    assert.doesNotThrow(() => performance.measure('test', i, 'A'));
  });

  [undefined, null, 'foo', 1].forEach((i) => {
    assert.throws(() => performance.measure('test', 'A', i),
                  common.expectsError({
                    code: 'ERR_INVALID_PERFORMANCE_MARK',
                    type: Error,
                    message: `The "${i}" performance mark has not been set`
                  }));
  });

  performance.clearMeasures();
  performance.clearMarks();

  const entries = performance.getEntries();
  assert.strictEqual(entries.length, 1);
}

{
  performance.mark('A');
  setImmediate(() => {
    performance.mark('B');
    performance.measure('foo', 'A', 'B');
    const entry = performance.getEntriesByName('foo')[0];
    const markA = performance.getEntriesByName('A', 'mark')[0];
    const markB = performance.getEntriesByName('B', 'mark')[0];
    assert.strictEqual(entry.name, 'foo');
    assert.strictEqual(entry.entryType, 'measure');
    assert.strictEqual(entry.startTime, markA.startTime);
    assert.strictEqual(entry.duration.toPrecision(3),
                       (markB.startTime - markA.startTime).toPrecision(3));
  });
}

assert.strictEqual(performance.nodeTiming.name, 'node');
assert.strictEqual(performance.nodeTiming.entryType, 'node');

[
  'startTime',
  'duration',
  'nodeStart',
  'v8Start',
  'bootstrapComplete',
  'environment',
  'loopStart',
  'loopExit',
  'thirdPartyMainStart',
  'thirdPartyMainEnd',
  'clusterSetupStart',
  'clusterSetupEnd',
  'moduleLoadStart',
  'moduleLoadEnd',
  'preloadModuleLoadStart',
  'preloadModuleLoadEnd'
].forEach((i) => {
  assert.strictEqual(typeof performance.nodeTiming[i], 'number');
});
@ -0,0 +1,139 @@
'use strict';

const common = require('../common');
const Countdown = require('../common/countdown');
const assert = require('assert');
const {
  observerCounts: counts
} = process.binding('performance');
const {
  performance,
  PerformanceObserver,
  constants
} = require('perf_hooks');

const {
  NODE_PERFORMANCE_ENTRY_TYPE_NODE,
  NODE_PERFORMANCE_ENTRY_TYPE_MARK,
  NODE_PERFORMANCE_ENTRY_TYPE_MEASURE,
  NODE_PERFORMANCE_ENTRY_TYPE_GC,
  NODE_PERFORMANCE_ENTRY_TYPE_FUNCTION,
} = constants;

assert.strictEqual(counts[NODE_PERFORMANCE_ENTRY_TYPE_NODE], 0);
assert.strictEqual(counts[NODE_PERFORMANCE_ENTRY_TYPE_MARK], 1);
assert.strictEqual(counts[NODE_PERFORMANCE_ENTRY_TYPE_MEASURE], 1);
assert.strictEqual(counts[NODE_PERFORMANCE_ENTRY_TYPE_GC], 0);
assert.strictEqual(counts[NODE_PERFORMANCE_ENTRY_TYPE_FUNCTION], 0);

{
  [1, null, undefined, {}, [], Infinity].forEach((i) => {
    assert.throws(() => new PerformanceObserver(i),
                  common.expectsError({
                    code: 'ERR_INVALID_CALLBACK',
                    type: TypeError,
                    message: 'Callback must be a function'
                  }));
  });
  const observer = new PerformanceObserver(common.mustNotCall());

  [1, null, undefined].forEach((i) => {
    assert.throws(() => observer.observe(i),
                  common.expectsError({
                    code: 'ERR_INVALID_ARG_TYPE',
                    type: TypeError,
                    message: 'The "options" argument must be of type Object'
                  }));
  });

  [1, undefined, null, {}, Infinity].forEach((i) => {
    assert.throws(() => observer.observe({ entryTypes: i }),
                  common.expectsError({
                    code: 'ERR_INVALID_OPT_VALUE',
                    type: TypeError,
                    message: 'The value "[object Object]" is invalid for ' +
                             'option "entryTypes"'
                  }));
  });
}

// Test Non-Buffered
{
  const observer =
    new PerformanceObserver(common.mustCall(callback, 3));

  const countdown =
    new Countdown(3, common.mustCall(() => {
      observer.disconnect();
      assert.strictEqual(counts[NODE_PERFORMANCE_ENTRY_TYPE_MARK], 1);
    }));

  function callback(list, obs) {
    assert.strictEqual(obs, observer);
    const entries = list.getEntries();
    assert.strictEqual(entries.length, 1);
    countdown.dec();
  }
  assert.strictEqual(counts[NODE_PERFORMANCE_ENTRY_TYPE_MARK], 1);
  assert.doesNotThrow(() => observer.observe({ entryTypes: ['mark'] }));
  assert.strictEqual(counts[NODE_PERFORMANCE_ENTRY_TYPE_MARK], 2);
  performance.mark('test1');
  performance.mark('test2');
  performance.mark('test3');
}

// Test Buffered
{
  const observer =
    new PerformanceObserver(common.mustCall(callback, 1));
  assert.strictEqual(counts[NODE_PERFORMANCE_ENTRY_TYPE_MARK], 1);

  function callback(list, obs) {
    assert.strictEqual(obs, observer);
    const entries = list.getEntries();
    assert.strictEqual(entries.length, 3);
    observer.disconnect();
    assert.strictEqual(counts[NODE_PERFORMANCE_ENTRY_TYPE_MARK], 1);

    {
      const entriesByName = list.getEntriesByName('test1');
      assert.strictEqual(entriesByName.length, 1);
      assert.strictEqual(entriesByName[0].name, 'test1');
      assert.strictEqual(entriesByName[0].entryType, 'mark');
    }

    {
      const entriesByName = list.getEntriesByName('test1', 'mark');
      assert.strictEqual(entriesByName.length, 1);
      assert.strictEqual(entriesByName[0].name, 'test1');
      assert.strictEqual(entriesByName[0].entryType, 'mark');
    }

    {
      const entriesByName = list.getEntriesByName('test1', 'measure');
      assert.strictEqual(entriesByName.length, 0);
    }

    {
      const entriesByType = list.getEntriesByType('measure');
      assert.strictEqual(entriesByType.length, 1);
      assert.strictEqual(entriesByType[0].name, 'test3');
      assert.strictEqual(entriesByType[0].entryType, 'measure');
    }
  }

  assert.doesNotThrow(() => {
    observer.observe({ entryTypes: ['mark', 'measure'], buffered: true });
  });
  // Calling observe() a second time should not throw.
  assert.doesNotThrow(() => {
    observer.observe({ entryTypes: ['mark', 'measure'], buffered: true });
  });
  // Even though observe() was called twice, the observer is only counted
  // once, so the mark observer count goes from 1 to 2 (not 3).
  assert.strictEqual(counts[NODE_PERFORMANCE_ENTRY_TYPE_MARK], 2);
  performance.mark('test1');
  performance.mark('test2');
  performance.measure('test3', 'test1', 'test2');
}
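As the two blocks above illustrate, a non-buffered observer receives one callback per entry, while `buffered: true` lets entries queue up and delivers them in a single batched callback. A minimal sketch of the buffered form, mirroring the names used in the test:

```js
const { performance, PerformanceObserver } = require('perf_hooks');

const obs = new PerformanceObserver((list) => {
  // With buffered: true this fires once with all queued entries;
  // without it, it fires once per entry.
  console.log(list.getEntries().map((entry) => entry.name));
});
obs.observe({ entryTypes: ['mark'], buffered: true });

performance.mark('test1');
performance.mark('test2');
```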