diff --git a/index.js b/index.js
index cb55f7d..d1668f0 100644
--- a/index.js
+++ b/index.js
@@ -7,6 +7,7 @@ const Transform = require('stream').Transform;
 const urlLib = require('url');
 const fs = require('fs');
 const querystring = require('querystring');
+const CacheableRequest = require('cacheable-request');
 const duplexer3 = require('duplexer3');
 const intoStream = require('into-stream');
 const isStream = require('is-stream');
@@ -87,7 +88,8 @@ function requestAsEventEmitter(opts) {
 
 		let progressInterval;
 
-		const req = fn.request(opts, res => {
+		const cacheableRequest = new CacheableRequest(fn.request, opts.cache);
+		const cacheReq = cacheableRequest(opts, res => {
 			clearInterval(progressInterval);
 
 			ee.emit('uploadProgress', {
@@ -172,7 +174,7 @@ function requestAsEventEmitter(opts) {
 
 			const response = opts.decompress === true &&
 				typeof decompressResponse === 'function' &&
-				req.method !== 'HEAD' ? decompressResponse(progressStream) : progressStream;
+				opts.method !== 'HEAD' ? decompressResponse(progressStream) : progressStream;
 
 			if (!opts.decompress && ['gzip', 'deflate'].indexOf(res.headers['content-encoding']) !== -1) {
 				opts.encoding = null;
@@ -190,62 +192,66 @@ function requestAsEventEmitter(opts) {
 			});
 		});
 
-		req.once('error', err => {
-			clearInterval(progressInterval);
+		cacheReq.on('error', err => ee.emit('error', new got.CacheError(err, opts)));
 
-			const backoff = opts.retries(++retryCount, err);
+		cacheReq.on('request', req => {
+			req.once('error', err => {
+				clearInterval(progressInterval);
 
-			if (backoff) {
-				setTimeout(get, backoff, opts);
-				return;
-			}
+				const backoff = opts.retries(++retryCount, err);
 
-			ee.emit('error', new got.RequestError(err, opts));
-		});
+				if (backoff) {
+					setTimeout(get, backoff, opts);
+					return;
+				}
 
-		ee.on('request', req => {
-			ee.emit('uploadProgress', {
-				percent: 0,
-				transferred: 0,
-				total: uploadBodySize
+				ee.emit('error', new got.RequestError(err, opts));
 			});
 
-			req.connection.on('connect', () => {
-				const uploadEventFrequency = 150;
+			ee.on('request', req => {
+				ee.emit('uploadProgress', {
+					percent: 0,
+					transferred: 0,
+					total: uploadBodySize
+				});
 
-				progressInterval = setInterval(() => {
-					const lastUploaded = uploaded;
-					const headersSize = Buffer.byteLength(req._header);
-					uploaded = req.connection.bytesWritten - headersSize;
+				req.connection.on('connect', () => {
+					const uploadEventFrequency = 150;
 
-					// Prevent the known issue of `bytesWritten` being larger than body size
-					if (uploadBodySize && uploaded > uploadBodySize) {
-						uploaded = uploadBodySize;
-					}
+					progressInterval = setInterval(() => {
+						const lastUploaded = uploaded;
+						const headersSize = Buffer.byteLength(req._header);
+						uploaded = req.connection.bytesWritten - headersSize;
 
-					// Don't emit events with unchanged progress and
-					// prevent last event from being emitted, because
-					// it's emitted when `response` is emitted
-					if (uploaded === lastUploaded || uploaded === uploadBodySize) {
-						return;
-					}
+						// Prevent the known issue of `bytesWritten` being larger than body size
+						if (uploadBodySize && uploaded > uploadBodySize) {
+							uploaded = uploadBodySize;
+						}
 
-					ee.emit('uploadProgress', {
-						percent: uploadBodySize ? uploaded / uploadBodySize : 0,
-						transferred: uploaded,
-						total: uploadBodySize
-					});
-				}, uploadEventFrequency);
+						// Don't emit events with unchanged progress and
+						// prevent last event from being emitted, because
+						// it's emitted when `response` is emitted
+						if (uploaded === lastUploaded || uploaded === uploadBodySize) {
+							return;
+						}
+
+						ee.emit('uploadProgress', {
+							percent: uploadBodySize ? uploaded / uploadBodySize : 0,
+							transferred: uploaded,
+							total: uploadBodySize
+						});
+					}, uploadEventFrequency);
+				});
 			});
-		});
 
-		if (opts.gotTimeout) {
-			clearInterval(progressInterval);
-			timedOut(req, opts.gotTimeout);
-		}
+			if (opts.gotTimeout) {
+				clearInterval(progressInterval);
+				timedOut(req, opts.gotTimeout);
+			}
 
-		setImmediate(() => {
-			ee.emit('request', req);
+			setImmediate(() => {
+				ee.emit('request', req);
+			});
 		});
 	};
 
@@ -434,6 +440,7 @@ function normalizeArguments(url, opts) {
 		{
 			path: '',
 			retries: 2,
+			cache: false,
 			decompress: true,
 			useElectronNet: false
 		},
@@ -589,6 +596,13 @@ class StdError extends Error {
 	}
 }
 
+got.CacheError = class extends StdError {
+	constructor(error, opts) {
+		super(error.message, error, opts);
+		this.name = 'CacheError';
+	}
+};
+
 got.RequestError = class extends StdError {
 	constructor(error, opts) {
 		super(error.message, error, opts);
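For reference, this is roughly how `cacheable-request` is used on its own (a sketch based on its documented API, not on got internals). The wrapper returns an emitter that fires `request` with the real `http.ClientRequest`, or `error` if the cache layer fails, which is what the new `cacheReq` handlers above hook into; it is also where the `fromCache` flag on responses comes from. The host and path below are placeholders.

```js
const http = require('http');
const CacheableRequest = require('cacheable-request');

// Any Keyv-compatible storage adapter works as the second argument;
// a plain Map keeps the cache in memory.
const cacheableRequest = new CacheableRequest(http.request, new Map());

const cacheReq = cacheableRequest({protocol: 'http:', host: 'example.com', path: '/'}, res => {
	// Set by cacheable-request: `false` on a network hit, `true` on a cache hit
	console.log(res.fromCache);
	res.resume();
});

cacheReq.on('request', req => req.end());
cacheReq.on('error', err => console.error('cache failure', err));
```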
diff --git a/package.json b/package.json
index d6a5e60..46a5f37 100644
--- a/package.json
+++ b/package.json
@@ -50,6 +50,7 @@
 		"electron"
 	],
 	"dependencies": {
+		"cacheable-request": "^2.0.0",
 		"decompress-response": "^3.2.0",
 		"duplexer3": "^0.1.4",
 		"get-stream": "^3.0.0",
diff --git a/readme.md b/readme.md
index 65e019c..d4bcd5f 100644
--- a/readme.md
+++ b/readme.md
@@ -19,6 +19,7 @@ Created because [`request`](https://github.com/request/request) is bloated *(sev
 
 - [Promise & stream API](#api)
 - [Request cancelation](#aborting-the-request)
+- [RFC-compliant caching](#cache-adapters)
 - [Follows redirects](#followredirect)
 - [Retries on network failure](#retries)
 - [Progress events](#onuploadprogress-progress)
@@ -69,6 +70,10 @@ It's a `GET` request by default, but can be changed in `options`.
 
 Returns a Promise for a `response` object with a `body` property, a `url` property with the request URL or the final URL after redirects, and a `requestUrl` property with the original request URL.
 
+The response object will normally be a [Node.js HTTP response stream](https://nodejs.org/api/http.html#http_class_http_incomingmessage); however, if it's returned from the cache, it will be a [responselike object](https://github.com/lukechilds/responselike), which behaves in the same way.
+
+The response will also have a `fromCache` property set to a boolean value.
+
 ##### url
 
 Type: `string` `Object`
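As a rough illustration of the two notes above (a sketch, not part of the readme): whether the response is a real `http.IncomingMessage` or a cached responselike object, the usual properties read the same way.

```js
const got = require('got');
const cache = new Map();

(async () => {
	const fresh = await got('todomvc.com', {cache});   // network hit
	const cached = await got('todomvc.com', {cache});  // served from the cache

	console.log(fresh.fromCache, cached.fromCache);
	//=> false true

	// Same shape either way: statusCode, headers and body are all present
	console.log(cached.statusCode, cached.headers['content-type'], cached.body.length > 0);
})();
```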
@@ -170,6 +175,13 @@ Decompress the response automatically.
 
 If this is disabled, a compressed response is returned as a `Buffer`. This may be useful if you want to handle decompression yourself or stream the raw compressed data.
 
+###### cache
+
+Type: `Object`<br>
+Default: `false`
+
+[Cache adapter instance](#cache-adapters) for storing cached data.
+
 ###### useElectronNet
 
 Type: `boolean`<br>
@@ -253,6 +265,10 @@ Each error contains (if available) `statusCode`, `statusMessage`, `host`, `hostn
 
 In Promise mode, the `response` is attached to the error.
 
+#### got.CacheError
+
+When a cache method fails, for example, if the database goes down or there's a filesystem error.
+
 #### got.RequestError
 
 When a request fails. Contains a `code` property with error class code, like `ECONNREFUSED`.
@@ -316,6 +332,58 @@ request.catch(err => {
 request.cancel();
 ```
 
+
+## Cache
+
+You can use the JavaScript `Map` type as an in-memory cache:
+
+```js
+const got = require('got');
+const map = new Map();
+
+(async () => {
+	let response = await got('todomvc.com', {cache: map});
+	console.log(response.fromCache);
+	//=> false
+
+	response = await got('todomvc.com', {cache: map});
+	console.log(response.fromCache);
+	//=> true
+})();
+```
+
+Got uses [Keyv](https://github.com/lukechilds/keyv) internally to support a wide range of storage adapters. For something more scalable, you could use an [official Keyv storage adapter](https://github.com/lukechilds/keyv#official-storage-adapters):
+
+```
+npm install @keyv/redis
+```
+
+```js
+const got = require('got');
+const KeyvRedis = require('@keyv/redis');
+
+const redis = new KeyvRedis('redis://user:pass@localhost:6379');
+
+got('todomvc.com', {cache: redis});
+```
+
+Got supports anything that follows the Map API, so it's easy to write your own storage adapter or use a third-party solution.
+
+For example, the following are all valid storage adapters:
+
+```js
+const storageAdapter = new Map();
+// or
+const storageAdapter = require('./my-storage-adapter');
+// or
+const QuickLRU = require('quick-lru');
+const storageAdapter = new QuickLRU({maxSize: 1000});
+
+got('todomvc.com', {cache: storageAdapter});
+```
+
+View the [Keyv docs](https://github.com/lukechilds/keyv) for more information on how to use storage adapters.
+
 
 ## Proxies
 
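To make "follows the Map API" a bit more concrete, here is a minimal sketch of a hand-rolled storage adapter. It assumes, per the Keyv documentation, that an adapter only needs `get`, `set`, `delete` and `clear`; the class name is illustrative and not part of got or Keyv.

```js
const got = require('got');

// Hypothetical adapter: a thin wrapper around a Map, which already satisfies
// the get/set/delete/clear contract Keyv expects from a storage adapter.
class SimpleStorageAdapter {
	constructor() {
		this._store = new Map();
	}

	get(key) {
		return this._store.get(key);
	}

	set(key, value, ttl) {
		// Keyv tracks expiry itself for adapters that ignore `ttl`
		return this._store.set(key, value);
	}

	delete(key) {
		return this._store.delete(key);
	}

	clear() {
		return this._store.clear();
	}
}

got('todomvc.com', {cache: new SimpleStorageAdapter()});
```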
diff --git a/test/cache.js b/test/cache.js
new file mode 100644
index 0000000..04a8423
--- /dev/null
+++ b/test/cache.js
@@ -0,0 +1,107 @@
+import test from 'ava';
+import got from '../';
+import {createServer} from './helpers/server';
+
+let s;
+
+test.before('setup', async () => {
+	s = await createServer();
+
+	let noStoreIndex = 0;
+	s.on('/no-store', (req, res) => {
+		res.setHeader('Cache-Control', 'public, no-cache, no-store');
+		res.end(noStoreIndex.toString());
+		noStoreIndex++;
+	});
+
+	let cacheIndex = 0;
+	s.on('/cache', (req, res) => {
+		res.setHeader('Cache-Control', 'public, max-age=60');
+		res.end(cacheIndex.toString());
+		cacheIndex++;
+	});
+
+	let status301Index = 0;
+	s.on('/301', (req, res) => {
+		if (status301Index === 0) {
+			res.setHeader('Cache-Control', 'public, max-age=60');
+			res.setHeader('Location', s.url + '/302');
+			res.statusCode = 301;
+		}
+		res.end();
+		status301Index++;
+	});
+
+	let status302Index = 0;
+	s.on('/302', (req, res) => {
+		if (status302Index === 0) {
+			res.setHeader('Cache-Control', 'public, max-age=60');
+			res.setHeader('Location', s.url + '/cache');
+			res.statusCode = 302;
+		}
+		res.end();
+		status302Index++;
+	});
+
+	await s.listen(s.port);
+});
+
+test('Non cacheable responses are not cached', async t => {
+	const endpoint = '/no-store';
+	const cache = new Map();
+
+	const firstResponseInt = Number((await got(s.url + endpoint, {cache})).body);
+	const secondResponseInt = Number((await got(s.url + endpoint, {cache})).body);
+
+	t.is(cache.size, 0);
+	t.true(firstResponseInt < secondResponseInt);
+});
+
+test('Cacheable responses are cached', async t => {
+	const endpoint = '/cache';
+	const cache = new Map();
+
+	const firstResponse = await got(s.url + endpoint, {cache});
+	const secondResponse = await got(s.url + endpoint, {cache});
+
+	t.is(cache.size, 1);
+	t.is(firstResponse.body, secondResponse.body);
+});
+
+test('Cached response is re-encoded to current encoding option', async t => {
+	const endpoint = '/cache';
+	const cache = new Map();
+	const firstEncoding = 'base64';
+	const secondEncoding = 'hex';
+
+	const firstResponse = await got(s.url + endpoint, {cache, encoding: firstEncoding});
+	const secondResponse = await got(s.url + endpoint, {cache, encoding: secondEncoding});
+
+	const expectedSecondResponseBody = Buffer.from(firstResponse.body, firstEncoding).toString(secondEncoding);
+
+	t.is(cache.size, 1);
+	t.is(secondResponse.body, expectedSecondResponseBody);
+});
+
+test('Redirects are cached and re-used internally', async t => {
+	const endpoint = '/301';
+	const cache = new Map();
+
+	const firstResponse = await got(s.url + endpoint, {cache});
+	const secondResponse = await got(s.url + endpoint, {cache});
+
+	t.is(cache.size, 3);
+	t.is(firstResponse.body, secondResponse.body);
+});
+
+test('Cache error throws got.CacheError', async t => {
+	const endpoint = '/no-store';
+	const cache = {};
+
+	const err = await t.throws(got(s.url + endpoint, {cache}));
+	t.is(err.name, 'CacheError');
+});
+
+test.after('cleanup', async () => {
+	await s.close();
+});
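As a companion to the last test above, this is how the new error class surfaces to callers; the invalid `{}` adapter from that test is reused here to force a cache failure.

```js
const got = require('got');

(async () => {
	// A plain object is not a valid storage adapter, so the cache layer fails
	const err = await got('todomvc.com', {cache: {}}).catch(err => err);

	console.log(err instanceof got.CacheError);
	//=> true
	console.log(err.name);
	//=> 'CacheError'
})();
```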