
Add `cache` option (#284)

Luke Childs, 7 years ago (committed by Sindre Sorhus)
commit 3c7920507f
Changed files:
  1. index.js (18)
  2. package.json (1)
  3. readme.md (68)
  4. test/cache.js (107)

index.js (18)

@@ -7,6 +7,7 @@ const Transform = require('stream').Transform;
const urlLib = require('url');
const fs = require('fs');
const querystring = require('querystring');
const CacheableRequest = require('cacheable-request');
const duplexer3 = require('duplexer3');
const intoStream = require('into-stream');
const isStream = require('is-stream');
@@ -87,7 +88,8 @@ function requestAsEventEmitter(opts) {
let progressInterval;
const req = fn.request(opts, res => {
const cacheableRequest = new CacheableRequest(fn.request, opts.cache);
const cacheReq = cacheableRequest(opts, res => {
clearInterval(progressInterval);
ee.emit('uploadProgress', {
@@ -172,7 +174,7 @@ function requestAsEventEmitter(opts) {
const response = opts.decompress === true &&
typeof decompressResponse === 'function' &&
req.method !== 'HEAD' ? decompressResponse(progressStream) : progressStream;
opts.method !== 'HEAD' ? decompressResponse(progressStream) : progressStream;
if (!opts.decompress && ['gzip', 'deflate'].indexOf(res.headers['content-encoding']) !== -1) {
opts.encoding = null;
@@ -190,6 +192,9 @@ function requestAsEventEmitter(opts) {
});
});
cacheReq.on('error', err => ee.emit('error', new got.CacheError(err, opts)));
cacheReq.on('request', req => {
req.once('error', err => {
clearInterval(progressInterval);
@@ -247,6 +252,7 @@ function requestAsEventEmitter(opts) {
setImmediate(() => {
ee.emit('request', req);
});
});
};
setImmediate(() => {
@@ -434,6 +440,7 @@ function normalizeArguments(url, opts) {
{
path: '',
retries: 2,
cache: false,
decompress: true,
useElectronNet: false
},
@@ -589,6 +596,13 @@ class StdError extends Error {
}
}
got.CacheError = class extends StdError {
constructor(error, opts) {
super(error.message, error, opts);
this.name = 'CacheError';
}
};
got.RequestError = class extends StdError {
constructor(error, opts) {
super(error.message, error, opts);
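In the new request path, `fn.request` is no longer called directly: it is wrapped by `cacheable-request`, which returns a function with the same call signature and hands the underlying request back through a `request` event (hence the new `cacheReq.on('request', ...)` block above). A minimal standalone sketch of that pattern, using plain `http.request` and a `Map` cache instead of got's internals:

```js
const http = require('http');
const CacheableRequest = require('cacheable-request');

// Wrap http.request with a Map-backed cache, mirroring new CacheableRequest(fn.request, opts.cache)
const cacheableRequest = new CacheableRequest(http.request, new Map());

const cacheReq = cacheableRequest('http://example.com', response => {
	// Fresh responses are http.IncomingMessage streams; cached ones are responselike objects
	console.log(response.statusCode);
	response.resume();
});

cacheReq.on('error', err => console.error(err));
cacheReq.on('request', req => req.end());
```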

package.json (1)

@@ -50,6 +50,7 @@
"electron"
],
"dependencies": {
"cacheable-request": "^2.0.0",
"decompress-response": "^3.2.0",
"duplexer3": "^0.1.4",
"get-stream": "^3.0.0",

readme.md (68)

@@ -19,6 +19,7 @@ Created because [`request`](https://github.com/request/request) is bloated *(sev
- [Promise & stream API](#api)
- [Request cancelation](#aborting-the-request)
- [RFC compliant caching](#cache-adapters)
- [Follows redirects](#followredirect)
- [Retries on network failure](#retries)
- [Progress events](#onuploadprogress-progress)
@@ -69,6 +70,10 @@ It's a `GET` request by default, but can be changed in `options`.
Returns a Promise for a `response` object with a `body` property, a `url` property with the request URL or the final URL after redirects, and a `requestUrl` property with the original request URL.
The response object will normally be a [Node.js HTTP response stream](https://nodejs.org/api/http.html#http_class_http_incomingmessage); however, if returned from the cache it will be a [responselike object](https://github.com/lukechilds/responselike), which behaves in the same way.
The response will also have a `fromCache` property set with a boolean value.
##### url
Type: `string` `Object`
@@ -170,6 +175,13 @@ Decompress the response automatically.
If this is disabled, a compressed response is returned as a `Buffer`. This may be useful if you want to handle decompression yourself or stream the raw compressed data.
###### cache
Type: `Object`<br>
Default: `false`
[Cache adapter instance](#cache-adapters) for storing cached data.
###### useElectronNet
Type: `boolean`<br>
@@ -253,6 +265,10 @@ Each error contains (if available) `statusCode`, `statusMessage`, `host`, `hostn
In Promise mode, the `response` is attached to the error.
#### got.CacheError
When a cache method fails, for example if the database goes down, or there's a filesystem error.
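As the new test suite exercises, passing something that doesn't implement the Map API (an empty object, for instance) makes the cache layer fail, and the failure surfaces as this error. An illustrative sketch of catching it (the invalid `{}` cache is borrowed from `test/cache.js`):

```js
const got = require('got');

(async () => {
	try {
		// {} has no Map-style get/set, so the cache layer errors out
		await got('todomvc.com', {cache: {}});
	} catch (err) {
		if (err instanceof got.CacheError) {
			console.error('Cache failure:', err.message);
		}
	}
})();
```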
#### got.RequestError
When a request fails. Contains a `code` property with error class code, like `ECONNREFUSED`.
@@ -316,6 +332,58 @@ request.catch(err => {
request.cancel();
```
<a name="cache-adapters"></a>
## Cache
You can use the JavaScript `Map` type as an in-memory cache:
```js
const got = require('got');
const map = new Map();
(async () => {
let response = await got('todomvc.com', {cache: map});
console.log(response.fromCache);
//=> false
response = await got('todomvc.com', {cache: map});
console.log(response.fromCache);
//=> true
})();
```
Got uses [Keyv](https://github.com/lukechilds/keyv) internally to support a wide range of storage adapters. For something more scalable you could use an [official Keyv storage adapter](https://github.com/lukechilds/keyv#official-storage-adapters):
```
npm install @keyv/redis
```
```js
const got = require('got');
const KeyvRedis = require('@keyv/redis');
const redis = new KeyvRedis('redis://user:pass@localhost:6379');
got('todomvc.com', {cache: redis});
```
Got supports anything that follows the Map API, so it's easy to write your own storage adapter or use a third-party solution.
For example, the following are all valid storage adapters:
```js
const storageAdapter = new Map();
// or
const storageAdapter = require('./my-storage-adapter');
// or
const QuickLRU = require('quick-lru');
const storageAdapter = new QuickLRU({maxSize: 1000});
got('todomvc.com', {cache: storageAdapter});
```
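Because Keyv essentially needs Map-style `get`, `set`, `delete` and `clear` methods from a storage adapter, a hand-rolled one can be tiny. An illustrative sketch (the `SimpleAdapter` name is made up for this example):

```js
// Minimal adapter backed by a plain Map; like a Map, it ignores the TTL Keyv passes to set()
class SimpleAdapter {
	constructor() {
		this.store = new Map();
	}
	get(key) {
		return this.store.get(key);
	}
	set(key, value) {
		return this.store.set(key, value);
	}
	delete(key) {
		return this.store.delete(key);
	}
	clear() {
		return this.store.clear();
	}
}

got('todomvc.com', {cache: new SimpleAdapter()});
```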
View the [Keyv docs](https://github.com/lukechilds/keyv) for more information on how to use storage adapters.
## Proxies

test/cache.js (107)

@@ -0,0 +1,107 @@
import test from 'ava';
import got from '../';
import {createServer} from './helpers/server';
let s;
test.before('setup', async () => {
s = await createServer();
let noStoreIndex = 0;
s.on('/no-store', (req, res) => {
res.setHeader('Cache-Control', 'public, no-cache, no-store');
res.end(noStoreIndex.toString());
noStoreIndex++;
});
let cacheIndex = 0;
s.on('/cache', (req, res) => {
res.setHeader('Cache-Control', 'public, max-age=60');
res.end(cacheIndex.toString());
cacheIndex++;
});
let status301Index = 0;
s.on('/301', (req, res) => {
if (status301Index === 0) {
res.setHeader('Cache-Control', 'public, max-age=60');
res.setHeader('Location', s.url + '/302');
res.statusCode = 301;
}
res.end();
status301Index++;
});
let status302Index = 0;
s.on('/302', (req, res) => {
if (status302Index === 0) {
res.setHeader('Cache-Control', 'public, max-age=60');
res.setHeader('Location', s.url + '/cache');
res.statusCode = 302;
}
res.end();
status302Index++;
});
await s.listen(s.port);
});
test('Non cacheable responses are not cached', async t => {
const endpoint = '/no-store';
const cache = new Map();
const firstResponseInt = Number((await got(s.url + endpoint, {cache})).body);
const secondResponseInt = Number((await got(s.url + endpoint, {cache})).body);
t.is(cache.size, 0);
t.true(firstResponseInt < secondResponseInt);
});
test('Cacheable responses are cached', async t => {
const endpoint = '/cache';
const cache = new Map();
const firstResponse = await got(s.url + endpoint, {cache});
const secondResponse = await got(s.url + endpoint, {cache});
t.is(cache.size, 1);
t.is(firstResponse.body, secondResponse.body);
});
test('Cached response is re-encoded to current encoding option', async t => {
const endpoint = '/cache';
const cache = new Map();
const firstEncoding = 'base64';
const secondEncoding = 'hex';
const firstResponse = await got(s.url + endpoint, {cache, encoding: firstEncoding});
const secondResponse = await got(s.url + endpoint, {cache, encoding: secondEncoding});
const expectedSecondResponseBody = Buffer.from(firstResponse.body, firstEncoding).toString(secondEncoding);
t.is(cache.size, 1);
t.is(secondResponse.body, expectedSecondResponseBody);
});
test('Redirects are cached and re-used internally', async t => {
const endpoint = '/301';
const cache = new Map();
const firstResponse = await got(s.url + endpoint, {cache});
const secondResponse = await got(s.url + endpoint, {cache});
t.is(cache.size, 3);
t.is(firstResponse.body, secondResponse.body);
});
test('Cache error throws got.CacheError', async t => {
const endpoint = '/no-store';
const cache = {};
const err = await t.throws(got(s.url + endpoint, {cache}));
t.is(err.name, 'CacheError');
});
test.after('cleanup', async () => {
await s.close();
});