
Merge branch 'release/0.1.0'

Bryan Donovan committed 12 years ago
commit 6231327613 (hotfix/0.7.1, 0.1.0)
12 changed files:

1. History.md (4 changed lines)
2. Makefile (1 changed line)
3. README.md (35 changed lines)
4. examples/example.js (35 changed lines)
5. examples/redis_example/example.js (47 changed lines)
6. examples/redis_example/redis_store.js (86 changed lines)
7. lib/stores/redis.js (44 changed lines)
8. package.json (6 changed lines)
9. test/caching.unit.js (141 changed lines)
10. test/multi_caching.unit.js (71 changed lines)
11. test/run.js (4 changed lines)
12. test/stores/redis.unit.js (18 changed lines)

History.md (4 changed lines)

@@ -1,3 +1,7 @@
+- 0.1.0 2013-10-13
+Removing built-in Redis store to emphasize that you should plug in your own
+cache store.
 - 0.0.5 2013-10-13
 Removing hiredis requirement.

Makefile (1 changed line)

@@ -20,6 +20,7 @@ test-cov: cover check-coverage
 lint:
 ./node_modules/.bin/jshint ./lib --config $(BASE)/.jshintrc && \
 ./node_modules/.bin/jshint ./test --config $(BASE)/.jshintrc
+./node_modules/.bin/jshint ./examples --config $(BASE)/.jshintrc
 .PHONY: test

README.md (35 changed lines)

@@ -22,19 +22,20 @@ priority cache(s) first.
 ## Overview
-First, node-cache-manager features the standard functions you'd expect in most caches:
+First, it includes a `wrap` function that lets you wrap any function in cache.
+(Note, this was inspired by [node-caching](https://github.com/mape/node-caching).)
+Second, node-cache-manager features a built-in memory cache (using [node-lru-cache](https://github.com/isaacs/node-lru-cache)),
+with the standard functions you'd expect in most caches:
 set(key, val, cb)
 get(key, cb)
 del(key, cb)
-Second, it includes a `wrap` function that lets you wrap any function in cache.
-(Note, this was inspired by [node-caching](https://github.com/mape/node-caching).)
 Third, node-cache-manager lets you set up a tiered cache strategy. This may be of
 limited use in most cases, but imagine a scenario where you expect tons of
-traffic, and don't want to hit Redis for every request. You decide to store
-the most commonly-requested data in an in-memory cache (like [node-lru-cache](https://github.com/isaacs/node-lru-cache)),
+traffic, and don't want to hit your primary cache (like Redis) for every request.
+You decide to store the most commonly-requested data in an in-memory cache,
 perhaps with a very short timeout and/or a small data size limit. But you
 still want to store the data in Redis for backup, and for the requests that
 aren't as common as the ones you want to store in memory. This is something
@@ -43,22 +44,24 @@ node-cache-manager handles easily and transparently.
 ## Usage Examples
-See examples below and in the examples directory.
+See ``examples/redis_example`` for an example of how to implement a
+Redis cache store with connection pooling.
 ### Single Store
 ```javascript
 var cache_manager = require('cache-manager');
-var redis_cache = cache_manager.caching({store: 'redis', db: 1, ttl: 100/*seconds*/});
 var memory_cache = cache_manager.caching({store: 'memory', max: 100, ttl: 10/*seconds*/});
 // Note: callback is optional in set() and del().
-redis_cache.set('foo', 'bar', function(err) {
+memory_cache.set('foo', 'bar', function(err) {
 if (err) { throw err; }
-redis_cache.get('foo', function(err, result) {
+memory_cache.get('foo', function(err, result) {
 console.log(result);
 // >> 'bar'
-redis_cache.del('foo', function(err) {});
+memory_cache.del('foo', function(err) {});
 });
 });
@@ -72,13 +75,13 @@ node-cache-manager handles easily and transparently.
 var user_id = 123;
 var key = 'user_' + user_id;
-redis_cache.wrap(key, function (cb) {
+memory_cache.wrap(key, function (cb) {
 get_user(user_id, cb);
 }, function (err, user) {
 console.log(user);
-// Second time fetches user from redis_cache
+// Second time fetches user from memory_cache
-redis_cache.wrap(key, function (cb) {
+memory_cache.wrap(key, function (cb) {
 get_user(user_id, cb);
 }, function (err, user) {
 console.log(user);
@@ -95,7 +98,7 @@ node-cache-manager handles easily and transparently.
 #### Custom Stores
 You can use your own custom store by creating one with the same API as the
-build-in redis and memory stores. To use your own store, you can either pass
+build-in memory stores (such as a redis or memcached store). To use your own store, you can either pass
 in an instance of it, or pass in the path to the module.
 E.g.,
@@ -110,7 +113,7 @@ E.g.,
 ### Multi-Store
 ```javascript
-var multi_cache = cache_manager.multi_caching([memory_cache, redis_cache]);
+var multi_cache = cache_manager.multi_caching([memory_cache, some_other_cache]);
 user_id2 = 456;
 key2 = 'user_' + user_id;
@@ -135,7 +138,7 @@ E.g.,
 // Second time fetches user from memory_cache, since it's highest priority.
 // If the data expires in the memory cache, the next fetch would pull it from
-// the Redis cache, and set the data in memory again.
+// the 'some_other_cache', and set the data in memory again.
 multi_cache.wrap(key2, function (cb) {
 get_user(user_id2, cb);
 }, function (err, user) {

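As an aside on the "Custom Stores" section above: the README says a custom store can be passed either as an instance or as a module/path. A minimal sketch of both forms, modeled on the `create(args)` factory that examples/redis_example/redis_store.js (below) exposes; the require path and option values here are illustrative, not taken from this commit:

```javascript
var cache_manager = require('cache-manager');
// Hypothetical local module exposing create(args), like examples/redis_example/redis_store.js
var redis_store = require('./redis_store');

// Pass the store module itself; cache-manager builds the store via its create() factory,
// as examples/redis_example/example.js in this commit does.
var redis_cache = cache_manager.caching({store: redis_store, db: 0, ttl: 100/*seconds*/});

// Or pass an already-created instance exposing get/set/del.
var redis_instance_cache = cache_manager.caching({store: redis_store.create({db: 0, ttl: 100})});
```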
examples/example.js (35 changed lines)

@@ -1,14 +1,17 @@
-var cache_manager = require('cache-manager');
+var cache_manager = require('../');
-var redis_cache = cache_manager.caching({store: 'redis', db: 1, ttl: 100/*seconds*/});
 var memory_cache = cache_manager.caching({store: 'memory', max: 100, ttl: 10/*seconds*/});
+var memory_cache2 = cache_manager.caching({store: 'memory', max: 100, ttl: 100/*seconds*/});
-redis_cache.set('foo', 'bar', function(err) {
+//
+// Basic usage
+//
+memory_cache2.set('foo', 'bar', function (err) {
 if (err) { throw err; }
-redis_cache.get('foo', function(err, result) {
+memory_cache2.get('foo', function (err, result) {
 console.log(result);
 // >> 'bar'
-redis_cache.del('foo', function(err) {});
+memory_cache2.del('foo', function (err) { console.log(err); });
 });
 });
@@ -22,13 +25,16 @@ function get_user(id, cb) {
 var user_id = 123;
 var key = 'user_' + user_id;
-redis_cache.wrap(key, function (cb) {
+//
+// wrap() example
+//
+memory_cache2.wrap(key, function (cb) {
 get_user(user_id, cb);
 }, function (err, user) {
 console.log(user);
-// Second time fetches user from redis_cache
+// Second time fetches user from memory_cache2
-redis_cache.wrap(key, function (cb) {
+memory_cache2.wrap(key, function (cb) {
 get_user(user_id, cb);
 }, function (err, user) {
 console.log(user);
@@ -41,9 +47,9 @@ redis_cache.wrap(key, function (cb) {
 // { id: 123, name: 'Bob' }
-var multi_cache = cache_manager.multi_caching([memory_cache, redis_cache]);
+var multi_cache = cache_manager.multi_caching([memory_cache, memory_cache2]);
-user_id2 = 456;
+var user_id2 = 456;
-key2 = 'user_' + user_id;
+var key2 = 'user_' + user_id;
 multi_cache.wrap(key2, function (cb) {
 get_user(user_id2, cb);
@@ -60,16 +66,17 @@ multi_cache.wrap(key2, function (cb) {
 });
 // Sets in all caches.
-multi_cache.set('foo2', 'bar2', function(err) {
+multi_cache.set('foo2', 'bar2', function (err) {
 if (err) { throw err; }
 // Fetches from highest priority cache that has the key.
-multi_cache.get('foo2', function(err, result) {
+multi_cache.get('foo2', function (err, result) {
 console.log(result);
 // >> 'bar2'
 // Delete from all caches
-multi_cache.del('foo2', function(err) {
+multi_cache.del('foo2', function (err) {
+console.log(err);
 process.exit();
 });
 });

examples/redis_example/example.js (new file, 47 lines)

@@ -0,0 +1,47 @@
+// Setup:
+// npm install redis
+// npm install sol-redis-pool
+var cache_manager = require('../../');
+var redis_store = require('./redis_store');
+var redis_cache = cache_manager.caching({store: redis_store, db: 0, ttl: 100/*seconds*/});
+redis_cache.set('foo', 'bar', function (err) {
+if (err) { throw err; }
+redis_cache.get('foo', function (err, result) {
+console.log(result);
+// >> 'bar'
+redis_cache.del('foo', function (err) { console.log(err); });
+});
+});
+function get_user(id, cb) {
+setTimeout(function () {
+console.log("Returning user from slow database.");
+cb(null, {id: id, name: 'Bob'});
+}, 100);
+}
+var user_id = 123;
+var key = 'user_' + user_id;
+redis_cache.wrap(key, function (cb) {
+get_user(user_id, cb);
+}, function (err, user) {
+console.log(user);
+// Second time fetches user from redis_cache
+redis_cache.wrap(key, function (cb) {
+get_user(user_id, cb);
+}, function (err, user) {
+console.log(user);
+});
+});
+// Outputs:
+// Returning user from slow database.
+// { id: 123, name: 'Bob' }
+// { id: 123, name: 'Bob' }
+process.exit();

examples/redis_example/redis_store.js (new file, 86 lines)

@@ -0,0 +1,86 @@
+/**
+ * This is a very basic example of how you can implement your own Redis-based
+ * cache store with connection pooling.
+ */
+var RedisPool = require('sol-redis-pool');
+function redis_store(args) {
+args = args || {};
+var self = {};
+var ttl = args.ttl;
+self.name = 'redis';
+self.client = require('redis').createClient(args.port, args.host, args);
+var redis_options = {
+redis_host: args.host || '127.0.0.1',
+redis_port: args.port || 6379
+};
+var pool = new RedisPool(redis_options);
+function connect(cb) {
+pool.acquire(function (err, conn) {
+if (err) {
+pool.release(conn);
+return cb(err);
+}
+if (args.db || args.db === 0) {
+conn.select(args.db);
+}
+cb(null, conn);
+});
+}
+self.get = function (key, cb) {
+connect(function (err, conn) {
+if (err) { return cb(err); }
+conn.get(key, function (err, result) {
+if (err) { pool.release(conn); return cb(err); }
+cb(null, JSON.parse(result));
+});
+});
+};
+self.set = function (key, value, cb) {
+connect(function (err, conn) {
+if (err) { return cb(err); }
+if (ttl) {
+conn.setex(key, ttl, JSON.stringify(value), function (err, result) {
+pool.release(conn);
+cb(err, result);
+});
+} else {
+conn.set(key, JSON.stringify(value), function (err, result) {
+pool.release(conn);
+cb(err, result);
+});
+}
+});
+};
+self.del = function (key, cb) {
+connect(function (err, conn) {
+if (err) { return cb(err); }
+conn.del(key, function (err, result) {
+pool.release(conn);
+cb(err, result);
+});
+});
+};
+return self;
+}
+var methods = {
+create: function (args) {
+return redis_store(args);
+}
+};
+module.exports = methods;

lib/stores/redis.js (deleted, 44 lines)

@@ -1,44 +0,0 @@
-/**
- * Note: You very likely want to use your own Redis-based cache store instead
- * of this one, especially for connection pooling. This is primarily an
- * example implementation.
- */
-function redis_store(args) {
-args = args || {};
-var self = {};
-var ttl = args.ttl;
-self.name = 'redis';
-self.client = require('redis').createClient(args.port, args.host, args);
-if (args.db) {
-self.client.select(args.db);
-}
-self.get = function (key, cb) {
-self.client.get(key, function (err, result) {
-cb(err, JSON.parse(result));
-});
-};
-self.set = function (key, value, cb) {
-if (ttl) {
-self.client.setex(key, ttl, JSON.stringify(value), cb);
-} else {
-self.client.set(key, JSON.stringify(value), cb);
-}
-};
-self.del = function (key, cb) {
-self.client.del(key, cb);
-};
-return self;
-}
-var methods = {
-create: function (args) {
-return redis_store(args);
-}
-};
-module.exports = methods;

package.json (6 changed lines)

@@ -1,6 +1,6 @@
 {
 "name": "cache-manager",
-"version": "0.0.5",
+"version": "0.1.0",
 "description": "Cache module for Node.js",
 "main": "index.js",
 "scripts": {
@@ -12,7 +12,6 @@
 },
 "keywords": [
 "cache",
-"redis",
 "lru-cache",
 "memory cache",
 "multiple cache"
@@ -21,8 +20,7 @@
 "license": "MIT",
 "dependencies": {
 "async": ">=0.1.22",
-"lru-cache": ">=2.3.0",
-"redis": ">=0.6.7"
+"lru-cache": ">=2.3.0"
 },
 "devDependencies": {
 "istanbul": ">=0.1.29",

test/caching.unit.js (141 changed lines)

@@ -1,6 +1,5 @@
 var assert = require('assert');
 var sinon = require('sinon');
-var redis = require('redis');
 var support = require('./support');
 var check_err = support.check_err;
 var caching = require('../index').caching;
@@ -20,7 +19,7 @@ describe("caching", function () {
 var value;
 describe("get() and set()", function () {
-['redis', 'memory'].forEach(function (store) {
+['memory'].forEach(function (store) {
 context("using " + store + " store", function () {
 beforeEach(function () {
 cache = caching({store: store});
@@ -52,7 +51,7 @@ describe("caching", function () {
 });
 describe("del()", function () {
-['redis', 'memory'].forEach(function (store) {
+['memory'].forEach(function (store) {
 context("using " + store + " store", function () {
 beforeEach(function (done) {
 cache = caching({store: store});
@@ -98,123 +97,6 @@ describe("caching", function () {
 });
 describe("wrap()", function () {
-context("using redis store", function () {
-var redis_client;
-before(function () {
-redis_client = redis.createClient();
-sinon.stub(redis, 'createClient').returns(redis_client);
-});
-beforeEach(function () {
-cache = caching({store: 'redis'});
-key = support.random.string(20);
-name = support.random.string();
-});
-after(function () {
-redis.createClient.restore();
-});
-it("calls back with the result of the wrapped function", function (done) {
-cache.wrap(key, function (cb) {
-methods.get_widget(name, cb);
-}, function (err, widget) {
-check_err(err);
-assert.deepEqual(widget, {name: name});
-done();
-});
-});
-it("caches the result of the function in redis", function (done) {
-cache.wrap(key, function (cb) {
-methods.get_widget(name, cb);
-}, function (err, widget) {
-check_err(err);
-assert.ok(widget);
-redis_client.get(key, function (err, result) {
-check_err(err);
-assert.deepEqual(JSON.parse(result), {name: name});
-done();
-});
-});
-});
-context("when wrapped function calls back with an error", function () {
-it("calls back with that error and doesn't cache result", function (done) {
-var fake_error = new Error(support.random.string());
-sinon.stub(methods, 'get_widget', function (name, cb) {
-cb(fake_error, {name: name});
-});
-cache.wrap(key, function (cb) {
-methods.get_widget(name, cb);
-}, function (err, widget) {
-methods.get_widget.restore();
-assert.equal(err, fake_error);
-assert.ok(!widget);
-redis_client.get(key, function (err, result) {
-check_err(err);
-assert.ok(!result);
-done();
-});
-});
-});
-});
-it("retrieves data from redis when available", function (done) {
-cache.wrap(key, function (cb) {
-methods.get_widget(name, cb);
-}, function (err, widget) {
-check_err(err);
-assert.ok(widget);
-redis_client.get(key, function (err, result) {
-check_err(err);
-assert.ok(result);
-sinon.spy(redis_client, 'get');
-cache.wrap(key, function (cb) {
-methods.get_widget(name, cb);
-}, function (err, widget) {
-check_err(err);
-assert.deepEqual(widget, {name: name});
-assert.ok(redis_client.get.calledWith(key));
-redis_client.get.restore();
-done();
-});
-});
-});
-});
-context("when using ttl", function () {
-beforeEach(function () {
-ttl = 50;
-cache = caching({store: 'redis', ttl: ttl});
-});
-it("expires cached result after ttl seconds", function (done) {
-cache.wrap(key, function (cb) {
-methods.get_widget(name, cb);
-}, function (err, widget) {
-check_err(err);
-assert.ok(widget);
-redis_client.ttl(key, function (err, result) {
-check_err(err);
-support.assert_within(result, ttl, 2);
-done();
-});
-});
-});
-});
-});
 describe("using memory (lru-cache) store", function () {
 var memory_store_stub;
@@ -339,6 +221,25 @@ describe("caching", function () {
 });
 });
 });
+context("when wrapped function calls back with an error", function () {
+it("calls back with that error", function (done) {
+var fake_error = new Error(support.random.string());
+sinon.stub(methods, 'get_widget', function (name, cb) {
+cb(fake_error, {name: name});
+});
+cache.wrap(key, function (cb) {
+methods.get_widget(name, cb);
+}, function (err, widget) {
+methods.get_widget.restore();
+assert.equal(err, fake_error);
+assert.ok(!widget);
+done();
+});
+});
+});
 });
 });

test/multi_caching.unit.js (71 changed lines)

@@ -1,6 +1,5 @@
 var assert = require('assert');
 var sinon = require('sinon');
-var redis = require('redis');
 var support = require('./support');
 var check_err = support.check_err;
 var caching = require('../index').caching;
@@ -14,22 +13,20 @@ var methods = {
 };
 describe("multi_caching", function () {
-var redis_cache;
 var memory_cache;
 var memory_cache2;
+var memory_cache3;
 var multi_cache;
 var key;
 var memory_ttl;
-var redis_ttl;
 var name;
 beforeEach(function () {
 memory_ttl = 0.1;
-redis_ttl = 1;
 memory_cache = caching({store: 'memory', ttl: memory_ttl});
 memory_cache2 = caching({store: 'memory', ttl: memory_ttl});
-redis_cache = caching({store: 'redis', ttl: redis_ttl});
+memory_cache3 = caching({store: 'memory', ttl: memory_ttl});
 key = support.random.string(20);
 name = support.random.string();
@@ -39,7 +36,7 @@ describe("multi_caching", function () {
 var value;
 beforeEach(function () {
-multi_cache = multi_caching([memory_cache, redis_cache, memory_cache2]);
+multi_cache = multi_caching([memory_cache, memory_cache2, memory_cache3]);
 key = support.random.string(20);
 value = support.random.string();
 });
@@ -51,11 +48,11 @@ describe("multi_caching", function () {
 memory_cache.get(key, function (err, result) {
 assert.equal(result, value);
-redis_cache.get(key, function (err, result) {
+memory_cache2.get(key, function (err, result) {
 check_err(err);
 assert.equal(result, value);
-memory_cache2.get(key, function (err, result) {
+memory_cache3.get(key, function (err, result) {
 check_err(err);
 assert.equal(result, value);
 done();
@@ -73,11 +70,11 @@ describe("multi_caching", function () {
 memory_cache.get(key, function (err, result) {
 assert.equal(result, value);
-redis_cache.get(key, function (err, result) {
+memory_cache2.get(key, function (err, result) {
 check_err(err);
 assert.equal(result, value);
-memory_cache2.get(key, function (err, result) {
+memory_cache3.get(key, function (err, result) {
 check_err(err);
 assert.equal(result, value);
 done();
@@ -91,7 +88,7 @@ describe("multi_caching", function () {
 describe("get()", function () {
 it("gets data from first cache that has it", function (done) {
-redis_cache.set(key, value, function (err) {
+memory_cache3.set(key, value, function (err) {
 check_err(err);
 multi_cache.get(key, function (err, result) {
@@ -114,11 +111,11 @@ describe("multi_caching", function () {
 memory_cache.get(key, function (err, result) {
 assert.ok(!result);
-redis_cache.get(key, function (err, result) {
+memory_cache2.get(key, function (err, result) {
 check_err(err);
 assert.ok(!result);
-memory_cache2.get(key, function (err, result) {
+memory_cache3.get(key, function (err, result) {
 check_err(err);
 assert.ok(!result);
 done();
@@ -139,11 +136,11 @@ describe("multi_caching", function () {
 memory_cache.get(key, function (err, result) {
 assert.ok(!result);
-redis_cache.get(key, function (err, result) {
+memory_cache2.get(key, function (err, result) {
 check_err(err);
 assert.ok(!result);
-memory_cache2.get(key, function (err, result) {
+memory_cache3.get(key, function (err, result) {
 check_err(err);
 assert.ok(!result);
 done();
@@ -157,20 +154,9 @@ describe("multi_caching", function () {
 });
 describe("wrap()", function () {
-var redis_client;
-beforeEach(function () {
-redis_client = redis.createClient();
-sinon.stub(redis, 'createClient').returns(redis_client);
-});
-afterEach(function () {
-redis.createClient.restore();
-});
 describe("using a single cache store", function () {
 beforeEach(function () {
-multi_cache = multi_caching([redis_cache]);
+multi_cache = multi_caching([memory_cache3]);
 });
 it("calls back with the result of a function", function (done) {
@@ -184,7 +170,7 @@ describe("multi_caching", function () {
 });
 context("when wrapped function calls back with an error", function () {
-it("calls back with that error and doesn't cache result", function (done) {
+it("calls back with that error", function (done) {
 var fake_error = new Error(support.random.string());
 sinon.stub(methods, 'get_widget', function (name, cb) {
 cb(fake_error, {name: name});
@@ -196,12 +182,7 @@ describe("multi_caching", function () {
 methods.get_widget.restore();
 assert.equal(err, fake_error);
 assert.ok(!widget);
-redis_client.get(key, function (err, result) {
-check_err(err);
-assert.ok(!result);
-done();
-});
+done();
 });
 });
 });
@@ -209,7 +190,7 @@ describe("multi_caching", function () {
 describe("using two cache stores", function () {
 beforeEach(function () {
-multi_cache = multi_caching([memory_cache, redis_cache]);
+multi_cache = multi_caching([memory_cache, memory_cache3]);
 });
 it("calls back with the result of a function", function (done) {
@@ -233,7 +214,7 @@ describe("multi_caching", function () {
 check_err(err);
 assert.deepEqual(result, {name: name});
-redis_cache.get(key, function (err, result) {
+memory_cache3.get(key, function (err, result) {
 check_err(err);
 assert.deepEqual(result, {name: name});
 done();
@@ -253,7 +234,7 @@ describe("multi_caching", function () {
 check_err(err);
 assert.deepEqual(widget, {name: name});
-redis_cache.get(key, function (err, result) {
+memory_cache3.get(key, function (err, result) {
 check_err(err);
 assert.equal(result, null);
 done();
@@ -265,7 +246,7 @@ describe("multi_caching", function () {
 context("when value exists in second store but not first", function () {
 it("returns value from second store, sets it in first store", function (done) {
-redis_cache.set(key, {name: name}, function (err) {
+memory_cache3.set(key, {name: name}, function (err) {
 check_err(err);
 multi_cache.wrap(key, function (cb) {
@@ -287,7 +268,7 @@ describe("multi_caching", function () {
 describe("using three cache stores", function () {
 beforeEach(function () {
-multi_cache = multi_caching([memory_cache, redis_cache, memory_cache2]);
+multi_cache = multi_caching([memory_cache, memory_cache3, memory_cache2]);
 });
 it("calls back with the result of a function", function (done) {
@@ -311,11 +292,11 @@ describe("multi_caching", function () {
 check_err(err);
 assert.deepEqual(result, {name: name});
-redis_cache.get(key, function (err, result) {
+memory_cache2.get(key, function (err, result) {
 check_err(err);
 assert.deepEqual(result, {name: name});
-memory_cache2.get(key, function (err, result) {
+memory_cache3.get(key, function (err, result) {
 check_err(err);
 assert.deepEqual(result, {name: name});
 done();
@@ -336,11 +317,11 @@ describe("multi_caching", function () {
 check_err(err);
 assert.deepEqual(widget, {name: name});
-redis_cache.get(key, function (err, result) {
+memory_cache2.get(key, function (err, result) {
 check_err(err);
 assert.equal(result, null);
-memory_cache2.get(key, function (err, result) {
+memory_cache3.get(key, function (err, result) {
 check_err(err);
 assert.equal(result, null);
 done();
@@ -353,7 +334,7 @@ describe("multi_caching", function () {
 context("when value exists in second store only", function () {
 it("returns value from second store, sets it in first store, does not set third store", function (done) {
-redis_cache.set(key, {name: name}, function (err) {
+memory_cache3.set(key, {name: name}, function (err) {
 check_err(err);
 multi_cache.wrap(key, function (cb) {
@@ -388,7 +369,7 @@ describe("multi_caching", function () {
 check_err(err);
 assert.deepEqual(widget, {name: name});
-redis_cache.get(key, function (err, result) {
+memory_cache3.get(key, function (err, result) {
 check_err(err);
 assert.deepEqual(result, {name: name});

test/run.js (4 changed lines)

@@ -7,8 +7,8 @@ var optimist = require('optimist');
 var walk_dir = require('./support').walk_dir;
 var argv = optimist
-.usage("Usage: $0 -t [types] --reporter [reporter] --timeout [timeout]")
-.default({types: 'unit,functional', reporter: 'spec', timeout: 6000})
+.usage("Usage: $0 -t [types] --reporter [reporter] --timeout [timeout]")['default'](
+{types: 'unit,functional', reporter: 'spec', timeout: 6000})
 .describe('types', 'The types of tests to run, separated by commas. E.g., unit,functional,acceptance')
 .describe('reporter', 'The mocha test reporter to use.')
 .describe('timeout', 'The mocha timeout to use per test (ms).')

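The test/run.js change above only swaps the `.default(...)` call for the bracket form `['default'](...)`, presumably to avoid jshint complaints about the reserved word; the options themselves are unchanged. For reference, a hypothetical invocation using the flags from the usage string (the values shown are just the documented defaults): `node test/run.js -t unit --reporter spec --timeout 6000`.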
test/stores/redis.unit.js (deleted, 18 lines)

@@ -1,18 +0,0 @@
-var support = require('../support');
-var redis_store = require('../../lib/stores/redis');
-describe("redis store", function () {
-describe("instantiating", function () {
-it("lets us pass in a db arg", function (done) {
-// Not sure how to prove that it uses the specified db in this test,
-// but it does.
-var redis_cache = redis_store.create({db: 2});
-support.test_set_get_del(redis_cache, done);
-});
-it("lets us pass in no args", function (done) {
-var redis_cache = redis_store.create();
-support.test_set_get_del(redis_cache, done);
-});
-});
-});