Merge pull request #47 from auth0/remove-in-process-cache
feat: remove local cache
joseluisdiaz authored Sep 12, 2023
2 parents f8ad45f + e552ed5 commit 7377314
Showing 6 changed files with 1 addition and 105 deletions.
1 change: 1 addition & 0 deletions .travis.yml
@@ -6,3 +6,4 @@ node_js:
   - 12
   - 14
   - 16
+  - 18

1 change: 0 additions & 1 deletion README.md
@@ -52,7 +52,6 @@ Buckets:
 - `per_interval` (number): is the amount of tokens that the bucket receive on every interval.
 - `interval` (number): defines the interval in milliseconds.
 - `unlimited` (boolean = false): unlimited requests (skip take).
-- `enable_cache` (boolean = false): caching non-conformant answers of the bucket until the next drip.

 Ping:

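For context, the options above describe a named bucket in the limitd-redis configuration. The sketch below is illustrative only (the `ip` bucket name and every value are invented, not taken from this repository); it shows the option removed by this commit commented out.

```js
// Illustrative bucket definition; the name and numbers are invented for this sketch.
const buckets = {
  ip: {
    size: 10,          // maximum number of tokens the bucket can hold
    per_interval: 5,   // tokens the bucket receives on every interval
    interval: 60000,   // interval length in milliseconds
    unlimited: false   // when true, take() is skipped and every request conforms
    // enable_cache: true  // option removed by this commit
  }
};
```
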
29 changes: 0 additions & 29 deletions lib/db.js
@@ -4,7 +4,6 @@ const _ = require('lodash');
 const async = require('async');
 const utils = require('./utils');
 const Redis = require('ioredis');
-const TTLCache = require('@isaacs/ttlcache');
 const { validateParams } = require('./validation');
 const DBPing = require("./db_ping");
 const EventEmitter = require('events').EventEmitter;
@@ -107,11 +106,6 @@
       this.emit('node error', err, node);
     });

-    this.cache = new TTLCache({
-      max: config.cacheSize || 1000,
-      ttl: 1000,
-      checkAgeOnGet: true
-    });
   }

   #setupPing(config) {
@@ -188,10 +182,6 @@
     return Math.ceil((now + msToCompletion) / 1000);
   }

-  cacheEnabled(bucket) {
-    return bucket.enable_cache;
-  }
-
   /**
    * Take N elements from a bucket if available.
    *
@@ -207,7 +197,6 @@
     const bucket = this.buckets[params.type];
     const bucketKeyConfig = this.bucketKeyConfig(bucket, params);

-    const cacheEnabled = this.cacheEnabled(bucketKeyConfig);
     const key = `${params.type}:${params.key}`;

     const count = this._determineCount({
@@ -223,17 +212,9 @@
         reset: Math.ceil(Date.now() / 1000),
         limit: bucketKeyConfig.size,
         delayed: false,
-        cached: false
       });
     }

-    if (cacheEnabled) {
-      const cached = this.cache.get(key);
-      if (cached) {
-        return callback(null, cached);
-      }
-    }
-
     this.redis.take(key,
       bucketKeyConfig.ms_per_interval || 0,
       bucketKeyConfig.size,
@@ -255,18 +236,8 @@
         reset: Math.ceil(reset / 1000),
         limit: bucketKeyConfig.size,
         delayed: false,
-        cached: false
       };

-      if (cacheEnabled && conformant === false) {
-        // cache if bucket is empty and only until almost the moment it should get a new token
-        const msUntilFull = reset - currentMS;
-        const ttl = Math.floor(msUntilFull / bucketKeyConfig.size * 0.99);
-        if (ttl > 1) { // pointless to cache less than 1ms and prevent against negative values
-          this.cache.set(key, Object.assign({}, res, {cached: true}), { ttl });
-        }
-      }
-
       return callback(null, res);
     });
   }

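With the in-process cache removed, repeated `take()` calls against an empty bucket now always reach Redis instead of being answered from a local TTL cache, and the response no longer carries a `cached` flag; the fields still visible in this diff are `conformant`, `remaining`, `reset`, `limit`, and `delayed`. A minimal usage sketch follows (the `db` instance, the `ip` bucket type, and the key are illustrative, not part of this commit):

```js
// Illustrative only: assumes `db` is a LimitDBRedis instance configured with an `ip` bucket.
db.take({ type: 'ip', key: '10.0.0.1', count: 1 }, (err, res) => {
  if (err) {
    return console.error('take failed', err);
  }
  // Response fields after this commit (no `cached` flag any more):
  //   res.conformant - whether the requested tokens were granted
  //   res.remaining  - tokens left in the bucket
  //   res.reset      - unix time, in seconds, when the bucket is full again
  //   res.limit      - the configured bucket size
  //   res.delayed    - false in the code paths shown in this diff
  if (!res.conformant) {
    // Every non-conformant answer is now computed by Redis; the removed cache
    // used to short-circuit these calls until just before the next drip.
  }
});
```

For reference, the deleted short-circuit kept a non-conformant answer for `Math.floor(msUntilFull / size * 0.99)` milliseconds, where `msUntilFull` is the time until the bucket is full again: a size-10 bucket that refills completely in 10 seconds would cache the answer for roughly 990 ms, so the entry expired just before the next token dripped.
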
1 change: 0 additions & 1 deletion lib/utils.js
@@ -17,7 +17,6 @@ function normalizeTemporals(params) {
     'interval',
     'size',
     'unlimited',
-    'enable_cache'
   ]);

   INTERVAL_SHORTCUTS.forEach(intervalShortcut => {

1 change: 0 additions & 1 deletion package.json
@@ -13,7 +13,6 @@
   "author": "Auth0",
   "license": "MIT",
   "dependencies": {
-    "@isaacs/ttlcache": "^1.4.0",
     "async": "^2.6.1",
     "disyuntor": "^3.5.0",
     "ioredis": "^4.28.5",

73 changes: 0 additions & 73 deletions test/db.tests.js
@@ -530,79 +530,6 @@ describe('LimitDBRedis', () => {
       });
     });

-    it('should cache buckets intervals until their reset', (done) => {
-      db.take({type: 'cached', key: 'test', count: 3}, (err, res) => {
-        assert.ifError(err);
-        assert.equal(res.conformant, true);
-        assert.equal(res.remaining, 0);
-        db.take({type: 'cached', key: 'test'}, (err, res) => {
-          assert.ifError(err);
-          assert.equal(res.conformant, false);
-          assert.equal(res.remaining, 0);
-          assert.equal(res.cached, false);
-          const ttl = db.cache.getRemainingTTL('cached:test');
-          assert(ms('30m') > ttl);
-          assert(ms('29m') < ttl);
-          done();
-        });
-      });
-    });
-
-    it('should cache buckets accurately in small windows', (done) => {
-      db.take({type: 'cached', key: 'faster', count: 3}, (err, res) => {
-        assert.ifError(err);
-        assert.equal(res.conformant, true);
-        assert.equal(res.remaining, 0);
-        db.take({type: 'cached', key: 'faster'}, (err, res) => {
-          assert.ifError(err);
-          assert.equal(res.conformant, false);
-          assert.equal(res.remaining, 0);
-          assert.equal(res.cached, false);
-          const ttl = db.cache.getRemainingTTL('cached:faster');
-          assert(ms('1s') > ttl);
-          assert(ms('900ms') < ttl);
-          done();
-        });
-      });
-    });
-
-    it('should not cache when enable_cache is undefined', (done) => {
-      db.take({type: 'cached', key: 'disabled', count: 5}, (err, res) => {
-        assert.ifError(err);
-        assert.equal(res.conformant, true);
-        assert.equal(res.remaining, 0);
-        db.take({type: 'cached', key: 'disabled'}, (err, res) => {
-          assert.ifError(err);
-          assert.equal(res.conformant, false);
-          assert.equal(res.remaining, 0);
-          assert.equal(res.cached, false);
-          assert.equal(db.cache.has('cached:disabled'), false);
-          done();
-        });
-      });
-    });
-
-    it('should indicate the response came from cache', (done) => {
-      db.take({type: 'cached', key: 'test', count: 3}, (err, res) => {
-        assert.ifError(err);
-        assert.equal(res.conformant, true);
-        assert.equal(res.remaining, 0);
-        db.take({type: 'cached', key: 'test'}, (err, res) => {
-          assert.ifError(err);
-          assert.equal(res.conformant, false);
-          assert.equal(res.remaining, 0);
-          assert.equal(res.cached, false);
-
-          db.take({type: 'cached', key: 'test'}, (err, res) => {
-            assert.ifError(err);
-            assert.equal(res.conformant, false);
-            assert.equal(res.remaining, 0);
-            assert.equal(res.cached, true);
-            done();
-          });
-        });
-      });
-    });
   });

   describe('PUT', () => {
