Remove the in-memory cache (#6037)
Shields has long had an in-memory cache with some complex logic for determining when the cached values are used and when they are flushed. At the time this was implemented, squeezing cache performance was helpful since there was no downstream cache. For years now we've used Cloudflare as a CDN, so trying to cache onboard is less useful than before. Furthermore, since the cache is very small and only used in fairly restrictive ways, it probably has very little impact on performance.
--- a/core/base-service/base-non-memory-caching.js
+++ /dev/null
@@ -1,74 +0,0 @@
-'use strict'
-
-const makeBadge = require('../../badge-maker/lib/make-badge')
-const BaseService = require('./base')
-const { MetricHelper } = require('./metric-helper')
-const { setCacheHeaders } = require('./cache-headers')
-const { makeSend } = require('./legacy-result-sender')
-const coalesceBadge = require('./coalesce-badge')
-const { prepareRoute, namedParamsForMatch } = require('./route')
-
-// Badges are subject to two independent types of caching: in-memory and
-// downstream.
-//
-// Services deriving from `NonMemoryCachingBaseService` are not cached in
-// memory on the server. This means that each request that hits the server
-// triggers another call to the handler. When using badges for server
-// diagnostics, that's useful!
-//
-// In contrast, the `handle()` function of most other `BaseService`
-// subclasses is wrapped in onboard, in-memory caching. See `lib/request-
-// handler.js` and `BaseService.prototype.register()`.
-//
-// All services, including those extending NonMemoryCachingBaseService, may
-// be cached _downstream_. This is governed by cache headers, which are
-// configured by the service, the user's request, and the server's default
-// cache length.
-module.exports = class NonMemoryCachingBaseService extends BaseService {
-  static register({ camp, metricInstance }, serviceConfig) {
-    const { cacheHeaders: cacheHeaderConfig } = serviceConfig
-    const { _cacheLength: serviceDefaultCacheLengthSeconds } = this
-    const { regex, captureNames } = prepareRoute(this.route)
-
-    const metricHelper = MetricHelper.create({
-      metricInstance,
-      ServiceClass: this,
-    })
-
-    camp.route(regex, async (queryParams, match, end, ask) => {
-      const metricHandle = metricHelper.startRequest()
-
-      const namedParams = namedParamsForMatch(captureNames, match, this)
-      const serviceData = await this.invoke(
-        {},
-        serviceConfig,
-        namedParams,
-        queryParams
-      )
-
-      const badgeData = coalesceBadge(
-        queryParams,
-        serviceData,
-        this.defaultBadgeData,
-        this
-      )
-
-      // The final capture group is the extension.
-      const format = (match.slice(-1)[0] || '.svg').replace(/^\./, '')
-      badgeData.format = format
-
-      const svg = makeBadge(badgeData)
-
-      setCacheHeaders({
-        cacheHeaderConfig,
-        serviceDefaultCacheLengthSeconds,
-        queryParams,
-        res: ask.res,
-      })
-
-      makeSend(format, ask.res, end)(svg)
-
-      metricHandle.noteResponseSent()
-    })
-  }
-}
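With this class gone, every service relies on downstream caching alone, the mechanism the deleted comment block describes. A minimal sketch of that idea, with illustrative names (`coalesceCacheLengthSeconds`, `applyCacheHeaders` are ours, not the real API in `./cache-headers`), and one assumption of ours: that a `cacheSeconds` query parameter may only lengthen, never shorten, the service default:

```js
'use strict'

// Sketch only; the real logic lives in ./cache-headers.
function coalesceCacheLengthSeconds(serviceDefaultSeconds, queryParams) {
  const requested = parseInt(queryParams.cacheSeconds, 10)
  // Assumption: user-requested lengths below the default are ignored.
  return Number.isNaN(requested)
    ? serviceDefaultSeconds
    : Math.max(serviceDefaultSeconds, requested)
}

function applyCacheHeaders(res, cacheLengthSeconds) {
  // `Cache-Control` governs the CDN and browsers; `Expires` mirrors the
  // behavior asserted in the request-handler spec further down.
  res.setHeader('Cache-Control', `max-age=${cacheLengthSeconds}`)
  res.setHeader(
    'Expires',
    new Date(Date.now() + cacheLengthSeconds * 1000).toUTCString()
  )
}
```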
--- a/core/base-service/index.js
+++ b/core/base-service/index.js
@@ -3,7 +3,6 @@
 const BaseService = require('./base')
 const BaseJsonService = require('./base-json')
 const BaseGraphqlService = require('./base-graphql')
-const NonMemoryCachingBaseService = require('./base-non-memory-caching')
 const BaseStaticService = require('./base-static')
 const BaseSvgScrapingService = require('./base-svg-scraping')
 const BaseXmlService = require('./base-xml')
@@ -22,7 +21,6 @@ module.exports = {
   BaseService,
   BaseJsonService,
   BaseGraphqlService,
-  NonMemoryCachingBaseService,
   BaseStaticService,
   BaseSvgScrapingService,
   BaseXmlService,
--- a/core/base-service/legacy-request-handler.js
+++ b/core/base-service/legacy-request-handler.js
@@ -1,7 +1,6 @@
 'use strict'
 
 const request = require('request')
-const queryString = require('query-string')
 const makeBadge = require('../../badge-maker/lib/make-badge')
 const { setCacheHeaders } = require('./cache-headers')
 const {
@@ -10,27 +9,10 @@ const {
   ShieldsRuntimeError,
 } = require('./errors')
 const { makeSend } = require('./legacy-result-sender')
-const LruCache = require('./lru-cache')
 const coalesceBadge = require('./coalesce-badge')
 
 const userAgent = 'Shields.io/2003a'
 
-// We avoid calling the vendor's server for computation of the information in a
-// number of badges.
-const minAccuracy = 0.75
-
-// The quotient of (vendor) data change frequency by badge request frequency
-// must be lower than this to trigger sending the cached data *before*
-// updating our data from the vendor's server.
-// Indeed, the accuracy of our badges are:
-// A(Δt) = 1 - min(# data change over Δt, # requests over Δt)
-//             / (# requests over Δt)
-//       = 1 - max(1, df) / rf
-const freqRatioMax = 1 - minAccuracy
-
-// Request cache size of 5MB (~5000 bytes/image).
-const requestCache = new LruCache(1000)
-
 // These query parameters are available to any badge. They are handled by
 // `coalesceBadge`.
 const globalQueryParams = new Set([
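A worked example of the accuracy heuristic removed above, using the slot fields the handler stored (`reqs`, `dataChange`, `time`, `interval`) and the constants as given (minAccuracy = 0.75, so freqRatioMax = 0.25); the numbers are illustrative:

```js
const freqRatioMax = 0.25 // = 1 - minAccuracy

// A badge requested 12 times whose upstream data changed twice:
const cached = {
  reqs: 12,
  dataChange: 2,
  time: Date.now() - 60 * 1000, // last request a minute ago
  interval: 5000, // vendor data considered fresh for 5 seconds
}

const tooSoon = Date.now() - cached.time < cached.interval // false: 60s > 5s
// 2 / 12 ≈ 0.17 <= 0.25, so A(Δt) >= 0.75: the cached badge is sent right
// away while the vendor is still queried to refresh the entry. Had it been
// `tooSoon`, the vendor would not have been contacted at all.
console.log(tooSoon || cached.dataChange / cached.reqs <= freqRatioMax) // true
```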
@@ -121,8 +103,6 @@ function handleRequest(cacheHeaderConfig, handlerOptions) {
       return
     }
 
-    const reqTime = new Date()
-
     // `defaultCacheLengthSeconds` can be overridden by
     // `serviceDefaultCacheLengthSeconds` (either by category or on a badge-
     // by-badge basis). Then in turn that can be overridden by
@@ -151,49 +131,10 @@ function handleRequest(cacheHeaderConfig, handlerOptions) {
       filteredQueryParams[key] = queryParams[key]
     })
 
-    // Use sindresorhus query-string because it sorts the keys, whereas the
-    // builtin querystring module relies on the iteration order.
-    const stringified = queryString.stringify(filteredQueryParams)
-    const cacheIndex = `${match[0]}?${stringified}`
-
-    // Should we return the data right away?
-    const cached = requestCache.get(cacheIndex)
-    let cachedVersionSent = false
-    if (cached !== undefined) {
-      // A request was made not long ago.
-      const tooSoon = +reqTime - cached.time < cached.interval
-      if (tooSoon || cached.dataChange / cached.reqs <= freqRatioMax) {
-        const svg = makeBadge(cached.data.badgeData)
-        setCacheHeadersOnResponse(
-          ask.res,
-          cached.data.badgeData.cacheLengthSeconds
-        )
-        makeSend(cached.data.format, ask.res, end)(svg)
-        cachedVersionSent = true
-        // We do not wish to call the vendor servers.
-        if (tooSoon) {
-          return
-        }
-      }
-    }
-
     // In case our vendor servers are unresponsive.
     let serverUnresponsive = false
     const serverResponsive = setTimeout(() => {
       serverUnresponsive = true
-      if (cachedVersionSent) {
-        return
-      }
-      if (requestCache.has(cacheIndex)) {
-        const cached = requestCache.get(cacheIndex)
-        const svg = makeBadge(cached.data.badgeData)
-        setCacheHeadersOnResponse(
-          ask.res,
-          cached.data.badgeData.cacheLengthSeconds
-        )
-        makeSend(cached.data.format, ask.res, end)(svg)
-        return
-      }
       ask.res.setHeader('Cache-Control', 'no-cache, no-store, must-revalidate')
       const badgeData = coalesceBadge(
         filteredQueryParams,
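The cache key removed above depended on `query-string` sorting its keys, which is exactly what the canonical-order tests further down asserted. A two-line demonstration of that property:

```js
const queryString = require('query-string')

console.log(queryString.stringify({ label: 'foo', color: '123' })) // color=123&label=foo
console.log(queryString.stringify({ color: '123', label: 'foo' })) // color=123&label=foo
```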
@@ -206,8 +147,6 @@ function handleRequest(cacheHeaderConfig, handlerOptions) {
       makeSend(extension, ask.res, end)(svg)
     }, 25000)
 
-    // Only call vendor servers when last request is older than…
-    let cacheInterval = 5000 // milliseconds
     function cachingRequest(uri, options, callback) {
       if (typeof options === 'function' && !callback) {
         callback = options
@@ -223,20 +162,7 @@ function handleRequest(cacheHeaderConfig, handlerOptions) {
       options.headers['User-Agent'] = userAgent
 
       let bufferLength = 0
-      const r = request(options, (err, res, body) => {
-        if (res != null && res.headers != null) {
-          const cacheControl = res.headers['cache-control']
-          if (cacheControl != null) {
-            const age = cacheControl.match(/max-age=([0-9]+)/)
-            // Would like to get some more test coverage on this before changing it.
-            // eslint-disable-next-line no-self-compare
-            if (age != null && +age[1] === +age[1]) {
-              cacheInterval = +age[1] * 1000
-            }
-          }
-        }
-        callback(err, res, body)
-      })
+      const r = request(options, callback)
       r.on('data', chunk => {
         bufferLength += chunk.length
         if (bufferLength > fetchLimitBytes) {
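The block removed above let a vendor's `Cache-Control: max-age` stretch the in-memory cache interval. Its parsing, replayed with an illustrative header value:

```js
let cacheInterval = 5000 // milliseconds, the default

const cacheControl = 'public, max-age=3600'
const age = cacheControl.match(/max-age=([0-9]+)/)
// `+age[1] === +age[1]` is false only for NaN, hence the self-compare.
if (age != null && +age[1] === +age[1]) {
  cacheInterval = +age[1] * 1000
}
console.log(cacheInterval) // 3600000: this vendor's data was cached for an hour
```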
@@ -264,30 +190,11 @@ function handleRequest(cacheHeaderConfig, handlerOptions) {
         return
       }
       clearTimeout(serverResponsive)
-      // Check for a change in the data.
-      let dataHasChanged = false
-      if (
-        cached !== undefined &&
-        cached.data.badgeData.message !== badgeData.message
-      ) {
-        dataHasChanged = true
-      }
       // Add format to badge data.
       badgeData.format = format
-      // Update information in the cache.
-      const updatedCache = {
-        reqs: cached ? cached.reqs + 1 : 1,
-        dataChange: cached ? cached.dataChange + (dataHasChanged ? 1 : 0) : 1,
-        time: +reqTime,
-        interval: cacheInterval,
-        data: { format, badgeData },
-      }
-      requestCache.set(cacheIndex, updatedCache)
-      if (!cachedVersionSent) {
-        const svg = makeBadge(badgeData)
-        setCacheHeadersOnResponse(ask.res, badgeData.cacheLengthSeconds)
-        makeSend(format, ask.res, end)(svg)
-      }
+      const svg = makeBadge(badgeData)
+      setCacheHeadersOnResponse(ask.res, badgeData.cacheLengthSeconds)
+      makeSend(format, ask.res, end)(svg)
     },
     cachingRequest
   )
@@ -299,15 +206,8 @@ function handleRequest(cacheHeaderConfig, handlerOptions) {
   }
 }
 
-function clearRequestCache() {
-  requestCache.clear()
-}
-
 module.exports = {
   handleRequest,
   promisify,
-  clearRequestCache,
-  // Expose for testing.
-  _requestCache: requestCache,
   userAgent,
 }
--- a/core/base-service/legacy-request-handler.spec.js
+++ b/core/base-service/legacy-request-handler.spec.js
@@ -6,11 +6,7 @@ const portfinder = require('portfinder')
 const Camp = require('@shields_io/camp')
 const got = require('../got-test-client')
 const coalesceBadge = require('./coalesce-badge')
-const {
-  handleRequest,
-  clearRequestCache,
-  _requestCache,
-} = require('./legacy-request-handler')
+const { handleRequest } = require('./legacy-request-handler')
 
 async function performTwoRequests(baseUrl, first, second) {
   expect((await got(`${baseUrl}${first}`)).statusCode).to.equal(200)
@@ -83,7 +79,6 @@ describe('The request handler', function () {
     camp.on('listening', () => done())
   })
   afterEach(function (done) {
-    clearRequestCache()
    if (camp) {
      camp.close(() => done())
      camp = null
@@ -196,57 +191,18 @@ describe('The request handler', function () {
 
-  describe('caching', function () {
     describe('standard query parameters', function () {
-      let handlerCallCount
-      beforeEach(function () {
-        handlerCallCount = 0
-      })
-
       function register({ cacheHeaderConfig }) {
         camp.route(
           /^\/testing\/([^/]+)\.(svg|png|gif|jpg|json)$/,
           handleRequest(
             cacheHeaderConfig,
             (queryParams, match, sendBadge, request) => {
-              ++handlerCallCount
               fakeHandler(queryParams, match, sendBadge, request)
             }
           )
         )
       }
 
-      context('With standard cache settings', function () {
-        beforeEach(function () {
-          register({ cacheHeaderConfig: standardCacheHeaders })
-        })
-
-        it('should cache identical requests', async function () {
-          await performTwoRequests(
-            baseUrl,
-            '/testing/123.svg',
-            '/testing/123.svg'
-          )
-          expect(handlerCallCount).to.equal(1)
-        })
-
-        it('should differentiate known query parameters', async function () {
-          await performTwoRequests(
-            baseUrl,
-            '/testing/123.svg?label=foo',
-            '/testing/123.svg?label=bar'
-          )
-          expect(handlerCallCount).to.equal(2)
-        })
-
-        it('should ignore unknown query parameters', async function () {
-          await performTwoRequests(
-            baseUrl,
-            '/testing/123.svg?foo=1',
-            '/testing/123.svg?foo=2'
-          )
-          expect(handlerCallCount).to.equal(1)
-        })
-      })
-
       it('should set the expires header to current time + defaultCacheLengthSeconds', async function () {
         register({ cacheHeaderConfig: { defaultCacheLengthSeconds: 900 } })
         const { headers } = await got(`${baseUrl}/testing/123.json`)
@@ -277,7 +233,6 @@ describe('The request handler', function () {
         handleRequest(
           { defaultCacheLengthSeconds: 300 },
           (queryParams, match, sendBadge, request) => {
-            ++handlerCallCount
             createFakeHandlerWithCacheLength(400)(
               queryParams,
               match,
@@ -298,7 +253,6 @@ describe('The request handler', function () {
         handleRequest(
           { defaultCacheLengthSeconds: 300 },
           (queryParams, match, sendBadge, request) => {
-            ++handlerCallCount
             createFakeHandlerWithCacheLength(200)(
               queryParams,
               match,
@@ -345,21 +299,6 @@ describe('The request handler', function () {
         'no-cache, no-store, must-revalidate'
       )
     })
 
-    describe('the cache key', function () {
-      beforeEach(function () {
-        register({ cacheHeaderConfig: standardCacheHeaders })
-      })
-      const expectedCacheKey = '/testing/123.json?color=123&label=foo'
-      it('should match expected and use canonical order - 1', async function () {
-        await got(`${baseUrl}/testing/123.json?color=123&label=foo`)
-        expect(_requestCache.cache).to.have.keys(expectedCacheKey)
-      })
-      it('should match expected and use canonical order - 2', async function () {
-        await got(`${baseUrl}/testing/123.json?label=foo&color=123`)
-        expect(_requestCache.cache).to.have.keys(expectedCacheKey)
-      })
-    })
-  })
 
   describe('custom query parameters', function () {
--- a/core/base-service/lru-cache.js
+++ /dev/null
@@ -1,136 +0,0 @@
-'use strict'
-
-// In-memory KV, remove the oldest data when the capacity is reached.
-
-const typeEnum = {
-  unit: 0,
-  heap: 1,
-}
-
-// In bytes.
-let heapSize
-function computeHeapSize() {
-  return (heapSize = process.memoryUsage().heapTotal)
-}
-
-let heapSizeTimeout
-function getHeapSize() {
-  if (heapSizeTimeout == null) {
-    // Compute the heap size every 60 seconds.
-    heapSizeTimeout = setInterval(computeHeapSize, 60 * 1000)
-    return computeHeapSize()
-  } else {
-    return heapSize
-  }
-}
-
-function CacheSlot(key, value) {
-  this.key = key
-  this.value = value
-  this.older = null // Newest slot that is older than this slot.
-  this.newer = null // Oldest slot that is newer than this slot.
-}
-
-function Cache(capacity, type) {
-  type = type || 'unit'
-  this.capacity = capacity
-  this.type = typeEnum[type]
-  this.cache = new Map() // Maps cache keys to CacheSlots.
-  this.newest = null // Newest slot in the cache.
-  this.oldest = null
-}
-
-Cache.prototype = {
-  set: function addToCache(cacheKey, cached) {
-    let slot = this.cache.get(cacheKey)
-    if (slot === undefined) {
-      slot = new CacheSlot(cacheKey, cached)
-      this.cache.set(cacheKey, slot)
-    }
-    this.makeNewest(slot)
-    const numItemsToRemove = this.limitReached()
-    if (numItemsToRemove > 0) {
-      for (let i = 0; i < numItemsToRemove; i++) {
-        this.removeOldest()
-      }
-    }
-  },
-
-  get: function getFromCache(cacheKey) {
-    const slot = this.cache.get(cacheKey)
-    if (slot !== undefined) {
-      this.makeNewest(slot)
-      return slot.value
-    }
-  },
-
-  has: function hasInCache(cacheKey) {
-    return this.cache.has(cacheKey)
-  },
-
-  makeNewest: function makeNewestSlot(slot) {
-    const previousNewest = this.newest
-    if (previousNewest === slot) {
-      return
-    }
-    const older = slot.older
-    const newer = slot.newer
-
-    if (older !== null) {
-      older.newer = newer
-    } else if (newer !== null) {
-      this.oldest = newer
-    }
-    if (newer !== null) {
-      newer.older = older
-    }
-    this.newest = slot
-
-    if (previousNewest !== null) {
-      slot.older = previousNewest
-      slot.newer = null
-      previousNewest.newer = slot
-    } else {
-      // If previousNewest is null, the cache used to be empty.
-      this.oldest = slot
-    }
-  },
-
-  removeOldest: function removeOldest() {
-    const cacheKey = this.oldest.key
-    if (this.oldest !== null) {
-      this.oldest = this.oldest.newer
-      if (this.oldest !== null) {
-        this.oldest.older = null
-      }
-    }
-    this.cache.delete(cacheKey)
-  },
-
-  // Returns the number of elements to remove if we're past the limit.
-  limitReached: function heuristic() {
-    if (this.type === typeEnum.unit) {
-      // Remove the excess.
-      return Math.max(0, this.cache.size - this.capacity)
-    } else if (this.type === typeEnum.heap) {
-      if (getHeapSize() >= this.capacity) {
-        console.log('LRU HEURISTIC heap:', getHeapSize())
-        // Remove half of them.
-        return this.cache.size >> 1
-      } else {
-        return 0
-      }
-    } else {
-      console.error(`Unknown heuristic '${this.type}' for LRU cache.`)
-      return 1
-    }
-  },
-
-  clear: function () {
-    this.cache.clear()
-    this.newest = null
-    this.oldest = null
-  },
-}
-
-module.exports = Cache
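For reference, the whole interface of the deleted class in one usage sketch; in the default 'unit' mode each entry counts against the capacity, and the spec below exercises the same behavior:

```js
const LruCache = require('./lru-cache')

const cache = new LruCache(2)
cache.set('a', 1)
cache.set('b', 2)
cache.get('a') // touching 'a' makes it the newest slot
cache.set('c', 3) // over capacity: evicts 'b', the oldest slot
console.log(cache.has('b')) // false
console.log(cache.get('a')) // 1
```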
--- a/core/base-service/lru-cache.spec.js
+++ /dev/null
@@ -1,134 +0,0 @@
-'use strict'
-
-const { expect } = require('chai')
-const LRU = require('./lru-cache')
-
-function expectCacheSlots(cache, keys) {
-  expect(cache.cache.size).to.equal(keys.length)
-
-  const slots = keys.map(k => cache.cache.get(k))
-
-  const first = slots[0]
-  const last = slots.slice(-1)[0]
-
-  expect(cache.oldest).to.equal(first)
-  expect(cache.newest).to.equal(last)
-
-  expect(first.older).to.be.null
-  expect(last.newer).to.be.null
-
-  for (let i = 0; i + 1 < slots.length; ++i) {
-    const current = slots[i]
-    const next = slots[i + 1]
-    expect(current.newer).to.equal(next)
-    expect(next.older).to.equal(current)
-  }
-}
-
-describe('The LRU cache', function () {
-  it('should support a zero capacity', function () {
-    const cache = new LRU(0)
-    cache.set('key', 'value')
-    expect(cache.cache.size).to.equal(0)
-  })
-
-  it('should support a one capacity', function () {
-    const cache = new LRU(1)
-    cache.set('key1', 'value1')
-    expectCacheSlots(cache, ['key1'])
-    cache.set('key2', 'value2')
-    expectCacheSlots(cache, ['key2'])
-    expect(cache.get('key1')).to.be.undefined
-    expect(cache.get('key2')).to.equal('value2')
-  })
-
-  it('should remove the oldest element when reaching capacity', function () {
-    const cache = new LRU(2)
-
-    cache.set('key1', 'value1')
-    cache.set('key2', 'value2')
-    cache.set('key3', 'value3')
-    cache.cache.get('key1')
-
-    expectCacheSlots(cache, ['key2', 'key3'])
-    expect(cache.cache.get('key1')).to.be.undefined
-    expect(cache.get('key1')).to.be.undefined
-    expect(cache.get('key2')).to.equal('value2')
-    expect(cache.get('key3')).to.equal('value3')
-  })
-
-  it('should make sure that resetting a key in cache makes it newest', function () {
-    const cache = new LRU(2)
-
-    cache.set('key', 'value')
-    cache.set('key2', 'value2')
-
-    expectCacheSlots(cache, ['key', 'key2'])
-
-    cache.set('key', 'value')
-
-    expectCacheSlots(cache, ['key2', 'key'])
-  })
-
-  describe('getting a key in the cache', function () {
-    context('when the requested key is oldest', function () {
-      it('should leave the keys in the expected order', function () {
-        const cache = new LRU(2)
-        cache.set('key1', 'value1')
-        cache.set('key2', 'value2')
-
-        expectCacheSlots(cache, ['key1', 'key2'])
-
-        expect(cache.get('key1')).to.equal('value1')
-
-        expectCacheSlots(cache, ['key2', 'key1'])
-      })
-    })
-
-    context('when the requested key is newest', function () {
-      it('should leave the keys in the expected order', function () {
-        const cache = new LRU(2)
-        cache.set('key1', 'value1')
-        cache.set('key2', 'value2')
-
-        expect(cache.get('key2')).to.equal('value2')
-
-        expectCacheSlots(cache, ['key1', 'key2'])
-      })
-    })
-
-    context('when the requested key is in the middle', function () {
-      it('should leave the keys in the expected order', function () {
-        const cache = new LRU(3)
-        cache.set('key1', 'value1')
-        cache.set('key2', 'value2')
-        cache.set('key3', 'value3')
-
-        expectCacheSlots(cache, ['key1', 'key2', 'key3'])
-
-        expect(cache.get('key2')).to.equal('value2')
-
-        expectCacheSlots(cache, ['key1', 'key3', 'key2'])
-      })
-    })
-  })
-
-  it('should clear', function () {
-    // Set up.
-    const cache = new LRU(2)
-    cache.set('key1', 'value1')
-    cache.set('key2', 'value2')
-
-    // Confidence check.
-    expect(cache.get('key1')).to.equal('value1')
-    expect(cache.get('key2')).to.equal('value2')
-
-    // Run.
-    cache.clear()
-
-    // Test.
-    expect(cache.get('key1')).to.be.undefined
-    expect(cache.get('key2')).to.be.undefined
-    expect(cache.cache.size).to.equal(0)
-  })
-})
--- a/doc/code-walkthrough.md
+++ b/doc/code-walkthrough.md
@@ -125,10 +125,9 @@ test this kind of logic through unit tests (e.g. of `render()` and
    registered.)
 2. Scoutcamp invokes a callback with the four parameters:
    `( queryParams, match, end, ask )`. This callback is defined in
-   [`legacy-request-handler`][legacy-request-handler]. If the badge result
-   is found in a relatively small in-memory cache, the response is sent
-   immediately. Otherwise a timeout is set to handle unresponsive service
-   code and the next callback is invoked: the legacy handler function.
+   [`legacy-request-handler`][legacy-request-handler]. A timeout is set to
+   handle unresponsive service code and the next callback is invoked: the
+   legacy handler function.
 3. The legacy handler function receives
    `( queryParams, match, sendBadge, request )`. Its job is to extract data
    from the regex `match` and `queryParams`, invoke `request` to fetch
@@ -162,8 +161,8 @@ test this kind of logic through unit tests (e.g. of `render()` and
    service’s defaults to produce an object that fully describes the badge to
    be rendered.
 9. `sendBadge` is invoked with that object. It does some housekeeping on the
-   timeout and caches the result. Then it renders the badge to svg or raster
-   and pushes out the result over the HTTPS connection.
+   timeout. Then it renders the badge to svg or raster and pushes out the
+   result over the HTTPS connection.
 
 [error reporting]: https://github.com/badges/shields/blob/master/doc/production-hosting.md#error-reporting
 [coalescebadge]: https://github.com/badges/shields/blob/master/core/base-service/coalesce-badge.js
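Step 3 of the walkthrough is easiest to see with a sketch of a legacy handler. The URL and field names here are invented for illustration, but the `( queryParams, match, sendBadge, request )` shape matches the walkthrough's description:

```js
// Hypothetical handler for a route like /^\/example\/([^/]+)\.(svg|png|gif|jpg|json)$/
function handler(queryParams, match, sendBadge, request) {
  const [, user, format] = match
  request(`https://example.invalid/api/users/${user}`, (err, res, body) => {
    // badgeData is the legacy `{ text: [label, message] }` shape
    const badgeData = {
      text: ['followers', err ? 'inaccessible' : JSON.parse(body).followers],
    }
    sendBadge(format, badgeData)
  })
}
```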
--- a/doc/production-hosting.md
+++ b/doc/production-hosting.md
@@ -44,15 +44,13 @@ Production hosting is managed by the Shields ops team:
 
 Shields has mercifully little persistent state:
 
-1. The GitHub tokens we collect are saved on each server in a cloud Redis database.
-   They can also be fetched from the [GitHub auth admin endpoint][] for debugging.
-2. The server keeps a few caches in memory. These are neither persisted nor
-   inspectable.
-   - The [request cache][]
-   - The [regular-update cache][]
+1. The GitHub tokens we collect are saved on each server in a cloud Redis
+   database. They can also be fetched from the [GitHub auth admin endpoint][]
+   for debugging.
+2. The server keeps the [regular-update cache][] in memory. It is neither
+   persisted nor inspectable.
 
 [github auth admin endpoint]: https://github.com/badges/shields/blob/master/services/github/auth/admin.js
-[request cache]: https://github.com/badges/shields/blob/master/core/base-service/legacy-request-handler.js#L29-L30
 [regular-update cache]: https://github.com/badges/shields/blob/master/core/legacy/regular-update.js
 
 ## Configuration
--- a/services/debug/debug.service.js
+++ b/services/debug/debug.service.js
@@ -1,11 +1,11 @@
 'use strict'
 
-const { NonMemoryCachingBaseService } = require('..')
+const { BaseService } = require('..')
 
 const serverStartTime = new Date(new Date().toGMTString())
 let bitFlip = false
 
-module.exports = class Debug extends NonMemoryCachingBaseService {
+module.exports = class Debug extends BaseService {
   static category = 'debug'
   static route = { base: 'debug', pattern: ':variant(time|starttime|flip)' }
 
--- a/services/maintenance/maintenance.service.js
+++ b/services/maintenance/maintenance.service.js
@@ -1,8 +1,8 @@
 'use strict'
 
-const { NonMemoryCachingBaseService } = require('..')
+const { BaseService } = require('..')
 
-module.exports = class Maintenance extends NonMemoryCachingBaseService {
+module.exports = class Maintenance extends BaseService {
   static category = 'other'
 
   static route = {