Skip to content

Commit

Permalink
TokenPool: Add debug logging helper + return correct error (#2400)
Browse files Browse the repository at this point in the history
Slice another sliver from #1205.
  • Loading branch information
paulmelnikow authored Dec 2, 2018
1 parent 8829456 commit 59fdd8a
Show file tree
Hide file tree
Showing 5 changed files with 139 additions and 35 deletions.
20 changes: 7 additions & 13 deletions lib/github-auth.js
Original file line number Diff line number Diff line change
@@ -1,10 +1,10 @@
'use strict'

const { EventEmitter } = require('events')
const crypto = require('crypto')
const queryString = require('query-string')
const serverSecrets = require('./server-secrets')
const mapKeys = require('lodash.mapkeys')
const serverSecrets = require('./server-secrets')
const { sanitizeToken } = require('./token-pool')

const userTokenRateLimit = 12500

Expand Down Expand Up @@ -105,14 +105,6 @@ function mapToObject(map) {
return result
}

// Return the hex-encoded SHA-256 digest of the input string. One-way:
// used to sanitize secrets (tokens) before they reach debug output.
function sha(str) {
  const hasher = crypto.createHash('sha256')
  hasher.update(str, 'utf-8')
  return hasher.digest('hex')
}

// Return a defensive shallow copy of the registered token ids, so
// callers cannot mutate the module-level list.
function getAllTokenIds() {
  return [...githubUserTokens]
}
Expand All @@ -135,9 +127,11 @@ function serializeDebugInfo(options) {

if (sanitize) {
return {
tokens: unsanitized.tokens.map(k => sha(k)),
reqRemaining: mapKeys(unsanitized.reqRemaining, (v, k) => sha(k)),
reqReset: mapKeys(unsanitized.reqReset, (v, k) => sha(k)),
tokens: unsanitized.tokens.map(k => sanitizeToken(k)),
reqRemaining: mapKeys(unsanitized.reqRemaining, (v, k) =>
sanitizeToken(k)
),
reqReset: mapKeys(unsanitized.reqReset, (v, k) => sanitizeToken(k)),
utcEpochSeconds: unsanitized.utcEpochSeconds,
sanitized: true,
}
Expand Down
62 changes: 56 additions & 6 deletions lib/token-pool.js
Original file line number Diff line number Diff line change
@@ -1,7 +1,20 @@
'use strict'

const crypto = require('crypto')
const PriorityQueue = require('priorityqueuejs')

// One-way SHA-256 hash (hex) of a token id, safe to show in debug
// output without leaking the secret itself.
function sanitizeToken(id) {
  const hash = crypto.createHash('sha256')
  hash.update(id, 'utf-8')
  return hash.digest('hex')
}

// Current UTC time as whole seconds since the Unix epoch. The `>>> 0`
// truncates the fractional milliseconds (and keeps an unsigned int).
function getUtcEpochSeconds() {
  const nowMillis = Date.now()
  return (nowMillis / 1000) >>> 0
}

// Encapsulate a rate-limited token, with a user-provided ID and user-provided data.
//
// Each token has a notion of the number of uses remaining until exhausted,
Expand Down Expand Up @@ -34,13 +47,12 @@ class Token {
// Whether this token may still be handed out (reads the internal
// validity flag; presumably cleared by an invalidate method — the
// setter is outside this view).
get isValid() {
return this._isValid
}

static utcEpochSeconds() {
return (Date.now() / 1000) >>> 0
get isFrozen() {
return this._isFrozen
}

get hasReset() {
return this.constructor.utcEpochSeconds() >= this.nextReset
return getUtcEpochSeconds() >= this.nextReset
}

get isExhausted() {
Expand Down Expand Up @@ -92,6 +104,23 @@ class Token {
// Clear the frozen flag so the token can be handed out again.
unfreeze() {
this._isFrozen = false
}

getDebugInfo({ sanitize = true } = {}) {
const { id, data, usesRemaining, nextReset, isValid, isFrozen } = this

if (sanitize) {
return {
id: sanitizeToken(id),
data: '[redacted]',
usesRemaining,
nextReset,
isValid,
isFrozen,
}
} else {
return { id, data, usesRemaining, nextReset, isValid, isFrozen }
}
}
}

// Large sentinel value which means "never reset".
Expand All @@ -105,7 +134,7 @@ Token.nextResetNever = Number.MAX_SAFE_INTEGER
class TokenPool {
// batchSize: The maximum number of times we use each token before moving
// on to the next one.
constructor(batchSize) {
constructor({ batchSize = 1 } = {}) {
this.batchSize = batchSize

this.currentBatch = { currentToken: null, remaining: 0 }
Expand Down Expand Up @@ -160,7 +189,10 @@ class TokenPool {
}
}

while ((next = this.priorityQueue.peek())) {
while (
!this.priorityQueue.isEmpty() &&
(next = this.priorityQueue.peek())
) {
if (!next.isValid) {
// Discard, and
continue
Expand Down Expand Up @@ -240,9 +272,27 @@ class TokenPool {
this.forEach(({ id }) => result.push(id))
return result
}

serializeDebugInfo({ sanitize = true } = {}) {
const maybeSanitize = sanitize ? id => sanitizeToken(id) : id => id

const priorityQueue = []
this.priorityQueue.forEach(t =>
priorityQueue.push(t.getDebugInfo({ sanitize }))
)

return {
utcEpochSeconds: getUtcEpochSeconds(),
allValidTokenIds: this.allValidTokenIds().map(maybeSanitize),
fifoQueue: this.fifoQueue.map(t => t.getDebugInfo({ sanitize })),
priorityQueue,
sanitized: sanitize,
}
}
}

module.exports = {
sanitizeToken,
Token,
TokenPool,
}
69 changes: 64 additions & 5 deletions lib/token-pool.spec.js
Original file line number Diff line number Diff line change
Expand Up @@ -12,13 +12,12 @@ function expectPoolToBeExhausted(pool) {
}

describe('The token pool', function() {
const ids = [1, 2, 3, 4, 5]
const ids = ['1', '2', '3', '4', '5']
const batchSize = 3

let tokenPool
beforeEach(function() {
// Set up.
tokenPool = new TokenPool(batchSize)
tokenPool = new TokenPool({ batchSize })
ids.forEach(id => tokenPool.add(id))
})

Expand All @@ -41,6 +40,59 @@ describe('The token pool', function() {
)
})

describe('serializeDebugInfo should initially return the expected', function() {
context('sanitize is not specified', function() {
it('returns fully sanitized results', function() {
// This is `sha()` of '1', '2', '3', '4', '5'. These are written
// literally for avoidance of doubt as to whether sanitization is
// happening.
const sanitizedIds = [
'6b86b273ff34fce19d6b804eff5a3f5747ada4eaa22f1d49c01e52ddb7875b4b',
'd4735e3a265e16eee03f59718b9b5d03019c07d8b6c51f90da3a666eec13ab35',
'4e07408562bedb8b60ce05c1decfe3ad16b72230967de01f640b7e4729b49fce',
'4b227777d4dd1fc61c6f884f48641d02b4d121d3fd328cb08b5531fcacdabf8a',
'ef2d127de37b942baad06145e54b0c619a1f22327b2ebbcfbec78f5564afe39d',
]

expect(tokenPool.serializeDebugInfo()).to.deep.equal({
allValidTokenIds: sanitizedIds,
priorityQueue: [],
fifoQueue: sanitizedIds.map(id => ({
data: '[redacted]',
id,
isFrozen: false,
isValid: true,
nextReset: Token.nextResetNever,
usesRemaining: batchSize,
})),
sanitized: true,
utcEpochSeconds: (Date.now() / 1000) | 0,
})
})
})

context('with sanitize: false', function() {
it('returns unsanitized results', function() {
expect(tokenPool.serializeDebugInfo({ sanitize: false })).to.deep.equal(
{
allValidTokenIds: ids,
priorityQueue: [],
fifoQueue: ids.map(id => ({
data: undefined,
id,
isFrozen: false,
isValid: true,
nextReset: Token.nextResetNever,
usesRemaining: batchSize,
})),
sanitized: false,
utcEpochSeconds: (Date.now() / 1000) | 0,
}
)
})
})
})

context('tokens are marked exhausted immediately', function() {
it('should be exhausted', function() {
ids.forEach(() => {
Expand All @@ -65,7 +117,7 @@ describe('The token pool', function() {

context('tokens are renewed', function() {
it('should keep using them', function() {
const tokensToRenew = [2, 4]
const tokensToRenew = ['2', '4']
const renewalCount = 3

ids.forEach(id => {
Expand Down Expand Up @@ -99,7 +151,7 @@ describe('The token pool', function() {
})

it('should start using them', function() {
const tokensToReset = [2, 4]
const tokensToReset = ['2', '4']
const futureTime = 1440

ids.forEach(id => {
Expand All @@ -122,4 +174,11 @@ describe('The token pool', function() {
expectPoolToBeExhausted(tokenPool)
})
})

context('when empty', function() {
it('next() should return the expected error', function() {
const tokenPool = new TokenPool()
expect(() => tokenPool.next()).to.throw('Token pool is exhausted')
})
})
})
19 changes: 9 additions & 10 deletions lib/token-provider.js
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@
const { Token, TokenPool } = require('./token-pool')

class StaticTokenProvider {
constructor(tokenValidator, tokenString) {
constructor({ tokenValidator, tokenString }) {
if (typeof tokenValidator !== 'function') {
throw Error('tokenValidator is not a function')
}
Expand All @@ -29,26 +29,21 @@ class PoolingTokenProvider {
/*
tokenValidator: A function which returns true if the argument is a valid token.
*/
constructor(tokenValidator) {
constructor({ tokenValidator, batchSize = 25 }) {
if (typeof tokenValidator !== 'function') {
throw Error('tokenValidator is not a function')
}

Object.assign(this, {
tokenValidator,
batchSize: 25,
searchBatchSize: 5,
})

this.tokenPool = new TokenPool(this.batchSize)
this.tokenValidator = tokenValidator
this.tokenPool = new TokenPool({ batchSize })
}

addToken(tokenString) {
if (!this.tokenValidator(tokenString)) {
throw Error(`Not a valid token: ${tokenString}`)
}

this.tokenPool.add(tokenString, null, this.batchSize)
this.tokenPool.add(tokenString)
}

nextToken() {
Expand All @@ -59,6 +54,10 @@ class PoolingTokenProvider {
// Expose the underlying pool's valid token ids as a plain array.
toNative() {
return this.tokenPool.allValidTokenIds()
}

// Delegate debug serialization to the underlying token pool; `options`
// is passed through unchanged (e.g. `{ sanitize }`).
serializeDebugInfo(options) {
return this.tokenPool.serializeDebugInfo(options)
}
}

module.exports = {
Expand Down
4 changes: 3 additions & 1 deletion lib/token-provider.spec.js
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,9 @@ describe('The token provider', function() {
describe('toNative', function() {
it('should return the expected value', function() {
const tokens = ['1', '2', '3', '4', '5'].map(c => c.repeat(40))
const provider = new PoolingTokenProvider(isValidGithubToken)
const provider = new PoolingTokenProvider({
tokenValidator: isValidGithubToken,
})
tokens.forEach(t => provider.addToken(t))
assert.deepStrictEqual(
provider.toNative().sort(),
Expand Down

0 comments on commit 59fdd8a

Please sign in to comment.