
Commit

Fix silent failure on video cache (#11778)
dgirardi committed Jun 12, 2024
1 parent aaeaa9e commit 50eb9f9
Showing 2 changed files with 48 additions and 12 deletions.
src/videoCache.js: 28 changes (17 additions, 11 deletions)
@@ -12,7 +12,7 @@
 import {ajaxBuilder} from './ajax.js';
 import {config} from './config.js';
 import {auctionManager} from './auctionManager.js';
-import {logWarn} from './utils.js';
+import {logError, logWarn} from './utils.js';
 import {addBidToAuction} from './auction.js';
 
 /**
@@ -166,13 +166,19 @@ export const _internal = {
   store
 }
 
-const storeInCache = (batch) => {
-  _internal.store(batch.map(entry => entry.bidResponse), function (error, cacheIds) {
-    cacheIds.forEach((cacheId, i) => {
-      const {auctionInstance, bidResponse, afterBidAdded} = batch[i];
-      if (error) {
-        logWarn(`Failed to save to the video cache: ${error}. Video bid must be discarded.`);
-      } else {
+export function storeBatch(batch) {
+  const bids = batch.map(entry => entry.bidResponse)
+  function err(msg) {
+    logError(`Failed to save to the video cache: ${msg}. Video bids will be discarded:`, bids)
+  }
+  _internal.store(bids, function (error, cacheIds) {
+    if (error) {
+      err(error)
+    } else if (batch.length !== cacheIds.length) {
+      logError(`expected ${batch.length} cache IDs, got ${cacheIds.length} instead`)
+    } else {
+      cacheIds.forEach((cacheId, i) => {
+        const {auctionInstance, bidResponse, afterBidAdded} = batch[i];
         if (cacheId.uuid === '') {
           logWarn(`Supplied video cache key was already in use by Prebid Cache; caching attempt was rejected. Video bid must be discarded.`);
         } else {
@@ -183,8 +189,8 @@ const storeInCache = (batch) => {
           addBidToAuction(auctionInstance, bidResponse);
           afterBidAdded();
         }
-      }
-    });
+      });
+    }
   });
 };

@@ -200,7 +206,7 @@ if (FEATURES.VIDEO) {
   });
 }
 
-export const batchingCache = (timeout = setTimeout, cache = storeInCache) => {
+export const batchingCache = (timeout = setTimeout, cache = storeBatch) => {
   let batches = [[]];
   let debouncing = false;
   const noTimeout = cb => cb();
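For context, a minimal sketch (not part of the commit; the stub and the sample batch below are hypothetical) of how the reworked storeBatch surfaces a response that the old storeInCache handled silently:

import {_internal, storeBatch} from 'src/videoCache.js';

// Hypothetical stub: Prebid Cache answers with one ID for a batch of two bids.
_internal.store = (bids, done) => done(null, [{uuid: 'abc123'}]);

const entry = (bid) => ({auctionInstance: {}, bidResponse: bid, afterBidAdded: () => {}});
storeBatch([entry({adId: 'bid-1'}), entry({adId: 'bid-2'})]);

// With this change, storeBatch logs `expected 2 cache IDs, got 1 instead` and adds
// neither bid to the auction. Previously, the first bid was cached and added while
// the second was dropped without any log, one of the silent failures this commit targets.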
test/spec/videoCache_spec.js: 32 changes (31 additions, 1 deletion)
@@ -1,9 +1,10 @@
 import chai from 'chai';
-import {batchingCache, getCacheUrl, store} from 'src/videoCache.js';
+import {batchingCache, getCacheUrl, store, _internal, storeBatch} from 'src/videoCache.js';
 import {config} from 'src/config.js';
 import {server} from 'test/mocks/xhr.js';
 import {auctionManager} from '../../src/auctionManager.js';
 import {AuctionIndex} from '../../src/auctionIndex.js';
+import * as utils from 'src/utils.js';
 
 const should = chai.should();
 
@@ -366,6 +367,35 @@ describe('The video cache', function () {
       return callback;
     }
   });
+
+  describe('storeBatch', () => {
+    let sandbox;
+    let err, cacheIds
+    beforeEach(() => {
+      err = null;
+      cacheIds = [];
+      sandbox = sinon.createSandbox();
+      sandbox.stub(utils, 'logError');
+      sandbox.stub(_internal, 'store').callsFake((_, cb) => cb(err, cacheIds));
+    });
+    afterEach(() => {
+      sandbox.restore();
+    })
+    it('should log an error when store replies with an error', () => {
+      err = new Error('err');
+      storeBatch([]);
+      sinon.assert.called(utils.logError);
+    });
+    it('should not process returned uuids if they do not match the batch size', () => {
+      const el = {auctionInstance: {}, bidResponse: {}, afterBidAdded: sinon.stub()}
+      const batch = [el, el];
+      cacheIds = [{uuid: 'mock-id'}]
+      storeBatch(batch);
+      expect(el.bidResponse.videoCacheKey).to.not.exist;
+      sinon.assert.notCalled(batch[0].afterBidAdded);
+      sinon.assert.called(utils.logError);
+    })
+  })
 });
 
 describe('The getCache function', function () {
