Merge pull request #1305 from ing-bank/feat/ajax-concurrent-fetch

Feat/ajax concurrent fetch

commit a5cc0a1155

6 changed files with 189 additions and 54 deletions
.changeset/brave-tools-tie.md (new file, +5)
@@ -0,0 +1,5 @@
+---
+'@lion/ajax': patch
+---
+
+allow caching concurrent requests
.changeset/lucky-games-poke.md (new file, +5)
@@ -0,0 +1,5 @@
+---
+'@lion/ajax': patch
+---
+
+return cached status and headers
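Taken together, these two changesets describe the user-facing change: concurrent requests for the same resource share a single network call, and the cached response keeps its original status and headers. A minimal usage sketch, assuming an `ajax` instance that already has the cache interceptors from this PR registered (the tests below do that through a helper) and a made-up `/api/users` endpoint:

```js
// Hypothetical sketch: `ajax` has the cache request/response interceptors registered;
// '/api/users' is a placeholder endpoint, not something defined in this PR.
const request1 = ajax.request('/api/users'); // cache miss: marked as pending, goes to the network
const request2 = ajax.request('/api/users'); // same cache id: waits for request1, then reads the cache

const [response1, response2] = await Promise.all([request1, request2]);
// Only one fetch happened; response2 is a clone of the cached response,
// so response2.status and response2.headers match the original network response.
```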
@@ -157,9 +157,9 @@ In the example below, we do not allow you to write digits.

 ```js preview-story
 export const preprocessors = () => {
-  const preprocess = (value) => {
+  const preprocess = value => {
     return value.replace(/[0-9]/g, '');
-  }
+  };
   return html`
     <lion-input
       label="Date Example"
@@ -118,27 +118,15 @@ export class AjaxClient {

     // run request interceptors, returning directly and skipping the network
     // if a interceptor returns a Response
-    let interceptedRequest = request;
-    for (const intercept of this._requestInterceptors) {
-      // In this instance we actually do want to await for each sequence
-      // eslint-disable-next-line no-await-in-loop
-      const interceptedRequestOrResponse = await intercept(interceptedRequest);
-      if (interceptedRequestOrResponse instanceof Request) {
-        interceptedRequest = interceptedRequestOrResponse;
-      } else {
-        return interceptedRequestOrResponse;
-      }
+    const interceptedRequestOrResponse = await this.__interceptRequest(request);
+    if (interceptedRequestOrResponse instanceof Response) {
+      return interceptedRequestOrResponse;
     }

-    const response = /** @type {CacheResponse} */ (await fetch(interceptedRequest));
-    response.request = interceptedRequest;
+    const response = /** @type {CacheResponse} */ (await fetch(interceptedRequestOrResponse));
+    response.request = interceptedRequestOrResponse;

-    let interceptedResponse = response;
-    for (const intercept of this._responseInterceptors) {
-      // In this instance we actually do want to await for each sequence
-      // eslint-disable-next-line no-await-in-loop
-      interceptedResponse = await intercept(interceptedResponse);
-    }
+    const interceptedResponse = await this.__interceptResponse(response);

     if (interceptedResponse.status >= 400 && interceptedResponse.status < 600) {
       throw new AjaxClientFetchError(request, interceptedResponse);
@@ -190,4 +178,39 @@ export class AjaxClient {
       throw new Error(`Failed to parse response from ${response.url} as JSON.`);
     }
   }
+
+  /**
+   * @param {Request} request
+   * @returns {Promise<Request | Response>}
+   */
+  async __interceptRequest(request) {
+    // run request interceptors, returning directly and skipping the network
+    // if a interceptor returns a Response
+    let interceptedRequest = request;
+    for (const intercept of this._requestInterceptors) {
+      // In this instance we actually do want to await for each sequence
+      // eslint-disable-next-line no-await-in-loop
+      const interceptedRequestOrResponse = await intercept(interceptedRequest);
+      if (interceptedRequestOrResponse instanceof Request) {
+        interceptedRequest = interceptedRequestOrResponse;
+      } else {
+        return this.__interceptResponse(interceptedRequestOrResponse);
+      }
+    }
+    return interceptedRequest;
+  }
+
+  /**
+   * @param {Response} response
+   * @returns {Promise<Response>}
+   */
+  async __interceptResponse(response) {
+    let interceptedResponse = response;
+    for (const intercept of this._responseInterceptors) {
+      // In this instance we actually do want to await for each sequence
+      // eslint-disable-next-line no-await-in-loop
+      interceptedResponse = await intercept(interceptedResponse);
+    }
+    return interceptedResponse;
+  }
 }
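The refactor above makes the short-circuit path explicit: when a request interceptor returns a `Response` instead of a `Request`, `__interceptRequest` hands it to `__interceptResponse`, so the network is skipped but the response interceptors still run. A sketch of such an interceptor, with an invented endpoint and payload purely for illustration:

```js
// Hypothetical request interceptor that answers '/api/feature-flags' locally.
// Returning a Response from a request interceptor makes AjaxClient skip fetch()
// and run the response interceptors on this object instead.
const localFlagsInterceptor = async request => {
  if (new URL(request.url).pathname === '/api/feature-flags') {
    return new Response(JSON.stringify({ darkMode: true }), {
      status: 200,
      headers: { 'content-type': 'application/json' },
    });
  }
  return request; // every other request continues through the normal flow
};
```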
@@ -16,22 +16,55 @@ class Cache {
     this.cacheConfig = {};

     /**
-     * @type {{[url: string]: {expires: number, data: object} }}
-     * @protected
+     * @type {{[url: string]: {expires: number, response: CacheResponse} }}
+     * @private
      */
     this._cacheObject = {};
+    /**
+     * @type {{ [url: string]: { promise: Promise<void>, resolve: (v?: any) => void } }}
+     * @private
+     */
+    this._pendingRequests = {};
+  }
+
+  /** @param {string} url */
+  setPendingRequest(url) {
+    /** @type {(v: any) => void} */
+    let resolve = () => {};
+    const promise = new Promise(_resolve => {
+      resolve = _resolve;
+    });
+    this._pendingRequests[url] = { promise, resolve };
+  }
+
+  /**
+   * @param {string} url
+   * @returns {Promise<void> | undefined}
+   */
+  getPendingRequest(url) {
+    if (this._pendingRequests[url]) {
+      return this._pendingRequests[url].promise;
+    }
+  }
+
+  /** @param {string} url */
+  resolvePendingRequest(url) {
+    if (this._pendingRequests[url]) {
+      this._pendingRequests[url].resolve();
+      delete this._pendingRequests[url];
+    }
   }

   /**
    * Store an item in the cache
    * @param {string} url key by which the cache is stored
-   * @param {object} data the cached object
+   * @param {Response} response the cached response
    */
-  set(url, data) {
+  set(url, response) {
     this._validateCache();
     this._cacheObject[url] = {
       expires: new Date().getTime(),
-      data,
+      response,
     };
   }

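The `_pendingRequests` bookkeeping added above is a small promise latch: the first request for a cache id registers a promise, later requests for the same id await it, and resolving it releases them. Isolated from the `Cache` class, the pattern looks roughly like this (names are illustrative, not part of the PR):

```js
// Standalone sketch of the pending-request latch (illustrative names).
const pending = {};

function markPending(key) {
  let release = () => {};
  const promise = new Promise(resolve => {
    release = resolve;
  });
  pending[key] = { promise, release };
}

function waitForPending(key) {
  return pending[key]?.promise; // undefined when nothing is in flight
}

function resolvePending(key) {
  if (pending[key]) {
    pending[key].release();
    delete pending[key];
  }
}
```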
@@ -39,6 +72,7 @@ class Cache {
    * Retrieve an item from the cache
    * @param {string} url key by which the cache is stored
    * @param {number} timeToLive maximum time to allow cache to live
+   * @returns {CacheResponse | false}
    */
   get(url, timeToLive) {
     this._validateCache();

@@ -52,7 +86,7 @@ class Cache {
     if (timeToLive !== null && cacheAge > timeToLive) {
       return false;
     }
-    return cacheResult.data;
+    return cacheResult.response;
   }

   /**

@@ -65,6 +99,7 @@ class Cache {
     Object.keys(this._cacheObject).forEach(key => {
       if (key.indexOf(url) > -1) {
         delete this._cacheObject[key];
+        this.resolvePendingRequest(key);
       }
     });
   }

@@ -82,6 +117,7 @@ class Cache {
       if (notMatch) return;

       const isDataDeleted = delete this._cacheObject[key];
+      this.resolvePendingRequest(key);

       if (!isDataDeleted) {
         throw new Error(`Failed to delete cache for a request '${key}'`);

@@ -228,7 +264,7 @@ export const cacheRequestInterceptorFactory = (getCacheIdentifier, globalCacheOp
   const validatedInitialCacheOptions = validateOptions(globalCacheOptions);

   return /** @param {CacheRequest} cacheRequest */ async cacheRequest => {
-    const { method, status, statusText, headers } = cacheRequest;
+    const { method } = cacheRequest;

     const cacheOptions = composeCacheOptions(
       validatedInitialCacheOptions,

@@ -245,7 +281,6 @@ export const cacheRequestInterceptorFactory = (getCacheIdentifier, globalCacheOp

     // cacheIdentifier is used to bind the cache to the current session
     const currentCache = getCache(getCacheIdentifier());
-    const cacheResponse = currentCache.get(cacheId, cacheOptions.timeToLive);

     // don't use cache if the request method is not part of the configs methods
     if (cacheOptions.methods.indexOf(method.toLowerCase()) === -1) {

@@ -267,24 +302,29 @@ export const cacheRequestInterceptorFactory = (getCacheIdentifier, globalCacheOp
       return cacheRequest;
     }

+    const pendingRequest = currentCache.getPendingRequest(cacheId);
+    if (pendingRequest) {
+      // there is another concurrent request, wait for it to finish
+      await pendingRequest;
+    }
+
+    const cacheResponse = currentCache.get(cacheId, cacheOptions.timeToLive);
     if (cacheResponse) {
       // eslint-disable-next-line no-param-reassign
       if (!cacheRequest.cacheOptions) {
         cacheRequest.cacheOptions = { useCache: false };
       }

-      const init = /** @type {LionRequestInit} */ ({
-        status,
-        statusText,
-        headers,
-      });
-
-      const response = /** @type {CacheResponse} */ (new Response(cacheResponse, init));
+      const response = /** @type {CacheResponse} */ cacheResponse.clone();
       response.request = cacheRequest;
       response.fromCache = true;
       return response;
     }

+    // we do want to use caching for this requesting, but it's not already cached
+    // mark this as a pending request, so that concurrent requests can reuse it from the cache
+    currentCache.setPendingRequest(cacheId);
+
     return cacheRequest;
   };
 };
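Note the switch from rebuilding a `Response` out of the cached body text to storing and cloning the `Response` itself: a clone carries the original status, statusText and headers along with the body, which is what the second changeset refers to. A small illustration with plain web APIs:

```js
// Cloning keeps status and headers; reconstructing a Response from only its
// body text resets them to the defaults (status 200, empty headers).
const original = new Response('payload', { status: 206, headers: { 'x-foo': 'x-bar' } });

const cloned = original.clone();
console.log(cloned.status, cloned.headers.get('x-foo')); // 206 'x-bar'

const rebuilt = new Response(await original.text()); // body only
console.log(rebuilt.status, rebuilt.headers.get('x-foo')); // 200 null
```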
@@ -306,38 +346,35 @@ export const cacheResponseInterceptorFactory = (getCacheIdentifier, globalCacheO
       throw new Error(`getCacheIdentifier returns falsy`);
     }

+    if (!cacheResponse.request) {
+      throw new Error('Missing request in response.');
+    }
+
     const cacheOptions = composeCacheOptions(
       validatedInitialCacheOptions,
       cacheResponse.request?.cacheOptions,
     );
+    // string that identifies cache entry
+    const cacheId = cacheOptions.requestIdentificationFn(
+      cacheResponse.request,
+      searchParamSerializer,
+    );
+    const currentCache = getCache(getCacheIdentifier());
     const isAlreadyFromCache = !!cacheResponse.fromCache;
     // caching all responses with not default `timeToLive`
     const isCacheActive = cacheOptions.timeToLive > 0;

-    if (isAlreadyFromCache || !isCacheActive) {
-      return cacheResponse;
-    }
-
     // if the request is one of the options.methods; store response in cache
     if (
-      cacheResponse.request &&
+      !isAlreadyFromCache &&
+      isCacheActive &&
       cacheOptions.methods.indexOf(cacheResponse.request.method.toLowerCase()) > -1
     ) {
-      // string that identifies cache entry
-      const cacheId = cacheOptions.requestIdentificationFn(
-        cacheResponse.request,
-        searchParamSerializer,
-      );
-
-      const responseBody = await cacheResponse.clone().text();
-      // store the response data in the cache
-      getCache(getCacheIdentifier()).set(cacheId, responseBody);
-    } else {
-      // don't store in cache if the request method is not part of the configs methods
-      return cacheResponse;
+      // store the response data in the cache and mark request as resolved
+      currentCache.set(cacheId, cacheResponse.clone());
     }

+    currentCache.resolvePendingRequest(cacheId);
     return cacheResponse;
   };
 };
@@ -1,4 +1,4 @@
-import { expect } from '@open-wc/testing';
+import { aTimeout, expect } from '@open-wc/testing';
 import { spy, stub, useFakeTimers } from 'sinon';
 import '../src/typedef.js';

@@ -452,5 +452,70 @@ describe('ajax cache', () => {
       ajaxAlwaysRequestSpy.restore();
       removeCacheInterceptors(ajax, indexes);
     });
+
+    it('caches concurrent requests', async () => {
+      newCacheId();
+
+      let i = 0;
+      fetchStub.returns(
+        new Promise(resolve => {
+          i += 1;
+          setTimeout(() => {
+            resolve(new Response(`mock response ${i}`));
+          }, 5);
+        }),
+      );
+
+      const indexes = addCacheInterceptors(ajax, {
+        useCache: true,
+        timeToLive: 100,
+      });
+      const ajaxRequestSpy = spy(ajax, 'request');
+
+      const request1 = ajax.request('/test');
+      const request2 = ajax.request('/test');
+      await aTimeout(1);
+      const request3 = ajax.request('/test');
+      await aTimeout(3);
+      const request4 = ajax.request('/test');
+      const responses = await Promise.all([request1, request2, request3, request4]);
+      expect(fetchStub.callCount).to.equal(1);
+      const responseTexts = await Promise.all(responses.map(r => r.text()));
+      expect(responseTexts).to.eql([
+        'mock response 1',
+        'mock response 1',
+        'mock response 1',
+        'mock response 1',
+      ]);
+
+      ajaxRequestSpy.restore();
+      removeCacheInterceptors(ajax, indexes);
+    });
+
+    it('preserves status and headers when returning cached response', async () => {
+      newCacheId();
+      fetchStub.returns(
+        Promise.resolve(
+          new Response('mock response', { status: 206, headers: { 'x-foo': 'x-bar' } }),
+        ),
+      );
+
+      const indexes = addCacheInterceptors(ajax, {
+        useCache: true,
+        timeToLive: 100,
+      });
+      const ajaxRequestSpy = spy(ajax, 'request');
+
+      const response1 = await ajax.request('/test');
+      const response2 = await ajax.request('/test');
+      expect(fetchStub.callCount).to.equal(1);
+      expect(response1.status).to.equal(206);
+      expect(response1.headers.get('x-foo')).to.equal('x-bar');
+      expect(response2.status).to.equal(206);
+      expect(response2.headers.get('x-foo')).to.equal('x-bar');
+
+      ajaxRequestSpy.restore();
+      removeCacheInterceptors(ajax, indexes);
+    });
   });
 });