Remove LazyLruStore, HashingStore, and createStorageHash.
- Replace with HashedItemStore, createStateHash, and isStateHash. - Refactor stubBrowserStorage.
This commit is contained in:
parent
df2c116561
commit
06ed9339bb
|
@ -49,10 +49,10 @@ describe('StubBrowserStorage', () => {
|
|||
});
|
||||
});
|
||||
|
||||
describe('size limiting', () => {
|
||||
describe('#setStubbedSizeLimit', () => {
|
||||
it('allows limiting the storage size', () => {
|
||||
const store = new StubBrowserStorage();
|
||||
store._setSizeLimit(10);
|
||||
store.setStubbedSizeLimit(10);
|
||||
store.setItem('abc', 'def'); // store size is 6, key.length + val.length
|
||||
expect(() => {
|
||||
store.setItem('ghi', 'jkl');
|
||||
|
@ -61,25 +61,41 @@ describe('StubBrowserStorage', () => {
|
|||
|
||||
it('allows defining the limit as infinity', () => {
|
||||
const store = new StubBrowserStorage();
|
||||
store._setSizeLimit(Infinity);
|
||||
store.setStubbedSizeLimit(Infinity);
|
||||
store.setItem('abc', 'def');
|
||||
store.setItem('ghi', 'jkl'); // unlike the previous test, this doesn't throw
|
||||
});
|
||||
|
||||
it('requires setting the limit before keys', () => {
|
||||
it('throws an error if the limit is below the current size', () => {
|
||||
const store = new StubBrowserStorage();
|
||||
store.setItem('key', 'val');
|
||||
expect(() => {
|
||||
store._setSizeLimit(10);
|
||||
}).throwError(/before setting/);
|
||||
store.setStubbedSizeLimit(5);
|
||||
}).throwError(Error);
|
||||
});
|
||||
|
||||
it('respects removed items', () => {
|
||||
const store = new StubBrowserStorage();
|
||||
store._setSizeLimit(10);
|
||||
store.setStubbedSizeLimit(10);
|
||||
store.setItem('abc', 'def');
|
||||
store.removeItem('abc');
|
||||
store.setItem('ghi', 'jkl'); // unlike the previous test, this doesn't throw
|
||||
});
|
||||
});
|
||||
|
||||
describe('#getStubbedSizeLimit', () => {
|
||||
it('returns the size limit', () => {
|
||||
const store = new StubBrowserStorage();
|
||||
store.setStubbedSizeLimit(10);
|
||||
expect(store.getStubbedSizeLimit()).to.equal(10);
|
||||
});
|
||||
});
|
||||
|
||||
describe('#getStubbedSize', () => {
|
||||
it('returns the size', () => {
|
||||
const store = new StubBrowserStorage();
|
||||
store.setItem(1, 1);
|
||||
expect(store.getStubbedSize()).to.equal(2);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
|
|
@ -1,92 +1,109 @@
|
|||
const keys = Symbol('keys');
|
||||
const values = Symbol('values');
|
||||
const remainingSize = Symbol('remainingSize');
|
||||
|
||||
export default class StubBrowserStorage {
|
||||
constructor() {
|
||||
this[keys] = [];
|
||||
this[values] = [];
|
||||
this[remainingSize] = 5000000; // 5mb, minimum browser storage size
|
||||
this._keys = [];
|
||||
this._values = [];
|
||||
this._size = 0;
|
||||
this._sizeLimit = 5000000; // 5mb, minimum browser storage size
|
||||
}
|
||||
|
||||
// -----------------------------------------------------------------------------------------------
|
||||
// Browser-specific methods.
|
||||
// -----------------------------------------------------------------------------------------------
|
||||
|
||||
get length() {
|
||||
return this[keys].length;
|
||||
return this._keys.length;
|
||||
}
|
||||
|
||||
key(i) {
|
||||
return this[keys][i];
|
||||
return this._keys[i];
|
||||
}
|
||||
|
||||
getItem(key) {
|
||||
key = String(key);
|
||||
|
||||
const i = this[keys].indexOf(key);
|
||||
const i = this._keys.indexOf(key);
|
||||
if (i === -1) return null;
|
||||
return this[values][i];
|
||||
return this._values[i];
|
||||
}
|
||||
|
||||
setItem(key, value) {
|
||||
key = String(key);
|
||||
value = String(value);
|
||||
this._takeUpSpace(this._calcSizeOfAdd(key, value));
|
||||
const sizeOfAddition = this._getSizeOfAddition(key, value);
|
||||
this._updateSize(sizeOfAddition);
|
||||
|
||||
const i = this[keys].indexOf(key);
|
||||
const i = this._keys.indexOf(key);
|
||||
if (i === -1) {
|
||||
this[keys].push(key);
|
||||
this[values].push(value);
|
||||
this._keys.push(key);
|
||||
this._values.push(value);
|
||||
} else {
|
||||
this[values][i] = value;
|
||||
this._values[i] = value;
|
||||
}
|
||||
}
|
||||
|
||||
removeItem(key) {
|
||||
key = String(key);
|
||||
this._takeUpSpace(this._calcSizeOfRemove(key));
|
||||
const sizeOfRemoval = this._getSizeOfRemoval(key);
|
||||
this._updateSize(sizeOfRemoval);
|
||||
|
||||
const i = this[keys].indexOf(key);
|
||||
const i = this._keys.indexOf(key);
|
||||
if (i === -1) return;
|
||||
this[keys].splice(i, 1);
|
||||
this[values].splice(i, 1);
|
||||
this._keys.splice(i, 1);
|
||||
this._values.splice(i, 1);
|
||||
}
|
||||
|
||||
// non-standard api methods
|
||||
_getKeys() {
|
||||
return this[keys].slice();
|
||||
// -----------------------------------------------------------------------------------------------
|
||||
// Test-specific methods.
|
||||
// -----------------------------------------------------------------------------------------------
|
||||
|
||||
getStubbedKeys() {
|
||||
return this._keys.slice();
|
||||
}
|
||||
|
||||
_getValues() {
|
||||
return this[values].slice();
|
||||
getStubbedValues() {
|
||||
return this._values.slice();
|
||||
}
|
||||
|
||||
_setSizeLimit(limit) {
|
||||
if (this[keys].length) {
|
||||
throw new Error('You must call _setSizeLimit() before setting any values');
|
||||
setStubbedSizeLimit(sizeLimit) {
|
||||
// We can't reconcile a size limit with the "stored" items, if the stored items size exceeds it.
|
||||
if (sizeLimit < this._size) {
|
||||
throw new Error(`You can't set a size limit smaller than the current size.`);
|
||||
}
|
||||
|
||||
this[remainingSize] = limit;
|
||||
this._sizeLimit = sizeLimit;
|
||||
}
|
||||
|
||||
_calcSizeOfAdd(key, value) {
|
||||
const i = this[keys].indexOf(key);
|
||||
getStubbedSizeLimit() {
|
||||
return this._sizeLimit;
|
||||
}
|
||||
|
||||
getStubbedSize() {
|
||||
return this._size;
|
||||
}
|
||||
|
||||
_getSizeOfAddition(key, value) {
|
||||
const i = this._keys.indexOf(key);
|
||||
if (i === -1) {
|
||||
return key.length + value.length;
|
||||
}
|
||||
return value.length - this[values][i].length;
|
||||
// Return difference of what's been stored, and what *will* be stored.
|
||||
return value.length - this._values[i].length;
|
||||
}
|
||||
|
||||
_calcSizeOfRemove(key) {
|
||||
const i = this[keys].indexOf(key);
|
||||
_getSizeOfRemoval(key) {
|
||||
const i = this._keys.indexOf(key);
|
||||
if (i === -1) {
|
||||
return 0;
|
||||
}
|
||||
return 0 - (key.length + this[values][i].length);
|
||||
// Return negative value.
|
||||
return -(key.length + this._values[i].length);
|
||||
}
|
||||
|
||||
_takeUpSpace(delta) {
|
||||
if (this[remainingSize] - delta < 0) {
|
||||
_updateSize(delta) {
|
||||
if (this._size + delta > this._sizeLimit) {
|
||||
throw new Error('something about quota exceeded, browsers are not consistent here');
|
||||
}
|
||||
|
||||
this[remainingSize] -= delta;
|
||||
this._size += delta;
|
||||
}
|
||||
}
|
||||
|
|
|
@ -152,8 +152,8 @@ describe('Chrome API :: apps', function () {
|
|||
expect(chrome.getLastUrlFor('app')).to.equal(null);
|
||||
chrome.setLastUrlFor('app', 'url');
|
||||
expect(chrome.getLastUrlFor('app')).to.equal('url');
|
||||
expect(store._getKeys().length).to.equal(1);
|
||||
expect(store._getValues().shift()).to.equal('url');
|
||||
expect(store.getStubbedKeys().length).to.equal(1);
|
||||
expect(store.getStubbedValues().shift()).to.equal('url');
|
||||
});
|
||||
});
|
||||
});
|
||||
|
|
|
@ -11,9 +11,10 @@ import {
|
|||
unhashQueryString,
|
||||
} from 'ui/state_management/state_hashing';
|
||||
import {
|
||||
createStorageHash,
|
||||
HashingStore,
|
||||
createStateHash,
|
||||
isStateHash,
|
||||
} from 'ui/state_management/state_storage';
|
||||
import HashedItemStore from 'ui/state_management/state_storage/hashed_item_store';
|
||||
import StubBrowserStorage from 'test_utils/stub_browser_storage';
|
||||
import EventsProvider from 'ui/events';
|
||||
|
||||
|
@ -37,14 +38,14 @@ describe('State Management', function () {
|
|||
const { param, initial, storeInHash } = (opts || {});
|
||||
sinon.stub(config, 'get').withArgs('state:storeInSessionStorage').returns(!!storeInHash);
|
||||
const store = new StubBrowserStorage();
|
||||
const hashingStore = new HashingStore(createStorageHash, store);
|
||||
const state = new State(param, initial, { hashingStore, notifier });
|
||||
const hashedItemStore = new HashedItemStore(store);
|
||||
const state = new State(param, initial, hashedItemStore, notifier);
|
||||
|
||||
const getUnhashedSearch = state => {
|
||||
return unhashQueryString($location.search(), [ state ]);
|
||||
};
|
||||
|
||||
return { notifier, store, hashingStore, state, getUnhashedSearch };
|
||||
return { notifier, store, hashedItemStore, state, getUnhashedSearch };
|
||||
};
|
||||
}));
|
||||
|
||||
|
@ -191,18 +192,18 @@ describe('State Management', function () {
|
|||
});
|
||||
|
||||
describe('Hashing', () => {
|
||||
it('stores state values in a hashingStore, writing the hash to the url', () => {
|
||||
const { state, hashingStore } = setup({ storeInHash: true });
|
||||
it('stores state values in a hashedItemStore, writing the hash to the url', () => {
|
||||
const { state, hashedItemStore } = setup({ storeInHash: true });
|
||||
state.foo = 'bar';
|
||||
state.save();
|
||||
const urlVal = $location.search()[state.getQueryParamName()];
|
||||
|
||||
expect(hashingStore.isHash(urlVal)).to.be(true);
|
||||
expect(hashingStore.getItemAtHash(urlVal)).to.eql({ foo: 'bar' });
|
||||
expect(isStateHash(urlVal)).to.be(true);
|
||||
expect(hashedItemStore.getItem(urlVal)).to.eql(JSON.stringify({ foo: 'bar' }));
|
||||
});
|
||||
|
||||
it('should replace rison in the URL with a hash', () => {
|
||||
const { state, hashingStore } = setup({ storeInHash: true });
|
||||
const { state, hashedItemStore } = setup({ storeInHash: true });
|
||||
const obj = { foo: { bar: 'baz' } };
|
||||
const rison = encodeRison(obj);
|
||||
|
||||
|
@ -211,15 +212,15 @@ describe('State Management', function () {
|
|||
|
||||
const urlVal = $location.search()._s;
|
||||
expect(urlVal).to.not.be(rison);
|
||||
expect(hashingStore.isHash(urlVal)).to.be(true);
|
||||
expect(hashingStore.getItemAtHash(urlVal)).to.eql(obj);
|
||||
expect(isStateHash(urlVal)).to.be(true);
|
||||
expect(hashedItemStore.getItem(urlVal)).to.eql(JSON.stringify(obj));
|
||||
});
|
||||
|
||||
context('error handling', () => {
|
||||
it('notifies the user when a hash value does not map to a stored value', () => {
|
||||
const { state, hashingStore, notifier } = setup({ storeInHash: true });
|
||||
const { state, hashedItemStore, notifier } = setup({ storeInHash: true });
|
||||
const search = $location.search();
|
||||
const badHash = hashingStore._getShortHash('{"a": "b"}');
|
||||
const badHash = createStateHash('{"a": "b"}', () => null);
|
||||
|
||||
search[state.getQueryParamName()] = badHash;
|
||||
$location.search(search);
|
||||
|
@ -230,10 +231,10 @@ describe('State Management', function () {
|
|||
expect(notifier._notifs[0].content).to.match(/use the share functionality/i);
|
||||
});
|
||||
|
||||
it('presents fatal error linking to github when hashingStore.hashAndSetItem fails', () => {
|
||||
const { state, hashingStore, notifier } = setup({ storeInHash: true });
|
||||
it('presents fatal error linking to github when setting item fails', () => {
|
||||
const { state, hashedItemStore, notifier } = setup({ storeInHash: true });
|
||||
const fatalStub = sinon.stub(notifier, 'fatal').throws();
|
||||
sinon.stub(hashingStore, 'hashAndSetItem').throws();
|
||||
sinon.stub(hashedItemStore, 'setItem').returns(false);
|
||||
|
||||
expect(() => {
|
||||
state.toQueryParam();
|
||||
|
|
|
@ -8,58 +8,52 @@ import Notifier from 'ui/notify/notifier';
|
|||
import KbnUrlProvider from 'ui/url';
|
||||
|
||||
import {
|
||||
createStorageHash,
|
||||
HashingStore,
|
||||
LazyLruStore,
|
||||
createStateHash,
|
||||
hashedItemStoreSingleton,
|
||||
isStateHash,
|
||||
} from './state_storage';
|
||||
|
||||
const MAX_BROWSER_HISTORY = 50;
|
||||
|
||||
export default function StateProvider(Private, $rootScope, $location, config) {
|
||||
const Events = Private(EventsProvider);
|
||||
|
||||
_.class(State).inherits(Events);
|
||||
function State(urlParam, defaults, { hashingStore, notifier } = {}) {
|
||||
function State(
|
||||
urlParam,
|
||||
defaults,
|
||||
hashedItemStore = hashedItemStoreSingleton,
|
||||
notifier = new Notifier()
|
||||
) {
|
||||
State.Super.call(this);
|
||||
|
||||
this.setDefaults(defaults);
|
||||
this._urlParam = urlParam || '_s';
|
||||
this._notifier = notifier || new Notifier();
|
||||
|
||||
this._hashingStore = hashingStore || (() => {
|
||||
const lazyLruStore = new LazyLruStore({
|
||||
id: `${this._urlParam}:state`,
|
||||
store: window.sessionStorage,
|
||||
maxItems: MAX_BROWSER_HISTORY
|
||||
});
|
||||
|
||||
return new HashingStore(createStorageHash, lazyLruStore);
|
||||
})();
|
||||
this._notifier = notifier;
|
||||
this._hashedItemStore = hashedItemStore;
|
||||
|
||||
// When the URL updates we need to fetch the values from the URL
|
||||
this._cleanUpListeners = _.partial(_.callEach, [
|
||||
// partial route update, no app reload
|
||||
$rootScope.$on('$routeUpdate', () => {
|
||||
self.fetch();
|
||||
this.fetch();
|
||||
}),
|
||||
|
||||
// beginning of full route update, new app will be initialized before
|
||||
// $routeChangeSuccess or $routeChangeError
|
||||
$rootScope.$on('$routeChangeStart', () => {
|
||||
if (!self._persistAcrossApps) {
|
||||
self.destroy();
|
||||
if (!this._persistAcrossApps) {
|
||||
this.destroy();
|
||||
}
|
||||
}),
|
||||
|
||||
$rootScope.$on('$routeChangeSuccess', () => {
|
||||
if (self._persistAcrossApps) {
|
||||
self.fetch();
|
||||
if (this._persistAcrossApps) {
|
||||
this.fetch();
|
||||
}
|
||||
})
|
||||
]);
|
||||
|
||||
// Initialize the State with fetch
|
||||
self.fetch();
|
||||
this.fetch();
|
||||
}
|
||||
|
||||
State.prototype._readFromURL = function () {
|
||||
|
@ -70,7 +64,7 @@ export default function StateProvider(Private, $rootScope, $location, config) {
|
|||
return null;
|
||||
}
|
||||
|
||||
if (this._hashingStore.isHash(urlVal)) {
|
||||
if (isStateHash(urlVal)) {
|
||||
return this._parseQueryParamValue(urlVal);
|
||||
}
|
||||
|
||||
|
@ -197,16 +191,17 @@ export default function StateProvider(Private, $rootScope, $location, config) {
|
|||
* @return {any} - the stored value, or null if hash does not resolve
|
||||
*/
|
||||
State.prototype._parseQueryParamValue = function (queryParam) {
|
||||
if (!this._hashingStore.isHash(queryParam)) {
|
||||
if (!isStateHash(queryParam)) {
|
||||
return rison.decode(queryParam);
|
||||
}
|
||||
|
||||
const stored = this._hashingStore.getItemAtHash(queryParam);
|
||||
if (stored === null) {
|
||||
const json = this._hashedItemStore.getItem(queryParam);
|
||||
if (json === null) {
|
||||
this._notifier.error('Unable to completely restore the URL, be sure to use the share functionality.');
|
||||
}
|
||||
|
||||
return stored;
|
||||
const state = JSON.parse(json);
|
||||
return state;
|
||||
};
|
||||
|
||||
/**
|
||||
|
@ -230,23 +225,29 @@ export default function StateProvider(Private, $rootScope, $location, config) {
|
|||
return rison.encode(state);
|
||||
}
|
||||
|
||||
try {
|
||||
const hash = this._hashingStore.hashAndSetItem(state);
|
||||
// We need to strip out Angular-specific properties.
|
||||
const json = angular.toJson(state);
|
||||
const hash = createStateHash(json, hash => {
|
||||
return this._hashedItemStore.getItem(hash);
|
||||
});
|
||||
const isItemSet = this._hashedItemStore.setItem(hash, json);
|
||||
|
||||
if (isItemSet) {
|
||||
return hash;
|
||||
} catch (err) {
|
||||
this._notifier.log('Unable to create hash of State due to error: ' + (state.stack || state.message));
|
||||
this._notifier.fatal(
|
||||
new Error(
|
||||
'Kibana is unable to store history items in your session ' +
|
||||
'because it is full and there don\'t seem to be items any items safe ' +
|
||||
'to delete.\n' +
|
||||
'\n' +
|
||||
'This can usually be fixed by moving to a fresh tab, but could ' +
|
||||
'be caused by a larger issue. If you are seeing this message regularly, ' +
|
||||
'please file an issue at https://github.com/elastic/kibana/issues.'
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
// If we ran out of space trying to persist the state, notify the user.
|
||||
this._notifier.fatal(
|
||||
new Error(
|
||||
'Kibana is unable to store history items in your session ' +
|
||||
'because it is full and there don\'t seem to be items any items safe ' +
|
||||
'to delete.\n' +
|
||||
'\n' +
|
||||
'This can usually be fixed by moving to a fresh tab, but could ' +
|
||||
'be caused by a larger issue. If you are seeing this message regularly, ' +
|
||||
'please file an issue at https://github.com/elastic/kibana/issues.'
|
||||
)
|
||||
);
|
||||
};
|
||||
|
||||
/**
|
||||
|
|
|
@ -0,0 +1,334 @@
|
|||
import expect from 'expect.js';
|
||||
import sinon from 'sinon';
|
||||
import bluebird from 'bluebird';
|
||||
|
||||
import StubBrowserStorage from 'test_utils/stub_browser_storage';
|
||||
import HashedItemStore from '../hashed_item_store';
|
||||
|
||||
describe('hashedItemStore', () => {
|
||||
describe('interface', () => {
|
||||
describe('#constructor', () => {
|
||||
it('retrieves persisted index from sessionStorage', () => {
|
||||
const sessionStorage = new StubBrowserStorage();
|
||||
sinon.spy(sessionStorage, 'getItem');
|
||||
|
||||
const hashedItemStore = new HashedItemStore(sessionStorage);
|
||||
sinon.assert.calledWith(sessionStorage.getItem, HashedItemStore.PERSISTED_INDEX_KEY);
|
||||
sessionStorage.getItem.restore();
|
||||
});
|
||||
|
||||
it('sorts indexed items by touched property', () => {
|
||||
const a = {
|
||||
hash: 'a',
|
||||
touched: 0,
|
||||
};
|
||||
const b = {
|
||||
hash: 'b',
|
||||
touched: 2,
|
||||
};
|
||||
const c = {
|
||||
hash: 'c',
|
||||
touched: 1,
|
||||
};
|
||||
const sessionStorage = new StubBrowserStorage();
|
||||
if (!HashedItemStore.PERSISTED_INDEX_KEY) {
|
||||
// This is very brittle and depends upon HashedItemStore implementation details,
|
||||
// so let's protect ourselves from accidentally breaking this test.
|
||||
throw new Error('Missing HashedItemStore.PERSISTED_INDEX_KEY');
|
||||
}
|
||||
sessionStorage.setItem(HashedItemStore.PERSISTED_INDEX_KEY, JSON.stringify({a, b, c}));
|
||||
|
||||
const hashedItemStore = new HashedItemStore(sessionStorage);
|
||||
expect(hashedItemStore._indexedItems).to.eql([a, c, b]);
|
||||
});
|
||||
});
|
||||
|
||||
describe('#setItem', () => {
|
||||
describe('if the item exists in sessionStorage', () => {
|
||||
let sessionStorage;
|
||||
let hashedItemStore;
|
||||
const hash = 'a';
|
||||
const item = JSON.stringify({});
|
||||
|
||||
beforeEach(() => {
|
||||
sessionStorage = new StubBrowserStorage();
|
||||
hashedItemStore = new HashedItemStore(sessionStorage);
|
||||
});
|
||||
|
||||
it('persists the item in sessionStorage', () => {
|
||||
hashedItemStore.setItem(hash, item);
|
||||
expect(sessionStorage.getItem(hash)).to.equal(item);
|
||||
});
|
||||
|
||||
it('returns true', () => {
|
||||
const result = hashedItemStore.setItem(hash, item);
|
||||
expect(result).to.equal(true);
|
||||
});
|
||||
});
|
||||
|
||||
describe(`if the item doesn't exist in sessionStorage`, () => {
|
||||
describe(`if there's storage space`, () => {
|
||||
let sessionStorage;
|
||||
let hashedItemStore;
|
||||
const hash = 'a';
|
||||
const item = JSON.stringify({});
|
||||
|
||||
beforeEach(() => {
|
||||
sessionStorage = new StubBrowserStorage();
|
||||
hashedItemStore = new HashedItemStore(sessionStorage);
|
||||
});
|
||||
|
||||
it('persists the item in sessionStorage', () => {
|
||||
hashedItemStore.setItem(hash, item);
|
||||
expect(sessionStorage.getItem(hash)).to.equal(item);
|
||||
});
|
||||
|
||||
it('returns true', () => {
|
||||
const result = hashedItemStore.setItem(hash, item);
|
||||
expect(result).to.equal(true);
|
||||
});
|
||||
});
|
||||
|
||||
describe(`if there isn't storage space`, () => {
|
||||
let fakeTimer;
|
||||
let sessionStorage;
|
||||
let hashedItemStore;
|
||||
let storageSizeLimit;
|
||||
const hash = 'a';
|
||||
const item = JSON.stringify({});
|
||||
|
||||
function setItemLater(hash, item) {
|
||||
// Move time forward, so this item will be "touched" most recently.
|
||||
fakeTimer.tick(1);
|
||||
return hashedItemStore.setItem(hash, item);
|
||||
}
|
||||
|
||||
beforeEach(() => {
|
||||
// Control time.
|
||||
fakeTimer = sinon.useFakeTimers(Date.now());
|
||||
|
||||
sessionStorage = new StubBrowserStorage();
|
||||
hashedItemStore = new HashedItemStore(sessionStorage);
|
||||
|
||||
// Add some items that will be removed.
|
||||
setItemLater('b', item);
|
||||
|
||||
// Do this a little later so that this item is newer.
|
||||
setItemLater('c', item);
|
||||
|
||||
// Cap the storage at its current size.
|
||||
storageSizeLimit = sessionStorage.getStubbedSize();
|
||||
sessionStorage.setStubbedSizeLimit(storageSizeLimit);
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
// Stop controlling time.
|
||||
fakeTimer.restore();
|
||||
});
|
||||
|
||||
describe('and the item will fit', () => {
|
||||
it('removes older items until the new item fits', () => {
|
||||
setItemLater(hash, item);
|
||||
expect(sessionStorage.getItem('b')).to.equal(null);
|
||||
expect(sessionStorage.getItem('c')).to.equal(item);
|
||||
});
|
||||
|
||||
it('persists the item in sessionStorage', () => {
|
||||
setItemLater(hash, item);
|
||||
expect(sessionStorage.getItem(hash)).to.equal(item);
|
||||
});
|
||||
|
||||
it('returns true', () => {
|
||||
const result = setItemLater(hash, item);
|
||||
expect(result).to.equal(true);
|
||||
});
|
||||
});
|
||||
|
||||
describe(`and the item won't fit`, () => {
|
||||
let itemTooBigToFit;
|
||||
|
||||
beforeEach(() => {
|
||||
// Make sure the item is longer than the storage size limit.
|
||||
itemTooBigToFit = '';
|
||||
const length = storageSizeLimit + 1;
|
||||
for (let i = 0; i < length; i++) {
|
||||
itemTooBigToFit += 'a';
|
||||
}
|
||||
});
|
||||
|
||||
it('removes all items', () => {
|
||||
setItemLater(hash, itemTooBigToFit);
|
||||
expect(sessionStorage.getItem('b')).to.equal(null);
|
||||
expect(sessionStorage.getItem('c')).to.equal(null);
|
||||
});
|
||||
|
||||
it(`doesn't persist the item in sessionStorage`, () => {
|
||||
setItemLater(hash, itemTooBigToFit);
|
||||
expect(sessionStorage.getItem(hash)).to.equal(null);
|
||||
});
|
||||
|
||||
it('returns false', () => {
|
||||
const result = setItemLater(hash, itemTooBigToFit);
|
||||
expect(result).to.equal(false);
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('#getItem', () => {
|
||||
describe('if the item exists in sessionStorage', () => {
|
||||
let fakeTimer;
|
||||
let sessionStorage;
|
||||
let hashedItemStore;
|
||||
|
||||
function setItemLater(hash, item) {
|
||||
// Move time forward, so this item will be "touched" most recently.
|
||||
fakeTimer.tick(1);
|
||||
return hashedItemStore.setItem(hash, item);
|
||||
}
|
||||
|
||||
function getItemLater(hash) {
|
||||
// Move time forward, so this item will be "touched" most recently.
|
||||
fakeTimer.tick(1);
|
||||
return hashedItemStore.getItem(hash);
|
||||
}
|
||||
|
||||
beforeEach(() => {
|
||||
// Control time.
|
||||
fakeTimer = sinon.useFakeTimers(Date.now());
|
||||
|
||||
sessionStorage = new StubBrowserStorage();
|
||||
hashedItemStore = new HashedItemStore(sessionStorage);
|
||||
hashedItemStore.setItem('1', 'a');
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
// Stop controlling time.
|
||||
fakeTimer.restore();
|
||||
});
|
||||
|
||||
it('returns the item', () => {
|
||||
const retrievedItem = hashedItemStore.getItem('1');
|
||||
expect(retrievedItem).to.be('a');
|
||||
});
|
||||
|
||||
it('prevents the item from being first to be removed when freeing up storage spage', () => {
|
||||
// Do this a little later so that this item is newer.
|
||||
setItemLater('2', 'b');
|
||||
|
||||
// Wait a bit, then retrieve/touch the first item, making *it* newer, and 2 as the oldest.
|
||||
getItemLater('1');
|
||||
|
||||
// Cap the storage at its current size.
|
||||
const storageSizeLimit = sessionStorage.getStubbedSize();
|
||||
sessionStorage.setStubbedSizeLimit(storageSizeLimit);
|
||||
|
||||
// Add a new item, causing the second item to be removed, but not the first.
|
||||
setItemLater('3', 'c');
|
||||
expect(hashedItemStore.getItem('2')).to.equal(null);
|
||||
expect(hashedItemStore.getItem('1')).to.equal('a');
|
||||
});
|
||||
});
|
||||
|
||||
describe(`if the item doesn't exist in sessionStorage`, () => {
|
||||
let sessionStorage;
|
||||
let hashedItemStore;
|
||||
const hash = 'a';
|
||||
|
||||
beforeEach(() => {
|
||||
sessionStorage = new StubBrowserStorage();
|
||||
hashedItemStore = new HashedItemStore(sessionStorage);
|
||||
});
|
||||
|
||||
it('returns null', () => {
|
||||
const retrievedItem = hashedItemStore.getItem(hash);
|
||||
expect(retrievedItem).to.be(null);
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('behavior', () => {
|
||||
let fakeTimer;
|
||||
let sessionStorage;
|
||||
let hashedItemStore;
|
||||
|
||||
function setItemLater(hash, item) {
|
||||
// Move time forward, so this item will be "touched" most recently.
|
||||
fakeTimer.tick(1);
|
||||
return hashedItemStore.setItem(hash, item);
|
||||
}
|
||||
|
||||
function getItemLater(hash) {
|
||||
// Move time forward, so this item will be "touched" most recently.
|
||||
fakeTimer.tick(1);
|
||||
return hashedItemStore.getItem(hash);
|
||||
}
|
||||
|
||||
beforeEach(() => {
|
||||
// Control time.
|
||||
fakeTimer = sinon.useFakeTimers(Date.now());
|
||||
|
||||
sessionStorage = new StubBrowserStorage();
|
||||
hashedItemStore = new HashedItemStore(sessionStorage);
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
// Stop controlling time.
|
||||
fakeTimer.restore();
|
||||
});
|
||||
|
||||
it('orders items to be removed based on when they were last retrieved', () => {
|
||||
setItemLater('1', 'a');
|
||||
setItemLater('2', 'b');
|
||||
setItemLater('3', 'c');
|
||||
setItemLater('4', 'd');
|
||||
|
||||
// Cap the storage at its current size.
|
||||
const storageSizeLimit = sessionStorage.getStubbedSize();
|
||||
sessionStorage.setStubbedSizeLimit(storageSizeLimit);
|
||||
|
||||
// Expect items to be removed in order: 1, 3, 2, 4.
|
||||
getItemLater('1');
|
||||
getItemLater('3');
|
||||
getItemLater('2');
|
||||
getItemLater('4');
|
||||
|
||||
setItemLater('5', 'e');
|
||||
expect(hashedItemStore.getItem('1')).to.equal(null);
|
||||
expect(hashedItemStore.getItem('3')).to.equal('c');
|
||||
expect(hashedItemStore.getItem('2')).to.equal('b');
|
||||
expect(hashedItemStore.getItem('4')).to.equal('d');
|
||||
expect(hashedItemStore.getItem('5')).to.equal('e');
|
||||
|
||||
setItemLater('6', 'f');
|
||||
expect(hashedItemStore.getItem('3')).to.equal(null);
|
||||
expect(hashedItemStore.getItem('2')).to.equal('b');
|
||||
expect(hashedItemStore.getItem('4')).to.equal('d');
|
||||
expect(hashedItemStore.getItem('5')).to.equal('e');
|
||||
expect(hashedItemStore.getItem('6')).to.equal('f');
|
||||
|
||||
setItemLater('7', 'g');
|
||||
expect(hashedItemStore.getItem('2')).to.equal(null);
|
||||
expect(hashedItemStore.getItem('4')).to.equal('d');
|
||||
expect(hashedItemStore.getItem('5')).to.equal('e');
|
||||
expect(hashedItemStore.getItem('6')).to.equal('f');
|
||||
expect(hashedItemStore.getItem('7')).to.equal('g');
|
||||
|
||||
setItemLater('8', 'h');
|
||||
expect(hashedItemStore.getItem('4')).to.equal(null);
|
||||
expect(hashedItemStore.getItem('5')).to.equal('e');
|
||||
expect(hashedItemStore.getItem('6')).to.equal('f');
|
||||
expect(hashedItemStore.getItem('7')).to.equal('g');
|
||||
expect(hashedItemStore.getItem('8')).to.equal('h');
|
||||
|
||||
setItemLater('9', 'i');
|
||||
expect(hashedItemStore.getItem('5')).to.equal(null);
|
||||
expect(hashedItemStore.getItem('6')).to.equal('f');
|
||||
expect(hashedItemStore.getItem('7')).to.equal('g');
|
||||
expect(hashedItemStore.getItem('8')).to.equal('h');
|
||||
expect(hashedItemStore.getItem('9')).to.equal('i');
|
||||
});
|
||||
});
|
||||
});
|
|
@ -1,126 +0,0 @@
|
|||
import expect from 'expect.js';
|
||||
import sinon from 'sinon';
|
||||
import { encode as encodeRison } from 'rison-node';
|
||||
import StubBrowserStorage from 'test_utils/stub_browser_storage';
|
||||
import {
|
||||
createStorageHash,
|
||||
HashingStore,
|
||||
} from 'ui/state_management/state_storage';
|
||||
|
||||
const setup = createStorageHash => {
|
||||
const store = new StubBrowserStorage();
|
||||
const hashingStore = new HashingStore(createStorageHash, store);
|
||||
return { store, hashingStore };
|
||||
};
|
||||
|
||||
describe('Hashing Store', () => {
|
||||
describe('#hashAndSetItem', () => {
|
||||
it('adds a value to the store and returns its hash', () => {
|
||||
const { hashingStore, store } = setup(createStorageHash);
|
||||
const val = { foo: 'bar' };
|
||||
const hash = hashingStore.hashAndSetItem(val);
|
||||
expect(hash).to.be.a('string');
|
||||
expect(hash).to.be.ok();
|
||||
expect(store).to.have.length(1);
|
||||
});
|
||||
|
||||
it('json encodes the values it stores', () => {
|
||||
const { hashingStore, store } = setup(createStorageHash);
|
||||
const val = { toJSON() { return 1; } };
|
||||
const hash = hashingStore.hashAndSetItem(val);
|
||||
expect(hashingStore.getItemAtHash(hash)).to.eql(1);
|
||||
});
|
||||
|
||||
it('addresses values with a short hash', () => {
|
||||
const val = { foo: 'bar' };
|
||||
const longHash = 'longlonglonglonglonglonglonglonglonglonghash';
|
||||
const { hashingStore } = setup(() => longHash);
|
||||
|
||||
const hash = hashingStore.hashAndSetItem(val);
|
||||
expect(hash.length < longHash.length).to.be.ok();
|
||||
});
|
||||
|
||||
it('addresses values with a slightly longer hash when short hashes collide', () => {
|
||||
const fixtures = [
|
||||
{
|
||||
hash: '1234567890-1',
|
||||
val: { foo: 'bar' }
|
||||
},
|
||||
{
|
||||
hash: '1234567890-2',
|
||||
val: { foo: 'baz' }
|
||||
},
|
||||
{
|
||||
hash: '1234567890-3',
|
||||
val: { foo: 'boo' }
|
||||
}
|
||||
];
|
||||
|
||||
const matchVal = json => f => JSON.stringify(f.val) === json;
|
||||
const { hashingStore } = setup(val => {
|
||||
const fixture = fixtures.find(matchVal(val));
|
||||
return fixture.hash;
|
||||
});
|
||||
|
||||
const hash1 = hashingStore.hashAndSetItem(fixtures[0].val);
|
||||
const hash2 = hashingStore.hashAndSetItem(fixtures[1].val);
|
||||
const hash3 = hashingStore.hashAndSetItem(fixtures[2].val);
|
||||
|
||||
expect(hash3).to.have.length(hash2.length + 1);
|
||||
expect(hash2).to.have.length(hash1.length + 1);
|
||||
});
|
||||
|
||||
it('bubbles up the error if the store fails to hashAndSetItem', () => {
|
||||
const { store, hashingStore } = setup(createStorageHash);
|
||||
const err = new Error();
|
||||
sinon.stub(store, 'setItem').throws(err);
|
||||
expect(() => {
|
||||
hashingStore.hashAndSetItem({});
|
||||
}).to.throwError(e => expect(e).to.be(err));
|
||||
});
|
||||
});
|
||||
|
||||
describe('#getItemAtHash', () => {
|
||||
it('reads a value from the store by its hash', () => {
|
||||
const { hashingStore } = setup(createStorageHash);
|
||||
const val = { foo: 'bar' };
|
||||
const hash = hashingStore.hashAndSetItem(val);
|
||||
expect(hashingStore.getItemAtHash(hash)).to.eql(val);
|
||||
});
|
||||
|
||||
it('returns null when the value is not in the store', () => {
|
||||
const { hashingStore } = setup(createStorageHash);
|
||||
const val = { foo: 'bar' };
|
||||
const hash = hashingStore.hashAndSetItem(val);
|
||||
expect(hashingStore.getItemAtHash(`${hash} break`)).to.be(null);
|
||||
});
|
||||
});
|
||||
|
||||
describe('#isHash', () => {
  it('can identify values that look like hashes', () => {
    const { hashingStore } = setup(createStorageHash);
    const hash = hashingStore.hashAndSetItem({ foo: 'bar' });
    expect(hashingStore.isHash(hash)).to.be(true);
  });

  describe('rison', () => {
    // RISON-encoded state lives in the same URL slots as hashes, so it must
    // never be mistaken for one.
    const tests = [
      ['object', { foo: 'bar' }],
      ['number', 1],
      ['number', 1000],
      ['number', Math.round(Math.random() * 10000000)],
      ['string', 'this is a string'],
      ['array', [1,2,3]],
    ];

    tests.forEach(([type, val]) => {
      it(`is not fooled by rison ${type} "${val}"`, () => {
        const { hashingStore } = setup(createStorageHash);
        const rison = encodeRison(val);
        expect(hashingStore.isHash(rison)).to.be(false);
      });
    });
  });
});
|
||||
});
|
|
@ -1,291 +0,0 @@
|
|||
import expect from 'expect.js';
|
||||
import sinon from 'sinon';
|
||||
import { times, sum, padLeft } from 'lodash';
|
||||
|
||||
import StubBrowserStorage from 'test_utils/stub_browser_storage';
|
||||
import { LazyLruStore } from '..';
|
||||
|
||||
// Build a LazyLruStore over a stubbed browser storage; callers may override
// any of the store's tuning options.
const setup = ({
  id = 'testLru',
  store = new StubBrowserStorage(),
  maxItems,
  maxSetAttempts,
  idealClearRatio,
  maxIdealClearPercent,
} = {}) => {
  const lru = new LazyLruStore({
    id,
    store,
    maxItems,
    maxSetAttempts,
    idealClearRatio,
    maxIdealClearPercent,
  });

  return { lru, store };
};
|
||||
|
||||
describe('LazyLruStore', () => {
|
||||
describe('#getItem()', () => {
  it('returns null when item not found', () => {
    const { lru } = setup();
    const missing = lru.getItem('item1');
    expect(missing).to.be(null);
  });

  it('returns stored value when item found', () => {
    const { lru } = setup();
    lru.setItem('item1', '1');
    const found = lru.getItem('item1');
    expect(found).to.be('1');
  });
});
|
||||
|
||||
describe('#setItem()', () => {
  it('stores the item in the underlying store', () => {
    const { lru, store } = setup();
    expect(store).to.have.length(0);
    lru.setItem('item1', '1');
    expect(store).to.have.length(1);
  });

  it('makes space for new item when necessary', () => {
    const { lru, store } = setup({ idealClearRatio: 1 });
    // leave room for exactly one entry beyond the per-item overhead
    store._setSizeLimit(lru.getStorageOverhead() + 6);
    lru.setItem('item1', '1');
    expect(store).to.have.length(1);
    lru.setItem('item2', '2');
    expect(store).to.have.length(1);

    // the older entry was evicted to make room for the newer one
    expect(lru.getItem('item1')).to.be(null);
    expect(lru.getItem('item2')).to.be('2');
  });

  it('overwrites existing values', () => {
    const { lru, store } = setup();
    lru.setItem('item1', '1');
    expect(store).to.have.length(1);
    lru.setItem('item1', '2');
    expect(store).to.have.length(1);
    expect(lru.getItem('item1')).to.be('2');
  });

  it('stores items as strings', () => {
    const { lru } = setup();
    lru.setItem('item1', 1);
    expect(lru.getItem('item1')).to.be('1');
  });

  it('bubbles up the error when unable to clear the necessary space', () => {
    const { lru, store } = setup();
    store._setSizeLimit(lru.getStorageOverhead() + 2);
    lru.setItem('1', '1');
    // with removal disabled the store can never free space, so the quota
    // error from the underlying store escapes to the caller
    sinon.stub(store, 'removeItem');
    const setWhenFull = () => lru.setItem('2', '2');
    expect(setWhenFull).to.throwError(/quota/);
  });
});
|
||||
|
||||
describe('#removeItem()', () => {
  it('removes items from the underlying store', () => {
    const { lru, store } = setup();
    lru.setItem('item1', '1');
    expect(store).to.have.length(1);

    lru.removeItem('item1');
    expect(store).to.have.length(0);
    expect(lru.getItem('item1')).to.be(null);
  });

  it('ignores unknown items', () => {
    const { lru, store } = setup();
    expect(store).to.have.length(0);
    // removing a key that was never set is a silent no-op
    const removeMissing = () => lru.removeItem('item1');
    expect(removeMissing).to.not.throwError();
    expect(store).to.have.length(0);
  });
});
|
||||
|
||||
describe('#getStorageOverhead()', () => {
  it('returns the number of bytes added to each storage item, used for testing', () => {
    const { store } = setup();
    // a one-character-longer id adds exactly one byte of overhead per item
    const shortId = new LazyLruStore({ id: '1', store });
    const longId = new LazyLruStore({ id: '11', store });
    expect(shortId.getStorageOverhead()).to.be(longId.getStorageOverhead() - 1);
  });
});
|
||||
|
||||
describe('space management', () => {
  // Fake timers make item "age" deterministic: eviction order is driven by
  // the timestamp recorded when each item was last set.
  let clock;
  beforeEach(() => {
    clock = sinon.useFakeTimers(Date.now());
  });

  afterEach(() => {
    clock.restore();
  });

  it('tries to clear space if setItem fails because the quota was exceeded', () => {
    const { lru, store } = setup();
    const itemSize = lru.getStorageOverhead() + 10; // each item key length + val length is 10

    // room for exactly three items
    store._setSizeLimit(itemSize * 3);

    lru.setItem('item1', 'item1');
    clock.tick(1); // move clock forward so removal based on time is predictable
    lru.setItem('item2', 'item2');
    clock.tick(1);
    lru.setItem('item3', 'item3');
    clock.tick(1);
    lru.setItem('item4', 'item4');
    clock.tick(1);
    lru.setItem('item5', 'item5');
    clock.tick(1);

    // the two oldest items were evicted to make room for items 4 and 5
    expect(store).to.have.length(3);
    expect(lru.getItem('item1')).to.be(null);
    expect(lru.getItem('item2')).to.be(null);
    expect(lru.getItem('item3')).to.be('item3');
    expect(lru.getItem('item4')).to.be('item4');
    expect(lru.getItem('item5')).to.be('item5');
  });

  context('when small items are being written to a large existing collection', () => {
    context('with idealClearRatio = 6', () => {
      it('clears 6 times the amount of space necessary', () => {
        const { lru, store } = setup({ idealClearRatio: 6 });

        const overhead = lru.getStorageOverhead();
        const getItemSize = i => overhead + `${i.key}${i.value}`.length;

        const items = times(100, i => {
          // pad n so that 1 and 100 take up equal space in the store
          const n = padLeft(i + 1, 3, '0');
          return { key: `key${n}`, value: `value${n}` };
        });
        const lastItem = items[items.length - 1];

        // set the size limit so that the last item causes a cleanup
        store._setSizeLimit(sum(items.map(getItemSize)) - getItemSize(lastItem));

        for (const i of items) {
          lru.setItem(i.key, i.value);
          clock.tick(1); // move clock forward so removal based on time is predictable
        }

        // the current ratio is 6:1, so when the last item fails
        // to set, 6 items are cleared to make space for it
        expect(store).to.have.length(94);
        expect(lru.getItem('key001')).to.be(null);
        expect(lru.getItem('key002')).to.be(null);
        expect(lru.getItem('key003')).to.be(null);
        expect(lru.getItem('key004')).to.be(null);
        expect(lru.getItem('key005')).to.be(null);
        expect(lru.getItem('key006')).to.be(null);
        expect(lru.getItem('key007')).to.be('value007');
      });
    });

    context('with idealClearRatio = 100 and maxIdealClearPercent = 0.1', () => {
      it('clears 10% of the store', () => {
        const { lru, store } = setup({ idealClearRatio: 100, maxIdealClearPercent: 0.1 });

        const overhead = lru.getStorageOverhead();
        const getItemSize = i => overhead + `${i.key}${i.value}`.length;

        const items = times(100, i => {
          // pad n so that 1 and 100 take up equal space in the store
          const n = padLeft(i + 1, 3, '0');
          return { key: `key${n}`, value: `value${n}` };
        });
        const lastItem = items[items.length - 1];

        // set the size limit so that the last item causes a cleanup
        store._setSizeLimit(sum(items.map(getItemSize)) - getItemSize(lastItem));

        for (const i of items) {
          lru.setItem(i.key, i.value);
          clock.tick(1); // move clock forward so removal based on time is predictable
        }

        // with the ratio set to 100:1 the store will try to clear
        // 100x the stored values, but that could be the entire store
        // so it is limited by the maxIdealClearPercent (10% here)
        // so the store should now contain values 11-100
        expect(store).to.have.length(90);
        expect(lru.getItem('key001')).to.be(null);
        expect(lru.getItem('key002')).to.be(null);
        expect(lru.getItem('key003')).to.be(null);
        expect(lru.getItem('key004')).to.be(null);
        expect(lru.getItem('key005')).to.be(null);
        expect(lru.getItem('key006')).to.be(null);
        expect(lru.getItem('key007')).to.be(null);
        expect(lru.getItem('key008')).to.be(null);
        expect(lru.getItem('key009')).to.be(null);
        expect(lru.getItem('key010')).to.be(null);
        expect(lru.getItem('key011')).to.be('value011');
        expect(lru.getItem('key012')).to.be('value012');
        expect(lru.getItem('key100')).to.be('value100');
      });
    });
  });
});
|
||||
|
||||
describe('maxSetAttempts setting', () => {
  it('must be >= 1', () => {
    expect(() => setup({ maxSetAttempts: 0 })).to.throwError(TypeError);
    expect(() => setup({ maxSetAttempts: -1 })).to.throwError(TypeError);
    expect(() => setup({ maxSetAttempts: 0.9 })).to.throwError(TypeError);
    expect(() => setup({ maxSetAttempts: 1 })).to.not.throwError(TypeError);
  });

  context('= 1', () => {
    it('will cause sets to a full storage to throw', () => {
      const { lru, store } = setup({ maxSetAttempts: 1 });
      // only leave room for a single two-byte item
      store._setSizeLimit(lru.getStorageOverhead() + 2);
      lru.setItem('1', '1');
      // with a single attempt allowed there is no cleanup/retry cycle, so
      // the second set fails immediately
      expect(() => {
        lru.setItem('2', '2');
      }).to.throwError(/quota/i);
    });
  });

  context('= 5', () => {
    it('will try to set 5 times and remove 4', () => {
      const { store, lru } = setup({ maxSetAttempts: 5 });

      // trick lru into thinking it can clear space
      lru.setItem('1', '1');
      // but prevent removing items
      const removeStub = sinon.stub(store, 'removeItem');

      // throw on the first 4 set attempts
      const setStub = sinon.stub(store, 'setItem')
        .onCall(0).throws()
        .onCall(1).throws()
        .onCall(2).throws()
        .onCall(3).throws()
        .stub;

      // the fifth attempt succeeds, after four failed sets each followed by
      // a (stubbed-out) removal pass
      lru.setItem('1', '1');
      sinon.assert.callCount(removeStub, 4);
      sinon.assert.callCount(setStub, 5);
    });
  });
});
|
||||
|
||||
context('with maxItems set', () => {
  it('trims the list when starting with more than max items', () => {
    // Fill a shared backing store with four items via an uncapped lru.
    const { store, lru: lruNoMax } = setup();
    lruNoMax.setItem('1', '1');
    lruNoMax.setItem('2', '2');
    lruNoMax.setItem('3', '3');
    lruNoMax.setItem('4', '4');
    expect(store).to.have.length(4);

    // Constructing a capped store over the same backing storage trims the
    // existing items down to maxItems as a constructor side effect; the
    // instance itself is not needed, so it is intentionally not bound
    // (the previous unused `lru` binding was a lint error).
    setup({ store, maxItems: 3 });
    expect(store).to.have.length(3);
  });
});
|
||||
});
|
|
@ -0,0 +1,56 @@
|
|||
import expect from 'expect.js';
|
||||
import sinon from 'sinon';
|
||||
import { encode as encodeRison } from 'rison-node';
|
||||
|
||||
import {
|
||||
createStateHash,
|
||||
isStateHash,
|
||||
} from '../state_hash';
|
||||
|
||||
describe('stateHash', () => {
  // createStateHash consults this provider to resolve short-hash collisions;
  // none of these inputs collide, so a no-op provider suffices.
  const existingJsonProvider = () => null;

  describe('#createStateHash', () => {
    // NOTE: these three cases were previously bare `describe` blocks whose
    // assertions ran at suite-definition time and were never reported as
    // tests; they are real test cases, so they belong in `it` blocks.
    it('returns a hash', () => {
      const json = JSON.stringify({a: 'a'});
      const hash = createStateHash(json, existingJsonProvider);
      expect(isStateHash(hash)).to.be(true);
    });

    it('returns the same hash for the same input', () => {
      const json = JSON.stringify({a: 'a'});
      const hash1 = createStateHash(json, existingJsonProvider);
      const hash2 = createStateHash(json, existingJsonProvider);
      expect(hash1).to.equal(hash2);
    });

    it('returns a different hash for different input', () => {
      const json1 = JSON.stringify({a: 'a'});
      const hash1 = createStateHash(json1, existingJsonProvider);

      const json2 = JSON.stringify({a: 'b'});
      const hash2 = createStateHash(json2, existingJsonProvider);
      expect(hash1).to.not.equal(hash2);
    });
  });

  describe('#isStateHash', () => {
    it('returns true for values created using #createStateHash', () => {
      const json = JSON.stringify({a: 'a'});
      const hash = createStateHash(json, existingJsonProvider);
      expect(isStateHash(hash)).to.be(true);
    });

    it('returns false for values not created using #createStateHash', () => {
      const json = JSON.stringify({a: 'a'});
      expect(isStateHash(json)).to.be(false);
    });

    it('returns false for RISON', () => {
      // We're storing RISON in the URL, so let's test against this specifically.
      const rison = encodeRison({a: 'a'});
      expect(isStateHash(rison)).to.be(false);
    });
  });
});
|
|
@ -1,5 +0,0 @@
|
|||
import { Sha256 } from 'ui/crypto';
|
||||
|
||||
/**
 * Compute the SHA-256 digest of a JSON string.
 *
 * @param {string} json - the serialized value to hash
 * @return {string} hex-encoded digest
 */
export default function createStorageHash(json) {
  const sha256 = new Sha256();
  sha256.update(json, 'utf8');
  return sha256.digest('hex');
}
|
|
@ -0,0 +1,174 @@
|
|||
/**
|
||||
* The HashedItemStore associates JSON objects with states in browser history and persists these
|
||||
* objects in sessionStorage. We persist them so that when a tab is closed and re-opened, we can
|
||||
* retain access to the state objects referenced by the browser history.
|
||||
*
|
||||
* Because there is a limit on how much data we can put into sessionStorage, the HashedItemStore
|
||||
* will attempt to remove old items from storage once that limit is reached.
|
||||
*
|
||||
* -------------------------------------------------------------------------------------------------
|
||||
*
|
||||
* Consideration 1: We can't (easily) mirror the browser history
|
||||
*
|
||||
* If we use letters to indicate a unique state object, and numbers to represent the same state
|
||||
* occurring again (due to action by the user), a history could look like this:
|
||||
*
|
||||
* Old < - - - - - - - - > New
|
||||
* A1 | B1 | C1 | A2 | D1 | E1
|
||||
*
|
||||
* If the user navigates back to C1 and starts to create new states, persisted history states will
|
||||
* become inaccessible:
|
||||
*
|
||||
* Old < - - - - - - - - - - -> New
|
||||
* A1 | B1 | C1 | F1 | G1 | H1 | I1 (new history states)
|
||||
* A2 | D1 | E1 (inaccessible persisted history states)
|
||||
*
|
||||
* Theoretically, we could build a mirror of the browser history. When the onpopstate event is
|
||||
* dispatched, we could determine whether we have gone back or forward in history. Then, when
|
||||
* a new state is persisted, we could delete all of the persisted items which are no longer
|
||||
* accessible. (Note that this would require reference-counting so that A isn't removed while D and
|
||||
* E are, since A would still have a remaining reference from A1).
|
||||
*
|
||||
* However, the History API doesn't allow us to read from the history beyond the current state. This
|
||||
* means that if a session is restored, we can't rebuild this browser history mirror.
|
||||
*
|
||||
* Due to this imperfect implementation, HashedItemStore ignores the possibility of inaccessible
|
||||
* history states. In the future, we could implement this history mirror and persist it in
|
||||
* sessionStorage too. Then, when restoring a session, we can just retrieve it from sessionStorage.
|
||||
*
|
||||
* -------------------------------------------------------------------------------------------------
|
||||
*
|
||||
* Consideration 2: We can't tell when we've hit the browser history limit
|
||||
*
|
||||
* Because some of our persisted history states may no longer be referenced by the browser history,
|
||||
* and we have no way of knowing which ones, we have no way of knowing whether we've persisted a
|
||||
* number of accessible states beyond the browser history length limit.
|
||||
*
|
||||
* More fundamentally, the browser history length limit is a browser implementation detail, so it
|
||||
* can change from browser to browser, or over time. Respecting this limit would introduce a lot of
|
||||
* (unnecessary?) complexity.
|
||||
*
|
||||
* For these reasons, HashedItemStore doesn't concern itself with this constraint.
|
||||
*/
|
||||
|
||||
import {
|
||||
sortBy,
|
||||
values,
|
||||
} from 'lodash';
|
||||
|
||||
/**
 * Persists items keyed by hash in a sessionStorage-like object, evicting the
 * least-recently-touched items when the storage quota is exhausted.
 * (See the file header for the design rationale.)
 */
export default class HashedItemStore {

  /**
   * Each persisted item is tracked by an "indexed item" of shape
   * {hash, touched}, where `touched` records the last time the hash was
   * read or written via this store.
   *
   * @param {Storage} sessionStorage - browser storage used for persistence
   */
  constructor(sessionStorage) {
    this._sessionStorage = sessionStorage;

    // Indexed items ordered oldest-touched first; the head of this list is
    // always the next eviction candidate when storage fills up.
    this._indexedItems = [];
    // hash -> indexed item, for O(1) lookup when a hash is touched again.
    this._hashToIndexedItemMap = {};

    // Rehydrate the index persisted by a previous page (e.g. a re-opened tab).
    const serializedIndex = this._sessionStorage.getItem(HashedItemStore.PERSISTED_INDEX_KEY);

    if (serializedIndex) {
      this._hashToIndexedItemMap = JSON.parse(serializedIndex) || {};
      // Order oldest-touched first (Array#sort is stable, matching the
      // behavior of lodash sortBy used previously at this call site).
      this._indexedItems = Object.values(this._hashToIndexedItemMap)
        .sort((a, b) => a.touched - b.touched);
    }
  }

  /**
   * Persist an item under its hash, evicting older items if necessary.
   *
   * @param {string} hash
   * @param {string} item
   * @return {boolean} whether the item could be persisted
   */
  setItem(hash, item) {
    const persisted = this._persistItem(hash, item);

    if (persisted) {
      this._touchHash(hash);
    }

    return persisted;
  }

  /**
   * Read an item by hash, marking it as recently used when found.
   *
   * @param {string} hash
   * @return {string|null}
   */
  getItem(hash) {
    const item = this._sessionStorage.getItem(hash);

    if (item !== null) {
      this._touchHash(hash);
    }

    return item;
  }

  // Try to write the item, evicting the oldest indexed item after each
  // quota failure; gives up (returns false) once nothing is left to evict,
  // which means the item is too large to ever fit.
  _persistItem(hash, item) {
    for (;;) {
      try {
        this._sessionStorage.setItem(hash, item);
        return true;
      } catch (e) {
        if (this._indexedItems.length === 0) {
          return false;
        }

        // Free space by dropping the least-recently-touched item, then retry.
        this._removeOldestItem();
      }
    }
  }

  // Drop the least-recently-touched item from both the index and storage.
  _removeOldestItem() {
    const oldest = this._indexedItems.shift();
    delete this._hashToIndexedItemMap[oldest.hash];
    this._sessionStorage.removeItem(oldest.hash);
  }

  // Record that `hash` was just used so it moves to the back of the eviction
  // queue, then persist the updated index.
  _touchHash(hash) {
    const existing = this._hashToIndexedItemMap[hash];

    if (existing) {
      existing.touched = Date.now();

      // The list stays sorted by touched-date if we simply move the single
      // updated item to the end, avoiding a full re-sort.
      const position = this._indexedItems.indexOf(existing);
      this._indexedItems.splice(position, 1);
      this._indexedItems.push(existing);
    } else {
      const created = {
        hash,
        touched: Date.now(),
      };

      this._indexedItems.push(created);
      this._hashToIndexedItemMap[hash] = created;
    }

    this._sessionStorage.setItem(
      HashedItemStore.PERSISTED_INDEX_KEY,
      JSON.stringify(this._hashToIndexedItemMap)
    );
  }

}

HashedItemStore.PERSISTED_INDEX_KEY = 'kibana.hashedItemIndex';
|
|
@ -0,0 +1,3 @@
|
|||
import HashedItemStore from './hashed_item_store';

// Export a single shared instance backed by window.sessionStorage so that all
// consumers index (and evict) their hashed items together.
export default new HashedItemStore(window.sessionStorage);
|
|
@ -1,78 +0,0 @@
|
|||
import angular from 'angular';
|
||||
|
||||
/**
 * Wraps a browser-storage-like object so that items are stored under a
 * content hash; the hash is returned to the caller and can be used to read
 * the item back later.
 */
class HashingStore {
  /**
   * @param {function(string): string} createStorageHash - hashes a JSON string
   * @param {Object} store - backing storage (getItem/setItem)
   */
  constructor(createStorageHash, store) {
    this._createStorageHash = createStorageHash;
    this._store = store;
  }

  /**
   * Determine if the passed value looks like a hash
   *
   * @param {string} str
   * @return {boolean}
   */
  isHash(str) {
    return String(str).startsWith(HashingStore.HASH_TAG);
  }

  /**
   * Find the value stored for the given hash
   *
   * @param {string} hash
   * @return {any} the parsed value, or null when missing or unparseable
   */
  getItemAtHash(hash) {
    try {
      const json = this._store.getItem(hash);
      return JSON.parse(json);
    } catch (err) {
      return null;
    }
  }

  /**
   * Compute the hash of an object, store the object, and return the hash
   *
   * @param {any} object - the value to hash
   * @return {string} the hash of the value
   */
  hashAndSetItem(object) {
    // The object may contain Angular $$ properties, so let's ignore them.
    const json = angular.toJson(object);
    const hash = this._getShortHash(json);
    this._store.setItem(hash, json);
    return hash;
  }

  /**
   * Compute the full hash for a json blob, then find the shortest prefix
   * (at least 7 characters) that is either unused in the store or already
   * holds this exact value.
   *
   * @private
   * @param {string} json
   * @return {string}
   */
  _getShortHash(json) {
    const fullHash = `${HashingStore.HASH_TAG}${this._createStorageHash(json)}`;

    let candidate;
    let length = 7;
    while (length < fullHash.length) {
      candidate = fullHash.slice(0, length);
      const occupant = this._store.getItem(candidate);
      if (occupant === null || occupant === json) break;
      length += 1;
    }

    return candidate;
  }
}

HashingStore.HASH_TAG = 'h@';

export default HashingStore;
|
|
@ -1,11 +1,8 @@
|
|||
export {
|
||||
default as createStorageHash,
|
||||
} from './create_storage_hash';
|
||||
default as hashedItemStoreSingleton,
|
||||
} from './hashed_item_store_singleton';
|
||||
|
||||
export {
|
||||
default as HashingStore,
|
||||
} from './hashing_store';
|
||||
|
||||
export {
|
||||
default as LazyLruStore,
|
||||
} from './lazy_lru_store';
|
||||
createStateHash,
|
||||
isStateHash,
|
||||
} from './state_hash';
|
||||
|
|
|
@ -1,276 +0,0 @@
|
|||
import { sortBy } from 'lodash';
|
||||
|
||||
import Notifier from 'ui/notify/notifier';
|
||||
|
||||
/**
 * The maximum number of times that we will try to
 * clear space after a call to setItem on the store fails
 *
 * @type {Number}
 */
const DEFAULT_MAX_SET_ATTEMPTS = 3;

/**
 * When trying to clear enough space for a key+chunk,
 * multiply the necessary space by this to produce the
 * "ideal" amount of space to clear.
 *
 * By clearing the "ideal" amount instead of just the
 * necessary amount we prevent extra cleanup calls.
 *
 * The "ideal" amount is limited by the MAX_IDEAL_CLEAR_PERCENT
 *
 * @type {Number}
 */
const DEFAULT_IDEAL_CLEAR_RATIO = 100;

/**
 * A limit to the amount of space that can be cleared
 * by the inflation caused by the IDEAL_CLEAR_RATIO
 * @type {Number}
 */
const DEFAULT_MAX_IDEAL_CLEAR_PERCENT = 0.3;

/**
 * A wrapper around a browser storage object that namespaces its keys,
 * timestamps each write, and lazily evicts the least-recently-written
 * entries when the underlying store runs out of quota.
 */
export default class LazyLruStore {
  /**
   * @param {Object} opts
   * @property {string} opts.id - namespace for this store's keys (required)
   * @property {Storage} opts.store - backing browser storage (required)
   * @property {Notifier} [opts.notifier] - used for cleanup logging
   * @property {number} [opts.maxItems=Infinity] - hard cap on stored items
   * @property {number} [opts.maxSetAttempts=3] - set retries before giving up
   * @property {number} [opts.idealClearRatio=100] - cleanup inflation factor
   * @property {number} [opts.maxIdealClearPercent=0.3] - cleanup inflation cap
   * @throws {TypeError} when any option is missing or out of range
   */
  constructor(opts = {}) {
    const {
      id,
      store,
      notifier = new Notifier(`LazyLruStore (re: probably history hashing)`),
      maxItems = Infinity,
      maxSetAttempts = DEFAULT_MAX_SET_ATTEMPTS,
      idealClearRatio = DEFAULT_IDEAL_CLEAR_RATIO,
      maxIdealClearPercent = DEFAULT_MAX_IDEAL_CLEAR_PERCENT,
    } = opts;

    if (!id) throw new TypeError('id is required');
    if (!store) throw new TypeError('store is required');
    if (maxSetAttempts < 1) throw new TypeError('maxSetAttempts must be >= 1');
    if (idealClearRatio < 1) throw new TypeError('idealClearRatio must be >= 1');
    if (maxIdealClearPercent < 0 || maxIdealClearPercent > 1) {
      throw new TypeError('maxIdealClearPercent must be between 0 and 1');
    }

    this._id = id;
    this._prefix = `lru:${this._id}:`;
    this._store = store;
    this._notifier = notifier;
    this._maxItems = maxItems;
    this._itemCountGuess = this._getItemCount();
    this._maxSetAttempts = maxSetAttempts;
    this._idealClearRatio = idealClearRatio;
    this._maxIdealClearPercent = maxIdealClearPercent;

    // the store may already hold more than maxItems from a previous session
    this._verifyMaxItems();
  }

  /**
   * Read a value by key.
   *
   * @param {string} key
   * @return {string|null} the stored value, or null when missing
   */
  getItem(key) {
    const chunk = this._store.getItem(this._getStoreKey(key));
    if (chunk === null) return null;
    const { val } = this._parseChunk(chunk);
    return val;
  }

  /**
   * Store a value under a key, clearing space when the store is full.
   * Values are stored (and later returned) as strings.
   *
   * @param {string} key
   * @param {string} val
   */
  setItem(key, val) {
    const newKey = !this._storeHasKey(key);
    this._attemptToSet(this._getStoreKey(key), this._getChunk(val));
    if (newKey) this._itemCountGuess += 1;
    this._verifyMaxItems();
  }

  /**
   * Remove a key from the store; unknown keys are ignored.
   *
   * @param {string} key
   */
  removeItem(key) {
    if (!this._storeHasKey(key)) return;
    this._store.removeItem(this._getStoreKey(key));
    this._itemCountGuess -= 1;
    this._verifyMaxItems();
  }

  /**
   * The number of bookkeeping bytes (prefix + timestamp) added to each
   * stored item, used for testing.
   *
   * @return {number}
   */
  getStorageOverhead() {
    return (this._getStoreKey('') + this._getChunk('')).length;
  }

  // private api

  // Namespace a caller's key so multiple stores can share one storage object.
  _getStoreKey(key) {
    return `${this._prefix}${key}`;
  }

  _storeHasKey(key) {
    return this._store.getItem(this._getStoreKey(key)) !== null;
  }

  /**
   * Convert a JSON blob into a chunk, the wrapper around values
   * that tells us when they were last stored
   *
   * @private
   * @param {string} val
   * @return {string} chunk
   */
  _getChunk(val) {
    return `${Date.now()}/${val}`;
  }

  /**
   * Parse a chunk into its store time and val values
   *
   * @private
   * @param {string} chunk - the chunk, probably read from the store
   * @return {object} parsed
   * @property {number} parsed.time
   * @property {string} parsed.val
   */
  _parseChunk(chunk) {
    // split only on the first "/" -- the value itself may contain slashes
    const splitIndex = chunk.indexOf('/');
    const time = parseInt(chunk.slice(0, splitIndex), 10);
    const val = chunk.slice(splitIndex + 1);
    return { time, val };
  }

  /**
   * Attempt to set a key on the store. If setItem fails, the assumption is
   * that the store is out of space, so this._makeSpaceFor(key, chunk) is
   * called. If enough space was cleared this function calls itself again
   * with attempt + 1; once attempt meets or exceeds this._maxSetAttempts
   * the original storage error is rethrown.
   *
   * @private
   * @param {string} key
   * @param {string} chunk
   * @param {number} [attempt=1]
   * @throws the underlying storage error when space cannot be cleared
   */
  _attemptToSet(key, chunk, attempt = 1) {
    try {
      this._store.setItem(key, chunk);
    } catch (error) {
      if (attempt >= this._maxSetAttempts) {
        throw error;
      }

      const madeEnoughSpace = this._makeSpaceFor(key, chunk);
      if (madeEnoughSpace) {
        this._attemptToSet(key, chunk, attempt + 1);
      } else {
        throw error;
      }
    }
  }

  /**
   * Walk all items in the store to find items stored using the same
   * this._prefix. Collect the time that key was last set, and the
   * byte-size of that item, and report all values found along
   * with the total bytes
   *
   * @private
   * @return {object} index
   * @property {object[]} index.itemsByOldestAccess
   * @property {number} index.totalBytes
   */
  _indexStoredItems() {
    const store = this._store;

    const items = [];
    let totalBytes = 0;

    for (let i = 0; i < store.length; i++) {
      const key = store.key(i);

      // ignore keys that belong to other stores or other code
      if (key.slice(0, this._prefix.length) !== this._prefix) {
        continue;
      }

      const chunk = store.getItem(key);
      const { time } = this._parseChunk(chunk);
      const bytes = key.length + chunk.length;
      items.push({ key, time, bytes });
      totalBytes += bytes;
    }

    // stable ascending sort by write time (oldest first)
    const itemsByOldestAccess = [...items].sort((a, b) => a.time - b.time);
    return { itemsByOldestAccess, totalBytes };
  }

  // Count the items in the store that belong to this lru's namespace.
  _getItemCount() {
    const { itemsByOldestAccess } = this._indexStoredItems();
    return itemsByOldestAccess.length;
  }

  /**
   * Check that the itemCountGuess has not exceeded the maxItems,
   * if it has, trim the item list to meet the maxItem count
   */
  _verifyMaxItems() {
    if (this._maxItems > this._itemCountGuess) return;

    const { itemsByOldestAccess } = this._indexStoredItems();
    // update our guess to make sure it's accurate
    this._itemCountGuess = itemsByOldestAccess.length;
    // remove all items from the beginning of the list, leaving this._maxItems in the list
    itemsByOldestAccess
      .slice(0, -this._maxItems)
      .forEach(item => this._doItemAutoRemoval(item));
  }

  /**
   * Determine how much space to clear so that we can store the specified
   * key and chunk into the store. Then clear that data and return true or
   * false if we were successful
   *
   * @private
   * @param {string} key
   * @param {string} chunk
   * @return {boolean} success
   */
  _makeSpaceFor(key, chunk) {
    const notifier = this._notifier;
    return notifier.event(`trying to make room in lru ${this._id}`, () => {
      const { totalBytes, itemsByOldestAccess } = this._indexStoredItems();

      // pick how much space we are going to try to clear
      // by finding a value that is at least the size of
      // the key + chunk but up to the key + chunk * IDEAL_CLEAR_RATIO
      const freeMin = key.length + chunk.length;
      const freeIdeal = freeMin * this._idealClearRatio;
      const toClear = Math.max(freeMin, Math.min(freeIdeal, totalBytes * this._maxIdealClearPercent));
      notifier.log(`PLAN: min ${freeMin} bytes, target ${toClear} bytes`);

      let remainingToClear = toClear;
      let removedItemCount = 0;
      while (itemsByOldestAccess.length > 0 && remainingToClear > 0) {
        const item = itemsByOldestAccess.shift();
        remainingToClear -= item.bytes;
        removedItemCount += 1;
        this._doItemAutoRemoval(item);
      }

      const success = remainingToClear <= 0;

      const label = success ? 'SUCCESS' : 'FAILURE';
      const removedBytes = toClear - remainingToClear;
      notifier.log(`${label}: removed ${removedItemCount} items for ${removedBytes} bytes`);
      return success;
    });
  }

  /**
   * Extracted helper for automated removal of items with logging
   *
   * @private
   * @param {object} item
   * @property {string} item.key
   * @property {number} item.time
   * @property {number} item.bytes
   */
  _doItemAutoRemoval(item) {
    const timeString = new Date(item.time).toISOString();
    this._notifier.log(`REMOVE: entry "${item.key}" from ${timeString}, freeing ${item.bytes} bytes`);
    this._store.removeItem(item.key);
    this._itemCountGuess -= 1;
  }
}
|
29
src/ui/public/state_management/state_storage/state_hash.js
Normal file
29
src/ui/public/state_management/state_storage/state_hash.js
Normal file
|
@ -0,0 +1,29 @@
|
|||
import { Sha256 } from 'ui/crypto';
|
||||
|
||||
// This prefix is used to identify hash strings that have been encoded in the URL.
const HASH_PREFIX = 'h@';

/**
 * Hash a JSON string into a short, prefixed state hash suitable for the URL.
 *
 * The SHA-256 hex digest is shortened to the smallest prefix (at minimum
 * 7 characters) that is either unused or already maps to this exact JSON.
 *
 * @param {string} json - the JSON-serialized state to hash
 * @param {Function} existingJsonProvider - given a candidate short hash,
 *   returns the JSON already stored under it, or null if the hash is unused
 * @return {string} the short hash, prefixed with HASH_PREFIX
 * @throws {Error} if json is not a string
 */
export function createStateHash(json, existingJsonProvider) {
  if (typeof json !== 'string') {
    throw new Error('createStateHash only accepts strings (JSON).');
  }

  const hash = new Sha256().update(json, 'utf8').digest('hex');

  let shortenedHash;

  // Shorten the hash to at minimum 7 characters. We just need to make sure that it either:
  // a) hasn't been used yet
  // b) or has been used already, but with the JSON we're currently hashing.
  // NOTE: the bound is `i <= hash.length` so the full digest is tried as the
  // final candidate; `i < hash.length` would stop one character short of it.
  for (let i = 7; i <= hash.length; i++) {
    shortenedHash = hash.slice(0, i);
    const existingJson = existingJsonProvider(shortenedHash);
    if (existingJson === null || existingJson === json) break;
  }

  return `${HASH_PREFIX}${shortenedHash}`;
}

/**
 * Check whether a string is a state hash produced by createStateHash.
 *
 * @param {string} str - candidate value (coerced via String())
 * @return {boolean} true if str begins with HASH_PREFIX
 */
export function isStateHash(str) {
  return String(str).startsWith(HASH_PREFIX);
}
|
Loading…
Reference in a new issue