Add buffer to chunk size to prevent risk of oversize post requests (#48595)

Aaron Caldwell 2019-10-31 10:30:57 -06:00 committed by GitHub
parent b4ecd5ab91
commit 869e0c0fbf

@@ -5,16 +5,23 @@
  */
 import { MAX_BYTES } from '../../common/constants/file_import';
 
+// MAX_BYTES is a good guideline for splitting up posts, but this logic
+// occasionally sizes chunks so closely to the limit, that the remaining content
+// of a post (besides features) tips it over the max. Adding a 2MB buffer
+// to ensure this doesn't happen
+const CHUNK_BUFFER = 2097152;
+
 // Add data elements to chunk until limit is met
-export function sizeLimitedChunking(dataArr, maxChunkSize = MAX_BYTES) {
+export function sizeLimitedChunking(dataArr, maxByteSize = MAX_BYTES - CHUNK_BUFFER) {
   let chunkSize = 0;
   return dataArr.reduce((accu, el) => {
     const featureByteSize = (
       new Blob([JSON.stringify(el)], { type: 'application/json' })
     ).size;
-    if (featureByteSize > maxChunkSize) {
-      throw `Some features exceed maximum chunk size of ${maxChunkSize}`;
-    } else if (chunkSize + featureByteSize < maxChunkSize) {
+    if (featureByteSize > maxByteSize) {
+      throw `Some features exceed maximum chunk size of ${maxByteSize}`;
+    } else if (chunkSize + featureByteSize < maxByteSize) {
       const lastChunkRef = accu.length - 1;
       chunkSize += featureByteSize;
       accu[lastChunkRef].push(el);
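
The hunk is cut off before the else branch that rolls over to a new chunk and before the reducer's seed value. The following is a minimal, self-contained sketch of the full patched function, assuming the reducer is seeded with a single empty chunk ([[]]) and ends with return accu; the MAX_BYTES value is a placeholder rather than Kibana's real constant, and the bare string throw is swapped for a proper Error:

// Placeholder; the real value is imported from '../../common/constants/file_import'
const MAX_BYTES = 50 * 1024 * 1024;
// 2MB of headroom so the non-feature parts of the POST body
// don't tip a chunk over the limit
const CHUNK_BUFFER = 2097152;

function sizeLimitedChunking(dataArr, maxByteSize = MAX_BYTES - CHUNK_BUFFER) {
  let chunkSize = 0;
  return dataArr.reduce((accu, el) => {
    // Byte size of this element once serialized, as it will travel on the wire
    const featureByteSize = new Blob([JSON.stringify(el)], { type: 'application/json' }).size;
    if (featureByteSize > maxByteSize) {
      // A single feature larger than the limit can never fit in any chunk
      throw new Error(`Some features exceed maximum chunk size of ${maxByteSize}`);
    } else if (chunkSize + featureByteSize < maxByteSize) {
      // Fits: append to the current (last) chunk
      chunkSize += featureByteSize;
      accu[accu.length - 1].push(el);
    } else {
      // Would overflow: roll over to a fresh chunk
      chunkSize = featureByteSize;
      accu.push([el]);
    }
    return accu;
  }, [[]]);
}

// Usage: split ten ~400-byte fake GeoJSON features into ~1KB chunks
const features = Array.from({ length: 10 }, (_, i) => ({
  type: 'Feature',
  properties: { id: i, pad: 'x'.repeat(300) },
  geometry: { type: 'Point', coordinates: [i, i] },
}));
console.log(sizeLimitedChunking(features, 1024).map(c => c.length)); // e.g. [2, 2, 2, 2, 2]

Subtracting CHUNK_BUFFER in the default parameter, rather than inside the loop, leaves the comparison logic untouched: every chunk now tops out 2MB below MAX_BYTES, reserving room for the remaining non-feature content of the post that the new comment describes.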