diff --git a/client/js/upload-handler/upload.handler.controller.js b/client/js/upload-handler/upload.handler.controller.js
index 1141824a4..71aa5d44e 100644
--- a/client/js/upload-handler/upload.handler.controller.js
+++ b/client/js/upload-handler/upload.handler.controller.js
@@ -77,8 +77,8 @@ qq.UploadHandlerController = function(o, namespace) {
                     log("Problem finalizing chunks for file ID " + id + " - " + normalizedResponse.error, "error");

                     if (
-                        normalizedResponse.reset
-                        || (xhr && options.chunking.success.resetOnStatus.indexOf(xhr.status) >= 0)
+                        normalizedResponse.reset ||
+                        (xhr && options.chunking.success.resetOnStatus.indexOf(xhr.status) >= 0)
                     ) {
                         chunked.reset(id);
                     }
diff --git a/client/js/upload-handler/xhr.upload.handler.js b/client/js/upload-handler/xhr.upload.handler.js
index 549e65150..7ac45aa7e 100644
--- a/client/js/upload-handler/xhr.upload.handler.js
+++ b/client/js/upload-handler/xhr.upload.handler.js
@@ -12,6 +12,24 @@ qq.XhrUploadHandler = function(spec) {
         namespace = spec.options.namespace,
         proxy = spec.proxy,
         chunking = spec.options.chunking,
+        getChunkSize = function(id) {
+            var fileState = handler._getFileState(id);
+
+            if (fileState.chunkSize) {
+                return fileState.chunkSize;
+            }
+
+            else {
+                var chunkSize = chunking.partSize;
+
+                if (qq.isFunction(chunkSize)) {
+                    chunkSize = chunkSize(id, getSize(id));
+                }
+
+                fileState.chunkSize = chunkSize;
+                return chunkSize;
+            }
+        },
         resume = spec.options.resume,
         chunkFiles = chunking && spec.options.chunking.enabled && qq.supportedFeatures.chunking,
         resumeEnabled = resume && spec.options.resume.enabled && chunkFiles && qq.supportedFeatures.resume,
@@ -141,8 +159,8 @@ qq.XhrUploadHandler = function(spec) {
         },

         isResumable: function(id) {
-            return !!chunking && handler.isValid(id)
-                && !handler._getFileState(id).notResumable;
+            return !!chunking && handler.isValid(id) &&
+                !handler._getFileState(id).notResumable;
         },

         moveInProgressToRemaining: function(id, optInProgress, optRemaining) {
@@ -232,7 +250,7 @@
         },

         _getChunkData: function(id, chunkIndex) {
-            var chunkSize = chunking.partSize,
+            var chunkSize = getChunkSize(id),
                 fileSize = getSize(id),
                 fileOrBlob = handler.getFile(id),
                 startBytes = chunkSize * chunkIndex,
@@ -273,7 +291,7 @@
             var formatVersion = "5.0",
                 name = getName(id),
                 size = getSize(id),
-                chunkSize = chunking.partSize,
+                chunkSize = getChunkSize(id),
                 endpoint = getEndpoint(id),
                 customKeys = resume.customKeys(id),
                 localStorageId = qq.format("qq{}resume{}-{}-{}-{}-{}", namespace, formatVersion, name, size, chunkSize, endpoint);
@@ -300,7 +318,7 @@
        _getTotalChunks: function(id) {
            if (chunking) {
                var fileSize = getSize(id),
-                    chunkSize = chunking.partSize;
+                    chunkSize = getChunkSize(id);

                return Math.ceil(fileSize / chunkSize);
            }
diff --git a/client/js/uploader.basic.js b/client/js/uploader.basic.js
index 06a365c4b..4998ca2bf 100644
--- a/client/js/uploader.basic.js
+++ b/client/js/uploader.basic.js
@@ -111,7 +111,9 @@
                 totalFileSize: "qqtotalfilesize",
                 totalParts: "qqtotalparts"
             },
-            partSize: 2000000,
+            partSize: function(id) {
+                return 2000000;
+            },
             // only relevant for traditional endpoints, only required when concurrent.enabled === true
             success: {
                 endpoint: null,
diff --git a/client/js/version.js b/client/js/version.js
index e29d7823c..04aeb9ec6 100644
--- a/client/js/version.js
+++ b/client/js/version.js
@@ -1,2 +1,2 @@
 /*global qq */
-qq.version = "5.16.0-alpha.11";
+qq.version = "5.16.0-alpha.12";
diff --git a/package.json b/package.json
index 21cc7deef..849e02dea 100644
--- a/package.json
+++ b/package.json
@@ -3,7 +3,7 @@
   "title": "Fine Uploader",
   "main": "lib/traditional.js",
   "types" : "typescript/fine-uploader.d.ts",
-  "version": "5.16.0-alpha.11",
+  "version": "5.16.0-alpha.12",
   "description": "Multiple file upload plugin with progress-bar, drag-and-drop, direct-to-S3 & Azure uploading, client-side image scaling, preview generation, form support, chunking, auto-resume, and tons of other features.",
   "keywords": [
     "amazon",
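
A minimal usage sketch of the dynamic part size introduced by this patch. The endpoint URL and the size-based policy below are illustrative assumptions, not part of the change; the callback signature (id, fileSize) matches the chunkSize(id, getSize(id)) call inside getChunkSize above.

    var uploader = new qq.FineUploaderBasic({
        request: {
            endpoint: "/uploads" // hypothetical endpoint, not from this patch
        },
        chunking: {
            enabled: true,
            // partSize may now be a function of the file's id and total size.
            // getChunkSize caches the first value it returns per file, so
            // every chunk of a given file uses the same size.
            partSize: function(id, fileSize) {
                // illustrative policy: 10 MB chunks for files over ~100 MB
                return fileSize > 100000000 ? 10000000 : 2000000;
            }
        }
    });

Note that the computed chunk size is baked into the resume key built in _getLocalStorageId, so a resumed upload will only match a previous session if the callback returns the same size for that file.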