diff options
author | Piotr Mrowczynski <mrow4a@yahoo.com> | 2017-06-20 21:27:26 +0300 |
---|---|---|
committer | Markus Goetz <markus@woboq.com> | 2017-07-03 15:41:53 +0300 |
commit | d1e00099dc502fe22a1dd4e5c0d8d42e7de868a7 (patch) | |
tree | 45e480ec7828d4e510384d0260b9d343225b6633 /src/libsync | |
parent | d018d460e3968ea67352a9dad9f3bdbe8a97306e (diff) |
Classify chunked items correctly. Issue #5850
Diffstat (limited to 'src/libsync')
-rw-r--r-- | src/libsync/discoveryphase.h | 5 | ||||
-rw-r--r-- | src/libsync/owncloudpropagator.cpp | 3 | ||||
-rw-r--r-- | src/libsync/propagateupload.h | 6 | ||||
-rw-r--r-- | src/libsync/propagateuploadng.cpp | 1 |
4 files changed, 11 insertions, 4 deletions
diff --git a/src/libsync/discoveryphase.h b/src/libsync/discoveryphase.h index 9603c39ab..b7d252c67 100644 --- a/src/libsync/discoveryphase.h +++ b/src/libsync/discoveryphase.h @@ -53,9 +53,10 @@ struct SyncOptions /** If a confirmation should be asked for external storages */ bool _confirmExternalStorage; - /** The initial un-adjusted chunk size in bytes for chunked uploads + /** The initial un-adjusted chunk size in bytes for chunked uploads, both + * for old and new chunking algorithm, which classifies the item to be chunked * - * When dynamic chunk size adjustments are done, this is the + * In chunkingNG, when dynamic chunk size adjustments are done, this is the * starting value and is then gradually adjusted within the * minChunkSize / maxChunkSize bounds. */ diff --git a/src/libsync/owncloudpropagator.cpp b/src/libsync/owncloudpropagator.cpp index 62f12f023..f77dfd860 100644 --- a/src/libsync/owncloudpropagator.cpp +++ b/src/libsync/owncloudpropagator.cpp @@ -379,7 +379,8 @@ PropagateItemJob *OwncloudPropagator::createJob(const SyncFileItemPtr &item) return job; } else { PropagateUploadFileCommon *job = 0; - if (item->_size > _chunkSize && account()->capabilities().chunkingNg()) { + if (item->_size > syncOptions()._initialChunkSize && account()->capabilities().chunkingNg()) { + // Item is above _initialChunkSize, thus will be classified as to be chunked job = new PropagateUploadFileNG(this, item); } else { job = new PropagateUploadFileV1(this, item); diff --git a/src/libsync/propagateupload.h b/src/libsync/propagateupload.h index 5e16b5ff9..030c43020 100644 --- a/src/libsync/propagateupload.h +++ b/src/libsync/propagateupload.h @@ -307,7 +307,11 @@ private: int _chunkCount; /// Total number of chunks for this file int _transferId; /// transfer id (part of the url) - quint64 chunkSize() const { return propagator()->syncOptions()._initialChunkSize; } + quint64 chunkSize() const { + // Old chunking does not use dynamic chunking algorithm, and does not adjusts 
the chunk size accordingly, + thus this value should be used as the one classifying the item to be chunked + return propagator()->syncOptions()._initialChunkSize; + } public: diff --git a/src/libsync/propagateuploadng.cpp b/src/libsync/propagateuploadng.cpp index 45768102d..dd918e80b 100644 --- a/src/libsync/propagateuploadng.cpp +++ b/src/libsync/propagateuploadng.cpp @@ -400,6 +400,7 @@ void PropagateUploadFileNG::slotPutFinished() // the chunk sizes a bit. quint64 targetSize = (propagator()->_chunkSize + predictedGoodSize) / 2; + // Adjust the dynamic chunk size _chunkSize used for sizing of the item's chunks to be sent propagator()->_chunkSize = qBound( propagator()->syncOptions()._minChunkSize, targetSize, |