Welcome to mirror list, hosted at ThFree Co, Russian Federation.

github.com/duplicati/duplicati.git - Unnamed repository; edit this file 'description' to name the repository.
summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
Diffstat (limited to 'Duplicati/Library')
-rw-r--r--Duplicati/Library/AutoUpdater/UpdaterManager.cs2
-rw-r--r--Duplicati/Library/Main/BackendManager.cs12
-rw-r--r--Duplicati/Library/Main/Controller.cs2
-rw-r--r--Duplicati/Library/Main/Database/LocalDeleteDatabase.cs6
-rw-r--r--Duplicati/Library/Main/Database/LocalRecreateDatabase.cs8
-rw-r--r--Duplicati/Library/Main/Operation/Backup/FileEnumerationProcess.cs7
-rw-r--r--Duplicati/Library/Main/Operation/Backup/RecreateMissingIndexFiles.cs2
-rw-r--r--Duplicati/Library/Main/Operation/BackupHandler.cs4
-rw-r--r--Duplicati/Library/Main/Operation/Common/BackendHandler.cs139
-rw-r--r--Duplicati/Library/Main/Operation/CompactHandler.cs2
-rw-r--r--Duplicati/Library/Main/Operation/FilelistProcessor.cs4
-rw-r--r--Duplicati/Library/Main/Operation/PurgeFilesHandler.cs2
-rw-r--r--Duplicati/Library/Main/Operation/RestoreHandler.cs6
-rw-r--r--Duplicati/Library/Main/Volumes/VolumeWriterBase.cs4
-rw-r--r--Duplicati/Library/Snapshots/SnapshotUtility.cs5
-rw-r--r--Duplicati/Library/Utility/DirectStreamLink.cs3
-rw-r--r--Duplicati/Library/Utility/ProgressReportingStream.cs2
17 files changed, 104 insertions(+), 106 deletions(-)
diff --git a/Duplicati/Library/AutoUpdater/UpdaterManager.cs b/Duplicati/Library/AutoUpdater/UpdaterManager.cs
index 72d6dfe93..2ad798c02 100644
--- a/Duplicati/Library/AutoUpdater/UpdaterManager.cs
+++ b/Duplicati/Library/AutoUpdater/UpdaterManager.cs
@@ -460,7 +460,7 @@ namespace Duplicati.Library.AutoUpdater
var areq = new Duplicati.Library.Utility.AsyncHttpRequest(wreq);
using (var resp = areq.GetResponse())
using (var rss = areq.GetResponseStream())
- using (var pgs = new Duplicati.Library.Utility.ProgressReportingStream(rss, version.CompressedSize, cb))
+ using (var pgs = new Duplicati.Library.Utility.ProgressReportingStream(rss, cb))
{
Duplicati.Library.Utility.Utility.CopyStream(pgs, tempfile);
}
diff --git a/Duplicati/Library/Main/BackendManager.cs b/Duplicati/Library/Main/BackendManager.cs
index 49e00f44d..de20f256a 100644
--- a/Duplicati/Library/Main/BackendManager.cs
+++ b/Duplicati/Library/Main/BackendManager.cs
@@ -272,7 +272,7 @@ namespace Duplicati.Library.Main
public string Newname;
}
- public DatabaseCollector(LocalDatabase database, IBackendWriter stats)
+ public DatabaseCollector(LocalDatabase database)
{
m_database = database;
m_dbqueue = new List<IDbEntry>();
@@ -376,7 +376,7 @@ namespace Duplicati.Library.Main
m_numberofretries = options.NumberOfRetries;
m_retrydelay = options.RetryDelay;
- m_db = new DatabaseCollector(database, statwriter);
+ m_db = new DatabaseCollector(database);
m_backend = DynamicLoader.BackendLoader.GetBackend(m_backendurl, m_options.RawOptions);
if (m_backend == null)
@@ -631,7 +631,7 @@ namespace Duplicati.Library.Main
private void RenameFileAfterError(FileEntryItem item)
{
var p = VolumeBase.ParseFilename(item.RemoteFilename);
- var guid = VolumeWriterBase.GenerateGuid(m_options);
+ var guid = VolumeWriterBase.GenerateGuid();
var time = p.Time.Ticks == 0 ? p.Time : p.Time.AddSeconds(1);
var newname = VolumeBase.GenerateFilename(p.FileType, p.Prefix, guid, time, p.CompressionModule, p.EncryptionModule);
var oldname = item.RemoteFilename;
@@ -733,7 +733,7 @@ namespace Duplicati.Library.Main
{
using (var fs = System.IO.File.OpenRead(item.LocalFilename))
using (var ts = new ThrottledStream(fs, m_options.MaxUploadPrSecond, m_options.MaxDownloadPrSecond))
- using (var pgs = new Library.Utility.ProgressReportingStream(ts, item.Size, pg => HandleProgress(ts, pg)))
+ using (var pgs = new Library.Utility.ProgressReportingStream(ts, pg => HandleProgress(ts, pg)))
((Library.Interface.IStreamingBackend)m_backend).Put(item.RemoteFilename, pgs);
}
else
@@ -827,7 +827,7 @@ namespace Duplicati.Library.Main
using (var ss = new ShaderStream(nextTierWriter, false))
{
using (var ts = new ThrottledStream(ss, m_options.MaxDownloadPrSecond, m_options.MaxUploadPrSecond))
- using (var pgs = new Library.Utility.ProgressReportingStream(ts, item.Size, pg => HandleProgress(ts, pg)))
+ using (var pgs = new Library.Utility.ProgressReportingStream(ts, pg => HandleProgress(ts, pg)))
{
taskHasher.Start(); // We do not start tasks earlier to be sure the input always gets closed.
if (taskDecrypter != null) taskDecrypter.Start();
@@ -912,7 +912,7 @@ namespace Duplicati.Library.Main
using (var ss = new ShaderStream(hs, true))
{
using (var ts = new ThrottledStream(ss, m_options.MaxDownloadPrSecond, m_options.MaxUploadPrSecond))
- using (var pgs = new Library.Utility.ProgressReportingStream(ts, item.Size, pg => HandleProgress(ts, pg)))
+ using (var pgs = new Library.Utility.ProgressReportingStream(ts, pg => HandleProgress(ts, pg)))
{ ((Library.Interface.IStreamingBackend)m_backend).Get(item.RemoteFilename, pgs); }
ss.Flush();
retDownloadSize = ss.TotalBytesWritten;
diff --git a/Duplicati/Library/Main/Controller.cs b/Duplicati/Library/Main/Controller.cs
index b89149c5f..18c8f801b 100644
--- a/Duplicati/Library/Main/Controller.cs
+++ b/Duplicati/Library/Main/Controller.cs
@@ -158,7 +158,7 @@ namespace Duplicati.Library.Main
return List((IEnumerable<string>)null, filter);
}
- public Duplicati.Library.Interface.IListResults List (string filterstring, Library.Utility.IFilter filter = null)
+ public Duplicati.Library.Interface.IListResults List(string filterstring)
{
return List(filterstring == null ? null : new string[] { filterstring }, null);
}
diff --git a/Duplicati/Library/Main/Database/LocalDeleteDatabase.cs b/Duplicati/Library/Main/Database/LocalDeleteDatabase.cs
index fe5ec4287..751094b1e 100644
--- a/Duplicati/Library/Main/Database/LocalDeleteDatabase.cs
+++ b/Duplicati/Library/Main/Database/LocalDeleteDatabase.cs
@@ -272,7 +272,7 @@ namespace Duplicati.Library.Main.Database
{
private System.Data.IDbCommand m_command;
- public BlockQuery(System.Data.IDbConnection con, Options options, System.Data.IDbTransaction transaction)
+ public BlockQuery(System.Data.IDbConnection con, System.Data.IDbTransaction transaction)
{
m_command = con.CreateCommand();
m_command.Transaction = transaction;
@@ -302,9 +302,9 @@ namespace Duplicati.Library.Main.Database
/// <summary>
/// Builds a lookup table to enable faster response to block queries
/// </summary>
- public IBlockQuery CreateBlockQueryHelper(Options options, System.Data.IDbTransaction transaction)
+ public IBlockQuery CreateBlockQueryHelper(System.Data.IDbTransaction transaction)
{
- return new BlockQuery(m_connection, options, transaction);
+ return new BlockQuery(m_connection, transaction);
}
public void MoveBlockToNewVolume(string hash, long size, long volumeID, System.Data.IDbTransaction tr)
diff --git a/Duplicati/Library/Main/Database/LocalRecreateDatabase.cs b/Duplicati/Library/Main/Database/LocalRecreateDatabase.cs
index bbb21fe11..f2177e114 100644
--- a/Duplicati/Library/Main/Database/LocalRecreateDatabase.cs
+++ b/Duplicati/Library/Main/Database/LocalRecreateDatabase.cs
@@ -294,20 +294,20 @@ namespace Duplicati.Library.Main.Database
public void AddDirectoryEntry(long filesetid, string path, DateTime time, long metadataid, System.Data.IDbTransaction transaction)
{
- AddEntry(FilelistEntryType.Folder, filesetid, path, time, FOLDER_BLOCKSET_ID, metadataid, transaction);
+ AddEntry(filesetid, path, time, FOLDER_BLOCKSET_ID, metadataid, transaction);
}
public void AddSymlinkEntry(long filesetid, string path, DateTime time, long metadataid, System.Data.IDbTransaction transaction)
{
- AddEntry(FilelistEntryType.Symlink, filesetid, path, time, SYMLINK_BLOCKSET_ID, metadataid, transaction);
+ AddEntry(filesetid, path, time, SYMLINK_BLOCKSET_ID, metadataid, transaction);
}
public void AddFileEntry(long filesetid, string path, DateTime time, long blocksetid, long metadataid, System.Data.IDbTransaction transaction)
{
- AddEntry(FilelistEntryType.File , filesetid, path, time, blocksetid, metadataid, transaction);
+ AddEntry(filesetid, path, time, blocksetid, metadataid, transaction);
}
- private void AddEntry(FilelistEntryType type, long filesetid, string path, DateTime time, long blocksetid, long metadataid, System.Data.IDbTransaction transaction)
+ private void AddEntry(long filesetid, string path, DateTime time, long blocksetid, long metadataid, System.Data.IDbTransaction transaction)
{
var fileid = -1L;
diff --git a/Duplicati/Library/Main/Operation/Backup/FileEnumerationProcess.cs b/Duplicati/Library/Main/Operation/Backup/FileEnumerationProcess.cs
index 6799c716f..f348b848a 100644
--- a/Duplicati/Library/Main/Operation/Backup/FileEnumerationProcess.cs
+++ b/Duplicati/Library/Main/Operation/Backup/FileEnumerationProcess.cs
@@ -77,13 +77,13 @@ namespace Duplicati.Library.Main.Operation.Backup
{
}
- return AttributeFilter(null, x, fa, snapshot, sourcefilter, hardlinkPolicy, symlinkPolicy, hardlinkmap, fileAttributes, enumeratefilter, ignorenames, mixinqueue);
+ return AttributeFilter(x, fa, snapshot, sourcefilter, hardlinkPolicy, symlinkPolicy, hardlinkmap, fileAttributes, enumeratefilter, ignorenames, mixinqueue);
});
}
else
{
Library.Utility.Utility.EnumerationFilterDelegate attributeFilter = (root, path, attr) =>
- AttributeFilter(root, path, attr, snapshot, sourcefilter, hardlinkPolicy, symlinkPolicy, hardlinkmap, fileAttributes, enumeratefilter, ignorenames, mixinqueue);
+ AttributeFilter(path, attr, snapshot, sourcefilter, hardlinkPolicy, symlinkPolicy, hardlinkmap, fileAttributes, enumeratefilter, ignorenames, mixinqueue);
if (journalService != null)
{
@@ -219,10 +219,9 @@ namespace Duplicati.Library.Main.Operation.Backup
/// Plugin filter for enumerating a list of files.
/// </summary>
/// <returns>True if the path should be returned, false otherwise.</returns>
- /// <param name="rootpath">The root path that initiated this enumeration.</param>
/// <param name="path">The current path.</param>
/// <param name="attributes">The file or folder attributes.</param>
- private static bool AttributeFilter(string rootpath, string path, FileAttributes attributes, Snapshots.ISnapshotService snapshot, Library.Utility.IFilter sourcefilter, Options.HardlinkStrategy hardlinkPolicy, Options.SymlinkStrategy symlinkPolicy, Dictionary<string, string> hardlinkmap, FileAttributes fileAttributes, Duplicati.Library.Utility.IFilter enumeratefilter, string[] ignorenames, Queue<string> mixinqueue)
+ private static bool AttributeFilter(string path, FileAttributes attributes, Snapshots.ISnapshotService snapshot, Library.Utility.IFilter sourcefilter, Options.HardlinkStrategy hardlinkPolicy, Options.SymlinkStrategy symlinkPolicy, Dictionary<string, string> hardlinkmap, FileAttributes fileAttributes, Duplicati.Library.Utility.IFilter enumeratefilter, string[] ignorenames, Queue<string> mixinqueue)
{
// Step 1, exclude block devices
try
diff --git a/Duplicati/Library/Main/Operation/Backup/RecreateMissingIndexFiles.cs b/Duplicati/Library/Main/Operation/Backup/RecreateMissingIndexFiles.cs
index acc038103..eeba6ff42 100644
--- a/Duplicati/Library/Main/Operation/Backup/RecreateMissingIndexFiles.cs
+++ b/Duplicati/Library/Main/Operation/Backup/RecreateMissingIndexFiles.cs
@@ -28,7 +28,7 @@ namespace Duplicati.Library.Main.Operation.Backup
/// </summary>
private static readonly string LOGTAG = Logging.Log.LogTagFromType(typeof(RecreateMissingIndexFiles));
- public static Task Run(BackupDatabase database, Options options, BackupResults result, ITaskReader taskreader)
+ public static Task Run(BackupDatabase database, Options options, ITaskReader taskreader)
{
return AutomationExtensions.RunTask(new
{
diff --git a/Duplicati/Library/Main/Operation/BackupHandler.cs b/Duplicati/Library/Main/Operation/BackupHandler.cs
index 7c5eb4af1..2eadd5db8 100644
--- a/Duplicati/Library/Main/Operation/BackupHandler.cs
+++ b/Duplicati/Library/Main/Operation/BackupHandler.cs
@@ -142,7 +142,7 @@ namespace Duplicati.Library.Main.Operation
{
if (m_options.NoBackendverification)
{
- FilelistProcessor.VerifyLocalList(backend, m_options, m_database, m_result.BackendWriter);
+ FilelistProcessor.VerifyLocalList(backend, m_database);
UpdateStorageStatsFromDatabase();
}
else
@@ -454,7 +454,7 @@ namespace Duplicati.Library.Main.Operation
var lastfilesetid = prevfileset.Value.Ticks == 0 ? -1 : prevfileset.Key;
// Rebuild any index files that are missing
- await Backup.RecreateMissingIndexFiles.Run(db, m_options, m_result, m_result.TaskReader);
+ await Backup.RecreateMissingIndexFiles.Run(db, m_options, m_result.TaskReader);
// This should be removed as the lookups are no longer used
m_database.BuildLookupTable(m_options);
diff --git a/Duplicati/Library/Main/Operation/Common/BackendHandler.cs b/Duplicati/Library/Main/Operation/Common/BackendHandler.cs
index 78039aca5..d0634fd5e 100644
--- a/Duplicati/Library/Main/Operation/Common/BackendHandler.cs
+++ b/Duplicati/Library/Main/Operation/Common/BackendHandler.cs
@@ -108,13 +108,13 @@ namespace Duplicati.Library.Main.Operation.Common
this.LocalTempfile = Library.Utility.TempFile.WrapExistingFile(name);
this.LocalTempfile.Protected = true;
}
-
+
public void Encrypt(Options options)
{
if (!this.Encrypted && !options.NoEncryption)
{
var tempfile = new Library.Utility.TempFile();
- using(var enc = DynamicLoader.EncryptionLoader.GetModule(options.EncryptionModule, options.Passphrase, options.RawOptions))
+ using (var enc = DynamicLoader.EncryptionLoader.GetModule(options.EncryptionModule, options.Passphrase, options.RawOptions))
enc.Encrypt(this.LocalFilename, tempfile);
this.DeleteLocalFile();
@@ -150,10 +150,10 @@ namespace Duplicati.Library.Main.Operation.Common
{
if (this.LocalTempfile != null)
{
- try
+ try
{
this.LocalTempfile.Protected = false;
- this.LocalTempfile.Dispose();
+ this.LocalTempfile.Dispose();
}
catch (Exception ex) { Logging.Log.WriteWarningMessage(LOGTAG, "DeleteTemporaryFileError", ex, "Failed to dispose temporary file: {0}", this.LocalTempfile); }
finally { this.LocalTempfile = null; }
@@ -180,17 +180,17 @@ namespace Duplicati.Library.Main.Operation.Common
m_stats = stats;
m_taskreader = taskreader;
m_backend = DynamicLoader.BackendLoader.GetBackend(backendUrl, options.RawOptions);
-
+
var shortname = m_backendurl;
- // Try not to leak hostnames or other information in the error messages
- try { shortname = new Library.Utility.Uri(shortname).Scheme; }
- catch { }
+ // Try not to leak hostnames or other information in the error messages
+ try { shortname = new Library.Utility.Uri(shortname).Scheme; }
+ catch { }
- if (m_backend == null)
+ if (m_backend == null)
throw new Duplicati.Library.Interface.UserInformationException(string.Format("Backend not supported: {0}", shortname), "BackendNotSupported");
- }
-
+ }
+
protected Task<T> RunRetryOnMain<T>(FileEntryItem fe, Func<Task<T>> method)
{
return RunOnMain<T>(() =>
@@ -213,7 +213,7 @@ namespace Duplicati.Library.Main.Operation.Common
});
}
-
+
public async Task UploadFileAsync(VolumeWriterBase item, Func<string, Task<IndexVolumeWriter>> createIndexFile = null)
{
var fe = new FileEntryItem(BackendActionType.Put, item.RemoteFilename);
@@ -231,7 +231,8 @@ namespace Duplicati.Library.Main.Operation.Common
{
try
{
- await DoWithRetry(fe, async () => {
+ await DoWithRetry(fe, async () =>
+ {
if (fe.IsRetry)
await RenameFileAfterErrorAsync(fe).ConfigureAwait(false);
@@ -249,7 +250,8 @@ namespace Duplicati.Library.Main.Operation.Common
await m_database.UpdateRemoteVolumeAsync(indexFile.RemoteFilename, RemoteVolumeState.Uploading, -1, null);
- await DoWithRetry(indexFile, async () => {
+ await DoWithRetry(indexFile, async () =>
+ {
if (indexFile.IsRetry)
await RenameFileAfterErrorAsync(indexFile).ConfigureAwait(false);
@@ -268,7 +270,7 @@ namespace Duplicati.Library.Main.Operation.Common
tcs.TrySetResult(true);
}
- catch(Exception ex)
+ catch (Exception ex)
{
if (ex is System.Threading.ThreadAbortException)
tcs.TrySetCanceled();
@@ -300,21 +302,22 @@ namespace Duplicati.Library.Main.Operation.Common
public Task<IList<Library.Interface.IFileEntry>> ListFilesAsync()
{
var fe = new FileEntryItem(BackendActionType.List, null);
- return RunRetryOnMain(fe, () =>
- DoList(fe)
+ return RunRetryOnMain(fe, () =>
+ DoList()
);
}
public Task<Library.Utility.TempFile> GetFileAsync(string remotename, long size, string remotehash)
{
var fe = new FileEntryItem(BackendActionType.Get, remotename, size, remotehash);
- return RunRetryOnMain(fe, () => DoGet(fe) );
+ return RunRetryOnMain(fe, () => DoGet(fe));
}
public Task<Tuple<Library.Utility.TempFile, long, string>> GetFileWithInfoAsync(string remotename)
{
var fe = new FileEntryItem(BackendActionType.Get, remotename);
- return RunRetryOnMain(fe, async () => {
+ return RunRetryOnMain(fe, async () =>
+ {
var res = await DoGet(fe).ConfigureAwait(false);
return new Tuple<Library.Utility.TempFile, long, string>(
res,
@@ -324,7 +327,7 @@ namespace Duplicati.Library.Main.Operation.Common
});
}
- public Task<Library.Utility.TempFile> GetFileForTestingAsync(string remotename, long size, string remotehash)
+ public Task<Library.Utility.TempFile> GetFileForTestingAsync(string remotename)
{
var fe = new FileEntryItem(BackendActionType.Get, remotename);
fe.VerifyHashOnly = true;
@@ -338,9 +341,9 @@ namespace Duplicati.Library.Main.Operation.Common
if (m_backend != null)
m_backend.Dispose();
}
- catch (Exception dex)
- {
- Logging.Log.WriteWarningMessage(LOGTAG, "BackendDisposeError", dex, "Failed to dispose backend instance: {0}", ex.Message);
+ catch (Exception dex)
+ {
+ Logging.Log.WriteWarningMessage(LOGTAG, "BackendDisposeError", dex, "Failed to dispose backend instance: {0}", ex.Message);
}
m_backend = null;
}
@@ -355,8 +358,8 @@ namespace Duplicati.Library.Main.Operation.Common
if (m_workerSource.IsCancellationRequested)
throw new OperationCanceledException();
-
- for(var i = 0; i < m_options.NumberOfRetries; i++)
+
+ for (var i = 0; i < m_options.NumberOfRetries; i++)
{
if (m_options.RetryDelay.Ticks != 0 && i != 0)
await Task.Delay(m_options.RetryDelay).ConfigureAwait(false);
@@ -392,13 +395,13 @@ namespace Duplicati.Library.Main.Operation.Common
if (!m_uploadSuccess && ex is Duplicati.Library.Interface.FolderMissingException && m_options.AutocreateFolders)
{
try
- {
+ {
// If we successfully create the folder, we can re-use the connection
- m_backend.CreateFolder();
+ m_backend.CreateFolder();
recovered = true;
}
catch (Exception dex)
- {
+ {
Logging.Log.WriteWarningMessage(LOGTAG, "FolderCreateError", dex, "Failed to create folder: {0}", ex.Message);
}
}
@@ -419,7 +422,7 @@ namespace Duplicati.Library.Main.Operation.Common
private async Task RenameFileAfterErrorAsync(FileEntryItem item)
{
var p = VolumeBase.ParseFilename(item.RemoteFilename);
- var guid = VolumeWriterBase.GenerateGuid(m_options);
+ var guid = VolumeWriterBase.GenerateGuid();
var time = p.Time.Ticks == 0 ? p.Time : p.Time.AddSeconds(1);
var newname = VolumeBase.GenerateFilename(p.FileType, p.Prefix, guid, time, p.CompressionModule, p.EncryptionModule);
var oldname = item.RemoteFilename;
@@ -447,7 +450,7 @@ namespace Duplicati.Library.Main.Operation.Common
item.DeleteLocalFile();
return true;
}
-
+
await m_database.LogRemoteOperationAsync("put", item.RemoteFilename, JsonConvert.SerializeObject(new { Size = item.Size, Hash = item.Hash }));
await m_stats.SendEventAsync(BackendActionType.Put, BackendEventType.Started, item.RemoteFilename, item.Size);
@@ -457,7 +460,7 @@ namespace Duplicati.Library.Main.Operation.Common
{
using (var fs = System.IO.File.OpenRead(item.LocalFilename))
using (var ts = new ThrottledStream(fs, m_options.MaxUploadPrSecond, m_options.MaxDownloadPrSecond))
- using (var pgs = new Library.Utility.ProgressReportingStream(ts, item.Size, pg => HandleProgress(ts, pg)))
+ using (var pgs = new Library.Utility.ProgressReportingStream(ts, pg => HandleProgress(ts, pg)))
((Library.Interface.IStreamingBackend)m_backend).Put(item.RemoteFilename, pgs);
}
else
@@ -479,14 +482,14 @@ namespace Duplicati.Library.Main.Operation.Common
else if (f.Size != item.Size && f.Size >= 0)
throw new Exception(string.Format("List verify failed for file: {0}, size was {1} but expected to be {2}", f.Name, f.Size, item.Size));
}
-
+
item.DeleteLocalFile();
await m_database.CommitTransactionAsync("CommitAfterUpload");
return true;
}
- private async Task<IList<Library.Interface.IFileEntry>> DoList(FileEntryItem item)
+ private async Task<IList<Library.Interface.IFileEntry>> DoList()
{
await m_stats.SendEventAsync(BackendActionType.List, BackendEventType.Started, null, -1);
@@ -575,7 +578,7 @@ namespace Duplicati.Library.Main.Operation.Common
try
{
m_backend.CreateFolder();
- }
+ }
catch (Exception ex)
{
result = ex.ToString();
@@ -604,7 +607,7 @@ namespace Duplicati.Library.Main.Operation.Common
{
using (var fs = System.IO.File.OpenWrite(tmpfile))
using (var ts = new ThrottledStream(fs, m_options.MaxUploadPrSecond, m_options.MaxDownloadPrSecond))
- using (var pgs = new Library.Utility.ProgressReportingStream(ts, item.Size, pg => HandleProgress(ts, pg)))
+ using (var pgs = new Library.Utility.ProgressReportingStream(ts, pg => HandleProgress(ts, pg)))
((Library.Interface.IStreamingBackend)m_backend).Get(item.RemoteFilename, pgs);
}
else
@@ -641,14 +644,14 @@ namespace Duplicati.Library.Main.Operation.Common
// Fast exit
if (item.VerifyHashOnly)
return null;
-
+
// Decrypt before returning
if (!m_options.NoEncryption)
{
try
{
- using(var tmpfile2 = tmpfile)
- {
+ using (var tmpfile2 = tmpfile)
+ {
tmpfile = new Library.Utility.TempFile();
// Auto-guess the encryption module
@@ -658,11 +661,11 @@ namespace Duplicati.Library.Main.Operation.Common
// Check if the file is encrypted with something else
if (DynamicLoader.EncryptionLoader.Keys.Contains(ext, StringComparer.OrdinalIgnoreCase))
{
- using(var encmodule = DynamicLoader.EncryptionLoader.GetModule(ext, m_options.Passphrase, m_options.RawOptions))
+ using (var encmodule = DynamicLoader.EncryptionLoader.GetModule(ext, m_options.Passphrase, m_options.RawOptions))
if (encmodule != null)
{
- Logging.Log.WriteVerboseMessage(LOGTAG, "AutomaticDecryptionDetection", "Filename extension \"{0}\" does not match encryption module \"{1}\", using matching encryption module", ext, m_options.EncryptionModule);
- encmodule.Decrypt(tmpfile2, tmpfile);
+ Logging.Log.WriteVerboseMessage(LOGTAG, "AutomaticDecryptionDetection", "Filename extension \"{0}\" does not match encryption module \"{1}\", using matching encryption module", ext, m_options.EncryptionModule);
+ encmodule.Decrypt(tmpfile2, tmpfile);
}
}
// Check if the file is not encrypted
@@ -674,13 +677,13 @@ namespace Duplicati.Library.Main.Operation.Common
else
{
Logging.Log.WriteVerboseMessage(LOGTAG, "AutomaticDecryptionDetection", "Filename extension \"{0}\" does not match encryption module \"{1}\", attempting to use specified encryption module as no others match", ext, m_options.EncryptionModule);
- using(var encmodule = DynamicLoader.EncryptionLoader.GetModule(m_options.EncryptionModule, m_options.Passphrase, m_options.RawOptions))
+ using (var encmodule = DynamicLoader.EncryptionLoader.GetModule(m_options.EncryptionModule, m_options.Passphrase, m_options.RawOptions))
encmodule.Decrypt(tmpfile2, tmpfile);
}
}
else
{
- using(var encmodule = DynamicLoader.EncryptionLoader.GetModule(m_options.EncryptionModule, m_options.Passphrase, m_options.RawOptions))
+ using (var encmodule = DynamicLoader.EncryptionLoader.GetModule(m_options.EncryptionModule, m_options.Passphrase, m_options.RawOptions))
encmodule.Decrypt(tmpfile2, tmpfile);
}
}
@@ -701,9 +704,9 @@ namespace Duplicati.Library.Main.Operation.Common
}
finally
{
- try
- {
- if (tmpfile != null)
+ try
+ {
+ if (tmpfile != null)
tmpfile.Dispose();
}
catch
@@ -712,31 +715,31 @@ namespace Duplicati.Library.Main.Operation.Common
}
}
- private string m_lastThrottleUploadValue = null;
- private string m_lastThrottleDownloadValue = null;
+ private string m_lastThrottleUploadValue = null;
+ private string m_lastThrottleDownloadValue = null;
- private void HandleProgress(ThrottledStream ts, long pg)
- {
+ private void HandleProgress(ThrottledStream ts, long pg)
+ {
if (!m_taskreader.TransferProgressAsync.WaitForTask().Result)
throw new OperationCanceledException();
- // Update the throttle speeds if they have changed
- string tmp;
- m_options.RawOptions.TryGetValue("throttle-upload", out tmp);
- if (tmp != m_lastThrottleUploadValue)
- {
- ts.WriteSpeed = m_options.MaxUploadPrSecond;
- m_lastThrottleUploadValue = tmp;
- }
-
- m_options.RawOptions.TryGetValue("throttle-download", out tmp);
- if (tmp != m_lastThrottleDownloadValue)
- {
- ts.ReadSpeed = m_options.MaxDownloadPrSecond;
- m_lastThrottleDownloadValue = tmp;
- }
-
- m_stats.UpdateBackendProgress(pg);
+ // Update the throttle speeds if they have changed
+ string tmp;
+ m_options.RawOptions.TryGetValue("throttle-upload", out tmp);
+ if (tmp != m_lastThrottleUploadValue)
+ {
+ ts.WriteSpeed = m_options.MaxUploadPrSecond;
+ m_lastThrottleUploadValue = tmp;
+ }
+
+ m_options.RawOptions.TryGetValue("throttle-download", out tmp);
+ if (tmp != m_lastThrottleDownloadValue)
+ {
+ ts.ReadSpeed = m_options.MaxDownloadPrSecond;
+ m_lastThrottleDownloadValue = tmp;
+ }
+
+ m_stats.UpdateBackendProgress(pg);
}
protected override void Dispose(bool disposing)
@@ -745,7 +748,7 @@ namespace Duplicati.Library.Main.Operation.Common
if (m_backend != null)
try { m_backend.Dispose(); }
- catch {}
+ catch { }
finally { m_backend = null; }
}
}
diff --git a/Duplicati/Library/Main/Operation/CompactHandler.cs b/Duplicati/Library/Main/Operation/CompactHandler.cs
index ba2d62c44..701119324 100644
--- a/Duplicati/Library/Main/Operation/CompactHandler.cs
+++ b/Duplicati/Library/Main/Operation/CompactHandler.cs
@@ -141,7 +141,7 @@ namespace Duplicati.Library.Main.Operation
where report.CompactableVolumes.Contains(v.Name)
select (IRemoteVolume)v).ToList();
- using(var q = db.CreateBlockQueryHelper(m_options, transaction))
+ using(var q = db.CreateBlockQueryHelper(transaction))
{
foreach (var entry in new AsyncDownloader(volumesToDownload, backend))
{
diff --git a/Duplicati/Library/Main/Operation/FilelistProcessor.cs b/Duplicati/Library/Main/Operation/FilelistProcessor.cs
index 00bdff668..c5badc9f0 100644
--- a/Duplicati/Library/Main/Operation/FilelistProcessor.cs
+++ b/Duplicati/Library/Main/Operation/FilelistProcessor.cs
@@ -33,10 +33,8 @@ namespace Duplicati.Library.Main.Operation
/// Helper method that verifies uploaded volumes and updates their state in the database.
/// Throws an error if there are issues with the remote storage
/// </summary>
- /// <param name="options">The options used</param>
/// <param name="database">The database to compare with</param>
- /// <param name="log">The log instance to use</param>
- public static void VerifyLocalList(BackendManager backend, Options options, LocalDatabase database, IBackendWriter log)
+ public static void VerifyLocalList(BackendManager backend, LocalDatabase database)
{
var locallist = database.GetRemoteVolumes();
foreach(var i in locallist)
diff --git a/Duplicati/Library/Main/Operation/PurgeFilesHandler.cs b/Duplicati/Library/Main/Operation/PurgeFilesHandler.cs
index 16f4de839..25a1bcf1e 100644
--- a/Duplicati/Library/Main/Operation/PurgeFilesHandler.cs
+++ b/Duplicati/Library/Main/Operation/PurgeFilesHandler.cs
@@ -85,7 +85,7 @@ namespace Duplicati.Library.Main.Operation
db.VerifyConsistency(m_options.Blocksize, m_options.BlockhashSize, false, null);
if (m_options.NoBackendverification)
- FilelistProcessor.VerifyLocalList(backend, m_options, db, m_result.BackendWriter);
+ FilelistProcessor.VerifyLocalList(backend, db);
else
FilelistProcessor.VerifyRemoteList(backend, m_options, db, m_result.BackendWriter, null);
}
diff --git a/Duplicati/Library/Main/Operation/RestoreHandler.cs b/Duplicati/Library/Main/Operation/RestoreHandler.cs
index 6701f3ff9..76ada16f2 100644
--- a/Duplicati/Library/Main/Operation/RestoreHandler.cs
+++ b/Duplicati/Library/Main/Operation/RestoreHandler.cs
@@ -124,7 +124,7 @@ namespace Duplicati.Library.Main.Operation
.DoRun(database, false, filter, filelistfilter, null);
if (!m_options.SkipMetadata)
- ApplyStoredMetadata(database, m_options, m_result, metadatastorage);
+ ApplyStoredMetadata(m_options, metadatastorage);
}
//If we have --version set, we need to adjust, as the db has only the required versions
@@ -253,7 +253,7 @@ namespace Duplicati.Library.Main.Operation
}
}
- private static void ApplyStoredMetadata(LocalRestoreDatabase database, Options options, RestoreResults result, RestoreHandlerMetadataStorage metadatastorage)
+ private static void ApplyStoredMetadata(Options options, RestoreHandlerMetadataStorage metadatastorage)
{
foreach(var metainfo in metadatastorage.Records)
{
@@ -407,7 +407,7 @@ namespace Duplicati.Library.Main.Operation
// Apply metadata
if (!m_options.SkipMetadata)
- ApplyStoredMetadata(database, m_options, m_result, metadatastorage);
+ ApplyStoredMetadata(m_options, metadatastorage);
// Reset the filehasher if it was used to verify existing files
filehasher.Initialize();
diff --git a/Duplicati/Library/Main/Volumes/VolumeWriterBase.cs b/Duplicati/Library/Main/Volumes/VolumeWriterBase.cs
index 8729b66ff..c427a1ff0 100644
--- a/Duplicati/Library/Main/Volumes/VolumeWriterBase.cs
+++ b/Duplicati/Library/Main/Volumes/VolumeWriterBase.cs
@@ -29,7 +29,7 @@ namespace Duplicati.Library.Main.Volumes
{
}
- public static string GenerateGuid(Options options)
+ public static string GenerateGuid()
{
var s = Guid.NewGuid().ToString("N");
@@ -41,7 +41,7 @@ namespace Duplicati.Library.Main.Volumes
public void ResetRemoteFilename(Options options, DateTime timestamp)
{
- m_volumename = GenerateFilename(this.FileType, options.Prefix, GenerateGuid(options), timestamp, options.CompressionModule, options.NoEncryption ? null : options.EncryptionModule);
+ m_volumename = GenerateFilename(this.FileType, options.Prefix, GenerateGuid(), timestamp, options.CompressionModule, options.NoEncryption ? null : options.EncryptionModule);
}
protected VolumeWriterBase(Options options, DateTime timestamp)
diff --git a/Duplicati/Library/Snapshots/SnapshotUtility.cs b/Duplicati/Library/Snapshots/SnapshotUtility.cs
index 8a6185171..43f6d7336 100644
--- a/Duplicati/Library/Snapshots/SnapshotUtility.cs
+++ b/Duplicati/Library/Snapshots/SnapshotUtility.cs
@@ -38,7 +38,7 @@ namespace Duplicati.Library.Snapshots
{
return
Utility.Utility.IsClientLinux
- ? CreateLinuxSnapshot(folders, options)
+ ? CreateLinuxSnapshot(folders)
: CreateWindowsSnapshot(folders, options);
}
@@ -51,10 +51,9 @@ namespace Duplicati.Library.Snapshots
/// Loads a snapshot implementation for Linux
/// </summary>
/// <param name="folders">The list of folders to create snapshots of</param>
- /// <param name="options">A set of commandline options</param>
/// <returns>The ISnapshotService implementation</returns>
[System.Runtime.CompilerServices.MethodImpl(System.Runtime.CompilerServices.MethodImplOptions.NoInlining)]
- private static ISnapshotService CreateLinuxSnapshot(IEnumerable<string> folders, Dictionary<string, string> options)
+ private static ISnapshotService CreateLinuxSnapshot(IEnumerable<string> folders)
{
return new LinuxSnapshot(folders);
}
diff --git a/Duplicati/Library/Utility/DirectStreamLink.cs b/Duplicati/Library/Utility/DirectStreamLink.cs
index 5c51d97ef..228da9e95 100644
--- a/Duplicati/Library/Utility/DirectStreamLink.cs
+++ b/Duplicati/Library/Utility/DirectStreamLink.cs
@@ -415,8 +415,7 @@ namespace Duplicati.Library.Utility
/// <param name="callbackFinalizePumping"> A callback to issue when pumping is done but before streams are closed. e.g. Can add data to output. </param>
/// <param name="dontCloseInputWhenDone"> Disable auto close of input stream when pumping is done. </param>
/// <param name="dontCloseOutputWhenDone"> Disable auto close of output stream when pumping is done. </param>
- public DataPump(Stream input, Stream output, int bufsize = DEFAULTBUFSIZE
- , Action<DataPump> callbackFinalizePumping = null
+ public DataPump(Stream input, Stream output, Action<DataPump> callbackFinalizePumping = null
, bool dontCloseInputWhenDone = false, bool dontCloseOutputWhenDone = false)
{
this.m_input = input;
diff --git a/Duplicati/Library/Utility/ProgressReportingStream.cs b/Duplicati/Library/Utility/ProgressReportingStream.cs
index 061c3a01c..b3f0fcda9 100644
--- a/Duplicati/Library/Utility/ProgressReportingStream.cs
+++ b/Duplicati/Library/Utility/ProgressReportingStream.cs
@@ -31,7 +31,7 @@ namespace Duplicati.Library.Utility
private readonly Action<long> m_progress;
private long m_streamOffset;
- public ProgressReportingStream(System.IO.Stream basestream, long expectedSize, Action<long> progress)
+ public ProgressReportingStream(System.IO.Stream basestream, Action<long> progress)
: base(basestream)
{
m_streamOffset = 0;