github.com/duplicati/duplicati.git

author    Kenneth Skovhede <kenneth@hexad.dk>  2020-03-25 09:11:04 +0300
committer GitHub <noreply@github.com>          2020-03-25 09:11:04 +0300
commit    e45b8c4b2e465712144959f0d80eae4b03b75e8b (patch)
tree      7da5af99486d15072393b3484f284d1e109ab3a4
parent    6a8912c4736bbe8bd9e61be561f339e3f16f48ab (diff)
parent    d5e064ae452c6051a3ba4c6a2b897c95f1c4b7d1 (diff)
Merge pull request #4049 from seantempleton/filesetdifference
Fix unexpected difference in fileset when using Stop Now
-rw-r--r--  Duplicati/Library/Main/Controller.cs                    | 11
-rw-r--r--  Duplicati/Library/Main/Database/LocalBackupDatabase.cs  | 22
-rw-r--r--  Duplicati/Library/Main/Database/LocalDatabase.cs        | 58
-rw-r--r--  Duplicati/Library/Main/Database/LocalDeleteDatabase.cs  |  8
-rw-r--r--  Duplicati/Library/Main/Operation/BackupHandler.cs       | 23
-rw-r--r--  Duplicati/Library/Main/Operation/CompactHandler.cs      |  6
-rw-r--r--  Duplicati/Library/Main/Operation/DeleteHandler.cs       |  6
-rw-r--r--  Duplicati/Library/Main/Operation/FilelistProcessor.cs   | 53
-rw-r--r--  Duplicati/Library/Main/Operation/RestoreHandler.cs      |  7
-rw-r--r--  Duplicati/Library/Main/Operation/TestHandler.cs         |  8
-rw-r--r--  Duplicati/UnitTest/DisruptionTests.cs                   |  4
11 files changed, 135 insertions, 71 deletions
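
The theme of the merge: a backup interrupted with "Stop Now" can leave its dlist (fileset) volume in the Temporary or Uploading state, and the next operation would then fail with an unexpected difference in the fileset. The fix threads the names of such incomplete filelist volumes through remote verification as protected files, so they are neither flagged as extra nor deleted. A minimal sketch of the call pattern introduced below, assuming a LocalBackupDatabase (backupDatabase), a BackendManager (backend), and an open transaction (tr) are in scope:

    // Collect dlist volume names from incomplete (Temporary/Uploading) filesets...
    var protectedNames = backupDatabase.GetTemporaryFilelistVolumeNames(latestOnly: true, transaction: tr);
    // ...and exempt them from deletion during remote verification.
    FilelistProcessor.VerifyRemoteList(backend, options, db, log, protectedNames);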
diff --git a/Duplicati/Library/Main/Controller.cs b/Duplicati/Library/Main/Controller.cs
index ec1629043..ea2c93dce 100644
--- a/Duplicati/Library/Main/Controller.cs
+++ b/Duplicati/Library/Main/Controller.cs
@@ -1076,12 +1076,11 @@ namespace Duplicati.Library.Main
public void Stop(bool allowCurrentFileToFinish)
{
var ct = m_currentTask;
- if (ct == null) return;
- if (allowCurrentFileToFinish)
- {
- Logging.Log.WriteVerboseMessage(LOGTAG, "CancellationRequested", "Cancellation Requested");
- m_cancellationTokenSource.Cancel();
- }
+ if (ct == null)
+ return;
+
+ Logging.Log.WriteVerboseMessage(LOGTAG, "CancellationRequested", "Cancellation Requested");
+ m_cancellationTokenSource.Cancel();
ct.Stop(allowCurrentFileToFinish);
}
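
After this hunk, Stop always cancels the shared cancellation token; the allowCurrentFileToFinish flag only controls how the current task is stopped. An illustrative sketch of the resulting semantics, assuming controller is a Controller instance:

    // "Stop after current file": cancel the token, let the in-flight file complete.
    controller.Stop(allowCurrentFileToFinish: true);

    // "Stop now": cancel the token and abort the current file as well.
    controller.Stop(allowCurrentFileToFinish: false);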
diff --git a/Duplicati/Library/Main/Database/LocalBackupDatabase.cs b/Duplicati/Library/Main/Database/LocalBackupDatabase.cs
index 84e99cab8..dcccea279 100644
--- a/Duplicati/Library/Main/Database/LocalBackupDatabase.cs
+++ b/Duplicati/Library/Main/Database/LocalBackupDatabase.cs
@@ -2,10 +2,6 @@ using System;
using System.Collections.Generic;
using System.Data;
using System.Linq;
-using System.Text;
-using System.IO;
-using System.Linq.Expressions;
-
namespace Duplicati.Library.Main.Database
{
@@ -761,7 +757,6 @@ namespace Duplicati.Library.Main.Database
{
using (var cmd = m_connection.CreateCommand(transaction))
{
- cmd.Transaction = transaction;
using (var rd = cmd.ExecuteReader(@"SELECT DISTINCT ""Fileset"".""ID"", ""Fileset"".""Timestamp"" FROM ""Fileset"", ""RemoteVolume"" WHERE ""RemoteVolume"".""ID"" = ""Fileset"".""VolumeID"" AND ""Fileset"".""ID"" IN (SELECT ""FilesetID"" FROM ""FilesetEntry"") AND (""RemoteVolume"".""State"" = ""Uploading"" OR ""RemoteVolume"".""State"" = ""Temporary"")"))
while (rd.Read())
{
@@ -789,6 +784,23 @@ namespace Duplicati.Library.Main.Database
);
else
return default(RemoteVolumeEntry);
+ }
+
+ public IEnumerable<string> GetTemporaryFilelistVolumeNames(bool latestOnly, IDbTransaction transaction = null)
+ {
+ var incompleteFilesetIDs = GetIncompleteFilesets(transaction).OrderBy(x => x.Value).Select(x => x.Key).ToArray();
+
+ if (!incompleteFilesetIDs.Any())
+ return Enumerable.Empty<string>();
+
+ if (latestOnly)
+ incompleteFilesetIDs = new long[] { incompleteFilesetIDs.Last() };
+
+ var volumeNames = new List<string>();
+ foreach (var filesetID in incompleteFilesetIDs)
+ volumeNames.Add(GetRemoteVolumeFromFilesetID(filesetID).Name);
+
+ return volumeNames;
}
public IEnumerable<string> GetMissingIndexFiles(System.Data.IDbTransaction transaction)
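
The new GetTemporaryFilelistVolumeNames helper builds on GetIncompleteFilesets: it orders the incomplete filesets by timestamp and maps each fileset ID to the name of its remote dlist volume. In the hunks below, compact, delete, and test pass latestOnly: true to protect only the newest incomplete fileset; restore passes latestOnly: false to protect all of them; the backup path protects its own in-progress dlist volume directly. A usage sketch:

    var allIncomplete = backupDatabase.GetTemporaryFilelistVolumeNames(latestOnly: false);
    var newestOnly    = backupDatabase.GetTemporaryFilelistVolumeNames(latestOnly: true);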
diff --git a/Duplicati/Library/Main/Database/LocalDatabase.cs b/Duplicati/Library/Main/Database/LocalDatabase.cs
index 65255d673..f46694ef3 100644
--- a/Duplicati/Library/Main/Database/LocalDatabase.cs
+++ b/Duplicati/Library/Main/Database/LocalDatabase.cs
@@ -412,36 +412,67 @@ namespace Duplicati.Library.Main.Database
var volIdsSubQuery = string.Format(@"SELECT ""ID"" FROM ""{0}"" ", volidstable);
deletecmd.Parameters.Clear();
- // If the volume is a block or index volume, this will update the crosslink table, otherwise nothing will happen
- deletecmd.ExecuteNonQuery(string.Format(@"DELETE FROM ""IndexBlockLink"" WHERE ""BlockVolumeID"" IN ({0}) OR ""IndexVolumeID"" IN ({0})", volIdsSubQuery));
-
- // If the volume is a fileset, this will remove the fileset, otherwise nothing will happen
- deletecmd.ExecuteNonQuery(string.Format(@"DELETE FROM ""FilesetEntry"" WHERE ""FilesetID"" IN (SELECT ""ID"" FROM ""Fileset"" WHERE ""VolumeID"" IN ({0}))", volIdsSubQuery));
- deletecmd.ExecuteNonQuery(string.Format(@"DELETE FROM ""ChangeJournalData"" WHERE ""FilesetID"" IN (SELECT ""ID"" FROM ""Fileset"" WHERE ""VolumeID"" IN ({0}))", volIdsSubQuery));
- deletecmd.ExecuteNonQuery(string.Format(@"DELETE FROM ""Fileset"" WHERE ""VolumeID"" IN ({0})", volIdsSubQuery));
var bsIdsSubQuery = string.Format(
- @"SELECT ""BlocksetEntry"".""BlocksetID"" FROM ""BlocksetEntry"", ""Block"" "
+ @"SELECT DISTINCT ""BlocksetEntry"".""BlocksetID"" FROM ""BlocksetEntry"", ""Block"""
+ @" WHERE ""BlocksetEntry"".""BlockID"" = ""Block"".""ID"" AND ""Block"".""VolumeID"" IN ({0}) "
+ @"UNION ALL "
- + @"SELECT ""BlocksetID"" FROM ""BlocklistHash"" "
- + @"WHERE ""Hash"" IN (SELECT ""Hash"" FROM ""Block"" WHERE ""VolumeID"" IN ({0}))"
+ + @"SELECT DISTINCT ""BlocksetID"" FROM ""BlocklistHash"""
+ + @" WHERE ""Hash"" IN (SELECT ""Hash"" FROM ""Block"" WHERE ""VolumeID"" IN ({0}))"
, volIdsSubQuery);
// Create a temporary table to cache subquery result, as it might take long (SQLite does not cache at all).
deletecmd.ExecuteNonQuery(string.Format(@"CREATE TEMP TABLE ""{0}"" (""ID"" INTEGER PRIMARY KEY)", blocksetidstable));
deletecmd.ExecuteNonQuery(string.Format(@"INSERT OR IGNORE INTO ""{0}"" (""ID"") {1}", blocksetidstable, bsIdsSubQuery));
- bsIdsSubQuery = string.Format(@"SELECT ""ID"" FROM ""{0}"" ", blocksetidstable);
+ bsIdsSubQuery = string.Format(@"SELECT DISTINCT ""ID"" FROM ""{0}"" ", blocksetidstable);
deletecmd.Parameters.Clear();
- deletecmd.ExecuteNonQuery(string.Format(@"DELETE FROM ""FileLookup"" WHERE ""BlocksetID"" IN ({0}) OR ""MetadataID"" IN ({0})", bsIdsSubQuery));
+ // Create a temp table to associate metadata that is being deleted to a fileset
+ var metadataFilesetQuery = $@"SELECT Metadataset.ID, FilesetEntry.FilesetID
+FROM Metadataset
+INNER JOIN FileLookup ON FileLookup.MetadataID = Metadataset.ID
+INNER JOIN FilesetEntry ON FilesetEntry.FileID = FileLookup.ID
+WHERE Metadataset.BlocksetID IN ({bsIdsSubQuery})
+OR Metadataset.ID IN (SELECT MetadataID FROM FileLookup WHERE BlocksetID IN ({bsIdsSubQuery}))";
+
+ var metadataFilesetTable = @"DelMetadataFilesetIds-" + temptransguid;
+ deletecmd.ExecuteNonQuery($@"CREATE TEMP TABLE ""{metadataFilesetTable}"" (MetadataID INTEGER PRIMARY KEY, FilesetID INTEGER)");
+ deletecmd.ExecuteNonQuery($@"INSERT OR IGNORE INTO ""{metadataFilesetTable}"" (MetadataID, FilesetID) {metadataFilesetQuery}");
+
+ // Delete FilesetEntry rows that had their metadata deleted
+ deletecmd.ExecuteNonQuery($@"DELETE FROM FilesetEntry
+WHERE FilesetEntry.FilesetID IN (SELECT DISTINCT FilesetID FROM ""{metadataFilesetTable}"")
+AND FilesetEntry.FileID IN (
+ SELECT FilesetEntry.FileID
+ FROM FilesetEntry
+ INNER JOIN FileLookup ON FileLookup.ID = FilesetEntry.FileID
+ WHERE FileLookup.MetadataID IN (SELECT MetadataID FROM ""{metadataFilesetTable}""))");
+
+ // Delete FilesetEntry rows that had their blocks deleted
+ deletecmd.ExecuteNonQuery($@"DELETE FROM FilesetEntry WHERE FilesetEntry.FileID IN (
+SELECT ID FROM FileLookup
+WHERE FileLookup.BlocksetID IN ({bsIdsSubQuery}))");
+ deletecmd.ExecuteNonQuery($@"DELETE FROM FileLookup WHERE FileLookup.MetadataID IN (SELECT MetadataID FROM ""{metadataFilesetTable}"")");
+
deletecmd.ExecuteNonQuery(string.Format(@"DELETE FROM ""Metadataset"" WHERE ""BlocksetID"" IN ({0})", bsIdsSubQuery));
+ deletecmd.ExecuteNonQuery(string.Format(@"DELETE FROM ""FileLookup"" WHERE ""BlocksetID"" IN ({0})", bsIdsSubQuery));
deletecmd.ExecuteNonQuery(string.Format(@"DELETE FROM ""Blockset"" WHERE ""ID"" IN ({0})", bsIdsSubQuery));
deletecmd.ExecuteNonQuery(string.Format(@"DELETE FROM ""BlocksetEntry"" WHERE ""BlocksetID"" IN ({0})", bsIdsSubQuery));
-
deletecmd.ExecuteNonQuery(string.Format(@"DELETE FROM ""BlocklistHash"" WHERE ""BlocklistHash"".""BlocksetID"" IN ({0})", bsIdsSubQuery));
+
+ // If the volume is a block or index volume, this will update the crosslink table, otherwise nothing will happen
+ deletecmd.ExecuteNonQuery(string.Format(@"DELETE FROM ""IndexBlockLink"" WHERE ""BlockVolumeID"" IN ({0}) OR ""IndexVolumeID"" IN ({0})", volIdsSubQuery));
deletecmd.ExecuteNonQuery(string.Format(@"DELETE FROM ""Block"" WHERE ""VolumeID"" IN ({0})", volIdsSubQuery));
deletecmd.ExecuteNonQuery(string.Format(@"DELETE FROM ""DeletedBlock"" WHERE ""VolumeID"" IN ({0})", volIdsSubQuery));
+ deletecmd.ExecuteNonQuery(string.Format(@"DELETE FROM ""ChangeJournalData"" WHERE ""FilesetID"" IN (SELECT ""ID"" FROM ""Fileset"" WHERE ""VolumeID"" IN ({0}))", volIdsSubQuery));
+ deletecmd.ExecuteNonQuery($@"DELETE FROM FilesetEntry WHERE FilesetID IN (SELECT ID FROM Fileset WHERE VolumeID IN ({volIdsSubQuery}))");
+ deletecmd.ExecuteNonQuery($@"DELETE FROM Fileset WHERE VolumeID IN ({volIdsSubQuery})");
+
+ // Delete from Fileset if FilesetEntry rows were deleted by related metadata and there are no references in FilesetEntry anymore
+ deletecmd.ExecuteNonQuery($@"DELETE FROM Fileset WHERE Fileset.ID IN
+(SELECT DISTINCT FilesetID FROM ""{metadataFilesetTable}"")
+AND Fileset.ID NOT IN
+ (SELECT DISTINCT FilesetID FROM FilesetEntry)");
// Clean up temp tables for subqueries. We truncate content and then try to delete.
// Drop in try-block, as it fails in nested transactions (SQLite problem)
@@ -453,6 +484,7 @@ namespace Duplicati.Library.Main.Database
deletecmd.CommandTimeout = 2;
deletecmd.ExecuteNonQuery(string.Format(@"DROP TABLE IF EXISTS ""{0}"" ", blocksetidstable));
deletecmd.ExecuteNonQuery(string.Format(@"DROP TABLE IF EXISTS ""{0}"" ", volidstable));
+ deletecmd.ExecuteNonQuery(string.Format(@"DROP TABLE IF EXISTS ""{0}"" ", metadataFilesetTable));
}
catch { /* Ignore, will be deleted on close anyway. */ }
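
The reordering in this hunk deletes child rows before their parents and uses the DelMetadataFilesetIds temp table to bridge deleted metadata back to the filesets that referenced it. A comment-only outline of the resulting order (a reading aid, not code from the patch):

    // 1. Cache the affected blockset IDs in a temp table (SQLite does not cache subqueries).
    // 2. Record (MetadataID, FilesetID) pairs for metadata that is about to be deleted.
    // 3. Delete FilesetEntry rows whose metadata or blocks are going away.
    // 4. Delete the dependent FileLookup, Metadataset, Blockset, BlocksetEntry,
    //    and BlocklistHash rows.
    // 5. Delete the volume-scoped rows: IndexBlockLink, Block, DeletedBlock,
    //    ChangeJournalData, FilesetEntry, and Fileset.
    // 6. Drop any fileset that lost all of its FilesetEntry rows in step 3.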
diff --git a/Duplicati/Library/Main/Database/LocalDeleteDatabase.cs b/Duplicati/Library/Main/Database/LocalDeleteDatabase.cs
index 397f4a17a..7ff65885b 100644
--- a/Duplicati/Library/Main/Database/LocalDeleteDatabase.cs
+++ b/Duplicati/Library/Main/Database/LocalDeleteDatabase.cs
@@ -95,11 +95,11 @@ namespace Duplicati.Library.Main.Database
//We save the block info for the remote files, before we delete it
cmd.ExecuteNonQuery(@"INSERT INTO ""DeletedBlock"" (""Hash"", ""Size"", ""VolumeID"") SELECT ""Hash"", ""Size"", ""VolumeID"" FROM ""Block"" WHERE ""ID"" NOT IN (SELECT DISTINCT ""BlockID"" AS ""BlockID"" FROM ""BlocksetEntry"" UNION SELECT DISTINCT ""ID"" FROM ""Block"", ""BlocklistHash"" WHERE ""Block"".""Hash"" = ""BlocklistHash"".""Hash"") ");
- cmd.ExecuteNonQuery(@"DELETE FROM ""Block"" WHERE ""ID"" NOT IN (SELECT DISTINCT ""BlockID"" FROM ""BlocksetEntry"" UNION SELECT DISTINCT ""ID"" FROM ""Block"", ""BlocklistHash"" WHERE ""Block"".""Hash"" = ""BlocklistHash"".""Hash"") ");
-
+ cmd.ExecuteNonQuery(@"DELETE FROM ""Block"" WHERE ""ID"" NOT IN (SELECT DISTINCT ""BlockID"" FROM ""BlocksetEntry"" UNION SELECT DISTINCT ""ID"" FROM ""Block"", ""BlocklistHash"" WHERE ""Block"".""Hash"" = ""BlocklistHash"".""Hash"") ");
+
//Find all remote filesets that are no longer required, and mark them as delete
- var updated = cmd.ExecuteNonQuery(@"UPDATE ""RemoteVolume"" SET ""State"" = ? WHERE ""Type"" = ? AND ""State"" IN (?, ?) AND ""ID"" NOT IN (SELECT ""VolumeID"" FROM ""Fileset"") ", RemoteVolumeState.Deleting.ToString(), RemoteVolumeType.Files.ToString(), RemoteVolumeState.Uploaded.ToString(), RemoteVolumeState.Verified.ToString());
-
+ var updated = cmd.ExecuteNonQuery(@"UPDATE ""RemoteVolume"" SET ""State"" = ? WHERE ""Type"" = ? AND ""State"" IN (?, ?, ?) AND ""ID"" NOT IN (SELECT ""VolumeID"" FROM ""Fileset"") ", RemoteVolumeState.Deleting.ToString(), RemoteVolumeType.Files.ToString(), RemoteVolumeState.Uploaded.ToString(), RemoteVolumeState.Verified.ToString(), RemoteVolumeState.Temporary.ToString());
+
if (deleted != updated)
throw new Exception(string.Format("Unexpected number of remote volumes marked as deleted. Found {0} filesets, but {1} volumes", deleted, updated));
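
The state filter gains RemoteVolumeState.Temporary so that dlist volumes left behind by a stopped backup are also marked Deleting once their Fileset row is gone. Rendered with the positional parameters substituted (for readability only):

    // UPDATE RemoteVolume SET State = 'Deleting'
    // WHERE Type = 'Files'
    //   AND State IN ('Uploaded', 'Verified', 'Temporary')
    //   AND ID NOT IN (SELECT VolumeID FROM Fileset)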
diff --git a/Duplicati/Library/Main/Operation/BackupHandler.cs b/Duplicati/Library/Main/Operation/BackupHandler.cs
index 3a53a3cae..8a4bd2768 100644
--- a/Duplicati/Library/Main/Operation/BackupHandler.cs
+++ b/Duplicati/Library/Main/Operation/BackupHandler.cs
@@ -154,7 +154,7 @@ namespace Duplicati.Library.Main.Operation
UpdateStorageStatsFromDatabase();
}
else
- FilelistProcessor.VerifyRemoteList(backend, m_options, m_database, m_result.BackendWriter, protectedfile);
+ FilelistProcessor.VerifyRemoteList(backend, m_options, m_database, m_result.BackendWriter, new string[] { protectedfile });
}
catch (Exception ex)
{
@@ -165,7 +165,7 @@ namespace Duplicati.Library.Main.Operation
new RepairHandler(backend.BackendUrl, m_options, (RepairResults)m_result.RepairResults).Run();
Logging.Log.WriteInformationMessage(LOGTAG, "BackendCleanupFinished", "Backend cleanup finished, retrying verification");
- FilelistProcessor.VerifyRemoteList(backend, m_options, m_database, m_result.BackendWriter);
+ FilelistProcessor.VerifyRemoteList(backend, m_options, m_database, m_result.BackendWriter, new string[] { protectedfile });
}
else
throw;
@@ -286,13 +286,13 @@ namespace Duplicati.Library.Main.Operation
}
}
- private void PostBackupVerification()
+ private void PostBackupVerification(string currentFilelistVolume)
{
m_result.OperationProgressUpdater.UpdatePhase(OperationPhase.Backup_PostBackupVerify);
using(var backend = new BackendManager(m_backendurl, m_options, m_result.BackendWriter, m_database))
{
- using(new Logging.Timer(LOGTAG, "AfterBackupVerify", "AfterBackupVerify"))
- FilelistProcessor.VerifyRemoteList(backend, m_options, m_database, m_result.BackendWriter);
+ using (new Logging.Timer(LOGTAG, "AfterBackupVerify", "AfterBackupVerify"))
+ FilelistProcessor.VerifyRemoteList(backend, m_options, m_database, m_result.BackendWriter, new string[] { currentFilelistVolume });
backend.WaitForComplete(m_database, null);
}
@@ -375,10 +375,17 @@ namespace Duplicati.Library.Main.Operation
private static async Task<long> FlushBackend(BackupResults result, IWriteChannel<Backup.IUploadRequest> uploadtarget, Task uploader)
{
- var flushReq = new Backup.FlushRequest();
-
// Wait for upload completion
result.OperationProgressUpdater.UpdatePhase(OperationPhase.Backup_WaitForUpload);
+
+ if (await uploadtarget.IsRetiredAsync)
+ {
+ await uploader.ConfigureAwait(false);
+ return -1;
+ }
+
+ var flushReq = new Backup.FlushRequest();
+
await uploadtarget.WriteAsync(flushReq).ConfigureAwait(false);
await uploader.ConfigureAwait(false);
@@ -558,7 +565,7 @@ namespace Duplicati.Library.Main.Operation
if (m_options.NoBackendverification)
UpdateStorageStatsFromDatabase();
else
- PostBackupVerification();
+ PostBackupVerification(filesetvolume.RemoteFilename);
}
}
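
FlushBackend now checks whether the upload channel has been retired, which happens when the backup is stopped, before writing the flush request; writing to a retired CoCoL channel would throw instead of flushing. A sketch of the guard pattern, assuming channel is an IWriteChannel<Backup.IUploadRequest> and request is a FlushRequest:

    if (await channel.IsRetiredAsync)           // channel was closed by Stop
    {
        await uploader.ConfigureAwait(false);   // still drain the uploader task
        return -1;                              // early exit, as in the hunk above
    }
    await channel.WriteAsync(request).ConfigureAwait(false);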
diff --git a/Duplicati/Library/Main/Operation/CompactHandler.cs b/Duplicati/Library/Main/Operation/CompactHandler.cs
index 217e08a8e..48c334b10 100644
--- a/Duplicati/Library/Main/Operation/CompactHandler.cs
+++ b/Duplicati/Library/Main/Operation/CompactHandler.cs
@@ -99,7 +99,11 @@ namespace Duplicati.Library.Main.Operation
{
var backend = bk ?? sharedBackend;
if (!hasVerifiedBackend && !m_options.NoBackendverification)
- FilelistProcessor.VerifyRemoteList(backend, m_options, db, m_result.BackendWriter);
+ {
+ var backupDatabase = new LocalBackupDatabase(db, m_options);
+ var latestFilelist = backupDatabase.GetTemporaryFilelistVolumeNames(latestOnly: true, transaction: transaction);
+ FilelistProcessor.VerifyRemoteList(backend, m_options, db, m_result.BackendWriter, latestFilelist);
+ }
BlockVolumeWriter newvol = new BlockVolumeWriter(m_options);
newvol.VolumeID = db.RegisterRemoteVolume(newvol.RemoteFilename, RemoteVolumeType.Blocks, RemoteVolumeState.Temporary, transaction);
diff --git a/Duplicati/Library/Main/Operation/DeleteHandler.cs b/Duplicati/Library/Main/Operation/DeleteHandler.cs
index 3b45a6214..ef1054048 100644
--- a/Duplicati/Library/Main/Operation/DeleteHandler.cs
+++ b/Duplicati/Library/Main/Operation/DeleteHandler.cs
@@ -88,7 +88,11 @@ namespace Duplicati.Library.Main.Operation
var backend = bk ?? sharedManager;
if (!hasVerifiedBacked && !m_options.NoBackendverification)
- FilelistProcessor.VerifyRemoteList(backend, m_options, db, m_result.BackendWriter);
+ {
+ var backupDatabase = new LocalBackupDatabase(db, m_options);
+ var latestFilelist = backupDatabase.GetTemporaryFilelistVolumeNames(latestOnly: true, transaction: transaction);
+ FilelistProcessor.VerifyRemoteList(backend, m_options, db, m_result.BackendWriter, latestFilelist);
+ }
IListResultFileset[] filesets = db.FilesetsWithBackupVersion.ToArray();
List<IListResultFileset> versionsToDelete = new List<IListResultFileset>();
diff --git a/Duplicati/Library/Main/Operation/FilelistProcessor.cs b/Duplicati/Library/Main/Operation/FilelistProcessor.cs
index 01569e571..91c668971 100644
--- a/Duplicati/Library/Main/Operation/FilelistProcessor.cs
+++ b/Duplicati/Library/Main/Operation/FilelistProcessor.cs
@@ -79,13 +79,13 @@ namespace Duplicati.Library.Main.Operation
/// <param name="options">The options used</param>
/// <param name="database">The database to compare with</param>
/// <param name="log">The log instance to use</param>
- /// <param name="protectedfile">A filename that should be exempted for deletion</param>
- public static void VerifyRemoteList(BackendManager backend, Options options, LocalDatabase database, IBackendWriter log, string protectedfile = null)
+ /// <param name="protectedFiles">Filenames that should be exempted from deletion</param>
+ public static void VerifyRemoteList(BackendManager backend, Options options, LocalDatabase database, IBackendWriter log, IEnumerable<string> protectedFiles = null)
{
- var tp = RemoteListAnalysis(backend, options, database, log, protectedfile);
+ var tp = RemoteListAnalysis(backend, options, database, log, protectedFiles);
long extraCount = 0;
long missingCount = 0;
-
+
foreach(var n in tp.ExtraVolumes)
{
Logging.Log.WriteWarningMessage(LOGTAG, "ExtraUnknownFile", null, "Extra unknown file: {0}", n.File.Name);
@@ -122,10 +122,10 @@ namespace Duplicati.Library.Main.Operation
s = string.Format("Found {0} files that are missing from the remote storage, and no files with the backup prefix {1}, but found the following backup prefixes: {2}", missingCount, options.Prefix, string.Join(", ", tp.BackupPrefixes));
else
s = string.Format("Found {0} files that are missing from the remote storage, please run repair", missingCount);
-
+
Logging.Log.WriteErrorMessage(LOGTAG, "MissingRemoteFiles", null, s);
throw new Duplicati.Library.Interface.UserInformationException(s, "MissingRemoteFiles");
- }
+ }
}
public struct RemoteAnalysisResult
@@ -150,7 +150,7 @@ namespace Duplicati.Library.Main.Operation
var s = new Newtonsoft.Json.JsonSerializer();
s.Serialize(stream, db.GetRemoteVolumes().Where(x => x.State != RemoteVolumeState.Temporary).Cast<IRemoteVolume>().ToArray());
}
-
+
/// <summary>
/// Uploads the verification file.
/// </summary>
@@ -167,7 +167,7 @@ namespace Duplicati.Library.Main.Operation
var remotename = options.Prefix + "-verification.json";
using(var stream = new System.IO.StreamWriter(tempfile, false, System.Text.Encoding.UTF8))
FilelistProcessor.CreateVerificationFile(db, stream);
-
+
if (options.Dryrun)
{
Logging.Log.WriteDryrunMessage(LOGTAG, "WouldUploadVerificationFile", "Would upload verification file: {0}, size: {1}", remotename, Library.Utility.Utility.FormatSizeString(new System.IO.FileInfo(tempfile).Length));
@@ -187,12 +187,12 @@ namespace Duplicati.Library.Main.Operation
/// <param name="backend">The backend instance to use</param>
/// <param name="options">The options used</param>
/// <param name="database">The database to compare with</param>
- /// <param name="protectedfile">A filename that should be exempted for deletion</param>
- public static RemoteAnalysisResult RemoteListAnalysis(BackendManager backend, Options options, LocalDatabase database, IBackendWriter log, string protectedfile)
+ /// <param name="protectedFiles">Filenames that should be exempted from deletion</param>
+ public static RemoteAnalysisResult RemoteListAnalysis(BackendManager backend, Options options, LocalDatabase database, IBackendWriter log, IEnumerable<string> protectedFiles)
{
var rawlist = backend.List();
var lookup = new Dictionary<string, Volumes.IParsedVolume>();
- protectedfile = protectedfile ?? string.Empty;
+ protectedFiles = protectedFiles ?? Enumerable.Empty<string>();
var remotelist = (from n in rawlist
let p = Volumes.VolumeBase.ParseFilename(n)
@@ -248,17 +248,17 @@ namespace Duplicati.Library.Main.Operation
if (quota.FreeQuotaSpace < warningThreshold * knownFileSize)
{
log.ReportedQuotaWarning = true;
- Logging.Log.WriteWarningMessage(LOGTAG, "BackendQuotaNear" , null, "Backend quota is close to being exceeded: Using {0} of {1} ({2} available)", Library.Utility.Utility.FormatSizeString(knownFileSize), Library.Utility.Utility.FormatSizeString(quota.TotalQuotaSpace), Library.Utility.Utility.FormatSizeString(quota.FreeQuotaSpace));
+ Logging.Log.WriteWarningMessage(LOGTAG, "BackendQuotaNear", null, "Backend quota is close to being exceeded: Using {0} of {1} ({2} available)", Library.Utility.Utility.FormatSizeString(knownFileSize), Library.Utility.Utility.FormatSizeString(quota.TotalQuotaSpace), Library.Utility.Utility.FormatSizeString(quota.FreeQuotaSpace));
}
}
}
}
log.AssignedQuotaSpace = options.QuotaSize;
-
+
foreach(var s in remotelist)
lookup[s.File.Name] = s;
-
+
var missing = new List<RemoteVolumeEntry>();
var missingHash = new List<Tuple<long, RemoteVolumeEntry>>();
var cleanupRemovedRemoteVolumes = new HashSet<string>();
@@ -303,7 +303,7 @@ namespace Duplicati.Library.Main.Operation
}
else
{
- if (string.Equals(i.Name, protectedfile) && i.State == RemoteVolumeState.Temporary)
+ if (i.State == RemoteVolumeState.Temporary && protectedFiles.Any(pf => pf == i.Name))
{
Logging.Log.WriteInformationMessage(LOGTAG, "KeepIncompleteFile", "keeping protected incomplete remote file listed as {0}: {1}", i.State, i.Name);
}
@@ -323,8 +323,7 @@ namespace Duplicati.Library.Main.Operation
}
else if (!remoteFound)
{
-
- if (string.Equals(i.Name, protectedfile))
+ if (protectedFiles.Any(pf => pf == i.Name))
{
Logging.Log.WriteInformationMessage(LOGTAG, "KeepIncompleteFile", "keeping protected incomplete remote file listed as {0}: {1}", i.State, i.Name);
database.UpdateRemoteVolume(i.Name, RemoteVolumeState.Temporary, i.Size, i.Hash, false, new TimeSpan(0), null);
@@ -338,7 +337,7 @@ namespace Duplicati.Library.Main.Operation
}
else
{
- if (string.Equals(i.Name, protectedfile))
+ if (protectedFiles.Any(pf => pf == i.Name))
{
Logging.Log.WriteInformationMessage(LOGTAG, "KeepIncompleteFile", "keeping protected incomplete remote file listed as {0}: {1}", i.State, i.Name);
}
@@ -367,7 +366,7 @@ namespace Duplicati.Library.Main.Operation
missingHash.Add(new Tuple<long, RemoteVolumeEntry>(r.File.Size, i));
break;
-
+
default:
Logging.Log.WriteWarningMessage(LOGTAG, "UnknownFileState", null, "unknown state for remote file listed as {0}: {1}", i.State, i.Name);
break;
@@ -381,15 +380,15 @@ namespace Duplicati.Library.Main.Operation
foreach(var i in missingHash)
Logging.Log.WriteWarningMessage(LOGTAG, "MissingRemoteHash", null, "remote file {1} is listed as {0} with size {2} but should be {3}, please verify the sha256 hash \"{4}\"", i.Item2.State, i.Item2.Name, i.Item1, i.Item2.Size, i.Item2.Hash);
-
- return new RemoteAnalysisResult() {
- ParsedVolumes = remotelist,
+
+ return new RemoteAnalysisResult()
+ {
+ ParsedVolumes = remotelist,
OtherVolumes = otherlist,
- ExtraVolumes = lookup.Values,
- MissingVolumes = missing,
- VerificationRequiredVolumes = missingHash.Select(x => x.Item2)
+ ExtraVolumes = lookup.Values,
+ MissingVolumes = missing,
+ VerificationRequiredVolumes = missingHash.Select(x => x.Item2)
};
}
- }
+ }
}
-
diff --git a/Duplicati/Library/Main/Operation/RestoreHandler.cs b/Duplicati/Library/Main/Operation/RestoreHandler.cs
index a12876b9d..badc13989 100644
--- a/Duplicati/Library/Main/Operation/RestoreHandler.cs
+++ b/Duplicati/Library/Main/Operation/RestoreHandler.cs
@@ -332,8 +332,11 @@ namespace Duplicati.Library.Main.Operation
if (!m_options.NoBackendverification)
{
- m_result.OperationProgressUpdater.UpdatePhase(OperationPhase.Restore_PreRestoreVerify);
- FilelistProcessor.VerifyRemoteList(backend, m_options, database, result.BackendWriter);
+ m_result.OperationProgressUpdater.UpdatePhase(OperationPhase.Restore_PreRestoreVerify);
+
+ var backupDatabase = new LocalBackupDatabase(database, m_options);
+ var tempFilelistVolumes = backupDatabase.GetTemporaryFilelistVolumeNames(latestOnly: false);
+ FilelistProcessor.VerifyRemoteList(backend, m_options, database, result.BackendWriter, tempFilelistVolumes);
}
//Figure out what files are to be patched, and what blocks are needed
diff --git a/Duplicati/Library/Main/Operation/TestHandler.cs b/Duplicati/Library/Main/Operation/TestHandler.cs
index 7fd8c6469..405032fcc 100644
--- a/Duplicati/Library/Main/Operation/TestHandler.cs
+++ b/Duplicati/Library/Main/Operation/TestHandler.cs
@@ -53,9 +53,13 @@ namespace Duplicati.Library.Main.Operation
Utility.UpdateOptionsFromDb(db, m_options);
Utility.VerifyParameters(db, m_options);
db.VerifyConsistency(m_options.Blocksize, m_options.BlockhashSize, true, null);
-
+
if (!m_options.NoBackendverification)
- FilelistProcessor.VerifyRemoteList(backend, m_options, db, m_results.BackendWriter);
+ {
+ var backupDatabase = new LocalBackupDatabase(db, m_options);
+ var latestFilelist = backupDatabase.GetTemporaryFilelistVolumeNames(latestOnly: true);
+ FilelistProcessor.VerifyRemoteList(backend, m_options, db, m_results.BackendWriter, latestFilelist);
+ }
DoRun(samples, db, backend);
db.WriteResults();
diff --git a/Duplicati/UnitTest/DisruptionTests.cs b/Duplicati/UnitTest/DisruptionTests.cs
index abbded9d3..eaf10ffd8 100644
--- a/Duplicati/UnitTest/DisruptionTests.cs
+++ b/Duplicati/UnitTest/DisruptionTests.cs
@@ -515,7 +515,7 @@ namespace Duplicati.UnitTest
[Test]
[Category("Disruption")]
- public void StopNow()
+ public async Task StopNow()
{
// Choose a dblock size that is small enough so that more than one volume is needed.
Dictionary<string, string> options = new Dictionary<string, string>(this.TestOptions) {["dblock-size"] = "10mb", ["disable-synthetic-filelist"] = "true"};
@@ -541,7 +541,7 @@ namespace Duplicati.UnitTest
Thread.Sleep(1000);
c.Stop(false);
- Assert.That(async () => await backupTask.ConfigureAwait(false), Throws.Exception);
+ await backupTask.ConfigureAwait(false);
}
// The next backup should proceed without issues.