using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using Duplicati.Library.Interface;
using Duplicati.Library.Main;
using Duplicati.Library.Main.Database;
using Duplicati.Library.Main.Volumes;
using NUnit.Framework;
using IFileEntry = Duplicati.Library.Interface.IFileEntry;
using Utility = Duplicati.Library.Utility.Utility;

namespace Duplicati.UnitTest
{
    /// <summary>
    /// Tests that exercise Duplicati's behavior when a backup is disrupted
    /// (stopped after the current file, or stopped immediately), and that the
    /// resulting partial filesets interact correctly with restore, repair,
    /// purge, and the various retention policies.
    /// </summary>
    public class DisruptionTests : BasicSetupHelper
    {
        // Files to create in MB.
        private readonly int[] fileSizes = {10, 20, 30};

        /// <summary>
        /// Writes (or overwrites) one file of random bytes per entry in
        /// <see cref="fileSizes"/> into the source data folder.
        /// </summary>
        private void ModifySourceFiles()
        {
            // One shared Random instance: constructing a new time-seeded Random per
            // iteration could yield identical byte streams for files created within
            // the same clock tick.
            Random rng = new Random();
            foreach (int size in this.fileSizes)
            {
                byte[] data = new byte[size * 1024 * 1024];
                rng.NextBytes(data);
                File.WriteAllBytes(Path.Combine(this.DATAFOLDER, size + "MB"), data);
            }
        }

        /// <summary>
        /// Starts a backup of the source folder on a background thread, then asks the
        /// controller to stop after the current file, producing a partial backup.
        /// </summary>
        /// <param name="controller">Controller to run the backup with; remains owned by the caller.</param>
        /// <returns>The results of the (interrupted) backup.</returns>
        private async Task<IBackupResults> RunPartialBackup(Controller controller)
        {
            this.ModifySourceFiles();

            // ReSharper disable once AccessToDisposedClosure
            Task<IBackupResults> backupTask = Task.Run(() => controller.Backup(new[] {this.DATAFOLDER}));

            // Block for a small amount of time to allow the ITaskControl to be associated
            // with the Controller. Otherwise, the call to Stop will simply be a no-op.
            Thread.Sleep(1000);

            controller.Stop(true);
            return await backupTask.ConfigureAwait(false);
        }

        public override void SetUp()
        {
            base.SetUp();
            this.ModifySourceFiles();
        }

        [Test]
        [Category("Disruption")]
        public async Task FilesetFiles()
        {
            // Choose a dblock size that is small enough so that more than one volume is needed.
            Dictionary<string, string> options = new Dictionary<string, string>(this.TestOptions)
            {
                ["dblock-size"] = "10mb",

                // This allows us to inspect the dlist files without needing the BackendManager
                // (which is inaccessible here) to decrypt them.
                ["no-encryption"] = "true"
            };

            // Run a full backup.
            using (Controller c = new Controller("file://" + this.TARGETFOLDER, options, null))
            {
                IBackupResults backupResults = c.Backup(new[] {this.DATAFOLDER});
                Assert.AreEqual(0, backupResults.Errors.Count());
                Assert.AreEqual(0, backupResults.Warnings.Count());
            }

            // Run a partial backup.
            using (Controller c = new Controller("file://" + this.TARGETFOLDER, options, null))
            {
                IBackupResults backupResults = await this.RunPartialBackup(c).ConfigureAwait(false);
                Assert.AreEqual(0, backupResults.Errors.Count());
                Assert.AreEqual(1, backupResults.Warnings.Count());
            }

            // Reads each remote dlist file and maps the fileset time to its backup type
            // (full/partial), also returning the local paths of the dlist files.
            Dictionary<DateTime, int> GetBackupTypesFromRemoteFiles(Controller c, out List<string> filelistFiles)
            {
                Dictionary<DateTime, int> map = new Dictionary<DateTime, int>();
                filelistFiles = new List<string>();

                IListRemoteResults remoteFiles = c.ListRemote();
                foreach (IFileEntry file in remoteFiles.Files)
                {
                    IParsedVolume volume = VolumeBase.ParseFilename(file);
                    if (volume != null && volume.FileType == RemoteVolumeType.Files)
                    {
                        string dlistFile = Path.Combine(this.TARGETFOLDER, volume.File.Name);
                        filelistFiles.Add(dlistFile);
                        VolumeBase.FilesetData filesetData = VolumeReaderBase.GetFilesetData(volume.CompressionModule, dlistFile, new Options(options));
                        map[volume.Time] = filesetData.IsFullBackup ? BackupType.FULL_BACKUP : BackupType.PARTIAL_BACKUP;
                    }
                }

                return map;
            }

            // Purge a file and verify that the fileset file exists in the new dlist files.
            List<string> dlistFiles;
            Dictionary<DateTime, int> backupTypeMap;
            using (Controller c = new Controller("file://" + this.TARGETFOLDER, options, null))
            {
                IPurgeFilesResults purgeResults = c.PurgeFiles(new Library.Utility.FilterExpression($"*{this.fileSizes[0]}*"));
                Assert.AreEqual(0, purgeResults.Errors.Count());
                Assert.AreEqual(0, purgeResults.Warnings.Count());

                List<IListResultFileset> filesets = c.List().Filesets.ToList();
                Assert.AreEqual(2, filesets.Count);
                Assert.AreEqual(BackupType.FULL_BACKUP, filesets.Single(x => x.Version == 1).IsFullBackup);
                Assert.AreEqual(BackupType.PARTIAL_BACKUP, filesets.Single(x => x.Version == 0).IsFullBackup);

                backupTypeMap = GetBackupTypesFromRemoteFiles(c, out dlistFiles);
            }

            int[] backupTypes = backupTypeMap.OrderByDescending(x => x.Key).Select(x => x.Value).ToArray();
            Assert.AreEqual(2, backupTypes.Length);
            Assert.AreEqual(BackupType.FULL_BACKUP, backupTypes[1]);
            Assert.AreEqual(BackupType.PARTIAL_BACKUP, backupTypes[0]);

            // Remove the dlist files.
            foreach (string dlistFile in dlistFiles)
            {
                File.Delete(dlistFile);
            }

            // Run a repair and verify that the fileset file exists in the new dlist files.
            using (Controller c = new Controller("file://" + this.TARGETFOLDER, options, null))
            {
                IRepairResults repairResults = c.Repair();
                Assert.AreEqual(0, repairResults.Errors.Count());
                Assert.AreEqual(0, repairResults.Warnings.Count());

                List<IListResultFileset> filesets = c.List().Filesets.ToList();
                Assert.AreEqual(2, filesets.Count);
                Assert.AreEqual(BackupType.FULL_BACKUP, filesets.Single(x => x.Version == 1).IsFullBackup);
                Assert.AreEqual(BackupType.PARTIAL_BACKUP, filesets.Single(x => x.Version == 0).IsFullBackup);

                backupTypeMap = GetBackupTypesFromRemoteFiles(c, out _);
            }

            backupTypes = backupTypeMap.OrderByDescending(x => x.Key).Select(x => x.Value).ToArray();
            Assert.AreEqual(2, backupTypes.Length);
            Assert.AreEqual(BackupType.FULL_BACKUP, backupTypes[1]);
            Assert.AreEqual(BackupType.PARTIAL_BACKUP, backupTypes[0]);
        }

        [Test]
        [Category("Disruption")]
        public async Task KeepTimeRetention()
        {
            // Choose a dblock size that is small enough so that more than one volume is needed.
            Dictionary<string, string> options = new Dictionary<string, string>(this.TestOptions) {["dblock-size"] = "10mb"};

            // First, run two complete backups followed by a partial backup. We will then set the keep-time
            // option so that the threshold lies between the first and second backups.
            DateTime firstBackupTime;
            using (Controller c = new Controller("file://" + this.TARGETFOLDER, options, null))
            {
                IBackupResults backupResults = c.Backup(new[] {this.DATAFOLDER});
                Assert.AreEqual(0, backupResults.Errors.Count());
                Assert.AreEqual(0, backupResults.Warnings.Count());
                firstBackupTime = c.List().Filesets.First().Time;
            }

            // Wait before the second backup so that we can more easily define the keep-time threshold
            // to lie between the first and second backups.
            Thread.Sleep(5000);

            DateTime secondBackupTime;
            using (Controller c = new Controller("file://" + this.TARGETFOLDER, options, null))
            {
                this.ModifySourceFiles();
                IBackupResults backupResults = c.Backup(new[] {this.DATAFOLDER});
                Assert.AreEqual(0, backupResults.Errors.Count());
                Assert.AreEqual(0, backupResults.Warnings.Count());
                secondBackupTime = c.List().Filesets.First().Time;
            }

            // Run a partial backup.
            DateTime thirdBackupTime;
            using (Controller c = new Controller("file://" + this.TARGETFOLDER, options, null))
            {
                IBackupResults backupResults = await this.RunPartialBackup(c).ConfigureAwait(false);
                Assert.AreEqual(0, backupResults.Errors.Count());
                Assert.AreEqual(1, backupResults.Warnings.Count());
                thirdBackupTime = c.List().Filesets.First().Time;
            }

            // Set the keep-time option so that the threshold lies between the first and second backups
            // and run the delete operation.
            using (Controller c = new Controller("file://" + this.TARGETFOLDER, options, null))
            {
                options["keep-time"] = $"{(int) ((DateTime.Now - firstBackupTime).TotalSeconds - (secondBackupTime - firstBackupTime).TotalSeconds / 2)}s";
                IDeleteResults deleteResults = c.Delete();
                Assert.AreEqual(0, deleteResults.Errors.Count());
                Assert.AreEqual(0, deleteResults.Warnings.Count());

                List<IListResultFileset> filesets = c.List().Filesets.ToList();
                Assert.AreEqual(2, filesets.Count);
                Assert.AreEqual(secondBackupTime, filesets[1].Time);
                Assert.AreEqual(BackupType.FULL_BACKUP, filesets[1].IsFullBackup);
                Assert.AreEqual(thirdBackupTime, filesets[0].Time);
                Assert.AreEqual(BackupType.PARTIAL_BACKUP, filesets[0].IsFullBackup);
            }

            // Run another partial backup. We will then verify that a full backup is retained
            // even when all the "recent" backups are partial.
            using (Controller c = new Controller("file://" + this.TARGETFOLDER, options, null))
            {
                IBackupResults backupResults = await this.RunPartialBackup(c).ConfigureAwait(false);
                Assert.AreEqual(0, backupResults.Errors.Count());
                Assert.AreEqual(1, backupResults.Warnings.Count());
                DateTime fourthBackupTime = c.List().Filesets.First().Time;

                // Set the keep-time option so that the threshold lies after the most recent full backup
                // and run the delete operation.
                options["keep-time"] = "1s";
                IDeleteResults deleteResults = c.Delete();
                Assert.AreEqual(0, deleteResults.Errors.Count());
                Assert.AreEqual(0, deleteResults.Warnings.Count());

                List<IListResultFileset> filesets = c.List().Filesets.ToList();
                Assert.AreEqual(3, filesets.Count);
                Assert.AreEqual(secondBackupTime, filesets[2].Time);
                Assert.AreEqual(BackupType.FULL_BACKUP, filesets[2].IsFullBackup);
                Assert.AreEqual(thirdBackupTime, filesets[1].Time);
                Assert.AreEqual(BackupType.PARTIAL_BACKUP, filesets[1].IsFullBackup);
                Assert.AreEqual(fourthBackupTime, filesets[0].Time);
                Assert.AreEqual(BackupType.PARTIAL_BACKUP, filesets[0].IsFullBackup);
            }
        }

        [Test]
        [Category("Disruption")]
        public async Task KeepVersionsRetention()
        {
            // Choose a dblock size that is small enough so that more than one volume is needed.
            Dictionary<string, string> options = new Dictionary<string, string>(this.TestOptions) {["dblock-size"] = "10mb"};

            // Run a full backup.
            DateTime firstBackupTime;
            using (Controller c = new Controller("file://" + this.TARGETFOLDER, options, null))
            {
                IBackupResults backupResults = c.Backup(new[] {this.DATAFOLDER});
                Assert.AreEqual(0, backupResults.Errors.Count());
                Assert.AreEqual(0, backupResults.Warnings.Count());
                firstBackupTime = c.List().Filesets.First().Time;
            }

            // Run a partial backup.
            using (Controller c = new Controller("file://" + this.TARGETFOLDER, options, null))
            {
                IBackupResults backupResults = await this.RunPartialBackup(c).ConfigureAwait(false);
                Assert.AreEqual(0, backupResults.Errors.Count());
                Assert.AreEqual(1, backupResults.Warnings.Count());
            }

            // Run a partial backup.
            using (Controller c = new Controller("file://" + this.TARGETFOLDER, options, null))
            {
                IBackupResults backupResults = await this.RunPartialBackup(c).ConfigureAwait(false);
                Assert.AreEqual(0, backupResults.Errors.Count());
                Assert.AreEqual(1, backupResults.Warnings.Count());
            }

            // Run a full backup.
            DateTime fourthBackupTime;
            using (Controller c = new Controller("file://" + this.TARGETFOLDER, options, null))
            {
                this.ModifySourceFiles();
                IBackupResults backupResults = c.Backup(new[] {this.DATAFOLDER});
                Assert.AreEqual(0, backupResults.Errors.Count());
                Assert.AreEqual(0, backupResults.Warnings.Count());
                fourthBackupTime = c.List().Filesets.First().Time;
            }

            // Run a partial backup.
            using (Controller c = new Controller("file://" + this.TARGETFOLDER, options, null))
            {
                options["keep-versions"] = "2";

                IBackupResults backupResults = await this.RunPartialBackup(c).ConfigureAwait(false);
                Assert.AreEqual(0, backupResults.Errors.Count());
                Assert.AreEqual(1, backupResults.Warnings.Count());
                DateTime fifthBackupTime = c.List().Filesets.First().Time;

                // Partial backups that are followed by a full backup can be deleted.
                List<IListResultFileset> filesets = c.List().Filesets.ToList();
                Assert.AreEqual(3, filesets.Count);
                Assert.AreEqual(firstBackupTime, filesets[2].Time);
                Assert.AreEqual(BackupType.FULL_BACKUP, filesets[2].IsFullBackup);
                Assert.AreEqual(fourthBackupTime, filesets[1].Time);
                Assert.AreEqual(BackupType.FULL_BACKUP, filesets[1].IsFullBackup);
                Assert.AreEqual(fifthBackupTime, filesets[0].Time);
                Assert.AreEqual(BackupType.PARTIAL_BACKUP, filesets[0].IsFullBackup);
            }

            // Run a full backup.
            using (Controller c = new Controller("file://" + this.TARGETFOLDER, options, null))
            {
                this.ModifySourceFiles();
                IBackupResults backupResults = c.Backup(new[] {this.DATAFOLDER});
                Assert.AreEqual(0, backupResults.Errors.Count());
                Assert.AreEqual(0, backupResults.Warnings.Count());
                DateTime sixthBackupTime = c.List().Filesets.First().Time;

                // Since the last backup was full, we can now expect to have just the 2 most recent full backups.
                List<IListResultFileset> filesets = c.List().Filesets.ToList();
                Assert.AreEqual(2, filesets.Count);
                Assert.AreEqual(fourthBackupTime, filesets[1].Time);
                Assert.AreEqual(BackupType.FULL_BACKUP, filesets[1].IsFullBackup);
                Assert.AreEqual(sixthBackupTime, filesets[0].Time);
                Assert.AreEqual(BackupType.FULL_BACKUP, filesets[0].IsFullBackup);
            }
        }

        [Test]
        [Category("Disruption")]
        public async Task ListWithoutLocalDb()
        {
            // Choose a dblock size that is small enough so that more than one volume is needed.
            Dictionary<string, string> options = new Dictionary<string, string>(this.TestOptions)
            {
                ["dblock-size"] = "10mb",
                ["no-local-db"] = "true"
            };

            // Run a full backup.
            using (Controller c = new Controller("file://" + this.TARGETFOLDER, options, null))
            {
                IBackupResults backupResults = c.Backup(new[] {this.DATAFOLDER});
                Assert.AreEqual(0, backupResults.Errors.Count());
                Assert.AreEqual(0, backupResults.Warnings.Count());
            }

            // Run a partial backup.
            using (Controller c = new Controller("file://" + this.TARGETFOLDER, options, null))
            {
                IBackupResults backupResults = await this.RunPartialBackup(c).ConfigureAwait(false);
                Assert.AreEqual(0, backupResults.Errors.Count());
                Assert.AreEqual(1, backupResults.Warnings.Count());

                List<IListResultFileset> filesets = c.List().Filesets.ToList();
                Assert.AreEqual(2, filesets.Count);
                Assert.AreEqual(BackupType.FULL_BACKUP, filesets[1].IsFullBackup);
                Assert.AreEqual(BackupType.PARTIAL_BACKUP, filesets[0].IsFullBackup);
            }
        }

        [Test]
        [Category("Disruption")]
        public async Task RetentionPolicyRetention()
        {
            Dictionary<string, string> options = new Dictionary<string, string>(this.TestOptions)
            {
                // Choose a dblock size that is small enough so that more than one volume is needed.
                ["dblock-size"] = "10mb",

                // This test assumes that we can perform 3 backups within 1 minute.
                ["retention-policy"] = "1m:59s,U:1m",
                ["no-backend-verification"] = "true"
            };

            DateTime firstBackupTime;
            using (Controller c = new Controller("file://" + this.TARGETFOLDER, options, null))
            {
                IBackupResults backupResults = c.Backup(new[] {this.DATAFOLDER});
                Assert.AreEqual(0, backupResults.Errors.Count());
                Assert.AreEqual(0, backupResults.Warnings.Count());
                firstBackupTime = c.List().Filesets.First().Time;

                List<IListResultFileset> filesets = c.List().Filesets.ToList();
                Assert.AreEqual(1, filesets.Count);

                this.ModifySourceFiles();
                backupResults = c.Backup(new[] {this.DATAFOLDER});
                Assert.AreEqual(0, backupResults.Errors.Count());
                Assert.AreEqual(0, backupResults.Warnings.Count());
                DateTime secondBackupTime = c.List().Filesets.First().Time;

                // Since the most recent backup is not considered in the retention logic, the only backup in the first time frame
                // is the initial one. As a result, we should have 2 backups.
                filesets = c.List().Filesets.ToList();
                Assert.AreEqual(2, filesets.Count);
                Assert.AreEqual(firstBackupTime, filesets[1].Time);
                Assert.AreEqual(BackupType.FULL_BACKUP, filesets[1].IsFullBackup);
                Assert.AreEqual(secondBackupTime, filesets[0].Time);
                Assert.AreEqual(BackupType.FULL_BACKUP, filesets[0].IsFullBackup);
            }

            // Wait so that the next backups fall in the next retention interval.
            Thread.Sleep(new TimeSpan(0, 0, 1, 0));

            DateTime thirdBackupTime;
            using (Controller c = new Controller("file://" + this.TARGETFOLDER, options, null))
            {
                IBackupResults backupResults = await this.RunPartialBackup(c).ConfigureAwait(false);
                Assert.AreEqual(0, backupResults.Errors.Count());
                Assert.AreEqual(1, backupResults.Warnings.Count());
                thirdBackupTime = c.List().Filesets.First().Time;

                // Since the most recent backup is not considered in the retention logic, there are no backups in the first time
                // frame. The original 2 backups have now spilled over to the U:1m specification. Since we keep the first
                // backup in the interval, we should be left with the first backup, as well as the third partial one.
                List<IListResultFileset> filesets = c.List().Filesets.ToList();
                Assert.AreEqual(2, filesets.Count);
                Assert.AreEqual(firstBackupTime, filesets[1].Time);
                Assert.AreEqual(BackupType.FULL_BACKUP, filesets[1].IsFullBackup);
                Assert.AreEqual(thirdBackupTime, filesets[0].Time);
                Assert.AreEqual(BackupType.PARTIAL_BACKUP, filesets[0].IsFullBackup);
            }

            DateTime fourthBackupTime;
            using (Controller c = new Controller("file://" + this.TARGETFOLDER, options, null))
            {
                this.ModifySourceFiles();
                IBackupResults backupResults = c.Backup(new[] {this.DATAFOLDER});
                Assert.AreEqual(0, backupResults.Errors.Count());
                Assert.AreEqual(0, backupResults.Warnings.Count());
                fourthBackupTime = c.List().Filesets.First().Time;

                // Since the most recent backup is not considered in the retention logic, the third backup is the only backup
                // in the first time frame. There is no further spillover, so we simply add the fourth backup to the
                // collection of retained backups.
                List<IListResultFileset> filesets = c.List().Filesets.ToList();
                Assert.AreEqual(3, filesets.Count);
                Assert.AreEqual(firstBackupTime, filesets[2].Time);
                Assert.AreEqual(BackupType.FULL_BACKUP, filesets[2].IsFullBackup);
                Assert.AreEqual(thirdBackupTime, filesets[1].Time);
                Assert.AreEqual(BackupType.PARTIAL_BACKUP, filesets[1].IsFullBackup);
                Assert.AreEqual(fourthBackupTime, filesets[0].Time);
                Assert.AreEqual(BackupType.FULL_BACKUP, filesets[0].IsFullBackup);

                this.ModifySourceFiles();
                backupResults = c.Backup(new[] {this.DATAFOLDER});
                Assert.AreEqual(0, backupResults.Errors.Count());
                Assert.AreEqual(0, backupResults.Warnings.Count());
                DateTime fifthBackupTime = c.List().Filesets.First().Time;

                // Since the most recent backup is not considered in the retention logic, we now have two backups in the
                // first time frame: the third (partial) and fourth (full). Since the first backup in each interval is
                // kept, we would typically keep just the third backup. However, since we should not discard a full
                // backup in favor of a partial one, we keep the fourth as well. We also still have the initial backup.
                filesets = c.List().Filesets.ToList();
                Assert.AreEqual(4, filesets.Count);
                Assert.AreEqual(firstBackupTime, filesets[3].Time);
                Assert.AreEqual(BackupType.FULL_BACKUP, filesets[3].IsFullBackup);
                Assert.AreEqual(thirdBackupTime, filesets[2].Time);
                Assert.AreEqual(BackupType.PARTIAL_BACKUP, filesets[2].IsFullBackup);
                Assert.AreEqual(fourthBackupTime, filesets[1].Time);
                Assert.AreEqual(BackupType.FULL_BACKUP, filesets[1].IsFullBackup);
                Assert.AreEqual(fifthBackupTime, filesets[0].Time);
                Assert.AreEqual(BackupType.FULL_BACKUP, filesets[0].IsFullBackup);
            }

            // Wait so that the next backups fall in the next retention interval.
            Thread.Sleep(new TimeSpan(0, 0, 1, 0));

            using (Controller c = new Controller("file://" + this.TARGETFOLDER, options, null))
            {
                this.ModifySourceFiles();
                IBackupResults backupResults = c.Backup(new[] {this.DATAFOLDER});
                Assert.AreEqual(0, backupResults.Errors.Count());
                Assert.AreEqual(0, backupResults.Warnings.Count());
                DateTime sixthBackupTime = c.List().Filesets.First().Time;

                // Since the most recent backup is not considered in the retention logic, we now have three backups in the
                // second time frame: the third (partial), fourth (full), and fifth (full). Since we keep up to the first
                // full backup in each time frame, we now drop the fifth backup.
                List<IListResultFileset> filesets = c.List().Filesets.ToList();
                Assert.AreEqual(4, filesets.Count);
                Assert.AreEqual(firstBackupTime, filesets[3].Time);
                Assert.AreEqual(BackupType.FULL_BACKUP, filesets[3].IsFullBackup);
                Assert.AreEqual(thirdBackupTime, filesets[2].Time);
                Assert.AreEqual(BackupType.PARTIAL_BACKUP, filesets[2].IsFullBackup);
                Assert.AreEqual(fourthBackupTime, filesets[1].Time);
                Assert.AreEqual(BackupType.FULL_BACKUP, filesets[1].IsFullBackup);
                Assert.AreEqual(sixthBackupTime, filesets[0].Time);
                Assert.AreEqual(BackupType.FULL_BACKUP, filesets[0].IsFullBackup);
            }
        }

        [Test]
        [Category("Disruption")]
        public async Task StopAfterCurrentFile()
        {
            // Choose a dblock size that is small enough so that more than one volume is needed.
            Dictionary<string, string> options = new Dictionary<string, string>(this.TestOptions) {["dblock-size"] = "10mb"};

            // Run a complete backup.
            using (Controller c = new Controller("file://" + this.TARGETFOLDER, options, null))
            {
                IBackupResults backupResults = c.Backup(new[] {this.DATAFOLDER});
                Assert.AreEqual(0, backupResults.Errors.Count());
                Assert.AreEqual(0, backupResults.Warnings.Count());

                Assert.AreEqual(1, c.List().Filesets.Count());
                Assert.AreEqual(BackupType.FULL_BACKUP, c.List().Filesets.Single(x => x.Version == 0).IsFullBackup);
            }

            // Run a partial backup.
            using (Controller c = new Controller("file://" + this.TARGETFOLDER, options, null))
            {
                IBackupResults backupResults = await this.RunPartialBackup(c).ConfigureAwait(false);
                Assert.AreEqual(0, backupResults.Errors.Count());
                Assert.AreEqual(1, backupResults.Warnings.Count());

                // If we interrupt the backup, the most recent Fileset should be marked as partial.
                Assert.AreEqual(2, c.List().Filesets.Count());
                Assert.AreEqual(BackupType.FULL_BACKUP, c.List().Filesets.Single(x => x.Version == 1).IsFullBackup);
                Assert.AreEqual(BackupType.PARTIAL_BACKUP, c.List().Filesets.Single(x => x.Version == 0).IsFullBackup);
            }

            // Restore files from the partial backup set.
            Dictionary<string, string> restoreOptions = new Dictionary<string, string>(options) {["restore-path"] = this.RESTOREFOLDER};
            using (Controller c = new Controller("file://" + this.TARGETFOLDER, restoreOptions, null))
            {
                IListResults lastResults = c.List("*");
                string[] partialVersionFiles = lastResults.Files.Select(x => x.Path).Where(x => !Utility.IsFolder(x, File.GetAttributes)).ToArray();
                Assert.GreaterOrEqual(partialVersionFiles.Length, 1);
                c.Restore(partialVersionFiles);

                foreach (string filepath in partialVersionFiles)
                {
                    string filename = Path.GetFileName(filepath);
                    TestUtils.AssertFilesAreEqual(filepath, Path.Combine(this.RESTOREFOLDER, filename ?? String.Empty), false, filename);
                }
            }

            // Recreating the database should preserve the backup types.
            File.Delete(this.DBFILE);
            using (Controller c = new Controller("file://" + this.TARGETFOLDER, options, null))
            {
                IRepairResults repairResults = c.Repair();
                Assert.AreEqual(0, repairResults.Errors.Count());
                Assert.AreEqual(0, repairResults.Warnings.Count());

                Assert.AreEqual(2, c.List().Filesets.Count());
                Assert.AreEqual(BackupType.FULL_BACKUP, c.List().Filesets.Single(x => x.Version == 1).IsFullBackup);
                Assert.AreEqual(BackupType.PARTIAL_BACKUP, c.List().Filesets.Single(x => x.Version == 0).IsFullBackup);
            }

            // Run a complete backup. Listing the Filesets should include both full and partial backups.
            using (Controller c = new Controller("file://" + this.TARGETFOLDER, options, null))
            {
                IBackupResults backupResults = c.Backup(new[] {this.DATAFOLDER});
                Assert.AreEqual(0, backupResults.Errors.Count());
                Assert.AreEqual(0, backupResults.Warnings.Count());

                Assert.AreEqual(3, c.List().Filesets.Count());
                Assert.AreEqual(BackupType.FULL_BACKUP, c.List().Filesets.Single(x => x.Version == 2).IsFullBackup);
                Assert.AreEqual(BackupType.PARTIAL_BACKUP, c.List().Filesets.Single(x => x.Version == 1).IsFullBackup);
                Assert.AreEqual(BackupType.FULL_BACKUP, c.List().Filesets.Single(x => x.Version == 0).IsFullBackup);
            }

            // Restore files from the full backup set.
            restoreOptions["overwrite"] = "true";
            using (Controller c = new Controller("file://" + this.TARGETFOLDER, restoreOptions, null))
            {
                IListResults lastResults = c.List("*");
                string[] fullVersionFiles = lastResults.Files.Select(x => x.Path).Where(x => !Utility.IsFolder(x, File.GetAttributes)).ToArray();
                Assert.AreEqual(this.fileSizes.Length, fullVersionFiles.Length);

                IRestoreResults restoreResults = c.Restore(fullVersionFiles);
                Assert.AreEqual(0, restoreResults.Errors.Count());
                Assert.AreEqual(0, restoreResults.Warnings.Count());

                foreach (string filepath in fullVersionFiles)
                {
                    string filename = Path.GetFileName(filepath);
                    TestUtils.AssertFilesAreEqual(filepath, Path.Combine(this.RESTOREFOLDER, filename ?? String.Empty), false, filename);
                }
            }
        }

        [Test]
        [Category("Disruption")]
        public async Task StopNow()
        {
            // Choose a dblock size that is small enough so that more than one volume is needed.
            Dictionary<string, string> options = new Dictionary<string, string>(this.TestOptions) {["dblock-size"] = "10mb", ["disable-synthetic-filelist"] = "true"};

            // Run a complete backup.
            using (Controller c = new Controller("file://" + this.TARGETFOLDER, options, null))
            {
                IBackupResults backupResults = c.Backup(new[] {this.DATAFOLDER});
                Assert.AreEqual(0, backupResults.Errors.Count());
                Assert.AreEqual(0, backupResults.Warnings.Count());

                List<IListResultFileset> filesets = c.List().Filesets.ToList();
                Assert.AreEqual(1, filesets.Count);
                Assert.AreEqual(BackupType.FULL_BACKUP, filesets[0].IsFullBackup);
            }

            // Interrupt a backup with "stop now".
            this.ModifySourceFiles();
            using (Controller c = new Controller("file://" + this.TARGETFOLDER, options, null))
            {
                // ReSharper disable once AccessToDisposedClosure
                Task backupTask = Task.Run(() => c.Backup(new[] {this.DATAFOLDER}));

                // Block for a small amount of time to allow the ITaskControl to be associated
                // with the Controller. Otherwise, the call to Stop will simply be a no-op.
                Thread.Sleep(1000);

                c.Stop(false);
                await backupTask.ConfigureAwait(false);
            }

            // The next backup should proceed without issues.
            using (Controller c = new Controller("file://" + this.TARGETFOLDER, options, null))
            {
                IBackupResults backupResults = c.Backup(new[] {this.DATAFOLDER});
                Assert.AreEqual(0, backupResults.Errors.Count());
                Assert.AreEqual(0, backupResults.Warnings.Count());

                List<IListResultFileset> filesets = c.List().Filesets.ToList();
                Assert.AreEqual(2, filesets.Count);
                Assert.AreEqual(BackupType.FULL_BACKUP, filesets[1].IsFullBackup);
                Assert.AreEqual(BackupType.FULL_BACKUP, filesets[0].IsFullBackup);
            }

            // Restore from the backup that followed the interruption.
            Dictionary<string, string> restoreOptions = new Dictionary<string, string>(options) {["restore-path"] = this.RESTOREFOLDER};
            using (Controller c = new Controller("file://" + this.TARGETFOLDER, restoreOptions, null))
            {
                IListResults lastResults = c.List("*");
                string[] fullVersionFiles = lastResults.Files.Select(x => x.Path).Where(x => !Utility.IsFolder(x, File.GetAttributes)).ToArray();
                Assert.AreEqual(this.fileSizes.Length, fullVersionFiles.Length);

                IRestoreResults restoreResults = c.Restore(fullVersionFiles);
                Assert.AreEqual(0, restoreResults.Errors.Count());
                Assert.AreEqual(0, restoreResults.Warnings.Count());

                foreach (string filepath in fullVersionFiles)
                {
                    string filename = Path.GetFileName(filepath);
                    TestUtils.AssertFilesAreEqual(filepath, Path.Combine(this.RESTOREFOLDER, filename ?? String.Empty), false, filename);
                }
            }
        }
    }
}