github.com/duplicati/duplicati.git
-rw-r--r--  Duplicati/Library/Compression/FileArchiveDirectory.cs | 3
-rw-r--r--  Duplicati/Library/Compression/FileArchiveZip.cs | 8
-rw-r--r--  Duplicati/Library/Interface/ResultInterfaces.cs | 1
-rw-r--r--  Duplicati/Library/Main/Controller.cs | 53
-rw-r--r--  Duplicati/Library/Main/Database/Database schema/10. Add IsFullBackup to Fileset table.sql | 12
-rw-r--r--  Duplicati/Library/Main/Database/Database schema/Schema.sql | 3
-rw-r--r--  Duplicati/Library/Main/Database/LocalBackupDatabase.cs | 142
-rw-r--r--  Duplicati/Library/Main/Database/LocalDatabase.cs | 460
-rw-r--r--  Duplicati/Library/Main/Database/LocalListAffectedDatabase.cs | 1
-rw-r--r--  Duplicati/Library/Main/Database/LocalListDatabase.cs | 98
-rw-r--r--  Duplicati/Library/Main/Database/LocalPurgeDatabase.cs | 13
-rw-r--r--  Duplicati/Library/Main/Database/LocalRecreateDatabase.cs | 2
-rw-r--r--  Duplicati/Library/Main/Duplicati.Library.Main.csproj | 1
-rw-r--r--  Duplicati/Library/Main/Operation/Backup/BackupDatabase.cs | 6
-rw-r--r--  Duplicati/Library/Main/Operation/Backup/CountFilesHandler.cs | 36
-rw-r--r--  Duplicati/Library/Main/Operation/Backup/FileBlockProcessor.cs | 47
-rw-r--r--  Duplicati/Library/Main/Operation/Backup/FileEnumerationProcess.cs | 151
-rw-r--r--  Duplicati/Library/Main/Operation/Backup/MetadataPreProcess.cs | 21
-rw-r--r--  Duplicati/Library/Main/Operation/Backup/UploadRealFilelist.cs | 4
-rw-r--r--  Duplicati/Library/Main/Operation/Backup/UploadSyntheticFilelist.cs | 2
-rw-r--r--  Duplicati/Library/Main/Operation/BackupHandler.cs | 98
-rw-r--r--  Duplicati/Library/Main/Operation/DeleteHandler.cs | 149
-rw-r--r--  Duplicati/Library/Main/Operation/FilelistProcessor.cs | 2
-rw-r--r--  Duplicati/Library/Main/Operation/ListControlFilesHandler.cs | 3
-rw-r--r--  Duplicati/Library/Main/Operation/ListFilesHandler.cs | 107
-rw-r--r--  Duplicati/Library/Main/Operation/PurgeFilesHandler.cs | 13
-rw-r--r--  Duplicati/Library/Main/Operation/RecreateDatabaseHandler.cs | 8
-rw-r--r--  Duplicati/Library/Main/Operation/RestoreHandler.cs | 25
-rw-r--r--  Duplicati/Library/Main/Operation/TestFilterHandler.cs | 50
-rw-r--r--  Duplicati/Library/Main/ResultClasses.cs | 44
-rw-r--r--  Duplicati/Library/Main/Volumes/FilesetVolumeWriter.cs | 26
-rw-r--r--  Duplicati/Library/Main/Volumes/VolumeBase.cs | 19
-rw-r--r--  Duplicati/Library/Main/Volumes/VolumeReaderBase.cs | 66
-rw-r--r--  Duplicati/Library/Main/Volumes/VolumeWriterBase.cs | 17
-rw-r--r--  Duplicati/Library/Snapshots/UsnJournalService.cs | 58
-rw-r--r--  Duplicati/Server/Runner.cs | 38
-rw-r--r--  Duplicati/Server/WebServer/RESTMethods/Task.cs | 42
-rwxr-xr-x  Duplicati/Server/webroot/ngax/index.html | 2
-rw-r--r--  Duplicati/Server/webroot/ngax/scripts/controllers/RestoreController.js | 3
-rw-r--r--  Duplicati/Server/webroot/ngax/scripts/controllers/StateController.js | 22
40 files changed, 1162 insertions(+), 694 deletions(-)
diff --git a/Duplicati/Library/Compression/FileArchiveDirectory.cs b/Duplicati/Library/Compression/FileArchiveDirectory.cs
index a3f85c9ac..2ca6ae8ac 100644
--- a/Duplicati/Library/Compression/FileArchiveDirectory.cs
+++ b/Duplicati/Library/Compression/FileArchiveDirectory.cs
@@ -22,7 +22,6 @@ using System.Linq;
#endregion
using System;
using System.Collections.Generic;
-using System.Text;
using Duplicati.Library.Interface;
using Duplicati.Library.Common.IO;
@@ -117,7 +116,7 @@ namespace Duplicati.Library.Compression
System.IO.File.SetLastWriteTime(path, lastWrite);
return res;
}
-
+
/// <summary>
/// Returns a value that indicates if the file exists
/// </summary>
diff --git a/Duplicati/Library/Compression/FileArchiveZip.cs b/Duplicati/Library/Compression/FileArchiveZip.cs
index e41c174a3..d1514ce84 100644
--- a/Duplicati/Library/Compression/FileArchiveZip.cs
+++ b/Duplicati/Library/Compression/FileArchiveZip.cs
@@ -103,6 +103,7 @@ namespace Duplicati.Library.Compression
/// The ZipArchive instance used when reading archives
/// </summary>
private IArchive m_archive;
+
/// <summary>
/// The stream used to either read or write
/// </summary>
@@ -202,7 +203,7 @@ namespace Duplicati.Library.Compression
/// you may reuse it and have to dispose it yourself.
/// </summary>
/// <param name="stream">The stream to read or write depending access mode</param>
- /// <param name="mode">The archive acces mode</param>
+ /// <param name="mode">The archive access mode</param>
/// <param name="options">The options passed on the commandline</param>
public FileArchiveZip(Stream stream, ArchiveMode mode, IDictionary<string, string> options)
{
@@ -410,7 +411,7 @@ namespace Duplicati.Library.Compression
return null;
}
-
+
/// <summary>
/// Creates a file in the archive and returns a writeable stream
/// </summary>
@@ -433,9 +434,8 @@ namespace Duplicati.Library.Compression
ModificationDateTime = lastWrite,
CompressionType = m_compressionType
});
-
}
-
+
/// <summary>
/// Returns a value that indicates if the file exists
/// </summary>
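
For orientation, a minimal usage sketch of the constructor and CreateFile method documented above. It assumes the ICompression surface from Duplicati.Library.Interface (ArchiveMode, CompressionHint, a stream-returning CreateFile); it is illustrative, not part of the patch.

    using System;
    using System.Collections.Generic;
    using Duplicati.Library.Compression;
    using Duplicati.Library.Interface;

    class ZipWriteSketch
    {
        static void Main()
        {
            // Write mode over a plain file stream; the options dictionary
            // mirrors the parsed command-line options (empty here).
            using (var fs = System.IO.File.Create("example.zip"))
            using (var archive = new FileArchiveZip(fs, ArchiveMode.Write, new Dictionary<string, string>()))
            using (var entry = archive.CreateFile("hello.txt", CompressionHint.Default, DateTime.UtcNow))
                entry.Write(new byte[] { 0x68, 0x69 }, 0, 2); // writes "hi"
        }
    }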
diff --git a/Duplicati/Library/Interface/ResultInterfaces.cs b/Duplicati/Library/Interface/ResultInterfaces.cs
index e11218594..fc960d330 100644
--- a/Duplicati/Library/Interface/ResultInterfaces.cs
+++ b/Duplicati/Library/Interface/ResultInterfaces.cs
@@ -91,6 +91,7 @@ namespace Duplicati.Library.Interface
public interface IListResultFileset
{
long Version { get; }
+ int IsFullBackup { get; }
DateTime Time { get; }
long FileCount { get; }
long FileSizes { get; }
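
The interface gains an integer IsFullBackup flag. A minimal sketch of a carrier type follows, showing only the members visible in the hunk above; the 1 = full / 0 = partial encoding is an assumption consistent with the schema migration later in this patch.

    using System;

    internal class ListResultFileset : Duplicati.Library.Interface.IListResultFileset
    {
        public long Version { get; }
        public int IsFullBackup { get; }   // assumed: 1 = full, 0 = partial
        public DateTime Time { get; }
        public long FileCount { get; }
        public long FileSizes { get; }

        public ListResultFileset(long version, int isFullBackup, DateTime time, long fileCount, long fileSizes)
        {
            Version = version;
            IsFullBackup = isFullBackup;
            Time = time;
            FileCount = fileCount;
            FileSizes = fileSizes;
        }
    }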
diff --git a/Duplicati/Library/Main/Controller.cs b/Duplicati/Library/Main/Controller.cs
index e5a81ada8..6a6243a68 100644
--- a/Duplicati/Library/Main/Controller.cs
+++ b/Duplicati/Library/Main/Controller.cs
@@ -1,5 +1,5 @@
#region Disclaimer / License
-// Copyright (C) 2015, The Duplicati Team
+// Copyright (C) 2019, The Duplicati Team
// http://www.duplicati.com, info@duplicati.com
//
// This library is free software; you can redistribute it and/or
@@ -16,16 +16,15 @@
// License along with this library; if not, write to the Free Software
// Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
//
-using System.Linq;
-
-
#endregion
using System;
using System.Collections.Generic;
+using System.Linq;
+using System.Threading;
using Duplicati.Library.Utility;
-using Duplicati.Library.Common.IO;
-using Duplicati.Library.Common;
-
+using Duplicati.Library.Common.IO;
+using Duplicati.Library.Common;
+
namespace Duplicati.Library.Main
{
public class Controller : IDisposable
@@ -83,6 +82,11 @@ namespace Duplicati.Library.Main
private ControllerMultiLogTarget m_logTarget;
/// <summary>
+ /// The cancellation token for the running task
+ /// </summary>
+ private readonly CancellationTokenSource m_cancellationTokenSource = new CancellationTokenSource();
+
+ /// <summary>
/// Constructs a new interface for performing backup and restore operations
/// </summary>
/// <param name="backend">The url for the backend to use</param>
@@ -118,7 +122,9 @@ namespace Duplicati.Library.Main
return RunAction(new BackupResults(), ref inputsources, ref filter, (result) => {
using (var h = new Operation.BackupHandler(m_backend, m_options, result))
- h.Run(ExpandInputSources(inputsources, filter), filter);
+ {
+ h.Run(ExpandInputSources(inputsources, filter), filter, m_cancellationTokenSource.Token);
+ }
Library.UsageReporter.Reporter.Report("BACKUP_FILECOUNT", result.ExaminedFiles);
Library.UsageReporter.Reporter.Report("BACKUP_FILESIZE", result.SizeOfExaminedFiles);
@@ -293,7 +299,7 @@ namespace Duplicati.Library.Main
{
return RunAction(new TestFilterResults(), ref paths, ref filter, (result) =>
{
- new Operation.TestFilterHandler(m_options, result).Run(ExpandInputSources(paths, filter), filter);
+ new Operation.TestFilterHandler(m_options, result).Run(ExpandInputSources(paths, filter), filter, m_cancellationTokenSource.Token);
});
}
}
@@ -796,15 +802,15 @@ namespace Duplicati.Library.Main
// For now, warn not to use 7z
if (string.Equals(m_options.CompressionModule, "7z", StringComparison.OrdinalIgnoreCase))
- Logging.Log.WriteWarningMessage(LOGTAG, "7zModuleHasIssues", null, "The 7z compression module has known issues and should only be used for experimental purposes");
-
- // Amazon CD is closing August 16th 2019
- if (string.Equals(new Library.Utility.Uri(m_backend).Scheme, "amzcd", StringComparison.OrdinalIgnoreCase))
- Logging.Log.WriteWarningMessage(LOGTAG, "AmzCDClosingApi", null, "The Amazon Cloud Drive API is closing down on August 16th 2019, please migrate your backups before this date");
-
- //TODO: Based on the action, see if all options are relevant
- }
-
+ Logging.Log.WriteWarningMessage(LOGTAG, "7zModuleHasIssues", null, "The 7z compression module has known issues and should only be used for experimental purposes");
+
+ // Amazon CD is closing August 16th 2019
+ if (string.Equals(new Library.Utility.Uri(m_backend).Scheme, "amzcd", StringComparison.OrdinalIgnoreCase))
+ Logging.Log.WriteWarningMessage(LOGTAG, "AmzCDClosingApi", null, "The Amazon Cloud Drive API is closing down on August 16th 2019, please migrate your backups before this date");
+
+ //TODO: Based on the action, see if all options are relevant
+ }
+
/// <summary>
/// Helper method that expands the user's chosen source input paths,
/// and removes duplicate paths
@@ -1072,11 +1078,16 @@ namespace Duplicati.Library.Main
ct.Resume();
}
- public void Stop()
+ public void Stop(bool allowCurrentFileToFinish)
{
var ct = m_currentTask;
- if (ct != null)
- ct.Stop();
+ if (ct == null) return;
+ if (allowCurrentFileToFinish)
+ {
+ Logging.Log.WriteVerboseMessage(LOGTAG, "CancellationRequested", "Cancellation Requested");
+ m_cancellationTokenSource.Cancel();
+ }
+ ct.Stop(allowCurrentFileToFinish);
}
public void Abort()
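
The controller now owns a shared CancellationTokenSource: Stop(true) cancels the token so the running operation can finish the current file before the task stops, while Stop(false) stops the task without signalling the token. A hedged caller-side sketch:

    using Duplicati.Library.Main;

    static class StopSketch
    {
        // 'controller' is assumed to be a live Controller with a running task.
        public static void RequestStop(Controller controller, bool graceful)
        {
            // graceful == true  -> token cancelled, current file drains first
            // graceful == false -> task stopped without cancelling the token
            controller.Stop(allowCurrentFileToFinish: graceful);
        }
    }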
diff --git a/Duplicati/Library/Main/Database/Database schema/10. Add IsFullBackup to Fileset table.sql b/Duplicati/Library/Main/Database/Database schema/10. Add IsFullBackup to Fileset table.sql
new file mode 100644
index 000000000..00aaa87b1
--- /dev/null
+++ b/Duplicati/Library/Main/Database/Database schema/10. Add IsFullBackup to Fileset table.sql
@@ -0,0 +1,12 @@
+CREATE TABLE "Fileset_Temp" ("ID" INTEGER PRIMARY KEY,
+ "OperationID" INTEGER NOT NULL,
+ "VolumeID" INTEGER NOT NULL,
+ "IsFullBackup" INTEGER NOT NULL,
+ "Timestamp" INTEGER NOT NULL);
+
+INSERT INTO "Fileset_Temp" SELECT "ID", "OperationID", "VolumeID", 1,
+ "Timestamp" FROM "Fileset";
+DROP TABLE "Fileset";
+ALTER TABLE "Fileset_Temp" RENAME TO "Fileset";
+
+UPDATE "Version" SET "Version" = 10;
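
SQLite's ALTER TABLE cannot add a NOT NULL column without a default, hence the rebuild-copy-rename pattern above; note the INSERT backfills IsFullBackup with 1, so every pre-existing fileset is treated as a full backup. A hedged sketch of executing such a numbered script (the real upgrade path is Library.SQLiteHelper.DatabaseUpgrader, not shown in this diff):

    using System.Data;

    static class MigrationSketch
    {
        // Runs a multi-statement migration script in one transaction.
        // Assumes the ADO.NET SQLite provider accepts statement batches.
        public static void Apply(IDbConnection connection, string migrationSql)
        {
            using (var tr = connection.BeginTransaction())
            using (var cmd = connection.CreateCommand())
            {
                cmd.Transaction = tr;
                cmd.CommandText = migrationSql;
                cmd.ExecuteNonQuery();
                tr.Commit();
            }
        }
    }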
diff --git a/Duplicati/Library/Main/Database/Database schema/Schema.sql b/Duplicati/Library/Main/Database/Database schema/Schema.sql
index e2b4f68c4..1d6e509ea 100644
--- a/Duplicati/Library/Main/Database/Database schema/Schema.sql
+++ b/Duplicati/Library/Main/Database/Database schema/Schema.sql
@@ -60,6 +60,7 @@ CREATE TABLE "Fileset" (
"ID" INTEGER PRIMARY KEY,
"OperationID" INTEGER NOT NULL,
"VolumeID" INTEGER NOT NULL,
+ "IsFullBackup" INTEGER NOT NULL,
"Timestamp" INTEGER NOT NULL
);
@@ -274,4 +275,4 @@ CREATE TABLE "ChangeJournalData" (
"ConfigHash" TEXT NOT NULL
);
-INSERT INTO "Version" ("Version") VALUES (9);
+INSERT INTO "Version" ("Version") VALUES (10);
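
The new column stores an integer marker that later hunks compare against BackupType.FULL_BACKUP. That type's definition is not part of this diff; an assumed sketch consistent with the migration's backfill value of 1:

    namespace Duplicati.Library.Main.Database
    {
        // Assumed: FULL_BACKUP = 1 matches the backfill in migration 10;
        // PARTIAL_BACKUP = 0 is the inferred marker for interrupted backups.
        public static class BackupType
        {
            public const int PARTIAL_BACKUP = 0;
            public const int FULL_BACKUP = 1;
        }
    }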
diff --git a/Duplicati/Library/Main/Database/LocalBackupDatabase.cs b/Duplicati/Library/Main/Database/LocalBackupDatabase.cs
index ef58083b4..bf5668250 100644
--- a/Duplicati/Library/Main/Database/LocalBackupDatabase.cs
+++ b/Duplicati/Library/Main/Database/LocalBackupDatabase.cs
@@ -1,5 +1,6 @@
using System;
using System.Collections.Generic;
+using System.Data;
using System.Linq;
using System.Text;
using System.IO;
@@ -23,9 +24,9 @@ namespace Duplicati.Library.Main.Database
public long Filesize;
public string Metahash;
public long Metasize;
-
+
private SortedList<KeyValuePair<long, long>, long> m_versions;
-
+
public PathEntryKeeper(long fileId, DateTime lastmodified, long filesize, string metahash, long metasize)
{
this.FileID = fileId;
@@ -35,7 +36,7 @@ namespace Duplicati.Library.Main.Database
this.Metasize = metasize;
this.m_versions = null;
}
-
+
public long GetFilesetID(long blocksetId, long metadataId)
{
if (m_versions == null)
@@ -47,27 +48,27 @@ namespace Duplicati.Library.Main.Database
else
return r;
}
-
+
public void AddFilesetID(long blocksetId, long metadataId, long filesetId)
{
if (m_versions == null)
m_versions = new SortedList<KeyValuePair<long, long>, long>(1, new KeyValueComparer());
m_versions.Add(new KeyValuePair<long, long>(blocksetId, metadataId), filesetId);
}
-
+
private struct KeyValueComparer : IComparer<KeyValuePair<long, long>>
{
public int Compare(KeyValuePair<long, long> x, KeyValuePair<long, long> y)
{
- return x.Key == y.Key ?
- (x.Value == y.Value ?
- 0
- : (x.Value < y.Value ? -1 : 1))
+ return x.Key == y.Key ?
+ (x.Value == y.Value ?
+ 0
+ : (x.Value < y.Value ? -1 : 1))
: (x.Key < y.Key ? -1 : 1);
}
}
}
-
+
private readonly System.Data.IDbCommand m_findblockCommand;
private readonly System.Data.IDbCommand m_findblocksetCommand;
private readonly System.Data.IDbCommand m_findfilesetCommand;
@@ -94,7 +95,7 @@ namespace Duplicati.Library.Main.Database
private readonly System.Data.IDbCommand m_selectfilemetadatahashandsizeCommand;
private Dictionary<string, long> m_blockCache;
-
+
private long m_filesetId;
private readonly bool m_logQueries;
@@ -104,7 +105,7 @@ namespace Duplicati.Library.Main.Database
{
this.ShouldCloseConnection = true;
}
-
+
public LocalBackupDatabase(LocalDatabase db, Options options)
: base(db)
{
@@ -128,7 +129,7 @@ namespace Duplicati.Library.Main.Database
m_selectfileHashCommand = m_connection.CreateCommand();
m_insertblocksetentryFastCommand = m_connection.CreateCommand();
m_selectfilemetadatahashandsizeCommand = m_connection.CreateCommand();
-
+
m_findblockCommand.CommandText = @"SELECT ""ID"" FROM ""Block"" WHERE ""Hash"" = ? AND ""Size"" = ?";
m_findblockCommand.AddParameters(2);
@@ -265,15 +266,15 @@ namespace Duplicati.Library.Main.Database
m_selectblocklistHashesCommand.CommandText = @"SELECT ""Hash"" FROM ""BlocklistHash"" WHERE ""BlocksetID"" = ? ORDER BY ""Index"" ASC ";
m_selectblocklistHashesCommand.AddParameters(1);
}
-
+
/// <summary>
/// Builds the lookup tables. Call this method after deleting items, and before processing items
/// </summary>
/// <param name="options">The option settings</param>
public void BuildLookupTable(Options options)
{
- if (options.UseBlockCache)
- {
+ if (options.UseBlockCache)
+ {
string failedhash = null;
try
{
@@ -291,9 +292,9 @@ namespace Duplicati.Library.Main.Database
{
Logging.Log.WriteWarningMessage(LOGTAG, "BlockCacheFailure", ex, "Failed to create block cache, this could mean you have hash collisions in your table, the hash that failed is {0}. Error message: {1}.", failedhash, ex.Message);
Logging.Log.WriteWarningMessage(LOGTAG, "BlockCacheFailure", null, "Disabling block cache due to error");
- }
- }
- }
+ }
+ }
+ }
/// <summary>
/// Probes to see if a block already exists
@@ -301,7 +302,7 @@ namespace Duplicati.Library.Main.Database
/// <param name="key">The block key</param>
/// <param name="size">The size of the block</param>
/// <returns>True if the block should be added to the current output</returns>
- public long FindBlockID (string key, long size, System.Data.IDbTransaction transaction = null)
+ public long FindBlockID(string key, long size, System.Data.IDbTransaction transaction = null)
{
m_findblockCommand.Transaction = transaction;
m_findblockCommand.SetParameterValue(0, key);
@@ -309,13 +310,13 @@ namespace Duplicati.Library.Main.Database
return m_findblockCommand.ExecuteScalarInt64(m_logQueries, -1);
}
- /// <summary>
- /// Adds a block to the local database, returning a value indicating if the value presents a new block
- /// </summary>
- /// <param name="key">The block key</param>
- /// <param name="size">The size of the block</param>
- /// <returns>True if the block should be added to the current output</returns>
- public bool AddBlock (string key, long size, long volumeid, System.Data.IDbTransaction transaction = null)
+ /// <summary>
+ /// Adds a block to the local database, returning a value indicating if the value represents a new block
+ /// </summary>
+ /// <param name="key">The block key</param>
+ /// <param name="size">The size of the block</param>
+ /// <returns>True if the block should be added to the current output</returns>
+ public bool AddBlock(string key, long size, long volumeid, System.Data.IDbTransaction transaction = null)
{
long exsize;
@@ -367,7 +368,7 @@ namespace Duplicati.Library.Main.Database
if (blocksetid != -1)
return false; //Found it
- using(var tr = new TemporaryTransactionWrapper(m_connection, transaction))
+ using (var tr = new TemporaryTransactionWrapper(m_connection, transaction))
{
m_insertblocksetCommand.Transaction = tr.Parent;
m_insertblocksetCommand.SetParameterValue(0, size);
@@ -379,7 +380,7 @@ namespace Duplicati.Library.Main.Database
{
m_insertblocklistHashesCommand.SetParameterValue(0, blocksetid);
m_insertblocklistHashesCommand.Transaction = tr.Parent;
- foreach(var bh in blocklistHashes)
+ foreach (var bh in blocklistHashes)
{
m_insertblocklistHashesCommand.SetParameterValue(1, ix);
m_insertblocklistHashesCommand.SetParameterValue(2, bh);
@@ -396,7 +397,7 @@ namespace Duplicati.Library.Main.Database
ix = 0;
long remainsize = size;
- foreach(var h in hashes)
+ foreach (var h in hashes)
{
var exsize = remainsize < blocksize ? remainsize : blocksize;
m_insertblocksetentryCommand.SetParameterValue(1, ix);
@@ -411,13 +412,13 @@ namespace Duplicati.Library.Main.Database
var bid = cmd.ExecuteScalarInt64(@"SELECT ""ID"" FROM ""Block"" WHERE ""Hash"" = ?", -1, h);
if (bid == -1)
throw new Exception(string.Format("Could not find any blocks with the given hash: {0}", h));
- foreach(var rd in cmd.ExecuteReaderEnumerable(@"SELECT ""Size"" FROM ""Block"" WHERE ""Hash"" = ?", h))
+ foreach (var rd in cmd.ExecuteReaderEnumerable(@"SELECT ""Size"" FROM ""Block"" WHERE ""Hash"" = ?", h))
Logging.Log.WriteErrorMessage(LOGTAG, "FoundIssue1400Error", null, "Found block with ID {0} and hash {1} and size {2}", bid, h, rd.ConvertValueToInt64(0, -1));
}
throw new Exception(string.Format("Unexpected result count: {0}, expected {1}, check log for more messages", c, 1));
}
-
+
ix++;
remainsize -= blocksize;
}
@@ -461,7 +462,7 @@ namespace Duplicati.Library.Main.Database
public bool AddMetadataset(string filehash, long size, long blocksetid, out long metadataid, System.Data.IDbTransaction transaction = null)
{
if (GetMetadatasetID(filehash, size, out metadataid, transaction))
- return false;
+ return false;
using (var tr = new TemporaryTransactionWrapper(m_connection, transaction))
{
@@ -510,7 +511,7 @@ namespace Duplicati.Library.Main.Database
m_insertfileOperationCommand.SetParameterValue(0, m_filesetId);
m_insertfileOperationCommand.SetParameterValue(1, fileidobj);
m_insertfileOperationCommand.SetParameterValue(2, lastmodified.ToUniversalTime().Ticks);
- m_insertfileOperationCommand.ExecuteNonQuery(m_logQueries);
+ m_insertfileOperationCommand.ExecuteNonQuery(m_logQueries);
}
/// <summary>
@@ -540,7 +541,7 @@ namespace Duplicati.Library.Main.Database
{
AddFile(path, lastmodified, FOLDER_BLOCKSET_ID, metadataID, transaction);
}
-
+
public void AddSymlinkEntry(string path, long metadataID, DateTime lastmodified, System.Data.IDbTransaction transaction = null)
{
AddFile(path, lastmodified, SYMLINK_BLOCKSET_ID, metadataID, transaction);
@@ -629,11 +630,11 @@ namespace Duplicati.Library.Main.Database
var r = m_selectfileHashCommand.ExecuteScalar(m_logQueries, null);
if (r == null || r == DBNull.Value)
return null;
-
+
return r.ToString();
}
- public override void Dispose ()
+ public override void Dispose()
{
base.Dispose();
}
@@ -642,10 +643,10 @@ namespace Duplicati.Library.Main.Database
{
return GetPreviousFilesetID(cmd, OperationTimestamp, m_filesetId);
}
-
+
private long GetPreviousFilesetID(System.Data.IDbCommand cmd, DateTime timestamp, long filesetid)
{
- var lastFilesetId = cmd.ExecuteScalarInt64(@"SELECT ""ID"" FROM ""Fileset"" WHERE ""Timestamp"" < ? AND ""ID"" != ? ORDER BY ""Timestamp"" DESC ", -1, Library.Utility.Utility.NormalizeDateTimeToEpochSeconds(timestamp), filesetid);
+ var lastFilesetId = cmd.ExecuteScalarInt64(@"SELECT ""ID"" FROM ""Fileset"" WHERE ""Timestamp"" < ? AND ""ID"" != ? ORDER BY ""Timestamp"" DESC ", -1, Library.Utility.Utility.NormalizeDateTimeToEpochSeconds(timestamp), filesetid);
return lastFilesetId;
}
@@ -663,7 +664,7 @@ namespace Duplicati.Library.Main.Database
internal void UpdateChangeStatistics(BackupResults results, System.Data.IDbTransaction transaction)
{
- using(var cmd = m_connection.CreateCommand(transaction))
+ using (var cmd = m_connection.CreateCommand(transaction))
{
// TODO: Optimize these queries to not use the "File" view
var lastFilesetId = GetPreviousFilesetID(cmd);
@@ -676,20 +677,20 @@ namespace Duplicati.Library.Main.Database
var subqueryNonFiles = @"SELECT ""File"".""Path"", ""Blockset"".""Fullhash"" FROM ""File"", ""FilesetEntry"", ""Metadataset"", ""Blockset"" WHERE ""File"".""ID"" = ""FilesetEntry"".""FileID"" AND ""Metadataset"".""ID"" = ""File"".""MetadataID"" AND ""File"".""BlocksetID"" = ? AND ""Metadataset"".""BlocksetID"" = ""Blockset"".""ID"" AND ""FilesetEntry"".""FilesetID"" = ? ";
results.ModifiedFolders = cmd.ExecuteScalarInt64(@"SELECT COUNT(*) FROM (" + subqueryNonFiles + @") A, (" + subqueryNonFiles + @") B WHERE ""A"".""Path"" = ""B"".""Path"" AND ""A"".""Fullhash"" != ""B"".""Fullhash"" ", 0, lastFilesetId, FOLDER_BLOCKSET_ID, m_filesetId, FOLDER_BLOCKSET_ID);
results.ModifiedSymlinks = cmd.ExecuteScalarInt64(@"SELECT COUNT(*) FROM (" + subqueryNonFiles + @") A, (" + subqueryNonFiles + @") B WHERE ""A"".""Path"" = ""B"".""Path"" AND ""A"".""Fullhash"" != ""B"".""Fullhash"" ", 0, lastFilesetId, SYMLINK_BLOCKSET_ID, m_filesetId, SYMLINK_BLOCKSET_ID);
-
+
var tmpName1 = "TmpFileList-" + Library.Utility.Utility.ByteArrayAsHexString(Guid.NewGuid().ToByteArray());
var tmpName2 = "TmpFileList-" + Library.Utility.Utility.ByteArrayAsHexString(Guid.NewGuid().ToByteArray());
try
{
var subqueryFiles = @"SELECT ""File"".""Path"" AS ""Path"", ""A"".""Fullhash"" AS ""Filehash"", ""B"".""Fullhash"" AS ""Metahash"" FROM ""File"", ""FilesetEntry"", ""Blockset"" A, ""Blockset"" B, ""Metadataset"" WHERE ""File"".""ID"" = ""FilesetEntry"".""FileID"" AND ""A"".""ID"" = ""File"".""BlocksetID"" AND ""FilesetEntry"".""FilesetID"" = ? AND ""File"".""MetadataID"" = ""Metadataset"".""ID"" AND ""Metadataset"".""BlocksetID"" = ""B"".""ID"" ";
-
+
cmd.ExecuteNonQuery(string.Format(@"CREATE TEMPORARY TABLE ""{0}"" AS " + subqueryFiles, tmpName1), lastFilesetId);
cmd.ExecuteNonQuery(string.Format(@"CREATE TEMPORARY TABLE ""{0}"" AS " + subqueryFiles, tmpName2), m_filesetId);
-
+
results.AddedFiles = cmd.ExecuteScalarInt64(string.Format(@"SELECT COUNT(*) FROM ""File"" INNER JOIN ""FilesetEntry"" ON ""File"".""ID"" = ""FilesetEntry"".""FileID"" WHERE ""FilesetEntry"".""FilesetID"" = ? AND ""File"".""BlocksetID"" != ? AND ""File"".""BlocksetID"" != ? AND NOT ""File"".""Path"" IN (SELECT ""Path"" FROM ""{0}"")", tmpName1), 0, m_filesetId, FOLDER_BLOCKSET_ID, SYMLINK_BLOCKSET_ID);
results.DeletedFiles = cmd.ExecuteScalarInt64(string.Format(@"SELECT COUNT(*) FROM ""{0}"" WHERE ""{0}"".""Path"" NOT IN (SELECT ""Path"" FROM ""File"" INNER JOIN ""FilesetEntry"" ON ""File"".""ID"" = ""FilesetEntry"".""FileID"" WHERE ""FilesetEntry"".""FilesetID"" = ?)", tmpName1), 0, m_filesetId);
results.ModifiedFiles = cmd.ExecuteScalarInt64(string.Format(@"SELECT COUNT(*) FROM ""{0}"" A, ""{1}"" B WHERE ""A"".""Path"" = ""B"".""Path"" AND (""A"".""Filehash"" != ""B"".""Filehash"" OR ""A"".""Metahash"" != ""B"".""Metahash"")", tmpName1, tmpName2), 0);
-
+
}
finally
{
@@ -723,14 +724,14 @@ namespace Duplicati.Library.Main.Database
/// <param name="timestamp">If <c>filesetid</c> == -1, used to locate previous file-set</param>
public void AppendFilesFromPreviousSet(System.Data.IDbTransaction transaction, IEnumerable<string> deleted, long filesetid, long prevId, DateTime timestamp)
{
- using(var cmd = m_connection.CreateCommand())
- using(var cmdDelete = m_connection.CreateCommand())
- using(var tr = new TemporaryTransactionWrapper(m_connection, transaction))
+ using (var cmd = m_connection.CreateCommand())
+ using (var cmdDelete = m_connection.CreateCommand())
+ using (var tr = new TemporaryTransactionWrapper(m_connection, transaction))
{
long lastFilesetId = prevId < 0 ? GetPreviousFilesetID(cmd, timestamp, filesetid) : prevId;
cmd.Transaction = tr.Parent;
- cmd.ExecuteNonQuery( @"INSERT INTO ""FilesetEntry"" (""FilesetID"", ""FileID"", ""Lastmodified"") SELECT ? AS ""FilesetID"", ""FileID"", ""Lastmodified"" FROM (SELECT DISTINCT ""FilesetID"", ""FileID"", ""Lastmodified"" FROM ""FilesetEntry"" WHERE ""FilesetID"" = ? AND ""FileID"" NOT IN (SELECT ""FileID"" FROM ""FilesetEntry"" WHERE ""FilesetID"" = ?)) ", filesetid, lastFilesetId, filesetid);
+ cmd.ExecuteNonQuery(@"INSERT INTO ""FilesetEntry"" (""FilesetID"", ""FileID"", ""Lastmodified"") SELECT ? AS ""FilesetID"", ""FileID"", ""Lastmodified"" FROM (SELECT DISTINCT ""FilesetID"", ""FileID"", ""Lastmodified"" FROM ""FilesetEntry"" WHERE ""FilesetID"" = ? AND ""FileID"" NOT IN (SELECT ""FileID"" FROM ""FilesetEntry"" WHERE ""FilesetID"" = ?)) ", filesetid, lastFilesetId, filesetid);
if (deleted != null)
{
@@ -825,8 +826,7 @@ namespace Duplicati.Library.Main.Database
tr.Commit();
}
}
-
-
+
/// <summary>
/// Creates a timestamped backup operation to correctly associate the fileset with the time it was created.
/// </summary>
@@ -837,14 +837,14 @@ namespace Duplicati.Library.Main.Database
{
return m_filesetId = base.CreateFileset(volumeid, timestamp, transaction);
}
-
+
public IEnumerable<KeyValuePair<long, DateTime>> GetIncompleteFilesets(System.Data.IDbTransaction transaction)
{
- using(var cmd = m_connection.CreateCommand(transaction))
+ using (var cmd = m_connection.CreateCommand(transaction))
{
cmd.Transaction = transaction;
- using(var rd = cmd.ExecuteReader(@"SELECT DISTINCT ""Fileset"".""ID"", ""Fileset"".""Timestamp"" FROM ""Fileset"", ""RemoteVolume"" WHERE ""RemoteVolume"".""ID"" = ""Fileset"".""VolumeID"" AND ""Fileset"".""ID"" IN (SELECT ""FilesetID"" FROM ""FilesetEntry"") AND (""RemoteVolume"".""State"" = ""Uploading"" OR ""RemoteVolume"".""State"" = ""Temporary"")"))
- while(rd.Read())
+ using (var rd = cmd.ExecuteReader(@"SELECT DISTINCT ""Fileset"".""ID"", ""Fileset"".""Timestamp"" FROM ""Fileset"", ""RemoteVolume"" WHERE ""RemoteVolume"".""ID"" = ""Fileset"".""VolumeID"" AND ""Fileset"".""ID"" IN (SELECT ""FilesetID"" FROM ""FilesetEntry"") AND (""RemoteVolume"".""State"" = ""Uploading"" OR ""RemoteVolume"".""State"" = ""Temporary"")"))
+ while (rd.Read())
{
yield return new KeyValuePair<long, DateTime>(
rd.GetInt64(0),
@@ -856,8 +856,8 @@ namespace Duplicati.Library.Main.Database
public IRemoteVolume GetRemoteVolumeFromName(string name, System.Data.IDbTransaction transaction)
{
- using(var cmd = m_connection.CreateCommand(transaction))
- using(var rd = cmd.ExecuteReader(@"SELECT ""Name"", ""Hash"", ""Size"" FROM ""RemoteVolume"" WHERE ""Name"" = ?", name))
+ using (var cmd = m_connection.CreateCommand(transaction))
+ using (var rd = cmd.ExecuteReader(@"SELECT ""Name"", ""Hash"", ""Size"" FROM ""RemoteVolume"" WHERE ""Name"" = ?", name))
if (rd.Read())
return new RemoteVolume(rd.GetValue(0).ToString(), rd.GetValue(1).ToString(), rd.ConvertValueToInt64(2));
else
@@ -884,26 +884,26 @@ namespace Duplicati.Library.Main.Database
public IEnumerable<string> GetMissingIndexFiles(System.Data.IDbTransaction transaction)
{
- using(var cmd = m_connection.CreateCommand(transaction))
- using(var rd = cmd.ExecuteReader(@"SELECT ""Name"" FROM ""RemoteVolume"" WHERE ""Type"" = ? AND NOT ""ID"" IN (SELECT ""BlockVolumeID"" FROM ""IndexBlockLink"") AND ""State"" IN (?,?)", RemoteVolumeType.Blocks.ToString(), RemoteVolumeState.Uploaded.ToString(), RemoteVolumeState.Verified.ToString()))
+ using (var cmd = m_connection.CreateCommand(transaction))
+ using (var rd = cmd.ExecuteReader(@"SELECT ""Name"" FROM ""RemoteVolume"" WHERE ""Type"" = ? AND NOT ""ID"" IN (SELECT ""BlockVolumeID"" FROM ""IndexBlockLink"") AND ""State"" IN (?,?)", RemoteVolumeType.Blocks.ToString(), RemoteVolumeState.Uploaded.ToString(), RemoteVolumeState.Verified.ToString()))
while (rd.Read())
yield return rd.GetValue(0).ToString();
}
-
+
public void LinkFilesetToVolume(long filesetid, long volumeid, System.Data.IDbTransaction transaction)
{
- using(var cmd = m_connection.CreateCommand())
+ using (var cmd = m_connection.CreateCommand())
{
cmd.Transaction = transaction;
var c = cmd.ExecuteNonQuery(@"UPDATE ""Fileset"" SET ""VolumeID"" = ? WHERE ""ID"" = ?", volumeid, filesetid);
if (c != 1)
throw new Exception(string.Format("Failed to link filesetid {0} to volumeid {1}", filesetid, volumeid));
- }
+ }
}
public void MoveBlockToVolume(string blockkey, long size, long sourcevolumeid, long targetvolumeid, System.Data.IDbTransaction transaction)
{
- using(var cmd = m_connection.CreateCommand())
+ using (var cmd = m_connection.CreateCommand())
{
cmd.Transaction = transaction;
var c = cmd.ExecuteNonQuery(@"UPDATE ""Block"" SET ""VolumeID"" = ? WHERE ""Hash"" = ? AND ""Size"" = ? AND ""VolumeID"" = ? ", targetvolumeid, blockkey, size, sourcevolumeid);
@@ -916,27 +916,27 @@ namespace Duplicati.Library.Main.Database
{
var volumeid = GetRemoteVolumeID(name, transaction);
- using(var cmd = m_connection.CreateCommand(transaction))
+ using (var cmd = m_connection.CreateCommand(transaction))
{
var c = cmd.ExecuteScalarInt64(@"SELECT COUNT(*) FROM ""Block"" WHERE ""VolumeID"" = ? ", -1, volumeid);
if (c != 0)
throw new Exception(string.Format("Failed to safe-delete volume {0}, blocks: {1}", name, c));
RemoveRemoteVolume(name, transaction);
- }
+ }
}
public string[] GetBlocklistHashes(string name, System.Data.IDbTransaction transaction)
{
var volumeid = GetRemoteVolumeID(name, transaction);
- using(var cmd = m_connection.CreateCommand(transaction))
+ using (var cmd = m_connection.CreateCommand(transaction))
{
// Grab the strings and return as array to avoid concurrent access to the IEnumerable
return cmd.ExecuteReaderEnumerable(
@"SELECT DISTINCT ""Block"".""Hash"" FROM ""Block"" WHERE ""Block"".""VolumeID"" = ? AND ""Block"".""Hash"" IN (SELECT ""Hash"" FROM ""BlocklistHash"")", volumeid)
.Select(x => x.ConvertValueToString(0))
.ToArray();
- }
+ }
}
public string GetFirstPath()
@@ -951,6 +951,7 @@ namespace Duplicati.Library.Main.Database
return v0.ToString();
}
}
+
/// <summary>
/// Retrieves change journal data for file set
/// </summary>
@@ -992,7 +993,7 @@ namespace Duplicati.Library.Main.Database
{
foreach (var entry in data)
{
- using(var cmd = m_connection.CreateCommand())
+ using (var cmd = m_connection.CreateCommand())
{
cmd.Transaction = tr.Parent;
var c = cmd.ExecuteNonQuery(
@@ -1001,7 +1002,7 @@ namespace Duplicati.Library.Main.Database
if (c != 1)
throw new Exception("Unable to add change journal entry");
- }
+ }
}
tr.Commit();
@@ -1020,17 +1021,18 @@ namespace Duplicati.Library.Main.Database
{
foreach (var entry in data)
{
- using(var cmd = m_connection.CreateCommand())
+ using (var cmd = m_connection.CreateCommand())
{
cmd.Transaction = tr.Parent;
cmd.ExecuteNonQuery(
@"UPDATE ""ChangeJournalData"" SET ""NextUSN"" = ? WHERE ""FilesetID"" = ? AND ""VolumeName"" = ? AND ""JournalID"" = ?;",
entry.NextUsn, fileSetId, entry.Volume, entry.JournalId);
- }
+ }
}
tr.Commit();
}
}
+
}
}
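
As the comments above describe, AddBlock registers a block and reports whether its payload still has to be written to the current output volume, while FindBlockID is the read-only probe. A hedged usage sketch of the deduplication check:

    using Duplicati.Library.Main.Database;

    static class BlockDedupSketch
    {
        // Returns true when the block's bytes must be emitted to the volume;
        // false means an identical hash/size pair is already stored.
        public static bool EnsureBlock(LocalBackupDatabase db, string hash, long size, long volumeId)
        {
            // db.FindBlockID(hash, size) != -1 reports the same thing read-only.
            return db.AddBlock(hash, size, volumeId);
        }
    }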
diff --git a/Duplicati/Library/Main/Database/LocalDatabase.cs b/Duplicati/Library/Main/Database/LocalDatabase.cs
index 723899495..acdafb25c 100644
--- a/Duplicati/Library/Main/Database/LocalDatabase.cs
+++ b/Duplicati/Library/Main/Database/LocalDatabase.cs
@@ -9,7 +9,7 @@ using Duplicati.Library.Modules.Builtin.ResultSerialization;
namespace Duplicati.Library.Main.Database
{
internal class LocalDatabase : IDisposable
- {
+ {
/// <summary>
/// The tag used for logging
/// </summary>
@@ -42,7 +42,7 @@ namespace Duplicati.Library.Main.Database
public DateTime OperationTimestamp { get; private set; }
internal System.Data.IDbConnection Connection { get { return m_connection; } }
-
+
public bool IsDisposed { get; private set; }
public bool ShouldCloseConnection { get; set; }
@@ -52,11 +52,11 @@ namespace Duplicati.Library.Main.Database
path = System.IO.Path.GetFullPath(path);
if (!System.IO.Directory.Exists(System.IO.Path.GetDirectoryName(path)))
System.IO.Directory.CreateDirectory(System.IO.Path.GetDirectoryName(path));
-
+
var c = Duplicati.Library.SQLiteHelper.SQLiteLoader.LoadConnection(path);
Library.SQLiteHelper.DatabaseUpgrader.UpgradeDatabase(c, path, typeof(LocalDatabase));
-
+
return c;
}
@@ -82,7 +82,7 @@ namespace Duplicati.Library.Main.Database
this.m_operationid = db.m_operationid;
this.m_result = db.m_result;
}
-
+
/// <summary>
/// Creates a new database instance and starts a new operation
/// </summary>
@@ -99,7 +99,7 @@ namespace Duplicati.Library.Main.Database
using (var cmd = m_connection.CreateCommand())
m_operationid = cmd.ExecuteScalarInt64( @"INSERT INTO ""Operation"" (""Description"", ""Timestamp"") VALUES (?, ?); SELECT last_insert_rowid();", -1, operation, Library.Utility.Utility.NormalizeDateTimeToEpochSeconds(OperationTimestamp));
}
-
+
private LocalDatabase(System.Data.IDbConnection connection)
{
m_updateremotevolumeCommand = connection.CreateCommand();
@@ -157,7 +157,7 @@ namespace Duplicati.Library.Main.Database
{
m_result = result;
}
-
+
/// <summary>
/// Creates a DateTime instance by adding the specified number of seconds to the EPOCH value
/// </summary>
@@ -167,13 +167,13 @@ namespace Duplicati.Library.Main.Database
}
public void UpdateRemoteVolume(string name, RemoteVolumeState state, long size, string hash, System.Data.IDbTransaction transaction = null)
- {
- UpdateRemoteVolume(name, state, size, hash, false, transaction);
+ {
+ UpdateRemoteVolume(name, state, size, hash, false, transaction);
}
public void UpdateRemoteVolume(string name, RemoteVolumeState state, long size, string hash, bool suppressCleanup, System.Data.IDbTransaction transaction = null)
{
- UpdateRemoteVolume(name, state, size, hash, suppressCleanup, new TimeSpan(0), transaction);
+ UpdateRemoteVolume(name, state, size, hash, suppressCleanup, new TimeSpan(0), transaction);
}
public void UpdateRemoteVolume(string name, RemoteVolumeState state, long size, string hash, bool suppressCleanup, TimeSpan deleteGraceTime, System.Data.IDbTransaction transaction = null)
@@ -213,13 +213,29 @@ namespace Duplicati.Library.Main.Database
}
public IEnumerable<KeyValuePair<long, DateTime>> FilesetTimes
- {
- get
+ {
+ get
{
- using(var cmd = m_connection.CreateCommand())
- using(var rd = cmd.ExecuteReader(@"SELECT ""ID"", ""Timestamp"" FROM ""Fileset"" ORDER BY ""Timestamp"" DESC"))
+ using (var cmd = m_connection.CreateCommand())
+ using (var rd = cmd.ExecuteReader(@"SELECT ""ID"", ""IsFullBackup"", ""Timestamp"" FROM ""Fileset"" ORDER BY ""Timestamp"" DESC"))
+ {
+ var isFullBackupEncountered = false;
while (rd.Read())
- yield return new KeyValuePair<long, DateTime>(rd.GetInt64(0), ParseFromEpochSeconds(rd.GetInt64(1)).ToLocalTime());
+ {
+ var id = rd.GetInt64(0);
+ var isFullBackup = rd.GetInt32(1);
+ var timeStamp = ParseFromEpochSeconds(rd.GetInt64(2)).ToLocalTime();
+
+ if (isFullBackupEncountered && isFullBackup != BackupType.FULL_BACKUP) continue;
+
+ yield return new KeyValuePair<long, DateTime>(id, timeStamp);
+
+ if (!isFullBackupEncountered && isFullBackup == BackupType.FULL_BACKUP)
+ {
+ isFullBackupEncountered = true;
+ }
+ }
+ }
}
}
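
The rewritten FilesetTimes enumerator walks filesets newest-first and, once the most recent full backup has been seen, suppresses all older partial backups, so only partials newer than the latest full remain listed. An equivalent standalone sketch (input types assumed):

    using System;
    using System.Collections.Generic;

    static class FilesetTimesSketch
    {
        // 'newestFirst' mirrors the SQL ordering (Timestamp DESC).
        public static IEnumerable<KeyValuePair<long, DateTime>> Filter(
            IEnumerable<(long Id, int IsFullBackup, DateTime Time)> newestFirst)
        {
            var fullSeen = false;
            foreach (var f in newestFirst)
            {
                if (fullSeen && f.IsFullBackup != BackupType.FULL_BACKUP)
                    continue;                        // older partial: skip
                yield return new KeyValuePair<long, DateTime>(f.Id, f.Time);
                if (f.IsFullBackup == BackupType.FULL_BACKUP)
                    fullSeen = true;                 // newest full reached
            }
        }
    }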
@@ -271,7 +287,7 @@ namespace Duplicati.Library.Main.Database
query.Insert(0, " WHERE ");
}
}
-
+
return new Tuple<string, object[]>(query.ToString(), args.ToArray());
}
@@ -285,7 +301,7 @@ namespace Duplicati.Library.Main.Database
{
m_selectremotevolumeCommand.Transaction = transaction;
m_selectremotevolumeCommand.SetParameterValue(0, file);
- using(var rd = m_selectremotevolumeCommand.ExecuteReader())
+ using (var rd = m_selectremotevolumeCommand.ExecuteReader())
if (rd.Read())
return new RemoteVolumeEntry(
rd.ConvertValueToInt64(0),
@@ -296,13 +312,13 @@ namespace Duplicati.Library.Main.Database
(RemoteVolumeState)Enum.Parse(typeof(RemoteVolumeState), rd.GetValue(5).ToString()),
new DateTime(rd.ConvertValueToInt64(6, 0), DateTimeKind.Utc)
);
-
+
return RemoteVolumeEntry.Empty;
}
public IEnumerable<KeyValuePair<string, RemoteVolumeState>> DuplicateRemoteVolumes()
{
- foreach(var rd in m_selectduplicateRemoteVolumesCommand.ExecuteReaderEnumerable(null))
+ foreach (var rd in m_selectduplicateRemoteVolumesCommand.ExecuteReaderEnumerable(null))
{
yield return new KeyValuePair<string, RemoteVolumeState>(
rd.GetValue(0).ToString(),
@@ -368,7 +384,7 @@ namespace Duplicati.Library.Main.Database
public void UnlinkRemoteVolume(string name, RemoteVolumeState state, System.Data.IDbTransaction transaction = null)
{
using (var tr = new TemporaryTransactionWrapper(m_connection, transaction))
- using(var cmd = m_connection.CreateCommand())
+ using (var cmd = m_connection.CreateCommand())
{
cmd.Transaction = tr.Parent;
var c = cmd.ExecuteNonQuery(@"DELETE FROM ""RemoteVolume"" WHERE ""Name"" = ? AND ""State"" = ? ", name, state.ToString());
@@ -410,15 +426,15 @@ namespace Duplicati.Library.Main.Database
}
var volIdsSubQuery = string.Format(@"SELECT ""ID"" FROM ""{0}"" ", volidstable);
deletecmd.Parameters.Clear();
-
+
// If the volume is a block or index volume, this will update the crosslink table, otherwise nothing will happen
deletecmd.ExecuteNonQuery(string.Format(@"DELETE FROM ""IndexBlockLink"" WHERE ""BlockVolumeID"" IN ({0}) OR ""IndexVolumeID"" IN ({0})", volIdsSubQuery));
-
+
// If the volume is a fileset, this will remove the fileset, otherwise nothing will happen
deletecmd.ExecuteNonQuery(string.Format(@"DELETE FROM ""FilesetEntry"" WHERE ""FilesetID"" IN (SELECT ""ID"" FROM ""Fileset"" WHERE ""VolumeID"" IN ({0}))", volIdsSubQuery));
deletecmd.ExecuteNonQuery(string.Format(@"DELETE FROM ""ChangeJournalData"" WHERE ""FilesetID"" IN (SELECT ""ID"" FROM ""Fileset"" WHERE ""VolumeID"" IN ({0}))", volIdsSubQuery));
deletecmd.ExecuteNonQuery(string.Format(@"DELETE FROM ""Fileset"" WHERE ""VolumeID"" IN ({0})", volIdsSubQuery));
-
+
var bsIdsSubQuery = string.Format(
@"SELECT ""BlocksetEntry"".""BlocksetID"" FROM ""BlocksetEntry"", ""Block"" "
+ @" WHERE ""BlocksetEntry"".""BlockID"" = ""Block"".""ID"" AND ""Block"".""VolumeID"" IN ({0}) "
@@ -467,10 +483,10 @@ namespace Duplicati.Library.Main.Database
tr.Commit();
}
}
-
+
public void Vacuum()
{
- using(var cmd = m_connection.CreateCommand())
+ using (var cmd = m_connection.CreateCommand())
cmd.ExecuteNonQuery("VACUUM");
}
@@ -488,10 +504,10 @@ namespace Duplicati.Library.Main.Database
{
return RegisterRemoteVolume(name, type, state, -1, deleteGraceTime, transaction);
}
-
+
public long RegisterRemoteVolume(string name, RemoteVolumeType type, RemoteVolumeState state, long size, TimeSpan deleteGraceTime, System.Data.IDbTransaction transaction)
{
- using(var tr = new TemporaryTransactionWrapper(m_connection, transaction))
+ using (var tr = new TemporaryTransactionWrapper(m_connection, transaction))
{
m_createremotevolumeCommand.SetParameterValue(0, m_operationid);
m_createremotevolumeCommand.SetParameterValue(1, name);
@@ -504,7 +520,7 @@ namespace Duplicati.Library.Main.Database
m_createremotevolumeCommand.SetParameterValue(6, 0);
else
m_createremotevolumeCommand.SetParameterValue(6, (DateTime.UtcNow + deleteGraceTime).Ticks);
-
+
m_createremotevolumeCommand.Transaction = tr.Parent;
var r = m_createremotevolumeCommand.ExecuteScalarInt64();
tr.Commit();
@@ -515,7 +531,7 @@ namespace Duplicati.Library.Main.Database
public long GetFilesetID(DateTime restoretime, long[] versions)
{
return GetFilesetIDs(restoretime, versions).First();
- }
+ }
public IEnumerable<long> GetFilesetIDs(DateTime restoretime, long[] versions)
{
@@ -527,19 +543,19 @@ namespace Duplicati.Library.Main.Database
var args = tmp.Item2;
var res = new List<long>();
- using(var cmd = m_connection.CreateCommand())
- {
- using(var rd = cmd.ExecuteReader(@"SELECT ""ID"" FROM ""Fileset"" " + query + @" ORDER BY ""Timestamp"" DESC", args))
+ using (var cmd = m_connection.CreateCommand())
+ {
+ using (var rd = cmd.ExecuteReader(@"SELECT ""ID"" FROM ""Fileset"" " + query + @" ORDER BY ""Timestamp"" DESC", args))
while (rd.Read())
res.Add(rd.GetInt64(0));
-
+
if (res.Count == 0)
{
cmd.Parameters.Clear();
- using(var rd = cmd.ExecuteReader(@"SELECT ""ID"" FROM ""Fileset"" ORDER BY ""Timestamp"" DESC "))
- while (rd.Read())
- res.Add(rd.ConvertValueToInt64(0));
-
+ using (var rd = cmd.ExecuteReader(@"SELECT ""ID"" FROM ""Fileset"" ORDER BY ""Timestamp"" DESC "))
+ while (rd.Read())
+ res.Add(rd.ConvertValueToInt64(0));
+
if (res.Count == 0)
throw new Duplicati.Library.Interface.UserInformationException("No backup at the specified date", "NoBackupAtDate");
else
@@ -560,14 +576,29 @@ namespace Duplicati.Library.Main.Database
var args = tmp.Item2;
var res = new List<long>();
- using(var cmd = m_connection.CreateCommand())
- using(var rd = cmd.ExecuteReader(@"SELECT ""ID"" FROM ""Fileset"" " + query + @" ORDER BY ""Timestamp"" DESC", args))
+ using (var cmd = m_connection.CreateCommand())
+ using (var rd = cmd.ExecuteReader(@"SELECT ""ID"" FROM ""Fileset"" " + query + @" ORDER BY ""Timestamp"" DESC", args))
while (rd.Read())
res.Add(rd.GetInt64(0));
return res;
}
-
+
+ public bool IsFilesetFullBackup(DateTime filesetTime)
+ {
+ using (var cmd = m_connection.CreateCommand())
+ {
+ using (var rd = cmd.ExecuteReader($@"SELECT ""IsFullBackup"" FROM ""Fileset"" WHERE ""Timestamp"" = {Library.Utility.Utility.NormalizeDateTimeToEpochSeconds(filesetTime)}"))
+ {
+ if (!rd.Read())
+ {
+ return false;
+ }
+ var isFullBackup = rd.GetInt32(0);
+ return isFullBackup == BackupType.FULL_BACKUP;
+ }
+ }
+ }
// TODO: Remove this
public System.Data.IDbTransaction BeginTransaction()
@@ -597,19 +628,19 @@ namespace Duplicati.Library.Main.Database
public System.Data.IDbConnection Connection { get { return m_parent.Connection; } }
public System.Data.IsolationLevel IsolationLevel { get { return m_parent.IsolationLevel; } }
- public void Commit()
- {
- if (m_isTemporary)
- m_parent.Commit();
+ public void Commit()
+ {
+ if (m_isTemporary)
+ m_parent.Commit();
}
public void Rollback()
{
if (m_isTemporary)
- m_parent.Rollback();
+ m_parent.Rollback();
}
- public void Dispose()
+ public void Dispose()
{
if (m_isTemporary)
m_parent.Dispose();
@@ -617,7 +648,7 @@ namespace Duplicati.Library.Main.Database
public System.Data.IDbTransaction Parent { get { return m_parent; } }
}
-
+
private class LocalFileEntry : ILocalFileEntry
{
private readonly System.Data.IDataReader m_reader;
@@ -628,7 +659,7 @@ namespace Duplicati.Library.Main.Database
public string Path
{
- get
+ get
{
var c = m_reader.GetValue(0);
if (c == null || c == DBNull.Value)
@@ -667,26 +698,26 @@ namespace Duplicati.Library.Main.Database
}
}
}
-
+
public IEnumerable<ILocalFileEntry> GetFiles(long filesetId)
{
- using(var cmd = m_connection.CreateCommand())
- using(var rd = cmd.ExecuteReader(@"SELECT ""A"".""Path"", ""B"".""Length"", ""B"".""FullHash"", ""D"".""FullHash"" FROM ""File"" A, ""Blockset"" B, ""Metadataset"" C, ""Blockset"" D, ""FilesetEntry"" E WHERE ""A"".""BlocksetID"" = ""B"".""ID"" AND ""A"".""MetadataID"" = ""C"".""ID"" AND ""C"".""BlocksetID"" = ""D"".""ID"" AND ""A"".""ID"" = ""E"".""FileID"" AND ""E"".""FilesetID"" = ? ", filesetId))
- while(rd.Read())
- yield return new LocalFileEntry(rd);
+ using (var cmd = m_connection.CreateCommand())
+ using (var rd = cmd.ExecuteReader(@"SELECT ""A"".""Path"", ""B"".""Length"", ""B"".""FullHash"", ""D"".""FullHash"" FROM ""File"" A, ""Blockset"" B, ""Metadataset"" C, ""Blockset"" D, ""FilesetEntry"" E WHERE ""A"".""BlocksetID"" = ""B"".""ID"" AND ""A"".""MetadataID"" = ""C"".""ID"" AND ""C"".""BlocksetID"" = ""D"".""ID"" AND ""A"".""ID"" = ""E"".""FileID"" AND ""E"".""FilesetID"" = ? ", filesetId))
+ while (rd.Read())
+ yield return new LocalFileEntry(rd);
}
private IEnumerable<KeyValuePair<string, string>> GetDbOptionList(System.Data.IDbTransaction transaction = null)
{
- using(var cmd = m_connection.CreateCommand(transaction))
- using(var rd = cmd.ExecuteReader(@"SELECT ""Key"", ""Value"" FROM ""Configuration"" "))
- while(rd.Read())
- yield return new KeyValuePair<string, string>(rd.GetValue(0).ToString(), rd.GetValue(1).ToString());
+ using (var cmd = m_connection.CreateCommand(transaction))
+ using (var rd = cmd.ExecuteReader(@"SELECT ""Key"", ""Value"" FROM ""Configuration"" "))
+ while (rd.Read())
+ yield return new KeyValuePair<string, string>(rd.GetValue(0).ToString(), rd.GetValue(1).ToString());
}
-
+
public IDictionary<string, string> GetDbOptions(System.Data.IDbTransaction transaction = null)
{
- return GetDbOptionList(transaction).ToDictionary(x => x.Key, x => x.Value);
+ return GetDbOptionList(transaction).ToDictionary(x => x.Key, x => x.Value);
}
public bool RepairInProgress
@@ -703,7 +734,7 @@ namespace Duplicati.Library.Main.Database
opts["repair-in-progress"] = "true";
else
opts.Remove("repair-in-progress");
-
+
SetDbOptions(opts);
}
}
@@ -726,24 +757,24 @@ namespace Duplicati.Library.Main.Database
SetDbOptions(opts);
}
}
-
+
public void SetDbOptions(IDictionary<string, string> options, System.Data.IDbTransaction transaction = null)
{
- using(var tr = new TemporaryTransactionWrapper(m_connection, transaction))
- using(var cmd = m_connection.CreateCommand())
+ using (var tr = new TemporaryTransactionWrapper(m_connection, transaction))
+ using (var cmd = m_connection.CreateCommand())
{
cmd.Transaction = tr.Parent;
cmd.ExecuteNonQuery(@"DELETE FROM ""Configuration"" ");
- foreach(var kp in options)
+ foreach (var kp in options)
cmd.ExecuteNonQuery(@"INSERT INTO ""Configuration"" (""Key"", ""Value"") VALUES (?, ?) ", kp.Key, kp.Value);
-
+
tr.Commit();
}
}
public long GetBlocksLargerThan(long fhblocksize)
{
- using(var cmd = m_connection.CreateCommand())
+ using (var cmd = m_connection.CreateCommand())
return cmd.ExecuteScalarInt64(@"SELECT COUNT(*) FROM ""Block"" WHERE ""Size"" > ?", -1, fhblocksize);
}
@@ -778,8 +809,8 @@ ON
";
// For each blockset with wrong lengths, fetch the file path
var reportDetails = @"SELECT ""CalcLen"", ""Length"", ""A"".""BlocksetID"", ""File"".""Path"" FROM (" + combinedLengths + @") A, ""File"" WHERE ""A"".""BlocksetID"" = ""File"".""BlocksetID"" AND ""A"".""CalcLen"" != ""A"".""Length"" ";
-
- using(var rd = cmd.ExecuteReader(reportDetails))
+
+ using (var rd = cmd.ExecuteReader(reportDetails))
if (rd.Read())
{
var sb = new StringBuilder();
@@ -790,12 +821,12 @@ ON
if (c < 5)
sb.AppendFormat("{0}, actual size {1}, dbsize {2}, blocksetid: {3}{4}", rd.GetValue(3), rd.GetValue(1), rd.GetValue(0), rd.GetValue(2), Environment.NewLine);
c++;
- } while(rd.Read());
-
+ } while (rd.Read());
+
c -= 5;
if (c > 0)
sb.AppendFormat("... and {0} more", c);
-
+
sb.Append(". Run repair to fix it.");
throw new InvalidDataException(sb.ToString());
}
@@ -820,50 +851,50 @@ ON
if (verifyfilelists)
{
- using(var cmd2 = m_connection.CreateCommand(transaction))
- foreach(var filesetid in cmd.ExecuteReaderEnumerable(@"SELECT ""ID"" FROM ""Fileset"" ").Select(x => x.ConvertValueToInt64(0, -1)))
- {
- var expandedCmd = string.Format(@"SELECT COUNT(*) FROM (SELECT DISTINCT ""Path"" FROM ({0}) UNION SELECT DISTINCT ""Path"" FROM ({1}))", LocalDatabase.LIST_FILESETS, LocalDatabase.LIST_FOLDERS_AND_SYMLINKS);
- var expandedlist = cmd2.ExecuteScalarInt64(expandedCmd, 0, filesetid, FOLDER_BLOCKSET_ID, SYMLINK_BLOCKSET_ID, filesetid);
- //var storedfilelist = cmd2.ExecuteScalarInt64(string.Format(@"SELECT COUNT(*) FROM ""FilesetEntry"", ""FileLookup"" WHERE ""FilesetEntry"".""FilesetID"" = ? AND ""FileLookup"".""ID"" = ""FilesetEntry"".""FileID"" AND ""FileLookup"".""BlocksetID"" != ? AND ""FileLookup"".""BlocksetID"" != ?"), 0, filesetid, FOLDER_BLOCKSET_ID, SYMLINK_BLOCKSET_ID);
- var storedlist = cmd2.ExecuteScalarInt64(@"SELECT COUNT(*) FROM ""FilesetEntry"" WHERE ""FilesetEntry"".""FilesetID"" = ?", 0, filesetid);
-
- if (expandedlist != storedlist)
+ using (var cmd2 = m_connection.CreateCommand(transaction))
+ foreach (var filesetid in cmd.ExecuteReaderEnumerable(@"SELECT ""ID"" FROM ""Fileset"" ").Select(x => x.ConvertValueToInt64(0, -1)))
{
- var filesetname = filesetid.ToString();
- var fileset = FilesetTimes.Zip(Enumerable.Range(0, FilesetTimes.Count()), (a, b) => new Tuple<long, long, DateTime>(b, a.Key, a.Value)).FirstOrDefault(x => x.Item2 == filesetid);
- if (fileset != null)
- filesetname = string.Format("version {0}: {1} (database id: {2})", fileset.Item1, fileset.Item3, fileset.Item2);
- throw new Interface.UserInformationException(string.Format("Unexpected difference in fileset {0}, found {1} entries, but expected {2}", filesetname, expandedlist, storedlist), "FilesetDifferences");
+ var expandedCmd = string.Format(@"SELECT COUNT(*) FROM (SELECT DISTINCT ""Path"" FROM ({0}) UNION SELECT DISTINCT ""Path"" FROM ({1}))", LocalDatabase.LIST_FILESETS, LocalDatabase.LIST_FOLDERS_AND_SYMLINKS);
+ var expandedlist = cmd2.ExecuteScalarInt64(expandedCmd, 0, filesetid, FOLDER_BLOCKSET_ID, SYMLINK_BLOCKSET_ID, filesetid);
+ //var storedfilelist = cmd2.ExecuteScalarInt64(string.Format(@"SELECT COUNT(*) FROM ""FilesetEntry"", ""FileLookup"" WHERE ""FilesetEntry"".""FilesetID"" = ? AND ""FileLookup"".""ID"" = ""FilesetEntry"".""FileID"" AND ""FileLookup"".""BlocksetID"" != ? AND ""FileLookup"".""BlocksetID"" != ?"), 0, filesetid, FOLDER_BLOCKSET_ID, SYMLINK_BLOCKSET_ID);
+ var storedlist = cmd2.ExecuteScalarInt64(@"SELECT COUNT(*) FROM ""FilesetEntry"" WHERE ""FilesetEntry"".""FilesetID"" = ?", 0, filesetid);
+
+ if (expandedlist != storedlist)
+ {
+ var filesetname = filesetid.ToString();
+ var fileset = FilesetTimes.Zip(Enumerable.Range(0, FilesetTimes.Count()), (a, b) => new Tuple<long, long, DateTime>(b, a.Key, a.Value)).FirstOrDefault(x => x.Item2 == filesetid);
+ if (fileset != null)
+ filesetname = string.Format("version {0}: {1} (database id: {2})", fileset.Item1, fileset.Item3, fileset.Item2);
+ throw new Interface.UserInformationException(string.Format("Unexpected difference in fileset {0}, found {1} entries, but expected {2}", filesetname, expandedlist, storedlist), "FilesetDifferences");
+ }
}
- }
}
}
}
- public interface IBlock
- {
- string Hash { get; }
- long Size { get; }
- }
-
- internal class Block : IBlock
- {
- public string Hash { get; private set; }
- public long Size { get; private set; }
-
- public Block(string hash, long size)
- {
- this.Hash = hash;
- this.Size = size;
- }
- }
+ public interface IBlock
+ {
+ string Hash { get; }
+ long Size { get; }
+ }
+
+ internal class Block : IBlock
+ {
+ public string Hash { get; private set; }
+ public long Size { get; private set; }
+
+ public Block(string hash, long size)
+ {
+ this.Hash = hash;
+ this.Size = size;
+ }
+ }
public IEnumerable<IBlock> GetBlocks(long volumeid, System.Data.IDbTransaction transaction = null)
- {
- using(var cmd = m_connection.CreateCommand(transaction))
- using(var rd = cmd.ExecuteReader(@"SELECT DISTINCT ""Hash"", ""Size"" FROM ""Block"" WHERE ""VolumeID"" = ?", volumeid))
- while (rd.Read())
+ {
+ using (var cmd = m_connection.CreateCommand(transaction))
+ using (var rd = cmd.ExecuteReader(@"SELECT DISTINCT ""Hash"", ""Size"" FROM ""Block"" WHERE ""VolumeID"" = ?", volumeid))
+ while (rd.Read())
yield return new Block(rd.GetValue(0).ToString(), rd.GetInt64(1));
}
@@ -883,7 +914,7 @@ ON
m_parent = parent;
}
- public string Current { get{ return m_current; } }
+ public string Current { get { return m_current; } }
public void Dispose()
{
@@ -1087,7 +1118,7 @@ ORDER BY
{
using (var cmd = m_connection.CreateCommand())
{
- cmd.Transaction = transaction;
+ cmd.Transaction = transaction;
cmd.CommandText = LIST_FOLDERS_AND_SYMLINKS;
cmd.AddParameter(FOLDER_BLOCKSET_ID);
cmd.AddParameter(SYMLINK_BLOCKSET_ID);
@@ -1095,26 +1126,26 @@ ORDER BY
string lastpath = null;
using (var rd = cmd.ExecuteReader())
- while(rd.Read())
- {
- var blocksetID = rd.ConvertValueToInt64(0, -1);
- var path = rd.GetValue(2).ToString();
- var metalength = rd.ConvertValueToInt64(3, -1);
- var metahash = rd.GetValue(4).ToString();
- var metablockhash = rd.GetValue(6).ToString();
- var metablocklisthash = rd.GetValue(7).ToString();
-
- if (path == lastpath)
- Logging.Log.WriteWarningMessage(LOGTAG, "DuplicatePathFound", null, "Duplicate path detected: {0}", path);
-
- lastpath = path;
-
- if (blocksetID == FOLDER_BLOCKSET_ID)
- filesetvolume.AddDirectory(path, metahash, metalength, metablockhash, string.IsNullOrWhiteSpace(metablocklisthash) ? null : new string[] { metablocklisthash });
- else if (blocksetID == SYMLINK_BLOCKSET_ID)
- filesetvolume.AddSymlink(path, metahash, metalength, metablockhash, string.IsNullOrWhiteSpace(metablocklisthash) ? null : new string[] { metablocklisthash });
- }
-
+ while (rd.Read())
+ {
+ var blocksetID = rd.ConvertValueToInt64(0, -1);
+ var path = rd.GetValue(2).ToString();
+ var metalength = rd.ConvertValueToInt64(3, -1);
+ var metahash = rd.GetValue(4).ToString();
+ var metablockhash = rd.GetValue(6).ToString();
+ var metablocklisthash = rd.GetValue(7).ToString();
+
+ if (path == lastpath)
+ Logging.Log.WriteWarningMessage(LOGTAG, "DuplicatePathFound", null, "Duplicate path detected: {0}", path);
+
+ lastpath = path;
+
+ if (blocksetID == FOLDER_BLOCKSET_ID)
+ filesetvolume.AddDirectory(path, metahash, metalength, metablockhash, string.IsNullOrWhiteSpace(metablocklisthash) ? null : new string[] { metablocklisthash });
+ else if (blocksetID == SYMLINK_BLOCKSET_ID)
+ filesetvolume.AddSymlink(path, metahash, metalength, metablockhash, string.IsNullOrWhiteSpace(metablocklisthash) ? null : new string[] { metablocklisthash });
+ }
+
// TODO: Perhaps run the above query after recreate and compare count(*) with count(*) from filesetentry where id = x
cmd.CommandText = LIST_FILESETS;
@@ -1122,42 +1153,42 @@ ORDER BY
cmd.AddParameter(filesetId);
using (var rd = cmd.ExecuteReader())
- if (rd.Read())
- {
- var more = false;
- do
+ if (rd.Read())
{
- var path = rd.GetValue(0).ToString();
- var filehash = rd.GetValue(3).ToString();
- var size = rd.ConvertValueToInt64(2);
- var lastmodified = new DateTime(rd.ConvertValueToInt64(1, 0), DateTimeKind.Utc);
- var metahash = rd.GetValue(4).ToString();
- var metasize = rd.ConvertValueToInt64(5, -1);
- var p = rd.GetValue(6);
- var blrd = (p == null || p == DBNull.Value) ? null : new BlocklistHashEnumerable(rd);
- var blockhash = rd.GetValue(7).ToString();
- var blocksize = rd.ConvertValueToInt64(8, -1);
- var metablockhash = rd.GetValue(9).ToString();
- //var metablocksize = rd.ConvertValueToInt64(10, -1);
- var metablocklisthash = rd.GetValue(11).ToString();
-
- if (blockhash == filehash)
- blockhash = null;
-
- if (metablockhash == metahash)
- metablockhash = null;
-
- filesetvolume.AddFile(path, filehash, size, lastmodified, metahash, metasize, metablockhash, blockhash, blocksize, blrd, string.IsNullOrWhiteSpace(metablocklisthash) ? null : new string[] { metablocklisthash });
- if (blrd == null)
- more = rd.Read();
- else
- more = blrd.MoreData;
-
- } while (more);
- }
+ var more = false;
+ do
+ {
+ var path = rd.GetValue(0).ToString();
+ var filehash = rd.GetValue(3).ToString();
+ var size = rd.ConvertValueToInt64(2);
+ var lastmodified = new DateTime(rd.ConvertValueToInt64(1, 0), DateTimeKind.Utc);
+ var metahash = rd.GetValue(4).ToString();
+ var metasize = rd.ConvertValueToInt64(5, -1);
+ var p = rd.GetValue(6);
+ var blrd = (p == null || p == DBNull.Value) ? null : new BlocklistHashEnumerable(rd);
+ var blockhash = rd.GetValue(7).ToString();
+ var blocksize = rd.ConvertValueToInt64(8, -1);
+ var metablockhash = rd.GetValue(9).ToString();
+ //var metablocksize = rd.ConvertValueToInt64(10, -1);
+ var metablocklisthash = rd.GetValue(11).ToString();
+
+ if (blockhash == filehash)
+ blockhash = null;
+
+ if (metablockhash == metahash)
+ metablockhash = null;
+
+ filesetvolume.AddFile(path, filehash, size, lastmodified, metahash, metasize, metablockhash, blockhash, blocksize, blrd, string.IsNullOrWhiteSpace(metablocklisthash) ? null : new string[] { metablocklisthash });
+ if (blrd == null)
+ more = rd.Read();
+ else
+ more = blrd.MoreData;
+
+ } while (more);
+ }
}
}
-
+
/// <summary>
/// Keeps a list of filenames in a temporary table with a single column Path
///</summary>
@@ -1177,33 +1208,33 @@ ORDER BY
// Bugfix: SQLite does not handle case-insensitive LIKE with non-ascii characters
if (type != Library.Utility.FilterType.Regexp && !Library.Utility.Utility.IsFSCaseSensitive && filter.ToString().Any(x => x > 127))
type = Library.Utility.FilterType.Regexp;
-
+
if (type == Library.Utility.FilterType.Regexp || type == Library.Utility.FilterType.Group)
{
- using(var cmd = m_connection.CreateCommand())
+ using (var cmd = m_connection.CreateCommand())
{
// TODO: Optimize this to not rely on the "File" view, and not instantiate the paths in full
cmd.Transaction = transaction;
cmd.ExecuteNonQuery(string.Format(@"CREATE TEMPORARY TABLE ""{0}"" (""Path"" TEXT NOT NULL)", Tablename));
- using(var tr = new TemporaryTransactionWrapper(m_connection, transaction))
+ using (var tr = new TemporaryTransactionWrapper(m_connection, transaction))
{
cmd.CommandText = string.Format(@"INSERT INTO ""{0}"" (""Path"") VALUES (?)", Tablename);
cmd.AddParameter();
cmd.Transaction = tr.Parent;
- using(var c2 = m_connection.CreateCommand())
- using(var rd = c2.ExecuteReader(@"SELECT DISTINCT ""Path"" FROM ""File"" "))
- while(rd.Read())
+ using (var c2 = m_connection.CreateCommand())
+ using (var rd = c2.ExecuteReader(@"SELECT DISTINCT ""Path"" FROM ""File"" "))
+ while (rd.Read())
{
var p = rd.GetValue(0).ToString();
- if(Library.Utility.FilterExpression.Matches(filter, p))
+ if (Library.Utility.FilterExpression.Matches(filter, p))
{
cmd.SetParameterValue(0, p);
cmd.ExecuteNonQuery();
}
}
-
-
+
+
tr.Commit();
}
}
@@ -1212,7 +1243,7 @@ ORDER BY
{
var sb = new StringBuilder();
var args = new List<object>();
- foreach(var f in ((Library.Utility.FilterExpression)filter).GetSimpleList())
+ foreach (var f in ((Library.Utility.FilterExpression)filter).GetSimpleList())
{
if (f.Contains('*') || f.Contains('?'))
{
@@ -1225,11 +1256,11 @@ ORDER BY
args.Add(f);
}
}
-
+
sb.Length = sb.Length - " OR ".Length;
-
- using(var cmd = m_connection.CreateCommand())
- using(var tr = new TemporaryTransactionWrapper(m_connection, transaction))
+
+ using (var cmd = m_connection.CreateCommand())
+ using (var tr = new TemporaryTransactionWrapper(m_connection, transaction))
{
cmd.Transaction = tr.Parent;
cmd.ExecuteNonQuery(string.Format(@"CREATE TEMPORARY TABLE ""{0}"" (""Path"" TEXT NOT NULL)", Tablename));
@@ -1238,43 +1269,43 @@ ORDER BY
}
}
}
-
+
public void Dispose()
{
if (Tablename != null)
- try
- {
- using(var cmd = m_connection.CreateCommand())
+ try
+ {
+ using (var cmd = m_connection.CreateCommand())
cmd.ExecuteNonQuery(string.Format(@"DROP TABLE IF EXISTS ""{0}"" ", Tablename));
}
catch { }
finally { Tablename = null; }
- }
+ }
}
-
+
public void RenameRemoteFile(string oldname, string newname, System.Data.IDbTransaction transaction)
{
- using(var tr = new TemporaryTransactionWrapper(m_connection, transaction))
- using(var cmd = m_connection.CreateCommand())
+ using (var tr = new TemporaryTransactionWrapper(m_connection, transaction))
+ using (var cmd = m_connection.CreateCommand())
{
cmd.Transaction = tr.Parent;
-
+
//Rename the old entry, to preserve ID links
var c = cmd.ExecuteNonQuery(@"UPDATE ""Remotevolume"" SET ""Name"" = ? WHERE ""Name"" = ?", newname, oldname);
if (c != 1)
throw new Exception(string.Format("Unexpected result from renaming \"{0}\" to \"{1}\", expected {2} got {3}", oldname, newname, 1, c));
-
+
// Grab the type of entry
var type = (RemoteVolumeType)Enum.Parse(typeof(RemoteVolumeType), cmd.ExecuteScalar(@"SELECT ""Type"" FROM ""Remotevolume"" WHERE ""Name"" = ?", newname).ToString(), true);
-
+
//Create a fake new entry with the old name and mark as deleting
// as this ensures we will remove it, if it shows up in some later listing
RegisterRemoteVolume(oldname, type, RemoteVolumeState.Deleting, tr.Parent);
-
+
tr.Commit();
}
}
-
+
/// <summary>
/// Creates a timestamped backup operation to correctly associate the fileset with the time it was created.
/// </summary>
@@ -1286,8 +1317,8 @@ ORDER BY
using (var cmd = m_connection.CreateCommand())
using (var tr = new TemporaryTransactionWrapper(m_connection, transaction))
{
- cmd.Transaction = tr.Parent;
- var id = cmd.ExecuteScalarInt64(@"INSERT INTO ""Fileset"" (""OperationID"", ""Timestamp"", ""VolumeID"") VALUES (?, ?, ?); SELECT last_insert_rowid();", -1, m_operationid, Library.Utility.Utility.NormalizeDateTimeToEpochSeconds(timestamp), volumeid);
+ cmd.Transaction = tr.Parent;
+ var id = cmd.ExecuteScalarInt64(@"INSERT INTO ""Fileset"" (""OperationID"", ""Timestamp"", ""VolumeID"", ""IsFullBackup"") VALUES (?, ?, ?, ?); SELECT last_insert_rowid();", -1, m_operationid, Library.Utility.Utility.NormalizeDateTimeToEpochSeconds(timestamp), volumeid, BackupType.PARTIAL_BACKUP);
tr.Commit();
return id;
}
@@ -1303,12 +1334,12 @@ ORDER BY
public IEnumerable<Tuple<string, byte[], int>> GetBlocklists(long volumeid, long blocksize, int hashsize, System.Data.IDbTransaction transaction = null)
{
- using(var cmd = m_connection.CreateCommand(transaction))
+ using (var cmd = m_connection.CreateCommand(transaction))
{
- var sql = string.Format(@"SELECT ""A"".""Hash"", ""C"".""Hash"" FROM " +
- @"(SELECT ""BlocklistHash"".""BlocksetID"", ""Block"".""Hash"", * FROM ""BlocklistHash"",""Block"" WHERE ""BlocklistHash"".""Hash"" = ""Block"".""Hash"" AND ""Block"".""VolumeID"" = ?) A, " +
- @" ""BlocksetEntry"" B, ""Block"" C WHERE ""B"".""BlocksetID"" = ""A"".""BlocksetID"" AND " +
- @" ""B"".""Index"" >= (""A"".""Index"" * {0}) AND ""B"".""Index"" < ((""A"".""Index"" + 1) * {0}) AND ""C"".""ID"" = ""B"".""BlockID"" " +
+ var sql = string.Format(@"SELECT ""A"".""Hash"", ""C"".""Hash"" FROM " +
+ @"(SELECT ""BlocklistHash"".""BlocksetID"", ""Block"".""Hash"", * FROM ""BlocklistHash"",""Block"" WHERE ""BlocklistHash"".""Hash"" = ""Block"".""Hash"" AND ""Block"".""VolumeID"" = ?) A, " +
+ @" ""BlocksetEntry"" B, ""Block"" C WHERE ""B"".""BlocksetID"" = ""A"".""BlocksetID"" AND " +
+ @" ""B"".""Index"" >= (""A"".""Index"" * {0}) AND ""B"".""Index"" < ((""A"".""Index"" + 1) * {0}) AND ""C"".""ID"" = ""B"".""BlockID"" " +
@" ORDER BY ""A"".""BlocksetID"", ""B"".""Index""",
blocksize / hashsize
);
@@ -1317,7 +1348,7 @@ ORDER BY
int index = 0;
byte[] buffer = new byte[blocksize];
- using(var rd = cmd.ExecuteReader(sql, volumeid))
+ using (var rd = cmd.ExecuteReader(sql, volumeid))
while (rd.Read())
{
var blockhash = rd.GetValue(0).ToString();
@@ -1339,10 +1370,30 @@ ORDER BY
}
}
+ /// <summary>
+ /// Update fileset with full backup state
+ /// </summary>
+ /// <param name="fileSetId">Existing file set to update</param>
+ /// <param name="isFullBackup">Full backup state</param>
+ /// <param name="transaction">An optional external transaction</param>
+ public void UpdateFullBackupStateInFileset(long fileSetId, bool isFullBackup, IDbTransaction transaction = null)
+ {
+ using (var tr = new TemporaryTransactionWrapper(m_connection, transaction))
+ {
+ using (var cmd = m_connection.CreateCommand())
+ {
+ cmd.Transaction = tr.Parent;
+ cmd.ExecuteNonQuery(@"UPDATE ""Fileset"" SET ""IsFullBackup"" = ? WHERE ""ID"" = ?;", isFullBackup, fileSetId);
+ }
+
+ tr.Commit();
+ }
+ }
+
public void PurgeLogData(DateTime threshold)
{
- using(var tr = m_connection.BeginTransaction())
- using(var cmd = m_connection.CreateCommand(tr))
+ using (var tr = m_connection.BeginTransaction())
+ using (var cmd = m_connection.CreateCommand(tr))
{
var t = Library.Utility.Utility.NormalizeDateTimeToEpochSeconds(threshold);
cmd.ExecuteNonQuery(@"DELETE FROM ""LogData"" WHERE ""Timestamp"" < ?", t);
@@ -1519,6 +1570,15 @@ ORDER BY
return new KeyValuePair<string, string>(path.Substring(0, nLast + 1), path.Substring(nLast + 1));
return new KeyValuePair<string, string>(string.Empty, path);
- }
+ }
+
+ /// <summary>
+ /// Defines the backup types
+ /// </summary>
+ public static class BackupType
+ {
+ public const int PARTIAL_BACKUP = 0;
+ public const int FULL_BACKUP = 1;
+ }
}
}
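
Taken together, the LocalDatabase changes above make every new fileset start life as BackupType.PARTIAL_BACKUP and add UpdateFullBackupStateInFileset to promote it later. A minimal caller-side sketch of the intended lifecycle; the open database `db`, the registered `volumeid`, the `transaction`, and the `token` are assumptions standing in for the surrounding plumbing:

// Sketch: filesets are born partial and promoted to full only on clean completion.
var filesetid = db.CreateFileset(volumeid, DateTime.UtcNow, transaction); // row starts with IsFullBackup = PARTIAL_BACKUP
// ... backup work runs here ...
if (!token.IsCancellationRequested)
    db.UpdateFullBackupStateInFileset(filesetid, true, transaction);      // promote to FULL_BACKUP
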
diff --git a/Duplicati/Library/Main/Database/LocalListAffectedDatabase.cs b/Duplicati/Library/Main/Database/LocalListAffectedDatabase.cs
index e293b7329..7157d8946 100644
--- a/Duplicati/Library/Main/Database/LocalListAffectedDatabase.cs
+++ b/Duplicati/Library/Main/Database/LocalListAffectedDatabase.cs
@@ -32,6 +32,7 @@ namespace Duplicati.Library.Main.Database
private class ListResultFileset : Duplicati.Library.Interface.IListResultFileset
{
public long Version { get; set; }
+ public int IsFullBackup { get; set; }
public DateTime Time { get; set; }
public long FileCount { get; set; }
public long FileSizes { get; set; }
diff --git a/Duplicati/Library/Main/Database/LocalListDatabase.cs b/Duplicati/Library/Main/Database/LocalListDatabase.cs
index cef67a6b5..fe66abf59 100644
--- a/Duplicati/Library/Main/Database/LocalListDatabase.cs
+++ b/Duplicati/Library/Main/Database/LocalListDatabase.cs
@@ -41,6 +41,7 @@ namespace Duplicati.Library.Main.Database
public interface IFileset
{
long Version { get; }
+ int IsFullBackup { get; set; }
DateTime Time { get; }
long FileCount { get; }
long FileSizes { get; }
@@ -55,7 +56,7 @@ namespace Duplicati.Library.Main.Database
IEnumerable<IFileversion> SelectFolderContents(Library.Utility.IFilter filter);
void TakeFirst ();
}
-
+
private class FileSets : IFileSets
{
private readonly System.Data.IDbConnection m_connection;
@@ -73,7 +74,7 @@ namespace Duplicati.Library.Main.Database
using(var cmd = m_connection.CreateCommand())
{
- cmd.ExecuteNonQuery(string.Format(@"CREATE TEMPORARY TABLE ""{0}"" AS SELECT DISTINCT ""ID"" AS ""FilesetID"", ""Timestamp"" AS ""Timestamp"" FROM ""Fileset"" " + query, m_tablename), args);
+ cmd.ExecuteNonQuery(string.Format(@"CREATE TEMPORARY TABLE ""{0}"" AS SELECT DISTINCT ""ID"" AS ""FilesetID"", ""IsFullBackup"" AS ""IsFullBackup"" , ""Timestamp"" AS ""Timestamp"" FROM ""Fileset"" " + query, m_tablename), args);
cmd.ExecuteNonQuery(string.Format(@"CREATE INDEX ""{0}_FilesetIDTimestampIndex"" ON ""{0}"" (""FilesetID"", ""Timestamp"" DESC)", m_tablename));
}
}
@@ -81,13 +82,15 @@ namespace Duplicati.Library.Main.Database
private class Fileset : IFileset
{
public long Version { get; private set; }
+ public int IsFullBackup { get; set; }
public DateTime Time { get; private set; }
public long FileCount { get; private set; }
public long FileSizes { get; private set; }
- public Fileset(long version, DateTime time, long filecount, long filesizes)
+ public Fileset(long version, int isFullBackup, DateTime time, long filecount, long filesizes)
{
this.Version = version;
+ this.IsFullBackup = isFullBackup;
this.Time = time;
this.FileCount = filecount;
this.FileSizes = filesizes;
@@ -353,18 +356,46 @@ namespace Duplicati.Library.Main.Database
get
{
var dict = new Dictionary<long, long>();
- for(var i = 0; i < m_filesets.Length; i++)
+ for (var i = 0; i < m_filesets.Length; i++)
+ {
dict[m_filesets[i].Key] = i;
+ }
- using(var cmd = m_connection.CreateCommand())
- using(var rd = cmd.ExecuteReader(@"SELECT DISTINCT ""ID"" FROM ""Fileset"" ORDER BY ""Timestamp"" DESC "))
- while (rd.Read())
+ using (var cmd = m_connection.CreateCommand())
+ {
+ using (var rd =
+ cmd.ExecuteReader(
+ @"SELECT DISTINCT ""ID"", ""IsFullBackup"" FROM ""Fileset"" ORDER BY ""Timestamp"" DESC ")
+ )
{
- var id = rd.GetInt64(0);
- var e = dict[id];
-
- yield return new Fileset(e, m_filesets[e].Value, -1L, -1L);
+ // partial backup sets are only included until the first full backup is encountered
+ var isFullBackupEncountered = false;
+ while (rd.Read())
+ {
+ var id = rd.GetInt64(0);
+
+ if (!dict.ContainsKey(id))
+ {
+ continue;
+ }
+
+ var backupType = rd.GetInt32(1);
+ var e = dict[id];
+
+ if (isFullBackupEncountered && backupType != BackupType.FULL_BACKUP)
+ {
+ continue;
+ }
+
+ yield return new Fileset(e, backupType, m_filesets[e].Value, -1L, -1L);
+
+ if (!isFullBackupEncountered && backupType == BackupType.FULL_BACKUP)
+ {
+ isFullBackupEncountered = true;
+ }
+ }
}
+ }
}
}
@@ -373,24 +404,37 @@ namespace Duplicati.Library.Main.Database
get
{
var dict = new Dictionary<long, long>();
- for(var i = 0; i < m_filesets.Length; i++)
+ for (var i = 0; i < m_filesets.Length; i++)
+ {
dict[m_filesets[i].Key] = i;
+ }
- var summation = string.Format(@"SELECT ""A"".""FilesetID"" AS ""FilesetID"", COUNT(*) AS ""FileCount"", SUM(""C"".""Length"") AS ""FileSizes"" FROM ""FilesetEntry"" A, ""File"" B, ""Blockset"" C WHERE ""A"".""FileID"" = ""B"".""ID"" AND ""B"".""BlocksetID"" = ""C"".""ID"" AND ""A"".""FilesetID"" IN (SELECT DISTINCT ""FilesetID"" FROM ""{0}"") GROUP BY ""A"".""FilesetID"" ",m_tablename);
-
- using(var cmd = m_connection.CreateCommand())
- using (var rd = cmd.ExecuteReader(string.Format(@"SELECT DISTINCT ""A"".""FilesetID"", ""B"".""FileCount"", ""B"".""FileSizes"" FROM ""{0}"" A LEFT OUTER JOIN ( " + summation + @" ) B ON ""A"".""FilesetID"" = ""B"".""FilesetID"" ORDER BY ""A"".""Timestamp"" DESC ", m_tablename)))
- while(rd.Read())
+ var summation =
+ $@"SELECT ""A"".""FilesetID"" AS ""FilesetID"", COUNT(*) AS ""FileCount"", SUM(""C"".""Length"") AS ""FileSizes"" FROM ""FilesetEntry"" A, ""File"" B, ""Blockset"" C WHERE ""A"".""FileID"" = ""B"".""ID"" AND ""B"".""BlocksetID"" = ""C"".""ID"" AND ""A"".""FilesetID"" IN (SELECT DISTINCT ""FilesetID"" FROM ""{m_tablename}"") GROUP BY ""A"".""FilesetID"" ";
+
+ using (var cmd = m_connection.CreateCommand())
+ {
+ using (var rd = cmd.ExecuteReader(
+ $@"SELECT DISTINCT ""A"".""FilesetID"", ""A"".""IsFullBackup"", ""B"".""FileCount"", ""B"".""FileSizes"" FROM ""{m_tablename}"" A LEFT OUTER JOIN ( {summation} ) B ON ""A"".""FilesetID"" = ""B"".""FilesetID"" ORDER BY ""A"".""Timestamp"" DESC ")
+ )
{
- var id = rd.GetInt64(0);
- var e = dict[id];
-
- var filecount = rd.ConvertValueToInt64(1, -1L);
- var filesizes = rd.ConvertValueToInt64(2, -1L);
+ while (rd.Read())
+ {
+ var id = rd.GetInt64(0);
+ if (!dict.ContainsKey(id))
+ {
+ continue;
+ }
+ var isFullBackup = rd.GetInt32(1);
+ var e = dict[id];
+
+ var filecount = rd.ConvertValueToInt64(2, -1L);
+ var filesizes = rd.ConvertValueToInt64(3, -1L);
- yield return new Fileset(e, m_filesets[e].Value, filecount, filesizes);
+ yield return new Fileset(e, isFullBackup, m_filesets[e].Value, filecount, filesizes);
+ }
}
-
+ }
}
}
@@ -399,9 +443,11 @@ namespace Duplicati.Library.Main.Database
if (m_tablename != null)
{
try
- {
- using(var cmd = m_connection.CreateCommand())
+ {
+ using (var cmd = m_connection.CreateCommand())
+ {
cmd.ExecuteNonQuery(string.Format(@"DROP TABLE IF EXISTS ""{0}"" ", m_tablename));
+ }
}
catch {}
finally { m_tablename = null; }
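
The QuickSets enumerator above implements one rule: walking filesets newest-first, partial backups are listed only until the first full backup is seen, and everything older must be a full backup to appear. The same rule detached from the database, with value tuples as stand-ins for the real Fileset rows:

// Sketch: newest-first visibility filter, as QuickSets applies it.
static IEnumerable<(long Id, bool IsFull)> VisibleSets(IEnumerable<(long Id, bool IsFull)> newestFirst)
{
    var fullSeen = false;
    foreach (var set in newestFirst)
    {
        if (fullSeen && !set.IsFull)
            continue; // partials older than a full backup are superseded and hidden
        yield return set;
        if (set.IsFull)
            fullSeen = true;
    }
}
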
diff --git a/Duplicati/Library/Main/Database/LocalPurgeDatabase.cs b/Duplicati/Library/Main/Database/LocalPurgeDatabase.cs
index a0d71b868..dd7855cd9 100644
--- a/Duplicati/Library/Main/Database/LocalPurgeDatabase.cs
+++ b/Duplicati/Library/Main/Database/LocalPurgeDatabase.cs
@@ -60,12 +60,13 @@ namespace Duplicati.Library.Main.Database
public interface ITemporaryFileset : IDisposable
{
long ParentID { get; }
+ bool IsFullBackup { get; }
long RemovedFileCount { get; }
long RemovedFileSize { get; }
void ApplyFilter(Library.Utility.IFilter filter);
void ApplyFilter(Action<System.Data.IDbCommand, long, string> filtercommand);
- Tuple<long, long> ConvertToPermanentFileset(string name, DateTime timestamp);
+ Tuple<long, long> ConvertToPermanentFileset(string name, DateTime timestamp, bool isFullBackup);
IEnumerable<KeyValuePair<string, long>> ListAllDeletedFiles();
}
@@ -77,6 +78,7 @@ namespace Duplicati.Library.Main.Database
private readonly LocalPurgeDatabase m_parentdb;
public long ParentID { get; private set; }
+ public bool IsFullBackup { get; private set; }
public long RemovedFileCount { get; private set; }
public long RemovedFileSize { get; private set; }
@@ -163,16 +165,19 @@ namespace Duplicati.Library.Main.Database
}
}
- public Tuple<long, long> ConvertToPermanentFileset(string name, DateTime timestamp)
- {
+ public Tuple<long, long> ConvertToPermanentFileset(string name, DateTime timestamp, bool isFullBackup)
+ {
+ this.IsFullBackup = isFullBackup;
var remotevolid = m_parentdb.RegisterRemoteVolume(name, RemoteVolumeType.Files, RemoteVolumeState.Temporary, m_transaction);
var filesetid = m_parentdb.CreateFileset(remotevolid, timestamp, m_transaction);
+ m_parentdb.UpdateFullBackupStateInFileset(filesetid, isFullBackup);
+
using (var cmd = m_connection.CreateCommand(m_transaction))
cmd.ExecuteNonQuery(string.Format(@"INSERT INTO ""FilesetEntry"" (""FilesetID"", ""FileID"", ""Lastmodified"") SELECT ?, ""FileID"", ""LastModified"" FROM ""FilesetEntry"" WHERE ""FilesetID"" = ? AND ""FileID"" NOT IN ""{0}"" ", m_tablename), filesetid, ParentID);
return new Tuple<long, long>(remotevolid, filesetid);
}
-
+
public IEnumerable<KeyValuePair<string, long>> ListAllDeletedFiles()
{
using (var cmd = m_connection.CreateCommand(m_transaction))
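
ConvertToPermanentFileset now forces the caller to state whether the materialized fileset is full or partial and forwards that flag through UpdateFullBackupStateInFileset. A hedged call-site sketch; `tempset`, `newVolumeName`, `fileTime`, and `wasFullBackup` are assumed names, not part of this diff:

// Sketch: carry the source fileset's backup type onto the purged replacement.
var ids = tempset.ConvertToPermanentFileset(newVolumeName, fileTime, isFullBackup: wasFullBackup);
long newVolumeId = ids.Item1;  // newly registered dlist volume (state Temporary)
long newFilesetId = ids.Item2; // fileset row with IsFullBackup already applied
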
diff --git a/Duplicati/Library/Main/Database/LocalRecreateDatabase.cs b/Duplicati/Library/Main/Database/LocalRecreateDatabase.cs
index 769025026..2fec80c85 100644
--- a/Duplicati/Library/Main/Database/LocalRecreateDatabase.cs
+++ b/Duplicati/Library/Main/Database/LocalRecreateDatabase.cs
@@ -274,7 +274,7 @@ namespace Duplicati.Library.Main.Database
}
catch (Exception ex)
{
- Logging.Log.WriteErrorMessage(LOGTAG, "BlocksetInsertFailed", ex, "Blockset insert failed, comitting temporary tables for trace purposes");
+ Logging.Log.WriteErrorMessage(LOGTAG, "BlocksetInsertFailed", ex, "Blockset insert failed, committing temporary tables for trace purposes");
using (var fixcmd = m_connection.CreateCommand())
{
diff --git a/Duplicati/Library/Main/Duplicati.Library.Main.csproj b/Duplicati/Library/Main/Duplicati.Library.Main.csproj
index 91df740fa..d3edaee9e 100644
--- a/Duplicati/Library/Main/Duplicati.Library.Main.csproj
+++ b/Duplicati/Library/Main/Duplicati.Library.Main.csproj
@@ -217,6 +217,7 @@
</Target>
-->
<ItemGroup>
+ <EmbeddedResource Include="Database\Database schema\10. Add IsFullBackup to Fileset table.sql" />
<Content Include="default_compressed_extensions.txt">
<CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
</Content>
diff --git a/Duplicati/Library/Main/Operation/Backup/BackupDatabase.cs b/Duplicati/Library/Main/Operation/Backup/BackupDatabase.cs
index 325e0664c..cccb9f480 100644
--- a/Duplicati/Library/Main/Operation/Backup/BackupDatabase.cs
+++ b/Duplicati/Library/Main/Operation/Backup/BackupDatabase.cs
@@ -248,6 +248,11 @@ namespace Duplicati.Library.Main.Operation.Backup
return RunOnMain(() => m_database.WriteFileset(fsw, filesetid, m_transaction));
}
+ public Task UpdateFilesetAndMarkAsFullBackupAsync(long filesetid)
+ {
+ return RunOnMain(() => m_database.UpdateFullBackupStateInFileset(filesetid, true, m_transaction));
+ }
+
public Task<string[]> GetMissingIndexFilesAsync()
{
return RunOnMain(() => m_database.GetMissingIndexFiles(m_transaction).ToArray());
@@ -282,6 +287,7 @@ namespace Duplicati.Library.Main.Operation.Backup
{
return RunOnMain(() => m_database.UpdateChangeJournalData(journalData, lastfilesetid, m_transaction));
}
+
}
}
diff --git a/Duplicati/Library/Main/Operation/Backup/CountFilesHandler.cs b/Duplicati/Library/Main/Operation/Backup/CountFilesHandler.cs
index 3247c633c..eaa898b19 100644
--- a/Duplicati/Library/Main/Operation/Backup/CountFilesHandler.cs
+++ b/Duplicati/Library/Main/Operation/Backup/CountFilesHandler.cs
@@ -1,26 +1,26 @@
-// Copyright (C) 2015, The Duplicati Team
-// http://www.duplicati.com, info@duplicati.com
+#region Disclaimer / License
+// Copyright (C) 2019, The Duplicati Team
+// http://www.duplicati.com, info@duplicati.com
//
-// This library is free software; you can redistribute it and/or modify
-// it under the terms of the GNU Lesser General Public License as
-// published by the Free Software Foundation; either version 2.1 of the
-// License, or (at your option) any later version.
+// This library is free software; you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public
+// License as published by the Free Software Foundation; either
+// version 2.1 of the License, or (at your option) any later version.
//
-// This library is distributed in the hope that it will be useful, but
-// WITHOUT ANY WARRANTY; without even the implied warranty of
-// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
-// Lesser General Public License for more details.
+// This library is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+// Lesser General Public License for more details.
//
-// You should have received a copy of the GNU Lesser General Public
-// License along with this library; if not, write to the Free Software
-// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
-using System;
+// You should have received a copy of the GNU Lesser General Public
+// License along with this library; if not, write to the Free Software
+// Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
+//
+#endregion
using System.Collections.Generic;
using System.Threading.Tasks;
using Duplicati.Library.Utility;
-using System.Threading;
using CoCoL;
-using Duplicati.Library.Snapshots;
namespace Duplicati.Library.Main.Operation.Backup
{
@@ -33,8 +33,8 @@ namespace Duplicati.Library.Main.Operation.Backup
using(Logging.Log.StartIsolatingScope(true))
using (new IsolatedChannelScope())
{
- var enumeratorTask = Backup.FileEnumerationProcess.Run(sources, snapshot, null, options.FileAttributeFilter, sourcefilter, filter, options.SymlinkPolicy, options.HardlinkPolicy, options.ExcludeEmptyFolders, options.IgnoreFilenames, options.ChangedFilelist, taskreader);
- var counterTask = AutomationExtensions.RunTask(new
+ var enumeratorTask = Backup.FileEnumerationProcess.Run(sources, snapshot, null, options.FileAttributeFilter, sourcefilter, filter, options.SymlinkPolicy, options.HardlinkPolicy, options.ExcludeEmptyFolders, options.IgnoreFilenames, options.ChangedFilelist, taskreader, token);
+ var counterTask = AutomationExtensions.RunTask(new
{
Input = Backup.Channels.SourcePaths.ForRead
},
diff --git a/Duplicati/Library/Main/Operation/Backup/FileBlockProcessor.cs b/Duplicati/Library/Main/Operation/Backup/FileBlockProcessor.cs
index 0acaa5b30..dcf1a4229 100644
--- a/Duplicati/Library/Main/Operation/Backup/FileBlockProcessor.cs
+++ b/Duplicati/Library/Main/Operation/Backup/FileBlockProcessor.cs
@@ -1,28 +1,27 @@
-// Copyright (C) 2015, The Duplicati Team
-// http://www.duplicati.com, info@duplicati.com
+#region Disclaimer / License
+// Copyright (C) 2019, The Duplicati Team
+// http://www.duplicati.com, info@duplicati.com
//
-// This library is free software; you can redistribute it and/or modify
-// it under the terms of the GNU Lesser General Public License as
-// published by the Free Software Foundation; either version 2.1 of the
-// License, or (at your option) any later version.
+// This library is free software; you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public
+// License as published by the Free Software Foundation; either
+// version 2.1 of the License, or (at your option) any later version.
//
-// This library is distributed in the hope that it will be useful, but
-// WITHOUT ANY WARRANTY; without even the implied warranty of
-// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
-// Lesser General Public License for more details.
+// This library is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+// Lesser General Public License for more details.
//
-// You should have received a copy of the GNU Lesser General Public
-// License along with this library; if not, write to the Free Software
-// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+// You should have received a copy of the GNU Lesser General Public
+// License along with this library; if not, write to the Free Software
+// Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
+//
+#endregion
using System;
using CoCoL;
using Duplicati.Library.Main.Operation.Common;
using System.Threading.Tasks;
-using System.Collections.Generic;
-using Duplicati.Library.Utility;
-using System.Linq;
-using Duplicati.Library.Interface;
-using System.IO;
+using System.Threading;
namespace Duplicati.Library.Main.Operation.Backup
{
@@ -36,7 +35,7 @@ namespace Duplicati.Library.Main.Operation.Backup
/// </summary>
private static readonly string FILELOGTAG = Logging.Log.LogTagFromType(typeof(FileBlockProcessor)) + ".FileEntry";
- public static Task Run(Snapshots.ISnapshotService snapshot, Options options, BackupDatabase database, BackupStatsCollector stats, ITaskReader taskreader)
+ public static Task Run(Snapshots.ISnapshotService snapshot, Options options, BackupDatabase database, BackupStatsCollector stats, ITaskReader taskreader, CancellationToken token)
{
return AutomationExtensions.RunTask(
new
@@ -53,6 +52,11 @@ namespace Duplicati.Library.Main.Operation.Backup
try
{
+ if (token.IsCancellationRequested)
+ {
+ break;
+ }
+
var hint = options.GetCompressionHintFromFilename(e.Path);
var oldHash = e.OldId < 0 ? null : await database.GetFileHashAsync(e.OldId);
@@ -137,10 +141,7 @@ namespace Duplicati.Library.Main.Operation.Backup
Logging.Log.WriteWarningMessage(FILELOGTAG, "PathProcessingFailed", ex, "Failed to process path: {0}", e.Path);
}
}
- }
- );
-
-
+ });
}
}
}
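
The token check added above gives the file-block worker the standard cooperative-cancellation shape: poll once per dequeued item and break instead of throwing, so the pipeline drains and the fileset is left marked partial. The loop skeleton, with the channel plumbing elided and `ProcessFileAsync` as a stand-in for the real block and hash work:

// Sketch: per-item polling inside the CoCoL worker loop.
while (true)
{
    var e = await self.Input.ReadAsync();
    if (token.IsCancellationRequested)
        break; // stop consuming; unprocessed entries simply miss this (partial) backup
    await ProcessFileAsync(e);
}
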
diff --git a/Duplicati/Library/Main/Operation/Backup/FileEnumerationProcess.cs b/Duplicati/Library/Main/Operation/Backup/FileEnumerationProcess.cs
index ec924dbfe..e816fbbab 100644
--- a/Duplicati/Library/Main/Operation/Backup/FileEnumerationProcess.cs
+++ b/Duplicati/Library/Main/Operation/Backup/FileEnumerationProcess.cs
@@ -1,30 +1,33 @@
-// Copyright (C) 2015, The Duplicati Team
-// http://www.duplicati.com, info@duplicati.com
+#region Disclaimer / License
+// Copyright (C) 2019, The Duplicati Team
+// http://www.duplicati.com, info@duplicati.com
//
-// This library is free software; you can redistribute it and/or modify
-// it under the terms of the GNU Lesser General Public License as
-// published by the Free Software Foundation; either version 2.1 of the
-// License, or (at your option) any later version.
+// This library is free software; you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public
+// License as published by the Free Software Foundation; either
+// version 2.1 of the License, or (at your option) any later version.
//
-// This library is distributed in the hope that it will be useful, but
-// WITHOUT ANY WARRANTY; without even the implied warranty of
-// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
-// Lesser General Public License for more details.
+// This library is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+// Lesser General Public License for more details.
//
-// You should have received a copy of the GNU Lesser General Public
-// License along with this library; if not, write to the Free Software
-// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+// You should have received a copy of the GNU Lesser General Public
+// License along with this library; if not, write to the Free Software
+// Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
+//
+#endregion
using System;
using CoCoL;
using System.Threading.Tasks;
using System.IO;
using System.Collections.Generic;
using System.Linq;
-using Duplicati.Library.Interface;
+using System.Threading;
using Duplicati.Library.Main.Operation.Common;
using Duplicati.Library.Snapshots;
-using Duplicati.Library.Common.IO;
-
+using Duplicati.Library.Common.IO;
+
namespace Duplicati.Library.Main.Operation.Backup
{
/// <summary>
@@ -39,7 +42,7 @@ namespace Duplicati.Library.Main.Operation.Backup
/// </summary>
private static readonly string FILTER_LOGTAG = Logging.Log.LogTagFromType(typeof(FileEnumerationProcess));
- public static Task Run(IEnumerable<string> sources, Snapshots.ISnapshotService snapshot, UsnJournalService journalService, FileAttributes fileAttributes, Duplicati.Library.Utility.IFilter sourcefilter, Duplicati.Library.Utility.IFilter emitfilter, Options.SymlinkStrategy symlinkPolicy, Options.HardlinkStrategy hardlinkPolicy, bool excludeemptyfolders, string[] ignorenames, string[] changedfilelist, ITaskReader taskreader)
+ public static Task Run(IEnumerable<string> sources, Snapshots.ISnapshotService snapshot, UsnJournalService journalService, FileAttributes fileAttributes, Duplicati.Library.Utility.IFilter sourcefilter, Duplicati.Library.Utility.IFilter emitfilter, Options.SymlinkStrategy symlinkPolicy, Options.HardlinkStrategy hardlinkPolicy, bool excludeemptyfolders, string[] ignorenames, string[] changedfilelist, ITaskReader taskreader, CancellationToken token)
{
return AutomationExtensions.RunTask(
new
@@ -49,66 +52,86 @@ namespace Duplicati.Library.Main.Operation.Backup
async self =>
{
- var hardlinkmap = new Dictionary<string, string>();
- var mixinqueue = new Queue<string>();
- Duplicati.Library.Utility.IFilter enumeratefilter = emitfilter;
-
- bool includes;
- bool excludes;
- Library.Utility.FilterExpression.AnalyzeFilters(emitfilter, out includes, out excludes);
- if (includes && !excludes)
- enumeratefilter = Library.Utility.FilterExpression.Combine(emitfilter, new Duplicati.Library.Utility.FilterExpression("*" + System.IO.Path.DirectorySeparatorChar, true));
-
- // Simplify checking for an empty list
- if (ignorenames != null && ignorenames.Length == 0)
- ignorenames = null;
-
- // If we have a specific list, use that instead of enumerating the filesystem
- IEnumerable<string> worklist;
- if (changedfilelist != null && changedfilelist.Length > 0)
+ if (!token.IsCancellationRequested)
{
- worklist = changedfilelist.Where(x =>
+ var hardlinkmap = new Dictionary<string, string>();
+ var mixinqueue = new Queue<string>();
+ Duplicati.Library.Utility.IFilter enumeratefilter = emitfilter;
+
+ bool includes;
+ bool excludes;
+ Library.Utility.FilterExpression.AnalyzeFilters(emitfilter, out includes, out excludes);
+ if (includes && !excludes)
+ enumeratefilter = Library.Utility.FilterExpression.Combine(emitfilter, new Duplicati.Library.Utility.FilterExpression("*" + System.IO.Path.DirectorySeparatorChar, true));
+
+ // Simplify checking for an empty list
+ if (ignorenames != null && ignorenames.Length == 0)
+ ignorenames = null;
+
+ // If we have a specific list, use that instead of enumerating the filesystem
+ IEnumerable<string> worklist;
+ if (changedfilelist != null && changedfilelist.Length > 0)
{
- var fa = FileAttributes.Normal;
- try
+ worklist = changedfilelist.Where(x =>
{
- fa = snapshot.GetAttributes(x);
- }
- catch
+ var fa = FileAttributes.Normal;
+ try
+ {
+ fa = snapshot.GetAttributes(x);
+ }
+ catch
+ {
+ }
+
+ if (token.IsCancellationRequested)
+ {
+ return false;
+ }
+
+ return AttributeFilter(x, fa, snapshot, sourcefilter, hardlinkPolicy, symlinkPolicy, hardlinkmap, fileAttributes, enumeratefilter, ignorenames, mixinqueue);
+ });
+ }
+ else
+ {
+ Library.Utility.Utility.EnumerationFilterDelegate attributeFilter = (root, path, attr) =>
+ AttributeFilter(path, attr, snapshot, sourcefilter, hardlinkPolicy, symlinkPolicy, hardlinkmap, fileAttributes, enumeratefilter, ignorenames, mixinqueue);
+
+ if (journalService != null)
{
+ // filter sources using USN journal, to obtain a sub-set of files / folders that may have been modified
+ sources = journalService.GetModifiedSources(attributeFilter);
}
- return AttributeFilter(x, fa, snapshot, sourcefilter, hardlinkPolicy, symlinkPolicy, hardlinkmap, fileAttributes, enumeratefilter, ignorenames, mixinqueue);
- });
- }
- else
- {
- Library.Utility.Utility.EnumerationFilterDelegate attributeFilter = (root, path, attr) =>
- AttributeFilter(path, attr, snapshot, sourcefilter, hardlinkPolicy, symlinkPolicy, hardlinkmap, fileAttributes, enumeratefilter, ignorenames, mixinqueue);
+ worklist = snapshot.EnumerateFilesAndFolders(sources, attributeFilter, (rootpath, errorpath, ex) =>
+ {
+ Logging.Log.WriteWarningMessage(FILTER_LOGTAG, "FileAccessError", ex, "Error reported while accessing file: {0}", errorpath);
+ });
+ }
- if (journalService != null)
+ if (token.IsCancellationRequested)
{
- // filter sources using USN journal, to obtain a sub-set of files / folders that may have been modified
- sources = journalService.GetModifiedSources(attributeFilter);
+ return;
}
- worklist = snapshot.EnumerateFilesAndFolders(sources, attributeFilter, (rootpath, errorpath, ex) =>
- {
- Logging.Log.WriteWarningMessage(FILTER_LOGTAG, "FileAccessError", ex, "Error reported while accessing file: {0}", errorpath);
- });
- }
+ var source = ExpandWorkList(worklist, mixinqueue, emitfilter, enumeratefilter);
+ if (excludeemptyfolders)
+ source = ExcludeEmptyFolders(source);
- var source = ExpandWorkList(worklist, mixinqueue, emitfilter, enumeratefilter);
- if (excludeemptyfolders)
- source = ExcludeEmptyFolders(source);
+ // Process each path, and dequeue the mixins with symlinks as we go
+ foreach (var s in source)
+ {
+ if (token.IsCancellationRequested)
+ {
+ break;
+ }
- // Process each path, and dequeue the mixins with symlinks as we go
- foreach (var s in source)
- {
- if (!await taskreader.ProgressAsync)
- return;
+ if (!await taskreader.ProgressAsync)
+ {
+ return;
+ }
- await self.Output.WriteAsync(s);
+ await self.Output.WriteAsync(s);
+ }
}
});
}
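
Three cancellation points are threaded through the enumeration above: the whole body is skipped when cancellation is already pending, the changed-file-list predicate stops admitting paths mid-scan, and the emit loop breaks before writing further entries. The predicate is the least obvious of the three; reduced to a sketch, with `PassesAttributeFilter` standing in for the real AttributeFilter(...) call:

// Sketch: a path filter that short-circuits after cancellation so the
// Where(...) pipeline drains without emitting more work items.
Func<string, bool> admit = path =>
{
    if (token.IsCancellationRequested)
        return false;
    return PassesAttributeFilter(path);
};
var worklist = changedfilelist.Where(admit);
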
diff --git a/Duplicati/Library/Main/Operation/Backup/MetadataPreProcess.cs b/Duplicati/Library/Main/Operation/Backup/MetadataPreProcess.cs
index c891fe717..bc72b7be7 100644
--- a/Duplicati/Library/Main/Operation/Backup/MetadataPreProcess.cs
+++ b/Duplicati/Library/Main/Operation/Backup/MetadataPreProcess.cs
@@ -19,6 +19,8 @@ using CoCoL;
using System.Threading.Tasks;
using System.IO;
using System.Collections.Generic;
+using System.Security.Principal;
+using System.Threading;
using Duplicati.Library.Interface;
using Duplicati.Library.Main.Operation.Common;
using Duplicati.Library.Snapshots;
@@ -58,7 +60,7 @@ namespace Duplicati.Library.Main.Operation.Backup
public bool MetadataChanged;
}
- public static Task Run(Snapshots.ISnapshotService snapshot, Options options, BackupDatabase database, long lastfilesetid)
+ public static Task Run(Snapshots.ISnapshotService snapshot, Options options, BackupDatabase database, long lastfilesetid, CancellationToken token)
{
return AutomationExtensions.RunTask(new
{
@@ -118,7 +120,8 @@ namespace Duplicati.Library.Main.Operation.Backup
if (CHECKFILETIMEONLY || DISABLEFILETIMECHECK)
{
var tmp = await database.GetFileLastModifiedAsync(prefixid, split.Value, lastfilesetid, false);
- await self.Output.WriteAsync(new FileEntry() {
+ await self.Output.WriteAsync(new FileEntry
+ {
OldId = tmp.Item1,
Path = path,
PathPrefixID = prefixid,
@@ -126,7 +129,7 @@ namespace Duplicati.Library.Main.Operation.Backup
Attributes = attributes,
LastWrite = lastwrite,
OldModified = tmp.Item2,
- LastFileSize = tmp.Item3 ,
+ LastFileSize = tmp.Item3,
OldMetaHash = null,
OldMetaSize = -1
});
@@ -134,7 +137,8 @@ namespace Duplicati.Library.Main.Operation.Backup
else
{
var res = await database.GetFileEntryAsync(prefixid, split.Value, lastfilesetid);
- await self.Output.WriteAsync(new FileEntry() {
+ await self.Output.WriteAsync(new FileEntry
+ {
OldId = res == null ? -1 : res.id,
Path = path,
PathPrefixID = prefixid,
@@ -148,9 +152,14 @@ namespace Duplicati.Library.Main.Operation.Backup
});
}
}
- catch(Exception ex)
+ catch (Exception ex)
{
- Logging.Log.WriteWarningMessage(FILELOGTAG, "ProcessingMetadataFailed", ex, "Failed to process entry, path: {0}", path);
+ if (ex.IsRetiredException() || token.IsCancellationRequested)
+ {
+ continue;
+ }
+ Logging.Log.WriteWarningMessage(FILELOGTAG, "ProcessingMetadataFailed", ex,
+ "Failed to process entry, path: {0}", path);
}
}
}
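
The widened catch above treats two situations as normal shutdown rather than per-file failures: the downstream CoCoL channel has been retired, or the run was cancelled. The same idiom reads more directly as an exception filter; a sketch using the `self.Output` and `token` from the surrounding task body, with `entry` standing in for the FileEntry being written:

// Sketch: shutdown-aware handling around a channel write.
try
{
    await self.Output.WriteAsync(entry);
}
catch (Exception ex) when (ex.IsRetiredException() || token.IsCancellationRequested)
{
    // channel retired or run cancelled: expected during teardown, so no warning is logged
}
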
diff --git a/Duplicati/Library/Main/Operation/Backup/UploadRealFilelist.cs b/Duplicati/Library/Main/Operation/Backup/UploadRealFilelist.cs
index 2b3c6518d..30984f89d 100644
--- a/Duplicati/Library/Main/Operation/Backup/UploadRealFilelist.cs
+++ b/Duplicati/Library/Main/Operation/Backup/UploadRealFilelist.cs
@@ -18,7 +18,6 @@ using System;
using System.Threading.Tasks;
using CoCoL;
using Duplicati.Library.Main.Volumes;
-using System.IO;
namespace Duplicati.Library.Main.Operation.Backup
{
@@ -63,6 +62,9 @@ namespace Duplicati.Library.Main.Operation.Backup
return;
await db.WriteFilesetAsync(filesetvolume, filesetid);
+
+ filesetvolume.AddFilelistFile();
+
filesetvolume.Close();
if (!await taskreader.ProgressAsync)
diff --git a/Duplicati/Library/Main/Operation/Backup/UploadSyntheticFilelist.cs b/Duplicati/Library/Main/Operation/Backup/UploadSyntheticFilelist.cs
index 4b97e6886..e92db5226 100644
--- a/Duplicati/Library/Main/Operation/Backup/UploadSyntheticFilelist.cs
+++ b/Duplicati/Library/Main/Operation/Backup/UploadSyntheticFilelist.cs
@@ -111,6 +111,8 @@ namespace Duplicati.Library.Main.Operation.Backup
foreach(var p in options.ControlFiles.Split(new char[] { System.IO.Path.PathSeparator }, StringSplitOptions.RemoveEmptyEntries))
fsw.AddControlFile(p, options.GetCompressionHintFromFilename(p));
+ fsw.AddFilelistFile();
+
var newFilesetID = await database.CreateFilesetAsync(fsw.VolumeID, fileTime);
await database.LinkFilesetToVolumeAsync(newFilesetID, fsw.VolumeID);
await database.AppendFilesFromPreviousSetAsync(null, newFilesetID, prevId, fileTime);
diff --git a/Duplicati/Library/Main/Operation/BackupHandler.cs b/Duplicati/Library/Main/Operation/BackupHandler.cs
index 752981236..89a88461c 100644
--- a/Duplicati/Library/Main/Operation/BackupHandler.cs
+++ b/Duplicati/Library/Main/Operation/BackupHandler.cs
@@ -1,29 +1,25 @@
#region Disclaimer / License
-
-// Copyright (C) 2015, The Duplicati Team
+// Copyright (C) 2019, The Duplicati Team
// http://www.duplicati.com, info@duplicati.com
-//
+//
// This library is free software; you can redistribute it and/or
// modify it under the terms of the GNU Lesser General Public
// License as published by the Free Software Foundation; either
// version 2.1 of the License, or (at your option) any later version.
-//
+//
// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
// Lesser General Public License for more details.
-//
+//
// You should have received a copy of the GNU Lesser General Public
// License along with this library; if not, write to the Free Software
// Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
-//
-
+//
#endregion
using System;
using System.Collections.Generic;
using System.Linq;
-using System.Text;
-using System.IO;
using Duplicati.Library.Main.Database;
using Duplicati.Library.Main.Volumes;
using Duplicati.Library.Interface;
@@ -34,6 +30,8 @@ using Duplicati.Library.Snapshots;
using Duplicati.Library.Utility;
using Duplicati.Library.Common.IO;
using Duplicati.Library.Common;
+using Duplicati.Library.Main.Operation.Backup;
+using Duplicati.Library.Main.Operation.Common;
namespace Duplicati.Library.Main.Operation
{
@@ -60,6 +58,8 @@ namespace Duplicati.Library.Main.Operation
private readonly BackupResults m_result;
+ public CancellationTokenSource cancellationTokenSource = new CancellationTokenSource();
+
public BackupHandler(string backendurl, Options options, BackupResults results)
{
m_options = options;
@@ -103,7 +103,7 @@ namespace Duplicati.Library.Main.Operation
{
if (m_options.UsnStrategy == Options.OptimizationStrategy.Off) return null;
var journalData = m_database.GetChangeJournalData(lastfilesetid);
- var service = new UsnJournalService(sources, snapshot, filter, journalData);
+ var service = new UsnJournalService(sources, snapshot, filter, journalData, cancellationTokenSource.Token);
foreach (var volumeData in service.VolumeDataList)
{
@@ -170,36 +170,41 @@ namespace Duplicati.Library.Main.Operation
/// <summary>
/// Performs the bulk of work by starting all relevant processes
/// </summary>
- private static async Task RunMainOperation(IEnumerable<string> sources, Snapshots.ISnapshotService snapshot, UsnJournalService journalService, Backup.BackupDatabase database, Backup.BackupStatsCollector stats, Options options, IFilter sourcefilter, IFilter filter, BackupResults result, Common.ITaskReader taskreader, long lastfilesetid)
+ private static async Task RunMainOperation(IEnumerable<string> sources, Snapshots.ISnapshotService snapshot, UsnJournalService journalService, Backup.BackupDatabase database, Backup.BackupStatsCollector stats, Options options, IFilter sourcefilter, IFilter filter, BackupResults result, Common.ITaskReader taskreader, long filesetid, long lastfilesetid, CancellationToken token)
{
- using(new Logging.Timer(LOGTAG, "BackupMainOperation", "BackupMainOperation"))
+ using (new Logging.Timer(LOGTAG, "BackupMainOperation", "BackupMainOperation"))
{
// Make sure the CompressionHints table is initialized, otherwise all workers will initialize it
var unused = options.CompressionHints.Count;
Task all;
- using(new ChannelScope())
+ using (new ChannelScope())
{
all = Task.WhenAll(
- new []
- {
- Backup.DataBlockProcessor.Run(database, options, taskreader),
- Backup.FileBlockProcessor.Run(snapshot, options, database, stats, taskreader),
- Backup.StreamBlockSplitter.Run(options, database, taskreader),
- Backup.FileEnumerationProcess.Run(sources, snapshot, journalService, options.FileAttributeFilter, sourcefilter, filter, options.SymlinkPolicy, options.HardlinkPolicy, options.ExcludeEmptyFolders, options.IgnoreFilenames, options.ChangedFilelist, taskreader),
- Backup.FilePreFilterProcess.Run(snapshot, options, stats, database),
- Backup.MetadataPreProcess.Run(snapshot, options, database, lastfilesetid),
- Backup.SpillCollectorProcess.Run(options, database, taskreader),
- Backup.ProgressHandler.Run(result)
- }
- // Spawn additional block hashers
- .Union(
- Enumerable.Range(0, options.ConcurrencyBlockHashers - 1).Select(x => Backup.StreamBlockSplitter.Run(options, database, taskreader))
- )
- // Spawn additional compressors
- .Union(
- Enumerable.Range(0, options.ConcurrencyCompressors - 1).Select(x => Backup.DataBlockProcessor.Run(database, options, taskreader))
- )
+ new[]
+ {
+ Backup.DataBlockProcessor.Run(database, options, taskreader),
+ Backup.FileBlockProcessor.Run(snapshot, options, database, stats, taskreader, token),
+ Backup.StreamBlockSplitter.Run(options, database, taskreader),
+ Backup.FileEnumerationProcess.Run(sources, snapshot, journalService,
+ options.FileAttributeFilter, sourcefilter, filter, options.SymlinkPolicy,
+ options.HardlinkPolicy, options.ExcludeEmptyFolders, options.IgnoreFilenames,
+ options.ChangedFilelist, taskreader, token),
+ Backup.FilePreFilterProcess.Run(snapshot, options, stats, database),
+ Backup.MetadataPreProcess.Run(snapshot, options, database, lastfilesetid, token),
+ Backup.SpillCollectorProcess.Run(options, database, taskreader),
+ Backup.ProgressHandler.Run(result)
+ }
+ // Spawn additional block hashers
+ .Union(
+ Enumerable.Range(0, options.ConcurrencyBlockHashers - 1).Select(x =>
+ Backup.StreamBlockSplitter.Run(options, database, taskreader))
+ )
+ // Spawn additional compressors
+ .Union(
+ Enumerable.Range(0, options.ConcurrencyCompressors - 1).Select(x =>
+ Backup.DataBlockProcessor.Run(database, options, taskreader))
+ )
);
}
@@ -235,7 +240,17 @@ namespace Duplicati.Library.Main.Operation
await database.UpdateChangeJournalDataAsync(data, lastfilesetid);
}
}
-
+
+ if (token.IsCancellationRequested)
+ {
+ result.PartialBackup = true;
+ }
+ else
+ {
+ result.PartialBackup = false;
+ await database.UpdateFilesetAndMarkAsFullBackupAsync(filesetid);
+ }
+
result.OperationProgressUpdater.UpdatefileCount(result.ExaminedFiles, result.SizeOfExaminedFiles, true);
}
}
@@ -319,9 +334,9 @@ namespace Duplicati.Library.Main.Operation
}
}
- public void Run(string[] sources, Library.Utility.IFilter filter)
+ public void Run(string[] sources, Library.Utility.IFilter filter, CancellationToken token)
{
- RunAsync(sources, filter).WaitForTaskOrThrow();
+ RunAsync(sources, filter, token).WaitForTaskOrThrow();
}
private static Exception BuildException(Exception source, params Task[] tasks)
@@ -361,7 +376,7 @@ namespace Duplicati.Library.Main.Operation
return await flushReq.LastWriteSizeAsync;
}
- private async Task RunAsync(string[] sources, Library.Utility.IFilter filter)
+ private async Task RunAsync(string[] sources, Library.Utility.IFilter filter, CancellationToken token)
{
m_result.OperationProgressUpdater.UpdatePhase(OperationPhase.Backup_Begin);
@@ -475,7 +490,9 @@ namespace Duplicati.Library.Main.Operation
// Run the backup operation
if (await m_result.TaskReader.ProgressAsync)
- await RunMainOperation(sources, snapshot, journalService, db, stats, m_options, m_sourceFilter, m_filter, m_result, m_result.TaskReader, lastfilesetid).ConfigureAwait(false);
+ {
+ await RunMainOperation(sources, snapshot, journalService, db, stats, m_options, m_sourceFilter, m_filter, m_result, m_result.TaskReader, filesetid, lastfilesetid, token).ConfigureAwait(false);
+ }
}
finally
{
@@ -484,8 +501,11 @@ namespace Duplicati.Library.Main.Operation
}
}
+ // Add the fileset file to the dlist file
+ filesetvolume.CreateFilesetFile(!token.IsCancellationRequested);
+
// Ensure the database is in a sane state after adding data
- using(new Logging.Timer(LOGTAG, "VerifyConsistency", "VerifyConsistency"))
+ using (new Logging.Timer(LOGTAG, "VerifyConsistency", "VerifyConsistency"))
await db.VerifyConsistencyAsync(m_options.Blocksize, m_options.BlockhashSize, false);
// Send the actual filelist
@@ -523,7 +543,7 @@ namespace Duplicati.Library.Main.Operation
m_transaction = null;
- if (m_result.TaskControlRendevouz() != TaskControlState.Stop)
+ if (m_result.TaskControlRendevouz() != TaskControlState.Abort)
{
if (m_options.NoBackendverification)
UpdateStorageStatsFromDatabase();
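
The net effect of the BackupHandler changes: the result is flagged partial exactly when the token fired, the fileset row is promoted to full otherwise, and the dlist manifest records the same state through CreateFilesetFile. Note also that the task-control check now distinguishes Stop (finish as a partial backup, still verify) from Abort (skip post-backup verification). Condensed into one sketch, using the `result`, `database`, `filesetid`, and `filesetvolume` from the surrounding code:

// Sketch: the end-of-run bookkeeping this commit adds, gathered in one place.
result.PartialBackup = token.IsCancellationRequested;
if (!result.PartialBackup)
    await database.UpdateFilesetAndMarkAsFullBackupAsync(filesetid); // flips IsFullBackup to FULL_BACKUP
filesetvolume.CreateFilesetFile(!token.IsCancellationRequested);     // manifest mirrors the flag
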
diff --git a/Duplicati/Library/Main/Operation/DeleteHandler.cs b/Duplicati/Library/Main/Operation/DeleteHandler.cs
index dcf31f9af..9d15bbe27 100644
--- a/Duplicati/Library/Main/Operation/DeleteHandler.cs
+++ b/Duplicati/Library/Main/Operation/DeleteHandler.cs
@@ -1,24 +1,26 @@
-// Copyright (C) 2013, The Duplicati Team
-
-// http://www.duplicati.com, opensource@duplicati.com
+#region Disclaimer / License
+// Copyright (C) 2019, The Duplicati Team
+// http://www.duplicati.com, info@duplicati.com
+//
+// This library is free software; you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public
+// License as published by the Free Software Foundation; either
+// version 2.1 of the License, or (at your option) any later version.
//
-// This library is free software; you can redistribute it and/or modify
-// it under the terms of the GNU Lesser General Public License as
-// published by the Free Software Foundation; either version 2.1 of the
-// License, or (at your option) any later version.
+// This library is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+// Lesser General Public License for more details.
//
-// This library is distributed in the hope that it will be useful, but
-// WITHOUT ANY WARRANTY; without even the implied warranty of
-// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
-// Lesser General Public License for more details.
+// You should have received a copy of the GNU Lesser General Public
+// License along with this library; if not, write to the Free Software
+// Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
//
-// You should have received a copy of the GNU Lesser General Public
-// License along with this library; if not, write to the Free Software
-// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+#endregion
using System;
using System.Linq;
-using System.Text;
using System.Collections.Generic;
+using System.Globalization;
using Duplicati.Library.Interface;
namespace Duplicati.Library.Main.Operation
@@ -44,7 +46,7 @@ namespace Duplicati.Library.Main.Operation
m_options = options;
m_result = result;
}
-
+
public void Run()
{
if (!System.IO.File.Exists(m_options.Dbpath))
@@ -94,7 +96,7 @@ namespace Duplicati.Library.Main.Operation
var filesetNumbers = db.FilesetTimes.Zip(Enumerable.Range(0, db.FilesetTimes.Count()), (a, b) => new Tuple<long, DateTime>(b, a.Value)).ToList();
var sets = db.FilesetTimes.Select(x => x.Value).ToArray();
- var toDelete = GetFilesetsToDelete(sets);
+ var toDelete = GetFilesetsToDelete(db, sets);
if (!m_options.AllowFullRemoval && sets.Length == toDelete.Length)
{
@@ -173,45 +175,91 @@ namespace Duplicati.Library.Main.Operation
/// </summary>
/// <returns>The filesets to delete</returns>
/// <param name="allBackups">The list of backups that can be deleted</param>
- private DateTime[] GetFilesetsToDelete(DateTime[] allBackups)
+ private DateTime[] GetFilesetsToDelete(Database.LocalDeleteDatabase db, DateTime[] allBackups)
{
if (allBackups.Length == 0)
+ {
return allBackups;
+ }
+
+ DateTime[] sortedAllBackups = allBackups.OrderByDescending(x => x.ToUniversalTime()).ToArray();
- if (allBackups.Select(x => x.ToUniversalTime()).Distinct().Count() != allBackups.Length)
- throw new Exception(string.Format("List of backup timestamps contains duplicates: {0}", string.Join(", ", allBackups.Select(x => x.ToString()))));
+ if (sortedAllBackups.Select(x => x.ToUniversalTime()).Distinct().Count() != sortedAllBackups.Length)
+ {
+ throw new Exception($"List of backup timestamps contains duplicates: {string.Join(", ", sortedAllBackups.Select(x => x.ToString()))}");
+ }
List<DateTime> toDelete = new List<DateTime>();
- // Remove backups explicitely specified via option
+ // Remove backups explicitly specified via option
var versions = m_options.Version;
if (versions != null && versions.Length > 0)
+ {
foreach (var ix in versions.Distinct())
- if (ix >= 0 && ix < allBackups.Length)
- toDelete.Add(allBackups[ix]);
+ {
+ if (ix >= 0 && ix < sortedAllBackups.Length)
+ {
+ toDelete.Add(sortedAllBackups[ix]);
+ }
+ }
+ }
// Remove backups that are older than date specified via option
var keepTime = m_options.KeepTime;
if (keepTime.Ticks > 0)
- toDelete.AddRange(allBackups.SkipWhile(x => x >= keepTime));
+ {
+ toDelete.AddRange(sortedAllBackups.SkipWhile(x => x >= keepTime));
+ }
- // Remove backups via rentention policy option
- toDelete.AddRange(ApplyRetentionPolicy(allBackups));
+ // Remove backups via retention policy option
+ toDelete.AddRange(ApplyRetentionPolicy(db, sortedAllBackups));
- // Check how many backups will be remaining after the previous steps
+ // Check how many full backups will remain after the previous steps
// and remove oldest backups while there are still more backups than should be kept as specified via option
- var backupsRemaining = allBackups.Except(toDelete).ToList();
- var keepVersions = m_options.KeepVersions;
- if (keepVersions > 0 && keepVersions < backupsRemaining.Count())
- toDelete.AddRange(backupsRemaining.Skip(keepVersions));
-
- var toDeleteDistinct = toDelete.Distinct().OrderByDescending(x => x.ToUniversalTime()).AsEnumerable();
+ var backupsRemaining = sortedAllBackups.Except(toDelete).ToList();
+ var fullVersionsToKeep = m_options.KeepVersions;
+ var fullVersionsKeptCount = 0;
+ if (fullVersionsToKeep > 0 && fullVersionsToKeep < backupsRemaining.Count)
+ {
+ // keep the number of full backups specified in fullVersionsToKeep.
+ // once the last full backup to keep is found, also keep any partial backups immediately after it in the list.
+ // add the remainder of full and partial backups to toDelete
+ bool foundLastFullBackupToKeep = false;
+ foreach (var backup in backupsRemaining)
+ {
+ bool isFullBackup;
+ if (fullVersionsKeptCount < fullVersionsToKeep)
+ {
+ isFullBackup = db.IsFilesetFullBackup(backup);
+ // count only full backups toward the keep limit
+ if (isFullBackup)
+ {
+ fullVersionsKeptCount++;
+ }
+ continue;
+ }
+ // do not add partial backups to the delete list until the next older full backup is found
+ if (!foundLastFullBackupToKeep)
+ {
+ isFullBackup = db.IsFilesetFullBackup(backup);
+ if (!isFullBackup)
+ {
+ continue;
+ }
+ foundLastFullBackupToKeep = true;
+ }
+ toDelete.Add(backup);
+ }
+ }
- var removeCount = toDeleteDistinct.Count();
- if (removeCount > allBackups.Length)
- throw new Exception(string.Format("Too many entries {0} vs {1}, lists: {2} vs {3}", removeCount, allBackups.Length, string.Join(", ", toDeleteDistinct.Select(x => x.ToString())), string.Join(", ", allBackups.Select(x => x.ToString()))));
+ var toDeleteDistinct = toDelete.Distinct().OrderByDescending(x => x.ToUniversalTime()).ToArray();
+ var removeCount = toDeleteDistinct.Length;
+ if (removeCount > sortedAllBackups.Length)
+ {
+ throw new Exception($"Too many entries {removeCount} vs {sortedAllBackups.Length}, lists: {string.Join(", ", toDeleteDistinct.Select(x => x.ToString(CultureInfo.InvariantCulture)))} vs {string.Join(", ", sortedAllBackups.Select(x => x.ToString(CultureInfo.InvariantCulture)))}");
+ }
- return toDeleteDistinct.ToArray();
+ return toDeleteDistinct;
}
/// <summary>
@@ -220,7 +268,7 @@ namespace Duplicati.Library.Main.Operation
/// </summary>
/// <returns>The filesets to delete</returns>
/// <param name="backups">The list of backups that can be deleted</param>
- private List<DateTime> ApplyRetentionPolicy(DateTime[] backups)
+ private List<DateTime> ApplyRetentionPolicy(Database.LocalDeleteDatabase db, DateTime[] backups)
{
// Any work to do?
var retentionPolicyOptionValues = m_options.RetentionPolicy;
@@ -238,7 +286,7 @@ namespace Duplicati.Library.Main.Operation
clonedBackupList = clonedBackupList.OrderByDescending(x => x).ToList();
// Most recent backup usually should never get deleted in this process, so exclude it for now,
- // but keep a reference to potentiall delete it when allow-full-removal is set
+ // but keep a reference to potentially delete it when allow-full-removal is set
var mostRecentBackup = clonedBackupList.ElementAt(0);
clonedBackupList.RemoveAt(0);
var deleteMostRecentBackup = m_options.AllowFullRemoval;
@@ -264,7 +312,7 @@ namespace Duplicati.Library.Main.Operation
List<DateTime> backupsInTimeFrame = new List<DateTime>();
while (clonedBackupList.Count > 0 && clonedBackupList[0] >= timeFrame)
{
- backupsInTimeFrame.Insert(0, clonedBackupList[0]); // Insert at begining to reverse order, which is nessecary for next step
+ backupsInTimeFrame.Insert(0, clonedBackupList[0]); // Insert at beginning to reverse order, which is necessary for next step
clonedBackupList.RemoveAt(0); // remove from here to not handle the same backup in two time frames
}
@@ -275,18 +323,31 @@ namespace Duplicati.Library.Main.Operation
DateTime? lastKept = null;
foreach (DateTime backup in backupsInTimeFrame)
{
+ var isFullBackup = db.IsFilesetFullBackup(backup);
+
// Keep this backup if
// - no backup has yet been added to the time frame (keeps at least the oldest backup in a time frame)
// - difference between last added backup and this backup is bigger than the specified interval
if (lastKept == null || singleRetentionPolicyOptionValue.IsKeepAllVersions() || (backup - lastKept.Value) >= singleRetentionPolicyOptionValue.Interval)
{
- Logging.Log.WriteProfilingMessage(LOGTAG_RETENTION, "KeepBackups", string.Format("Keeping backup: {0}", backup), Logging.LogMessageType.Profiling);
- lastKept = backup;
+ Logging.Log.WriteProfilingMessage(LOGTAG_RETENTION, "KeepBackups", $"Keeping {(isFullBackup ? "" : "partial")} backup: {backup}", Logging.LogMessageType.Profiling);
+ if (isFullBackup)
+ {
+ lastKept = backup;
+ }
}
else
{
- Logging.Log.WriteProfilingMessage(LOGTAG_RETENTION, "DeletingBackups", "Deleting backup: {0}", backup);
- backupsToDelete.Add(backup);
+ if (isFullBackup)
+ {
+ Logging.Log.WriteProfilingMessage(LOGTAG_RETENTION, "DeletingBackups",
+ "Deleting backup: {0}", backup);
+ backupsToDelete.Add(backup);
+ }
+ else
+ {
+ Logging.Log.WriteProfilingMessage(LOGTAG_RETENTION, "KeepBackups", $"Keeping partial backup: {backup}", Logging.LogMessageType.Profiling);
+ }
}
}
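
The reworked keep-versions walk is the subtlest piece of the DeleteHandler changes: partial backups neither count toward the keep limit nor get deleted while they trail the last kept full backup, and the first full backup past the limit opens the delete region. The same walk as a standalone sketch, with newest-first input and `isFull` abstracting db.IsFilesetFullBackup:

// Sketch: keep `n` full backups plus any partials newer than the cut-off full backup.
static List<DateTime> SelectForDeletion(IEnumerable<DateTime> newestFirst, Func<DateTime, bool> isFull, int n)
{
    var toDelete = new List<DateTime>();
    var fullsKept = 0;
    var boundaryFound = false;
    foreach (var backup in newestFirst)
    {
        if (fullsKept < n)
        {
            if (isFull(backup))
                fullsKept++; // only full backups consume the quota
            continue;        // everything down to the n-th full backup is kept
        }
        if (!boundaryFound)
        {
            if (!isFull(backup))
                continue;         // partials trailing the last kept full are kept as well
            boundaryFound = true; // the next older full backup opens the delete region
        }
        toDelete.Add(backup);
    }
    return toDelete;
}
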
diff --git a/Duplicati/Library/Main/Operation/FilelistProcessor.cs b/Duplicati/Library/Main/Operation/FilelistProcessor.cs
index c5badc9f0..ba1aed082 100644
--- a/Duplicati/Library/Main/Operation/FilelistProcessor.cs
+++ b/Duplicati/Library/Main/Operation/FilelistProcessor.cs
@@ -217,7 +217,7 @@ namespace Duplicati.Library.Main.Operation
log.KnownFileSize = knownFileSize;
log.UnknownFileCount = unknownlist.Count;
log.UnknownFileSize = unknownlist.Select(x => Math.Max(0, x.Size)).Sum();
- log.BackupListCount = filesets.Count;
+ log.BackupListCount = database.FilesetTimes.Count();
log.LastBackupDate = filesets.Count == 0 ? new DateTime(0) : filesets[0].Time.ToLocalTime();
// TODO: We should query through the backendmanager
diff --git a/Duplicati/Library/Main/Operation/ListControlFilesHandler.cs b/Duplicati/Library/Main/Operation/ListControlFilesHandler.cs
index 8b89bb707..6da503d86 100644
--- a/Duplicati/Library/Main/Operation/ListControlFilesHandler.cs
+++ b/Duplicati/Library/Main/Operation/ListControlFilesHandler.cs
@@ -18,6 +18,7 @@
using System;
using System.Linq;
using System.Collections.Generic;
+using Duplicati.Library.Main.Database;
namespace Duplicati.Library.Main.Operation
{
@@ -68,7 +69,7 @@ namespace Duplicati.Library.Main.Operation
if (Library.Utility.FilterExpression.Matches(filter, cf.Key))
files.Add(new ListResultFile(cf.Key, null));
- m_result.SetResult(new Library.Interface.IListResultFileset[] { new ListResultFileset(fileversion.Key, fileversion.Value.Time, -1, -1) }, files);
+ m_result.SetResult(new Library.Interface.IListResultFileset[] { new ListResultFileset(fileversion.Key, LocalDatabase.BackupType.PARTIAL_BACKUP, fileversion.Value.Time, -1, -1) }, files);
lastEx = null;
break;
}
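
The control-files listing cannot tell whether the source fileset was complete, so it is tagged with the partial constant. The constants themselves are defined in LocalDatabase.cs, which this commit changes but this excerpt does not show; a sketch of their assumed shape, consistent with the int-typed IsFullBackup property added to ResultClasses.cs below:

    // Assumed definition of the constants referenced as LocalDatabase.BackupType.
    public static class BackupType
    {
        public const int PARTIAL_BACKUP = 0;
        public const int FULL_BACKUP = 1;
    }
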
diff --git a/Duplicati/Library/Main/Operation/ListFilesHandler.cs b/Duplicati/Library/Main/Operation/ListFilesHandler.cs
index 3f20d524a..d6dc03f56 100644
--- a/Duplicati/Library/Main/Operation/ListFilesHandler.cs
+++ b/Duplicati/Library/Main/Operation/ListFilesHandler.cs
@@ -3,11 +3,12 @@ using System.Collections.Generic;
using System.Linq;
using System.Text;
using Duplicati.Library.Interface;
+using Duplicati.Library.Main.Database;
namespace Duplicati.Library.Main.Operation
-{
+{
internal class ListFilesHandler
- {
+ {
/// <summary>
/// The tag used for logging
/// </summary>
@@ -29,49 +30,65 @@ namespace Duplicati.Library.Main.Operation
var parsedfilter = new Library.Utility.FilterExpression(filterstrings);
var filter = Library.Utility.JoinedFilterExpression.Join(parsedfilter, compositefilter);
var simpleList = !((filter is Library.Utility.FilterExpression && ((Library.Utility.FilterExpression)filter).Type == Library.Utility.FilterType.Simple) || m_options.AllVersions);
-
+
//Use a speedy local query
if (!m_options.NoLocalDb && System.IO.File.Exists(m_options.Dbpath))
- using(var db = new Database.LocalListDatabase(m_options.Dbpath))
+ using (var db = new Database.LocalListDatabase(m_options.Dbpath))
{
m_result.SetDatabase(db);
- using(var filesets = db.SelectFileSets(m_options.Time, m_options.Version))
+ using (var filesets = db.SelectFileSets(m_options.Time, m_options.Version))
{
if (!filter.Empty)
{
if (simpleList || (m_options.ListFolderContents && !m_options.AllVersions))
+ {
filesets.TakeFirst();
+ }
}
IEnumerable<Database.LocalListDatabase.IFileversion> files;
if (m_options.ListFolderContents)
+ {
files = filesets.SelectFolderContents(filter);
+ }
else if (m_options.ListPrefixOnly)
+ {
files = filesets.GetLargestPrefix(filter);
+ }
else if (filter.Empty)
+ {
files = null;
+ }
else
+ {
files = filesets.SelectFiles(filter);
-
+ }
+
if (m_options.ListSetsOnly)
+ {
m_result.SetResult(
- filesets.QuickSets.Select(x => new ListResultFileset(x.Version, x.Time, x.FileCount, x.FileSizes)).ToArray(),
+ filesets.QuickSets.Select(x => new ListResultFileset(x.Version, x.IsFullBackup, x.Time, x.FileCount, x.FileSizes)).ToArray(),
null
);
+ }
else
+ {
m_result.SetResult(
- filesets.Sets.Select(x => new ListResultFileset(x.Version, x.Time, x.FileCount, x.FileSizes)).ToArray(),
- files == null ? null :
- (from n in files
- select (Duplicati.Library.Interface.IListResultFile)(new ListResultFile(n.Path, n.Sizes.ToArray())))
- .ToArray()
+ filesets.Sets.Select(x =>
+ new ListResultFileset(x.Version, x.IsFullBackup, x.Time, x.FileCount, x.FileSizes)).ToArray(),
+ files == null
+ ? null
+ : (from n in files
+ select (Duplicati.Library.Interface.IListResultFile)(new ListResultFile(n.Path,
+ n.Sizes.ToArray())))
+ .ToArray()
);
-
+ }
return;
}
}
-
+
Logging.Log.WriteInformationMessage(LOGTAG, "NoLocalDatabase", "No local database, accessing remote store");
//TODO: Add prefix and foldercontents
@@ -86,7 +103,7 @@ namespace Duplicati.Library.Main.Operation
using (var backend = new BackendManager(m_backendurl, m_options, m_result.BackendWriter, db))
{
m_result.SetDatabase(db);
-
+
var filteredList = ParseAndFilterFilesets(backend.List(), m_options);
if (filteredList.Count == 0)
throw new UserInformationException("No filesets found on remote target", "EmptyRemoteFolder");
@@ -98,14 +115,14 @@ namespace Duplicati.Library.Main.Operation
m_result.EncryptedFiles = filteredList.Any(x => !string.IsNullOrWhiteSpace(x.Value.EncryptionModule));
return;
}
-
+
var firstEntry = filteredList[0].Value;
filteredList.RemoveAt(0);
- Dictionary<string, List<long>> res;
-
+ Dictionary<string, List<long>> res;
+
if (m_result.TaskControlRendevouz() == TaskControlState.Stop)
return;
-
+
using (var tmpfile = backend.Get(firstEntry.File.Name, firstEntry.File.Size, null))
using (var rd = new Volumes.FilesetVolumeReader(RestoreHandler.GetCompressionModule(firstEntry.File.Name), tmpfile, m_options))
if (simpleList)
@@ -113,12 +130,12 @@ namespace Duplicati.Library.Main.Operation
m_result.SetResult(
numberSeq.Take(1),
(from n in rd.Files
- where Library.Utility.FilterExpression.Matches(filter, n.Path)
- orderby n.Path
- select new ListResultFile(n.Path, new long[] { n.Size }))
+ where Library.Utility.FilterExpression.Matches(filter, n.Path)
+ orderby n.Path
+ select new ListResultFile(n.Path, new long[] { n.Size }))
.ToArray()
);
-
+
return;
}
else
@@ -126,9 +143,9 @@ namespace Duplicati.Library.Main.Operation
res = rd.Files
.Where(x => Library.Utility.FilterExpression.Matches(filter, x.Path))
.ToDictionary(
- x => x.Path,
- y =>
- {
+ x => x.Path,
+ y =>
+ {
var lst = new List<long>();
lst.Add(y.Size);
return lst;
@@ -136,35 +153,35 @@ namespace Duplicati.Library.Main.Operation
Library.Utility.Utility.ClientFilenameStringComparer
);
}
-
+
long flindex = 1;
- foreach(var flentry in filteredList)
- using(var tmpfile = backend.Get(flentry.Value.File.Name, flentry.Value.File == null ? -1 : flentry.Value.File.Size, null))
+ foreach (var flentry in filteredList)
+ using (var tmpfile = backend.Get(flentry.Value.File.Name, flentry.Value.File == null ? -1 : flentry.Value.File.Size, null))
using (var rd = new Volumes.FilesetVolumeReader(flentry.Value.CompressionModule, tmpfile, m_options))
{
if (m_result.TaskControlRendevouz() == TaskControlState.Stop)
return;
-
- foreach(var p in from n in rd.Files where Library.Utility.FilterExpression.Matches(filter, n.Path) select n)
+
+ foreach (var p in from n in rd.Files where Library.Utility.FilterExpression.Matches(filter, n.Path) select n)
{
List<long> lst;
if (!res.TryGetValue(p.Path, out lst))
{
lst = new List<long>();
res[p.Path] = lst;
- for(var i = 0; i < flindex; i++)
+ for (var i = 0; i < flindex; i++)
lst.Add(-1);
}
-
+
lst.Add(p.Size);
}
-
- foreach(var n in from i in res where i.Value.Count < flindex + 1 select i)
+
+ foreach (var n in from i in res where i.Value.Count < flindex + 1 select i)
n.Value.Add(-1);
-
+
flindex++;
}
-
+
m_result.SetResult(
numberSeq,
from n in res
@@ -177,17 +194,17 @@ namespace Duplicati.Library.Main.Operation
public static List<KeyValuePair<long, Volumes.IParsedVolume>> ParseAndFilterFilesets(IEnumerable<Duplicati.Library.Interface.IFileEntry> rawlist, Options options)
{
var parsedlist = (from n in rawlist
- let p = Volumes.VolumeBase.ParseFilename(n)
- where p != null && p.FileType == RemoteVolumeType.Files
- orderby p.Time descending
- select p).ToArray();
+ let p = Volumes.VolumeBase.ParseFilename(n)
+ where p != null && p.FileType == RemoteVolumeType.Files
+ orderby p.Time descending
+ select p).ToArray();
var filelistFilter = RestoreHandler.FilterNumberedFilelist(options.Time, options.Version);
- return filelistFilter(parsedlist).ToList();
+ return filelistFilter(parsedlist).ToList();
}
-
+
public static IEnumerable<Library.Interface.IListResultFileset> CreateResultSequence(IEnumerable<KeyValuePair<long, Volumes.IParsedVolume>> filteredList)
{
- return (from n in filteredList select (Library.Interface.IListResultFileset)(new ListResultFileset(n.Key, n.Value.Time.ToLocalTime(), -1, -1))).ToArray();
- }
+ return (from n in filteredList select (Library.Interface.IListResultFileset)(new ListResultFileset(n.Key, LocalDatabase.BackupType.PARTIAL_BACKUP, n.Value.Time.ToLocalTime(), -1, -1))).ToArray();
+ }
}
}
diff --git a/Duplicati/Library/Main/Operation/PurgeFilesHandler.cs b/Duplicati/Library/Main/Operation/PurgeFilesHandler.cs
index 25a1bcf1e..4dba73fde 100644
--- a/Duplicati/Library/Main/Operation/PurgeFilesHandler.cs
+++ b/Duplicati/Library/Main/Operation/PurgeFilesHandler.cs
@@ -15,8 +15,10 @@
// License along with this library; if not, write to the Free Software
// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
using System;
+using System.ComponentModel;
using System.Linq;
using Duplicati.Library.Interface;
+using Duplicati.Library.Main.Database;
namespace Duplicati.Library.Main.Operation
{
@@ -69,7 +71,7 @@ namespace Duplicati.Library.Main.Operation
if (db.RepairInProgress && filtercommand == null)
throw new UserInformationException(string.Format("The purge command does not work on an incomplete database, try the {0} operation.", "purge-broken-files"), "PurgeNotAllowedOnIncompleteDatabase");
- var versions = db.GetFilesetIDs(m_options.Time, m_options.Version).ToArray();
+ var versions = db.GetFilesetIDs(m_options.Time, m_options.Version).OrderByDescending(x => x).ToArray();
if (versions.Length <= 0)
throw new UserInformationException("No filesets matched the supplied time or versions", "NoFilesetFoundForTimeOrVersion");
@@ -90,7 +92,7 @@ namespace Duplicati.Library.Main.Operation
FilelistProcessor.VerifyRemoteList(backend, m_options, db, m_result.BackendWriter, null);
}
- var filesets = db.FilesetTimes.ToArray();
+ var filesets = db.FilesetTimes.OrderByDescending(x => x.Value).ToArray();
var versionprogress = ((doCompactStep ? 0.75f : 1.0f) / versions.Length) * pgspan;
var currentprogress = pgoffset;
@@ -123,7 +125,9 @@ namespace Duplicati.Library.Main.Operation
break;
}
- var ts = filesets[ix].Value.AddSeconds(secs);
+ var tsOriginal = filesets[ix].Value;
+ var ts = tsOriginal.AddSeconds(secs);
+
var prevfilename = db.GetRemoteVolumeNameForFileset(filesets[ix].Key, tr);
if (secs >= 60)
@@ -151,7 +155,8 @@ namespace Duplicati.Library.Main.Operation
using (var tf = new Library.Utility.TempFile())
using (var vol = new Volumes.FilesetVolumeWriter(m_options, ts))
{
- var newids = tempset.ConvertToPermanentFileset(vol.RemoteFilename, ts);
+ var isOriginalFilesetFullBackup = db.IsFilesetFullBackup(tsOriginal);
+ var newids = tempset.ConvertToPermanentFileset(vol.RemoteFilename, ts, isOriginalFilesetFullBackup);
vol.VolumeID = newids.Item1;
Logging.Log.WriteInformationMessage(LOGTAG, "ReplacingFileset", "Replacing fileset {0} with {1} which has with {2} fewer file(s) ({3} reduction)", prevfilename, vol.RemoteFilename, tempset.RemovedFileCount, Library.Utility.Utility.FormatSizeString(tempset.RemovedFileSize));
diff --git a/Duplicati/Library/Main/Operation/RecreateDatabaseHandler.cs b/Duplicati/Library/Main/Operation/RecreateDatabaseHandler.cs
index 0bb367027..5d1258176 100644
--- a/Duplicati/Library/Main/Operation/RecreateDatabaseHandler.cs
+++ b/Duplicati/Library/Main/Operation/RecreateDatabaseHandler.cs
@@ -219,9 +219,15 @@ namespace Duplicati.Library.Main.Operation
hashes_pr_block = blocksize / m_options.BlockhashSize;
}
-
// Create timestamped operations based on the file timestamp
var filesetid = restoredb.CreateFileset(volumeIds[entry.Name], parsed.Time, tr);
+
+ // retrieve fileset data from dlist
+ var filesetData = VolumeReaderBase.GetFilesetData(parsed.CompressionModule, tmpfile, m_options);
+
+ // update fileset using filesetData
+ restoredb.UpdateFullBackupStateInFileset(filesetid, filesetData.IsFullBackup);
+
using(var filelistreader = new FilesetVolumeReader(parsed.CompressionModule, tmpfile, m_options))
foreach(var fe in filelistreader.Files.Where(x => Library.Utility.FilterExpression.Matches(filter, x.Path)))
{
diff --git a/Duplicati/Library/Main/Operation/RestoreHandler.cs b/Duplicati/Library/Main/Operation/RestoreHandler.cs
index 9c43619e1..a12876b9d 100644
--- a/Duplicati/Library/Main/Operation/RestoreHandler.cs
+++ b/Duplicati/Library/Main/Operation/RestoreHandler.cs
@@ -1,8 +1,27 @@
-using System;
+#region Disclaimer / License
+// Copyright (C) 2019, The Duplicati Team
+// http://www.duplicati.com, info@duplicati.com
+//
+// This library is free software; you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public
+// License as published by the Free Software Foundation; either
+// version 2.1 of the License, or (at your option) any later version.
+//
+// This library is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+// Lesser General Public License for more details.
+//
+// You should have received a copy of the GNU Lesser General Public
+// License along with this library; if not, write to the Free Software
+// Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
+//
+#endregion
+using System;
using System.Collections.Generic;
using System.Linq;
using Duplicati.Library.Interface;
-using Duplicati.Library.Common.IO;
+using Duplicati.Library.Common.IO;
using Duplicati.Library.Main.Database;
using Duplicati.Library.Main.Volumes;
@@ -18,7 +37,7 @@ namespace Duplicati.Library.Main.Operation
private readonly string m_backendurl;
private readonly Options m_options;
private byte[] m_blockbuffer;
- private readonly RestoreResults m_result;
+ private readonly RestoreResults m_result;
private static readonly string DIRSEP = Util.DirectorySeparatorString;
public RestoreHandler(string backendurl, Options options, RestoreResults result)
diff --git a/Duplicati/Library/Main/Operation/TestFilterHandler.cs b/Duplicati/Library/Main/Operation/TestFilterHandler.cs
index e3d58ec9c..0c9b16c68 100644
--- a/Duplicati/Library/Main/Operation/TestFilterHandler.cs
+++ b/Duplicati/Library/Main/Operation/TestFilterHandler.cs
@@ -1,24 +1,25 @@
-// Copyright (C) 2015, The Duplicati Team
-
-// http://www.duplicati.com, info@duplicati.com
+#region Disclaimer / License
+// Copyright (C) 2019, The Duplicati Team
+// http://www.duplicati.com, info@duplicati.com
+//
+// This library is free software; you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public
+// License as published by the Free Software Foundation; either
+// version 2.1 of the License, or (at your option) any later version.
//
-// This library is free software; you can redistribute it and/or modify
-// it under the terms of the GNU Lesser General Public License as
-// published by the Free Software Foundation; either version 2.1 of the
-// License, or (at your option) any later version.
+// This library is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+// Lesser General Public License for more details.
//
-// This library is distributed in the hope that it will be useful, but
-// WITHOUT ANY WARRANTY; without even the implied warranty of
-// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
-// Lesser General Public License for more details.
+// You should have received a copy of the GNU Lesser General Public
+// License along with this library; if not, write to the Free Software
+// Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
//
-// You should have received a copy of the GNU Lesser General Public
-// License along with this library; if not, write to the Free Software
-// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+#endregion
using System;
using System.IO;
-using System.Collections.Generic;
-using System.Linq;
+using System.Threading;
using Duplicati.Library.Snapshots;
using CoCoL;
@@ -40,17 +41,22 @@ namespace Duplicati.Library.Main.Operation
m_result = results;
}
- public void Run(string[] sources, Library.Utility.IFilter filter)
+ public void Run(string[] sources, Library.Utility.IFilter filter, CancellationToken token)
{
var sourcefilter = new Library.Utility.FilterExpression(sources, true);
- using(var snapshot = BackupHandler.GetSnapshot(sources, m_options))
- using(new IsolatedChannelScope())
+ using (var snapshot = BackupHandler.GetSnapshot(sources, m_options))
+ using (new IsolatedChannelScope())
{
- var source = Operation.Backup.FileEnumerationProcess.Run(sources, snapshot, null, m_options.FileAttributeFilter, sourcefilter, filter, m_options.SymlinkPolicy, m_options.HardlinkPolicy, m_options.ExcludeEmptyFolders, m_options.IgnoreFilenames, null, m_result.TaskReader);
+ var source = Operation.Backup.FileEnumerationProcess.Run(sources, snapshot, null,
+ m_options.FileAttributeFilter, sourcefilter, filter, m_options.SymlinkPolicy,
+ m_options.HardlinkPolicy, m_options.ExcludeEmptyFolders, m_options.IgnoreFilenames, null,
+ m_result.TaskReader, token);
+
var sink = CoCoL.AutomationExtensions.RunTask(
new { source = Operation.Backup.Channels.SourcePaths.ForRead },
- async self => {
+ async self =>
+ {
while (true)
{
var path = await self.source.ReadAsync();
@@ -110,7 +116,7 @@ namespace Duplicati.Library.Main.Operation
System.Threading.Tasks.Task.WhenAll(source, sink).WaitForTaskOrThrow();
}
}
-
+
#region IDisposable implementation
public void Dispose()
{
diff --git a/Duplicati/Library/Main/ResultClasses.cs b/Duplicati/Library/Main/ResultClasses.cs
index 8c5d51360..0016b5aae 100644
--- a/Duplicati/Library/Main/ResultClasses.cs
+++ b/Duplicati/Library/Main/ResultClasses.cs
@@ -1,20 +1,22 @@
-// Copyright (C) 2015, The Duplicati Team
-
-// http://www.duplicati.com, info@duplicati.com
+#region Disclaimer / License
+// Copyright (C) 2019, The Duplicati Team
+// http://www.duplicati.com, info@duplicati.com
+//
+// This library is free software; you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public
+// License as published by the Free Software Foundation; either
+// version 2.1 of the License, or (at your option) any later version.
//
-// This library is free software; you can redistribute it and/or modify
-// it under the terms of the GNU Lesser General Public License as
-// published by the Free Software Foundation; either version 2.1 of the
-// License, or (at your option) any later version.
+// This library is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+// Lesser General Public License for more details.
//
-// This library is distributed in the hope that it will be useful, but
-// WITHOUT ANY WARRANTY; without even the implied warranty of
-// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
-// Lesser General Public License for more details.
+// You should have received a copy of the GNU Lesser General Public
+// License along with this library; if not, write to the Free Software
+// Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
//
-// You should have received a copy of the GNU Lesser General Public
-// License along with this library; if not, write to the Free Software
-// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+#endregion
using System;
using Duplicati.Library.Interface;
using System.Collections.Generic;
@@ -152,7 +154,7 @@ namespace Duplicati.Library.Main
{
void Pause();
void Resume();
- void Stop();
+ void Stop(bool allowCurrentFileToFinish);
void Abort();
}
@@ -438,10 +440,10 @@ namespace Duplicati.Library.Main
/// <summary>
/// Request that this task stops.
/// </summary>
- public void Stop()
+ public void Stop(bool allowCurrentFileToFinish)
{
if (m_parent != null)
- m_parent.Stop();
+ m_parent.Stop(allowCurrentFileToFinish);
else
{
lock (m_lock)
@@ -449,6 +451,10 @@ namespace Duplicati.Library.Main
{
m_controlState = TaskControlState.Stop;
m_pauseEvent.Set();
+ if (!allowCurrentFileToFinish)
+ {
+ m_taskController.Stop(true);
+ }
}
if (StateChangedEvent != null)
@@ -665,12 +671,14 @@ namespace Duplicati.Library.Main
internal class ListResultFileset : Duplicati.Library.Interface.IListResultFileset
{
public long Version { get; private set; }
+ public int IsFullBackup { get; private set; }
public DateTime Time { get; private set; }
public long FileCount { get; private set; }
public long FileSizes { get; private set; }
- public ListResultFileset(long version, DateTime time, long fileCount, long fileSizes)
+ public ListResultFileset(long version, int isFullBackup, DateTime time, long fileCount, long fileSizes)
{
this.Version = version;
+ this.IsFullBackup = isFullBackup;
this.Time = time;
this.FileCount = fileCount;
this.FileSizes = fileSizes;
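
Stop() gains an allowCurrentFileToFinish flag distinguishing a graceful stop from an immediate one. A minimal model of the intended contract, assuming the hard stop ultimately cancels a CancellationToken observed by per-file processing (as the token parameters added elsewhere in this commit suggest); this illustrates the semantics, not Duplicati's implementation:

    using System.Threading;

    class StopModel
    {
        private readonly CancellationTokenSource m_cts = new CancellationTokenSource();
        public volatile bool StopRequested;            // polled between files
        public CancellationToken Token => m_cts.Token; // observed inside a file

        public void Stop(bool allowCurrentFileToFinish)
        {
            StopRequested = true;
            if (!allowCurrentFileToFinish)
                m_cts.Cancel(); // also aborts the file currently in flight
        }
    }
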
diff --git a/Duplicati/Library/Main/Volumes/FilesetVolumeWriter.cs b/Duplicati/Library/Main/Volumes/FilesetVolumeWriter.cs
index be226df57..89ea5d894 100644
--- a/Duplicati/Library/Main/Volumes/FilesetVolumeWriter.cs
+++ b/Duplicati/Library/Main/Volumes/FilesetVolumeWriter.cs
@@ -1,7 +1,5 @@
using System;
using System.Collections.Generic;
-using System.Linq;
-using System.Text;
using System.IO;
using Newtonsoft.Json;
using Duplicati.Library.Interface;
@@ -10,6 +8,7 @@ namespace Duplicati.Library.Main.Volumes
{
public class FilesetVolumeWriter : VolumeWriterBase
{
+ private MemoryStream m_memorystream;
private StreamWriter m_streamwriter;
private readonly JsonWriter m_writer;
private long m_filecount;
@@ -20,7 +19,8 @@ namespace Duplicati.Library.Main.Volumes
public FilesetVolumeWriter(Options options, DateTime timestamp)
: base(options, timestamp)
{
- m_streamwriter = new StreamWriter(m_compression.CreateFile(FILELIST, CompressionHint.Compressible, DateTime.UtcNow));
+ m_memorystream = new MemoryStream();
+ m_streamwriter = new StreamWriter(m_memorystream, ENCODING);
m_writer = new JsonTextWriter(m_streamwriter);
m_writer.WriteStartArray();
}
@@ -84,7 +84,7 @@ namespace Duplicati.Library.Main.Volumes
if (blocklisthashes != null)
{
- //Slightly akward, but we avoid writing if there are no entries
+ //Slightly awkward, but we avoid writing if there are no entries
using (var en = blocklisthashes.GetEnumerator())
{
if (en.MoveNext() && !string.IsNullOrEmpty(en.Current))
@@ -132,7 +132,6 @@ namespace Duplicati.Library.Main.Volumes
{
if (m_streamwriter != null)
{
- m_writer.WriteEndArray();
m_writer.Close();
m_streamwriter.Dispose();
m_streamwriter = null;
@@ -141,6 +140,23 @@ namespace Duplicati.Library.Main.Volumes
base.Close();
}
+ public void AddFilelistFile()
+ {
+ if (m_streamwriter != null)
+ {
+ m_writer.WriteEndArray();
+ m_writer.Flush();
+ m_streamwriter.Flush();
+ }
+
+ using (Stream sr = m_compression.CreateFile(FILELIST, CompressionHint.Compressible, DateTime.UtcNow))
+ {
+ m_memorystream.Seek(0, SeekOrigin.Begin);
+ m_memorystream.CopyTo(sr);
+ sr.Flush();
+ }
+ }
+
public void AddControlFile(string localfile, CompressionHint hint, string filename = null)
{
filename = filename ?? System.IO.Path.GetFileName(localfile);
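
The writer now builds filelist.json in a MemoryStream and only copies it into the archive when AddFilelistFile() is called, rather than streaming into a compression entry from construction time. Since archive entries are written sequentially, buffering lets the caller control entry order (for example, emitting the new "fileset" entry first). The pattern in isolation:

    using System.IO;

    static class BufferedEntry
    {
        // Buffer content in memory first, then emit the archive entry in
        // one deliberate step when the caller decides the order is right.
        public static void Commit(MemoryStream buffered, Stream archiveEntry)
        {
            buffered.Seek(0, SeekOrigin.Begin);
            buffered.CopyTo(archiveEntry);
            archiveEntry.Flush();
        }
    }
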
diff --git a/Duplicati/Library/Main/Volumes/VolumeBase.cs b/Duplicati/Library/Main/Volumes/VolumeBase.cs
index 96319a722..cd00cbdf1 100644
--- a/Duplicati/Library/Main/Volumes/VolumeBase.cs
+++ b/Duplicati/Library/Main/Volumes/VolumeBase.cs
@@ -1,13 +1,25 @@
using System;
using System.Collections.Generic;
-using System.Linq;
-using System.Text;
using Newtonsoft.Json;
namespace Duplicati.Library.Main.Volumes
{
public abstract class VolumeBase
{
+ public class FilesetData
+ {
+ [JsonProperty("IsFullBackup")]
+ public bool IsFullBackup { get; set; } = true;
+
+ public static string GetFilesetInstance(bool isFullBackup = true)
+ {
+ return JsonConvert.SerializeObject(new FilesetData
+ {
+ IsFullBackup = isFullBackup
+ });
+ }
+ }
+
protected class ManifestData
{
public const string ENCODING = "utf8";
@@ -38,6 +50,7 @@ namespace Duplicati.Library.Main.Volumes
public static void VerifyManifest(string manifest, long blocksize, string blockhash, string filehash)
{
var d = JsonConvert.DeserializeObject<ManifestData>(manifest);
+
if (d.Version > VERSION)
throw new InvalidManifestException("Version", d.Version.ToString(), VERSION.ToString());
if (d.Encoding != ENCODING)
@@ -51,7 +64,6 @@ namespace Duplicati.Library.Main.Volumes
}
}
-
private class ParsedVolume : IParsedVolume
{
public RemoteVolumeType FileType { get; private set; }
@@ -137,6 +149,7 @@ namespace Duplicati.Library.Main.Volumes
return ParsedVolume.Parse(filename);
}
+ protected const string FILESET_FILENAME = "fileset";
protected const string MANIFEST_FILENAME = "manifest";
protected const string FILELIST = "filelist.json";
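
FilesetData is a one-field JSON document whose default of true keeps old volumes, which lack the new "fileset" entry, classified as full backups. A small round-trip demo of that fallback behavior, using the same Json.NET serialization as above:

    using System;
    using Newtonsoft.Json;

    class FilesetData
    {
        [JsonProperty("IsFullBackup")]
        public bool IsFullBackup { get; set; } = true;
    }

    static class FilesetJsonDemo
    {
        static void Main()
        {
            var json = JsonConvert.SerializeObject(new FilesetData { IsFullBackup = false });
            Console.WriteLine(json);                // {"IsFullBackup":false}

            var legacy = JsonConvert.DeserializeObject<FilesetData>("{}");
            Console.WriteLine(legacy.IsFullBackup); // True: missing field keeps the default
        }
    }
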
diff --git a/Duplicati/Library/Main/Volumes/VolumeReaderBase.cs b/Duplicati/Library/Main/Volumes/VolumeReaderBase.cs
index 4f674b09d..1011ac37d 100644
--- a/Duplicati/Library/Main/Volumes/VolumeReaderBase.cs
+++ b/Duplicati/Library/Main/Volumes/VolumeReaderBase.cs
@@ -1,18 +1,20 @@
using Duplicati.Library.Interface;
-using Duplicati.Library.Utility;
using Newtonsoft.Json;
using System;
using System.Collections.Generic;
using System.IO;
+using Duplicati.Library.Utility;
namespace Duplicati.Library.Main.Volumes
{
public abstract class VolumeReaderBase : VolumeBase, IDisposable
{
+ public bool IsFullBackup { get; set; }
+
protected bool m_disposeCompression = false;
protected ICompression m_compression;
protected Stream m_stream;
-
+
private static ICompression LoadCompressor(string compressor, Stream stream, Options options)
{
var tmp = DynamicLoader.CompressionLoader.GetModule(compressor, stream, Interface.ArchiveMode.Read, options.RawOptions);
@@ -38,8 +40,11 @@ namespace Duplicati.Library.Main.Volumes
: base(options)
{
m_compression = LoadCompressor(compressor, file, options, out m_stream);
- ReadManifests(options);
+ ReadFileset();
+
+ ReadManifests(options);
+
m_disposeCompression = true;
}
@@ -50,17 +55,57 @@ namespace Duplicati.Library.Main.Volumes
ReadManifests(options);
}
+ private void ReadFileset()
+ {
+ using (var s = m_compression.OpenRead(FILESET_FILENAME))
+ {
+ if (s == null)
+ {
+ IsFullBackup = new FilesetData().IsFullBackup; // use default value
+ }
+ else
+ {
+ using (var fs = new StreamReader(s, ENCODING))
+ {
+ FilesetData fileset = JsonConvert.DeserializeObject<FilesetData>(fs.ReadToEnd());
+ IsFullBackup = fileset.IsFullBackup;
+ }
+ }
+ }
+ }
+
private void ReadManifests(Options options)
{
- if (!options.DontReadManifests)
+ if (options.DontReadManifests) return;
+
+ using (var s = m_compression.OpenRead(MANIFEST_FILENAME))
{
- using (var s = m_compression.OpenRead(MANIFEST_FILENAME))
+ if (s == null)
{
- if (s == null)
- throw new InvalidManifestException("No manifest file found in volume");
+ throw new InvalidManifestException("No manifest file found in volume");
+ }
- using (var fs = new StreamReader(s, ENCODING))
- ManifestData.VerifyManifest(fs.ReadToEnd(), m_blocksize, options.BlockHashAlgorithm, options.FileHashAlgorithm);
+ using (var fs = new StreamReader(s, ENCODING))
+ {
+ ManifestData.VerifyManifest(fs.ReadToEnd(), m_blocksize, options.BlockHashAlgorithm, options.FileHashAlgorithm);
+ }
+ }
+ }
+
+ public static FilesetData GetFilesetData(string compressor, string file, Options options)
+ {
+ using (var stream = new FileStream(file, FileMode.Open, FileAccess.Read, FileShare.Read))
+ using (var c = LoadCompressor(compressor, stream, options))
+ using (var s = c.OpenRead(FILESET_FILENAME))
+ {
+ if (s == null)
+ {
+ return new FilesetData(); // return default
+ }
+
+ using (var fs = new StreamReader(s, ENCODING))
+ {
+ return JsonConvert.DeserializeObject<FilesetData>(fs.ReadToEnd());
}
}
}
@@ -79,7 +124,7 @@ namespace Duplicati.Library.Main.Volumes
string n;
if (!options.RawOptions.TryGetValue("blocksize", out n) || string.IsNullOrEmpty(n))
- options.RawOptions["blocksize"] = d.Blocksize.ToString() + "b";
+ options.RawOptions["blocksize"] = d.Blocksize + "b";
if (!options.RawOptions.TryGetValue("block-hash-algorithm", out n) || string.IsNullOrEmpty(n))
options.RawOptions["block-hash-algorithm"] = d.BlockHash;
if (!options.RawOptions.TryGetValue("file-hash-algorithm", out n) || string.IsNullOrEmpty(n))
@@ -114,7 +159,6 @@ namespace Duplicati.Library.Main.Volumes
read++;
yield return Convert.ToBase64String(buffer);
}
-
}
}
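
A usage sketch of the new static reader, matching the call RecreateDatabaseHandler makes above. The helper name, compressor name, and path are placeholders; Options is Duplicati's existing options type:

    // Hypothetical helper around the new API; "zip" is a placeholder compressor.
    static bool IsDlistFullBackup(string dlistPath, Duplicati.Library.Main.Options options)
    {
        // Volumes written before this change have no "fileset" entry, so
        // GetFilesetData returns the default, which reports a full backup.
        var data = Duplicati.Library.Main.Volumes.VolumeReaderBase.GetFilesetData("zip", dlistPath, options);
        return data.IsFullBackup;
    }
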
diff --git a/Duplicati/Library/Main/Volumes/VolumeWriterBase.cs b/Duplicati/Library/Main/Volumes/VolumeWriterBase.cs
index ab152bd09..d28e16628 100644
--- a/Duplicati/Library/Main/Volumes/VolumeWriterBase.cs
+++ b/Duplicati/Library/Main/Volumes/VolumeWriterBase.cs
@@ -1,7 +1,6 @@
-using Duplicati.Library.Interface;
-using Duplicati.Library.Utility;
-using System;
+using System;
using System.IO;
+using Duplicati.Library.Interface;
namespace Duplicati.Library.Main.Volumes
{
@@ -63,15 +62,23 @@ namespace Duplicati.Library.Main.Volumes
if ((this is IndexVolumeWriter || this is FilesetVolumeWriter) && m_compression is Library.Interface.ICompressionHinting)
((Library.Interface.ICompressionHinting)m_compression).LowOverheadMode = true;
- AddManifestfile();
+ AddManifestFile();
}
- protected void AddManifestfile()
+ protected void AddManifestFile()
{
using (var sr = new StreamWriter(m_compression.CreateFile(MANIFEST_FILENAME, CompressionHint.Compressible, DateTime.UtcNow), ENCODING))
sr.Write(ManifestData.GetManifestInstance(m_blocksize, m_blockhash, m_filehash));
}
+ public void CreateFilesetFile(bool isFullBackup)
+ {
+ using (var sr = new StreamWriter(m_compression.CreateFile(FILESET_FILENAME, CompressionHint.Compressible, DateTime.UtcNow), ENCODING))
+ {
+ sr.Write(FilesetData.GetFilesetInstance(isFullBackup));
+ }
+ }
+
public virtual void Dispose()
{
if (m_compression != null)
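
The writer-side counterpart: a fileset entry is expected to be created once per dlist volume before upload. A hedged sketch of a plausible call site (BackupHandler is changed in this commit but not shown here, so the exact order of calls is an assumption):

    // Hypothetical call site; isFullBackup would be false when the backup
    // was interrupted and the fileset is only partially populated.
    using (var vol = new FilesetVolumeWriter(m_options, DateTime.UtcNow))
    {
        vol.CreateFilesetFile(isFullBackup: false);
        // ... add file entries, then AddFilelistFile() and upload ...
    }
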
diff --git a/Duplicati/Library/Snapshots/UsnJournalService.cs b/Duplicati/Library/Snapshots/UsnJournalService.cs
index 657543cc5..b6db60f62 100644
--- a/Duplicati/Library/Snapshots/UsnJournalService.cs
+++ b/Duplicati/Library/Snapshots/UsnJournalService.cs
@@ -24,8 +24,9 @@ using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
+using System.Threading;
using Duplicati.Library.Interface;
-using Duplicati.Library.Common.IO;
+using Duplicati.Library.Common.IO;
using Duplicati.Library.Utility;
namespace Duplicati.Library.Snapshots
@@ -35,6 +36,7 @@ namespace Duplicati.Library.Snapshots
private readonly ISnapshotService m_snapshot;
private readonly IEnumerable<string> m_sources;
private readonly Dictionary<string, VolumeData> m_volumeDataDict;
+ private readonly CancellationToken m_token;
/// <summary>
/// Constructor.
@@ -43,12 +45,13 @@ namespace Duplicati.Library.Snapshots
/// <param name="snapshot"></param>
/// <param name="emitFilter">Emit filter</param>
/// <param name="prevJournalData">Journal-data of previous fileset</param>
- public UsnJournalService(IEnumerable<string> sources, ISnapshotService snapshot, IFilter emitFilter,
- IEnumerable<USNJournalDataEntry> prevJournalData)
+ public UsnJournalService(IEnumerable<string> sources, ISnapshotService snapshot, IFilter emitFilter,
+ IEnumerable<USNJournalDataEntry> prevJournalData, CancellationToken token)
{
m_sources = sources;
m_snapshot = snapshot;
m_volumeDataDict = Initialize(emitFilter, prevJournalData);
+ m_token = token;
}
public IEnumerable<VolumeData> VolumeDataList => m_volumeDataDict.Select(e => e.Value);
@@ -76,6 +79,8 @@ namespace Duplicati.Library.Snapshots
// iterate over volumes
foreach (var sourcesPerVolume in SortByVolume(m_sources))
{
+ if (m_token.IsCancellationRequested) break;
+
var volume = sourcesPerVolume.Key;
var volumeSources = sourcesPerVolume.Value;
var volumeData = new VolumeData
@@ -117,8 +122,12 @@ namespace Duplicati.Library.Snapshots
// obtain changed files and folders, per volume
foreach (var source in volumeSources)
{
+ if (m_token.IsCancellationRequested) break;
+
foreach (var entry in journal.GetChangedFileSystemEntries(source, prevData.NextUsn))
{
+ if (m_token.IsCancellationRequested) break;
+
if (entry.Item2.HasFlag(USNJournal.EntryType.File))
{
changedFiles.Add(entry.Item1);
@@ -171,7 +180,7 @@ namespace Duplicati.Library.Snapshots
volumeData.Files.Add(path);
}
}
- }
+ }
}
return result;
@@ -193,6 +202,11 @@ namespace Duplicati.Library.Snapshots
var cache = new Dictionary<string, bool>();
foreach (var source in m_sources)
{
+ if (m_token.IsCancellationRequested)
+ {
+ break;
+ }
+
cache[source] = true;
}
@@ -206,7 +220,14 @@ namespace Duplicati.Library.Snapshots
if (volumeData.Value.Folders != null)
{
foreach (var folder in FilterExcludedFolders(volumeData.Value.Folders, filter, cache).Where(m_snapshot.DirectoryExists))
+ {
+ if (m_token.IsCancellationRequested)
+ {
+ break;
+ }
+
yield return folder;
+ }
}
// The simplified file list also needs to be checked against the exclusion filter, as it
@@ -218,7 +239,14 @@ namespace Duplicati.Library.Snapshots
if (volumeData.Value.Files != null)
{
foreach (var files in FilterExcludedFiles(volumeData.Value.Files, filter, cache).Where(m_snapshot.FileExists))
+ {
+ if (m_token.IsCancellationRequested)
+ {
+ break;
+ }
+
yield return files;
+ }
}
}
}
@@ -238,7 +266,12 @@ namespace Duplicati.Library.Snapshots
var result = new List<string>();
foreach (var file in files)
- {
+ {
+ if (m_token.IsCancellationRequested)
+ {
+ break;
+ }
+
var attr = m_snapshot.FileExists(file) ? m_snapshot.GetAttributes(file) : FileAttributes.Normal;
try
{
@@ -280,6 +313,11 @@ namespace Duplicati.Library.Snapshots
foreach (var folder in folders)
{
+ if (m_token.IsCancellationRequested)
+ {
+ break;
+ }
+
try
{
if (!IsFolderOrAncestorsExcluded(folder, filter, cache))
@@ -313,6 +351,11 @@ namespace Duplicati.Library.Snapshots
List<string> parents = null;
while (folder != null)
{
+ if (m_token.IsCancellationRequested)
+ {
+ break;
+ }
+
// first check cache
if (cache.TryGetValue(folder, out var include))
{
@@ -391,6 +434,11 @@ namespace Duplicati.Library.Snapshots
foreach (var folder in volumeData.Folders)
{
+ if (m_token.IsCancellationRequested)
+ {
+ break;
+ }
+
if (path.Equals(folder, Utility.Utility.ClientFilenameStringComparison))
return true; // do not append from previous set, already scanned
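
The same cooperative-cancellation pattern is threaded through every enumeration in this file: check the token between items so a "stop now" request does not have to wait for a full volume scan. Reduced to its core:

    using System.Collections.Generic;
    using System.Threading;

    static class CancellableEnumeration
    {
        public static IEnumerable<string> TakeUntilCancelled(
            IEnumerable<string> items, CancellationToken token)
        {
            foreach (var item in items)
            {
                if (token.IsCancellationRequested)
                    yield break; // cooperative early exit between items
                yield return item;
            }
        }
    }
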
diff --git a/Duplicati/Server/Runner.cs b/Duplicati/Server/Runner.cs
index 8dc14c9e9..f317b4de9 100644
--- a/Duplicati/Server/Runner.cs
+++ b/Duplicati/Server/Runner.cs
@@ -1,20 +1,22 @@
-// Copyright (C) 2015, The Duplicati Team
-
-// http://www.duplicati.com, info@duplicati.com
+#region Disclaimer / License
+// Copyright (C) 2019, The Duplicati Team
+// http://www.duplicati.com, info@duplicati.com
+//
+// This library is free software; you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public
+// License as published by the Free Software Foundation; either
+// version 2.1 of the License, or (at your option) any later version.
//
-// This library is free software; you can redistribute it and/or modify
-// it under the terms of the GNU Lesser General Public License as
-// published by the Free Software Foundation; either version 2.1 of the
-// License, or (at your option) any later version.
+// This library is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+// Lesser General Public License for more details.
//
-// This library is distributed in the hope that it will be useful, but
-// WITHOUT ANY WARRANTY; without even the implied warranty of
-// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
-// Lesser General Public License for more details.
+// You should have received a copy of the GNU Lesser General Public
+// License along with this library; if not, write to the Free Software
+// Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
//
-// You should have received a copy of the GNU Lesser General Public
-// License along with this library; if not, write to the Free Software
-// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+#endregion
using System;
using System.Linq;
using System.Collections.Generic;
@@ -29,7 +31,7 @@ namespace Duplicati.Server
Duplicati.Server.Serialization.Interface.IBackup Backup { get; }
IDictionary<string, string> ExtraOptions { get; }
string[] FilterStrings { get; }
- void Stop();
+ void Stop(bool allowCurrentFileToFinish);
void Abort();
void Pause();
void Resume();
@@ -55,12 +57,12 @@ namespace Duplicati.Server
{
Controller = controller;
}
-
- public void Stop()
+
+ public void Stop(bool allowCurrentFileToFinish)
{
var c = Controller;
if (c != null)
- c.Stop();
+ c.Stop(allowCurrentFileToFinish);
}
public void Abort()
diff --git a/Duplicati/Server/WebServer/RESTMethods/Task.cs b/Duplicati/Server/WebServer/RESTMethods/Task.cs
index 730a30cf6..756dd2e3e 100644
--- a/Duplicati/Server/WebServer/RESTMethods/Task.cs
+++ b/Duplicati/Server/WebServer/RESTMethods/Task.cs
@@ -1,19 +1,22 @@
-// Copyright (C) 2015, The Duplicati Team
-// http://www.duplicati.com, info@duplicati.com
+#region Disclaimer / License
+// Copyright (C) 2019, The Duplicati Team
+// http://www.duplicati.com, info@duplicati.com
//
-// This library is free software; you can redistribute it and/or modify
-// it under the terms of the GNU Lesser General Public License as
-// published by the Free Software Foundation; either version 2.1 of the
-// License, or (at your option) any later version.
+// This library is free software; you can redistribute it and/or
+// modify it under the terms of the GNU Lesser General Public
+// License as published by the Free Software Foundation; either
+// version 2.1 of the License, or (at your option) any later version.
//
-// This library is distributed in the hope that it will be useful, but
-// WITHOUT ANY WARRANTY; without even the implied warranty of
-// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
-// Lesser General Public License for more details.
+// This library is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+// Lesser General Public License for more details.
//
-// You should have received a copy of the GNU Lesser General Public
-// License along with this library; if not, write to the Free Software
-// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+// You should have received a copy of the GNU Lesser General Public
+// License along with this library; if not, write to the Free Software
+// Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
+//
+#endregion
using System;
using System.Linq;
using System.Collections.Generic;
@@ -84,13 +87,18 @@ namespace Duplicati.Server.WebServer.RESTMethods
switch (parts.Last().ToLowerInvariant())
{
- case "abort":
- task.Abort();
+ case "stopaftercurrentfile":
+ task.Stop(allowCurrentFileToFinish: true);
+ info.OutputOK();
+ return;
+
+ case "stopnow":
+ task.Stop(allowCurrentFileToFinish: false);
info.OutputOK();
return;
- case "stop":
- task.Stop();
+ case "abort":
+ task.Abort();
info.OutputOK();
return;
}
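
The old "stop" endpoint is replaced by two explicit ones, while "abort" keeps its meaning. A sketch of driving them with HttpClient; the host, port, task id, and the /api/v1 prefix (inferred from the web UI's AppService) are placeholders, not confirmed by this excerpt:

    using System.Net.Http;
    using System.Threading.Tasks;

    static class TaskControlDemo
    {
        static async Task Main()
        {
            using (var client = new HttpClient())
            {
                // Graceful: the file currently uploading is allowed to finish.
                await client.PostAsync("http://localhost:8200/api/v1/task/42/stopaftercurrentfile", null);

                // Immediate: in-flight work is cancelled as well.
                // await client.PostAsync("http://localhost:8200/api/v1/task/42/stopnow", null);
            }
        }
    }
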
diff --git a/Duplicati/Server/webroot/ngax/index.html b/Duplicati/Server/webroot/ngax/index.html
index 1a03c57ea..842f38e24 100755
--- a/Duplicati/Server/webroot/ngax/index.html
+++ b/Duplicati/Server/webroot/ngax/index.html
@@ -176,7 +176,7 @@
</div>
</span>
<span ng-show="StopReqId == activeTaskID">
- <strong translate>Stopping after upload:</strong> {{activeBackup.Backup.Name}}
+ <strong translate>Stopping after the current file:</strong> {{activeBackup.Backup.Name}}
</span>
</span>
<span ng-show="activeBackup == null">
diff --git a/Duplicati/Server/webroot/ngax/scripts/controllers/RestoreController.js b/Duplicati/Server/webroot/ngax/scripts/controllers/RestoreController.js
index 186070278..d11c42d4d 100644
--- a/Duplicati/Server/webroot/ngax/scripts/controllers/RestoreController.js
+++ b/Duplicati/Server/webroot/ngax/scripts/controllers/RestoreController.js
@@ -54,6 +54,9 @@ backupApp.controller('RestoreController', function ($rootScope, $scope, $routePa
for(var n in $scope.Filesets) {
var item = $scope.Filesets[n];
item.DisplayLabel = item.Version + ': ' + AppUtils.toDisplayDateAndTime(AppUtils.parseDate(item.Time));
+ if (item.IsFullBackup === 0) {
+ item.DisplayLabel = item.DisplayLabel + ' (partial)';
+ }
item.GroupLabel = n == 0 ? gettextCatalog.getString('Latest') : createGroupLabel(AppUtils.parseDate(item.Time));
filesetStamps[item.Version + ''] = item.Time;
diff --git a/Duplicati/Server/webroot/ngax/scripts/controllers/StateController.js b/Duplicati/Server/webroot/ngax/scripts/controllers/StateController.js
index e1b168a48..2d17b8f60 100644
--- a/Duplicati/Server/webroot/ngax/scripts/controllers/StateController.js
+++ b/Duplicati/Server/webroot/ngax/scripts/controllers/StateController.js
@@ -66,7 +66,7 @@ backupApp.controller('StateController', function($scope, $timeout, ServerStatus,
else if ($scope.state.lastPgEvent.Phase == 'Backup_Complete' || $scope.state.lastPgEvent.Phase == 'Backup_WaitForUpload')
{
pg = 1;
- }
+ }
else if ($scope.state.lastPgEvent.OverallProgress > 0) {
pg = $scope.state.lastPgEvent.OverallProgress;
}
@@ -89,28 +89,30 @@ backupApp.controller('StateController', function($scope, $timeout, ServerStatus,
function handleClick(ix) {
if (ix == 0)
{
- AppService.post('/task/' + taskId + '/stop');
+ AppService.post('/task/' + taskId + '/stopaftercurrentfile');
+ $scope.StopReqId = taskId;
+ }
+ else if (ix == 1) {
+ AppService.post('/task/' + taskId + '/stopnow');
$scope.StopReqId = taskId;
}
- else if (ix == 1)
- AppService.post('/task/' + taskId + '/abort');
};
if (txt.indexOf('Backup_') == 0)
{
DialogService.dialog(
- gettextCatalog.getString('Stop running backup'),
- gettextCatalog.getString('You can stop the backup immediately, or stop after the current file has been uploaded.'),
- [gettextCatalog.getString('Stop after upload'), gettextCatalog.getString('Stop now'), gettextCatalog.getString('Cancel')],
+ gettextCatalog.getString("Stop running backup"),
+            gettextCatalog.getString("You can stop the backup immediately, or stop after any file uploads currently in progress have finished."),
+ [gettextCatalog.getString("Stop after current file"), gettextCatalog.getString("Stop now"), gettextCatalog.getString("Cancel")],
handleClick
);
}
else
{
DialogService.dialog(
- gettextCatalog.getString('Stop running task'),
- gettextCatalog.getString('You can stop the task immediately, or allow the process to continue its current file and the stop.'),
- [gettextCatalog.getString('Stop after the current file'), gettextCatalog.getString('Stop now'), gettextCatalog.getString('Cancel')],
+ gettextCatalog.getString("Stop running task"),
+ gettextCatalog.getString("You can stop the task immediately, or allow the process to continue its current file and then stop."),
+ [gettextCatalog.getString("Stop after the current file"), gettextCatalog.getString("Stop now"), gettextCatalog.getString("Cancel")],
handleClick
);
}