From 0c68daff6c2204808c37a79ea4abd2a84f35f79c Mon Sep 17 00:00:00 2001
From: Kenneth Skovhede
Date: Thu, 15 Sep 2016 11:39:27 +0200
Subject: Fixed all whitespace to be 4 spaces instead of tabs

---
 .../Library/AutoUpdater/AutoUpdateSettings.cs | 66 +-
 Duplicati/Library/AutoUpdater/UpdaterManager.cs | 226 +++---
 .../AlternativeFTP/AlternativeFTPBackend.cs | 4 +-
 .../Library/Backend/AmazonCloudDrive/AmzCD.cs | 4 +-
 Duplicati/Library/Backend/Backblaze/B2.cs | 12 +-
 .../Library/Backend/Backblaze/B2AuthHelper.cs | 2 +-
 Duplicati/Library/Backend/Box/BoxBackend.cs | 4 +-
 Duplicati/Library/Backend/CloudFiles/CloudFiles.cs | 6 +-
 Duplicati/Library/Backend/Dropbox/Dropbox.cs | 72 +--
 Duplicati/Library/Backend/Dropbox/DropboxHelper.cs | 152 ++---
 Duplicati/Library/Backend/Dropbox/Strings.cs | 14 +-
 Duplicati/Library/Backend/FTP/FTPBackend.cs | 8 +-
 .../Backend/GoogleServices/GoogleCloudStorage.cs | 2 +-
 .../Library/Backend/GoogleServices/GoogleDrive.cs | 2 +-
 .../Library/Backend/GoogleServices/Strings.cs | 2 +-
 .../Library/Backend/OAuthHelper/JSONWebHelper.cs | 2 +-
 .../Library/Backend/OAuthHelper/OAuthHelper.cs | 44 +-
 Duplicati/Library/Backend/OneDrive/OneDrive.cs | 8 +-
 .../Library/Backend/OpenStack/OpenStackStorage.cs | 2 +-
 Duplicati/Library/Backend/S3/S3Backend.cs | 8 +-
 Duplicati/Library/Backend/S3/S3IAM.cs | 302 +++++-----
 Duplicati/Library/Backend/S3/S3Wrapper.cs | 2 +-
 Duplicati/Library/Backend/SSHv2/SSHv2Backend.cs | 4 +-
 Duplicati/Library/Backend/SSHv2/Strings.cs | 2 +-
 .../Backend/SharePoint/SharePointBackend.cs | 10 +-
 .../Library/Backend/TahoeLAFS/TahoeBackend.cs | 4 +-
 Duplicati/Library/Backend/WEBDAV/WEBDAV.cs | 42 +-
 Duplicati/Library/Compression/FileArchiveZip.cs | 28 +-
 Duplicati/Library/Compression/StreamWrapper.cs | 2 +-
 .../Library/Interface/IGenericCallbackModule.cs | 36 +-
 Duplicati/Library/Interface/ResultInterfaces.cs | 28 +-
 Duplicati/Library/Main/AsyncDownloader.cs | 134 ++---
 Duplicati/Library/Main/BackendManager.cs | 348 +++++------
 Duplicati/Library/Main/Controller.cs | 150 ++---
 .../Library/Main/Database/LocalBackupDatabase.cs | 20 +-
 .../Main/Database/LocalBugReportDatabase.cs | 70 +--
 Duplicati/Library/Main/Database/LocalDatabase.cs | 252 ++++----
 .../Library/Main/Database/LocalDeleteDatabase.cs | 666 ++++++++++-----------
 .../Library/Main/Database/LocalListDatabase.cs | 6 +-
 .../Library/Main/Database/LocalRecreateDatabase.cs | 12 +-
 .../Library/Main/Database/LocalRepairDatabase.cs | 182 +++---
 .../Library/Main/Database/LocalRestoreDatabase.cs | 250 ++++----
 .../Library/Main/Database/RemoteVolumeEntry.cs | 12 +-
 Duplicati/Library/Main/Operation/BackupHandler.cs | 124 ++--
 Duplicati/Library/Main/Operation/CompactHandler.cs | 258 ++++----
 .../Main/Operation/CreateBugReportHandler.cs | 24 +-
 Duplicati/Library/Main/Operation/DeleteHandler.cs | 24 +-
 .../Library/Main/Operation/FilelistProcessor.cs | 50 +-
 .../Library/Main/Operation/ListFilesHandler.cs | 2 +-
 .../Main/Operation/RecreateDatabaseHandler.cs | 70 +--
 Duplicati/Library/Main/Operation/RepairHandler.cs | 34 +-
 .../Main/Operation/RestoreControlFilesHandler.cs | 70 +--
 Duplicati/Library/Main/Operation/RestoreHandler.cs | 218 +++---
 .../Operation/RestoreHandlerMetadataStorage.cs | 28 +-
 Duplicati/Library/Main/Options.cs | 30 +-
 Duplicati/Library/Main/Volumes/VolumeBase.cs | 54 +-
 Duplicati/Library/Main/Volumes/VolumeWriterBase.cs | 14 +-
 Duplicati/Library/Modules/Builtin/HttpOptions.cs | 96 +--
 Duplicati/Library/Modules/Builtin/RunScript.cs | 120 ++--
 Duplicati/Library/Modules/Builtin/Strings.cs | 6 +-
 Duplicati/Library/Snapshots/LinuxSnapshot.cs | 6 +-
 Duplicati/Library/Snapshots/NoSnapshot.cs | 6 +-
 Duplicati/Library/Snapshots/NoSnapshotWindows.cs | 8 +-
 Duplicati/Library/Snapshots/WindowsSnapshot.cs | 8 +-
 .../Library/UsageReporter/ReportSetUploader.cs | 26 +-
 Duplicati/Library/Utility/AsyncHttpRequest.cs | 18 +-
 Duplicati/Library/Utility/CommandLineParser.cs | 4 +-
 Duplicati/Library/Utility/FileBackedList.cs | 128 ++--
 Duplicati/Library/Utility/HostKeyException.cs | 84 +--
 Duplicati/Library/Utility/IFilter.cs | 22 +-
 Duplicati/Library/Utility/TempFile.cs | 66 +-
 Duplicati/Library/Utility/Utility.cs | 24 +-
 72 files changed, 2399 insertions(+), 2425 deletions(-)

(limited to 'Duplicati/Library')

diff --git a/Duplicati/Library/AutoUpdater/AutoUpdateSettings.cs b/Duplicati/Library/AutoUpdater/AutoUpdateSettings.cs
index 76c06724a..850dab24b 100644
--- a/Duplicati/Library/AutoUpdater/AutoUpdateSettings.cs
+++ b/Duplicati/Library/AutoUpdater/AutoUpdateSettings.cs
@@ -28,15 +28,15 @@ namespace Duplicati.Library.AutoUpdater
         private const string APP_NAME = "AutoUpdateAppName.txt";
         private const string UPDATE_URL = "AutoUpdateURL.txt";
         private const string UPDATE_KEY = "AutoUpdateSignKey.txt";
-        private const string UPDATE_CHANNEL = "AutoUpdateBuildChannel.txt";
+        private const string UPDATE_CHANNEL = "AutoUpdateBuildChannel.txt";
         private const string UPDATE_README = "AutoUpdateFolderReadme.txt";
         private const string UPDATE_INSTALL_FILE = "AutoUpdateInstallIDTemplate.txt";

-        private const string OEM_APP_NAME = "oem-app-name.txt";
-        private const string OEM_UPDATE_URL = "oem-update-url.txt";
-        private const string OEM_UPDATE_KEY = "oem-update-key.txt";
-        private const string OEM_UPDATE_README = "oem-update-readme.txt";
-        private const string OEM_UPDATE_INSTALL_FILE = "oem-update-installid.txt";
+        private const string OEM_APP_NAME = "oem-app-name.txt";
+        private const string OEM_UPDATE_URL = "oem-update-url.txt";
+        private const string OEM_UPDATE_KEY = "oem-update-key.txt";
+        private const string OEM_UPDATE_README = "oem-update-readme.txt";
+        private const string OEM_UPDATE_INSTALL_FILE = "oem-update-installid.txt";

         internal const string UPDATEURL_ENVNAME_TEMPLATE = "AUTOUPDATER_{0}_URLS";
         internal const string UPDATECHANNEL_ENVNAME_TEMPLATE = "AUTOUPDATER_{0}_CHANNEL";
@@ -56,15 +56,15 @@ namespace Duplicati.Library.AutoUpdater

         static AutoUpdateSettings()
         {
-            ReadResourceText(APP_NAME, OEM_APP_NAME);
-            ReadResourceText(UPDATE_URL, OEM_UPDATE_URL);
-            ReadResourceText(UPDATE_KEY, OEM_UPDATE_KEY);
-            ReadResourceText(UPDATE_README, OEM_UPDATE_README);
-            ReadResourceText(UPDATE_INSTALL_FILE, OEM_UPDATE_INSTALL_FILE);
-            ReadResourceText(UPDATE_CHANNEL, null);
+            ReadResourceText(APP_NAME, OEM_APP_NAME);
+            ReadResourceText(UPDATE_URL, OEM_UPDATE_URL);
+            ReadResourceText(UPDATE_KEY, OEM_UPDATE_KEY);
+            ReadResourceText(UPDATE_README, OEM_UPDATE_README);
+            ReadResourceText(UPDATE_INSTALL_FILE, OEM_UPDATE_INSTALL_FILE);
+            ReadResourceText(UPDATE_CHANNEL, null);
         }

-        private static string ReadResourceText(string name, string oemname)
+        private static string ReadResourceText(string name, string oemname)
         {
             string result;
             if (_cache.TryGetValue(name, out result))
@@ -80,15 +80,15 @@ namespace Duplicati.Library.AutoUpdater
             {
             }

-            try
-            {
-                // Check for OEM override
-                if (!string.IsNullOrWhiteSpace(oemname) && System.IO.File.Exists(oemname))
-                    result = System.IO.File.ReadAllText(oemname);
-            }
-            catch
-            {
-            }
+            try
+            {
+                // Check for OEM override
+                if (!string.IsNullOrWhiteSpace(oemname) && 
System.IO.File.Exists(oemname)) + result = System.IO.File.ReadAllText(oemname); + } + catch + { + } if (string.IsNullOrWhiteSpace(result)) result = ""; @@ -106,7 +106,7 @@ namespace Duplicati.Library.AutoUpdater if (UsesAlternateURLs) return Environment.GetEnvironmentVariable(string.Format(UPDATEURL_ENVNAME_TEMPLATE, AppName)).Split(new char[] { ';' }, StringSplitOptions.RemoveEmptyEntries); else - return ReadResourceText(UPDATE_URL, OEM_UPDATE_URL).Split(new char[] { ';' }, StringSplitOptions.RemoveEmptyEntries);; + return ReadResourceText(UPDATE_URL, OEM_UPDATE_URL).Split(new char[] { ';' }, StringSplitOptions.RemoveEmptyEntries);; } } @@ -137,8 +137,8 @@ namespace Duplicati.Library.AutoUpdater } } - if (string.IsNullOrWhiteSpace(channelstring)) - channelstring = BuildUpdateChannel; + if (string.IsNullOrWhiteSpace(channelstring)) + channelstring = BuildUpdateChannel; if (string.IsNullOrWhiteSpace(channelstring)) channelstring = UpdaterManager.BaseVersion.ReleaseType; @@ -160,22 +160,22 @@ namespace Duplicati.Library.AutoUpdater public static string AppName { - get { return ReadResourceText(APP_NAME, OEM_APP_NAME); } + get { return ReadResourceText(APP_NAME, OEM_APP_NAME); } } - public static string BuildUpdateChannel - { - get { return ReadResourceText(UPDATE_CHANNEL, null); } - } + public static string BuildUpdateChannel + { + get { return ReadResourceText(UPDATE_CHANNEL, null); } + } public static string UpdateFolderReadme { - get { return ReadResourceText(UPDATE_README, OEM_UPDATE_README); } + get { return ReadResourceText(UPDATE_README, OEM_UPDATE_README); } } public static string UpdateInstallFileText { - get { return string.Format(ReadResourceText(UPDATE_INSTALL_FILE, OEM_UPDATE_INSTALL_FILE), Guid.NewGuid().ToString("N")); } + get { return string.Format(ReadResourceText(UPDATE_INSTALL_FILE, OEM_UPDATE_INSTALL_FILE), Guid.NewGuid().ToString("N")); } } public static System.Security.Cryptography.RSACryptoServiceProvider SignKey @@ -185,7 +185,7 @@ namespace Duplicati.Library.AutoUpdater try { var key = System.Security.Cryptography.RSACryptoServiceProvider.Create(); - key.FromXmlString(ReadResourceText(UPDATE_KEY, OEM_UPDATE_KEY)); + key.FromXmlString(ReadResourceText(UPDATE_KEY, OEM_UPDATE_KEY)); return (System.Security.Cryptography.RSACryptoServiceProvider)key; } catch diff --git a/Duplicati/Library/AutoUpdater/UpdaterManager.cs b/Duplicati/Library/AutoUpdater/UpdaterManager.cs index 847737ca7..7bc058e01 100644 --- a/Duplicati/Library/AutoUpdater/UpdaterManager.cs +++ b/Duplicati/Library/AutoUpdater/UpdaterManager.cs @@ -41,9 +41,9 @@ namespace Duplicati.Library.AutoUpdater public static readonly string INSTALLDIR; private static readonly string INSTALLED_BASE_DIR = - string.IsNullOrWhiteSpace(System.Environment.GetEnvironmentVariable(string.Format(BASEINSTALLDIR_ENVNAME_TEMPLATE, APPNAME))) - ? System.IO.Path.GetDirectoryName(Duplicati.Library.Utility.Utility.getEntryAssembly().Location) - : Library.Utility.Utility.ExpandEnvironmentVariables(System.Environment.GetEnvironmentVariable(string.Format(BASEINSTALLDIR_ENVNAME_TEMPLATE, APPNAME))); + string.IsNullOrWhiteSpace(System.Environment.GetEnvironmentVariable(string.Format(BASEINSTALLDIR_ENVNAME_TEMPLATE, APPNAME))) + ? 
System.IO.Path.GetDirectoryName(Duplicati.Library.Utility.Utility.getEntryAssembly().Location) + : Library.Utility.Utility.ExpandEnvironmentVariables(System.Environment.GetEnvironmentVariable(string.Format(BASEINSTALLDIR_ENVNAME_TEMPLATE, APPNAME))); private static readonly bool DISABLE_UPDATE_DOMAIN = !string.IsNullOrWhiteSpace(System.Environment.GetEnvironmentVariable(string.Format(SKIPUPDATE_ENVNAME_TEMPLATE, APPNAME))); @@ -58,8 +58,8 @@ namespace Duplicati.Library.AutoUpdater public static event Action OnError; private const string DATETIME_FORMAT = "yyyymmddhhMMss"; - private const string BASEINSTALLDIR_ENVNAME_TEMPLATE = "AUTOUPDATER_{0}_INSTALL_ROOT"; - private const string UPDATEINSTALLDIR_ENVNAME_TEMPLATE = "AUTOUPDATER_{0}_UPDATE_ROOT"; + private const string BASEINSTALLDIR_ENVNAME_TEMPLATE = "AUTOUPDATER_{0}_INSTALL_ROOT"; + private const string UPDATEINSTALLDIR_ENVNAME_TEMPLATE = "AUTOUPDATER_{0}_UPDATE_ROOT"; internal const string SKIPUPDATE_ENVNAME_TEMPLATE = "AUTOUPDATER_{0}_SKIP_UPDATE"; private const string RUN_UPDATED_FOLDER_PATH = "AUTOUPDATER_LOAD_UPDATE"; private const string SLEEP_ENVNAME_TEMPLATE = "AUTOUPDATER_{0}_SLEEP"; @@ -82,103 +82,103 @@ namespace Duplicati.Library.AutoUpdater static UpdaterManager() { - // Update folder strategy is a bit complicated, - // because it depends on the actual system, - // and because it tries to find a good spot - // by probing for locations - - // The "overrides" paths are checked, - // to see if they exist and are writeable. - // The first existing and writeable path - // for "overrides" is chosen - - // If override was not found, the "legacypaths" - // are checked in the same way to see if - // we have previously used such a folder - // and if that folder has contents, - // which indicates that it has been used. 
- - // Finally we check the "attempts", - // which are suitable candidates - // for storing the updates on each - // operating system - - if (string.IsNullOrWhiteSpace(System.Environment.GetEnvironmentVariable(string.Format(UPDATEINSTALLDIR_ENVNAME_TEMPLATE, APPNAME)))) - { - string installdir = null; - var programfiles = System.Environment.GetFolderPath(Environment.SpecialFolder.ProgramFiles); - - // The user can override updates by having a local updates folder - var overrides = new List(new string [] { - System.IO.Path.Combine(InstalledBaseDir, "updates"), - }); - - if (Library.Utility.Utility.IsClientWindows) - { - overrides.Add(System.IO.Path.Combine(System.Environment.GetFolderPath(Environment.SpecialFolder.LocalApplicationData), APPNAME, "updates")); - overrides.Add(System.IO.Path.Combine(System.Environment.GetFolderPath(Environment.SpecialFolder.ApplicationData), APPNAME, "updates")); - } - else - { - if (Library.Utility.Utility.IsClientOSX) - overrides.Add(System.IO.Path.Combine(Environment.GetFolderPath(Environment.SpecialFolder.Personal), "Library", "Application Support", APPNAME, "updates")); - - overrides.Add(System.IO.Path.Combine(System.Environment.GetFolderPath(Environment.SpecialFolder.ApplicationData), APPNAME, "updates")); - } - - // Previous locations that we don't want to use, - // but we keep them active to avoid breaking the update syste - var legacypaths = new List(); - - if (!string.IsNullOrWhiteSpace(programfiles)) - legacypaths.Add(System.IO.Path.Combine(programfiles, APPNAME, "updates")); - if (Library.Utility.Utility.IsClientLinux) - legacypaths.Add(System.IO.Path.Combine(System.Environment.GetFolderPath(Environment.SpecialFolder.LocalApplicationData), APPNAME, "updates")); - - // The real attempts that we probe for - var attempts = new List(); - - // We do not want to install anything in the basedir, if the application is installed in "ProgramFiles" - if (!string.IsNullOrWhiteSpace(programfiles) && !InstalledBaseDir.StartsWith(Library.Utility.Utility.AppendDirSeparator(programfiles))) - attempts.Add(System.IO.Path.Combine(InstalledBaseDir, "updates")); - - if (Library.Utility.Utility.IsClientOSX) - attempts.Add(System.IO.Path.Combine("/", "Library", "Application Support", APPNAME, "updates")); - else - attempts.Add(System.IO.Path.Combine(System.Environment.GetFolderPath(Environment.SpecialFolder.CommonApplicationData), APPNAME, "updates")); - - attempts.AddRange(overrides.Skip(1)); - - // Check if the override folder exists, and choose that - foreach (var p in overrides) - if (!string.IsNullOrWhiteSpace(p) && System.IO.Directory.Exists(p) && TestDirectoryIsWriteable(p)) - { - installdir = p; - break; - } - - if (string.IsNullOrWhiteSpace(installdir)) - foreach (var p in legacypaths) - if (!string.IsNullOrWhiteSpace(p) && System.IO.Directory.Exists(p) && System.IO.Directory.EnumerateFiles(p, "*", System.IO.SearchOption.TopDirectoryOnly).Count() > 0 && TestDirectoryIsWriteable(p)) - { - installdir = p; - break; - } - - if (string.IsNullOrWhiteSpace(installdir)) - foreach (var p in attempts) - if (!string.IsNullOrWhiteSpace(p) && TestDirectoryIsWriteable(p)) - { - installdir = p; - break; - } + // Update folder strategy is a bit complicated, + // because it depends on the actual system, + // and because it tries to find a good spot + // by probing for locations + + // The "overrides" paths are checked, + // to see if they exist and are writeable. 
+ // The first existing and writeable path + // for "overrides" is chosen + + // If override was not found, the "legacypaths" + // are checked in the same way to see if + // we have previously used such a folder + // and if that folder has contents, + // which indicates that it has been used. + + // Finally we check the "attempts", + // which are suitable candidates + // for storing the updates on each + // operating system + + if (string.IsNullOrWhiteSpace(System.Environment.GetEnvironmentVariable(string.Format(UPDATEINSTALLDIR_ENVNAME_TEMPLATE, APPNAME)))) + { + string installdir = null; + var programfiles = System.Environment.GetFolderPath(Environment.SpecialFolder.ProgramFiles); + + // The user can override updates by having a local updates folder + var overrides = new List(new string [] { + System.IO.Path.Combine(InstalledBaseDir, "updates"), + }); + + if (Library.Utility.Utility.IsClientWindows) + { + overrides.Add(System.IO.Path.Combine(System.Environment.GetFolderPath(Environment.SpecialFolder.LocalApplicationData), APPNAME, "updates")); + overrides.Add(System.IO.Path.Combine(System.Environment.GetFolderPath(Environment.SpecialFolder.ApplicationData), APPNAME, "updates")); + } + else + { + if (Library.Utility.Utility.IsClientOSX) + overrides.Add(System.IO.Path.Combine(Environment.GetFolderPath(Environment.SpecialFolder.Personal), "Library", "Application Support", APPNAME, "updates")); + + overrides.Add(System.IO.Path.Combine(System.Environment.GetFolderPath(Environment.SpecialFolder.ApplicationData), APPNAME, "updates")); + } + + // Previous locations that we don't want to use, + // but we keep them active to avoid breaking the update syste + var legacypaths = new List(); + + if (!string.IsNullOrWhiteSpace(programfiles)) + legacypaths.Add(System.IO.Path.Combine(programfiles, APPNAME, "updates")); + if (Library.Utility.Utility.IsClientLinux) + legacypaths.Add(System.IO.Path.Combine(System.Environment.GetFolderPath(Environment.SpecialFolder.LocalApplicationData), APPNAME, "updates")); + + // The real attempts that we probe for + var attempts = new List(); + + // We do not want to install anything in the basedir, if the application is installed in "ProgramFiles" + if (!string.IsNullOrWhiteSpace(programfiles) && !InstalledBaseDir.StartsWith(Library.Utility.Utility.AppendDirSeparator(programfiles))) + attempts.Add(System.IO.Path.Combine(InstalledBaseDir, "updates")); + + if (Library.Utility.Utility.IsClientOSX) + attempts.Add(System.IO.Path.Combine("/", "Library", "Application Support", APPNAME, "updates")); + else + attempts.Add(System.IO.Path.Combine(System.Environment.GetFolderPath(Environment.SpecialFolder.CommonApplicationData), APPNAME, "updates")); + + attempts.AddRange(overrides.Skip(1)); + + // Check if the override folder exists, and choose that + foreach (var p in overrides) + if (!string.IsNullOrWhiteSpace(p) && System.IO.Directory.Exists(p) && TestDirectoryIsWriteable(p)) + { + installdir = p; + break; + } + + if (string.IsNullOrWhiteSpace(installdir)) + foreach (var p in legacypaths) + if (!string.IsNullOrWhiteSpace(p) && System.IO.Directory.Exists(p) && System.IO.Directory.EnumerateFiles(p, "*", System.IO.SearchOption.TopDirectoryOnly).Count() > 0 && TestDirectoryIsWriteable(p)) + { + installdir = p; + break; + } - INSTALLDIR = installdir; - } - else - { - INSTALLDIR = Library.Utility.Utility.ExpandEnvironmentVariables(System.Environment.GetEnvironmentVariable(string.Format(UPDATEINSTALLDIR_ENVNAME_TEMPLATE, APPNAME))); - } + if (string.IsNullOrWhiteSpace(installdir)) + 
foreach (var p in attempts) + if (!string.IsNullOrWhiteSpace(p) && TestDirectoryIsWriteable(p)) + { + installdir = p; + break; + } + + INSTALLDIR = installdir; + } + else + { + INSTALLDIR = Library.Utility.Utility.ExpandEnvironmentVariables(System.Environment.GetEnvironmentVariable(string.Format(UPDATEINSTALLDIR_ENVNAME_TEMPLATE, APPNAME))); + } if (INSTALLDIR != null) @@ -207,22 +207,22 @@ namespace Duplicati.Library.AutoUpdater { } - if (selfVersion == null) - { - selfVersion = new UpdateInfo() { - Displayname = string.IsNullOrWhiteSpace(Duplicati.License.VersionNumbers.TAG) ? "Current" : Duplicati.License.VersionNumbers.TAG, - Version = System.Reflection.Assembly.GetExecutingAssembly().GetName().Version.ToString(), - ReleaseTime = new DateTime(0), - ReleaseType = + if (selfVersion == null) + { + selfVersion = new UpdateInfo() { + Displayname = string.IsNullOrWhiteSpace(Duplicati.License.VersionNumbers.TAG) ? "Current" : Duplicati.License.VersionNumbers.TAG, + Version = System.Reflection.Assembly.GetExecutingAssembly().GetName().Version.ToString(), + ReleaseTime = new DateTime(0), + ReleaseType = #if DEBUG "Debug" #else - string.IsNullOrWhiteSpace(AutoUpdateSettings.BuildUpdateChannel) ? "Nightly" : AutoUpdateSettings.BuildUpdateChannel + string.IsNullOrWhiteSpace(AutoUpdateSettings.BuildUpdateChannel) ? "Nightly" : AutoUpdateSettings.BuildUpdateChannel #endif - }; + }; - } + } if (baseVersion == null) baseVersion = selfVersion; @@ -445,9 +445,9 @@ namespace Duplicati.Library.AutoUpdater wreq.UserAgent = string.Format("{0} v{1}", APPNAME, SelfVersion.Version); wreq.Headers.Add("X-Install-ID", InstallID); - var areq = new Duplicati.Library.Utility.AsyncHttpRequest(wreq); + var areq = new Duplicati.Library.Utility.AsyncHttpRequest(wreq); using(var resp = areq.GetResponse()) - using(var rss = areq.GetResponseStream()) + using(var rss = areq.GetResponseStream()) using(var pgs = new Duplicati.Library.Utility.ProgressReportingStream(rss, version.CompressedSize, cb)) using(var fs = System.IO.File.Open(tempfile, System.IO.FileMode.Create)) Duplicati.Library.Utility.Utility.CopyStream(pgs, fs); diff --git a/Duplicati/Library/Backend/AlternativeFTP/AlternativeFTPBackend.cs b/Duplicati/Library/Backend/AlternativeFTP/AlternativeFTPBackend.cs index 0087b51d7..a96df6616 100644 --- a/Duplicati/Library/Backend/AlternativeFTP/AlternativeFTPBackend.cs +++ b/Duplicati/Library/Backend/AlternativeFTP/AlternativeFTPBackend.cs @@ -177,7 +177,7 @@ namespace Duplicati.Library.Backend.AlternativeFTP // Process the aftp-ssl-protocols option string sslProtocolsString; - if (!options.TryGetValue(CONFIG_KEY_AFTP_SSL_PROTOCOLS, out sslProtocolsString) || string.IsNullOrWhiteSpace(sslProtocolsString)) + if (!options.TryGetValue(CONFIG_KEY_AFTP_SSL_PROTOCOLS, out sslProtocolsString) || string.IsNullOrWhiteSpace(sslProtocolsString)) { sslProtocolsString = null; } @@ -288,7 +288,7 @@ namespace Duplicati.Library.Backend.AlternativeFTP } } } - }// Message "Directory not found." string + }// Message "Directory not found." 
string catch (FtpCommandException ex) { if (ex.Message == "Directory not found.") diff --git a/Duplicati/Library/Backend/AmazonCloudDrive/AmzCD.cs b/Duplicati/Library/Backend/AmazonCloudDrive/AmzCD.cs index 761c45bc9..116ed5ca4 100644 --- a/Duplicati/Library/Backend/AmazonCloudDrive/AmzCD.cs +++ b/Duplicati/Library/Backend/AmazonCloudDrive/AmzCD.cs @@ -295,8 +295,8 @@ namespace Duplicati.Library.Backend.AmazonCloudDrive public void Get(string remotename, System.IO.Stream stream) { using (var resp = m_oauth.GetResponse(string.Format("{0}/nodes/{1}/content", ContentUrl, GetFileID(remotename)))) - using(var rs = Library.Utility.AsyncHttpRequest.TrySetTimeout(resp.GetResponseStream())) - Utility.Utility.CopyStream(rs, stream); + using(var rs = Library.Utility.AsyncHttpRequest.TrySetTimeout(resp.GetResponseStream())) + Utility.Utility.CopyStream(rs, stream); } #endregion diff --git a/Duplicati/Library/Backend/Backblaze/B2.cs b/Duplicati/Library/Backend/Backblaze/B2.cs index ff7472701..0f9e65f81 100644 --- a/Duplicati/Library/Backend/Backblaze/B2.cs +++ b/Duplicati/Library/Backend/Backblaze/B2.cs @@ -36,7 +36,7 @@ namespace Duplicati.Library.Backend.Backblaze private string m_bucketname; private string m_prefix; - private string m_urlencodedprefix; + private string m_urlencodedprefix; private string m_bucketType; private B2AuthHelper m_helper; private UploadUrlResponse m_uploadUrl; @@ -62,7 +62,7 @@ namespace Duplicati.Library.Backend.Backblaze while(m_prefix.StartsWith("/")) m_prefix = m_prefix.Substring(1); - m_urlencodedprefix = string.Join("/", m_prefix.Split(new [] { '/' }).Select(x => Library.Utility.Uri.UrlPathEncode(x))); + m_urlencodedprefix = string.Join("/", m_prefix.Split(new [] { '/' }).Select(x => Library.Utility.Uri.UrlPathEncode(x))); m_bucketType = DEFAULT_BUCKET_TYPE; if (options.ContainsKey(B2_CREATE_BUCKET_TYPE_OPTION)) @@ -209,7 +209,7 @@ namespace Duplicati.Library.Backend.Backblaze req.Method = "POST"; req.Headers["Authorization"] = UploadUrlData.AuthorizationToken; req.Headers["X-Bz-Content-Sha1"] = sha1; - req.Headers["X-Bz-File-Name"] = m_urlencodedprefix + Utility.Uri.UrlPathEncode(remotename); + req.Headers["X-Bz-File-Name"] = m_urlencodedprefix + Utility.Uri.UrlPathEncode(remotename); req.ContentType = "application/octet-stream"; req.ContentLength = stream.Length; }, @@ -266,12 +266,12 @@ namespace Duplicati.Library.Backend.Backblaze if (m_filecache != null && m_filecache.ContainsKey(remotename)) req = new AsyncHttpRequest(m_helper.CreateRequest(string.Format("{0}/b2api/v1/b2_download_file_by_id?fileId={1}", m_helper.DownloadUrl, Library.Utility.Uri.UrlEncode(GetFileID(remotename))))); else - req = new AsyncHttpRequest(m_helper.CreateRequest(string.Format("{0}/{1}{2}", m_helper.DownloadUrl, m_urlencodedprefix, Library.Utility.Uri.UrlPathEncode(remotename)))); + req = new AsyncHttpRequest(m_helper.CreateRequest(string.Format("{0}/{1}{2}", m_helper.DownloadUrl, m_urlencodedprefix, Library.Utility.Uri.UrlPathEncode(remotename)))); try { - using(var resp = req.GetResponse()) - using(var rs = req.GetResponseStream()) + using(var resp = req.GetResponse()) + using(var rs = req.GetResponseStream()) Library.Utility.Utility.CopyStream(rs, stream); } catch (Exception ex) diff --git a/Duplicati/Library/Backend/Backblaze/B2AuthHelper.cs b/Duplicati/Library/Backend/Backblaze/B2AuthHelper.cs index 9b4a98817..8adef2e89 100644 --- a/Duplicati/Library/Backend/Backblaze/B2AuthHelper.cs +++ b/Duplicati/Library/Backend/Backblaze/B2AuthHelper.cs @@ -123,7 +123,7 @@ namespace 
Duplicati.Library.Backend.Backblaze { string rawdata = null; var hs = (ex as WebException).Response as HttpWebResponse; - using(var rs = Library.Utility.AsyncHttpRequest.TrySetTimeout(hs.GetResponseStream())) + using(var rs = Library.Utility.AsyncHttpRequest.TrySetTimeout(hs.GetResponseStream())) using(var sr = new System.IO.StreamReader(rs)) rawdata = sr.ReadToEnd(); diff --git a/Duplicati/Library/Backend/Box/BoxBackend.cs b/Duplicati/Library/Backend/Box/BoxBackend.cs index 38b451c7c..51e3c838e 100644 --- a/Duplicati/Library/Backend/Box/BoxBackend.cs +++ b/Duplicati/Library/Backend/Box/BoxBackend.cs @@ -57,7 +57,7 @@ namespace Duplicati.Library.Backend.Box { string rawdata = null; var hs = (ex as WebException).Response as HttpWebResponse; - using(var rs = Library.Utility.AsyncHttpRequest.TrySetTimeout(hs.GetResponseStream())) + using(var rs = Library.Utility.AsyncHttpRequest.TrySetTimeout(hs.GetResponseStream())) using(var sr = new System.IO.StreamReader(rs)) rawdata = sr.ReadToEnd(); @@ -244,7 +244,7 @@ namespace Duplicati.Library.Backend.Box public void Get(string remotename, System.IO.Stream stream) { using (var resp = m_oauth.GetResponse(string.Format("{0}/files/{1}/content", BOX_API_URL, GetFileID(remotename)))) - using(var rs = Duplicati.Library.Utility.AsyncHttpRequest.TrySetTimeout(resp.GetResponseStream())) + using(var rs = Duplicati.Library.Utility.AsyncHttpRequest.TrySetTimeout(resp.GetResponseStream())) Library.Utility.Utility.CopyStream(rs, stream); } diff --git a/Duplicati/Library/Backend/CloudFiles/CloudFiles.cs b/Duplicati/Library/Backend/CloudFiles/CloudFiles.cs index 6f83c2af2..0f0063343 100644 --- a/Duplicati/Library/Backend/CloudFiles/CloudFiles.cs +++ b/Duplicati/Library/Backend/CloudFiles/CloudFiles.cs @@ -146,7 +146,7 @@ namespace Duplicati.Library.Backend { var areq = new Utility.AsyncHttpRequest(req); using (var resp = (HttpWebResponse)areq.GetResponse()) - using (var s = areq.GetResponseStream()) + using (var s = areq.GetResponseStream()) doc.Load(s); } catch (WebException wex) @@ -223,7 +223,7 @@ namespace Duplicati.Library.Backend if ((int)resp.StatusCode >= 300) throw new WebException(Strings.CloudFiles.FileDeleteError, null, WebExceptionStatus.ProtocolError, resp); else - using (areq.GetResponseStream()) + using (areq.GetResponseStream()) { } } } @@ -299,7 +299,7 @@ namespace Duplicati.Library.Backend var areq = new Utility.AsyncHttpRequest(req); using (var resp = areq.GetResponse()) - using (var s = areq.GetResponseStream()) + using (var s = areq.GetResponseStream()) using (var mds = new Utility.MD5CalculatingStream(s)) { string md5Hash = resp.Headers["ETag"]; diff --git a/Duplicati/Library/Backend/Dropbox/Dropbox.cs b/Duplicati/Library/Backend/Dropbox/Dropbox.cs index e438bf147..8a70684a0 100644 --- a/Duplicati/Library/Backend/Dropbox/Dropbox.cs +++ b/Duplicati/Library/Backend/Dropbox/Dropbox.cs @@ -41,7 +41,7 @@ namespace Duplicati.Library.Backend public string DisplayName { - get { return Strings.Dropbox.DisplayName; } + get { return Strings.Dropbox.DisplayName; } } public string ProtocolKey @@ -49,41 +49,41 @@ namespace Duplicati.Library.Backend get { return "dropbox"; } } - private FileEntry ParseEntry(MetaData md) - { - var ife = new FileEntry(md.name); - if (md.IsFile) - { - ife.IsFolder = false; - ife.Size = (long)md.size; - } - else - { - ife.IsFolder = true; - } - - try { ife.LastModification = ife.LastAccess = DateTime.Parse(md.server_modified).ToUniversalTime(); } - catch { } - - return ife; - } + private FileEntry ParseEntry(MetaData md) + { + 
var ife = new FileEntry(md.name); + if (md.IsFile) + { + ife.IsFolder = false; + ife.Size = (long)md.size; + } + else + { + ife.IsFolder = true; + } + + try { ife.LastModification = ife.LastAccess = DateTime.Parse(md.server_modified).ToUniversalTime(); } + catch { } + + return ife; + } public List List() { try { - var list = new List(); - var lfr = dbx.ListFiles(m_path); + var list = new List(); + var lfr = dbx.ListFiles(m_path); foreach (var md in lfr.entries) - list.Add(ParseEntry(md)); + list.Add(ParseEntry(md)); while (lfr.has_more) { lfr = dbx.ListFilesContinue(lfr.cursor); foreach (var md in lfr.entries) - list.Add(ParseEntry(md)); - } + list.Add(ParseEntry(md)); + } return list; } @@ -91,7 +91,7 @@ namespace Duplicati.Library.Backend { if (de.errorJSON["error"][".tag"].ToString() == "path" && de.errorJSON["error"]["path"][".tag"].ToString() == "not_found") throw new FolderMissingException(); - + throw; } } @@ -122,17 +122,17 @@ namespace Duplicati.Library.Backend } } - public IList SupportedCommands - { - get - { - return new List(new ICommandLineArgument[] { - new CommandLineArgument(AUTHID_OPTION, CommandLineArgument.ArgumentType.Password, Strings.Dropbox.AuthidShort, Strings.Dropbox.AuthidLong(OAuthHelper.OAUTH_LOGIN_URL("dropbox"))), - }); - } - } + public IList SupportedCommands + { + get + { + return new List(new ICommandLineArgument[] { + new CommandLineArgument(AUTHID_OPTION, CommandLineArgument.ArgumentType.Password, Strings.Dropbox.AuthidShort, Strings.Dropbox.AuthidLong(OAuthHelper.OAUTH_LOGIN_URL("dropbox"))), + }); + } + } - public string Description { get { return Strings.Dropbox.Description; } } + public string Description { get { return Strings.Dropbox.Description; } } public void Test() { @@ -182,4 +182,4 @@ namespace Duplicati.Library.Backend } } } -} \ No newline at end of file +} diff --git a/Duplicati/Library/Backend/Dropbox/DropboxHelper.cs b/Duplicati/Library/Backend/Dropbox/DropboxHelper.cs index 91d5e75e0..5573b1c78 100644 --- a/Duplicati/Library/Backend/Dropbox/DropboxHelper.cs +++ b/Duplicati/Library/Backend/Dropbox/DropboxHelper.cs @@ -9,18 +9,18 @@ using Newtonsoft.Json.Linq; namespace Duplicati.Library.Backend { - public class DropboxHelper : OAuthHelper + public class DropboxHelper : OAuthHelper { private const string API_URL = "https://api.dropboxapi.com/2"; private const string CONTENT_API_URL = "https://content.dropboxapi.com/2"; - private const int DROPBOX_MAX_CHUNK_UPLOAD = 10 * 1024 * 1024; // 10 MB max upload + private const int DROPBOX_MAX_CHUNK_UPLOAD = 10 * 1024 * 1024; // 10 MB max upload private const string API_ARG_HEADER = "DROPBOX-API-arg"; public DropboxHelper(string accessToken) - : base(accessToken, "dropbox") + : base(accessToken, "dropbox") { - base.AutoAuthHeader = true; - base.AccessTokenOnly = true; + base.AutoAuthHeader = true; + base.AccessTokenOnly = true; } public ListFolderResult ListFiles(string path) @@ -43,7 +43,7 @@ namespace Duplicati.Library.Backend public ListFolderResult ListFilesContinue(string cursor) { - var lfca = new ListFolderContinueArg() { cursor = cursor }; + var lfca = new ListFolderContinueArg() { cursor = cursor }; var url = string.Format("{0}/files/list_folder/continue", API_URL); try @@ -59,12 +59,12 @@ namespace Duplicati.Library.Backend public FolderMetadata CreateFolder(string path) { - var pa = new PathArg() { path = path }; + var pa = new PathArg() { path = path }; var url = string.Format("{0}/files/create_folder", API_URL); try { - return PostAndGetJSONData(url, pa); + return 
PostAndGetJSONData(url, pa); } catch (Exception ex) { @@ -78,7 +78,7 @@ namespace Duplicati.Library.Backend // start a session var ussa = new UploadSessionStartArg(); - var chunksize = (int)Math.Min(DROPBOX_MAX_CHUNK_UPLOAD, stream.Length); + var chunksize = (int)Math.Min(DROPBOX_MAX_CHUNK_UPLOAD, stream.Length); var url = string.Format("{0}/files/upload_session/start", CONTENT_API_URL); var req = CreateRequest(url, "POST"); @@ -95,14 +95,14 @@ namespace Duplicati.Library.Backend using (var rs = areq.GetRequestStream()) { int bytesRead = 0; - do - { - bytesRead = stream.Read(buffer, 0, Math.Min((int)Utility.Utility.DEFAULT_BUFFER_SIZE, chunksize)); - globalBytesRead += (ulong)bytesRead; - rs.Write(buffer, 0, bytesRead); - - } - while (bytesRead > 0 && globalBytesRead < (ulong)chunksize); + do + { + bytesRead = stream.Read(buffer, 0, Math.Min((int)Utility.Utility.DEFAULT_BUFFER_SIZE, chunksize)); + globalBytesRead += (ulong)bytesRead; + rs.Write(buffer, 0, bytesRead); + + } + while (bytesRead > 0 && globalBytesRead < (ulong)chunksize); } var ussr = ReadJSONResponse(areq); // pun intended @@ -120,12 +120,12 @@ namespace Duplicati.Library.Backend usaa.close = remaining < DROPBOX_MAX_CHUNK_UPLOAD; url = string.Format("{0}/files/upload_session/append_v2", CONTENT_API_URL); - chunksize = (int)Math.Min(DROPBOX_MAX_CHUNK_UPLOAD, (long)remaining); + chunksize = (int)Math.Min(DROPBOX_MAX_CHUNK_UPLOAD, (long)remaining); req = CreateRequest(url, "POST"); req.Headers[API_ARG_HEADER] = JsonConvert.SerializeObject(usaa); req.ContentType = "application/octet-stream"; - req.ContentLength = chunksize; + req.ContentLength = chunksize; req.Timeout = 200000; areq = new AsyncHttpRequest(req); @@ -136,24 +136,24 @@ namespace Duplicati.Library.Backend int bytesRead = 0; do { - bytesRead = stream.Read(buffer, 0, Math.Min(chunksize, (int)Utility.Utility.DEFAULT_BUFFER_SIZE)); + bytesRead = stream.Read(buffer, 0, Math.Min(chunksize, (int)Utility.Utility.DEFAULT_BUFFER_SIZE)); bytesReadInRequest += bytesRead; globalBytesRead += (ulong)bytesRead; rs.Write(buffer, 0, bytesRead); } - while (bytesRead > 0 && bytesReadInRequest < chunksize); + while (bytesRead > 0 && bytesReadInRequest < chunksize); } - using (var response = GetResponse(areq)) - using (var sr = new StreamReader(response.GetResponseStream())) - sr.ReadToEnd(); + using (var response = GetResponse(areq)) + using (var sr = new StreamReader(response.GetResponseStream())) + sr.ReadToEnd(); } // finish session and commit try { - var usfa = new UploadSessionFinishArg(); + var usfa = new UploadSessionFinishArg(); usfa.cursor.session_id = ussr.session_id; usfa.cursor.offset = (ulong)globalBytesRead; usfa.commit.path = path; @@ -177,13 +177,13 @@ namespace Duplicati.Library.Backend { try { - var pa = new PathArg() { path = path }; + var pa = new PathArg() { path = path }; var url = string.Format("{0}/files/download", CONTENT_API_URL); var req = CreateRequest(url, "POST"); req.Headers[API_ARG_HEADER] = JsonConvert.SerializeObject(pa); - using (var response = GetResponse(req)) - Utility.Utility.CopyStream(response.GetResponseStream(), fs); + using (var response = GetResponse(req)) + Utility.Utility.CopyStream(response.GetResponseStream(), fs); } catch (Exception ex) { @@ -196,11 +196,11 @@ namespace Duplicati.Library.Backend { try { - var pa = new PathArg() { path = path }; + var pa = new PathArg() { path = path }; var url = string.Format("{0}/files/delete", API_URL); - using (var response = GetResponse(url, pa)) - using(var sr = new 
StreamReader(response.GetResponseStream())) - sr.ReadToEnd(); + using (var response = GetResponse(url, pa)) + using(var sr = new StreamReader(response.GetResponseStream())) + sr.ReadToEnd(); } catch (Exception ex) { @@ -211,47 +211,47 @@ namespace Duplicati.Library.Backend private void handleDropboxException(Exception ex, bool filerequest) { - if (ex is WebException) - { - string json = string.Empty; - - try - { - using (var sr = new StreamReader(((WebException)ex).Response.GetResponseStream())) - json = sr.ReadToEnd(); - } - catch { } - - // Special mapping for exceptions: - // https://www.dropbox.com/developers-v1/core/docs - - if (((WebException)ex).Response is HttpWebResponse) - { - var httpResp = ((WebException)ex).Response as HttpWebResponse; - - if (httpResp.StatusCode == HttpStatusCode.NotFound) - { - if (filerequest) - throw new Duplicati.Library.Interface.FileMissingException(json); - else - throw new Duplicati.Library.Interface.FolderMissingException(json); - } - if (httpResp.StatusCode == HttpStatusCode.Conflict) - { - //TODO: Should actually parse and see if something else happens - if (filerequest) - throw new Duplicati.Library.Interface.FileMissingException(json); - else - throw new Duplicati.Library.Interface.FolderMissingException(json); - } - if (httpResp.StatusCode == HttpStatusCode.Unauthorized) - ThrowAuthException(json, ex); - if ((int)httpResp.StatusCode == 429 || (int)httpResp.StatusCode == 507) - ThrowOverQuotaError(); - } - - throw new DropboxException() { errorJSON = JObject.Parse(json) }; - } + if (ex is WebException) + { + string json = string.Empty; + + try + { + using (var sr = new StreamReader(((WebException)ex).Response.GetResponseStream())) + json = sr.ReadToEnd(); + } + catch { } + + // Special mapping for exceptions: + // https://www.dropbox.com/developers-v1/core/docs + + if (((WebException)ex).Response is HttpWebResponse) + { + var httpResp = ((WebException)ex).Response as HttpWebResponse; + + if (httpResp.StatusCode == HttpStatusCode.NotFound) + { + if (filerequest) + throw new Duplicati.Library.Interface.FileMissingException(json); + else + throw new Duplicati.Library.Interface.FolderMissingException(json); + } + if (httpResp.StatusCode == HttpStatusCode.Conflict) + { + //TODO: Should actually parse and see if something else happens + if (filerequest) + throw new Duplicati.Library.Interface.FileMissingException(json); + else + throw new Duplicati.Library.Interface.FolderMissingException(json); + } + if (httpResp.StatusCode == HttpStatusCode.Unauthorized) + ThrowAuthException(json, ex); + if ((int)httpResp.StatusCode == 429 || (int)httpResp.StatusCode == 507) + ThrowOverQuotaError(); + } + + throw new DropboxException() { errorJSON = JObject.Parse(json) }; + } } } @@ -355,8 +355,8 @@ namespace Duplicati.Library.Backend } - public class FileMetaData : MetaData - { + public class FileMetaData : MetaData + { - } + } } diff --git a/Duplicati/Library/Backend/Dropbox/Strings.cs b/Duplicati/Library/Backend/Dropbox/Strings.cs index de38e32f4..222158870 100644 --- a/Duplicati/Library/Backend/Dropbox/Strings.cs +++ b/Duplicati/Library/Backend/Dropbox/Strings.cs @@ -17,13 +17,13 @@ using Duplicati.Library.Localization.Short; namespace Duplicati.Library.Backend.Strings { - internal static class Dropbox - { - public static string Description { get { return LC.L(@"This backend can read and write data to Dropbox. 
Supported format is ""dropbox://folder/subfolder""."); } } - public static string DisplayName { get { return LC.L(@"Dropbox"); } } - public static string AuthidShort { get { return LC.L(@"The authorization code"); } } - public static string AuthidLong(string url) { return LC.L(@"The authorization token retrieved from {0}", url); } + internal static class Dropbox + { + public static string Description { get { return LC.L(@"This backend can read and write data to Dropbox. Supported format is ""dropbox://folder/subfolder""."); } } + public static string DisplayName { get { return LC.L(@"Dropbox"); } } + public static string AuthidShort { get { return LC.L(@"The authorization code"); } } + public static string AuthidLong(string url) { return LC.L(@"The authorization token retrieved from {0}", url); } - } + } } diff --git a/Duplicati/Library/Backend/FTP/FTPBackend.cs b/Duplicati/Library/Backend/FTP/FTPBackend.cs index a45040524..c39fa96db 100644 --- a/Duplicati/Library/Backend/FTP/FTPBackend.cs +++ b/Duplicati/Library/Backend/FTP/FTPBackend.cs @@ -138,7 +138,7 @@ namespace Duplicati.Library.Backend string time = m.Groups["timestamp"].Value; string dir = m.Groups["dir"].Value; - //Unused + //Unused //string permission = m.Groups["permission"].Value; if (dir != "" && dir != "-") @@ -186,7 +186,7 @@ namespace Duplicati.Library.Backend var lst = new List(); var areq = new Utility.AsyncHttpRequest(req); using (var resp = areq.GetResponse()) - using (var rs = areq.GetResponseStream()) + using (var rs = areq.GetResponseStream()) using (var sr = new System.IO.StreamReader(new StreamReadHelper(rs))) { string line; @@ -264,7 +264,7 @@ namespace Duplicati.Library.Backend var areq = new Utility.AsyncHttpRequest(req); using (var resp = areq.GetResponse()) - using (var rs = areq.GetResponseStream()) + using (var rs = areq.GetResponseStream()) Utility.Utility.CopyStream(rs, output, false, m_copybuffer); } @@ -279,7 +279,7 @@ namespace Duplicati.Library.Backend System.Net.FtpWebRequest req = CreateRequest(remotename); req.Method = System.Net.WebRequestMethods.Ftp.DeleteFile; Utility.AsyncHttpRequest areq = new Utility.AsyncHttpRequest(req); - using (areq.GetResponse()) + using (areq.GetResponse()) { } } diff --git a/Duplicati/Library/Backend/GoogleServices/GoogleCloudStorage.cs b/Duplicati/Library/Backend/GoogleServices/GoogleCloudStorage.cs index eea2cca16..efb756a93 100644 --- a/Duplicati/Library/Backend/GoogleServices/GoogleCloudStorage.cs +++ b/Duplicati/Library/Backend/GoogleServices/GoogleCloudStorage.cs @@ -281,7 +281,7 @@ namespace Duplicati.Library.Backend.GoogleCloudStorage var areq = new AsyncHttpRequest(req); using(var resp = areq.GetResponse()) - using(var rs = areq.GetResponseStream()) + using(var rs = areq.GetResponseStream()) Library.Utility.Utility.CopyStream(rs, stream); } catch (WebException wex) diff --git a/Duplicati/Library/Backend/GoogleServices/GoogleDrive.cs b/Duplicati/Library/Backend/GoogleServices/GoogleDrive.cs index 9c7ae8bed..74bde3659 100644 --- a/Duplicati/Library/Backend/GoogleServices/GoogleDrive.cs +++ b/Duplicati/Library/Backend/GoogleServices/GoogleDrive.cs @@ -181,7 +181,7 @@ namespace Duplicati.Library.Backend.GoogleDrive var req = m_oauth.CreateRequest(string.Format("{0}/files/{1}?alt=media", DRIVE_API_URL, fileid)); var areq = new AsyncHttpRequest(req); using(var resp = (HttpWebResponse)areq.GetResponse()) - using(var rs = areq.GetResponseStream()) + using(var rs = areq.GetResponseStream()) Duplicati.Library.Utility.Utility.CopyStream(rs, stream); } diff --git 
a/Duplicati/Library/Backend/GoogleServices/Strings.cs b/Duplicati/Library/Backend/GoogleServices/Strings.cs index 10b81c1bc..e93f12491 100644 --- a/Duplicati/Library/Backend/GoogleServices/Strings.cs +++ b/Duplicati/Library/Backend/GoogleServices/Strings.cs @@ -44,7 +44,7 @@ namespace Duplicati.Library.Backend.Strings public static string AuthidLong(string url) { return LC.L(@"The authorization token retrieved from {0}", url); } public static string DisplayName { get { return LC.L(@"Google Drive"); } } public static string MissingAuthID(string url) { return LC.L(@"You need an AuthID, you can get it from: {0}", url); } - public static string MultipleEntries(string folder, string parent) { return LC.L(@"There is more than one item named ""{0}"" in the folder ""{1}""", folder, parent); } + public static string MultipleEntries(string folder, string parent) { return LC.L(@"There is more than one item named ""{0}"" in the folder ""{1}""", folder, parent); } } } diff --git a/Duplicati/Library/Backend/OAuthHelper/JSONWebHelper.cs b/Duplicati/Library/Backend/OAuthHelper/JSONWebHelper.cs index 19e8d3740..6b4137dfe 100644 --- a/Duplicati/Library/Backend/OAuthHelper/JSONWebHelper.cs +++ b/Duplicati/Library/Backend/OAuthHelper/JSONWebHelper.cs @@ -211,7 +211,7 @@ namespace Duplicati.Library public virtual T ReadJSONResponse(HttpWebResponse resp) { - using(var rs = Duplicati.Library.Utility.AsyncHttpRequest.TrySetTimeout(resp.GetResponseStream())) + using(var rs = Duplicati.Library.Utility.AsyncHttpRequest.TrySetTimeout(resp.GetResponseStream())) using(var tr = new System.IO.StreamReader(rs)) using(var jr = new Newtonsoft.Json.JsonTextReader(tr)) return new Newtonsoft.Json.JsonSerializer().Deserialize(jr); diff --git a/Duplicati/Library/Backend/OAuthHelper/OAuthHelper.cs b/Duplicati/Library/Backend/OAuthHelper/OAuthHelper.cs index f374ab148..6a624a03d 100644 --- a/Duplicati/Library/Backend/OAuthHelper/OAuthHelper.cs +++ b/Duplicati/Library/Backend/OAuthHelper/OAuthHelper.cs @@ -25,7 +25,7 @@ namespace Duplicati.Library { private string m_token; private string m_authid; - private DateTime m_tokenExpires = DateTime.UtcNow; + private DateTime m_tokenExpires = DateTime.UtcNow; private static string _override_server = null; @@ -45,14 +45,14 @@ namespace Duplicati.Library return string.Format(addr.ToString(), modulename); } - /// - /// Set to true to automatically add the Authorization header to requets - /// + /// + /// Set to true to automatically add the Authorization header to requets + /// public bool AutoAuthHeader { get; set; } - /// - /// Set to true if the provider does not use refresh tokens, but only access tokens - /// - public bool AccessTokenOnly { get; set; } + /// + /// Set to true if the provider does not use refresh tokens, but only access tokens + /// + public bool AccessTokenOnly { get; set; } public OAuthHelper(string authid, string servicename, string useragent = null) : base(useragent) @@ -85,8 +85,8 @@ namespace Duplicati.Library { get { - if (AccessTokenOnly) - return m_authid; + if (AccessTokenOnly) + return m_authid; if (m_token == null || m_tokenExpires < DateTime.UtcNow) { @@ -142,18 +142,18 @@ namespace Duplicati.Library } } - public void ThrowOverQuotaError() - { - throw new Exception(Strings.OAuthHelper.OverQuotaError); - } - - public void ThrowAuthException(string msg, Exception ex) - { - if (ex == null) - throw new Exception(Strings.OAuthHelper.AuthorizationFailure(msg, OAuthLoginUrl)); - else - throw new Exception(Strings.OAuthHelper.AuthorizationFailure(msg, OAuthLoginUrl), 
ex); - } + public void ThrowOverQuotaError() + { + throw new Exception(Strings.OAuthHelper.OverQuotaError); + } + + public void ThrowAuthException(string msg, Exception ex) + { + if (ex == null) + throw new Exception(Strings.OAuthHelper.AuthorizationFailure(msg, OAuthLoginUrl)); + else + throw new Exception(Strings.OAuthHelper.AuthorizationFailure(msg, OAuthLoginUrl), ex); + } diff --git a/Duplicati/Library/Backend/OneDrive/OneDrive.cs b/Duplicati/Library/Backend/OneDrive/OneDrive.cs index 0d75b84cc..f160daf62 100644 --- a/Duplicati/Library/Backend/OneDrive/OneDrive.cs +++ b/Duplicati/Library/Backend/OneDrive/OneDrive.cs @@ -167,7 +167,7 @@ namespace Duplicati.Library.Backend } using (var resp = (HttpWebResponse)areq.GetResponse()) - using (var rs = areq.GetResponseStream()) + using (var rs = areq.GetResponseStream()) using (var tr = new System.IO.StreamReader(rs)) using (var jr = new Newtonsoft.Json.JsonTextReader(tr)) { @@ -357,7 +357,7 @@ namespace Duplicati.Library.Backend var areq = new Utility.AsyncHttpRequest(req); using(var resp = (HttpWebResponse)areq.GetResponse()) - using(var rs = areq.GetResponseStream()) + using(var rs = areq.GetResponseStream()) using(var tr = new System.IO.StreamReader(rs)) using(var jr = new Newtonsoft.Json.JsonTextReader(tr)) m_userid = new Newtonsoft.Json.JsonSerializer().Deserialize(jr).id; @@ -518,7 +518,7 @@ namespace Duplicati.Library.Backend Utility.Utility.CopyStream(stream, reqs, true, m_copybuffer); using(var resp = (HttpWebResponse)areq.GetResponse()) - using(var rs = areq.GetResponseStream()) + using(var rs = areq.GetResponseStream()) using(var tr = new System.IO.StreamReader(rs)) using(var jr = new Newtonsoft.Json.JsonTextReader(tr)) { @@ -537,7 +537,7 @@ namespace Duplicati.Library.Backend var areq = new Utility.AsyncHttpRequest(req); using (var resp = (HttpWebResponse)areq.GetResponse()) - using (var rs = areq.GetResponseStream()) + using (var rs = areq.GetResponseStream()) Utility.Utility.CopyStream(rs, stream, true, m_copybuffer); } diff --git a/Duplicati/Library/Backend/OpenStack/OpenStackStorage.cs b/Duplicati/Library/Backend/OpenStack/OpenStackStorage.cs index ae74a7508..eea8d266a 100644 --- a/Duplicati/Library/Backend/OpenStack/OpenStackStorage.cs +++ b/Duplicati/Library/Backend/OpenStack/OpenStackStorage.cs @@ -290,7 +290,7 @@ namespace Duplicati.Library.Backend.OpenStack try { using(var resp = m_helper.GetResponse(url)) - using(var rs = AsyncHttpRequest.TrySetTimeout(resp.GetResponseStream())) + using(var rs = AsyncHttpRequest.TrySetTimeout(resp.GetResponseStream())) Library.Utility.Utility.CopyStream(rs, stream); } catch(WebException wex) diff --git a/Duplicati/Library/Backend/S3/S3Backend.cs b/Duplicati/Library/Backend/S3/S3Backend.cs index 713b9003a..793cb0119 100644 --- a/Duplicati/Library/Backend/S3/S3Backend.cs +++ b/Duplicati/Library/Backend/S3/S3Backend.cs @@ -73,8 +73,8 @@ namespace Duplicati.Library.Backend static S3() { var ns = new List> { - new KeyValuePair("(default)", ""), - new KeyValuePair("Standard", "STANDARD"), + new KeyValuePair("(default)", ""), + new KeyValuePair("Standard", "STANDARD"), new KeyValuePair("Infrequent Access (IA)", "STANDARD_IA"), new KeyValuePair("Glacier", "GLACIER"), new KeyValuePair("Reduced Redundancy Storage (RRS)", "REDUCED_REDUNDANCY"), @@ -309,8 +309,8 @@ namespace Duplicati.Library.Backend { Connection.AddFileStream(m_bucket, GetFullKey(remotename), input); } - catch (Exception ex) - { + catch (Exception ex) + { //Catch "non-existing" buckets Amazon.S3.AmazonS3Exception s3ex = ex as 
Amazon.S3.AmazonS3Exception; if (s3ex != null && (s3ex.StatusCode == System.Net.HttpStatusCode.NotFound || "NoSuchBucket".Equals(s3ex.ErrorCode))) diff --git a/Duplicati/Library/Backend/S3/S3IAM.cs b/Duplicati/Library/Backend/S3/S3IAM.cs index 76d72fce6..ab5b02c91 100644 --- a/Duplicati/Library/Backend/S3/S3IAM.cs +++ b/Duplicati/Library/Backend/S3/S3IAM.cs @@ -23,21 +23,21 @@ using Amazon.IdentityManagement.Model; namespace Duplicati.Library.Backend { - public class S3IAM : IWebModule - { - private const string KEY_OPERATION = "s3-operation"; - private const string KEY_USERNAME = "s3-username"; - private const string KEY_PASSWORD = "s3-password"; - private const string KEY_PATH = "s3-path"; - - public enum Operation - { - CanCreateUser, - CreateIAMUser, - GetPolicyDoc - } - - public const string POLICY_DOCUMENT_TEMPLATE = + public class S3IAM : IWebModule + { + private const string KEY_OPERATION = "s3-operation"; + private const string KEY_USERNAME = "s3-username"; + private const string KEY_PASSWORD = "s3-password"; + private const string KEY_PATH = "s3-path"; + + public enum Operation + { + CanCreateUser, + CreateIAMUser, + GetPolicyDoc + } + + public const string POLICY_DOCUMENT_TEMPLATE = @" { ""Version"": ""2012-10-17"", @@ -60,141 +60,141 @@ namespace Duplicati.Library.Backend } "; - public S3IAM() - { - } - - public string Key { get { return "s3-iamconfig"; } } - - public string DisplayName { get { return "S3 IAM support module"; } } - - public string Description { get { return "Exposes S3 IAM manipulation as a web module"; } } - - - public IList SupportedCommands - { - get - { - return new List(new ICommandLineArgument[] { - new CommandLineArgument(KEY_OPERATION, CommandLineArgument.ArgumentType.Enumeration, "The operation to perform", "Selects the operation to perform", null, Enum.GetNames(typeof(Operation))), - new CommandLineArgument(KEY_USERNAME, CommandLineArgument.ArgumentType.String, "The username", "The Amazon Access Key ID"), - new CommandLineArgument(KEY_PASSWORD, CommandLineArgument.ArgumentType.String, "The password", "The Amazon Secret Key"), - }); - } - } - - public IDictionary Execute(IDictionary options) - { - string operationstring; - string username; - string password; - string path; - Operation operation; - - options.TryGetValue(KEY_OPERATION, out operationstring); - options.TryGetValue(KEY_USERNAME, out username); - options.TryGetValue(KEY_PASSWORD, out password); - options.TryGetValue(KEY_PATH, out path); - - if (string.IsNullOrWhiteSpace(operationstring)) - throw new ArgumentNullException(KEY_OPERATION); - - if (!Enum.TryParse(operationstring, true, out operation)) - throw new ArgumentException(string.Format("Unable to parse {0} as an operation", operationstring)); - - switch (operation) - { - case Operation.GetPolicyDoc: - if (string.IsNullOrWhiteSpace(path)) - throw new ArgumentNullException(KEY_PATH); - return GetPolicyDoc(path); - - case Operation.CreateIAMUser: - if (string.IsNullOrWhiteSpace(username)) - throw new ArgumentNullException(KEY_USERNAME); - if (string.IsNullOrWhiteSpace(password)) - throw new ArgumentNullException(KEY_PASSWORD); - if (string.IsNullOrWhiteSpace(path)) - throw new ArgumentNullException(KEY_PATH); - return CreateUnprivilegedUser(username, password, path); - - case Operation.CanCreateUser: - default: - if (string.IsNullOrWhiteSpace(username)) - throw new ArgumentNullException(KEY_USERNAME); - if (string.IsNullOrWhiteSpace(password)) - throw new ArgumentNullException(KEY_PASSWORD); - return CanCreateUser(username, password); - } 
- } - - private Dictionary GetPolicyDoc(string path) - { - var dict = new Dictionary(); - dict["doc"] = GeneratePolicyDoc(path); - return dict; - } - - private string GeneratePolicyDoc(string path) - { - if (string.IsNullOrWhiteSpace(path)) - throw new ArgumentNullException("path"); - - path = path.Trim().Trim('/').Trim(); - - if (string.IsNullOrWhiteSpace(path)) - throw new ArgumentException("Invalid value for path"); - - return POLICY_DOCUMENT_TEMPLATE.Replace("bucket-name-and-path", path).Trim(); - } - - private Dictionary CanCreateUser(string awsid, string awskey) - { - var dict = new Dictionary(); - var cl = new AmazonIdentityManagementServiceClient(awsid, awskey); - try - { - var user = cl.GetUser().User; - - dict["isroot"] = "False"; //user.Arn.EndsWith(":root", StringComparison.Ordinal).ToString(); - dict["arn"] = user.Arn; - dict["id"] = user.UserId; - dict["name"] = user.UserName; - - dict["isroot"] = (cl.SimulatePrincipalPolicy(new SimulatePrincipalPolicyRequest() { PolicySourceArn = user.Arn, ActionNames = new[] { "iam:CreateUser" }.ToList() }).EvaluationResults.First().EvalDecision == PolicyEvaluationDecisionType.Allowed).ToString(); - } - catch (Exception ex) - { - dict["ex"] = ex.ToString(); - dict["error"] = ex.Message; - } - - return dict; - } - - private Dictionary CreateUnprivilegedUser(string awsid, string awskey, string path) - { - var now = Library.Utility.Utility.SerializeDateTime(DateTime.Now); - var username = string.Format("duplicati-autocreated-backup-user-{0}", now); - var policyname = string.Format("duplicati-autocreated-policy-{0}", now); - var policydoc = GeneratePolicyDoc(path); - - var cl = new AmazonIdentityManagementServiceClient(awsid, awskey); - var user = cl.CreateUser(new CreateUserRequest(username)).User; - cl.PutUserPolicy(new PutUserPolicyRequest( - user.UserName, - policyname, - policydoc - )); - var key = cl.CreateAccessKey(new CreateAccessKeyRequest() { UserName = user.UserName }).AccessKey; - - var dict = new Dictionary(); - dict["accessid"] = key.AccessKeyId; - dict["secretkey"] = key.SecretAccessKey; - dict["username"] = key.UserName; - - return dict; - } - } + public S3IAM() + { + } + + public string Key { get { return "s3-iamconfig"; } } + + public string DisplayName { get { return "S3 IAM support module"; } } + + public string Description { get { return "Exposes S3 IAM manipulation as a web module"; } } + + + public IList SupportedCommands + { + get + { + return new List(new ICommandLineArgument[] { + new CommandLineArgument(KEY_OPERATION, CommandLineArgument.ArgumentType.Enumeration, "The operation to perform", "Selects the operation to perform", null, Enum.GetNames(typeof(Operation))), + new CommandLineArgument(KEY_USERNAME, CommandLineArgument.ArgumentType.String, "The username", "The Amazon Access Key ID"), + new CommandLineArgument(KEY_PASSWORD, CommandLineArgument.ArgumentType.String, "The password", "The Amazon Secret Key"), + }); + } + } + + public IDictionary Execute(IDictionary options) + { + string operationstring; + string username; + string password; + string path; + Operation operation; + + options.TryGetValue(KEY_OPERATION, out operationstring); + options.TryGetValue(KEY_USERNAME, out username); + options.TryGetValue(KEY_PASSWORD, out password); + options.TryGetValue(KEY_PATH, out path); + + if (string.IsNullOrWhiteSpace(operationstring)) + throw new ArgumentNullException(KEY_OPERATION); + + if (!Enum.TryParse(operationstring, true, out operation)) + throw new ArgumentException(string.Format("Unable to parse {0} as an 
operation", operationstring)); + + switch (operation) + { + case Operation.GetPolicyDoc: + if (string.IsNullOrWhiteSpace(path)) + throw new ArgumentNullException(KEY_PATH); + return GetPolicyDoc(path); + + case Operation.CreateIAMUser: + if (string.IsNullOrWhiteSpace(username)) + throw new ArgumentNullException(KEY_USERNAME); + if (string.IsNullOrWhiteSpace(password)) + throw new ArgumentNullException(KEY_PASSWORD); + if (string.IsNullOrWhiteSpace(path)) + throw new ArgumentNullException(KEY_PATH); + return CreateUnprivilegedUser(username, password, path); + + case Operation.CanCreateUser: + default: + if (string.IsNullOrWhiteSpace(username)) + throw new ArgumentNullException(KEY_USERNAME); + if (string.IsNullOrWhiteSpace(password)) + throw new ArgumentNullException(KEY_PASSWORD); + return CanCreateUser(username, password); + } + } + + private Dictionary GetPolicyDoc(string path) + { + var dict = new Dictionary(); + dict["doc"] = GeneratePolicyDoc(path); + return dict; + } + + private string GeneratePolicyDoc(string path) + { + if (string.IsNullOrWhiteSpace(path)) + throw new ArgumentNullException("path"); + + path = path.Trim().Trim('/').Trim(); + + if (string.IsNullOrWhiteSpace(path)) + throw new ArgumentException("Invalid value for path"); + + return POLICY_DOCUMENT_TEMPLATE.Replace("bucket-name-and-path", path).Trim(); + } + + private Dictionary CanCreateUser(string awsid, string awskey) + { + var dict = new Dictionary(); + var cl = new AmazonIdentityManagementServiceClient(awsid, awskey); + try + { + var user = cl.GetUser().User; + + dict["isroot"] = "False"; //user.Arn.EndsWith(":root", StringComparison.Ordinal).ToString(); + dict["arn"] = user.Arn; + dict["id"] = user.UserId; + dict["name"] = user.UserName; + + dict["isroot"] = (cl.SimulatePrincipalPolicy(new SimulatePrincipalPolicyRequest() { PolicySourceArn = user.Arn, ActionNames = new[] { "iam:CreateUser" }.ToList() }).EvaluationResults.First().EvalDecision == PolicyEvaluationDecisionType.Allowed).ToString(); + } + catch (Exception ex) + { + dict["ex"] = ex.ToString(); + dict["error"] = ex.Message; + } + + return dict; + } + + private Dictionary CreateUnprivilegedUser(string awsid, string awskey, string path) + { + var now = Library.Utility.Utility.SerializeDateTime(DateTime.Now); + var username = string.Format("duplicati-autocreated-backup-user-{0}", now); + var policyname = string.Format("duplicati-autocreated-policy-{0}", now); + var policydoc = GeneratePolicyDoc(path); + + var cl = new AmazonIdentityManagementServiceClient(awsid, awskey); + var user = cl.CreateUser(new CreateUserRequest(username)).User; + cl.PutUserPolicy(new PutUserPolicyRequest( + user.UserName, + policyname, + policydoc + )); + var key = cl.CreateAccessKey(new CreateAccessKeyRequest() { UserName = user.UserName }).AccessKey; + + var dict = new Dictionary(); + dict["accessid"] = key.AccessKeyId; + dict["secretkey"] = key.SecretAccessKey; + dict["username"] = key.UserName; + + return dict; + } + } } diff --git a/Duplicati/Library/Backend/S3/S3Wrapper.cs b/Duplicati/Library/Backend/S3/S3Wrapper.cs index cab82f346..36cec0ab7 100644 --- a/Duplicati/Library/Backend/S3/S3Wrapper.cs +++ b/Duplicati/Library/Backend/S3/S3Wrapper.cs @@ -36,7 +36,7 @@ namespace Duplicati.Library.Backend protected string m_locationConstraint; protected string m_storageClass; - protected AmazonS3Client m_client; + protected AmazonS3Client m_client; public S3Wrapper(string awsID, string awsKey, string locationConstraint, string servername, string storageClass, bool useSSL, Dictionary 
options)
         {
diff --git a/Duplicati/Library/Backend/SSHv2/SSHv2Backend.cs b/Duplicati/Library/Backend/SSHv2/SSHv2Backend.cs
index fb7ab7d5a..a2a44e249 100644
--- a/Duplicati/Library/Backend/SSHv2/SSHv2Backend.cs
+++ b/Duplicati/Library/Backend/SSHv2/SSHv2Backend.cs
@@ -255,7 +255,7 @@ namespace Duplicati.Library.Backend
             string hostFingerprint = e.HostKeyName + " " + e.KeyLength.ToString() + " " + BitConverter.ToString(e.FingerPrint).Replace('-', ':');
 
             if (string.IsNullOrEmpty(m_fingerprint))
-                throw new Library.Utility.HostKeyException(Strings.SSHv2Backend.FingerprintNotSpecifiedManagedError(hostFingerprint.ToLower(), SSH_FINGERPRINT_OPTION, SSH_FINGERPRINT_ACCEPT_ANY_OPTION), hostFingerprint, m_fingerprint);
+                throw new Library.Utility.HostKeyException(Strings.SSHv2Backend.FingerprintNotSpecifiedManagedError(hostFingerprint.ToLower(), SSH_FINGERPRINT_OPTION, SSH_FINGERPRINT_ACCEPT_ANY_OPTION), hostFingerprint, m_fingerprint);
 
             if (hostFingerprint.ToLower() != m_fingerprint.ToLower())
                 throw new Library.Utility.HostKeyException(Strings.SSHv2Backend.FingerprintNotMatchManagedError(hostFingerprint.ToLower()), hostFingerprint, m_fingerprint);
@@ -344,4 +344,4 @@ namespace Duplicati.Library.Backend
         }
     }
 }
-}
\ No newline at end of file
+}
diff --git a/Duplicati/Library/Backend/SSHv2/Strings.cs b/Duplicati/Library/Backend/SSHv2/Strings.cs
index 448af41e7..fb8f74f83 100644
--- a/Duplicati/Library/Backend/SSHv2/Strings.cs
+++ b/Duplicati/Library/Backend/SSHv2/Strings.cs
@@ -33,6 +33,6 @@ namespace Duplicati.Library.Backend.Strings {
         public static string DisplayName { get { return LC.L(@"SFTP (SSH)"); } }
         public static string FolderNotFoundManagedError(string foldername, string message) { return LC.L(@"Unable to set folder to {0}, error message: {1}", foldername, message); }
         public static string FingerprintNotMatchManagedError(string fingerprint) { return LC.L(@"Validation of server fingerprint failed. Server returned fingerprint ""{0}"". Cause of this message is either not correct configuration or Man-in-the-middle attack!", fingerprint); }
-        public static string FingerprintNotSpecifiedManagedError(string fingerprint, string hostkeyoption, string allkeysoptions) { return LC.L(@"Please add --{1}=""{0}"" to trust this host. Optionally you can use --{2} (NOT SECURE) for testing!", fingerprint, hostkeyoption, allkeysoptions); }
+        public static string FingerprintNotSpecifiedManagedError(string fingerprint, string hostkeyoption, string allkeysoptions) { return LC.L(@"Please add --{1}=""{0}"" to trust this host. Optionally you can use --{2} (NOT SECURE) for testing!", fingerprint, hostkeyoption, allkeysoptions); }
     }
 }
diff --git a/Duplicati/Library/Backend/SharePoint/SharePointBackend.cs b/Duplicati/Library/Backend/SharePoint/SharePointBackend.cs
index d6c922a4c..962aae9b4 100644
--- a/Duplicati/Library/Backend/SharePoint/SharePointBackend.cs
+++ b/Duplicati/Library/Backend/SharePoint/SharePointBackend.cs
@@ -500,10 +500,10 @@ namespace Duplicati.Library.Backend
         }
 
 
-        /// <summary>
-        /// Upload in chunks to bypass filesize limit.
-        /// https://msdn.microsoft.com/en-us/library/office/dn904536.aspx
-        /// </summary>
+        /// <summary>
+        /// Upload in chunks to bypass filesize limit.
+        /// https://msdn.microsoft.com/en-us/library/office/dn904536.aspx
+        /// </summary>
         private SP.File uploadFileSlicePerSlice(ClientContext ctx, Folder folder, Stream sourceFileStream, string fileName)
         {
             // Each sliced upload requires a unique ID.
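// ---- [Editor's note: background sketch, not part of the patch] ------------------------
// For readers without the MSDN article above: a CSOM sliced upload ties every slice to a
// single Guid and walks StartUpload / ContinueUpload / FinishUpload, one server round-trip
// per slice. The outline below is an editor's simplification, not the method body that
// follows in this file; it assumes the Microsoft.SharePoint.Client CSOM API, a seekable
// source stream larger than one slice, and omits retries and the single-slice case.
//
//     using System;
//     using System.IO;
//     using Microsoft.SharePoint.Client;
//
//     static File UploadSliced(ClientContext ctx, Folder folder, Stream source, string fileName, int sliceSize)
//     {
//         Guid uploadId = Guid.NewGuid();          // the unique ID shared by all slices
//         byte[] buffer = new byte[sliceSize];
//         long offset = 0;
//         File file = null;
//
//         int read;
//         while ((read = source.Read(buffer, 0, buffer.Length)) > 0)
//         {
//             using (var slice = new MemoryStream(buffer, 0, read))
//             {
//                 bool last = source.Position >= source.Length;
//                 if (file == null)
//                 {
//                     // First slice: create a stub file, then open the upload session on it.
//                     var info = new FileCreationInformation { ContentStream = new MemoryStream(), Url = fileName, Overwrite = true };
//                     file = folder.Files.Add(info);
//                     var r = file.StartUpload(uploadId, slice);
//                     ctx.ExecuteQuery();          // ClientResult values populate after the round-trip
//                     offset = r.Value;
//                 }
//                 else if (!last)
//                 {
//                     var r = file.ContinueUpload(uploadId, offset, slice);
//                     ctx.ExecuteQuery();
//                     offset = r.Value;
//                 }
//                 else
//                 {
//                     file = file.FinishUpload(uploadId, offset, slice);   // commits the assembled file
//                     ctx.ExecuteQuery();
//                 }
//             }
//         }
//         return file;
//     }
// ---------------------------------------------------------------------------------------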
@@ -591,7 +591,7 @@ namespace Duplicati.Library.Backend return uploadFile; } - public void CreateFolder() { doCreateFolder(false); } + public void CreateFolder() { doCreateFolder(false); } private void doCreateFolder(bool useNewContext) { SP.ClientContext ctx = getSpClientContext(useNewContext); diff --git a/Duplicati/Library/Backend/TahoeLAFS/TahoeBackend.cs b/Duplicati/Library/Backend/TahoeLAFS/TahoeBackend.cs index ef4d9c954..7edf747fb 100644 --- a/Duplicati/Library/Backend/TahoeLAFS/TahoeBackend.cs +++ b/Duplicati/Library/Backend/TahoeLAFS/TahoeBackend.cs @@ -161,7 +161,7 @@ namespace Duplicati.Library.Backend if (code < 200 || code >= 300) //For some reason Mono does not throw this automatically throw new System.Net.WebException(resp.StatusDescription, null, System.Net.WebExceptionStatus.ProtocolError, resp); - using (var rs = areq.GetResponseStream()) + using (var rs = areq.GetResponseStream()) using (var sr = new System.IO.StreamReader(rs)) using (var jr = new Newtonsoft.Json.JsonTextReader(sr)) { @@ -314,7 +314,7 @@ namespace Duplicati.Library.Backend if (code < 200 || code >= 300) //For some reason Mono does not throw this automatically throw new System.Net.WebException(resp.StatusDescription, null, System.Net.WebExceptionStatus.ProtocolError, resp); - using (var s = areq.GetResponseStream()) + using (var s = areq.GetResponseStream()) Utility.Utility.CopyStream(s, stream, true, m_copybuffer); } } diff --git a/Duplicati/Library/Backend/WEBDAV/WEBDAV.cs b/Duplicati/Library/Backend/WEBDAV/WEBDAV.cs index 6f8a95654..c5149d53a 100644 --- a/Duplicati/Library/Backend/WEBDAV/WEBDAV.cs +++ b/Duplicati/Library/Backend/WEBDAV/WEBDAV.cs @@ -29,8 +29,8 @@ namespace Duplicati.Library.Backend private System.Net.NetworkCredential m_userInfo; private string m_url; private string m_path; - private string m_sanitizedUrl; - private string m_reverseProtocolUrl; + private string m_sanitizedUrl; + private string m_reverseProtocolUrl; private string m_rawurl; private string m_rawurlPort; private bool m_useIntegratedAuthentication = false; @@ -96,9 +96,9 @@ namespace Duplicati.Library.Backend m_path = u.Path; if (!m_path.StartsWith("/")) - m_path = "/" + m_path; + m_path = "/" + m_path; if (!m_path.EndsWith("/")) - m_path += "/"; + m_path += "/"; m_path = Library.Utility.Uri.UrlDecode(m_path); m_rawurl = new Utility.Uri(m_useSSL ? "https" : "http", u.Host, m_path).ToString(); @@ -108,8 +108,8 @@ namespace Duplicati.Library.Backend port = m_useSSL ? 443 : 80; m_rawurlPort = new Utility.Uri(m_useSSL ? "https" : "http", u.Host, m_path, null, null, null, port).ToString(); - m_sanitizedUrl = new Utility.Uri(m_useSSL ? "https" : "http", u.Host, m_path).ToString(); - m_reverseProtocolUrl = new Utility.Uri(m_useSSL ? "http" : "https", u.Host, m_path).ToString(); + m_sanitizedUrl = new Utility.Uri(m_useSSL ? "https" : "http", u.Host, m_path).ToString(); + m_reverseProtocolUrl = new Utility.Uri(m_useSSL ? 
"http" : "https", u.Host, m_path).ToString(); options.TryGetValue("debug-propfind-file", out m_debugPropfindFile); } @@ -149,17 +149,17 @@ namespace Duplicati.Library.Backend if (!string.IsNullOrEmpty(m_debugPropfindFile)) { - using (var rs = areq.GetResponseStream()) + using (var rs = areq.GetResponseStream()) using (var fs = new System.IO.FileStream(m_debugPropfindFile, System.IO.FileMode.Create, System.IO.FileAccess.Write, System.IO.FileShare.None)) Utility.Utility.CopyStream(rs, fs, false, m_copybuffer); doc.Load(m_debugPropfindFile); } else - { - using (var rs = areq.GetResponseStream()) - doc.Load(rs); - } + { + using (var rs = areq.GetResponseStream()) + doc.Load(rs); + } } System.Xml.XmlNamespaceManager nm = new System.Xml.XmlNamespaceManager(doc.NameTable); @@ -175,9 +175,9 @@ namespace Duplicati.Library.Backend string name = Library.Utility.Uri.UrlDecode(n.InnerText.Replace("+", "%2B")); string cmp_path; - - //TODO: This list is getting ridiculous, should change to regexps - + + //TODO: This list is getting ridiculous, should change to regexps + if (name.StartsWith(m_url)) cmp_path = m_url; else if (name.StartsWith(m_rawurl)) @@ -186,9 +186,9 @@ namespace Duplicati.Library.Backend cmp_path = m_rawurlPort; else if (name.StartsWith(m_path)) cmp_path = m_path; - else if (name.StartsWith("/" + m_path)) - cmp_path = "/" + m_path; - else if (name.StartsWith(m_sanitizedUrl)) + else if (name.StartsWith("/" + m_path)) + cmp_path = "/" + m_path; + else if (name.StartsWith(m_sanitizedUrl)) cmp_path = m_sanitizedUrl; else if (name.StartsWith(m_reverseProtocolUrl)) cmp_path = m_reverseProtocolUrl; @@ -346,12 +346,12 @@ namespace Duplicati.Library.Backend req.Credentials = cred; } else - { + { req.Credentials = m_userInfo; //We need this under Mono for some reason, // and it appears some servers require this as well - req.PreAuthenticate = true; - } + req.PreAuthenticate = true; + } req.KeepAlive = false; req.UserAgent = "Duplicati WEBDAV Client v" + System.Reflection.Assembly.GetExecutingAssembly().GetName().Version.ToString(); @@ -408,7 +408,7 @@ namespace Duplicati.Library.Backend if (code < 200 || code >= 300) //For some reason Mono does not throw this automatically throw new System.Net.WebException(resp.StatusDescription, null, System.Net.WebExceptionStatus.ProtocolError, resp); - using (var s = areq.GetResponseStream()) + using (var s = areq.GetResponseStream()) Utility.Utility.CopyStream(s, stream, true, m_copybuffer); } } diff --git a/Duplicati/Library/Compression/FileArchiveZip.cs b/Duplicati/Library/Compression/FileArchiveZip.cs index 86b6fe33e..89d55ecdd 100644 --- a/Duplicati/Library/Compression/FileArchiveZip.cs +++ b/Duplicati/Library/Compression/FileArchiveZip.cs @@ -302,20 +302,20 @@ namespace Duplicati.Library.Compression if (m_isWriting) throw new InvalidOperationException("Cannot read while writing"); - if (m_entryDict == null) - { - m_entryDict = new Dictionary(Duplicati.Library.Utility.Utility.ClientFilenameStringComparer); - foreach(IArchiveEntry en in Archive.Entries) - m_entryDict[en.Key] = en; - } - - IArchiveEntry e; - if (m_entryDict.TryGetValue(file, out e)) - return e; - if (m_entryDict.TryGetValue(file.Replace('/', '\\'), out e)) - return e; - - return null; + if (m_entryDict == null) + { + m_entryDict = new Dictionary(Duplicati.Library.Utility.Utility.ClientFilenameStringComparer); + foreach(IArchiveEntry en in Archive.Entries) + m_entryDict[en.Key] = en; + } + + IArchiveEntry e; + if (m_entryDict.TryGetValue(file, out e)) + return e; + if 
(m_entryDict.TryGetValue(file.Replace('/', '\\'), out e))
+                return e;
+
+            return null;
         }
 
         /// <summary>
diff --git a/Duplicati/Library/Compression/StreamWrapper.cs b/Duplicati/Library/Compression/StreamWrapper.cs
index 5d730e77b..66d3cdbd8 100644
--- a/Duplicati/Library/Compression/StreamWrapper.cs
+++ b/Duplicati/Library/Compression/StreamWrapper.cs
@@ -33,4 +33,4 @@ namespace Duplicati.Library.Compression
             return m_basestream == null ? 0 : m_basestream.Length;
         }
     }
-}
\ No newline at end of file
+}
diff --git a/Duplicati/Library/Interface/IGenericCallbackModule.cs b/Duplicati/Library/Interface/IGenericCallbackModule.cs
index abab5d223..dcdf6690d 100644
--- a/Duplicati/Library/Interface/IGenericCallbackModule.cs
+++ b/Duplicati/Library/Interface/IGenericCallbackModule.cs
@@ -18,22 +18,22 @@ using System;
 
 namespace Duplicati.Library.Interface
 {
-    /// <summary>
-    /// Interface for implementing callback based modules
-    /// </summary>
-    public interface IGenericCallbackModule : IGenericModule
-    {
-        /// <summary>
-        /// Called when the operation starts
-        /// </summary>
-        /// <param name="operationname">The full name of the operation</param>
-        void OnStart(string operationname, ref string remoteurl, ref string[] localpath);
-
-        /// <summary>
-        /// Called when the operation finishes
-        /// </summary>
-        /// <param name="operationname">The full name of the operation</param>
-        /// <param name="result">The result object, if this derives from an exception, the operation failed</param>
-        void OnFinish(object result);
-    }
+    /// <summary>
+    /// Interface for implementing callback based modules
+    /// </summary>
+    public interface IGenericCallbackModule : IGenericModule
+    {
+        /// <summary>
+        /// Called when the operation starts
+        /// </summary>
+        /// <param name="operationname">The full name of the operation</param>
+        void OnStart(string operationname, ref string remoteurl, ref string[] localpath);
+
+        /// <summary>
+        /// Called when the operation finishes
+        /// </summary>
+        /// <param name="operationname">The full name of the operation</param>
+        /// <param name="result">The result object, if this derives from an exception, the operation failed</param>
+        void OnFinish(object result);
+    }
 }
diff --git a/Duplicati/Library/Interface/ResultInterfaces.cs b/Duplicati/Library/Interface/ResultInterfaces.cs
index 6c6a21784..a2aa45dba 100644
--- a/Duplicati/Library/Interface/ResultInterfaces.cs
+++ b/Duplicati/Library/Interface/ResultInterfaces.cs
@@ -20,8 +20,8 @@ using System.Collections.Generic;
 
 namespace Duplicati.Library.Interface
 {
-    public interface IBasicResults
-    {
+    public interface IBasicResults
+    {
         DateTime BeginTime { get; }
         DateTime EndTime { get; }
         TimeSpan Duration { get; }
@@ -29,22 +29,22 @@ namespace Duplicati.Library.Interface
         IEnumerable<string> Errors { get; }
         IEnumerable<string> Warnings { get; }
         IEnumerable<string> Messages { get; }
-    }
+    }
 
     public interface IBackendStatstics
     {
-        long RemoteCalls { get; }
-        long BytesUploaded { get; }
-        long BytesDownloaded { get; }
-        long FilesUploaded { get; }
-        long FilesDownloaded { get; }
-        long FilesDeleted { get; }
-        long FoldersCreated { get; }
-        long RetryAttempts { get; }
+        long RemoteCalls { get; }
+        long BytesUploaded { get; }
+        long BytesDownloaded { get; }
+        long FilesUploaded { get; }
+        long FilesDownloaded { get; }
+        long FilesDeleted { get; }
+        long FoldersCreated { get; }
+        long RetryAttempts { get; }
     }
 
-    public interface IParsedBackendStatistics : IBackendStatstics
-    {
+    public interface IParsedBackendStatistics : IBackendStatstics
+    {
         long UnknownFileSize { get; }
         long UnknownFileCount { get; }
         long KnownFileCount { get; }
@@ -54,7 +54,7 @@ namespace Duplicati.Library.Interface
         long TotalQuotaSpace { get; }
         long FreeQuotaSpace { get; }
         long AssignedQuotaSpace { get; }
-    }
+    }
 
     public interface IListResultFile
     {
diff --git a/Duplicati/Library/Main/AsyncDownloader.cs
b/Duplicati/Library/Main/AsyncDownloader.cs index 550b00feb..30977e265 100644 --- a/Duplicati/Library/Main/AsyncDownloader.cs +++ b/Duplicati/Library/Main/AsyncDownloader.cs @@ -6,52 +6,52 @@ using Duplicati.Library.Main.Database; namespace Duplicati.Library.Main { - internal interface IAsyncDownloadedFile : IRemoteVolume - { - Library.Utility.TempFile TempFile { get; } - } + internal interface IAsyncDownloadedFile : IRemoteVolume + { + Library.Utility.TempFile TempFile { get; } + } internal class AsyncDownloader : IEnumerable { private class AsyncDownloaderEnumerator : IEnumerator { - private class AsyncDownloadedFile : IAsyncDownloadedFile - { - private Exception m_exception; - private Library.Utility.TempFile m_file; - - public string Name { get; private set; } - public string Hash { get; private set; } - public long Size { get; private set; } - - public void DisposeTempFile() - { - if (m_file != null) - try { m_file.Dispose(); } - finally { m_file = null; } - } - - - public Library.Utility.TempFile TempFile - { - get - { - if (m_exception != null) - throw m_exception; - - return m_file; - } - } - - public AsyncDownloadedFile(string name, string hash, long size, Library.Utility.TempFile tempfile, Exception exception) - { - this.Name = name; - this.Hash = hash; - this.Size = size; - m_exception = exception; - m_file = tempfile; - } - } + private class AsyncDownloadedFile : IAsyncDownloadedFile + { + private Exception m_exception; + private Library.Utility.TempFile m_file; + + public string Name { get; private set; } + public string Hash { get; private set; } + public long Size { get; private set; } + + public void DisposeTempFile() + { + if (m_file != null) + try { m_file.Dispose(); } + finally { m_file = null; } + } + + + public Library.Utility.TempFile TempFile + { + get + { + if (m_exception != null) + throw m_exception; + + return m_file; + } + } + + public AsyncDownloadedFile(string name, string hash, long size, Library.Utility.TempFile tempfile, Exception exception) + { + this.Name = name; + this.Hash = hash; + this.Size = size; + m_exception = exception; + m_file = tempfile; + } + } private IList m_volumes; private BackendManager.IDownloadWaitHandle m_handle; @@ -86,33 +86,33 @@ namespace Duplicati.Library.Main } public bool MoveNext() - { - if (m_current != null) - { - m_current.DisposeTempFile(); - m_current = null; - } - - if (m_index >= m_volumes.Count) - return false; - - if (m_handle == null) - m_handle = m_backend.GetAsync(m_volumes[m_index].Name, m_volumes[m_index].Size, m_volumes[m_index].Hash); + { + if (m_current != null) + { + m_current.DisposeTempFile(); + m_current = null; + } + + if (m_index >= m_volumes.Count) + return false; + + if (m_handle == null) + m_handle = m_backend.GetAsync(m_volumes[m_index].Name, m_volumes[m_index].Size, m_volumes[m_index].Hash); + + string hash = null; + long size = -1; + Library.Utility.TempFile file = null; + Exception exception = null; + try + { + file = m_handle.Wait(out hash, out size); + + } + catch (Exception ex) + { + exception = ex; + } - string hash = null; - long size = -1; - Library.Utility.TempFile file = null; - Exception exception = null; - try - { - file = m_handle.Wait(out hash, out size); - - } - catch (Exception ex) - { - exception = ex; - } - m_current = new AsyncDownloadedFile(m_volumes[m_index].Name, hash, size, file, exception); m_handle = null; diff --git a/Duplicati/Library/Main/BackendManager.cs b/Duplicati/Library/Main/BackendManager.cs index 8c6a6eb05..b06599b48 100644 --- 
a/Duplicati/Library/Main/BackendManager.cs +++ b/Duplicati/Library/Main/BackendManager.cs @@ -12,7 +12,7 @@ namespace Duplicati.Library.Main { internal class BackendManager : IDisposable { - public const string VOLUME_HASH = "SHA256"; + public const string VOLUME_HASH = "SHA256"; /// /// Class to represent hash failures @@ -161,8 +161,8 @@ namespace Duplicati.Library.Main { this.WaitForComplete(); if (Exception != null) - throw Exception; - + throw Exception; + return (TempFile)this.Result; } @@ -171,8 +171,8 @@ namespace Duplicati.Library.Main this.WaitForComplete(); if (Exception != null) - throw Exception; - + throw Exception; + hash = this.Hash; size = this.Size; @@ -288,56 +288,56 @@ namespace Duplicati.Library.Main private class DatabaseCollector { - private object m_dbqueuelock = new object(); - private LocalDatabase m_database; - private System.Threading.Thread m_callerThread; - private List m_dbqueue; - private IBackendWriter m_stats; - - private interface IDbEntry { } - - private class DbOperation : IDbEntry - { - public string Action; - public string File; - public string Result; - } - - private class DbUpdate : IDbEntry - { - public string Remotename; - public RemoteVolumeState State; - public long Size; - public string Hash; - } - + private object m_dbqueuelock = new object(); + private LocalDatabase m_database; + private System.Threading.Thread m_callerThread; + private List m_dbqueue; + private IBackendWriter m_stats; + + private interface IDbEntry { } + + private class DbOperation : IDbEntry + { + public string Action; + public string File; + public string Result; + } + + private class DbUpdate : IDbEntry + { + public string Remotename; + public RemoteVolumeState State; + public long Size; + public string Hash; + } + private class DbRename : IDbEntry { public string Oldname; public string Newname; } - public DatabaseCollector(LocalDatabase database, IBackendWriter stats) - { - m_database = database; - m_stats = stats; - m_dbqueue = new List(); - if (m_database != null) - m_callerThread = System.Threading.Thread.CurrentThread; - } - - - public void LogDbOperation(string action, string file, string result) - { - lock(m_dbqueuelock) - m_dbqueue.Add(new DbOperation() { Action = action, File = file, Result = result }); - } - - public void LogDbUpdate(string remotename, RemoteVolumeState state, long size, string hash) - { - lock(m_dbqueuelock) - m_dbqueue.Add(new DbUpdate() { Remotename = remotename, State = state, Size = size, Hash = hash }); - } + public DatabaseCollector(LocalDatabase database, IBackendWriter stats) + { + m_database = database; + m_stats = stats; + m_dbqueue = new List(); + if (m_database != null) + m_callerThread = System.Threading.Thread.CurrentThread; + } + + + public void LogDbOperation(string action, string file, string result) + { + lock(m_dbqueuelock) + m_dbqueue.Add(new DbOperation() { Action = action, File = file, Result = result }); + } + + public void LogDbUpdate(string remotename, RemoteVolumeState state, long size, string hash) + { + lock(m_dbqueuelock) + m_dbqueue.Add(new DbUpdate() { Remotename = remotename, State = state, Size = size, Hash = hash }); + } public void LogDbRename(string oldname, string newname) { @@ -345,33 +345,33 @@ namespace Duplicati.Library.Main m_dbqueue.Add(new DbRename() { Oldname = oldname, Newname = newname }); } - public bool FlushDbMessages(bool checkThread = false) - { - if (m_database != null && (checkThread == false || m_callerThread == System.Threading.Thread.CurrentThread)) - return FlushDbMessages(m_database, 
null); - - return false; - } - - public bool FlushDbMessages(LocalDatabase db, System.Data.IDbTransaction transaction) - { - List entries; - lock(m_dbqueuelock) - if (m_dbqueue.Count == 0) - return false; - else - { - entries = m_dbqueue; - m_dbqueue = new List(); - } + public bool FlushDbMessages(bool checkThread = false) + { + if (m_database != null && (checkThread == false || m_callerThread == System.Threading.Thread.CurrentThread)) + return FlushDbMessages(m_database, null); + + return false; + } + + public bool FlushDbMessages(LocalDatabase db, System.Data.IDbTransaction transaction) + { + List entries; + lock(m_dbqueuelock) + if (m_dbqueue.Count == 0) + return false; + else + { + entries = m_dbqueue; + m_dbqueue = new List(); + } // collect removed volumes for final db cleanup. HashSet volsRemoved = new HashSet(); - //As we replace the list, we can now freely access the elements without locking - foreach(var e in entries) - if (e is DbOperation) - db.LogRemoteOperation(((DbOperation)e).Action, ((DbOperation)e).File, ((DbOperation)e).Result, transaction); + //As we replace the list, we can now freely access the elements without locking + foreach(var e in entries) + if (e is DbOperation) + db.LogRemoteOperation(((DbOperation)e).Action, ((DbOperation)e).File, ((DbOperation)e).Result, transaction); else if (e is DbUpdate && ((DbUpdate)e).State == RemoteVolumeState.Deleted) { db.UpdateRemoteVolume(((DbUpdate)e).Remotename, RemoteVolumeState.Deleted, ((DbUpdate)e).Size, ((DbUpdate)e).Hash, true, transaction); @@ -388,8 +388,8 @@ namespace Duplicati.Library.Main if (volsRemoved.Count > 0) db.RemoveRemoteVolumes(volsRemoved); - return true; - } + return true; + } } private BlockingQueue m_queue; @@ -402,7 +402,7 @@ namespace Duplicati.Library.Main private IBackendWriter m_statwriter; private System.Threading.Thread m_thread; private BasicResults m_taskControl; - private DatabaseCollector m_db; + private DatabaseCollector m_db; public string BackendUrl { get { return m_backendurl; } } @@ -418,16 +418,16 @@ namespace Duplicati.Library.Main m_db = new DatabaseCollector(database, statwriter); m_backend = DynamicLoader.BackendLoader.GetBackend(m_backendurl, m_options.RawOptions); - if (m_backend == null) - { - string shortname = m_backendurl; + if (m_backend == null) + { + string shortname = m_backendurl; - // Try not to leak hostnames or other information in the error messages - try { shortname = new Library.Utility.Uri(shortname).Scheme; } - catch { } + // Try not to leak hostnames or other information in the error messages + try { shortname = new Library.Utility.Uri(shortname).Scheme; } + catch { } - throw new Exception(string.Format("Backend not supported: {0}", shortname)); - } + throw new Exception(string.Format("Backend not supported: {0}", shortname)); + } if (!m_options.NoEncryption) { @@ -563,19 +563,19 @@ namespace Duplicati.Library.Main m_statwriter.SendEvent(item.BackendActionType, retries < m_options.NumberOfRetries ? 
BackendEventType.Retrying : BackendEventType.Failed, item.RemoteFilename, item.Size); - bool recovered = false; + bool recovered = false; if (!uploadSuccess && ex is Duplicati.Library.Interface.FolderMissingException && m_options.AutocreateFolders) { - try - { - // If we successfully create the folder, we can re-use the connection - m_backend.CreateFolder(); - recovered = true; - } - catch(Exception dex) - { - m_statwriter.AddWarning(string.Format("Failed to create folder: {0}", ex.Message), dex); - } + try + { + // If we successfully create the folder, we can re-use the connection + m_backend.CreateFolder(); + recovered = true; + } + catch(Exception dex) + { + m_statwriter.AddWarning(string.Format("Failed to create folder: {0}", ex.Message), dex); + } } // To work around the Apache WEBDAV issue, we rename the file here @@ -616,17 +616,17 @@ namespace Duplicati.Library.Main if (lastException != null) { - item.Exception = lastException; + item.Exception = lastException; if (item.Operation == OperationType.Put) - item.DeleteLocalFile(m_statwriter); + item.DeleteLocalFile(m_statwriter); if (item.ExceptionKillsHandler) { - m_lastException = lastException; + m_lastException = lastException; - //TODO: If there are temp files in the queue, we must delete them - m_queue.SetCompleted(); - } + //TODO: If there are temp files in the queue, we must delete them + m_queue.SetCompleted(); + } } @@ -708,7 +708,7 @@ namespace Duplicati.Library.Main item.Encrypt(m_encryption, m_statwriter); if (item.UpdateHashAndSize(m_options) && !item.NotTrackedInDb) - m_db.LogDbUpdate(item.RemoteFilename, RemoteVolumeState.Uploading, item.Size, item.Hash); + m_db.LogDbUpdate(item.RemoteFilename, RemoteVolumeState.Uploading, item.Size, item.Hash); if (item.Indexfile != null && !item.IndexfileUpdated) { @@ -736,8 +736,8 @@ namespace Duplicati.Library.Main Logging.Log.WriteMessage(string.Format("Uploaded {0} in {1}, {2}/s", Library.Utility.Utility.FormatSizeString(item.Size), duration, Library.Utility.Utility.FormatSizeString((long)(item.Size / duration.TotalSeconds))), Duplicati.Library.Logging.LogMessageType.Profiling); if (!item.NotTrackedInDb) - m_db.LogDbUpdate(item.RemoteFilename, RemoteVolumeState.Uploaded, item.Size, item.Hash); - + m_db.LogDbUpdate(item.RemoteFilename, RemoteVolumeState.Uploaded, item.Size, item.Hash); + m_statwriter.SendEvent(BackendActionType.Put, BackendEventType.Completed, item.RemoteFilename, item.Size); if (m_options.ListVerifyUploads) @@ -749,7 +749,7 @@ namespace Duplicati.Library.Main throw new Exception(string.Format("List verify failed for file: {0}, size was {1} but expected to be {2}", f.Name, f.Size, item.Size)); } - item.DeleteLocalFile(m_statwriter); + item.DeleteLocalFile(m_statwriter); } private TempFile coreDoGetPiping(FileEntryItem item, Interface.IEncryption useDecrypter, out long retDownloadSize, out string retHashcode) @@ -1166,16 +1166,16 @@ namespace Duplicati.Library.Main } public void Put(VolumeWriterBase item, IndexVolumeWriter indexfile = null) - { - if (m_lastException != null) - throw m_lastException; + { + if (m_lastException != null) + throw m_lastException; - item.Close(); - var req = new FileEntryItem(OperationType.Put, item.RemoteFilename, null); - req.LocalTempfile = item.TempFile; - - if (m_lastException != null) - throw m_lastException; + item.Close(); + var req = new FileEntryItem(OperationType.Put, item.RemoteFilename, null); + req.LocalTempfile = item.TempFile; + + if (m_lastException != null) + throw m_lastException; FileEntryItem req2 = null; @@ -1191,11 
+1191,11 @@ namespace Duplicati.Library.Main // We do not encrypt the dindex volume, because it is small, // and may need to be re-written if the dblock upload is retried - if (indexfile != null) - { - m_db.LogDbUpdate(indexfile.RemoteFilename, RemoteVolumeState.Uploading, -1, null); - req2 = new FileEntryItem(OperationType.Put, indexfile.RemoteFilename); - req2.LocalTempfile = indexfile.TempFile; + if (indexfile != null) + { + m_db.LogDbUpdate(indexfile.RemoteFilename, RemoteVolumeState.Uploading, -1, null); + req2 = new FileEntryItem(OperationType.Put, indexfile.RemoteFilename); + req2.LocalTempfile = indexfile.TempFile; req.Indexfile = new Tuple(indexfile, req2); } @@ -1236,16 +1236,16 @@ namespace Duplicati.Library.Main } public Library.Utility.TempFile Get(string remotename, long size, string hash) - { - if (m_lastException != null) - throw m_lastException; + { + if (m_lastException != null) + throw m_lastException; - var req = new FileEntryItem(OperationType.Get, remotename, size, hash); + var req = new FileEntryItem(OperationType.Get, remotename, size, hash); if (m_queue.Enqueue(req)) ((IDownloadWaitHandle)req).Wait(); - if (m_lastException != null) - throw m_lastException; + if (m_lastException != null) + throw m_lastException; return (Library.Utility.TempFile)req.Result; } @@ -1259,8 +1259,8 @@ namespace Duplicati.Library.Main if (m_queue.Enqueue(req)) return req; - if (m_lastException != null) - throw m_lastException; + if (m_lastException != null) + throw m_lastException; else throw new InvalidOperationException("GetAsync called after backend is shut down"); } @@ -1287,27 +1287,27 @@ namespace Duplicati.Library.Main } public IList List() - { - if (m_lastException != null) - throw m_lastException; + { + if (m_lastException != null) + throw m_lastException; - var req = new FileEntryItem(OperationType.List, null); - if (m_queue.Enqueue(req)) - { - req.WaitForComplete(); - if (req.Exception != null) - throw req.Exception; - } + var req = new FileEntryItem(OperationType.List, null); + if (m_queue.Enqueue(req)) + { + req.WaitForComplete(); + if (req.Exception != null) + throw req.Exception; + } - if (m_lastException != null) - throw m_lastException; + if (m_lastException != null) + throw m_lastException; return (IList)req.Result; } public void WaitForComplete(LocalDatabase db, System.Data.IDbTransaction transation) { - m_db.FlushDbMessages(db, transation); + m_db.FlushDbMessages(db, transation); if (m_lastException != null) throw m_lastException; @@ -1315,7 +1315,7 @@ namespace Duplicati.Library.Main if (m_queue.Enqueue(item)) item.WaitForComplete(); - m_db.FlushDbMessages(db, transation); + m_db.FlushDbMessages(db, transation); if (m_lastException != null) throw m_lastException; @@ -1338,48 +1338,48 @@ namespace Duplicati.Library.Main } public void CreateFolder(string remotename) - { - if (m_lastException != null) - throw m_lastException; - - var req = new FileEntryItem(OperationType.CreateFolder, remotename); - if (m_queue.Enqueue(req)) - { - req.WaitForComplete(); - if (req.Exception != null) - throw req.Exception; - } - - if (m_lastException != null) - throw m_lastException; + { + if (m_lastException != null) + throw m_lastException; + + var req = new FileEntryItem(OperationType.CreateFolder, remotename); + if (m_queue.Enqueue(req)) + { + req.WaitForComplete(); + if (req.Exception != null) + throw req.Exception; + } + + if (m_lastException != null) + throw m_lastException; } public void Delete(string remotename, long size, bool synchronous = false) - { - if (m_lastException != 
null) - throw m_lastException; - - m_db.LogDbUpdate(remotename, RemoteVolumeState.Deleting, size, null); - var req = new FileEntryItem(OperationType.Delete, remotename, size, null); - if (m_queue.Enqueue(req) && synchronous) - { - req.WaitForComplete(); - if (req.Exception != null) - throw req.Exception; - } - - if (m_lastException != null) - throw m_lastException; - } + { + if (m_lastException != null) + throw m_lastException; + + m_db.LogDbUpdate(remotename, RemoteVolumeState.Deleting, size, null); + var req = new FileEntryItem(OperationType.Delete, remotename, size, null); + if (m_queue.Enqueue(req) && synchronous) + { + req.WaitForComplete(); + if (req.Exception != null) + throw req.Exception; + } + + if (m_lastException != null) + throw m_lastException; + } public bool FlushDbMessages(LocalDatabase database, System.Data.IDbTransaction transaction) { - return m_db.FlushDbMessages(database, transaction); + return m_db.FlushDbMessages(database, transaction); } public bool FlushDbMessages() { - return m_db.FlushDbMessages(false); + return m_db.FlushDbMessages(false); } public void Dispose() @@ -1406,8 +1406,8 @@ namespace Duplicati.Library.Main m_backend = null; } - try { m_db.FlushDbMessages(true); } - catch (Exception ex) { m_statwriter.AddError(string.Format("Backend Shutdown error: {0}", ex.Message), ex); } + try { m_db.FlushDbMessages(true); } + catch (Exception ex) { m_statwriter.AddError(string.Format("Backend Shutdown error: {0}", ex.Message), ex); } } } } diff --git a/Duplicati/Library/Main/Controller.cs b/Duplicati/Library/Main/Controller.cs index 750a22ebf..e7a6ecc11 100644 --- a/Duplicati/Library/Main/Controller.cs +++ b/Duplicati/Library/Main/Controller.cs @@ -109,29 +109,29 @@ namespace Duplicati.Library.Main } public Duplicati.Library.Interface.IBackupResults Backup(string[] inputsources, IFilter filter = null) - { + { Library.UsageReporter.Reporter.Report("USE_BACKEND", new Library.Utility.Uri(m_backend).Scheme); Library.UsageReporter.Reporter.Report("USE_COMPRESSION", m_options.CompressionModule); Library.UsageReporter.Reporter.Report("USE_ENCRYPTION", m_options.EncryptionModule); return RunAction(new BackupResults(), ref inputsources, ref filter, (result) => { - if (inputsources == null || inputsources.Length == 0) - throw new Exception(Strings.Controller.NoSourceFoldersError); + if (inputsources == null || inputsources.Length == 0) + throw new Exception(Strings.Controller.NoSourceFoldersError); var sources = new List(inputsources); - //Make sure they all have the same format and exist - for(int i = 0; i < sources.Count; i++) - { - try - { - sources[i] = System.IO.Path.GetFullPath(sources[i]); - } - catch (Exception ex) - { - throw new ArgumentException(Strings.Controller.InvalidPathError(sources[i], ex.Message), ex); - } + //Make sure they all have the same format and exist + for(int i = 0; i < sources.Count; i++) + { + try + { + sources[i] = System.IO.Path.GetFullPath(sources[i]); + } + catch (Exception ex) + { + throw new ArgumentException(Strings.Controller.InvalidPathError(sources[i], ex.Message), ex); + } var fi = new System.IO.FileInfo(sources[i]); var di = new System.IO.DirectoryInfo(sources[i]); @@ -139,20 +139,20 @@ namespace Duplicati.Library.Main throw new System.IO.IOException(Strings.Controller.SourceIsMissingError(sources[i])); if (!fi.Exists) - sources[i] = Library.Utility.Utility.AppendDirSeparator(sources[i]); - } - - //Sanity check for duplicate folders and multiple inclusions of the same folder - for(int i = 0; i < sources.Count - 1; i++) - { - 
for(int j = i + 1; j < sources.Count; j++) - if (sources[i].Equals(sources[j], Library.Utility.Utility.IsFSCaseSensitive ? StringComparison.CurrentCulture : StringComparison.CurrentCultureIgnoreCase)) + sources[i] = Library.Utility.Utility.AppendDirSeparator(sources[i]); + } + + //Sanity check for duplicate folders and multiple inclusions of the same folder + for(int i = 0; i < sources.Count - 1; i++) + { + for(int j = i + 1; j < sources.Count; j++) + if (sources[i].Equals(sources[j], Library.Utility.Utility.IsFSCaseSensitive ? StringComparison.CurrentCulture : StringComparison.CurrentCultureIgnoreCase)) { result.AddVerboseMessage("Removing duplicate source: {0}", sources[j]); - sources.RemoveAt(j); + sources.RemoveAt(j); j--; } - else if (sources[i].StartsWith(sources[j], Library.Utility.Utility.IsFSCaseSensitive ? StringComparison.CurrentCulture : StringComparison.CurrentCultureIgnoreCase)) + else if (sources[i].StartsWith(sources[j], Library.Utility.Utility.IsFSCaseSensitive ? StringComparison.CurrentCulture : StringComparison.CurrentCultureIgnoreCase)) { bool includes; bool excludes; @@ -174,7 +174,7 @@ namespace Duplicati.Library.Main i--; break; } - } + } using(var h = new Operation.BackupHandler(m_backend, m_options, result)) h.Run(sources.ToArray(), filter); @@ -186,9 +186,9 @@ namespace Duplicati.Library.Main } public Library.Interface.IRestoreResults Restore(string[] paths, Library.Utility.IFilter filter = null) - { + { return RunAction(new RestoreResults(), ref paths, ref filter, (result) => { - new Operation.RestoreHandler(m_backend, m_options, result).Run(paths, filter); + new Operation.RestoreHandler(m_backend, m_options, result).Run(paths, filter); Library.UsageReporter.Reporter.Report("RESTORE_FILECOUNT", result.FilesRestored); Library.UsageReporter.Reporter.Report("RESTORE_FILESIZE", result.SizeOfRestoredFiles); @@ -204,9 +204,9 @@ namespace Duplicati.Library.Main } public Duplicati.Library.Interface.IDeleteResults Delete() - { + { return RunAction(new DeleteResults(), (result) => { - new Operation.DeleteHandler(m_backend, m_options, result).Run(); + new Operation.DeleteHandler(m_backend, m_options, result).Run(); }); } @@ -228,9 +228,9 @@ namespace Duplicati.Library.Main } public Duplicati.Library.Interface.IListResults List(IEnumerable filterstrings, Library.Utility.IFilter filter = null) - { + { return RunAction(new ListResults(), ref filter, (result) => { - new Operation.ListFilesHandler(m_backend, m_options, result).Run(filterstrings, filter); + new Operation.ListFilesHandler(m_backend, m_options, result).Run(filterstrings, filter); }); } @@ -379,7 +379,7 @@ namespace Duplicati.Library.Main m_currentTask = null; m_currentTaskThread = null; } - } + } /// /// Attempts to get the locale, but delays linking to the calls as they are missing in some environments @@ -401,8 +401,8 @@ namespace Duplicati.Library.Main System.Globalization.CultureInfo.DefaultThreadCurrentUICulture = uiLocale; } - private void OnOperationComplete(object result) - { + private void OnOperationComplete(object result) + { if (m_options != null && m_options.LoadedModules != null) { foreach (KeyValuePair mx in m_options.LoadedModules) @@ -457,7 +457,7 @@ namespace Duplicati.Library.Main sl.Dispose(); m_hasSetLogging = false; } - } + } private void SetupCommonOptions(ISetCommonOptions result, ref string[] paths, ref IFilter filter) { @@ -581,8 +581,8 @@ namespace Duplicati.Library.Main /// The statistics into which warnings are written private void ValidateOptions(ILogWriter log) { - if 
(m_options.KeepTime.Ticks > 0 && m_options.KeepVersions > 0) - throw new Exception(string.Format("Setting both --{0} and --{1} is not permitted", "keep-versions", "keep-time")); + if (m_options.KeepTime.Ticks > 0 && m_options.KeepVersions > 0) + throw new Exception(string.Format("Setting both --{0} and --{1} is not permitted", "keep-versions", "keep-time")); //No point in going through with this if we can't report if (log == null) @@ -701,44 +701,44 @@ namespace Duplicati.Library.Main /// The value to check /// Null if no errors are found, an error message otherwise public static string ValidateOptionValue(Library.Interface.ICommandLineArgument arg, string optionname, string value) - { - if (arg.Type == Duplicati.Library.Interface.CommandLineArgument.ArgumentType.Enumeration) - { - bool found = false; - foreach (string v in arg.ValidValues ?? new string[0]) - if (string.Equals(v, value, StringComparison.CurrentCultureIgnoreCase)) - { - found = true; - break; - } - - if (!found) - return Strings.Controller.UnsupportedEnumerationValue(optionname, value, arg.ValidValues ?? new string[0]); - - } - else if (arg.Type == Duplicati.Library.Interface.CommandLineArgument.ArgumentType.Flags) - { - bool validatedAllFlags = false; - var flags = (value ?? string.Empty).ToLowerInvariant().Split(new[] {","}, StringSplitOptions.None).Select(flag => flag.Trim()).Distinct(); - var validFlags = arg.ValidValues ?? new string[0]; - - foreach (var flag in flags) - { - if (!validFlags.Any(validFlag => string.Equals(validFlag, flag, StringComparison.CurrentCultureIgnoreCase))) - { - validatedAllFlags = false; - break; - } - - validatedAllFlags = true; - } - - if (!validatedAllFlags) - { - return Strings.Controller.UnsupportedFlagsValue(optionname, value, validFlags); - } - } - else if (arg.Type == Duplicati.Library.Interface.CommandLineArgument.ArgumentType.Boolean) + { + if (arg.Type == Duplicati.Library.Interface.CommandLineArgument.ArgumentType.Enumeration) + { + bool found = false; + foreach (string v in arg.ValidValues ?? new string[0]) + if (string.Equals(v, value, StringComparison.CurrentCultureIgnoreCase)) + { + found = true; + break; + } + + if (!found) + return Strings.Controller.UnsupportedEnumerationValue(optionname, value, arg.ValidValues ?? new string[0]); + + } + else if (arg.Type == Duplicati.Library.Interface.CommandLineArgument.ArgumentType.Flags) + { + bool validatedAllFlags = false; + var flags = (value ?? string.Empty).ToLowerInvariant().Split(new[] {","}, StringSplitOptions.None).Select(flag => flag.Trim()).Distinct(); + var validFlags = arg.ValidValues ?? 
new string[0]; + + foreach (var flag in flags) + { + if (!validFlags.Any(validFlag => string.Equals(validFlag, flag, StringComparison.CurrentCultureIgnoreCase))) + { + validatedAllFlags = false; + break; + } + + validatedAllFlags = true; + } + + if (!validatedAllFlags) + { + return Strings.Controller.UnsupportedFlagsValue(optionname, value, validFlags); + } + } + else if (arg.Type == Duplicati.Library.Interface.CommandLineArgument.ArgumentType.Boolean) { if (!string.IsNullOrEmpty(value) && Library.Utility.Utility.ParseBool(value, true) != Library.Utility.Utility.ParseBool(value, false)) return Strings.Controller.UnsupportedBooleanValue(optionname, value); diff --git a/Duplicati/Library/Main/Database/LocalBackupDatabase.cs b/Duplicati/Library/Main/Database/LocalBackupDatabase.cs index 30701efa1..2de6fe011 100644 --- a/Duplicati/Library/Main/Database/LocalBackupDatabase.cs +++ b/Duplicati/Library/Main/Database/LocalBackupDatabase.cs @@ -82,8 +82,8 @@ namespace Duplicati.Library.Main.Database private readonly System.Data.IDbCommand m_selectblocklistHashesCommand; private readonly System.Data.IDbCommand m_insertfileOperationCommand; - - private HashLookupHelper> m_blockHashLookup; + + private HashLookupHelper> m_blockHashLookup; private HashLookupHelper m_fileHashLookup; private HashLookupHelper m_metadataLookup; private PathLookupHelper m_pathLookup; @@ -97,9 +97,9 @@ namespace Duplicati.Library.Main.Database { this.ShouldCloseConnection = true; } - + public LocalBackupDatabase(LocalDatabase db, Options options) - : base(db) + : base(db) { m_findblockCommand = m_connection.CreateCommand(); m_insertblockCommand = m_connection.CreateCommand(); @@ -116,8 +116,8 @@ namespace Duplicati.Library.Main.Database m_selectfileSimpleCommand = m_connection.CreateCommand(); m_selectfileHashCommand = m_connection.CreateCommand(); m_insertblocksetentryFastCommand = m_connection.CreateCommand(); - - m_findblockCommand.CommandText = @"SELECT ""ID"" FROM ""Block"" WHERE ""Hash"" = ? AND ""Size"" = ?"; + + m_findblockCommand.CommandText = @"SELECT ""ID"" FROM ""Block"" WHERE ""Hash"" = ? AND ""Size"" = ?"; m_findblockCommand.AddParameters(2); m_findblocksetCommand.CommandText = @"SELECT ""ID"" FROM ""Blockset"" WHERE ""Fullhash"" = ? 
AND ""Length"" = ?"; @@ -639,9 +639,9 @@ namespace Duplicati.Library.Main.Database } public void WriteFileset(Volumes.FilesetVolumeWriter filesetvolume, System.Data.IDbTransaction transaction) - { - WriteFileset(filesetvolume, transaction, m_filesetId); - } + { + WriteFileset(filesetvolume, transaction, m_filesetId); + } public override void Dispose () { @@ -745,7 +745,7 @@ namespace Duplicati.Library.Main.Database { return m_filesetId = base.CreateFileset(volumeid, timestamp, transaction); } - + public IEnumerable> GetIncompleteFilesets(System.Data.IDbTransaction transaction) { using(var cmd = m_connection.CreateCommand()) diff --git a/Duplicati/Library/Main/Database/LocalBugReportDatabase.cs b/Duplicati/Library/Main/Database/LocalBugReportDatabase.cs index aaf595f11..85620f4ad 100644 --- a/Duplicati/Library/Main/Database/LocalBugReportDatabase.cs +++ b/Duplicati/Library/Main/Database/LocalBugReportDatabase.cs @@ -19,52 +19,52 @@ using System; namespace Duplicati.Library.Main.Database { - internal class LocalBugReportDatabase : LocalDatabase - { - public LocalBugReportDatabase(string path) - : base(path, "BugReportCreate", false) - { + internal class LocalBugReportDatabase : LocalDatabase + { + public LocalBugReportDatabase(string path) + : base(path, "BugReportCreate", false) + { ShouldCloseConnection = true; - } + } - public void Fix() - { - using(var tr = m_connection.BeginTransaction()) - using(var cmd = m_connection.CreateCommand()) - { - cmd.Transaction = tr; + public void Fix() + { + using(var tr = m_connection.BeginTransaction()) + using(var cmd = m_connection.CreateCommand()) + { + cmd.Transaction = tr; var tablename = "PathMap-" + Library.Utility.Utility.ByteArrayAsHexString(Guid.NewGuid().ToByteArray()); - - using(var upcmd = m_connection.CreateCommand()) - { - upcmd.Transaction = tr; + using(var upcmd = m_connection.CreateCommand()) + { + + upcmd.Transaction = tr; upcmd.ExecuteNonQuery(string.Format(@"CREATE TEMPORARY TABLE ""{0}"" (""ID"" INTEGER PRIMARY KEY, ""RealPath"" TEXT NOT NULL, ""Obfuscated"" TEXT NULL)", tablename)); upcmd.ExecuteNonQuery(string.Format(@"INSERT INTO ""{0}"" (""RealPath"") SELECT DISTINCT ""Path"" FROM ""File"" ORDER BY ""Path"" ", tablename)); upcmd.ExecuteNonQuery(string.Format(@"UPDATE ""{0}"" SET ""Obfuscated"" = ? || length(""RealPath"") || ? || ""ID"" || (CASE WHEN substr(""RealPath"", length(""RealPath"")) = ? THEN ? ELSE ? END) ", tablename), Library.Utility.Utility.IsClientLinux ? 
"/" : "X:\\", Library.Utility.Utility.DirectorySeparatorString, Library.Utility.Utility.DirectorySeparatorString, Library.Utility.Utility.DirectorySeparatorString, ".bin"); - long id = 1; - using(var rd = cmd.ExecuteReader(string.Format(@"SELECT ""RealPath"", ""Obfuscated"" FROM ""{0}"" ", tablename))) - while(rd.Read()) - { - upcmd.ExecuteNonQuery(@"UPDATE ""LogData"" SET ""Message"" = replace(""Message"", ?, ?), ""Exception"" = replace(""Exception"", ?, ?)", rd.GetValue(0), rd.GetValue(1), rd.GetValue(0), rd.GetValue(1) ); - id++; - } - } + long id = 1; + using(var rd = cmd.ExecuteReader(string.Format(@"SELECT ""RealPath"", ""Obfuscated"" FROM ""{0}"" ", tablename))) + while(rd.Read()) + { + upcmd.ExecuteNonQuery(@"UPDATE ""LogData"" SET ""Message"" = replace(""Message"", ?, ?), ""Exception"" = replace(""Exception"", ?, ?)", rd.GetValue(0), rd.GetValue(1), rd.GetValue(0), rd.GetValue(1) ); + id++; + } + } - cmd.ExecuteNonQuery(@"UPDATE ""LogData"" SET ""Message"" = ""ERASED!"" WHERE ""Message"" LIKE ""%/%"" OR ""Message"" LIKE ""%:\%"" "); - cmd.ExecuteNonQuery(@"UPDATE ""LogData"" SET ""Exception"" = ""ERASED!"" WHERE ""Exception"" LIKE ""%/%"" OR ""Exception"" LIKE ""%:\%"" "); - cmd.ExecuteNonQuery(string.Format(@"UPDATE ""File"" SET ""Path"" = (SELECT ""Obfuscated"" FROM ""{0}"" WHERE ""Path"" = ""RealPath"") ", tablename)); - + cmd.ExecuteNonQuery(@"UPDATE ""LogData"" SET ""Message"" = ""ERASED!"" WHERE ""Message"" LIKE ""%/%"" OR ""Message"" LIKE ""%:\%"" "); + cmd.ExecuteNonQuery(@"UPDATE ""LogData"" SET ""Exception"" = ""ERASED!"" WHERE ""Exception"" LIKE ""%/%"" OR ""Exception"" LIKE ""%:\%"" "); + cmd.ExecuteNonQuery(string.Format(@"UPDATE ""File"" SET ""Path"" = (SELECT ""Obfuscated"" FROM ""{0}"" WHERE ""Path"" = ""RealPath"") ", tablename)); + cmd.ExecuteNonQuery(string.Format(@"DROP TABLE IF EXISTS ""{0}"" ", tablename)); using(new Logging.Timer("CommitUpdateBugReport")) - tr.Commit(); - - cmd.Transaction = null; - cmd.ExecuteNonQuery("VACUUM"); - } - } - } + tr.Commit(); + + cmd.Transaction = null; + cmd.ExecuteNonQuery("VACUUM"); + } + } + } } diff --git a/Duplicati/Library/Main/Database/LocalDatabase.cs b/Duplicati/Library/Main/Database/LocalDatabase.cs index 8fd672a3f..fa7dd62a2 100644 --- a/Duplicati/Library/Main/Database/LocalDatabase.cs +++ b/Duplicati/Library/Main/Database/LocalDatabase.cs @@ -7,7 +7,7 @@ using System.IO; namespace Duplicati.Library.Main.Database { internal class LocalDatabase : IDisposable - { + { protected readonly System.Data.IDbConnection m_connection; protected readonly long m_operationid = -1; @@ -15,7 +15,7 @@ namespace Duplicati.Library.Main.Database private readonly System.Data.IDbCommand m_selectremotevolumesCommand; private readonly System.Data.IDbCommand m_selectremotevolumeCommand; private readonly System.Data.IDbCommand m_removeremotevolumeCommand; - private readonly System.Data.IDbCommand m_selectremotevolumeIdCommand; + private readonly System.Data.IDbCommand m_selectremotevolumeIdCommand; private readonly System.Data.IDbCommand m_createremotevolumeCommand; private readonly System.Data.IDbCommand m_selectduplicateRemoteVolumesCommand; @@ -38,7 +38,7 @@ namespace Duplicati.Library.Main.Database protected static System.Data.IDbConnection CreateConnection(string path) { - path = System.IO.Path.GetFullPath(path); + path = System.IO.Path.GetFullPath(path); var c = (System.Data.IDbConnection)Activator.CreateInstance(Duplicati.Library.SQLiteHelper.SQLiteLoader.SQLiteConnectionType); if 
(!System.IO.Directory.Exists(System.IO.Path.GetDirectoryName(path)))
                System.IO.Directory.CreateDirectory(System.IO.Path.GetDirectoryName(path));
 
@@ -65,21 +65,21 @@ namespace Duplicati.Library.Main.Database
         /// The path to the database
         /// The name of the operation
         public LocalDatabase(LocalDatabase db)
-			: this(db.m_connection)
-		{
-			this.OperationTimestamp = db.OperationTimestamp;
-			this.m_connection = db.m_connection;
-			this.m_operationid = db.m_operationid;
+            : this(db.m_connection)
+        {
+            this.OperationTimestamp = db.OperationTimestamp;
+            this.m_connection = db.m_connection;
+            this.m_operationid = db.m_operationid;
             this.m_result = db.m_result;
-		}
-		
+        }
+
         /// 
         /// Creates a new database instance and starts a new operation
         /// 
         /// The path to the database
         /// The name of the operation
         public LocalDatabase(System.Data.IDbConnection connection, string operation)
-			: this(connection)
+            : this(connection)
         {
             this.OperationTimestamp = DateTime.UtcNow;
             m_connection = connection;
@@ -89,10 +89,10 @@ namespace Duplicati.Library.Main.Database
             using (var cmd = m_connection.CreateCommand())
                 m_operationid = cmd.ExecuteScalarInt64( @"INSERT INTO ""Operation"" (""Description"", ""Timestamp"") VALUES (?, ?); SELECT last_insert_rowid();", -1, operation, NormalizeDateTimeToEpochSeconds(OperationTimestamp));
-		}
-		
-		private LocalDatabase(System.Data.IDbConnection connection)
-		{
+        }
+
+        private LocalDatabase(System.Data.IDbConnection connection)
+        {
             m_updateremotevolumeCommand = connection.CreateCommand();
             m_selectremotevolumesCommand = connection.CreateCommand();
             m_selectduplicateRemoteVolumesCommand = connection.CreateCommand();
@@ -100,8 +100,8 @@ namespace Duplicati.Library.Main.Database
             m_insertlogCommand = connection.CreateCommand();
             m_insertremotelogCommand = connection.CreateCommand();
             m_removeremotevolumeCommand = connection.CreateCommand();
-			m_selectremotevolumeIdCommand = connection.CreateCommand();
-			m_createremotevolumeCommand = connection.CreateCommand();
+            m_selectremotevolumeIdCommand = connection.CreateCommand();
+            m_createremotevolumeCommand = connection.CreateCommand();
             m_insertIndexBlockLink = connection.CreateCommand();
 
             m_insertlogCommand.CommandText = @"INSERT INTO ""LogData"" (""OperationID"", ""Timestamp"", ""Type"", ""Message"", ""Exception"") VALUES (?, ?, ?, ?, ?)";
@@ -123,20 +123,20 @@ namespace Duplicati.Library.Main.Database
             m_removeremotevolumeCommand.CommandText = @"DELETE FROM ""Remotevolume"" WHERE ""Name"" = ?";
             m_removeremotevolumeCommand.AddParameter();
 
-			m_selectremotevolumeIdCommand.CommandText = @"SELECT ""ID"" FROM ""Remotevolume"" WHERE ""Name"" = ?";
+            m_selectremotevolumeIdCommand.CommandText = @"SELECT ""ID"" FROM ""Remotevolume"" WHERE ""Name"" = ?";
 
-			m_createremotevolumeCommand.CommandText = @"INSERT INTO ""Remotevolume"" (""OperationID"", ""Name"", ""Type"", ""State"", ""Size"", ""VerificationCount"", ""DeleteGraceTime"") VALUES (?, ?, ?, ?, ?, ?, ?); SELECT last_insert_rowid();";
+            m_createremotevolumeCommand.CommandText = @"INSERT INTO ""Remotevolume"" (""OperationID"", ""Name"", ""Type"", ""State"", ""Size"", ""VerificationCount"", ""DeleteGraceTime"") VALUES (?, ?, ?, ?, ?, ?, ?); SELECT last_insert_rowid();";
             m_createremotevolumeCommand.AddParameters(7);
 
             m_insertIndexBlockLink.CommandText = @"INSERT INTO ""IndexBlockLink"" (""IndexVolumeID"", ""BlockVolumeID"") VALUES (?, ?)";
             m_insertIndexBlockLink.AddParameters(2);
-		}
+        }
 
         internal void SetResult(BasicResults result)
         {
             m_result = result;
         }
-		
+
         /// 
         /// Normalizes a DateTime instance, floored to seconds and in UTC
         /// 
@@ -191,7 +191,7 @@ namespace Duplicati.Library.Main.Database
             if (!suppressCleanup && state == RemoteVolumeState.Deleted)
-				RemoveRemoteVolume(name, transaction);
+                RemoveRemoteVolume(name, transaction);
         }
 
         public IEnumerable<KeyValuePair<long, DateTime>> FilesetTimes
@@ -206,10 +206,10 @@ namespace Duplicati.Library.Main.Database
         }
 
         public Tuple<string, object[]> GetFilelistWhereClause(DateTime time, long[] versions, IEnumerable<KeyValuePair<long, DateTime>> filesetslist = null, bool singleTimeMatch = false)
-		{
-			var filesets = (filesetslist ?? this.FilesetTimes).ToArray();
-			string query = "";
-			var args = new List<object>();
+        {
+            var filesets = (filesetslist ?? this.FilesetTimes).ToArray();
+            string query = "";
+            var args = new List<object>();
             if (time.Ticks > 0 || (versions != null && versions.Length > 0))
             {
                 var hasTime = false;
@@ -258,10 +258,10 @@ namespace Duplicati.Library.Main.Database
         }
 
         public long GetRemoteVolumeID(string file, System.Data.IDbTransaction transaction = null)
-		{
-			m_selectremotevolumeIdCommand.Transaction = transaction;
-			return m_selectremotevolumeIdCommand.ExecuteScalarInt64(null, -1, file);
-		}
+        {
+            m_selectremotevolumeIdCommand.Transaction = transaction;
+            return m_selectremotevolumeIdCommand.ExecuteScalarInt64(null, -1, file);
+        }
 
         public bool GetRemoteVolume(string file, out string hash, out long size, out RemoteVolumeType type, out RemoteVolumeState state)
         {
@@ -320,7 +320,7 @@ namespace Duplicati.Library.Main.Database
         /// Any data relating to the operation
         public void LogRemoteOperation(string operation, string path, string data, System.Data.IDbTransaction transaction)
         {
-			m_insertremotelogCommand.Transaction = transaction;
+            m_insertremotelogCommand.Transaction = transaction;
             m_insertremotelogCommand.SetParameterValue(0, m_operationid);
             m_insertremotelogCommand.SetParameterValue(1, NormalizeDateTimeToEpochSeconds(DateTime.UtcNow));
             m_insertremotelogCommand.SetParameterValue(2, operation);
@@ -337,7 +337,7 @@ namespace Duplicati.Library.Main.Database
         /// An optional exception
         public void LogMessage(string type, string message, Exception exception, System.Data.IDbTransaction transaction)
         {
-			m_insertlogCommand.Transaction = transaction;
+            m_insertlogCommand.Transaction = transaction;
             m_insertlogCommand.SetParameterValue(0, m_operationid);
             m_insertlogCommand.SetParameterValue(1, NormalizeDateTimeToEpochSeconds(DateTime.UtcNow));
             m_insertlogCommand.SetParameterValue(2, type);
@@ -392,9 +392,9 @@ namespace Duplicati.Library.Main.Database
                 var volIdsSubQuery = string.Format(@"SELECT ""ID"" FROM ""{0}"" ", volidstable);
                 deletecmd.Parameters.Clear();
 
-				// If the volume is a block or index volume, this will update the crosslink table, otherwise nothing will happen
+                // If the volume is a block or index volume, this will update the crosslink table, otherwise nothing will happen
                 deletecmd.ExecuteNonQuery(string.Format(@"DELETE FROM ""IndexBlockLink"" WHERE ""BlockVolumeID"" IN ({0}) OR ""IndexVolumeID"" IN ({0})", volIdsSubQuery));
-				
+
                 // If the volume is a fileset, this will remove the fileset, otherwise nothing will happen
                 deletecmd.ExecuteNonQuery(string.Format(@"DELETE FROM ""FilesetEntry"" WHERE ""FilesetID"" IN (SELECT ""ID"" FROM ""Fileset"" WHERE ""VolumeID"" IN ({0}))", volIdsSubQuery));
                 deletecmd.ExecuteNonQuery(string.Format(@"DELETE FROM ""Fileset"" WHERE ""VolumeID"" IN ({0})", volIdsSubQuery));
@@ -467,9 +467,9 @@ namespace Duplicati.Library.Main.Database
         }
 
         public long RegisterRemoteVolume(string name, RemoteVolumeType type, RemoteVolumeState state, long size, TimeSpan deleteGraceTime, System.Data.IDbTransaction transaction)
-		{
-			using(var tr = new TemporaryTransactionWrapper(m_connection, transaction))
-			{
+        {
+            using(var tr = new TemporaryTransactionWrapper(m_connection, transaction))
+            {
                 m_createremotevolumeCommand.SetParameterValue(0, m_operationid);
                 m_createremotevolumeCommand.SetParameterValue(1, name);
                 m_createremotevolumeCommand.SetParameterValue(2, type.ToString());
@@ -649,21 +649,21 @@ namespace Duplicati.Library.Main.Database
             using(var cmd = m_connection.CreateCommand())
             using(var rd = cmd.ExecuteReader(@"SELECT ""A"".""Path"", ""B"".""Length"", ""B"".""FullHash"", ""D"".""FullHash"" FROM ""File"" A, ""Blockset"" B, ""Metadataset"" C, ""Blockset"" D, ""FilesetEntry"" E WHERE ""A"".""BlocksetID"" = ""B"".""ID"" AND ""A"".""MetadataID"" = ""C"".""ID"" AND ""C"".""BlocksetID"" = ""D"".""ID"" AND ""A"".""ID"" = ""E"".""FileID"" AND ""E"".""FilesetID"" = ? ", filesetId))
                 while(rd.Read())
-					yield return new LocalFileEntry(rd);
+                    yield return new LocalFileEntry(rd);
         }
 
         private IEnumerable<KeyValuePair<string, string>> GetDbOptionList(System.Data.IDbTransaction transaction = null)
-		{
+        {
             using(var cmd = m_connection.CreateCommand(transaction))
             using(var rd = cmd.ExecuteReader(@"SELECT ""Key"", ""Value"" FROM ""Configuration"" "))
                 while(rd.Read())
-					yield return new KeyValuePair<string, string>(rd.GetValue(0).ToString(), rd.GetValue(1).ToString());
-		}
-		
+                    yield return new KeyValuePair<string, string>(rd.GetValue(0).ToString(), rd.GetValue(1).ToString());
+        }
+
         public IDictionary<string, string> GetDbOptions(System.Data.IDbTransaction transaction = null)
-		{
-			return GetDbOptionList(transaction).ToDictionary(x => x.Key, x => x.Value);
-		}
+        {
+            return GetDbOptionList(transaction).ToDictionary(x => x.Key, x => x.Value);
+        }
 
         public bool RepairInProgress
         {
@@ -684,50 +684,50 @@ namespace Duplicati.Library.Main.Database
             }
         }
 
-		public bool PartiallyRecreated
-		{
-			get
-			{
-				return GetDbOptions().ContainsKey("partially-recreated");
-			}
-			set
-			{
-				var opts = GetDbOptions();
-				
-				if (value)
-					opts["partially-recreated"] = "true";
-				else
-					opts.Remove("partially-recreated");
-				
-				SetDbOptions(opts);
-			}
-		}
-		
-		public void SetDbOptions(IDictionary<string, string> options, System.Data.IDbTransaction transaction = null)
-		{
-			using(var tr = new TemporaryTransactionWrapper(m_connection, transaction))
+        public bool PartiallyRecreated
+        {
+            get
+            {
+                return GetDbOptions().ContainsKey("partially-recreated");
+            }
+            set
+            {
+                var opts = GetDbOptions();
+
+                if (value)
+                    opts["partially-recreated"] = "true";
+                else
+                    opts.Remove("partially-recreated");
+
+                SetDbOptions(opts);
+            }
+        }
+
+        public void SetDbOptions(IDictionary<string, string> options, System.Data.IDbTransaction transaction = null)
+        {
+            using(var tr = new TemporaryTransactionWrapper(m_connection, transaction))
             using(var cmd = m_connection.CreateCommand())
-			{
-				cmd.Transaction = tr.Parent;
-				cmd.ExecuteNonQuery(@"DELETE FROM ""Configuration"" ");
-				foreach(var kp in options)
-					cmd.ExecuteNonQuery(@"INSERT INTO ""Configuration"" (""Key"", ""Value"") VALUES (?, ?) ", kp.Key, kp.Value);
-				
-				tr.Commit();
-			}
-		}
-		
-		public long GetBlocksLargerThan(long fhblocksize)
-		{
+            {
+                cmd.Transaction = tr.Parent;
+                cmd.ExecuteNonQuery(@"DELETE FROM ""Configuration"" ");
+                foreach(var kp in options)
+                    cmd.ExecuteNonQuery(@"INSERT INTO ""Configuration"" (""Key"", ""Value"") VALUES (?, ?) ", kp.Key, kp.Value);
+
+                tr.Commit();
+            }
+        }
+
+        public long GetBlocksLargerThan(long fhblocksize)
+        {
             using(var cmd = m_connection.CreateCommand())
-				return cmd.ExecuteScalarInt64(@"SELECT COUNT(*) FROM ""Block"" WHERE ""Size"" > ?", -1, fhblocksize);
-		}
+                return cmd.ExecuteScalarInt64(@"SELECT COUNT(*) FROM ""Block"" WHERE ""Size"" > ?", -1, fhblocksize);
+        }
 
         public void VerifyConsistency(System.Data.IDbTransaction transaction, long blocksize, long hashsize)
         {
             using (var cmd = m_connection.CreateCommand())
             {
-				cmd.Transaction = transaction;
+                cmd.Transaction = transaction;
 
                 // Calculate the lengths for each blockset
                 var combinedLengths = @"SELECT ""BlocksetEntry"".""BlocksetID"" AS ""BlocksetID"", SUM(""Block"".""Size"") AS ""CalcLen"", ""Blockset"".""Length"" AS ""Length"" FROM ""Block"", ""BlocksetEntry"", ""Blockset"" WHERE ""BlocksetEntry"".""BlockID"" = ""Block"".""ID"" AND ""BlocksetEntry"".""BlocksetID"" = ""Blockset"".""ID"" GROUP BY ""BlocksetEntry"".""BlocksetID""";
@@ -735,25 +735,25 @@ namespace Duplicati.Library.Main.Database
                 var reportDetails = @"SELECT ""CalcLen"", ""Length"", ""A"".""BlocksetID"", ""File"".""Path"" FROM (" + combinedLengths + @") A, ""File"" WHERE ""A"".""BlocksetID"" = ""File"".""BlocksetID"" AND ""A"".""CalcLen"" != ""A"".""Length"" ";
 
                 using(var rd = cmd.ExecuteReader(reportDetails))
-					if (rd.Read())
-					{
-						var sb = new StringBuilder();
-						sb.AppendLine("Found inconsistency in the following files while validating database: ");
-						var c = 0;
-						do
-						{
-							if (c < 5)
-								sb.AppendFormat("{0}, actual size {1}, dbsize {2}, blocksetid: {3}{4}", rd.GetValue(3), rd.GetValue(1), rd.GetValue(0), rd.GetValue(2), Environment.NewLine);
-							c++;
-						} while(rd.Read());
-						
-						c -= 5;
-						if (c > 0)
-							sb.AppendFormat("... and {0} more", c);
-						
+                    if (rd.Read())
+                    {
+                        var sb = new StringBuilder();
+                        sb.AppendLine("Found inconsistency in the following files while validating database: ");
+                        var c = 0;
+                        do
+                        {
+                            if (c < 5)
+                                sb.AppendFormat("{0}, actual size {1}, dbsize {2}, blocksetid: {3}{4}", rd.GetValue(3), rd.GetValue(1), rd.GetValue(0), rd.GetValue(2), Environment.NewLine);
+                            c++;
+                        } while(rd.Read());
+
+                        c -= 5;
+                        if (c > 0)
+                            sb.AppendFormat("... and {0} more", c);
+
                         sb.Append(". Run repair to fix it.");
-						throw new InvalidDataException(sb.ToString());
-					}
+                        throw new InvalidDataException(sb.ToString());
+                    }
 
                 var real_count = cmd.ExecuteScalarInt64(@"SELECT Count(*) FROM ""BlocklistHash""", 0);
                 var unique_count = cmd.ExecuteScalarInt64(@"SELECT Count(*) FROM (SELECT DISTINCT ""BlocksetID"", ""Index"" FROM ""BlocklistHash"")", 0);
@@ -771,31 +771,31 @@ namespace Duplicati.Library.Main.Database
             }
         }
 
-		public interface IBlock
-		{
-			string Hash { get; }
-			long Size { get; }
-		}
-		
-		internal class Block : IBlock
-		{
-			public string Hash { get; private set; }
-			public long Size { get; private set; }
-			
-			public Block(string hash, long size)
-			{
-				this.Hash = hash;
-				this.Size = size;
-			}
-		}
-		
-		public IEnumerable<IBlock> GetBlocks(long volumeid)
-		{
-			using(var cmd = m_connection.CreateCommand())
-			using(var rd = cmd.ExecuteReader(@"SELECT DISTINCT ""Hash"", ""Size"" FROM ""Block"" WHERE ""VolumeID"" = ?", volumeid))
-				while (rd.Read())
+        public interface IBlock
+        {
+            string Hash { get; }
+            long Size { get; }
+        }
+
+        internal class Block : IBlock
+        {
+            public string Hash { get; private set; }
+            public long Size { get; private set; }
+
+            public Block(string hash, long size)
+            {
+                this.Hash = hash;
+                this.Size = size;
+            }
+        }
+
+        public IEnumerable<IBlock> GetBlocks(long volumeid)
+        {
+            using(var cmd = m_connection.CreateCommand())
+            using(var rd = cmd.ExecuteReader(@"SELECT DISTINCT ""Hash"", ""Size"" FROM ""Block"" WHERE ""VolumeID"" = ?", volumeid))
+                while (rd.Read())
                     yield return new Block(rd.GetValue(0).ToString(), rd.GetInt64(1));
-		}
+        }
 
         private class BlocklistHashEnumerable : IEnumerable<string>
         {
@@ -889,7 +889,7 @@ namespace Duplicati.Library.Main.Database
         {
             using (var cmd = m_connection.CreateCommand())
             {
-				cmd.Transaction = transaction;
+                cmd.Transaction = transaction;
                 cmd.CommandText = @"SELECT ""B"".""BlocksetID"", ""B"".""ID"", ""B"".""Path"", ""D"".""Length"", ""D"".""FullHash"", ""A"".""Lastmodified"" FROM ""FilesetEntry"" A, ""File"" B, ""Metadataset"" C, ""Blockset"" D WHERE ""A"".""FileID"" = ""B"".""ID"" AND ""B"".""MetadataID"" = ""C"".""ID"" AND ""C"".""BlocksetID"" = ""D"".""ID"" AND (""B"".""BlocksetID"" = ? OR ""B"".""BlocksetID"" = ?) AND ""A"".""FilesetID"" = ? ";
                 cmd.AddParameter(FOLDER_BLOCKSET_ID);
                 cmd.AddParameter(SYMLINK_BLOCKSET_ID);
@@ -953,7 +953,7 @@ namespace Duplicati.Library.Main.Database
                 Tablename = "Filenames-" + Library.Utility.Utility.ByteArrayAsHexString(Guid.NewGuid().ToByteArray());
                 var type = Library.Utility.FilterType.Regexp;
                 if (filter is Library.Utility.FilterExpression)
-					type = ((Library.Utility.FilterExpression)filter).Type;
+                    type = ((Library.Utility.FilterExpression)filter).Type;
 
                 // Bugfix: SQLite does not handle case-insensitive LIKE with non-ascii characters
                 if (type != Library.Utility.FilterType.Regexp && !Library.Utility.Utility.IsFSCaseSensitive && filter.ToString().Any(x => x > 127))
"Compact" : "Delete", true) - { - InitializeCommands(); - } - - public LocalDeleteDatabase(LocalDatabase db) - : base(db) - { - InitializeCommands(); - } - - private void InitializeCommands() - { - m_moveBlockToNewVolumeCommand = m_connection.CreateCommand(); - - m_moveBlockToNewVolumeCommand.CommandText = @"UPDATE ""Block"" SET ""VolumeID"" = ? WHERE ""Hash"" = ? AND ""Size"" = ?"; - m_moveBlockToNewVolumeCommand.AddParameters(3); - } - - private long GetLastFilesetID(System.Data.IDbCommand cmd) - { - return cmd.ExecuteScalarInt64(@"SELECT ""ID"" FROM ""Fileset"" ORDER BY ""Timestamp"" DESC LIMIT 1", -1); - } - - /// - /// Drops all entries related to operations listed in the table. - /// - /// The fileset entries to delete - /// The transaction to execute the commands in - /// A list of filesets to delete - public IEnumerable> DropFilesetsFromTable(DateTime[] toDelete, System.Data.IDbTransaction transaction) - { - using(var cmd = m_connection.CreateCommand()) - { - cmd.Transaction = transaction; + public LocalDeleteDatabase(string path, bool isCompact) + : base(path, isCompact ? "Compact" : "Delete", true) + { + InitializeCommands(); + } + + public LocalDeleteDatabase(LocalDatabase db) + : base(db) + { + InitializeCommands(); + } + + private void InitializeCommands() + { + m_moveBlockToNewVolumeCommand = m_connection.CreateCommand(); + + m_moveBlockToNewVolumeCommand.CommandText = @"UPDATE ""Block"" SET ""VolumeID"" = ? WHERE ""Hash"" = ? AND ""Size"" = ?"; + m_moveBlockToNewVolumeCommand.AddParameters(3); + } + + private long GetLastFilesetID(System.Data.IDbCommand cmd) + { + return cmd.ExecuteScalarInt64(@"SELECT ""ID"" FROM ""Fileset"" ORDER BY ""Timestamp"" DESC LIMIT 1", -1); + } + + /// + /// Drops all entries related to operations listed in the table. 
+ /// + /// The fileset entries to delete + /// The transaction to execute the commands in + /// A list of filesets to delete + public IEnumerable> DropFilesetsFromTable(DateTime[] toDelete, System.Data.IDbTransaction transaction) + { + using(var cmd = m_connection.CreateCommand()) + { + cmd.Transaction = transaction; - var q = ""; - foreach(var n in toDelete) - if (q.Length == 0) - q = "?"; - else - q += ",?"; - - //First we remove unwanted entries + var q = ""; + foreach(var n in toDelete) + if (q.Length == 0) + q = "?"; + else + q += ",?"; + + //First we remove unwanted entries var deleted = cmd.ExecuteNonQuery(@"DELETE FROM ""Fileset"" WHERE ""Timestamp"" IN (" + q + @") ", toDelete.Select(x => NormalizeDateTimeToEpochSeconds(x)).Cast().ToArray()); - - if (deleted != toDelete.Length) - throw new Exception(string.Format("Unexpected number of deleted filesets {0} vs {1}", deleted, toDelete.Length)); - - //Then we delete anything that is no longer being referenced + + if (deleted != toDelete.Length) + throw new Exception(string.Format("Unexpected number of deleted filesets {0} vs {1}", deleted, toDelete.Length)); + + //Then we delete anything that is no longer being referenced cmd.ExecuteNonQuery(@"DELETE FROM ""FilesetEntry"" WHERE ""FilesetID"" NOT IN (SELECT DISTINCT ""ID"" FROM ""Fileset"")"); - cmd.ExecuteNonQuery(@"DELETE FROM ""File"" WHERE ""ID"" NOT IN (SELECT DISTINCT ""FileID"" FROM ""FilesetEntry"") "); - cmd.ExecuteNonQuery(@"DELETE FROM ""Metadataset"" WHERE ""ID"" NOT IN (SELECT DISTINCT ""MetadataID"" FROM ""File"") "); - cmd.ExecuteNonQuery(@"DELETE FROM ""Blockset"" WHERE ""ID"" NOT IN (SELECT DISTINCT ""BlocksetID"" FROM ""File"" UNION SELECT DISTINCT ""BlocksetID"" FROM ""Metadataset"") "); - cmd.ExecuteNonQuery(@"DELETE FROM ""BlocksetEntry"" WHERE ""BlocksetID"" NOT IN (SELECT DISTINCT ""ID"" FROM ""Blockset"") "); - cmd.ExecuteNonQuery(@"DELETE FROM ""BlocklistHash"" WHERE ""BlocksetID"" NOT IN (SELECT DISTINCT ""ID"" FROM ""Blockset"") "); - - //We save the block info for the remote files, before we delete it - cmd.ExecuteNonQuery(@"INSERT INTO ""DeletedBlock"" (""Hash"", ""Size"", ""VolumeID"") SELECT ""Hash"", ""Size"", ""VolumeID"" FROM ""Block"" WHERE ""ID"" NOT IN (SELECT DISTINCT ""BlockID"" AS ""BlockID"" FROM ""BlocksetEntry"" UNION SELECT DISTINCT ""ID"" FROM ""Block"", ""BlocklistHash"" WHERE ""Block"".""Hash"" = ""BlocklistHash"".""Hash"") "); - cmd.ExecuteNonQuery(@"DELETE FROM ""Block"" WHERE ""ID"" NOT IN (SELECT DISTINCT ""BlockID"" FROM ""BlocksetEntry"" UNION SELECT DISTINCT ""ID"" FROM ""Block"", ""BlocklistHash"" WHERE ""Block"".""Hash"" = ""BlocklistHash"".""Hash"") "); - - //Find all remote filesets that are no longer required, and mark them as delete - var updated = cmd.ExecuteNonQuery(@"UPDATE ""RemoteVolume"" SET ""State"" = ? WHERE ""Type"" = ? AND ""State"" IN (?, ?) AND ""ID"" NOT IN (SELECT ""VolumeID"" FROM ""Fileset"") ", RemoteVolumeState.Deleting.ToString(), RemoteVolumeType.Files.ToString(), RemoteVolumeState.Uploaded.ToString(), RemoteVolumeState.Verified.ToString()); - - if (deleted != updated) - throw new Exception(string.Format("Unexpected number of remote volumes marked as deleted. Found {0} filesets, but {1} volumes", deleted, updated)); - - using (var rd = cmd.ExecuteReader(@"SELECT ""Name"", ""Size"" FROM ""RemoteVolume"" WHERE ""Type"" = ? AND ""State"" = ? 
", RemoteVolumeType.Files.ToString(), RemoteVolumeState.Deleting.ToString())) - while (rd.Read()) + cmd.ExecuteNonQuery(@"DELETE FROM ""File"" WHERE ""ID"" NOT IN (SELECT DISTINCT ""FileID"" FROM ""FilesetEntry"") "); + cmd.ExecuteNonQuery(@"DELETE FROM ""Metadataset"" WHERE ""ID"" NOT IN (SELECT DISTINCT ""MetadataID"" FROM ""File"") "); + cmd.ExecuteNonQuery(@"DELETE FROM ""Blockset"" WHERE ""ID"" NOT IN (SELECT DISTINCT ""BlocksetID"" FROM ""File"" UNION SELECT DISTINCT ""BlocksetID"" FROM ""Metadataset"") "); + cmd.ExecuteNonQuery(@"DELETE FROM ""BlocksetEntry"" WHERE ""BlocksetID"" NOT IN (SELECT DISTINCT ""ID"" FROM ""Blockset"") "); + cmd.ExecuteNonQuery(@"DELETE FROM ""BlocklistHash"" WHERE ""BlocksetID"" NOT IN (SELECT DISTINCT ""ID"" FROM ""Blockset"") "); + + //We save the block info for the remote files, before we delete it + cmd.ExecuteNonQuery(@"INSERT INTO ""DeletedBlock"" (""Hash"", ""Size"", ""VolumeID"") SELECT ""Hash"", ""Size"", ""VolumeID"" FROM ""Block"" WHERE ""ID"" NOT IN (SELECT DISTINCT ""BlockID"" AS ""BlockID"" FROM ""BlocksetEntry"" UNION SELECT DISTINCT ""ID"" FROM ""Block"", ""BlocklistHash"" WHERE ""Block"".""Hash"" = ""BlocklistHash"".""Hash"") "); + cmd.ExecuteNonQuery(@"DELETE FROM ""Block"" WHERE ""ID"" NOT IN (SELECT DISTINCT ""BlockID"" FROM ""BlocksetEntry"" UNION SELECT DISTINCT ""ID"" FROM ""Block"", ""BlocklistHash"" WHERE ""Block"".""Hash"" = ""BlocklistHash"".""Hash"") "); + + //Find all remote filesets that are no longer required, and mark them as delete + var updated = cmd.ExecuteNonQuery(@"UPDATE ""RemoteVolume"" SET ""State"" = ? WHERE ""Type"" = ? AND ""State"" IN (?, ?) AND ""ID"" NOT IN (SELECT ""VolumeID"" FROM ""Fileset"") ", RemoteVolumeState.Deleting.ToString(), RemoteVolumeType.Files.ToString(), RemoteVolumeState.Uploaded.ToString(), RemoteVolumeState.Verified.ToString()); + + if (deleted != updated) + throw new Exception(string.Format("Unexpected number of remote volumes marked as deleted. Found {0} filesets, but {1} volumes", deleted, updated)); + + using (var rd = cmd.ExecuteReader(@"SELECT ""Name"", ""Size"" FROM ""RemoteVolume"" WHERE ""Type"" = ? AND ""State"" = ? ", RemoteVolumeType.Files.ToString(), RemoteVolumeState.Deleting.ToString())) + while (rd.Read()) yield return new KeyValuePair(rd.GetString(0), rd.ConvertValueToInt64(1)); - } - } + } + } - private struct VolumeUsage - { - public string Name; - public long DataSize; - public long WastedSize; - public long CompressedSize; - - public VolumeUsage(string name, long datasize, long wastedsize, long compressedsize) - { - this.Name = name; - this.DataSize = datasize; - this.WastedSize = wastedsize; - this.CompressedSize = compressedsize; - } - } + private struct VolumeUsage + { + public string Name; + public long DataSize; + public long WastedSize; + public long CompressedSize; + + public VolumeUsage(string name, long datasize, long wastedsize, long compressedsize) + { + this.Name = name; + this.DataSize = datasize; + this.WastedSize = wastedsize; + this.CompressedSize = compressedsize; + } + } - /// - /// Returns the number of bytes stored in each volume, - /// and the number of bytes no longer needed in each volume. - /// The sizes are the uncompressed values. - /// - /// A list of tuples with name, datasize, wastedbytes. 
 
-		private struct VolumeUsage
-		{
-			public string Name;
-			public long DataSize;
-			public long WastedSize;
-			public long CompressedSize;
-			
-			public VolumeUsage(string name, long datasize, long wastedsize, long compressedsize)
-			{
-				this.Name = name;
-				this.DataSize = datasize;
-				this.WastedSize = wastedsize;
-				this.CompressedSize = compressedsize;
-			}
-		}
+        private struct VolumeUsage
+        {
+            public string Name;
+            public long DataSize;
+            public long WastedSize;
+            public long CompressedSize;
+
+            public VolumeUsage(string name, long datasize, long wastedsize, long compressedsize)
+            {
+                this.Name = name;
+                this.DataSize = datasize;
+                this.WastedSize = wastedsize;
+                this.CompressedSize = compressedsize;
+            }
+        }
 
-		/// 
-		/// Returns the number of bytes stored in each volume,
-		/// and the number of bytes no longer needed in each volume.
-		/// The sizes are the uncompressed values.
-		/// 
-		/// A list of tuples with name, datasize, wastedbytes.
-		private IEnumerable<VolumeUsage> GetWastedSpaceReport(System.Data.IDbTransaction transaction)
-		{
-			var tmptablename = "UsageReport-" + Library.Utility.Utility.ByteArrayAsHexString(Guid.NewGuid().ToByteArray());
+        /// 
+        /// Returns the number of bytes stored in each volume,
+        /// and the number of bytes no longer needed in each volume.
+        /// The sizes are the uncompressed values.
+        /// 
+        /// A list of tuples with name, datasize, wastedbytes.
+        private IEnumerable<VolumeUsage> GetWastedSpaceReport(System.Data.IDbTransaction transaction)
+        {
+            var tmptablename = "UsageReport-" + Library.Utility.Utility.ByteArrayAsHexString(Guid.NewGuid().ToByteArray());
 
             var usedBlocks = @"SELECT SUM(""Block"".""Size"") AS ""ActiveSize"", ""Block"".""VolumeID"" AS ""VolumeID"" FROM ""Block"", ""Remotevolume"" WHERE ""Block"".""VolumeID"" = ""Remotevolume"".""ID"" AND ""Block"".""ID"" NOT IN (SELECT ""Block"".""ID"" FROM ""Block"",""DeletedBlock"" WHERE ""Block"".""Hash"" = ""DeletedBlock"".""Hash"" AND ""Block"".""Size"" = ""DeletedBlock"".""Size"") GROUP BY ""Block"".""VolumeID"" ";
             var lastmodifiedFile = @"SELECT ""Block"".""VolumeID"" AS ""VolumeID"", ""Fileset"".""Timestamp"" AS ""Sorttime"" FROM ""Fileset"", ""FilesetEntry"", ""File"", ""BlocksetEntry"", ""Block"" WHERE ""FilesetEntry"".""FileID"" = ""File"".""ID"" AND ""File"".""BlocksetID"" = ""BlocksetEntry"".""BlocksetID"" AND ""BlocksetEntry"".""BlockID"" = ""Block"".""ID"" AND ""Fileset"".""ID"" = ""FilesetEntry"".""FilesetID"" ";
@@ -133,78 +133,78 @@ namespace Duplicati.Library.Main.Database
             var scantime = @"SELECT ""VolumeID"" AS ""VolumeID"", MIN(""Sorttime"") AS ""Sorttime"" FROM (" + lastmodifiedFile + @" UNION " + lastmodifiedMetadata + @") GROUP BY ""VolumeID"" ";
             var active = @"SELECT ""A"".""ActiveSize"" AS ""ActiveSize"", 0 AS ""InactiveSize"", ""A"".""VolumeID"" AS ""VolumeID"", CASE WHEN ""B"".""Sorttime"" IS NULL THEN 0 ELSE ""B"".""Sorttime"" END AS ""Sorttime"" FROM (" + usedBlocks + @") A LEFT OUTER JOIN (" + scantime + @") B ON ""B"".""VolumeID"" = ""A"".""VolumeID"" ";
 
-			var inactive = @"SELECT 0 AS ""ActiveSize"", SUM(""Size"") AS ""InactiveSize"", ""VolumeID"" AS ""VolumeID"", 0 AS ""SortScantime"" FROM ""DeletedBlock"" GROUP BY ""VolumeID"" ";
+            var inactive = @"SELECT 0 AS ""ActiveSize"", SUM(""Size"") AS ""InactiveSize"", ""VolumeID"" AS ""VolumeID"", 0 AS ""SortScantime"" FROM ""DeletedBlock"" GROUP BY ""VolumeID"" ";
             var empty = @"SELECT 0 AS ""ActiveSize"", 0 AS ""InactiveSize"", ""Remotevolume"".""ID"" AS ""VolumeID"", 0 AS ""SortScantime"" FROM ""Remotevolume"" WHERE ""Remotevolume"".""Type"" = ? AND ""Remotevolume"".""State"" IN (?, ?) AND ""Remotevolume"".""ID"" NOT IN (SELECT ""VolumeID"" FROM ""Block"") ";
-			
-			var combined = active + " UNION " + inactive + " UNION " + empty;
-			var collected = @"SELECT ""VolumeID"" AS ""VolumeID"", SUM(""ActiveSize"") AS ""ActiveSize"", SUM(""InactiveSize"") AS ""InactiveSize"", MAX(""Sorttime"") AS ""Sorttime"" FROM (" + combined + @") GROUP BY ""VolumeID"" ";
-			var createtable = @"CREATE TEMPORARY TABLE """ + tmptablename + @""" AS " + collected;
-			
-			using (var cmd = m_connection.CreateCommand())
-			{
-				cmd.Transaction = transaction;
-				try
-				{
+
+            var combined = active + " UNION " + inactive + " UNION " + empty;
+            var collected = @"SELECT ""VolumeID"" AS ""VolumeID"", SUM(""ActiveSize"") AS ""ActiveSize"", SUM(""InactiveSize"") AS ""InactiveSize"", MAX(""Sorttime"") AS ""Sorttime"" FROM (" + combined + @") GROUP BY ""VolumeID"" ";
+            var createtable = @"CREATE TEMPORARY TABLE """ + tmptablename + @""" AS " + collected;
+
+            using (var cmd = m_connection.CreateCommand())
+            {
+                cmd.Transaction = transaction;
+                try
+                {
                     cmd.ExecuteNonQuery(createtable, RemoteVolumeType.Blocks.ToString(), RemoteVolumeState.Uploaded.ToString(), RemoteVolumeState.Verified.ToString());
-					using (var rd = cmd.ExecuteReader(string.Format(@"SELECT ""A"".""Name"", ""B"".""ActiveSize"", ""B"".""InactiveSize"", ""A"".""Size"" FROM ""Remotevolume"" A, ""{0}"" B WHERE ""A"".""ID"" = ""B"".""VolumeID"" ORDER BY ""B"".""Sorttime"" ASC ", tmptablename)))
-						while (rd.Read())
+                    using (var rd = cmd.ExecuteReader(string.Format(@"SELECT ""A"".""Name"", ""B"".""ActiveSize"", ""B"".""InactiveSize"", ""A"".""Size"" FROM ""Remotevolume"" A, ""{0}"" B WHERE ""A"".""ID"" = ""B"".""VolumeID"" ORDER BY ""B"".""Sorttime"" ASC ", tmptablename)))
+                        while (rd.Read())
                             yield return new VolumeUsage(rd.GetValue(0).ToString(), rd.ConvertValueToInt64(1, 0) + rd.ConvertValueToInt64(2, 0), rd.ConvertValueToInt64(2, 0), rd.ConvertValueToInt64(3, 0));
-				}
-				finally
-				{
-					try { cmd.ExecuteNonQuery(string.Format(@"DROP TABLE IF EXISTS ""{0}"" ", tmptablename)); }
-					catch { }
-				}
-			}
-		}
-		
-		public interface ICompactReport
-		{
-			IEnumerable<string> DeleteableVolumes { get; }
-			IEnumerable<string> CompactableVolumes { get; }
-			bool ShouldReclaim { get; }
-			bool ShouldCompact { get; }
-			void ReportCompactData(ILogWriter log);
-		}
-		
-		private class CompactReport : ICompactReport
-		{
-			private IEnumerable<VolumeUsage> m_report;
-			private IEnumerable<VolumeUsage> m_cleandelete;
-			private IEnumerable<VolumeUsage> m_wastevolumes;
-			private IEnumerable<VolumeUsage> m_smallvolumes;
-			
-			private long m_deletablevolumes;
-			private long m_wastedspace;
-			private long m_smallspace;
-			private long m_fullsize;
+                }
+                finally
+                {
+                    try { cmd.ExecuteNonQuery(string.Format(@"DROP TABLE IF EXISTS ""{0}"" ", tmptablename)); }
+                    catch { }
+                }
+            }
+        }
+
+        public interface ICompactReport
+        {
+            IEnumerable<string> DeleteableVolumes { get; }
+            IEnumerable<string> CompactableVolumes { get; }
+            bool ShouldReclaim { get; }
+            bool ShouldCompact { get; }
+            void ReportCompactData(ILogWriter log);
+        }
+
+        private class CompactReport : ICompactReport
+        {
+            private IEnumerable<VolumeUsage> m_report;
+            private IEnumerable<VolumeUsage> m_cleandelete;
+            private IEnumerable<VolumeUsage> m_wastevolumes;
+            private IEnumerable<VolumeUsage> m_smallvolumes;
+
+            private long m_deletablevolumes;
+            private long m_wastedspace;
+            private long m_smallspace;
+            private long m_fullsize;
             private long m_smallvolumecount;
-			
-			private long m_wastethreshold;
-			private long m_volsize;
+
+            private long m_wastethreshold;
+            private long m_volsize;
             private long m_maxsmallfilecount;
-			
-			public CompactReport(long volsize, long wastethreshold, long smallfilesize, long maxsmallfilecount, IEnumerable<VolumeUsage> report)
-			{
-				m_report = report;
-				
+
+            public CompactReport(long volsize, long wastethreshold, long smallfilesize, long maxsmallfilecount, IEnumerable<VolumeUsage> report)
+            {
+                m_report = report;
+
                 m_cleandelete = (from n in m_report where n.DataSize <= n.WastedSize select n).ToArray();
-				m_wastevolumes = from n in m_report where ((((n.WastedSize / (float)n.DataSize) * 100) >= wastethreshold) || (((n.WastedSize / (float)volsize) * 100) >= wastethreshold)) && !m_cleandelete.Contains(n) select n;
-				m_smallvolumes = from n in m_report where n.CompressedSize <= smallfilesize && !m_cleandelete.Contains(n) select n;
+                m_wastevolumes = from n in m_report where ((((n.WastedSize / (float)n.DataSize) * 100) >= wastethreshold) || (((n.WastedSize / (float)volsize) * 100) >= wastethreshold)) && !m_cleandelete.Contains(n) select n;
+                m_smallvolumes = from n in m_report where n.CompressedSize <= smallfilesize && !m_cleandelete.Contains(n) select n;
 
-				m_wastethreshold = wastethreshold;
-				m_volsize = volsize;
+                m_wastethreshold = wastethreshold;
+                m_volsize = volsize;
                 m_maxsmallfilecount = maxsmallfilecount;
 
-				m_deletablevolumes = m_cleandelete.Count();
-				m_fullsize = report.Select(x => x.DataSize).Sum();
-				
-				m_wastedspace = m_wastevolumes.Select(x => x.WastedSize).Sum();
-				m_smallspace = m_smallvolumes.Select(x => x.CompressedSize).Sum();
+                m_deletablevolumes = m_cleandelete.Count();
+                m_fullsize = report.Select(x => x.DataSize).Sum();
+
+                m_wastedspace = m_wastevolumes.Select(x => x.WastedSize).Sum();
+                m_smallspace = m_smallvolumes.Select(x => x.CompressedSize).Sum();
                 m_smallvolumecount = m_smallvolumes.Count();
-			}
-			
-			public void ReportCompactData(ILogWriter log)
+            }
+
+            public void ReportCompactData(ILogWriter log)
             {
                 var wastepercentage = ((m_wastedspace / (float)m_fullsize) * 100);
                 if (log.VerboseOutput)
@@ -213,198 +213,198 @@ namespace Duplicati.Library.Main.Database
                     log.AddVerboseMessage(string.Format("Found {0} small volume(s) with a total size of {1}", m_smallvolumes.Count(), Library.Utility.Utility.FormatSizeString(m_smallspace)));
                     log.AddVerboseMessage(string.Format("Found {0} volume(s) with a total of {1:F2}% wasted space ({2} of {3})", m_wastevolumes.Count(), wastepercentage, Library.Utility.Utility.FormatSizeString(m_wastedspace), Library.Utility.Utility.FormatSizeString(m_fullsize)));
                 }
-				
-				if (m_deletablevolumes > 0)
-					log.AddMessage(string.Format("Compacting because there are {0} fully deletable volume(s)", m_deletablevolumes));
-				else if (wastepercentage >= m_wastethreshold && m_wastevolumes.Count() >= 2)
-					log.AddMessage(string.Format("Compacting because there is {0:F2}% wasted space and the limit is {1}%", wastepercentage, m_wastethreshold));
-				else if (m_smallspace > m_volsize)
-					log.AddMessage(string.Format("Compacting because there are {0} in small volumes and the volume size is {1}", Library.Utility.Utility.FormatSizeString(m_smallspace), Library.Utility.Utility.FormatSizeString(m_volsize)));
+
+                if (m_deletablevolumes > 0)
+                    log.AddMessage(string.Format("Compacting because there are {0} fully deletable volume(s)", m_deletablevolumes));
+                else if (wastepercentage >= m_wastethreshold && m_wastevolumes.Count() >= 2)
+                    log.AddMessage(string.Format("Compacting because there is {0:F2}% wasted space and the limit is {1}%", wastepercentage, m_wastethreshold));
+                else if (m_smallspace > m_volsize)
+                    log.AddMessage(string.Format("Compacting because there are {0} in small volumes and the volume size is {1}", Library.Utility.Utility.FormatSizeString(m_smallspace), Library.Utility.Utility.FormatSizeString(m_volsize)));
                 else if (m_smallvolumecount > m_maxsmallfilecount)
                     log.AddMessage(string.Format("Compacting because there are {0} small volumes and the maximum is {1}", m_smallvolumecount, m_maxsmallfilecount));
-				else
-					log.AddMessage("Compacting not required");
-			}
-			
-			public bool ShouldReclaim
-			{
-				get
-				{
-					return m_deletablevolumes > 0;
-				}
-			}
-			
-			public bool ShouldCompact
-			{
-				get
-				{
-					return (((m_wastedspace / (float)m_fullsize) * 100) >= m_wastethreshold && m_wastevolumes.Count() >= 2) || m_smallspace > m_volsize || m_smallvolumecount > m_maxsmallfilecount;
-				}
-			}
+                else
+                    log.AddMessage("Compacting not required");
+            }
+
+            public bool ShouldReclaim
+            {
+                get
+                {
+                    return m_deletablevolumes > 0;
+                }
+            }
+
+            public bool ShouldCompact
+            {
+                get
+                {
+                    return (((m_wastedspace / (float)m_fullsize) * 100) >= m_wastethreshold && m_wastevolumes.Count() >= 2) || m_smallspace > m_volsize || m_smallvolumecount > m_maxsmallfilecount;
+                }
+            }
 
-			public IEnumerable<string> DeleteableVolumes
-			{
-				get { return from n in m_cleandelete select n.Name; }
-			}
-			
-			public IEnumerable<string> CompactableVolumes
-			{
-				get
-				{
-					//The order matters, we compact old volumes together first,
-					// as we anticipate old data will stay around, where newer data
-					// is more likely to be discarded again
-					return m_wastevolumes.Union(m_smallvolumes).Select(x => x.Name).Distinct();
-				}
-			}
-		}
-		
-		public ICompactReport GetCompactReport(long volsize, long wastethreshold, long smallfilesize, long maxsmallfilecount, System.Data.IDbTransaction transaction)
-		{
-			return new CompactReport(volsize, wastethreshold, smallfilesize, maxsmallfilecount, GetWastedSpaceReport(transaction).ToList());
-		}
+            public IEnumerable<string> DeleteableVolumes
+            {
+                get { return from n in m_cleandelete select n.Name; }
+            }
+
+            public IEnumerable<string> CompactableVolumes
+            {
+                get
+                {
+                    //The order matters, we compact old volumes together first,
+                    // as we anticipate old data will stay around, where newer data
+                    // is more likely to be discarded again
+                    return m_wastevolumes.Union(m_smallvolumes).Select(x => x.Name).Distinct();
+                }
+            }
+        }
+
+        public ICompactReport GetCompactReport(long volsize, long wastethreshold, long smallfilesize, long maxsmallfilecount, System.Data.IDbTransaction transaction)
+        {
+            return new CompactReport(volsize, wastethreshold, smallfilesize, maxsmallfilecount, GetWastedSpaceReport(transaction).ToList());
+        }
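
CompactReport reduces the compact decision to three triggers: enough wasted space spread over at least two volumes, enough small-volume payload to fill one regular volume, or simply too many small volumes. A condensed restatement of that predicate (parameter names are illustrative, not the class's fields):

static class CompactDecision
{
    // Mirrors the ShouldCompact property above
    public static bool ShouldCompact(long wastedSpace, long fullSize, int wasteVolumeCount, long wasteThresholdPercent,
                                     long smallSpace, long volSize, long smallVolumeCount, long maxSmallFileCount)
    {
        var wastePercentage = (wastedSpace / (float)fullSize) * 100;
        return (wastePercentage >= wasteThresholdPercent && wasteVolumeCount >= 2)
            || smallSpace > volSize
            || smallVolumeCount > maxSmallFileCount;
    }
}
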
 
-		public interface IBlockQuery : IDisposable
-		{
+        public interface IBlockQuery : IDisposable
+        {
             bool UseBlock(string hash, long size, System.Data.IDbTransaction transaction);
-		}
-		
-		private class BlockQuery : IBlockQuery
-		{
-			private System.Data.IDbCommand m_command;
-			private HashLookupHelper m_lookup;
-			
-			public BlockQuery(System.Data.IDbConnection con, Options options, System.Data.IDbTransaction transaction)
-			{
-				m_command = con.CreateCommand();
-				m_command.Transaction = transaction;
-				
-				if (options.BlockHashLookupMemory > 0)
-				{
-					m_lookup = new HashLookupHelper((ulong)options.BlockHashLookupMemory);
-					using(var reader = m_command.ExecuteReader(@"SELECT ""Hash"", ""Size"" FROM ""Block"" "))
-					while (reader.Read())
-					{
+        }
+
+        private class BlockQuery : IBlockQuery
+        {
+            private System.Data.IDbCommand m_command;
+            private HashLookupHelper m_lookup;
+
+            public BlockQuery(System.Data.IDbConnection con, Options options, System.Data.IDbTransaction transaction)
+            {
+                m_command = con.CreateCommand();
+                m_command.Transaction = transaction;
+
+                if (options.BlockHashLookupMemory > 0)
+                {
+                    m_lookup = new HashLookupHelper((ulong)options.BlockHashLookupMemory);
+                    using(var reader = m_command.ExecuteReader(@"SELECT ""Hash"", ""Size"" FROM ""Block"" "))
+                    while (reader.Read())
+                    {
                         var hash = reader.GetString(0);
                         var size = reader.GetInt64(1);
-						m_lookup.Add(hash, size, size);
-					}
-				}
-				
-				m_command.Parameters.Clear();
-				m_command.CommandText = @"SELECT ""VolumeID"" FROM ""Block"" WHERE ""Hash"" = ? AND ""Size"" = ? ";
-				m_command.AddParameters(2);
-			}
-			
+                        m_lookup.Add(hash, size, size);
+                    }
+                }
+
+                m_command.Parameters.Clear();
+                m_command.CommandText = @"SELECT ""VolumeID"" FROM ""Block"" WHERE ""Hash"" = ? AND ""Size"" = ? ";
+                m_command.AddParameters(2);
+            }
+
             public bool UseBlock(string hash, long size, System.Data.IDbTransaction transaction)
-			{
-				if (m_lookup != null)
-				{
-					long nsize;
-					if(m_lookup.TryGet(hash, size, out nsize) && nsize == size)
+            {
+                if (m_lookup != null)
+                {
+                    long nsize;
+                    if(m_lookup.TryGet(hash, size, out nsize) && nsize == size)
                         return true;
                     else
                         return false;
-				}
-				
+                }
+
                 m_command.Transaction = transaction;
-				m_command.SetParameterValue(0, hash);
-				m_command.SetParameterValue(1, size);
-				var r = m_command.ExecuteScalar();
-				return r != null && r != DBNull.Value;
-			}
-			
-			public void Dispose()
-			{
-				m_lookup = null;
-				if (m_command != null)
-					try { m_command.Dispose(); }
-					finally { m_command = null; }
-			}
-		}
-		
-		/// 
-		/// Builds a lookup table to enable faster response to block queries
-		/// 
-		/// The name of the volume to prepare for
-		public IBlockQuery CreateBlockQueryHelper(Options options, System.Data.IDbTransaction transaction)
-		{
-			return new BlockQuery(m_connection, options, transaction);
-		}
+                m_command.SetParameterValue(0, hash);
+                m_command.SetParameterValue(1, size);
+                var r = m_command.ExecuteScalar();
+                return r != null && r != DBNull.Value;
+            }
+
+            public void Dispose()
+            {
+                m_lookup = null;
+                if (m_command != null)
+                    try { m_command.Dispose(); }
+                    finally { m_command = null; }
+            }
+        }
+
+        /// 
+        /// Builds a lookup table to enable faster response to block queries
+        /// 
+        /// The name of the volume to prepare for
+        public IBlockQuery CreateBlockQueryHelper(Options options, System.Data.IDbTransaction transaction)
+        {
+            return new BlockQuery(m_connection, options, transaction);
+        }
 
-		public void MoveBlockToNewVolume(string hash, long size, long volumeID, System.Data.IDbTransaction tr)
-		{
-			m_moveBlockToNewVolumeCommand.SetParameterValue(0, volumeID);
-			m_moveBlockToNewVolumeCommand.SetParameterValue(1, hash);
-			m_moveBlockToNewVolumeCommand.SetParameterValue(2, size);
-			m_moveBlockToNewVolumeCommand.Transaction = tr;
-			var c = m_moveBlockToNewVolumeCommand.ExecuteNonQuery();
-			if (c != 1)
-				throw new Exception("Unexpected update result");
-		}
-		
-		/// 
-		/// Calculates the sequence in which files should be deleted based on their relations.
-		/// 
-		/// The deletable volumes.
-		/// Block volumes slated for deletion.
-		public IEnumerable<IRemoteVolume> GetDeletableVolumes(IEnumerable<IRemoteVolume> deleteableVolumes, System.Data.IDbTransaction transaction)
-		{
-			using(var cmd = m_connection.CreateCommand())
-			{
-				// Although the generated index volumes are always in pairs,
-				// this code handles many-to-many relations between
-				// index files and block volumes, should this be added later
-				var lookupBlock = new Dictionary<string, List<IRemoteVolume>>();
-				var lookupIndexfiles = new Dictionary<string, List<string>>();
-				
-				cmd.Transaction = transaction;
+        public void MoveBlockToNewVolume(string hash, long size, long volumeID, System.Data.IDbTransaction tr)
+        {
+            m_moveBlockToNewVolumeCommand.SetParameterValue(0, volumeID);
+            m_moveBlockToNewVolumeCommand.SetParameterValue(1, hash);
+            m_moveBlockToNewVolumeCommand.SetParameterValue(2, size);
+            m_moveBlockToNewVolumeCommand.Transaction = tr;
+            var c = m_moveBlockToNewVolumeCommand.ExecuteNonQuery();
+            if (c != 1)
+                throw new Exception("Unexpected update result");
+        }
+
+        /// 
+        /// Calculates the sequence in which files should be deleted based on their relations.
+        /// 
+        /// The deletable volumes.
+        /// Block volumes slated for deletion.
+        public IEnumerable<IRemoteVolume> GetDeletableVolumes(IEnumerable<IRemoteVolume> deleteableVolumes, System.Data.IDbTransaction transaction)
+        {
+            using(var cmd = m_connection.CreateCommand())
+            {
+                // Although the generated index volumes are always in pairs,
+                // this code handles many-to-many relations between
+                // index files and block volumes, should this be added later
+                var lookupBlock = new Dictionary<string, List<IRemoteVolume>>();
+                var lookupIndexfiles = new Dictionary<string, List<string>>();
 
-				using(var rd = cmd.ExecuteReader(@"SELECT ""C"".""Name"", ""B"".""Name"", ""B"".""Hash"", ""B"".""Size"" FROM ""IndexBlockLink"" A, ""RemoteVolume"" B, ""RemoteVolume"" C WHERE ""A"".""IndexVolumeID"" = ""B"".""ID"" AND ""A"".""BlockVolumeID"" = ""C"".""ID"" AND ""B"".""Hash"" IS NOT NULL AND ""B"".""Size"" IS NOT NULL "))
-					while(rd.Read())
-					{
-						var name = rd.GetValue(0).ToString();
-						List<IRemoteVolume> indexfileList;
-						if (!lookupBlock.TryGetValue(name, out indexfileList))
-						{
-							indexfileList = new List<IRemoteVolume>();
-							lookupBlock.Add(name, indexfileList);
-						}
-						
+                cmd.Transaction = transaction;
+
+                using(var rd = cmd.ExecuteReader(@"SELECT ""C"".""Name"", ""B"".""Name"", ""B"".""Hash"", ""B"".""Size"" FROM ""IndexBlockLink"" A, ""RemoteVolume"" B, ""RemoteVolume"" C WHERE ""A"".""IndexVolumeID"" = ""B"".""ID"" AND ""A"".""BlockVolumeID"" = ""C"".""ID"" AND ""B"".""Hash"" IS NOT NULL AND ""B"".""Size"" IS NOT NULL "))
+                    while(rd.Read())
+                    {
+                        var name = rd.GetValue(0).ToString();
+                        List<IRemoteVolume> indexfileList;
+                        if (!lookupBlock.TryGetValue(name, out indexfileList))
+                        {
+                            indexfileList = new List<IRemoteVolume>();
+                            lookupBlock.Add(name, indexfileList);
+                        }
+
                         var v = new RemoteVolume(rd.GetString(1), rd.GetString(2), rd.GetInt64(3));
-						indexfileList.Add(v);
+                        indexfileList.Add(v);
 
-						List<string> blockList;
-						if (!lookupIndexfiles.TryGetValue(v.Name, out blockList))
-						{
-							blockList = new List<string>();
-							lookupIndexfiles.Add(v.Name, blockList);
-						}
-						blockList.Add(name);
-					}
+                        List<string> blockList;
+                        if (!lookupIndexfiles.TryGetValue(v.Name, out blockList))
+                        {
+                            blockList = new List<string>();
+                            lookupIndexfiles.Add(v.Name, blockList);
+                        }
+                        blockList.Add(name);
+                    }
 
-				foreach(var r in deleteableVolumes.Distinct())
-				{
-					// Return the input
-					yield return r;
-					List<IRemoteVolume> indexfileList;
-					if (lookupBlock.TryGetValue(r.Name, out indexfileList))
-						foreach(var sh in indexfileList)
-						{
-							List<string> backref;
-							if (lookupIndexfiles.TryGetValue(sh.Name, out backref))
-							{
-								//If this is the last reference,
-								// remove the index file as well
-								if (backref.Remove(r.Name) && backref.Count == 0)
-									yield return sh;
-							}
-						}
-				}
-			}
-		}
+                foreach(var r in deleteableVolumes.Distinct())
+                {
+                    // Return the input
+                    yield return r;
+                    List<IRemoteVolume> indexfileList;
+                    if (lookupBlock.TryGetValue(r.Name, out indexfileList))
+                        foreach(var sh in indexfileList)
+                        {
+                            List<string> backref;
+                            if (lookupIndexfiles.TryGetValue(sh.Name, out backref))
+                            {
+                                //If this is the last reference,
+                                // remove the index file as well
+                                if (backref.Remove(r.Name) && backref.Count == 0)
+                                    yield return sh;
+                            }
+                        }
+                }
+            }
+        }
 
-	}
+    }
 }
diff --git a/Duplicati/Library/Main/Database/LocalListDatabase.cs b/Duplicati/Library/Main/Database/LocalListDatabase.cs
index b9845c5eb..d634eb6ec 100644
--- a/Duplicati/Library/Main/Database/LocalListDatabase.cs
+++ b/Duplicati/Library/Main/Database/LocalListDatabase.cs
@@ -65,9 +65,9 @@ namespace Duplicati.Library.Main.Database
                 m_connection = owner.m_connection;
                 m_filesets = owner.FilesetTimes.ToArray();
                 m_tablename = "Filesets-" + Library.Utility.Utility.ByteArrayAsHexString(Guid.NewGuid().ToByteArray());
-				var tmp = owner.GetFilelistWhereClause(time, versions, m_filesets);
-				string query = tmp.Item1;
-				var args = tmp.Item2;
+                var tmp = owner.GetFilelistWhereClause(time, versions, m_filesets);
+                string query = tmp.Item1;
+                var args = tmp.Item2;
 
                 using(var cmd = m_connection.CreateCommand())
                 {
diff --git a/Duplicati/Library/Main/Database/LocalRecreateDatabase.cs b/Duplicati/Library/Main/Database/LocalRecreateDatabase.cs
index 86ab30571..1d1ce6ab8 100644
--- a/Duplicati/Library/Main/Database/LocalRecreateDatabase.cs
+++ b/Duplicati/Library/Main/Database/LocalRecreateDatabase.cs
@@ -535,7 +535,7 @@ namespace Duplicati.Library.Main.Database
         {
             using(var cmd = m_connection.CreateCommand())
             {
-				cmd.CommandText = string.Format(@"SELECT DISTINCT ""BlocklistHash"".""Hash"" FROM ""BlocklistHash"", ""Block"" WHERE ""Block"".""Hash"" = ""BlocklistHash"".""Hash"" AND ""Block"".""VolumeID"" = ?");
+                cmd.CommandText = string.Format(@"SELECT DISTINCT ""BlocklistHash"".""Hash"" FROM ""BlocklistHash"", ""Block"" WHERE ""Block"".""Hash"" = ""BlocklistHash"".""Hash"" AND ""Block"".""VolumeID"" = ?");
                 cmd.AddParameter(volumeid);
 
                 using(var rd = cmd.ExecuteReader())
@@ -544,17 +544,17 @@ namespace Duplicati.Library.Main.Database
             }
         }
 
-		public IEnumerable<IRemoteVolume> GetMissingBlockListVolumes(int passNo, long blocksize, long hashsize)
+        public IEnumerable<IRemoteVolume> GetMissingBlockListVolumes(int passNo, long blocksize, long hashsize)
         {
             using(var cmd = m_connection.CreateCommand())
             {
                 var selectCommand = @"SELECT DISTINCT ""RemoteVolume"".""Name"", ""RemoteVolume"".""Hash"", ""RemoteVolume"".""Size"", ""RemoteVolume"".""ID"" FROM ""RemoteVolume""";
 
                 var missingBlocklistEntries =
-					string.Format(
-						@"SELECT ""BlocklistHash"".""Hash"" FROM ""BlocklistHash"" LEFT OUTER JOIN ""BlocksetEntry"" ON ""BlocksetEntry"".""Index"" = (""BlocklistHash"".""Index"" * {0}) AND ""BlocksetEntry"".""BlocksetID"" = ""BlocklistHash"".""BlocksetID"" WHERE ""BlocksetEntry"".""BlocksetID"" IS NULL",
-						blocksize / hashsize
-					);
+                    string.Format(
+                        @"SELECT ""BlocklistHash"".""Hash"" FROM ""BlocklistHash"" LEFT OUTER JOIN ""BlocksetEntry"" ON ""BlocksetEntry"".""Index"" = (""BlocklistHash"".""Index"" * {0}) AND ""BlocksetEntry"".""BlocksetID"" = ""BlocklistHash"".""BlocksetID"" WHERE ""BlocksetEntry"".""BlocksetID"" IS NULL",
+                        blocksize / hashsize
+                    );
 
                 var missingBlockInfo = @"SELECT ""VolumeID"" FROM ""Block"" WHERE ""VolumeID"" < 0 ";
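
The JOIN in GetMissingBlockListVolumes relies on the blocklist layout: each blocklist block stores blocksize / hashsize hashes, so the n'th blocklist hash of a blockset starts at entry index n * (blocksize / hashsize) — the {0} multiplier in the SQL above. A sketch of that index arithmetic (an illustration inferred from the query, not a library API):

static class BlocklistMath
{
    // First BlocksetEntry index covered by the given blocklist hash
    public static long FirstEntryIndex(long blocklistIndex, long blocksize, long hashsize)
    {
        return blocklistIndex * (blocksize / hashsize);
    }
}

For example, with a 100 KiB block size and 32-byte hashes, one blocklist block describes 3200 data blocks, so FirstEntryIndex(2, 102400, 32) is 6400; a blocklist hash with no matching BlocksetEntry marks its volume as needing another processing pass.
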
diff --git a/Duplicati/Library/Main/Database/LocalRepairDatabase.cs b/Duplicati/Library/Main/Database/LocalRepairDatabase.cs
index 62d9281ae..b10679f61 100644
--- a/Duplicati/Library/Main/Database/LocalRepairDatabase.cs
+++ b/Duplicati/Library/Main/Database/LocalRepairDatabase.cs
@@ -21,103 +21,103 @@ using System.Collections.Generic;
 
 namespace Duplicati.Library.Main.Database
 {
-	internal class LocalRepairDatabase : LocalDatabase
-	{
-		public LocalRepairDatabase(string path)
-			: base(path, "Repair", true)
-		{
-		
-		}
-		
-		public long GetFilesetIdFromRemotename(string filelist)
-		{
-			using(var cmd = m_connection.CreateCommand())
-			{
-				var filesetid = cmd.ExecuteScalarInt64(@"SELECT ""Fileset"".""ID"" FROM ""Fileset"",""RemoteVolume"" WHERE ""Fileset"".""VolumeID"" = ""RemoteVolume"".""ID"" AND ""RemoteVolume"".""Name"" = ?", -1, filelist);
+    internal class LocalRepairDatabase : LocalDatabase
+    {
+        public LocalRepairDatabase(string path)
+            : base(path, "Repair", true)
+        {
+
+        }
+
+        public long GetFilesetIdFromRemotename(string filelist)
+        {
+            using(var cmd = m_connection.CreateCommand())
+            {
+                var filesetid = cmd.ExecuteScalarInt64(@"SELECT ""Fileset"".""ID"" FROM ""Fileset"",""RemoteVolume"" WHERE ""Fileset"".""VolumeID"" = ""RemoteVolume"".""ID"" AND ""RemoteVolume"".""Name"" = ?", -1, filelist);
                 if (filesetid == -1)
-					throw new Exception(string.Format("No such remote file: {0}", filelist));
-				
+                    throw new Exception(string.Format("No such remote file: {0}", filelist));
+
                 return filesetid;
-			}
-		}
-		
-		public interface IBlockSource
-		{
-			string File { get; }
-			long Offset { get; }
-		}
-		
-		public interface IBlockWithSources : LocalBackupDatabase.IBlock
-		{
-			IEnumerable<IBlockSource> Sources { get; }
-		}
-		
-		private class BlockWithSources : LocalBackupDatabase.Block, IBlockWithSources
-		{
-			private class BlockSource : IBlockSource
-			{
-				public string File { get; private set; }
-				public long Offset { get; private set; }
-				
-				public BlockSource(string file, long offset)
-				{
-					this.File = file;
-					this.Offset = offset;
-					
-				}
-			}
-			
-			private System.Data.IDataReader m_rd;
-			public bool Done { get; private set; }
-			
-			public BlockWithSources(System.Data.IDataReader rd)
+    }
+    }
+
+        public interface IBlockSource
+        {
+            string File { get; }
+            long Offset { get; }
+        }
+
+        public interface IBlockWithSources : LocalBackupDatabase.IBlock
+        {
+            IEnumerable<IBlockSource> Sources { get; }
+        }
+
+        private class BlockWithSources : LocalBackupDatabase.Block, IBlockWithSources
+        {
+            private class BlockSource : IBlockSource
+            {
+                public string File { get; private set; }
+                public long Offset { get; private set; }
+
+                public BlockSource(string file, long offset)
+                {
+                    this.File = file;
+                    this.Offset = offset;
+
+                }
+            }
+
+            private System.Data.IDataReader m_rd;
+            public bool Done { get; private set; }
+
+            public BlockWithSources(System.Data.IDataReader rd)
                 : base(rd.GetString(0), rd.GetInt64(1))
-			{
-				m_rd = rd;
-				Done = !m_rd.Read();
-			}
-			
-			public IEnumerable<IBlockSource> Sources
-			{
-				get
-				{
-					if (Done)
-						yield break;
-					
+            {
+                m_rd = rd;
+                Done = !m_rd.Read();
+            }
+
+            public IEnumerable<IBlockSource> Sources
+            {
+                get
+                {
+                    if (Done)
+                        yield break;
+
                     var cur = new BlockSource(m_rd.GetString(2), m_rd.GetInt64(3));
-					var file = cur.File;
-					
-					while(!Done && cur.File == file)
-					{
-						yield return cur;
-						Done = m_rd.Read();
-						if (!Done)
+                    var file = cur.File;
+
+                    while(!Done && cur.File == file)
+                    {
+                        yield return cur;
+                        Done = m_rd.Read();
+                        if (!Done)
                             cur = new BlockSource(m_rd.GetString(2), m_rd.GetInt64(3));
-					}
-				}
-			}
-		}
-		
-		private class RemoteVolume : IRemoteVolume
-		{
-			public string Name { get; private set; }
-			public string Hash { get; private set; }
-			public long Size { get; private set; }
-			
-			public RemoteVolume(string name, string hash, long size)
-			{
-				this.Name = name;
-				this.Hash = hash;
-				this.Size = size;
-			}
-		}
-		
-		public IEnumerable<IRemoteVolume> GetBlockVolumesFromIndexName(string name)
-		{
-			using(var cmd = m_connection.CreateCommand())
+                    }
+                }
+            }
+        }
+
+        private class RemoteVolume : IRemoteVolume
+        {
+            public string Name { get; private set; }
+            public string Hash { get; private set; }
+            public long Size { get; private set; }
+
+            public RemoteVolume(string name, string hash, long size)
+            {
+                this.Name = name;
+                this.Hash = hash;
+                this.Size = size;
+            }
+        }
+
+        public IEnumerable<IRemoteVolume> GetBlockVolumesFromIndexName(string name)
+        {
+            using(var cmd = m_connection.CreateCommand())
                 foreach(var rd in cmd.ExecuteReaderEnumerable(@"SELECT ""Name"", ""Hash"", ""Size"" FROM ""RemoteVolume"" WHERE ""ID"" IN (SELECT ""BlockVolumeID"" FROM ""IndexBlockLink"" WHERE ""IndexVolumeID"" IN (SELECT ""ID"" FROM ""RemoteVolume"" WHERE ""Name"" = ?))", name))
                     yield return new RemoteVolume(rd.GetString(0), rd.ConvertValueToString(1), rd.ConvertValueToInt64(2));
-		}
+        }
 
         public interface IMissingBlockList : IDisposable
         {
@@ -571,6 +571,6 @@ ORDER BY
                 throw new Exception(string.Format("Too many source blocklist entries in {0}", hash));
             }
         }
-	}
+    }
 }
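
BlockWithSources above walks a single open data reader and exposes the rows belonging to one file as a lazy sequence, advancing the shared cursor as the caller consumes it. The same pattern in isolation (illustrative column layout, not the repair database's actual query):

using System.Collections.Generic;
using System.Data;

static class ReaderGroups
{
    // Groups consecutive rows that share the value in column 0,
    // without buffering the whole result set in memory.
    public static IEnumerable<KeyValuePair<string, List<long>>> GroupByFirstColumn(IDataReader rd)
    {
        var more = rd.Read();
        while (more)
        {
            var key = rd.GetString(0);
            var values = new List<long>();
            do
            {
                values.Add(rd.GetInt64(1));
                more = rd.Read();
            } while (more && rd.GetString(0) == key);

            yield return new KeyValuePair<string, List<long>>(key, values);
        }
    }
}

The trade-off is the same as in the patch: only one pass over the data and constant memory, at the cost of requiring the caller to consume groups strictly in order.
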
diff --git a/Duplicati/Library/Main/Database/LocalRestoreDatabase.cs b/Duplicati/Library/Main/Database/LocalRestoreDatabase.cs
index 56870ab66..bc4220ac7 100644
--- a/Duplicati/Library/Main/Database/LocalRestoreDatabase.cs
+++ b/Duplicati/Library/Main/Database/LocalRestoreDatabase.cs
@@ -17,7 +17,7 @@ namespace Duplicati.Library.Main.Database
 
         protected DateTime m_restoreTime;
 
-		public DateTime RestoreTime { get { return m_restoreTime; } }
+        public DateTime RestoreTime { get { return m_restoreTime; } }
 
         public LocalRestoreDatabase(string path)
             : this(new LocalDatabase(path, "Restore", false))
@@ -356,41 +356,41 @@ namespace Duplicati.Library.Main.Database
         }
 
         public void SetTargetPaths(string largest_prefix, string destination)
-		{
-			using(var cmd = m_connection.CreateCommand())
-			{
-				if (string.IsNullOrEmpty(destination))
-				{
-					//The string fixing here is meant to provide some non-random
-					// defaults when restoring cross OS, e.g. backup on Linux, restore on Windows
-					//This is mostly meaningless, and the user really should use --restore-path
-					
-					if (Library.Utility.Utility.IsClientLinux)
-						// For Win -> Linux, we remove the colon from the drive letter, and use the drive letter as root folder
-						cmd.ExecuteNonQuery(string.Format(@"UPDATE ""{0}"" SET ""Targetpath"" = CASE WHEN SUBSTR(""Path"", 2, 1) == "":"" THEN ""/"" || SUBSTR(""Path"", 1, 1) || SUBSTR(""Path"", 3) ELSE ""Path"" END", m_tempfiletable));
-					else
-						// For Linux -> Win, we use the temporary folder's drive as the root path
-						cmd.ExecuteNonQuery(string.Format(@"UPDATE ""{0}"" SET ""Targetpath"" = CASE WHEN SUBSTR(""Path"", 1, 1) == ""/"" THEN ? || SUBSTR(""Path"", 2) ELSE ""Path"" END", m_tempfiletable), Library.Utility.Utility.AppendDirSeparator(System.IO.Path.GetPathRoot(Library.Utility.TempFolder.SystemTempPath)));
-					
-				}
-				else
-				{
-					if (string.IsNullOrEmpty(largest_prefix))
-					{
-						//Special case, restoring to new folder, but files are from different drives
-						// So we use the format <restore path> / <drive letter> / <source path>
-						// To avoid generating paths with a colon
-						cmd.ExecuteNonQuery(string.Format(@"UPDATE ""{0}"" SET ""Targetpath"" = ? || CASE WHEN SUBSTR(""Path"", 2, 1) == "":"" THEN SUBSTR(""Path"", 1, 1) || SUBSTR(""Path"", 3) ELSE ""Path"" END", m_tempfiletable), destination);
-					}
-					else
-					{
-						largest_prefix = Library.Utility.Utility.AppendDirSeparator(largest_prefix);
-						cmd.CommandText = string.Format(@"UPDATE ""{0}"" SET ""Targetpath"" = ? || SUBSTR(""Path"", ?)", m_tempfiletable);
-						cmd.AddParameter(destination);
-						cmd.AddParameter(largest_prefix.Length + 1);
-						cmd.ExecuteNonQuery();
-					}
-				}
+        {
+            using(var cmd = m_connection.CreateCommand())
+            {
+                if (string.IsNullOrEmpty(destination))
+                {
+                    //The string fixing here is meant to provide some non-random
+                    // defaults when restoring cross OS, e.g. backup on Linux, restore on Windows
+                    //This is mostly meaningless, and the user really should use --restore-path
+
+                    if (Library.Utility.Utility.IsClientLinux)
+                        // For Win -> Linux, we remove the colon from the drive letter, and use the drive letter as root folder
+                        cmd.ExecuteNonQuery(string.Format(@"UPDATE ""{0}"" SET ""Targetpath"" = CASE WHEN SUBSTR(""Path"", 2, 1) == "":"" THEN ""/"" || SUBSTR(""Path"", 1, 1) || SUBSTR(""Path"", 3) ELSE ""Path"" END", m_tempfiletable));
+                    else
+                        // For Linux -> Win, we use the temporary folder's drive as the root path
+                        cmd.ExecuteNonQuery(string.Format(@"UPDATE ""{0}"" SET ""Targetpath"" = CASE WHEN SUBSTR(""Path"", 1, 1) == ""/"" THEN ? || SUBSTR(""Path"", 2) ELSE ""Path"" END", m_tempfiletable), Library.Utility.Utility.AppendDirSeparator(System.IO.Path.GetPathRoot(Library.Utility.TempFolder.SystemTempPath)));
+
+                }
+                else
+                {
+                    if (string.IsNullOrEmpty(largest_prefix))
+                    {
+                        //Special case, restoring to new folder, but files are from different drives
+                        // So we use the format <restore path> / <drive letter> / <source path>
+                        // To avoid generating paths with a colon
+                        cmd.ExecuteNonQuery(string.Format(@"UPDATE ""{0}"" SET ""Targetpath"" = ? || CASE WHEN SUBSTR(""Path"", 2, 1) == "":"" THEN SUBSTR(""Path"", 1, 1) || SUBSTR(""Path"", 3) ELSE ""Path"" END", m_tempfiletable), destination);
+                    }
+                    else
+                    {
+                        largest_prefix = Library.Utility.Utility.AppendDirSeparator(largest_prefix);
+                        cmd.CommandText = string.Format(@"UPDATE ""{0}"" SET ""Targetpath"" = ? || SUBSTR(""Path"", ?)", m_tempfiletable);
+                        cmd.AddParameter(destination);
+                        cmd.AddParameter(largest_prefix.Length + 1);
+                        cmd.ExecuteNonQuery();
+                    }
+                }
             }
         }
@@ -416,11 +416,11 @@ namespace Duplicati.Library.Main.Database
             }
         }
 
-		public void UpdateTargetPath(long ID, string newname)
-		{
+        public void UpdateTargetPath(long ID, string newname)
+        {
             using (var cmd = m_connection.CreateCommand())
-				cmd.ExecuteNonQuery(string.Format(@"UPDATE ""{0}"" SET ""TargetPath"" = ? WHERE ""ID"" = ?", m_tempfiletable), newname, ID);
-		}
+                cmd.ExecuteNonQuery(string.Format(@"UPDATE ""{0}"" SET ""TargetPath"" = ? WHERE ""ID"" = ?", m_tempfiletable), newname, ID);
+        }
 
         public interface IExistingFileBlock
         {
@@ -864,21 +864,21 @@ namespace Duplicati.Library.Main.Database
             return new FilesAndMetadata(m_connection, m_tempfiletable, m_tempblocktable, blocksize, curvolume);
         }
 
-		private class FileToRestore : IFileToRestore
-		{
-			public string Path { get; private set; }
-			public string Hash { get; private set; }
-			public long ID { get; private set; }
+        private class FileToRestore : IFileToRestore
+        {
+            public string Path { get; private set; }
+            public string Hash { get; private set; }
+            public long ID { get; private set; }
             public long Length { get; private set; }
-			
+
             public FileToRestore(long id, string path, string hash, long length)
-			{
-				this.ID = id;
-				this.Path = path;
-				this.Hash = hash;
+            {
+                this.ID = id;
+                this.Path = path;
+                this.Hash = hash;
                 this.Length = length;
-			}
-		}
+            }
+        }
 
         public IEnumerable<IFileToRestore> GetFilesToRestore(bool onlyNonVerified)
         {
@@ -1140,88 +1140,88 @@ namespace Duplicati.Library.Main.Database
         {
             using (var cmd = m_connection.CreateCommand())
             using (var rd = cmd.ExecuteReader(string.Format(@"SELECT ""TargetPath"" FROM ""{0}"" WHERE ""BlocksetID"" == ?", m_tempfiletable), FOLDER_BLOCKSET_ID))
-				while(rd.Read())
-					yield return rd.GetValue(0).ToString();
+                while(rd.Read())
+                    yield return rd.GetValue(0).ToString();
+        }
+
+        public interface IFastSource
+        {
+            string TargetPath { get; }
+            long TargetFileID { get; }
+            string SourcePath { get; }
+            IEnumerable<IBlockEntry> Blocks { get; }
+        }
+
+        public interface IBlockEntry
+        {
+            long Offset { get; }
+            long Size { get; }
+            long Index { get; }
+            string Hash { get; }
         }
 
-		public interface IFastSource
-		{
-			string TargetPath { get; }
-			long TargetFileID { get; }
-			string SourcePath { get; }
-			IEnumerable<IBlockEntry> Blocks { get; }
-		}
-		
-		public interface IBlockEntry
-		{
-			long Offset { get; }
-			long Size { get; }
-			long Index { get; }
-			string Hash { get; }
-		}
-		
-		private class FastSource : IFastSource
-		{
-			private class BlockEntry : IBlockEntry
-			{
-				private System.Data.IDataReader m_rd;
-				private long m_blocksize;
-				public BlockEntry(System.Data.IDataReader rd, long blocksize) { m_rd = rd; m_blocksize = blocksize; }
+        private class FastSource : IFastSource
+        {
+            private class BlockEntry : IBlockEntry
+            {
+                private System.Data.IDataReader m_rd;
+                private long m_blocksize;
+                public BlockEntry(System.Data.IDataReader rd, long blocksize) { m_rd = rd; m_blocksize = blocksize; }
                 public long Offset { get { return m_rd.GetInt64(3) * m_blocksize; } }
                 public long Index { get { return m_rd.GetInt64(3); } }
                 public long Size { get { return m_rd.GetInt64(5); } }
                 public string Hash { get { return m_rd.GetString(4); } }
-			}
-			
-			private System.Data.IDataReader m_rd;
-			private long m_blocksize;
-			public FastSource(System.Data.IDataReader rd, long blocksize) { m_rd = rd; m_blocksize = blocksize; MoreData = true; }
-			public bool MoreData { get; private set; }
-			public string TargetPath { get { return m_rd.GetValue(0).ToString(); } }
+            }
+
+            private System.Data.IDataReader m_rd;
+            private long m_blocksize;
+            public FastSource(System.Data.IDataReader rd, long blocksize) { m_rd = rd; m_blocksize = blocksize; MoreData = true; }
+            public bool MoreData { get; private set; }
+            public string TargetPath { get { return m_rd.GetValue(0).ToString(); } }
             public long TargetFileID { get { return m_rd.GetInt64(2); } }
-			public string SourcePath { get { return m_rd.GetValue(1).ToString(); } }
-			
-			public IEnumerable<IBlockEntry> Blocks
-			{
-				get
-				{
-					var tid = this.TargetFileID;
-					
-					do
-					{
-						yield return new BlockEntry(m_rd, m_blocksize);
-					} while((MoreData = m_rd.Read()) && tid == this.TargetFileID);
-					
-				}
-			}
-		}
+            public string SourcePath { get { return m_rd.GetValue(1).ToString(); } }
+
+            public IEnumerable<IBlockEntry> Blocks
+            {
+                get
+                {
+                    var tid = this.TargetFileID;
+
+                    do
+                    {
+                        yield return new BlockEntry(m_rd, m_blocksize);
+                    } while((MoreData = m_rd.Read()) && tid == this.TargetFileID);
+
+                }
+            }
+        }
 
         public IEnumerable<IFastSource> GetFilesAndSourceBlocksFast(long blocksize)
-		{
-			var whereclause = string.Format(@" ""{0}"".""ID"" = ""{1}"".""FileID"" AND ""{1}"".""Restored"" = 0 AND ""{1}"".""Metadata"" = 0 AND ""{0}"".""TargetPath"" != ""{0}"".""Path"" ", m_tempfiletable, m_tempblocktable);
-			var sourcepaths = string.Format(@"SELECT DISTINCT ""{0}"".""Path"" FROM ""{0}"", ""{1}"" WHERE " + whereclause, m_tempfiletable, m_tempblocktable);
-			var latestBlocksetIds = @"SELECT ""File"".""Path"", ""File"".""BlocksetID"", MAX(""Fileset"".""Timestamp"") FROM ""Fileset"", ""FilesetEntry"", ""File"" WHERE ""FilesetEntry"".""FileID"" = ""File"".""ID"" AND ""FilesetEntry"".""FilesetID"" = ""Fileset"".""ID"" AND ""File"".""Path"" IN (" + sourcepaths + @") GROUP BY ""File"".""Path"" ";
-			var sources = string.Format(@"SELECT DISTINCT ""{0}"".""TargetPath"", ""{0}"".""Path"", ""{0}"".""ID"", ""{1}"".""Index"", ""{1}"".""Hash"", ""{1}"".""Size"" FROM ""{0}"", ""{1}"", ""File"", (" + latestBlocksetIds + @") S, ""Block"", ""BlocksetEntry"" WHERE ""BlocksetEntry"".""BlocksetID"" = ""S"".""BlocksetID"" AND ""BlocksetEntry"".""BlocksetID"" = ""File"".""BlocksetID"" AND ""BlocksetEntry"".""BlockID"" = ""Block"".""ID"" AND ""{1}"".""Index"" = ""BlocksetEntry"".""Index"" AND ""{1}"".""Hash"" = ""Block"".""Hash"" AND ""{1}"".""Size"" = ""Block"".""Size"" AND ""S"".""Path"" = ""{0}"".""Path"" AND " + whereclause + @" ORDER BY ""{0}"".""ID"", ""{1}"".""Index"" ", m_tempfiletable, m_tempblocktable);
-			using(var cmd = m_connection.CreateCommand())
-			using(var rd = cmd.ExecuteReader(sources))
-			{
-				if (rd.Read())
-				{
-					var more = false;
-					do
-					{
-						var n = new FastSource(rd, blocksize);
-						var tid = n.TargetFileID;
-						yield return n;
-						
-						more = n.MoreData;
-						while(more && n.TargetFileID == tid)
-							more = rd.Read();
-						
-					} while (more);
-				}
-			}
-		}
+        {
+            var whereclause = string.Format(@" ""{0}"".""ID"" = ""{1}"".""FileID"" AND ""{1}"".""Restored"" = 0 AND ""{1}"".""Metadata"" = 0 AND ""{0}"".""TargetPath"" != ""{0}"".""Path"" ", m_tempfiletable, m_tempblocktable);
+            var sourcepaths = string.Format(@"SELECT DISTINCT ""{0}"".""Path"" FROM ""{0}"", ""{1}"" WHERE " + whereclause, m_tempfiletable, m_tempblocktable);
+            var latestBlocksetIds = @"SELECT ""File"".""Path"", ""File"".""BlocksetID"", MAX(""Fileset"".""Timestamp"") FROM ""Fileset"", ""FilesetEntry"", ""File"" WHERE ""FilesetEntry"".""FileID"" = ""File"".""ID"" AND ""FilesetEntry"".""FilesetID"" = ""Fileset"".""ID"" AND ""File"".""Path"" IN (" + sourcepaths + @") GROUP BY ""File"".""Path"" ";
+            var sources = string.Format(@"SELECT DISTINCT ""{0}"".""TargetPath"", ""{0}"".""Path"", ""{0}"".""ID"", ""{1}"".""Index"", ""{1}"".""Hash"", ""{1}"".""Size"" FROM ""{0}"", ""{1}"", ""File"", (" + latestBlocksetIds + @") S, ""Block"", ""BlocksetEntry"" WHERE ""BlocksetEntry"".""BlocksetID"" = ""S"".""BlocksetID"" AND ""BlocksetEntry"".""BlocksetID"" = ""File"".""BlocksetID"" AND ""BlocksetEntry"".""BlockID"" = ""Block"".""ID"" AND ""{1}"".""Index"" = ""BlocksetEntry"".""Index"" AND ""{1}"".""Hash"" = ""Block"".""Hash"" AND ""{1}"".""Size"" = ""Block"".""Size"" AND ""S"".""Path"" = ""{0}"".""Path"" AND " + whereclause + @" ORDER BY ""{0}"".""ID"", ""{1}"".""Index"" ", m_tempfiletable, m_tempblocktable);
+            using(var cmd = m_connection.CreateCommand())
+            using(var rd = cmd.ExecuteReader(sources))
+            {
+                if (rd.Read())
+                {
+                    var more = false;
+                    do
+                    {
+                        var n = new FastSource(rd, blocksize);
+                        var tid = n.TargetFileID;
+                        yield return n;
+
+                        more = n.MoreData;
+                        while(more && n.TargetFileID == tid)
+                            more = rd.Read();
+
+                    } while (more);
+                }
+            }
+        }
     }
 }
diff --git a/Duplicati/Library/Main/Database/RemoteVolumeEntry.cs b/Duplicati/Library/Main/Database/RemoteVolumeEntry.cs
index a8c6b1621..555b6bd0f 100644
--- a/Duplicati/Library/Main/Database/RemoteVolumeEntry.cs
+++ b/Duplicati/Library/Main/Database/RemoteVolumeEntry.cs
@@ -7,13 +7,13 @@ namespace Duplicati.Library.Main.Database
 {
     public struct RemoteVolumeEntry : IRemoteVolume
     {
-		private readonly string m_name;
-		private readonly string m_hash;
-		private readonly long m_size;
-		private readonly RemoteVolumeType m_type;
-		private readonly RemoteVolumeState m_state;
+        private readonly string m_name;
+        private readonly string m_hash;
+        private readonly long m_size;
+        private readonly RemoteVolumeType m_type;
+        private readonly RemoteVolumeState m_state;
         private readonly DateTime m_deleteGracePeriod;
-		
+
         public string Name { get { return m_name; } }
         public string Hash { get { return m_hash; } }
         public long Size { get { return m_size; } }
new Library.Utility.FilterExpression(); m_sourceFilter = new Library.Utility.FilterExpression(sources, true); - + m_backendLogFlushTimer = DateTime.Now.Add(FLUSH_TIMESPAN); System.Threading.Thread parallelScanner = null; @@ -713,14 +713,14 @@ namespace Duplicati.Library.Main.Operation var repcnt = 0; while(repcnt < 100 && m_database.GetRemoteVolumeID(filesetvolume.RemoteFilename) >= 0) filesetvolume.ResetRemoteFilename(m_options, m_database.OperationTimestamp.AddSeconds(repcnt++)); - + if (m_database.GetRemoteVolumeID(filesetvolume.RemoteFilename) >= 0) throw new Exception("Unable to generate a unique fileset name"); m_result.OperationProgressUpdater.UpdatePhase(OperationPhase.Backup_ProcessingFiles); var filesetvolumeid = m_database.RegisterRemoteVolume(filesetvolume.RemoteFilename, RemoteVolumeType.Files, RemoteVolumeState.Temporary, m_transaction); m_database.CreateFileset(filesetvolumeid, VolumeBase.ParseFilename(filesetvolume.RemoteFilename).Time, m_transaction); - + RunMainOperation(snapshot, backend); //If the scanner is still running for some reason, make sure we kill it now @@ -729,21 +729,21 @@ namespace Duplicati.Library.Main.Operation } var lastVolumeSize = FinalizeRemoteVolumes(backend); - + using(new Logging.Timer("UpdateChangeStatistics")) m_database.UpdateChangeStatistics(m_result); using(new Logging.Timer("VerifyConsistency")) m_database.VerifyConsistency(m_transaction, m_options.Blocksize, m_options.BlockhashSize); UploadRealFileList(backend, filesetvolume); - + m_result.OperationProgressUpdater.UpdatePhase(OperationPhase.Backup_WaitForUpload); using(new Logging.Timer("Async backend wait")) backend.WaitForComplete(m_database, m_transaction); if (m_result.TaskControlRendevouz() != TaskControlState.Stop) CompactIfRequired(backend, lastVolumeSize); - + if (m_options.UploadVerificationFile) { m_result.OperationProgressUpdater.UpdatePhase(OperationPhase.Backup_VerificationUpload); @@ -890,14 +890,14 @@ namespace Duplicati.Library.Main.Operation Dictionary metadata = GenerateMetadata(snapshot, path, attributes); if (!metadata.ContainsKey("CoreSymlinkTarget")) - { - var p = snapshot.GetSymlinkTarget(path); - - if (string.IsNullOrWhiteSpace(p)) - m_result.AddVerboseMessage("Ignoring empty symlink {0}", path); - else - metadata["CoreSymlinkTarget"] = p; - } + { + var p = snapshot.GetSymlinkTarget(path); + + if (string.IsNullOrWhiteSpace(p)) + m_result.AddVerboseMessage("Ignoring empty symlink {0}", path); + else + metadata["CoreSymlinkTarget"] = p; + } var metahash = Utility.WrapMetadata(metadata, m_options); AddSymlinkToOutput(backend, path, DateTime.UtcNow, metahash); @@ -1035,17 +1035,17 @@ namespace Duplicati.Library.Main.Operation { m_result.AddedFiles++; m_result.SizeOfAddedFiles += filesize; - - if (m_options.Dryrun) - m_result.AddDryrunMessage(string.Format("Would add new file {0}, size {1}", path, Library.Utility.Utility.FormatSizeString(filesize))); + + if (m_options.Dryrun) + m_result.AddDryrunMessage(string.Format("Would add new file {0}, size {1}", path, Library.Utility.Utility.FormatSizeString(filesize))); } else { m_result.ModifiedFiles++; m_result.SizeOfModifiedFiles += filesize; - - if (m_options.Dryrun) - m_result.AddDryrunMessage(string.Format("Would add changed file {0}, size {1}", path, Library.Utility.Utility.FormatSizeString(filesize))); + + if (m_options.Dryrun) + m_result.AddDryrunMessage(string.Format("Would add changed file {0}, size {1}", path, Library.Utility.Utility.FormatSizeString(filesize))); } AddFileToOutput(backend, path, filesize, lastwrite, 
metahashandsize, hashcollector, filekey, blocklisthashes); @@ -1126,35 +1126,35 @@ namespace Duplicati.Library.Main.Operation if (m_blockvolume.Filesize > m_options.VolumeSize - m_options.Blocksize) { - if (m_options.Dryrun) - { + if (m_options.Dryrun) + { m_blockvolume.Close(); - m_result.AddDryrunMessage(string.Format("Would upload block volume: {0}, size: {1}", m_blockvolume.RemoteFilename, Library.Utility.Utility.FormatSizeString(new FileInfo(m_blockvolume.LocalFilename).Length))); - - if (m_indexvolume != null) - { - UpdateIndexVolume(); - m_indexvolume.FinishVolume(Library.Utility.Utility.CalculateHash(m_blockvolume.LocalFilename), new FileInfo(m_blockvolume.LocalFilename).Length); - m_result.AddDryrunMessage(string.Format("Would upload index volume: {0}, size: {1}", m_indexvolume.RemoteFilename, Library.Utility.Utility.FormatSizeString(new FileInfo(m_indexvolume.LocalFilename).Length))); - m_indexvolume.Dispose(); - m_indexvolume = null; - } + m_result.AddDryrunMessage(string.Format("Would upload block volume: {0}, size: {1}", m_blockvolume.RemoteFilename, Library.Utility.Utility.FormatSizeString(new FileInfo(m_blockvolume.LocalFilename).Length))); + + if (m_indexvolume != null) + { + UpdateIndexVolume(); + m_indexvolume.FinishVolume(Library.Utility.Utility.CalculateHash(m_blockvolume.LocalFilename), new FileInfo(m_blockvolume.LocalFilename).Length); + m_result.AddDryrunMessage(string.Format("Would upload index volume: {0}, size: {1}", m_indexvolume.RemoteFilename, Library.Utility.Utility.FormatSizeString(new FileInfo(m_indexvolume.LocalFilename).Length))); + m_indexvolume.Dispose(); + m_indexvolume = null; + } m_blockvolume.Dispose(); m_blockvolume = null; m_indexvolume.Dispose(); m_indexvolume = null; - } - else - { - //When uploading a new volume, we register the volumes and then flush the transaction - // this ensures that the local database and remote storage are as closely related as possible - m_database.UpdateRemoteVolume(m_blockvolume.RemoteFilename, RemoteVolumeState.Uploading, -1, null, m_transaction); + } + else + { + //When uploading a new volume, we register the volumes and then flush the transaction + // this ensures that the local database and remote storage are as closely related as possible + m_database.UpdateRemoteVolume(m_blockvolume.RemoteFilename, RemoteVolumeState.Uploading, -1, null, m_transaction); m_blockvolume.Close(); - UpdateIndexVolume(); - - backend.FlushDbMessages(m_database, m_transaction); - m_backendLogFlushTimer = DateTime.Now.Add(FLUSH_TIMESPAN); + UpdateIndexVolume(); + + backend.FlushDbMessages(m_database, m_transaction); + m_backendLogFlushTimer = DateTime.Now.Add(FLUSH_TIMESPAN); using(new Logging.Timer("CommitAddBlockToOutputFlush")) m_transaction.Commit(); @@ -1165,10 +1165,10 @@ namespace Duplicati.Library.Main.Operation m_indexvolume = null; using(new Logging.Timer("CommitAddBlockToOutputFlush")) - m_transaction.Commit(); - m_transaction = m_database.BeginTransaction(); - - } + m_transaction.Commit(); + m_transaction = m_database.BeginTransaction(); + + } } return true; @@ -1259,16 +1259,16 @@ namespace Duplicati.Library.Main.Operation private void UpdateIndexVolume() { - if (m_indexvolume != null) - { - m_database.AddIndexBlockLink(m_indexvolume.VolumeID, m_blockvolume.VolumeID, m_transaction); - m_indexvolume.StartVolume(m_blockvolume.RemoteFilename); - - foreach(var b in m_database.GetBlocks(m_blockvolume.VolumeID)) - m_indexvolume.AddBlock(b.Hash, b.Size); - - m_database.UpdateRemoteVolume(m_indexvolume.RemoteFilename, 
RemoteVolumeState.Uploading, -1, null, m_transaction);
-            }
+            if (m_indexvolume != null)
+            {
+                m_database.AddIndexBlockLink(m_indexvolume.VolumeID, m_blockvolume.VolumeID, m_transaction);
+                m_indexvolume.StartVolume(m_blockvolume.RemoteFilename);
+
+                foreach(var b in m_database.GetBlocks(m_blockvolume.VolumeID))
+                    m_indexvolume.AddBlock(b.Hash, b.Size);
+
+                m_database.UpdateRemoteVolume(m_indexvolume.RemoteFilename, RemoteVolumeState.Uploading, -1, null, m_transaction);
+            }
         }

         public void Dispose()
diff --git a/Duplicati/Library/Main/Operation/CompactHandler.cs b/Duplicati/Library/Main/Operation/CompactHandler.cs
index 2e7ed2c1f..b9f34b694 100644
--- a/Duplicati/Library/Main/Operation/CompactHandler.cs
+++ b/Duplicati/Library/Main/Operation/CompactHandler.cs
@@ -24,24 +24,24 @@ using System.Text;

 namespace Duplicati.Library.Main.Operation
 {
-    internal class CompactHandler
-    {
+    internal class CompactHandler
+    {
         protected string m_backendurl;
         protected Options m_options;
         protected CompactResults m_result;

-        public CompactHandler(string backend, Options options, CompactResults result)
-        {
+        public CompactHandler(string backend, Options options, CompactResults result)
+        {
             m_backendurl = backend;
             m_options = options;
             m_result = result;
-        }
-
-        public virtual void Run()
+        }
+
+        public virtual void Run()
         {
             if (!System.IO.File.Exists(m_options.Dbpath))
                 throw new Exception(string.Format("Database file does not exist: {0}", m_options.Dbpath));
-
+
             using(var db = new LocalDeleteDatabase(m_options.Dbpath, true))
             {
                 var tr = db.BeginTransaction();
@@ -50,7 +50,7 @@ namespace Duplicati.Library.Main.Operation
                     m_result.SetDatabase(db);
                     Utility.UpdateOptionsFromDb(db, m_options);
                     Utility.VerifyParameters(db, m_options);
-
+
                     var changed = DoCompact(db, false, ref tr);

                     if (changed && m_options.UploadVerificationFile)
@@ -66,8 +66,8 @@ namespace Duplicati.Library.Main.Operation
                             db.Vacuum();
                     }
                 }
-                else
-                    tr.Rollback();
+                else
+                    tr.Rollback();

                 tr = null;
             }
@@ -78,23 +78,23 @@ namespace Duplicati.Library.Main.Operation
                     catch { }
             }
         }
-    }
-
-    internal bool DoCompact(LocalDeleteDatabase db, bool hasVerifiedBackend, ref System.Data.IDbTransaction transaction)
+    }
+
+    internal bool DoCompact(LocalDeleteDatabase db, bool hasVerifiedBackend, ref System.Data.IDbTransaction transaction)
         {
             var report = db.GetCompactReport(m_options.VolumeSize, m_options.Threshold, m_options.SmallFileSize, m_options.SmallFileMaxCount, transaction);
             report.ReportCompactData(m_result);
-
+
             if (report.ShouldReclaim || report.ShouldCompact)
             {
                 using(var backend = new BackendManager(m_backendurl, m_options, m_result.BackendWriter, db))
                 {
                     if (!hasVerifiedBackend && !m_options.NoBackendverification)
                         FilelistProcessor.VerifyRemoteList(backend, m_options, db, m_result.BackendWriter);
-
+
                     BlockVolumeWriter newvol = new BlockVolumeWriter(m_options);
                     newvol.VolumeID = db.RegisterRemoteVolume(newvol.RemoteFilename, RemoteVolumeType.Blocks, RemoteVolumeState.Temporary, transaction);
-
+
                     IndexVolumeWriter newvolindex = null;
                     if (m_options.IndexfilePolicy != Options.IndexFileStrategy.None)
                     {
@@ -103,18 +103,18 @@ namespace Duplicati.Library.Main.Operation
                         db.AddIndexBlockLink(newvolindex.VolumeID, newvol.VolumeID, transaction);
                         newvolindex.StartVolume(newvol.RemoteFilename);
                     }
-
+
                     long blocksInVolume = 0;
                     long discardedBlocks = 0;
                     long discardedSize = 0;
                     byte[] buffer = new byte[m_options.Blocksize];
                     var remoteList = db.GetRemoteVolumes().Where(n => n.State == RemoteVolumeState.Uploaded || n.State == RemoteVolumeState.Verified).ToArray();
-
+
                     //These are for bookkeeping
                     var uploadedVolumes = new List>();
                     var deletedVolumes = new List>();
                     var downloadedVolumes = new List>();
-
+
                     //We start by deleting unused volumes to save space before uploading new stuff
                     var fullyDeleteable = (from v in remoteList
                                            where report.DeleteableVolumes.Contains(v.Name)
@@ -131,7 +131,7 @@ namespace Duplicati.Library.Main.Operation
                     var volumesToDownload = (from v in remoteList
                                              where report.CompactableVolumes.Contains(v.Name)
                                              select (IRemoteVolume)v).ToList();
-
+
                     using(var q = db.CreateBlockQueryHelper(m_options, transaction))
                     {
                         foreach(var entry in new AsyncDownloader(volumesToDownload, backend))
@@ -143,95 +143,95 @@ namespace Duplicati.Library.Main.Operation
                                 return false;
                             }
-                            downloadedVolumes.Add(new KeyValuePair(entry.Name, entry.Size));
-                            var inst = VolumeBase.ParseFilename(entry.Name);
-                            using(var f = new BlockVolumeReader(inst.CompressionModule, tmpfile, m_options))
-                            {
-                                foreach(var e in f.Blocks)
-                                {
+                            downloadedVolumes.Add(new KeyValuePair(entry.Name, entry.Size));
+                            var inst = VolumeBase.ParseFilename(entry.Name);
+                            using(var f = new BlockVolumeReader(inst.CompressionModule, tmpfile, m_options))
+                            {
+                                foreach(var e in f.Blocks)
+                                {
                                     if (q.UseBlock(e.Key, e.Value, transaction))
-                                    {
-                                        //TODO: How do we get the compression hint? Reverse query for filename in db? 
+ var s = f.ReadBlock(e.Key, buffer); + if (s != e.Value) + throw new Exception(string.Format("Size mismatch problem for block {0}, {1} vs {2}", e.Key, s, e.Value)); + + newvol.AddBlock(e.Key, buffer, 0, s, Duplicati.Library.Interface.CompressionHint.Compressible); + if (newvolindex != null) + newvolindex.AddBlock(e.Key, e.Value); + + db.MoveBlockToNewVolume(e.Key, e.Value, newvol.VolumeID, transaction); + blocksInVolume++; + + if (newvol.Filesize > m_options.VolumeSize) + { + uploadedVolumes.Add(new KeyValuePair(newvol.RemoteFilename, new System.IO.FileInfo(newvol.LocalFilename).Length)); + if (newvolindex != null) + uploadedVolumes.Add(new KeyValuePair(newvolindex.RemoteFilename, new System.IO.FileInfo(newvolindex.LocalFilename).Length)); + + if (!m_options.Dryrun) + backend.Put(newvol, newvolindex); + else + m_result.AddDryrunMessage(string.Format("Would upload generated blockset of size {0}", Library.Utility.Utility.FormatSizeString(new System.IO.FileInfo(newvol.LocalFilename).Length))); + + + newvol = new BlockVolumeWriter(m_options); + newvol.VolumeID = db.RegisterRemoteVolume(newvol.RemoteFilename, RemoteVolumeType.Blocks, RemoteVolumeState.Temporary, transaction); + + if (m_options.IndexfilePolicy != Options.IndexFileStrategy.None) + { + newvolindex = new IndexVolumeWriter(m_options); + newvolindex.VolumeID = db.RegisterRemoteVolume(newvolindex.RemoteFilename, RemoteVolumeType.Index, RemoteVolumeState.Temporary, transaction); db.AddIndexBlockLink(newvolindex.VolumeID, newvol.VolumeID, transaction); - newvolindex.StartVolume(newvol.RemoteFilename); - } - - blocksInVolume = 0; - - //After we upload this volume, we can delete all previous encountered volumes - deletedVolumes.AddRange(DoDelete(db, backend, deleteableVolumes, ref transaction)); + newvolindex.StartVolume(newvol.RemoteFilename); + } + + blocksInVolume = 0; + + //After we upload this volume, we can delete all previous encountered volumes + deletedVolumes.AddRange(DoDelete(db, backend, deleteableVolumes, ref transaction)); deleteableVolumes = new List(); - } - } - else - { - discardedBlocks++; - discardedSize += e.Value; - } - } - } - - deleteableVolumes.Add(entry); - } - - if (blocksInVolume > 0) - { - uploadedVolumes.Add(new KeyValuePair(newvol.RemoteFilename, new System.IO.FileInfo(newvol.LocalFilename).Length)); - if (newvolindex != null) - uploadedVolumes.Add(new KeyValuePair(newvolindex.RemoteFilename, new System.IO.FileInfo(newvolindex.LocalFilename).Length)); - if (!m_options.Dryrun) - backend.Put(newvol, newvolindex); - else - m_result.AddDryrunMessage(string.Format("Would upload generated blockset of size {0}", Library.Utility.Utility.FormatSizeString(new System.IO.FileInfo(newvol.LocalFilename).Length))); - } - else - { - db.RemoveRemoteVolume(newvol.RemoteFilename, transaction); - if (newvolindex != null) - { - db.RemoveRemoteVolume(newvolindex.RemoteFilename, transaction); - newvolindex.FinishVolume(null, 0); - } - } - } - } - - deletedVolumes.AddRange(DoDelete(db, backend, deleteableVolumes, ref transaction)); - + } + } + else + { + discardedBlocks++; + discardedSize += e.Value; + } + } + } + + deleteableVolumes.Add(entry); + } + + if (blocksInVolume > 0) + { + uploadedVolumes.Add(new KeyValuePair(newvol.RemoteFilename, new System.IO.FileInfo(newvol.LocalFilename).Length)); + if (newvolindex != null) + uploadedVolumes.Add(new KeyValuePair(newvolindex.RemoteFilename, new System.IO.FileInfo(newvolindex.LocalFilename).Length)); + if (!m_options.Dryrun) + backend.Put(newvol, newvolindex); + else + 
m_result.AddDryrunMessage(string.Format("Would upload generated blockset of size {0}", Library.Utility.Utility.FormatSizeString(new System.IO.FileInfo(newvol.LocalFilename).Length))); + } + else + { + db.RemoveRemoteVolume(newvol.RemoteFilename, transaction); + if (newvolindex != null) + { + db.RemoveRemoteVolume(newvolindex.RemoteFilename, transaction); + newvolindex.FinishVolume(null, 0); + } + } + } + } + + deletedVolumes.AddRange(DoDelete(db, backend, deleteableVolumes, ref transaction)); + var downloadSize = downloadedVolumes.Where(x => x.Value >= 0).Aggregate(0L, (a,x) => a + x.Value); var deletedSize = deletedVolumes.Where(x => x.Value >= 0).Aggregate(0L, (a,x) => a + x.Value); var uploadSize = uploadedVolumes.Where(x => x.Value >= 0).Aggregate(0L, (a,x) => a + x.Value); - + m_result.DeletedFileCount = deletedVolumes.Count; m_result.DownloadedFileCount = downloadedVolumes.Count; m_result.UploadedFileCount = uploadedVolumes.Count; @@ -240,31 +240,31 @@ namespace Duplicati.Library.Main.Operation m_result.UploadedFileSize = uploadSize; m_result.Dryrun = m_options.Dryrun; - if (m_result.Dryrun) - { + if (m_result.Dryrun) + { if (downloadedVolumes.Count == 0) m_result.AddDryrunMessage(string.Format("Would delete {0} files, which would reduce storage by {1}", m_result.DeletedFileCount, Library.Utility.Utility.FormatSizeString(m_result.DeletedFileSize))); else m_result.AddDryrunMessage(string.Format("Would download {0} file(s) with a total size of {1}, delete {2} file(s) with a total size of {3}, and compact to {4} file(s) with a size of {5}, which would reduce storage by {6} file(s) and {7}", m_result.DownloadedFileCount, Library.Utility.Utility.FormatSizeString(m_result.DownloadedFileSize), m_result.DeletedFileCount, Library.Utility.Utility.FormatSizeString(m_result.DeletedFileSize), m_result.UploadedFileCount, Library.Utility.Utility.FormatSizeString(m_result.UploadedFileSize), m_result.DeletedFileCount - m_result.UploadedFileCount, Library.Utility.Utility.FormatSizeString(m_result.DeletedFileSize - m_result.UploadedFileSize))); - } - else - { + } + else + { if (m_result.DownloadedFileCount == 0) m_result.AddMessage(string.Format("Deleted {0} files, which reduced storage by {1}", m_result.DeletedFileCount, Library.Utility.Utility.FormatSizeString(m_result.DeletedFileSize))); else m_result.AddMessage(string.Format("Downloaded {0} file(s) with a total size of {1}, deleted {2} file(s) with a total size of {3}, and compacted to {4} file(s) with a size of {5}, which reduced storage by {6} file(s) and {7}", m_result.DownloadedFileCount, Library.Utility.Utility.FormatSizeString(downloadSize), m_result.DeletedFileCount, Library.Utility.Utility.FormatSizeString(m_result.DeletedFileSize), m_result.UploadedFileCount, Library.Utility.Utility.FormatSizeString(m_result.UploadedFileSize), m_result.DeletedFileCount - m_result.UploadedFileCount, Library.Utility.Utility.FormatSizeString(m_result.DeletedFileSize - m_result.UploadedFileSize))); - } - - backend.WaitForComplete(db, transaction); - } + } + + backend.WaitForComplete(db, transaction); + } return (m_result.DeletedFileCount + m_result.UploadedFileCount) > 0; - } - else - { + } + else + { return false; - } - } + } + } private IEnumerable> DoDelete(LocalDeleteDatabase db, BackendManager backend, IEnumerable deleteableVolumes, ref System.Data.IDbTransaction transaction) { @@ -284,19 +284,19 @@ namespace Duplicati.Library.Main.Operation return PerformDelete(backend, db.GetDeletableVolumes(deleteableVolumes, transaction)); } - + private IEnumerable> 
PerformDelete(BackendManager backend, IEnumerable list) - { + { foreach(var f in list) - { - if (!m_options.Dryrun) - backend.Delete(f.Name, f.Size); - else - m_result.AddDryrunMessage(string.Format("Would delete remote file: {0}, size: {1}", f.Name, Library.Utility.Utility.FormatSizeString(f.Size))); + { + if (!m_options.Dryrun) + backend.Delete(f.Name, f.Size); + else + m_result.AddDryrunMessage(string.Format("Would delete remote file: {0}, size: {1}", f.Name, Library.Utility.Utility.FormatSizeString(f.Size))); - yield return new KeyValuePair(f.Name, f.Size); - } - } - } + yield return new KeyValuePair(f.Name, f.Size); + } + } + } } diff --git a/Duplicati/Library/Main/Operation/CreateBugReportHandler.cs b/Duplicati/Library/Main/Operation/CreateBugReportHandler.cs index 5cdc39d57..7246be56d 100644 --- a/Duplicati/Library/Main/Operation/CreateBugReportHandler.cs +++ b/Duplicati/Library/Main/Operation/CreateBugReportHandler.cs @@ -20,20 +20,20 @@ using Duplicati.Library.Main.Database; namespace Duplicati.Library.Main.Operation { - internal class CreateBugReportHandler - { + internal class CreateBugReportHandler + { private string m_targetpath; private Options m_options; private CreateLogDatabaseResults m_result; - public CreateBugReportHandler(string targetpath, Options options, CreateLogDatabaseResults result) - { + public CreateBugReportHandler(string targetpath, Options options, CreateLogDatabaseResults result) + { m_targetpath = targetpath; m_options = options; m_result = result; - } - - public void Run() + } + + public void Run() { var ext = System.IO.Path.GetExtension(m_targetpath); var module = m_options.CompressionModule; @@ -43,10 +43,10 @@ namespace Duplicati.Library.Main.Operation if (System.IO.File.Exists(m_targetpath)) throw new Exception(string.Format("Output file already exists, not overwriting: {0}", m_targetpath)); - + if (!System.IO.File.Exists(m_options.Dbpath)) throw new Exception(string.Format("Database file does not exist: {0}", m_options.Dbpath)); - + m_result.OperationProgressUpdater.UpdatePhase(OperationPhase.BugReport_Running); m_result.OperationProgressUpdater.UpdateProgress(0); @@ -73,8 +73,8 @@ namespace Duplicati.Library.Main.Operation } m_result.TargetPath = m_targetpath; - } - } - } + } + } + } } diff --git a/Duplicati/Library/Main/Operation/DeleteHandler.cs b/Duplicati/Library/Main/Operation/DeleteHandler.cs index 6842a1078..e77df5b1a 100644 --- a/Duplicati/Library/Main/Operation/DeleteHandler.cs +++ b/Duplicati/Library/Main/Operation/DeleteHandler.cs @@ -22,13 +22,13 @@ using System.Collections.Generic; namespace Duplicati.Library.Main.Operation { - internal class DeleteHandler - { + internal class DeleteHandler + { private DeleteResults m_result; protected string m_backendurl; protected Options m_options; - public DeleteHandler(string backend, Options options, DeleteResults result) + public DeleteHandler(string backend, Options options, DeleteResults result) { m_backendurl = backend; m_options = options; @@ -73,12 +73,12 @@ namespace Duplicati.Library.Main.Operation } public void DoRun(Database.LocalDeleteDatabase db, ref System.Data.IDbTransaction transaction, bool hasVerifiedBacked, bool forceCompact) - { + { using(var backend = new BackendManager(m_backendurl, m_options, m_result.BackendWriter, db)) { if (!hasVerifiedBacked && !m_options.NoBackendverification) FilelistProcessor.VerifyRemoteList(backend, m_options, db, m_result.BackendWriter); - + var filesetNumbers = db.FilesetTimes.Zip(Enumerable.Range(0, db.FilesetTimes.Count()), (a, b) => new 
Tuple(b, a.Value));
                 var toDelete = m_options.GetFilesetsToDelete(db.FilesetTimes.Select(x => x.Value).ToArray());
@@ -108,9 +108,9 @@ namespace Duplicati.Library.Main.Operation
                     else
                         m_result.AddDryrunMessage(string.Format("Would delete remote fileset: {0}", f.Key));
                 }
-
+
                 backend.WaitForComplete(db, transaction);
-
+
                 var count = lst.Length;
                 if (!m_options.Dryrun)
                 {
@@ -121,7 +121,7 @@ namespace Duplicati.Library.Main.Operation
                 }
                 else
                 {
-
+
                     if (count == 0)
                         m_result.AddDryrunMessage("No remote filesets would be deleted");
                     else
@@ -130,20 +130,20 @@ namespace Duplicati.Library.Main.Operation
                 if (count > 0 && m_options.Dryrun)
                     m_result.AddDryrunMessage("Remove --dry-run to actually delete files");
             }
-
+
             if (!m_options.NoAutoCompact && (forceCompact || (toDelete != null && toDelete.Length > 0)))
             {
                 m_result.CompactResults = new CompactResults(m_result);
                 new CompactHandler(m_backendurl, m_options, (CompactResults)m_result.CompactResults).DoCompact(db, true, ref transaction);
             }
-
+
             m_result.SetResults(
                 from n in filesetNumbers
                 where toDelete.Contains(n.Item2)
                 select n, m_options.Dryrun);
-            }
+            }
         }
-    }
+    }
 }
diff --git a/Duplicati/Library/Main/Operation/FilelistProcessor.cs b/Duplicati/Library/Main/Operation/FilelistProcessor.cs
index 3c60c6aae..160fc58fc 100644
--- a/Duplicati/Library/Main/Operation/FilelistProcessor.cs
+++ b/Duplicati/Library/Main/Operation/FilelistProcessor.cs
@@ -76,29 +76,29 @@ namespace Duplicati.Library.Main.Operation
         /// The database to compare with
         /// The log instance to use
         public static void VerifyRemoteList(BackendManager backend, Options options, LocalDatabase database, IBackendWriter log)
-        {
-            var tp = RemoteListAnalysis(backend, options, database, log);
-            long extraCount = 0;
-            long missingCount = 0;
+        {
+            var tp = RemoteListAnalysis(backend, options, database, log);
+            long extraCount = 0;
+            long missingCount = 0;
-            foreach(var n in tp.ExtraVolumes)
-            {
-                log.AddWarning(string.Format("Extra unknown file: {0}", n.File.Name), null);
-                extraCount++;
-            }
-
-            foreach(var n in tp.MissingVolumes)
-            {
-                log.AddWarning(string.Format("Missing file: {0}", n.Name), null);
-                missingCount++;
-            }
-
-            if (extraCount > 0)
-            {
-                var s = string.Format("Found {0} remote files that are not recorded in local storage, please run repair", extraCount);
-                log.AddError(s, null);
-                throw new Exception(s);
-            }
+            foreach(var n in tp.ExtraVolumes)
+            {
+                log.AddWarning(string.Format("Extra unknown file: {0}", n.File.Name), null);
+                extraCount++;
+            }
+
+            foreach(var n in tp.MissingVolumes)
+            {
+                log.AddWarning(string.Format("Missing file: {0}", n.Name), null);
+                missingCount++;
+            }
+
+            if (extraCount > 0)
+            {
+                var s = string.Format("Found {0} remote files that are not recorded in local storage, please run repair", extraCount);
+                log.AddError(s, null);
+                throw new Exception(s);
+            }

             var lookup = new Dictionary();
             var doubles = new Dictionary();
@@ -119,11 +119,11 @@ namespace Duplicati.Library.Main.Operation

             if (missingCount > 0)
             {
-                string s;
+                string s;
                 if (!tp.BackupPrefixes.Contains(options.Prefix) && tp.BackupPrefixes.Length > 0)
-                    s = string.Format("Found {0} files that are missing from the remote storage, and no files with the backup prefix {1}, but found the following backup prefixes: {2}", missingCount, options.Prefix, string.Join(", ", tp.BackupPrefixes));
else - s = string.Format("Found {0} files that are missing from the remote storage, please run repair", missingCount); + s = string.Format("Found {0} files that are missing from the remote storage, please run repair", missingCount); log.AddError(s, null); throw new Exception(s); diff --git a/Duplicati/Library/Main/Operation/ListFilesHandler.cs b/Duplicati/Library/Main/Operation/ListFilesHandler.cs index 6a4d2edf9..fdb279ce3 100644 --- a/Duplicati/Library/Main/Operation/ListFilesHandler.cs +++ b/Duplicati/Library/Main/Operation/ListFilesHandler.cs @@ -66,7 +66,7 @@ namespace Duplicati.Library.Main.Operation } } - m_result.AddMessage("No local database, accessing remote store"); + m_result.AddMessage("No local database, accessing remote store"); //TODO: Add prefix and foldercontents if (m_options.ListFolderContents) diff --git a/Duplicati/Library/Main/Operation/RecreateDatabaseHandler.cs b/Duplicati/Library/Main/Operation/RecreateDatabaseHandler.cs index afa15dec3..e95429545 100644 --- a/Duplicati/Library/Main/Operation/RecreateDatabaseHandler.cs +++ b/Duplicati/Library/Main/Operation/RecreateDatabaseHandler.cs @@ -55,29 +55,29 @@ namespace Duplicati.Library.Main.Operation using(var db = new LocalDatabase(m_options.Dbpath, "Recreate", true)) { - m_result.SetDatabase(db); + m_result.SetDatabase(db); if (db.FindMatchingFilesets(m_options.Time, m_options.Version).Any()) throw new Exception(string.Format("The version(s) being updated to, already exists")); - // Mark as incomplete - db.PartiallyRecreated = true; + // Mark as incomplete + db.PartiallyRecreated = true; - Utility.UpdateOptionsFromDb(db, m_options, null); + Utility.UpdateOptionsFromDb(db, m_options, null); DoRun(db, true, filter, filelistfilter, blockprocessor); db.WriteResults(); } } - /// - /// Run the recreate procedure - /// - /// The database to restore into - /// True if this is an update call, false otherwise - /// A filter that can be used to disregard certain remote files, intended to be used to select a certain filelist - /// Filters the files in a filelist to prevent downloading unwanted data - /// A callback hook that can be used to work with downloaded block volumes, intended to be use to recover data blocks while processing blocklists - internal void DoRun(LocalDatabase dbparent, bool updating, Library.Utility.IFilter filter = null, NumberedFilterFilelistDelegate filelistfilter = null, BlockVolumePostProcessor blockprocessor = null) + /// + /// Run the recreate procedure + /// + /// The database to restore into + /// True if this is an update call, false otherwise + /// A filter that can be used to disregard certain remote files, intended to be used to select a certain filelist + /// Filters the files in a filelist to prevent downloading unwanted data + /// A callback hook that can be used to work with downloaded block volumes, intended to be use to recover data blocks while processing blocklists + internal void DoRun(LocalDatabase dbparent, bool updating, Library.Utility.IFilter filter = null, NumberedFilterFilelistDelegate filelistfilter = null, BlockVolumePostProcessor blockprocessor = null) { m_result.OperationProgressUpdater.UpdatePhase(OperationPhase.Recreate_Running); @@ -85,12 +85,12 @@ namespace Duplicati.Library.Main.Operation using(var restoredb = new LocalRecreateDatabase(dbparent, m_options)) using(var backend = new BackendManager(m_backendurl, m_options, m_result.BackendWriter, restoredb)) { - restoredb.RepairInProgress = true; + restoredb.RepairInProgress = true; var volumeIds = new Dictionary(); var 
rawlist = backend.List(); - + //First step is to examine the remote storage to see what // kind of data we can find var remotefiles = @@ -109,10 +109,10 @@ namespace Duplicati.Library.Main.Operation else { var tmp = - (from x in rawlist - let n = VolumeBase.ParseFilename(x) - where - n != null + (from x in rawlist + let n = VolumeBase.ParseFilename(x) + where + n != null select n.Prefix).ToArray(); var types = tmp.Distinct().ToArray(); @@ -356,7 +356,7 @@ namespace Duplicati.Library.Main.Operation for(var i = 0; i < 3; i++) { // Grab the list matching the pass type - var lst = restoredb.GetMissingBlockListVolumes(i, m_options.Blocksize, hashsize).ToList(); + var lst = restoredb.GetMissingBlockListVolumes(i, m_options.Blocksize, hashsize).ToList(); if (lst.Count > 0) { switch (i) @@ -422,25 +422,25 @@ namespace Duplicati.Library.Main.Operation } } - backend.WaitForComplete(restoredb, null); + backend.WaitForComplete(restoredb, null); - if (m_options.RepairOnlyPaths) - { - m_result.AddMessage("Recreate/path-update completed, not running consistency checks"); - } - else - { - m_result.AddMessage("Recreate completed, verifying the database consistency"); + if (m_options.RepairOnlyPaths) + { + m_result.AddMessage("Recreate/path-update completed, not running consistency checks"); + } + else + { + m_result.AddMessage("Recreate completed, verifying the database consistency"); - //All done, we must verify that we have all blocklist fully intact - // if this fails, the db will not be deleted, so it can be used, - // except to continue a backup - restoredb.VerifyConsistency(null, m_options.Blocksize, m_options.BlockhashSize); + //All done, we must verify that we have all blocklist fully intact + // if this fails, the db will not be deleted, so it can be used, + // except to continue a backup + restoredb.VerifyConsistency(null, m_options.Blocksize, m_options.BlockhashSize); - m_result.AddMessage("Recreate completed, and consistency checks completed, marking database as complete"); + m_result.AddMessage("Recreate completed, and consistency checks completed, marking database as complete"); - restoredb.RepairInProgress = false; - } + restoredb.RepairInProgress = false; + } } } diff --git a/Duplicati/Library/Main/Operation/RepairHandler.cs b/Duplicati/Library/Main/Operation/RepairHandler.cs index 8472a0ad1..294bb58e3 100644 --- a/Duplicati/Library/Main/Operation/RepairHandler.cs +++ b/Duplicati/Library/Main/Operation/RepairHandler.cs @@ -101,10 +101,10 @@ namespace Duplicati.Library.Main.Operation Utility.UpdateOptionsFromDb(db, m_options); Utility.VerifyParameters(db, m_options); - if (db.PartiallyRecreated) - throw new Exception("The database was only partially recreated. This database may be incomplete and the repair process is not allowed to alter remote files as that could result in data loss."); + if (db.PartiallyRecreated) + throw new Exception("The database was only partially recreated. This database may be incomplete and the repair process is not allowed to alter remote files as that could result in data loss."); - if (db.RepairInProgress) + if (db.RepairInProgress) throw new Exception("The database was attempted repaired, but the repair did not complete. 
This database may be incomplete and the repair process is not allowed to alter remote files as that could result in data loss."); var tp = FilelistProcessor.RemoteListAnalysis(backend, m_options, db, m_result.BackendWriter); @@ -116,7 +116,7 @@ namespace Duplicati.Library.Main.Operation throw new Exception(Strings.Common.InvalidHashAlgorithm(m_options.BlockHashAlgorithm)); if (!blockhasher.CanReuseTransform) throw new Exception(Strings.Common.InvalidCryptoSystem(m_options.BlockHashAlgorithm)); - + var progress = 0; var targetProgess = tp.ExtraVolumes.Count() + tp.MissingVolumes.Count() + tp.VerificationRequiredVolumes.Count(); @@ -256,7 +256,7 @@ namespace Duplicati.Library.Main.Operation if (ex is System.Threading.ThreadAbortException) throw; } - + foreach(var n in tp.MissingVolumes) { IDisposable newEntry = null; @@ -278,9 +278,9 @@ namespace Duplicati.Library.Main.Operation var w = new FilesetVolumeWriter(m_options, DateTime.UtcNow); newEntry = w; w.SetRemoteFilename(n.Name); - + db.WriteFileset(w, null, filesetId); - + w.Close(); if (m_options.Dryrun) m_result.AddDryrunMessage(string.Format("would re-upload fileset {0}, with size {1}, previous size {2}", n.Name, Library.Utility.Utility.FormatSizeString(new System.IO.FileInfo(w.LocalFilename).Length), Library.Utility.Utility.FormatSizeString(n.Size))); @@ -297,15 +297,15 @@ namespace Duplicati.Library.Main.Operation w.SetRemoteFilename(n.Name); var h = System.Security.Cryptography.HashAlgorithm.Create(m_options.BlockHashAlgorithm); - + foreach(var blockvolume in db.GetBlockVolumesFromIndexName(n.Name)) - { + { w.StartVolume(blockvolume.Name); var volumeid = db.GetRemoteVolumeID(blockvolume.Name); - + foreach(var b in db.GetBlocks(volumeid)) w.AddBlock(b.Hash, b.Size); - + w.FinishVolume(blockvolume.Hash, blockvolume.Size); if (m_options.IndexfilePolicy == Options.IndexFileStrategy.Full) @@ -318,9 +318,9 @@ namespace Duplicati.Library.Main.Operation w.WriteBlocklist(b.Item1, b.Item2, 0, b.Item3); } } - + w.Close(); - + if (m_options.Dryrun) m_result.AddDryrunMessage(string.Format("would re-upload index file {0}, with size {1}, previous size {2}", n.Name, Library.Utility.Utility.FormatSizeString(new System.IO.FileInfo(w.LocalFilename).Length), Library.Utility.Utility.FormatSizeString(n.Size))); else @@ -439,10 +439,10 @@ namespace Duplicati.Library.Main.Operation m_result.AddMessage("Destination and database are synchronized, not making any changes"); } - m_result.OperationProgressUpdater.UpdateProgress(1); - backend.WaitForComplete(db, null); + m_result.OperationProgressUpdater.UpdateProgress(1); + backend.WaitForComplete(db, null); db.WriteResults(); - } + } } public void RunRepairCommon() @@ -458,7 +458,7 @@ namespace Duplicati.Library.Main.Operation Utility.UpdateOptionsFromDb(db, m_options); - if (db.RepairInProgress || db.PartiallyRecreated) + if (db.RepairInProgress || db.PartiallyRecreated) m_result.AddWarning("The database is marked as \"in-progress\" and may be incomplete.", null); db.FixDuplicateMetahash(); diff --git a/Duplicati/Library/Main/Operation/RestoreControlFilesHandler.cs b/Duplicati/Library/Main/Operation/RestoreControlFilesHandler.cs index c97bfcdba..f5504c71f 100644 --- a/Duplicati/Library/Main/Operation/RestoreControlFilesHandler.cs +++ b/Duplicati/Library/Main/Operation/RestoreControlFilesHandler.cs @@ -33,17 +33,17 @@ namespace Duplicati.Library.Main.Operation var filter = Library.Utility.JoinedFilterExpression.Join(new Library.Utility.FilterExpression(filterstrings), compositefilter); - try - { + try + { var 
filteredList = ListFilesHandler.ParseAndFilterFilesets(backend.List(), m_options); if (filteredList.Count == 0) throw new Exception("No filesets found on remote target"); - Exception lastEx = new Exception("No suitable files found on remote target"); - - foreach(var fileversion in filteredList) - try - { + Exception lastEx = new Exception("No suitable files found on remote target"); + + foreach(var fileversion in filteredList) + try + { if (m_result.TaskControlRendevouz() == TaskControlState.Stop) { backend.WaitForComplete(db, null); @@ -51,44 +51,44 @@ namespace Duplicati.Library.Main.Operation } var file = fileversion.Value.File; - long size; - string hash; - RemoteVolumeType type; - RemoteVolumeState state; - if (!db.GetRemoteVolume(file.Name, out hash, out size, out type, out state)) - size = file.Size; - + long size; + string hash; + RemoteVolumeType type; + RemoteVolumeState state; + if (!db.GetRemoteVolume(file.Name, out hash, out size, out type, out state)) + size = file.Size; + var res = new List(); - using (var tmpfile = backend.Get(file.Name, size, hash)) - using (var tmp = new Volumes.FilesetVolumeReader(RestoreHandler.GetCompressionModule(file.Name), tmpfile, m_options)) - foreach (var cf in tmp.ControlFiles) + using (var tmpfile = backend.Get(file.Name, size, hash)) + using (var tmp = new Volumes.FilesetVolumeReader(RestoreHandler.GetCompressionModule(file.Name), tmpfile, m_options)) + foreach (var cf in tmp.ControlFiles) if (Library.Utility.FilterExpression.Matches(filter, cf.Key)) { var targetpath = System.IO.Path.Combine(m_options.Restorepath, cf.Key); - using (var ts = System.IO.File.Create(targetpath)) - Library.Utility.Utility.CopyStream(cf.Value, ts); + using (var ts = System.IO.File.Create(targetpath)) + Library.Utility.Utility.CopyStream(cf.Value, ts); res.Add(targetpath); } - + m_result.SetResult(res); - lastEx = null; - break; - } - catch(Exception ex) - { - lastEx = ex; + lastEx = null; + break; + } + catch(Exception ex) + { + lastEx = ex; if (ex is System.Threading.ThreadAbortException) throw; - } - - if (lastEx != null) - throw lastEx; - } - finally - { - backend.WaitForComplete(db, null); - } + } + + if (lastEx != null) + throw lastEx; + } + finally + { + backend.WaitForComplete(db, null); + } db.WriteResults(); } diff --git a/Duplicati/Library/Main/Operation/RestoreHandler.cs b/Duplicati/Library/Main/Operation/RestoreHandler.cs index a8a403271..a780bb056 100644 --- a/Duplicati/Library/Main/Operation/RestoreHandler.cs +++ b/Duplicati/Library/Main/Operation/RestoreHandler.cs @@ -80,7 +80,7 @@ namespace Duplicati.Library.Main.Operation // If we have both target paths and a filter, combine into a single filter filter = Library.Utility.JoinedFilterExpression.Join(new Library.Utility.FilterExpression(paths), filter); - + if (!m_options.NoLocalDb && m_systemIO.FileExists(m_options.Dbpath)) { using(var db = new LocalRestoreDatabase(m_options.Dbpath)) @@ -183,7 +183,7 @@ namespace Duplicati.Library.Main.Operation if (m_options.Version != null && m_options.Version.Length > 0) m_options.RawOptions["version"] = string.Join(",", Enumerable.Range(0, m_options.Version.Length).Select(x => x.ToString())); - DoRun(database, filter, m_result); + DoRun(database, filter, m_result); } } } @@ -321,7 +321,7 @@ namespace Duplicati.Library.Main.Operation m_systemIO.DirectoryCreate(folderpath); } - ApplyMetadata(targetpath, metainfo.Value, options.RestorePermissions, options.Dryrun); + ApplyMetadata(targetpath, metainfo.Value, options.RestorePermissions, options.Dryrun); } catch 
(Exception ex) { @@ -343,7 +343,7 @@ namespace Duplicati.Library.Main.Operation Utility.UpdateOptionsFromDb(database, m_options); Utility.VerifyParameters(database, m_options); m_blockbuffer = new byte[m_options.Blocksize]; - + var blockhasher = System.Security.Cryptography.HashAlgorithm.Create(m_options.BlockHashAlgorithm); var filehasher = System.Security.Cryptography.HashAlgorithm.Create(m_options.FileHashAlgorithm); if (blockhasher == null) @@ -417,26 +417,26 @@ namespace Duplicati.Library.Main.Operation } var brokenFiles = new List(); - foreach(var blockvolume in new AsyncDownloader(volumes, backend)) - try - { + foreach(var blockvolume in new AsyncDownloader(volumes, backend)) + try + { if (m_result.TaskControlRendevouz() == TaskControlState.Stop) { backend.WaitForComplete(database, null); return; } - using(var tmpfile = blockvolume.TempFile) - using(var blocks = new BlockVolumeReader(GetCompressionModule(blockvolume.Name), tmpfile, m_options)) + using(var tmpfile = blockvolume.TempFile) + using(var blocks = new BlockVolumeReader(GetCompressionModule(blockvolume.Name), tmpfile, m_options)) PatchWithBlocklist(database, blocks, m_options, result, m_blockbuffer, metadatastorage); - } - catch (Exception ex) - { + } + catch (Exception ex) + { brokenFiles.Add(blockvolume.Name); result.AddError(string.Format("Failed to patch with remote file: \"{0}\", message: {1}", blockvolume.Name, ex.Message), ex); if (ex is System.Threading.ThreadAbortException) throw; - } + } // Enforcing the length of files is now already done during ScanForExistingTargetBlocks // and thus not necessary anymore. @@ -447,7 +447,7 @@ namespace Duplicati.Library.Main.Operation // Reset the filehasher if it was used to verify existing files filehasher.Initialize(); - + if (m_result.TaskControlRendevouz() == TaskControlState.Stop) return; @@ -508,7 +508,7 @@ namespace Duplicati.Library.Main.Operation result.EndTime = DateTime.UtcNow; } - private static void ApplyMetadata(string path, System.IO.Stream stream, bool restorePermissions, bool dryrun) + private static void ApplyMetadata(string path, System.IO.Stream stream, bool restorePermissions, bool dryrun) { using(var tr = new System.IO.StreamReader(stream)) using(var jr = new Newtonsoft.Json.JsonTextReader(tr)) @@ -518,9 +518,9 @@ namespace Duplicati.Library.Main.Operation long t; System.IO.FileAttributes fa; - // If this is dry-run, we stop after having deserialized the metadata - if (dryrun) - return; + // If this is dry-run, we stop after having deserialized the metadata + if (dryrun) + return; var isDirTarget = path.EndsWith(DIRSEP); var targetpath = isDirTarget ? 
path.Substring(0, path.Length - 1) : path; @@ -528,9 +528,9 @@ namespace Duplicati.Library.Main.Operation // Make the symlink first, otherwise we cannot apply metadata to it if (metadata.TryGetValue("CoreSymlinkTarget", out k)) m_systemIO.CreateSymlink(targetpath, k, isDirTarget); - // If the target is a folder, make sure we create it first - else if (isDirTarget && !m_systemIO.DirectoryExists(targetpath)) - m_systemIO.DirectoryCreate(targetpath); + // If the target is a folder, make sure we create it first + else if (isDirTarget && !m_systemIO.DirectoryExists(targetpath)) + m_systemIO.DirectoryCreate(targetpath); if (metadata.TryGetValue("CoreLastWritetime", out k) && long.TryParse(k, out t)) { @@ -562,67 +562,67 @@ namespace Duplicati.Library.Main.Operation { var updateCount = 0L; foreach(var entry in database.GetFilesAndSourceBlocksFast(options.Blocksize)) - { + { var targetpath = entry.TargetPath; var targetfileid = entry.TargetFileID; var sourcepath = entry.SourcePath; var patched = false; - try - { - if (m_systemIO.FileExists(sourcepath)) - { - var folderpath = m_systemIO.PathGetDirectoryName(targetpath); - if (!options.Dryrun && !m_systemIO.DirectoryExists(folderpath)) - { - result.AddWarning(string.Format("Creating missing folder {0} for file {1}", folderpath, targetpath), null); - m_systemIO.DirectoryCreate(folderpath); - } - - using(var targetstream = options.Dryrun ? null : m_systemIO.FileOpenWrite(targetpath)) - { - try - { - using(var sourcestream = m_systemIO.FileOpenRead(sourcepath)) - { - foreach(var block in entry.Blocks) - { + try + { + if (m_systemIO.FileExists(sourcepath)) + { + var folderpath = m_systemIO.PathGetDirectoryName(targetpath); + if (!options.Dryrun && !m_systemIO.DirectoryExists(folderpath)) + { + result.AddWarning(string.Format("Creating missing folder {0} for file {1}", folderpath, targetpath), null); + m_systemIO.DirectoryCreate(folderpath); + } + + using(var targetstream = options.Dryrun ? 
null : m_systemIO.FileOpenWrite(targetpath)) + { + try + { + using(var sourcestream = m_systemIO.FileOpenRead(sourcepath)) + { + foreach(var block in entry.Blocks) + { if (result.TaskControlRendevouz() == TaskControlState.Stop) return; //TODO: Handle metadata - if (sourcestream.Length > block.Offset) - { - sourcestream.Position = block.Offset; - - var size = sourcestream.Read(blockbuffer, 0, blockbuffer.Length); - if (size == block.Size) - { - var key = Convert.ToBase64String(hasher.ComputeHash(blockbuffer, 0, size)); - if (key == block.Hash) - { + if (sourcestream.Length > block.Offset) + { + sourcestream.Position = block.Offset; + + var size = sourcestream.Read(blockbuffer, 0, blockbuffer.Length); + if (size == block.Size) + { + var key = Convert.ToBase64String(hasher.ComputeHash(blockbuffer, 0, size)); + if (key == block.Hash) + { patched = true; - if (!options.Dryrun) - { - targetstream.Position = block.Offset; - targetstream.Write(blockbuffer, 0, size); - } - - blockmarker.SetBlockRestored(targetfileid, block.Index, key, block.Size, false); - } - } - } - } - } - } - catch (Exception ex) - { - result.AddWarning(string.Format("Failed to patch file: \"{0}\" with data from local file \"{1}\", message: {2}", targetpath, sourcepath, ex.Message), ex); + if (!options.Dryrun) + { + targetstream.Position = block.Offset; + targetstream.Write(blockbuffer, 0, size); + } + + blockmarker.SetBlockRestored(targetfileid, block.Index, key, block.Size, false); + } + } + } + } + } + } + catch (Exception ex) + { + result.AddWarning(string.Format("Failed to patch file: \"{0}\" with data from local file \"{1}\", message: {2}", targetpath, sourcepath, ex.Message), ex); if (ex is System.Threading.ThreadAbortException) throw; - } - } + } + } if ((++updateCount) % 20 == 0) { @@ -631,12 +631,12 @@ namespace Duplicati.Library.Main.Operation return; } - } + } else { result.AddVerboseMessage("Local source file not found: {0}", sourcepath); } - } + } catch (Exception ex) { result.AddWarning(string.Format("Failed to patch file: \"{0}\" with local data, message: {1}", targetpath, ex.Message), ex); @@ -650,11 +650,11 @@ namespace Duplicati.Library.Main.Operation result.AddVerboseMessage("Target file is not patched any local data: {0}", targetpath); if (patched && options.Dryrun) - result.AddDryrunMessage(string.Format("Would patch file with local data: {0}", targetpath)); - } - + result.AddDryrunMessage(string.Format("Would patch file with local data: {0}", targetpath)); + } + blockmarker.UpdateProcessed(result.OperationProgressUpdater); - blockmarker.Commit(result); + blockmarker.Commit(result); } } @@ -674,12 +674,12 @@ namespace Duplicati.Library.Main.Operation if (result.TaskControlRendevouz() == TaskControlState.Stop) return; - var folderpath = m_systemIO.PathGetDirectoryName(targetpath); - if (!options.Dryrun && !m_systemIO.DirectoryExists(folderpath)) - { + var folderpath = m_systemIO.PathGetDirectoryName(targetpath); + if (!options.Dryrun && !m_systemIO.DirectoryExists(folderpath)) + { result.AddWarning(string.Format("Creating missing folder {0} for file {1}", folderpath, targetpath), null); - m_systemIO.DirectoryCreate(folderpath); - } + m_systemIO.DirectoryCreate(folderpath); + } using (var file = options.Dryrun ? 
null : m_systemIO.FileOpenWrite(targetpath)) foreach (var targetblock in restorelist.Blocks) @@ -711,7 +711,7 @@ namespace Duplicati.Library.Main.Operation var key = Convert.ToBase64String(hasher.ComputeHash(blockbuffer, 0, size)); if (key == targetblock.Hash) { - if (!options.Dryrun) + if (!options.Dryrun) { if (targetblock.IsMetadata) metadatastorage.Add(targetpath, new System.IO.MemoryStream(blockbuffer, 0, size)); @@ -721,7 +721,7 @@ namespace Duplicati.Library.Main.Operation file.Write(blockbuffer, 0, size); } } - + blockmarker.SetBlockRestored(targetfileid, targetblock.Index, key, targetblock.Size, false); patched = true; break; @@ -754,7 +754,7 @@ namespace Duplicati.Library.Main.Operation result.AddVerboseMessage("Target file is not patched any local data: {0}", targetpath); if (patched && options.Dryrun) - result.AddDryrunMessage(string.Format("Would patch file with local data: {0}", targetpath)); + result.AddDryrunMessage(string.Format("Would patch file with local data: {0}", targetpath)); } blockmarker.UpdateProcessed(result.OperationProgressUpdater); @@ -772,23 +772,23 @@ namespace Duplicati.Library.Main.Operation result.OperationProgressUpdater.UpdatefileCount(c.Item1, c.Item2, true); } - using(new Logging.Timer("SetTargetPaths")) - if (!string.IsNullOrEmpty(options.Restorepath)) - { - // Find the largest common prefix - string largest_prefix = database.GetLargestPrefix(); + using(new Logging.Timer("SetTargetPaths")) + if (!string.IsNullOrEmpty(options.Restorepath)) + { + // Find the largest common prefix + string largest_prefix = database.GetLargestPrefix(); result.AddVerboseMessage("Mapping restore path prefix to \"{0}\" to \"{1}\"", largest_prefix, Library.Utility.Utility.AppendDirSeparator(options.Restorepath)); - // Set the target paths, special care with C:\ and / - database.SetTargetPaths(largest_prefix, Library.Utility.Utility.AppendDirSeparator(options.Restorepath)); - } - else - { - database.SetTargetPaths("", ""); - } + // Set the target paths, special care with C:\ and / + database.SetTargetPaths(largest_prefix, Library.Utility.Utility.AppendDirSeparator(options.Restorepath)); + } + else + { + database.SetTargetPaths("", ""); + } // Create a temporary table BLOCKS that lists all blocks that needs to be recovered - using(new Logging.Timer("FindMissingBlocks")) + using(new Logging.Timer("FindMissingBlocks")) database.FindMissingBlocks(result, options.SkipMetadata); // Create temporary tables and triggers that automatically track progress @@ -798,18 +798,18 @@ namespace Duplicati.Library.Main.Operation } private static void CreateDirectoryStructure(LocalRestoreDatabase database, Options options, RestoreResults result) - { - // This part is not protected by try/catch as we need the target folder to exist - if (!string.IsNullOrEmpty(options.Restorepath)) + { + // This part is not protected by try/catch as we need the target folder to exist + if (!string.IsNullOrEmpty(options.Restorepath)) if (!m_systemIO.DirectoryExists(options.Restorepath)) { if (options.Verbose) result.AddVerboseMessage("Creating folder: {0}", options.Restorepath); - if (options.Dryrun) - result.AddDryrunMessage(string.Format("Would create folder: {0}", options.Restorepath)); - else - m_systemIO.DirectoryCreate(options.Restorepath); + if (options.Dryrun) + result.AddDryrunMessage(string.Format("Would create folder: {0}", options.Restorepath)); + else + m_systemIO.DirectoryCreate(options.Restorepath); } foreach (var folder in database.GetTargetFolders()) @@ -821,15 +821,15 @@ namespace 
Duplicati.Library.Main.Operation if (!m_systemIO.DirectoryExists(folder)) { - result.FoldersRestored++; - + result.FoldersRestored++; + if (options.Verbose) result.AddVerboseMessage("Creating folder: {0}", folder); - if (options.Dryrun) - result.AddDryrunMessage(string.Format("Would create folder: {0}", folder)); - else - m_systemIO.DirectoryCreate(folder); + if (options.Dryrun) + result.AddDryrunMessage(string.Format("Would create folder: {0}", folder)); + else + m_systemIO.DirectoryCreate(folder); } } catch (Exception ex) diff --git a/Duplicati/Library/Main/Operation/RestoreHandlerMetadataStorage.cs b/Duplicati/Library/Main/Operation/RestoreHandlerMetadataStorage.cs index 39b663d17..83897fa91 100644 --- a/Duplicati/Library/Main/Operation/RestoreHandlerMetadataStorage.cs +++ b/Duplicati/Library/Main/Operation/RestoreHandlerMetadataStorage.cs @@ -1,27 +1 @@ -// Copyright (C) 2015, The Duplicati Team -// http://www.duplicati.com, info@duplicati.com -// -// This library is free software; you can redistribute it and/or modify -// it under the terms of the GNU Lesser General Public License as -// published by the Free Software Foundation; either version 2.1 of the -// License, or (at your option) any later version. -// -// This library is distributed in the hope that it will be useful, but -// WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -// Lesser General Public License for more details. -// -// You should have received a copy of the GNU Lesser General Public -// License along with this library; if not, write to the Free Software -// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA -using System; using System.IO; using Duplicati.Library.Utility; using System.Collections.Generic; -namespace Duplicati.Library.Main.Operation -{ - public class RestoreHandlerMetadataStorage : IDisposable - { private TempFile m_temp; private FileStream m_stream; private long m_entries; private long m_filepos; - public RestoreHandlerMetadataStorage() - { m_temp = new TempFile(); m_stream = File.Open(m_temp, FileMode.Truncate, FileAccess.ReadWrite, FileShare.None); - } public void Add(string path, Stream data) { var datalen = data.Length; if (datalen > Int32.MaxValue) throw new ArgumentOutOfRangeException("Metadata is larger than int32"); var pathbytes = System.Text.Encoding.UTF8.GetBytes(path); var pathlen = BitConverter.GetBytes(pathbytes.LongLength); var entrylen = BitConverter.GetBytes(datalen); var totalsize = pathbytes.Length + pathlen.Length + entrylen.Length + datalen; m_stream.Position = m_filepos; m_stream.Write(pathlen, 0, pathlen.Length); m_stream.Write(pathbytes, 0, pathbytes.Length); m_stream.Write(entrylen, 0, entrylen.Length); data.CopyTo(m_stream); if (m_stream.Position != m_filepos + totalsize) throw new Exception("Bad file write!"); m_filepos += totalsize; m_entries++; } private void CheckedRead(byte[] buffer, int offset, int count) { int r; while (count > 0 && (r = m_stream.Read(buffer, offset, count)) > 0) { offset += r; count -= r; } if (count != 0) throw new Exception("Bad file read"); } public IEnumerable> Records { get { long pos = 0; var bf = BitConverter.GetBytes(0L); var buf = new byte[8 * 1024]; Logging.Log.WriteMessage(string.Format("The metadata storage file has {0} entries and takes up {1}", m_entries, Library.Utility.Utility.FormatSizeString(m_stream.Length)), Duplicati.Library.Logging.LogMessageType.Profiling); using(new Logging.Timer("Read metadata from file")) for(var e = 0L; e 
diff --git a/Duplicati/Library/Main/Operation/RestoreHandlerMetadataStorage.cs b/Duplicati/Library/Main/Operation/RestoreHandlerMetadataStorage.cs
index 39b663d17..83897fa91 100644
--- a/Duplicati/Library/Main/Operation/RestoreHandlerMetadataStorage.cs
+++ b/Duplicati/Library/Main/Operation/RestoreHandlerMetadataStorage.cs
@@ -1,27 +1 @@
-// Copyright (C) 2015, The Duplicati Team
-// http://www.duplicati.com, info@duplicati.com
-//
-// This library is free software; you can redistribute it and/or modify
-// it under the terms of the GNU Lesser General Public License as
-// published by the Free Software Foundation; either version 2.1 of the
-// License, or (at your option) any later version.
-//
-// This library is distributed in the hope that it will be useful, but
-// WITHOUT ANY WARRANTY; without even the implied warranty of
-// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
-// Lesser General Public License for more details.
-//
-// You should have received a copy of the GNU Lesser General Public
-// License along with this library; if not, write to the Free Software
-// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
-using System;
-using System.IO;
-using Duplicati.Library.Utility;
-using System.Collections.Generic;
-
-namespace Duplicati.Library.Main.Operation
-{
-    public class RestoreHandlerMetadataStorage : IDisposable
-    {
-        private TempFile m_temp;
-        private FileStream m_stream;
-        private long m_entries;
-        private long m_filepos;
-
-        public RestoreHandlerMetadataStorage()
-        {
-            m_temp = new TempFile();
-            m_stream = File.Open(m_temp, FileMode.Truncate, FileAccess.ReadWrite, FileShare.None);
-        }
-
-        public void Add(string path, Stream data)
-        {
-            var datalen = data.Length;
-            if (datalen > Int32.MaxValue)
-                throw new ArgumentOutOfRangeException("Metadata is larger than int32");
-
-            var pathbytes = System.Text.Encoding.UTF8.GetBytes(path);
-            var pathlen = BitConverter.GetBytes(pathbytes.LongLength);
-            var entrylen = BitConverter.GetBytes(datalen);
-            var totalsize = pathbytes.Length + pathlen.Length + entrylen.Length + datalen;
-
-            m_stream.Position = m_filepos;
-            m_stream.Write(pathlen, 0, pathlen.Length);
-            m_stream.Write(pathbytes, 0, pathbytes.Length);
-            m_stream.Write(entrylen, 0, entrylen.Length);
-            data.CopyTo(m_stream);
-
-            if (m_stream.Position != m_filepos + totalsize)
-                throw new Exception("Bad file write!");
-
-            m_filepos += totalsize;
-            m_entries++;
-        }
-
-        private void CheckedRead(byte[] buffer, int offset, int count)
-        {
-            int r;
-            while (count > 0 && (r = m_stream.Read(buffer, offset, count)) > 0)
-            {
-                offset += r;
-                count -= r;
-            }
-
-            if (count != 0)
-                throw new Exception("Bad file read");
-        }
-
-        public IEnumerable<KeyValuePair<string, Stream>> Records
-        {
-            get
-            {
-                long pos = 0;
-                var bf = BitConverter.GetBytes(0L);
-                var buf = new byte[8 * 1024];
-
-                Logging.Log.WriteMessage(string.Format("The metadata storage file has {0} entries and takes up {1}", m_entries, Library.Utility.Utility.FormatSizeString(m_stream.Length)), Duplicati.Library.Logging.LogMessageType.Profiling);
-                using(new Logging.Timer("Read metadata from file"))
-                    for(var e = 0L; e < m_entries; e++)
-                    {
-                        m_stream.Position = pos;
-                        CheckedRead(bf, 0, bf.Length);
-                        var stringlen = BitConverter.ToInt64(bf, 0);
-                        var strbuf = stringlen > buf.Length ? new byte[stringlen] : buf;
-                        CheckedRead(strbuf, 0, (int)stringlen);
-                        var path = System.Text.Encoding.UTF8.GetString(strbuf, 0, (int)stringlen);
-
-                        CheckedRead(bf, 0, bf.Length);
-                        var datalen = BitConverter.ToInt64(bf, 0);
-                        if (datalen > Int32.MaxValue)
-                            throw new ArgumentOutOfRangeException("Metadata is larger than int32");
-
-                        var databuf = datalen > buf.Length ? new byte[datalen] : buf;
-                        CheckedRead(databuf, 0, (int)datalen);
-
-                        pos += datalen + stringlen + bf.Length + bf.Length;
-
-                        yield return new KeyValuePair<string, Stream>(path, new MemoryStream(databuf, 0, (int)datalen));
-                    }
-            }
-        }
-
-        #region IDisposable implementation
-        public void Dispose()
-        {
-            if (m_stream != null)
-                try { m_stream.Dispose(); }
-                catch { }
-                finally { m_stream = null; }
-
-            if (m_temp != null)
-                try { m_temp.Dispose(); }
-                catch { }
-                finally { m_temp = null; }
-        }
-        #endregion
-    }
-}
+// Copyright (C) 2015, The Duplicati Team
+// http://www.duplicati.com, info@duplicati.com
+//
+// This library is free software; you can redistribute it and/or modify
+// it under the terms of the GNU Lesser General Public License as
+// published by the Free Software Foundation; either version 2.1 of the
+// License, or (at your option) any later version.
+//
+// This library is distributed in the hope that it will be useful, but
+// WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+// Lesser General Public License for more details.
+//
+// You should have received a copy of the GNU Lesser General Public
+// License along with this library; if not, write to the Free Software
+// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+using System;
+using System.IO;
+using Duplicati.Library.Utility;
+using System.Collections.Generic;
+
+namespace Duplicati.Library.Main.Operation
+{
+    public class RestoreHandlerMetadataStorage : IDisposable
+    {
+        private TempFile m_temp;
+        private FileStream m_stream;
+        private long m_entries;
+        private long m_filepos;
+
+        public RestoreHandlerMetadataStorage()
+        {
+            m_temp = new TempFile();
+            m_stream = File.Open(m_temp, FileMode.Truncate, FileAccess.ReadWrite, FileShare.None);
+        }
+
+        public void Add(string path, Stream data)
+        {
+            var datalen = data.Length;
+            if (datalen > Int32.MaxValue)
+                throw new ArgumentOutOfRangeException("Metadata is larger than int32");
+
+            var pathbytes = System.Text.Encoding.UTF8.GetBytes(path);
+            var pathlen = BitConverter.GetBytes(pathbytes.LongLength);
+            var entrylen = BitConverter.GetBytes(datalen);
+            var totalsize = pathbytes.Length + pathlen.Length + entrylen.Length + datalen;
+
+            m_stream.Position = m_filepos;
+            m_stream.Write(pathlen, 0, pathlen.Length);
+            m_stream.Write(pathbytes, 0, pathbytes.Length);
+            m_stream.Write(entrylen, 0, entrylen.Length);
+            data.CopyTo(m_stream);
+
+            if (m_stream.Position != m_filepos + totalsize)
+                throw new Exception("Bad file write!");
+
+            m_filepos += totalsize;
+            m_entries++;
+        }
+
+        private void CheckedRead(byte[] buffer, int offset, int count)
+        {
+            int r;
+            while (count > 0 && (r = m_stream.Read(buffer, offset, count)) > 0)
+            {
+                offset += r;
+                count -= r;
+            }
+
+            if (count != 0)
+                throw new Exception("Bad file read");
+        }
+
+        public IEnumerable<KeyValuePair<string, Stream>> Records
+        {
+            get
+            {
+                long pos = 0;
+                var bf = BitConverter.GetBytes(0L);
+                var buf = new byte[8 * 1024];
+
+                Logging.Log.WriteMessage(string.Format("The metadata storage file has {0} entries and takes up {1}", m_entries, Library.Utility.Utility.FormatSizeString(m_stream.Length)), Duplicati.Library.Logging.LogMessageType.Profiling);
+                using(new Logging.Timer("Read metadata from file"))
+                    for(var e = 0L; e < m_entries; e++)
+                    {
+                        m_stream.Position = pos;
+                        CheckedRead(bf, 0, bf.Length);
+                        var stringlen = BitConverter.ToInt64(bf, 0);
+                        var strbuf = stringlen > buf.Length ? new byte[stringlen] : buf;
+                        CheckedRead(strbuf, 0, (int)stringlen);
+                        var path = System.Text.Encoding.UTF8.GetString(strbuf, 0, (int)stringlen);
+
+                        CheckedRead(bf, 0, bf.Length);
+                        var datalen = BitConverter.ToInt64(bf, 0);
+                        if (datalen > Int32.MaxValue)
+                            throw new ArgumentOutOfRangeException("Metadata is larger than int32");
+
+                        var databuf = datalen > buf.Length ? new byte[datalen] : buf;
+                        CheckedRead(databuf, 0, (int)datalen);
+
+                        pos += datalen + stringlen + bf.Length + bf.Length;
+
+                        yield return new KeyValuePair<string, Stream>(path, new MemoryStream(databuf, 0, (int)datalen));
+                    }
+            }
+        }
+
+        #region IDisposable implementation
+        public void Dispose()
+        {
+            if (m_stream != null)
+                try { m_stream.Dispose(); }
+                catch { }
+                finally { m_stream = null; }
+
+            if (m_temp != null)
+                try { m_temp.Dispose(); }
+                catch { }
+                finally { m_temp = null; }
+        }
+        #endregion
+    }
+}
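The class rewritten above spools restore metadata to a temporary file as a flat sequence of length-prefixed records: an 8-byte path length, the UTF-8 path bytes, an 8-byte data length, then the metadata bytes; Add appends records and Records replays them in order. Below is a self-contained sketch of the same framing against an arbitrary Stream; WriteRecord, ReadRecord, and ReadExactly are illustrative names, not members of the class.

    using System;
    using System.Collections.Generic;
    using System.IO;
    using System.Text;

    public static class RecordFramingSketch
    {
        // Same layout as RestoreHandlerMetadataStorage.Add:
        // [8-byte path length][UTF-8 path][8-byte data length][data]
        public static void WriteRecord(Stream s, string path, byte[] data)
        {
            var pathbytes = Encoding.UTF8.GetBytes(path);
            s.Write(BitConverter.GetBytes(pathbytes.LongLength), 0, 8);
            s.Write(pathbytes, 0, pathbytes.Length);
            s.Write(BitConverter.GetBytes((long)data.Length), 0, 8);
            s.Write(data, 0, data.Length);
        }

        public static KeyValuePair<string, byte[]> ReadRecord(Stream s)
        {
            var lenbuf = new byte[8];
            ReadExactly(s, lenbuf);
            var pathbytes = new byte[BitConverter.ToInt64(lenbuf, 0)];
            ReadExactly(s, pathbytes);
            ReadExactly(s, lenbuf);
            var data = new byte[BitConverter.ToInt64(lenbuf, 0)];
            ReadExactly(s, data);
            return new KeyValuePair<string, byte[]>(Encoding.UTF8.GetString(pathbytes), data);
        }

        // Loops until the buffer is full, mirroring the CheckedRead helper above;
        // Stream.Read may legally return fewer bytes than requested.
        private static void ReadExactly(Stream s, byte[] buffer)
        {
            int offset = 0, r;
            while (offset < buffer.Length && (r = s.Read(buffer, offset, buffer.Length - offset)) > 0)
                offset += r;
            if (offset != buffer.Length)
                throw new EndOfStreamException("Bad file read");
        }
    }

Note that BitConverter emits the machine's native byte order, so the format is endianness-dependent; that is harmless for this class because the temp file never leaves the process that wrote it.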
diff --git a/Duplicati/Library/Main/Options.cs b/Duplicati/Library/Main/Options.cs
index 79111d681..9b8bdee2a 100644
--- a/Duplicati/Library/Main/Options.cs
+++ b/Duplicati/Library/Main/Options.cs
@@ -572,9 +572,9 @@ namespace Duplicati.Library.Main
         {
             get
             {
-                string v;
-                m_options.TryGetValue("control-files", out v);
-                return v;
+                string v;
+                m_options.TryGetValue("control-files", out v);
+                return v;
             }
         }
 
@@ -740,12 +740,12 @@ namespace Duplicati.Library.Main
         {
             get
             {
-                string v;
-                m_options.TryGetValue("prefix", out v);
-                if (!string.IsNullOrEmpty(v))
-                    return v;
-
-                return "duplicati";
+                string v;
+                m_options.TryGetValue("prefix", out v);
+                if (!string.IsNullOrEmpty(v))
+                    return v;
+
+                return "duplicati";
             }
         }
 
@@ -802,8 +802,8 @@ namespace Duplicati.Library.Main
             var versions = this.Version;
             if (versions != null && versions.Length > 0)
                 foreach (var ix in versions.Distinct())
-                    if (ix >= 0 && ix < backups.Length)
-                        res.Add(backups[ix]);
+                    if (ix >= 0 && ix < backups.Length)
+                        res.Add(backups[ix]);
 
             var keepVersions = this.KeepVersions;
             if (keepVersions > 0 && keepVersions < backups.Length)
@@ -1663,10 +1663,10 @@ namespace Duplicati.Library.Main
         {
             get
             {
-                if (m_options.ContainsKey("dry-run"))
-                    return Library.Utility.Utility.ParseBoolOption(m_options, "dry-run");
-                else
-                    return Library.Utility.Utility.ParseBoolOption(m_options, "dryrun");
+                if (m_options.ContainsKey("dry-run"))
+                    return Library.Utility.Utility.ParseBoolOption(m_options, "dry-run");
+                else
+                    return Library.Utility.Utility.ParseBoolOption(m_options, "dryrun");
             }
         }
 
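The last hunk above is the Dryrun property, which honors both the documented dry-run key and the legacy dryrun spelling by checking for the primary key before falling back. A sketch of that alias-resolution pattern follows; the set of truthy strings in ParseBoolOption is an assumption for illustration and may not match the exact rules in Library.Utility.Utility.

    using System;
    using System.Collections.Generic;

    public static class OptionSketch
    {
        // Assumed semantics: a key present with no value counts as enabled,
        // otherwise the value must be a recognized truthy string.
        public static bool ParseBoolOption(IDictionary<string, string> opts, string key)
        {
            string v;
            if (!opts.TryGetValue(key, out v))
                return false;
            if (string.IsNullOrEmpty(v))
                return true;

            switch (v.Trim().ToLowerInvariant())
            {
                case "1":
                case "on":
                case "true":
                case "yes":
                    return true;
                default:
                    return false;
            }
        }

        // Prefer the documented name; fall back to the legacy alias.
        public static bool GetDryRun(IDictionary<string, string> opts)
        {
            return opts.ContainsKey("dry-run")
                ? ParseBoolOption(opts, "dry-run")
                : ParseBoolOption(opts, "dryrun");
        }
    }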
diff --git a/Duplicati/Library/Main/Volumes/VolumeBase.cs b/Duplicati/Library/Main/Volumes/VolumeBase.cs
index ff7e8978e..c96e03e93 100644
--- a/Duplicati/Library/Main/Volumes/VolumeBase.cs
+++ b/Duplicati/Library/Main/Volumes/VolumeBase.cs
@@ -18,8 +18,8 @@ namespace Duplicati.Library.Main.Volumes
         public string Encoding;
         public long Blocksize;
         public string BlockHash;
-        public string FileHash;
-        public string AppVersion;
+        public string FileHash;
+        public string AppVersion;
 
         public static string GetManifestInstance(long blocksize, string blockhash, string filehash)
         {
@@ -31,7 +31,7 @@ namespace Duplicati.Library.Main.Volumes
                 Created = Library.Utility.Utility.SerializeDateTime(DateTime.UtcNow),
                 BlockHash = blockhash,
                 FileHash = filehash,
-                AppVersion = System.Reflection.Assembly.GetExecutingAssembly().GetName().Version.ToString()
+                AppVersion = System.Reflection.Assembly.GetExecutingAssembly().GetName().Version.ToString()
             });
         }
 
@@ -62,25 +62,25 @@
         public string EncryptionModule { get; private set; }
         public Library.Interface.IFileEntry File { get; private set; }
 
-        internal static readonly IDictionary<RemoteVolumeType, string> REMOTE_TYPENAME_MAP;
-        internal static readonly IDictionary<string, RemoteVolumeType> REVERSE_REMOTE_TYPENAME_MAP;
+        internal static readonly IDictionary<RemoteVolumeType, string> REMOTE_TYPENAME_MAP;
+        internal static readonly IDictionary<string, RemoteVolumeType> REVERSE_REMOTE_TYPENAME_MAP;
         private static readonly System.Text.RegularExpressions.Regex FILENAME_REGEXP;
 
-        static ParsedVolume()
-        {
-            var dict = new Dictionary<RemoteVolumeType, string>();
-            dict[RemoteVolumeType.Blocks] = "dblock";
-            dict[RemoteVolumeType.Files] = "dlist";
-            dict[RemoteVolumeType.Index] = "dindex";
-
-            var reversedict = new Dictionary<string, RemoteVolumeType>(System.StringComparer.InvariantCultureIgnoreCase);
-            foreach(var x in dict)
-                reversedict[x.Value] = x.Key;
-
-            REMOTE_TYPENAME_MAP = dict;
-            REVERSE_REMOTE_TYPENAME_MAP = reversedict;
-            FILENAME_REGEXP = new System.Text.RegularExpressions.Regex(@"(?<prefix>[^\-]+)\-(([i|b|I|B](?<guid>[0-9A-Fa-f]+))|((?