How to verify duplicati-verification.json?

I want to verify the integrity of the files of a remote backup. I have the duplicati-verification.json.

Each entry has a Name and a Hash. The hash is a SHA-256 digest encoded as Base64. How can I verify the checksums of all the files, ideally with a command-line call?

I am on Windows. Any tool suggestions, or does Duplicati have a command I don't know about?

OK, answering myself… I have written a C# program that verifies the file size and checks the hash of every file listed in a verification file.

If no argument is given, it looks for the verification file in the current directory; otherwise pass the path of the backup as the argument. It is just two classes. Feel free to use it!
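The core check for a single file boils down to the following (a minimal sketch with a placeholder file name and hash value, just to show the idea; the full program below adds the JSON parsing, size check and progress output):

using System;
using System.IO;
using System.Security.Cryptography;

class HashCheckSketch {
   static void Main() {
      // Placeholders: take Name and Hash from an entry in duplicati-verification.json
      string fileName = @".\duplicati-20240101T000000Z.dlist.zip.aes";
      string expectedHashBase64 = "…";

      using (SHA256 sha256 = SHA256.Create())
      using (FileStream stream = File.OpenRead(fileName)) {
         // Hash the file, Base64-encode the digest and compare it with the value from the JSON
         string actualHashBase64 = Convert.ToBase64String(sha256.ComputeHash(stream));
         Console.WriteLine(actualHashBase64 == expectedHashBase64 ? "OK" : "MISMATCH");
      }
   }
}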

VerifyDuplicati.cs

using System;
using System.Collections;
using System.Collections.Generic;
using System.IO;
using System.Security.Cryptography;

namespace VerifyDuplicati {
  class VerifyDuplicati {
     private static readonly SHA256 mySHA256 = SHA256.Create();

     static void Main(string[] args) {
        string path = ".\\";
        if (args.Length == 1) {
           path = args[0];
        }
        if (args.Length > 1) {
           Console.WriteLine("Argument error");
        } else {
           DirectoryInfo di = new DirectoryInfo(path); // use the supplied path, or the current directory by default
           Analyse(di);
        }
     }

     private static void Analyse(DirectoryInfo rootPath) {
        FileInfo fi = new FileInfo(Path.Combine(rootPath.FullName, "duplicati-verification.json"));
        List<DuplicatiFile> files = ReadVerificationFile(fi.FullName);

        Console.WriteLine("Processing " + files.Count + " files");

        double totalBytes = 0;
        foreach (DuplicatiFile duplicatiFile in files) {
           totalBytes += duplicatiFile.Size;
        }

        bool success = false;
        double processedBytes = 0;
        foreach (DuplicatiFile duplicatiFile in files) {
           success = CheckFile(duplicatiFile, rootPath);
           if (success) {
              processedBytes += duplicatiFile.Size;
              long percent = (long) (processedBytes * 100.0d / totalBytes);
              Console.WriteLine("Processed " + duplicatiFile.Name + " success , " + percent + "%");
           } else { 
              break;
           }
        }

        if (success) {
           Console.WriteLine("SUCCESS");
           Environment.Exit(0);
        } else { 
           Console.WriteLine("FAILED");
           Environment.Exit(1);
        }
     }

     private static bool CheckFile(DuplicatiFile duplicatiFile, DirectoryInfo rootPath) {
        // Locate the backend file named in the verification entry, relative to the backup folder.
        FileInfo fi = new FileInfo(Path.Combine(rootPath.FullName, duplicatiFile.Name));

        try {
           if (duplicatiFile.Size != fi.Length) {
              Console.WriteLine("Size differs " + fi.Name);
              return false;
           }
           // Hash the file and compare the digest with the Base64-encoded value from the verification file.
           byte[] hashValue;
           using (FileStream fileStream = fi.OpenRead()) {
              hashValue = mySHA256.ComputeHash(fileStream);
           }

           byte[] checkValue = Convert.FromBase64String(duplicatiFile.Hash);

           if (!StructuralComparisons.StructuralEqualityComparer.Equals(hashValue, checkValue)) {
              Console.WriteLine("Hash mismatch " + fi.Name);
              return false;
           }
        } catch (IOException e) {
           Console.WriteLine($"I/O Exception: {e.Message}");
           return false;
        } catch (UnauthorizedAccessException e) {
           Console.WriteLine($"Access Exception: {e.Message}");
           return false;
        }

        return true;
     }

     private static List<DuplicatiFile> ReadVerificationFile(string fullName) {
        string json = File.ReadAllText(fullName);
        List<DuplicatiFile> files = DuplicatiFile.FromJson(json);
        return files;
     }
  }

}

DuplicatiFile.cs

// generated with https://app.quicktype.io/#l=cs&r=json2csharp

using System;
using System.Collections.Generic;

using System.Globalization;
using Newtonsoft.Json;
using Newtonsoft.Json.Converters;

namespace VerifyDuplicati {
   public partial class DuplicatiFile {
      [JsonProperty("Name")]
      public string Name { get; set; }

      [JsonProperty("Hash")]
      public string Hash { get; set; }

      [JsonProperty("Size")]
      public long Size { get; set; }

      [JsonProperty("Type")]
      public long Type { get; set; }

      [JsonProperty("State")]
      public long State { get; set; }

      [JsonProperty("deleteGracePeriod")]
      public DateTimeOffset DeleteGracePeriod { get; set; }
   }

   public partial class DuplicatiFile {
      public static List<DuplicatiFile> FromJson(string json) => JsonConvert.DeserializeObject<List<DuplicatiFile>>(json, Converter.Settings);
   }

   public static class Serialize {
      public static string ToJson(this List<DuplicatiFile> self) => JsonConvert.SerializeObject(self, Converter.Settings);
   }

   internal static class Converter {
      public static readonly JsonSerializerSettings Settings = new JsonSerializerSettings {
         MetadataPropertyHandling = MetadataPropertyHandling.Ignore,
         DateParseHandling = DateParseHandling.None,
         Converters = {
            new IsoDateTimeConverter { DateTimeStyles = DateTimeStyles.AssumeUniversal }
         },
      };
   }
}
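
To use it, compile the two files into a console application (a reference to the Newtonsoft.Json package is required) and run the resulting executable against the folder that contains the backup files and duplicati-verification.json, e.g. VerifyDuplicati.exe "D:\duplicati-backup". Without an argument it checks the current directory, and it exits with code 0 on success and 1 on failure.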

Adding to this, there is a PowerShell script for verification here:

And a Python script for verification here:


The two DuplicatiVerify scripts are also in the Duplicati installation’s utility-scripts subfolder if that’s easier.
