// dmdb/core/ExportProcess.cs

using System;
using System.Collections.Generic;
using System.IO;
using ICSharpCode.SharpZipLib.Zip;
using dezentrale.model;

namespace dezentrale.core
{
    //! \short Export functionality for the database contents
    //! \brief The normal export workflow is to pack every xml file from the
    //!        data directory to the zip file configured in the Configuration.
    //!        After that, a mercurial or git commit is issued, followed by a push.
    public class ExportProcess : ImportExportBase
    {
        public ExportProcess()
        {
            Caption = "Database export";
            Steps = 6;
        }
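
        // Typical usage (sketch; the exact entry point lives in ImportExportBase, which is not
        // shown here): construct an ExportProcess, assign ImportExportSettings, MemberDir,
        // OutputDir and LogTarget, then start the process through the base class, which is
        // assumed to invoke Run() below.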
        //! \short Run export
        //! \brief Zips the files in the data folder, then optionally runs gpg to encrypt
        //!        the archive, and commits/pushes it to a hg or git repo.
        //! \return true if the export was successful.
        //! \note May lead to a broken repository structure ("multiple heads")
        //!       if the push fails.
        //! \todo Re-implement validation (commented out)
        protected override bool Run()
        {
            LogTarget.StepStarted(0, "Preparing export");
            if (ImportExportSettings == null)
            {
                LogTarget.LogLine("MemberImportExport data class not set.", LogEvent.ELogLevel.Error, "ExportProcess");
                return false;
            }
            if (MemberDir == null)
            {
                LogTarget.LogLine("Member directory not set.", LogEvent.ELogLevel.Error, "ExportProcess");
                return false;
            }
            else if (!Directory.Exists(MemberDir))
            {
                LogTarget.LogLine($"Cannot find directory '{MemberDir}'", LogEvent.ELogLevel.Error, "ExportProcess");
                return false;
            }
            if (OutputDir == null)
            {
                LogTarget.LogLine("Output directory not set.", LogEvent.ELogLevel.Error, "ExportProcess");
                return false;
            }
            else if (!Directory.Exists(OutputDir))
            {
                LogTarget.LogLine($"Cannot find directory '{OutputDir}'", LogEvent.ELogLevel.Error, "ExportProcess");
                return false;
            }

            //TBD: Check if newer version of database is online
            LogTarget.StepStarted(1, $"Fetching commits from remote location");
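            // Note: no pull/fetch from the remote is actually performed here yet (see the TBD
            // above); pushing without first merging remote changes is what can lead to the
            // "multiple heads" situation mentioned in the class documentation.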
            string outFile = Path.Combine(OutputDir, ImportExportSettings.ZipFile);
            try
            {
                // Depending on the directory this could be very large and would require more attention
                // in a commercial package.
                List<string> filenames = new List<string>();
                filenames.AddRange(Directory.GetFiles(MemberDir, "*.xml")); // All serializable datasets
                filenames.AddRange(Directory.GetFiles(MemberDir, "*.zip")); // Compressed blobs, e.g. attachments
                if (File.Exists(outFile))
                {
                    LogTarget.LogLine($"Creating backup of old {outFile}", LogEvent.ELogLevel.Info, "ExportProcess");
                    XmlData.CreateBackup(outFile, true, "ExportProcess", true);
                }
                LogTarget.StepStarted(2, $"Packing {ImportExportSettings.ZipFile} ({filenames.Count} files)");

                // 'using' statements guarantee the stream is closed properly, which is otherwise a big
                // source of problems. It's exception-safe as well, which is great.
                using (ZipOutputStream s = new ZipOutputStream(File.Create(outFile)))
                {
                    s.SetLevel(9); // 0 - store only ... 9 - best compression
                    s.Password = ImportExportSettings.ZipPassword; // null is a desired value for "no encryption"
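                    // Note (assumption about the library defaults): SharpZipLib applies the legacy
                    // ZipCrypto scheme when a stream-level Password is set and no per-entry AES key
                    // size is configured, so this is best treated as light protection.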
                    byte[] buffer = new byte[4096];
                    foreach (string file in filenames)
                    {
                        //LogTarget.LogLine($"Packing {file}", LogEvent.ELogLevel.Trace, "ExportProcess");

                        // Using GetFileName makes the result compatible with XP,
                        // as the resulting path is not absolute.
                        var entry = new ZipEntry(Path.GetFileName(file));

                        // Set up the entry data as required.
                        // Crc and size are handled by the library for seekable streams,
                        // so there is no need to do them here.
                        // Could also use the last write time or similar for the file.
                        entry.DateTime = DateTime.Now;
                        s.PutNextEntry(entry);

                        using (FileStream fs = File.OpenRead(file))
                        {
                            // Using a fixed-size buffer here makes no noticeable difference for output
                            // but keeps a lid on memory usage.
                            int sourceBytes;
                            do
                            {
                                sourceBytes = fs.Read(buffer, 0, buffer.Length);
                                s.Write(buffer, 0, sourceBytes);
                            } while (sourceBytes > 0);
                        }
                    }
                    // Finish/Close aren't strictly needed, as the using statement does this automatically.
                    // Finish is important to ensure trailing information for a Zip file is appended; without it
                    // the created file would be invalid.
                    s.Finish();
                    // Close is important to wrap things up and unlock the file.
                    s.Close();
                }
            }
            catch (Exception ex)
            {
                LogTarget.LogLine($"Exception during packing: {ex.Message}", LogEvent.ELogLevel.Error, "ExportProcess");
                // No need to rethrow the exception; for our purposes it's handled.
                return false;
            }

            if (ImportExportSettings.GpgEnabled)
            {
                LogTarget.StepStarted(3, $"Encrypting {ImportExportSettings.GpgFile}");
                LogTarget.LogLine($"Using GPG to encrypt {Path.Combine(OutputDir, ImportExportSettings.GpgFile)}", LogEvent.ELogLevel.Info, "ExportProcess");
if (!Gpg($"--output \"{Path.Combine(OutputDir, ImportExportSettings.GpgFile)}\" -c \"{outFile}\"", ImportExportSettings.GpgPassword, LogTarget)) return false;
outFile = Path.Combine(OutputDir, ImportExportSettings.GpgFile);
}

            LogTarget.StepStarted(4, $"Committing and pushing");
            if (ImportExportSettings.HgEnabled)
            {
                LogTarget.LogLine($"Using HG to commit / push {outFile}", LogEvent.ELogLevel.Info, "ExportProcess");
                // These might fail, as the repo may already exist / the file may already be in the repo.
                RunProcess("hg", "init", OutputDir, LogTarget);
                RunProcess("hg", $"add {outFile}", OutputDir, LogTarget);
                // Now, committing is more interesting.
                if (!RunProcess("hg",
                                "commit"
                                + $" --message \"dezentrale-members.exe --mode=export\nProgram version={Program.VersionString}\""
                                + $" --user \"{ImportExportSettings.HgUserName}\"", OutputDir, LogTarget))
                    return false;
                if (!RunProcess("hg", $"--config auth.rc.prefix={ImportExportSettings.HgURL} --config auth.rc.username={ImportExportSettings.HgUserName} --config auth.rc.password={ImportExportSettings.HgPassword} push {ImportExportSettings.HgURL}", OutputDir, LogTarget))
                    return false;
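                // Caveat: there is no preceding "hg pull"/merge, so the push can be rejected or
                // create multiple heads when the remote has newer commits (see the \note above).
                // Passing HgPassword via --config also makes it visible in the local process list.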
            }

            //TBD: Validate
            LogTarget.StepStarted(5, $"Validating the exported data");
            // Extract everything into a verify directory and compare it against the originals:
            /*
            string verifyDirectory = Path.Combine(memberDirectory, "verify");
            try { Directory.CreateDirectory(verifyDirectory); }
            catch (Exception ex) { Console.WriteLine($"Error while creating verify directory:\n{ex.Message}"); return false; }

            // Move all *.xml to *.bak in the verify directory
            string[] filenames = Directory.GetFiles(verifyDirectory, "*.xml");
            foreach (string f in filenames)
            {
                string fileName = Path.Combine(verifyDirectory, f);
                string backupName = $"{fileName}.bak";
                if (File.Exists(backupName)) File.Delete(backupName);
                File.Move(fileName, backupName);
                File.Delete(fileName);
            }
            if (!Import(verifyDirectory, outputDir)) return false;
            try
            {
                Console.WriteLine("Verify: Checking files");
                filenames = Directory.GetFiles(memberDirectory, "*.xml");
                foreach (string fileWithPath in filenames)
                {
                    string fileName = Path.GetFileName(fileWithPath);
                    Console.WriteLine($"Checking for {fileName}");
                    string origFile = Path.Combine(memberDirectory, fileName);
                    string compareFile = Path.Combine(verifyDirectory, fileName);
                    if (!File.Exists(compareFile))
                    {
                        Console.WriteLine($"File doesn't exist: {compareFile}");
                        return false;
                    }
                    if (!FilesCompare(origFile, compareFile))
                    {
                        Console.WriteLine($"File comparison failed between: \"{origFile}\" and \"{compareFile}\"");
                        return false;
                    }
                }
                filenames = Directory.GetFiles(verifyDirectory, "*.xml");
                foreach (string fileWithPath in filenames)
                {
                    string fileName = Path.GetFileName(fileWithPath);
                    string origFile = Path.Combine(memberDirectory, fileName);
                    string compareFile = Path.Combine(verifyDirectory, fileName);
                    if (!File.Exists(origFile))
                    {
                        Console.WriteLine($"Found extra xml in verify folder: {compareFile}");
                        return false;
                    }
                }
                Console.WriteLine("Verify: Done. All OK");
            }
            catch (Exception ex)
            {
                Console.WriteLine($"Exception during verify: {ex.Message}");
                // No need to rethrow the exception; for our purposes it's handled.
                return false;
            }
            */
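            // Until the validation above is re-implemented (see the \todo), the export is reported
            // as successful without reading the archive back for verification.

            // Record the export in the program configuration and persist it right away.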
            Program.config.LastDbExport = DateTime.Now;
            Program.config.LastDbImport = DateTime.Now;
            Program.config.DbChangedSinceExport = false;
            XmlData.SaveToFile(Program.ConfigFile, Program.config);

            LogTarget.LogLine("Export complete!", LogEvent.ELogLevel.Info, "ExportProcess");
            return true;
        }
    }
}