Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

fix: #22 wrong file retention with multiple logs in the same dir #23

Open
wants to merge 1 commit into
base: master
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
287 changes: 148 additions & 139 deletions src/Serilog.Sinks.File.Archive/ArchiveHooks.cs
Original file line number Diff line number Diff line change
Expand Up @@ -12,154 +12,163 @@ namespace Serilog.Sinks.File.Archive
/// Archives log files before they are deleted by Serilog's retention mechanism, copying them to another location
/// and optionally compressing them using GZip
/// </summary>
public class ArchiveHooks : FileLifecycleHooks
{
private readonly CompressionLevel compressionLevel;
private readonly int retainedFileCountLimit;
private readonly string targetDirectory;

/// <summary>
/// Create a new ArchiveHooks, which will archive completed log files before they are deleted by Serilog's retention mechanism
/// </summary>
/// <param name="compressionLevel">
/// Level of GZIP compression to use. Use CompressionLevel.NoCompression if no compression is required
/// </param>
/// <param name="targetDirectory">
/// Directory in which to archive files to. Use null if compressed, archived files should remain in the same folder
/// </param>
public ArchiveHooks(CompressionLevel compressionLevel = CompressionLevel.Fastest, string targetDirectory = null)
{
if (compressionLevel == CompressionLevel.NoCompression && targetDirectory == null)
public class ArchiveHooks : FileLifecycleHooks
{
private readonly CompressionLevel compressionLevel;
private readonly int retainedFileCountLimit;
private readonly string targetDirectory;
private static readonly object ArchiveLock = new object();

/// <summary>
/// Create a new ArchiveHooks, which will archive completed log files before they are deleted by Serilog's retention mechanism
/// </summary>
/// <param name="compressionLevel">
/// Level of GZIP compression to use. Use CompressionLevel.NoCompression if no compression is required
/// </param>
/// <param name="targetDirectory">
/// Directory in which to archive files to. Use null if compressed, archived files should remain in the same folder
/// </param>
public ArchiveHooks(CompressionLevel compressionLevel = CompressionLevel.Fastest, string targetDirectory = null)
{
if (compressionLevel == CompressionLevel.NoCompression && targetDirectory == null)
throw new ArgumentException($"Either {nameof(compressionLevel)} or {nameof(targetDirectory)} must be set");

this.compressionLevel = compressionLevel;
this.targetDirectory = targetDirectory;
}

/// <summary>
/// Create a new ArchiveHooks, which will archive completed log files before they are deleted by Serilog's retention mechanism
/// </summary>
/// <param name="retainedFileCountLimit">Limit of Archived files.</param>
/// <param name="compressionLevel">
/// Level of GZIP compression to use. Use CompressionLevel.NoCompression if no compression is required
/// </param>
/// <param name="targetDirectory">
/// Directory in which to archive files to. Use null if compressed, archived files should remain in the same folder
/// </param>
this.compressionLevel = compressionLevel;
this.targetDirectory = targetDirectory;
}

/// <summary>
/// Create a new ArchiveHooks, which will archive completed log files before they are deleted by Serilog's retention mechanism
/// </summary>
/// <param name="retainedFileCountLimit">Limit of Archived files.</param>
/// <param name="compressionLevel">
/// Level of GZIP compression to use. Use CompressionLevel.NoCompression if no compression is required
/// </param>
/// <param name="targetDirectory">
/// Directory in which to archive files to. Use null if compressed, archived files should remain in the same folder
/// </param>
public ArchiveHooks(int retainedFileCountLimit, CompressionLevel compressionLevel = CompressionLevel.Fastest, string targetDirectory = null)
{
if (retainedFileCountLimit <= 0)
{
if (retainedFileCountLimit <= 0)
throw new ArgumentException($"{nameof(retainedFileCountLimit)} must be greater than zero", nameof(retainedFileCountLimit));
if (targetDirectory is not null && TokenExpander.IsTokenised(targetDirectory))
if (targetDirectory is not null && TokenExpander.IsTokenised(targetDirectory))
throw new ArgumentException($"{nameof(targetDirectory)} must not be tokenised when using {nameof(retainedFileCountLimit)}", nameof(targetDirectory));
if (compressionLevel == CompressionLevel.NoCompression && targetDirectory == null)
if (compressionLevel == CompressionLevel.NoCompression && targetDirectory == null)
throw new ArgumentException($"Either {nameof(compressionLevel)} or {nameof(targetDirectory)} must be set");

this.compressionLevel = compressionLevel;
this.retainedFileCountLimit = retainedFileCountLimit;
this.targetDirectory = targetDirectory;
}

public override void OnFileDeleting(string path)
{
try
{
// Use .gz file extension if we are going to compress the source file
var filename = this.compressionLevel != CompressionLevel.NoCompression
? Path.GetFileName(path) + ".gz"
: Path.GetFileName(path);

// Determine the target path for the current file
var currentTargetDir = this.targetDirectory != null
? TokenExpander.Expand(this.targetDirectory)
: Path.GetDirectoryName(path);

// Create the target directory, if it doesn't already exist
if (!Directory.Exists(currentTargetDir))
{
Directory.CreateDirectory(currentTargetDir!);
}

// Target file path
var targetPath = Path.Combine(currentTargetDir, filename);

// Do we need to compress the file, or simply copy it as-is?
if (this.compressionLevel == CompressionLevel.NoCompression)
{
System.IO.File.Copy(path, targetPath, true);
}
else
{
using (var sourceStream = new FileStream(path, FileMode.Open, FileAccess.Read, FileShare.Read))
this.compressionLevel = compressionLevel;
this.retainedFileCountLimit = retainedFileCountLimit;
this.targetDirectory = targetDirectory;
}

public override void OnFileDeleting(string path)
{
try
{
// Use .gz file extension if we are going to compress the source file
var filename = this.compressionLevel != CompressionLevel.NoCompression
? Path.GetFileName(path) + ".gz"
: Path.GetFileName(path);

// Determine the target path for the current file
var currentTargetDir = this.targetDirectory != null
? TokenExpander.Expand(this.targetDirectory)
: Path.GetDirectoryName(path);

lock (ArchiveLock)
{
// Create the target directory, if it doesn't already exist
if (!Directory.Exists(currentTargetDir))
{
Directory.CreateDirectory(currentTargetDir!);
}

// Target file path
var targetPath = Path.Combine(currentTargetDir, filename);

// Do we need to compress the file, or simply copy it as-is?
if (this.compressionLevel == CompressionLevel.NoCompression)
{
System.IO.File.Copy(path, targetPath, true);
}
else
{
using (var sourceStream = new FileStream(path, FileMode.Open, FileAccess.Read, FileShare.Read))
using (var targetStream = new FileStream(targetPath, FileMode.OpenOrCreate, FileAccess.Write, FileShare.None))
using (var compressStream = new GZipStream(targetStream, this.compressionLevel))
{
sourceStream.CopyTo(compressStream);
}
}

// Only apply archive file limit if we are archiving to a non-tokenised path (a constant path)
if (this.retainedFileCountLimit > 0 && !this.IsArchivePathTokenised)
{
RemoveExcessFiles(currentTargetDir);
}
}
catch (Exception ex)
{
SelfLog.WriteLine("Error while archiving file {0}: {1}", path, ex);
throw;
}
}
using (var compressStream = new GZipStream(targetStream, this.compressionLevel))
{
sourceStream.CopyTo(compressStream);
}
}

// Only apply archive file limit if we are archiving to a non-tokenised path (a constant path)
if (this.retainedFileCountLimit > 0 && !this.IsArchivePathTokenised)
{
RemoveExcessFiles(currentTargetDir, path);
}
}
}
catch (Exception ex)
{
SelfLog.WriteLine("Error while archiving file {0}: {1}", path, ex);
throw;
}
}

private bool IsArchivePathTokenised => this.targetDirectory is not null && TokenExpander.IsTokenised(this.targetDirectory);

/// <summary>
/// Deletes archived files beyond the configured retention limit.
/// </summary>
/// <param name="folder">Directory containing the archived files.</param>
/// <param name="filePath">
/// Optional path of the log file being archived. When supplied, only archives that share
/// this file's base name (the part before the first '_') are considered for deletion, so
/// multiple logs sharing one archive directory do not delete each other's files.
/// Defaults to null (legacy behavior: all matching files in the folder are considered).
/// </param>
private void RemoveExcessFiles(string folder, string filePath = null)
{
    // Compressed archives carry the .gz extension; otherwise match everything.
    var searchPattern = this.compressionLevel != CompressionLevel.NoCompression
        ? "*.gz"
        : "*.*";

    var candidates = Directory.GetFiles(folder, searchPattern);

    // Restrict retention to files belonging to the same rolling log, when known.
    // Ordinal comparison: file-name matching must not depend on the current culture.
    if (filePath != null)
    {
        var baseFileName = Path.GetFileNameWithoutExtension(filePath).Split('_').First();
        candidates = candidates
            .Where(f => Path.GetFileNameWithoutExtension(f).StartsWith(baseFileName, StringComparison.Ordinal))
            .ToArray();
    }

    // Keep the newest 'retainedFileCountLimit' files; delete the rest.
    var filesToDelete = candidates
        .Select(f => new FileInfo(f))
        .OrderByDescending(f => f, LogFileComparer.Default)
        .Skip(this.retainedFileCountLimit)
        .ToList();

    foreach (var file in filesToDelete)
    {
        try
        {
            file.Delete();
        }
        catch (Exception ex)
        {
            // Best-effort: log and continue so one locked file doesn't abort retention.
            SelfLog.WriteLine("Error while deleting file {0}: {1}", file.FullName, ex);
        }
    }
}

/// <summary>
/// Orders rolled log file names chronologically: a shorter name sorts earlier (so
/// "_9" comes before "_10"), with an ordinal, case-insensitive name comparison as the
/// tie-breaker for names of equal length.
/// </summary>
private class LogFileComparer : IComparer<FileInfo>
{
    public static readonly IComparer<FileInfo> Default = new LogFileComparer();

    // NOTE: the length-then-lexicographic ordering assumes rolling suffixes where a
    // longer name means a later file; date-stamped file names must use a sortable
    // (ISO 8601-like) format for this ordering to match chronological order.
    public int Compare(FileInfo x, FileInfo y)
    {
        if (x is null)
            return y is null ? 0 : -1;
        if (y is null)
            return 1;

        if (x.Name.Length != y.Name.Length)
            return x.Name.Length > y.Name.Length ? 1 : -1;

        return String.Compare(x.Name, y.Name, StringComparison.OrdinalIgnoreCase);
    }
}
}
/// <summary>
/// Deletes archived files beyond the configured retention limit, considering only
/// archives that belong to the same rolling log as <paramref name="filePath"/>, so that
/// multiple logs sharing one archive directory do not delete each other's files.
/// </summary>
/// <param name="folder">Directory containing the archived files.</param>
/// <param name="filePath">Path of the log file being archived; its base name (the part
/// before the first '_') selects which archives are subject to retention.</param>
private void RemoveExcessFiles(string folder, string filePath)
{
    lock (ArchiveLock)
    {
        // Rolling log files are named "<base>_<sequence>"; take the base name only.
        var baseFileName = Path.GetFileNameWithoutExtension(filePath).Split('_').First();

        // Compressed archives carry the .gz extension; otherwise match everything.
        var searchPattern = this.compressionLevel != CompressionLevel.NoCompression
            ? "*.gz"
            : "*.*";

        // Ordinal comparison: file-name matching must not depend on the current culture (CA1310).
        var filesToDelete = Directory.GetFiles(folder, searchPattern)
            .Where(f => Path.GetFileNameWithoutExtension(f).StartsWith(baseFileName, StringComparison.Ordinal))
            .Select(f => new FileInfo(f))
            .OrderByDescending(f => f, LogFileComparer.Default)
            .Skip(this.retainedFileCountLimit)
            .ToList();

        foreach (var file in filesToDelete)
        {
            try
            {
                file.Delete();
            }
            catch (Exception ex)
            {
                // Best-effort: log and continue so one locked file doesn't abort retention.
                SelfLog.WriteLine("Error while deleting file {0}: {1}", file.FullName, ex);
            }
        }
    }
}

/// <summary>
/// Compares rolled log files so that they sort chronologically: shorter names first
/// (hence "_9" before "_10"), then by name using an ordinal, case-insensitive comparison.
/// </summary>
private class LogFileComparer : IComparer<FileInfo>
{
    public static readonly IComparer<FileInfo> Default = new LogFileComparer();

    // NOTE: this assumes rolling-file suffixes where a longer name means a later file.
    // Date-stamped names only order correctly with a sortable (ISO 8601-like) format.
    public int Compare(FileInfo x, FileInfo y)
    {
        if (x is null && y is null)
            return 0;
        if (x is null)
            return -1;
        if (y is null)
            return 1;

        var lengthDifference = x.Name.Length - y.Name.Length;
        if (lengthDifference > 0)
            return 1;
        if (lengthDifference < 0)
            return -1;

        return String.Compare(x.Name, y.Name, StringComparison.OrdinalIgnoreCase);
    }
}
}
}
35 changes: 29 additions & 6 deletions test/Serilog.Sinks.File.Archive.Test/RollingFileSinkTests.cs
Original file line number Diff line number Diff line change
Expand Up @@ -58,30 +58,40 @@ public void Should_remove_many_old_archives()
{
const int retainedFiles = 1;
var archiveWrapper = new ArchiveHooks(retainedFiles);
var logEvents = GenerateLogEvents(1_002).ToArray();
var noRemoveWrapper = new ArchiveHooks(5000);
var eventsCnt = 1_002;
var logEvents = GenerateLogEvents(eventsCnt).ToArray();

using (var temp = TempFolder.ForCaller())
{
var path = temp.AllocateFilename("log");
var otherLogPath = Path.Combine(temp.Path, "other-log.log");

// Write events, such that we end up with 1,001 deleted files and 1 retained file
WriteLogEvents(path, archiveWrapper, logEvents);
WriteLogEvents(otherLogPath, noRemoveWrapper, logEvents);

// Get all the files in the test directory
var files = Directory.GetFiles(temp.Path)
.OrderBy(p => p, StringComparer.OrdinalIgnoreCase)
.ToArray();

var otherLogPrefix = Path.Combine(temp.Path, Path.GetFileNameWithoutExtension(otherLogPath));

// We should have a single log file, and 'retainedFiles' 1 gz files
files.Count(x => x.EndsWith("log")).ShouldBe(1);
files.Count(x => x.EndsWith("gz")).ShouldBe(retainedFiles);
files.Count(x => x.EndsWith("log") && !x.StartsWith(otherLogPrefix)).ShouldBe(1);
files.Count(x => x.EndsWith("gz") && !x.StartsWith(otherLogPrefix)).ShouldBe(retainedFiles);

files.Single(x => x.EndsWith("gz") && !x.StartsWith(otherLogPrefix)).ShouldEndWith("_1000.log.gz");
files.Single(x => x.EndsWith("log") && !x.StartsWith(otherLogPrefix)).ShouldEndWith("_1001.log");

files.Single(x => x.EndsWith("gz")).ShouldEndWith("_1000.log.gz");
files.Single(x => x.EndsWith("log")).ShouldEndWith("_1001.log");
// other logs remains untouched
files.Count(x => x.EndsWith("log") && x.StartsWith(otherLogPrefix)).ShouldBe(1);
files.Count(x => x.EndsWith("gz") && x.StartsWith(otherLogPrefix)).ShouldBe(eventsCnt - 1);

// Ensure the data was GZip compressed, by decompressing and comparing against what we wrote
int i = logEvents.Length - retainedFiles - 1;
foreach (var gzipFile in files.Where(x => x.EndsWith("gz")))
foreach (var gzipFile in files.Where(x => x.EndsWith("gz") && !x.StartsWith(otherLogPrefix)))
{
var lines = Utils.DecompressLines(gzipFile);

Expand Down Expand Up @@ -253,6 +263,19 @@ private static void WriteLogEvents(string path, ArchiveHooks hooks, LogEvent[] l
}
}

/// <summary>
/// Writes the given events through a size-limited rolling file sink whose retention
/// limit (2000) is high enough that Serilog itself never deletes rolled files, leaving
/// all archiving decisions to the supplied <paramref name="hooks"/>.
/// </summary>
/// <param name="path">Base path of the rolling log file.</param>
/// <param name="hooks">Archive hooks under test.</param>
/// <param name="logEvents">Events to write, one rolled file per event (1-byte size limit).</param>
private static void WriteLogEventsNoRemove(string path, ArchiveHooks hooks, LogEvent[] logEvents)
{
    using (var logger = new LoggerConfiguration()
        .WriteTo.File(path, rollOnFileSizeLimit: true, fileSizeLimitBytes: 1, retainedFileCountLimit: 2000, hooks: hooks)
        .CreateLogger())
    {
        foreach (var logEvent in logEvents)
        {
            logger.Write(logEvent);
        }
    }
}

private static IEnumerable<LogEvent> GenerateLogEvents(int count)
{
for (var i = 0; i < count; i++)
Expand Down