diff --git a/Build.ps1 b/Build.ps1
index 3f8cb9b..c99fbba 100644
--- a/Build.ps1
+++ b/Build.ps1
@@ -11,7 +11,9 @@ if(Test-Path .\artifacts) {
$branch = @{ $true = $env:APPVEYOR_REPO_BRANCH; $false = $(git symbolic-ref --short -q HEAD) }[$env:APPVEYOR_REPO_BRANCH -ne $NULL];
$revision = @{ $true = "{0:00000}" -f [convert]::ToInt32("0" + $env:APPVEYOR_BUILD_NUMBER, 10); $false = "local" }[$env:APPVEYOR_BUILD_NUMBER -ne $NULL];
-$suffix = @{ $true = ""; $false = "$($branch.Substring(0, [math]::Min(10,$branch.Length)))-$revision"}[$branch -eq "master" -and $revision -ne "local"]
+#if branch includes features/, things blow up, so let's remove it
+$branchForPackageName = $branch -replace "feature[s]?/",'';
+$suffix = @{ $true = ""; $false = "$($branchForPackageName.Substring(0, [math]::Min(10,$branchForPackageName.Length)))-$revision"}[$branch -eq "master" -and $revision -ne "local"]
echo "build: Version suffix is $suffix"
diff --git a/README.md b/README.md
index 066a566..ef9383e 100644
--- a/README.md
+++ b/README.md
@@ -24,4 +24,6 @@ var log = new LoggerConfiguration()
.CreateLogger();
```
-This will write unsent messages to a `buffer-{Date}.json` file in the specified folder (`C:\test\` in the example).
\ No newline at end of file
+This will write unsent messages to a `buffer-{Date}.json` file in the specified folder (`C:\test\` in the example).
+
+The method also takes a `retainedFileCountLimit` parameter that will allow you to control how much info to store / ship when back online. By default, the value is `null` with the intent of sending all persisted data, no matter how old. If you specify a value, only the data in the last N buffer files will be shipped back, preventing stale data from being indexed (if that info is no longer useful).
\ No newline at end of file
diff --git a/appveyor.yml b/appveyor.yml
index 7e5f9b5..da14701 100644
--- a/appveyor.yml
+++ b/appveyor.yml
@@ -2,6 +2,8 @@ version: '{build}'
skip_tags: true
image: Visual Studio 2017
configuration: Release
+init:
+- git config --global core.autocrlf false
install:
- ps: mkdir -Force ".\build\" | Out-Null
build_script:
diff --git a/sample/sampleDurableLogger/Program.cs b/sample/sampleDurableLogger/Program.cs
index 916e275..9c88943 100644
--- a/sample/sampleDurableLogger/Program.cs
+++ b/sample/sampleDurableLogger/Program.cs
@@ -12,10 +12,10 @@ namespace SampleDurableLogger
{
public class Program
{
- public static void Main(string[] args)
+ public static void Main()
{
SetupLogglyConfiguration();
- using (var logger = CreateLogger(@"c:\test\"))
+ using (var logger = CreateLogger(@"C:\test\Logs\"))
{
logger.Information("Test message - app started");
logger.Warning("Test message with {@Data}", new {P1 = "sample", P2 = DateTime.Now});
@@ -44,6 +44,9 @@ public static void Main(string[] args)
static Logger CreateLogger(string logFilePath)
{
+ //write selflog to stderr
+ Serilog.Debugging.SelfLog.Enable(Console.Error);
+
return new LoggerConfiguration()
.MinimumLevel.Debug()
//Add enrichers
diff --git a/serilog-sinks-loggly.sln b/serilog-sinks-loggly.sln
index 9f2eb70..87ca0dd 100644
--- a/serilog-sinks-loggly.sln
+++ b/serilog-sinks-loggly.sln
@@ -1,13 +1,14 @@
Microsoft Visual Studio Solution File, Format Version 12.00
# Visual Studio 15
-VisualStudioVersion = 15.0.26430.16
+VisualStudioVersion = 15.0.26430.12
MinimumVisualStudioVersion = 10.0.40219.1
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "src", "src", "{037440DE-440B-4129-9F7A-09B42D00397E}"
EndProject
Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "assets", "assets", "{E9D1B5E1-DEB9-4A04-8BAB-24EC7240ADAF}"
ProjectSection(SolutionItems) = preProject
.editorconfig = .editorconfig
+ appveyor.yml = appveyor.yml
Build.ps1 = Build.ps1
README.md = README.md
assets\Serilog.snk = assets\Serilog.snk
diff --git a/src/Serilog.Sinks.Loggly/LoggerConfigurationLogglyExtensions.cs b/src/Serilog.Sinks.Loggly/LoggerConfigurationLogglyExtensions.cs
index 7e166b8..58e060f 100644
--- a/src/Serilog.Sinks.Loggly/LoggerConfigurationLogglyExtensions.cs
+++ b/src/Serilog.Sinks.Loggly/LoggerConfigurationLogglyExtensions.cs
@@ -47,7 +47,8 @@ public static class LoggerConfigurationLogglyExtensions
/// A soft limit for the number of bytes to use for storing failed requests.
/// The limit is soft in that it can be exceeded by any single error payload, but in that case only that single error
/// payload will be retained.
- /// number of files to retain for the buffer
+ /// number of files to retain for the buffer. If defined, this also controls which records
+ /// in the buffer get sent to the remote Loggly instance
/// Logger configuration, allowing configuration to continue.
/// A required parameter is null.
public static LoggerConfiguration Loggly(
@@ -91,6 +92,5 @@ public static LoggerConfiguration Loggly(
return loggerConfiguration.Sink(sink, restrictedToMinimumLevel);
}
-
}
}
diff --git a/src/Serilog.Sinks.Loggly/Properties/AssemblyInfo.cs b/src/Serilog.Sinks.Loggly/Properties/AssemblyInfo.cs
index f5a5151..5269bcb 100644
--- a/src/Serilog.Sinks.Loggly/Properties/AssemblyInfo.cs
+++ b/src/Serilog.Sinks.Loggly/Properties/AssemblyInfo.cs
@@ -15,4 +15,15 @@
"201AC646C451830FC7E61A2DFD633D34" +
"C39F87B81894191652DF5AC63CC40C77" +
"F3542F702BDA692E6E8A9158353DF189" +
- "007A49DA0F3CFD55EB250066B19485EC")]
\ No newline at end of file
+ "007A49DA0F3CFD55EB250066B19485EC")]
+
+[assembly: InternalsVisibleTo("DynamicProxyGenAssembly2, PublicKey=00240000048000009400000006020000002" +
+ "40000525341310004000001000100c547ca" +
+ "c37abd99c8db225ef2f6c8a3602f3b3606c" +
+ "c9891605d02baa56104f4cfc0734aa39b93" +
+ "bf7852f7d9266654753cc297e7d2edfe0ba" +
+ "c1cdcf9f717241550e0a7b191195b7667bb" +
+ "4f64bcb8e2121380fd1d9d46ad2d92d2d15" +
+ "605093924cceaf74c4861eff62abf69b929" +
+ "1ed0a340e113be11e6a7d3113e92484cf70" +
+ "45cc7")]
\ No newline at end of file
diff --git a/src/Serilog.Sinks.Loggly/Serilog.Sinks.Loggly.csproj b/src/Serilog.Sinks.Loggly/Serilog.Sinks.Loggly.csproj
index e5208be..1090cc6 100644
--- a/src/Serilog.Sinks.Loggly/Serilog.Sinks.Loggly.csproj
+++ b/src/Serilog.Sinks.Loggly/Serilog.Sinks.Loggly.csproj
@@ -1,4 +1,4 @@
-
+
Serilog sink for Loggly.com service
@@ -15,6 +15,7 @@
http://serilog.net
http://www.apache.org/licenses/LICENSE-2.0
false
+ Serilog
diff --git a/src/Serilog.Sinks.Loggly/Sinks/Loggly/Durable/FileSetPosition.cs b/src/Serilog.Sinks.Loggly/Sinks/Loggly/Durable/FileSetPosition.cs
new file mode 100644
index 0000000..b02b7f0
--- /dev/null
+++ b/src/Serilog.Sinks.Loggly/Sinks/Loggly/Durable/FileSetPosition.cs
@@ -0,0 +1,16 @@
+namespace Serilog.Sinks.Loggly.Durable
+{
+ public class FileSetPosition
+ {
+ public FileSetPosition(long position, string fileFullPath)
+ {
+ NextLineStart = position;
+ File = fileFullPath;
+ }
+
+ public long NextLineStart { get; }
+ public string File { get; }
+
+ public static readonly FileSetPosition None = default(FileSetPosition);
+ }
+}
\ No newline at end of file
diff --git a/src/Serilog.Sinks.Loggly/Sinks/Loggly/DurableLogglySink.cs b/src/Serilog.Sinks.Loggly/Sinks/Loggly/DurableLogglySink.cs
index 7913d44..4247754 100644
--- a/src/Serilog.Sinks.Loggly/Sinks/Loggly/DurableLogglySink.cs
+++ b/src/Serilog.Sinks.Loggly/Sinks/Loggly/DurableLogglySink.cs
@@ -50,7 +50,8 @@ public DurableLogglySink(
eventBodyLimitBytes,
levelControlSwitch,
retainedInvalidPayloadsLimitBytes,
- encoding);
+ encoding,
+ retainedFileCountLimit);
//writes events to the file to support connection recovery
_sink = new RollingFileSink(
diff --git a/src/Serilog.Sinks.Loggly/Sinks/Loggly/FileBasedBookmarkProvider.cs b/src/Serilog.Sinks.Loggly/Sinks/Loggly/FileBasedBookmarkProvider.cs
new file mode 100644
index 0000000..49b3b4f
--- /dev/null
+++ b/src/Serilog.Sinks.Loggly/Sinks/Loggly/FileBasedBookmarkProvider.cs
@@ -0,0 +1,89 @@
+using System;
+using System.IO;
+using System.Text;
+using Serilog.Debugging;
+using Serilog.Sinks.Loggly.Durable;
+
+namespace Serilog.Sinks.Loggly
+{
+ class FileBasedBookmarkProvider : IBookmarkProvider
+ {
+ readonly IFileSystemAdapter _fileSystemAdapter;
+ readonly Encoding _encoding;
+
+ readonly string _bookmarkFilename;
+ Stream _currentBookmarkFileStream;
+
+ public FileBasedBookmarkProvider(string bufferBaseFilename, IFileSystemAdapter fileSystemAdapter, Encoding encoding)
+ {
+ _bookmarkFilename = Path.GetFullPath(bufferBaseFilename + ".bookmark");
+ _fileSystemAdapter = fileSystemAdapter;
+ _encoding = encoding;
+ }
+
+ public void Dispose()
+ {
+ _currentBookmarkFileStream?.Dispose();
+ }
+
+ public FileSetPosition GetCurrentBookmarkPosition()
+ {
+ EnsureCurrentBookmarkStreamIsOpen();
+
+ if (_currentBookmarkFileStream.Length != 0)
+ {
+ using (var bookmarkStreamReader = new StreamReader(_currentBookmarkFileStream, _encoding, false, 128, true))
+ {
+ //set the position to 0, to begin reading the initial line
+ bookmarkStreamReader.BaseStream.Position = 0;
+ var bookmarkInfoLine = bookmarkStreamReader.ReadLine();
+
+ if (bookmarkInfoLine != null)
+ {
+ //reset position after read
+ var parts = bookmarkInfoLine.Split(new[] {":::"}, StringSplitOptions.RemoveEmptyEntries);
+ if (parts.Length == 2 && long.TryParse(parts[0], out long position))
+ {
+ return new FileSetPosition(position, parts[1]);
+ }
+
+ SelfLog.WriteLine("Unable to read a line correctly from bookmark file");
+ }
+ else
+ {
+ SelfLog.WriteLine(
+ "For some unknown reason, we were unable to read the non-empty bookmark info...");
+ }
+ }
+ }
+
+ //bookmark file is empty or has been misread, so return a null bookmark
+ return null;
+ }
+
+ public void UpdateBookmark(FileSetPosition newBookmark)
+ {
+ EnsureCurrentBookmarkStreamIsOpen();
+
+ using (var bookmarkStreamWriter = new StreamWriter(_currentBookmarkFileStream, _encoding, 128, true))
+ {
+ bookmarkStreamWriter.BaseStream.Position = 0;
+ bookmarkStreamWriter.WriteLine("{0}:::{1}", newBookmark.NextLineStart, newBookmark.File);
+ bookmarkStreamWriter.Flush();
+ }
+ }
+
+ void EnsureCurrentBookmarkStreamIsOpen()
+ {
+ //this will ensure a stream is available, even if it means creating a new file associated to it
+ if (_currentBookmarkFileStream == null)
+ _currentBookmarkFileStream = _fileSystemAdapter.Open(
+ _bookmarkFilename,
+ FileMode.OpenOrCreate,
+ FileAccess.ReadWrite,
+ FileShare.Read);
+ }
+
+
+ }
+}
\ No newline at end of file
diff --git a/src/Serilog.Sinks.Loggly/Sinks/Loggly/FileBufferDataProvider.cs b/src/Serilog.Sinks.Loggly/Sinks/Loggly/FileBufferDataProvider.cs
new file mode 100644
index 0000000..6556d16
--- /dev/null
+++ b/src/Serilog.Sinks.Loggly/Sinks/Loggly/FileBufferDataProvider.cs
@@ -0,0 +1,382 @@
+using System;
+using System.Collections.Generic;
+using System.IO;
+using System.Linq;
+using System.Text;
+using Loggly;
+using Newtonsoft.Json;
+using Serilog.Debugging;
+using Serilog.Sinks.Loggly.Durable;
+#if HRESULTS
+using System.Runtime.InteropServices;
+#endif
+
+namespace Serilog.Sinks.Loggly
+{
+ interface IBufferDataProvider
+ {
+ IEnumerable GetNextBatchOfEvents();
+ void MarkCurrentBatchAsProcessed();
+ void MoveBookmarkForward();
+ }
+
+ ///
+ /// Provides a facade to all File operations, namely bookmark management and
+ /// buffered data readings
+ ///
+ class FileBufferDataProvider : IBufferDataProvider
+ {
+#if HRESULTS
+ //for Marshalling error checks
+ const int ErrorSharingViolation = 32;
+ const int ErrorLockViolation = 33;
+#endif
+ readonly string _candidateSearchPath;
+ readonly string _logFolder;
+
+ readonly int _batchPostingLimit;
+ readonly long? _eventBodyLimitBytes;
+ readonly int? _retainedFileCountLimit;
+
+ readonly IFileSystemAdapter _fileSystemAdapter;
+ readonly IBookmarkProvider _bookmarkProvider;
+ readonly Encoding _encoding;
+
+ readonly JsonSerializer _serializer = JsonSerializer.Create();
+
+ // the following fields control the internal state and position of the queue
+ FileSetPosition _currentBookmark;
+ FileSetPosition _futureBookmark;
+ IEnumerable _currentBatchOfEventsToProcess;
+
+ public FileBufferDataProvider(
+ string baseBufferFileName,
+ IFileSystemAdapter fileSystemAdapter,
+ IBookmarkProvider bookmarkProvider,
+ Encoding encoding,
+ int batchPostingLimit,
+ long? eventBodyLimitBytes,
+ int? retainedFileCountLimit)
+ {
+ //construct a valid path to a file in the log folder to get the folder path:
+ _logFolder = Path.GetDirectoryName(Path.GetFullPath(baseBufferFileName + ".bookmark"));
+ _candidateSearchPath = Path.GetFileName(baseBufferFileName) + "*.json";
+
+ _fileSystemAdapter = fileSystemAdapter;
+ _bookmarkProvider = bookmarkProvider;
+ _encoding = encoding;
+ _batchPostingLimit = batchPostingLimit;
+ _eventBodyLimitBytes = eventBodyLimitBytes;
+ _retainedFileCountLimit = retainedFileCountLimit;
+ }
+
+ public IEnumerable GetNextBatchOfEvents()
+ {
+ //if current batch has not yet been processed, return it
+ if (_currentBatchOfEventsToProcess != null)
+ return _currentBatchOfEventsToProcess;
+
+ //if we have a bookmark in place, it may be the next position to read from
+ // otherwise try to get a valid one
+ if (_currentBookmark == null)
+ {
+ //read the current bookmark from file, and if invalid, try to create a valid one
+ _currentBookmark = TryGetValidBookmark();
+
+ if (!IsValidBookmark(_currentBookmark))
+ return Enumerable.Empty();
+ }
+
+ //bookmark is valid, so lets get the next batch from the files.
+ RefreshCurrentListOfEvents();
+
+ //this should never return null. If there is nothing to return, please return an empty list instead.
+ return _currentBatchOfEventsToProcess ?? Enumerable.Empty();
+ }
+
+ public void MarkCurrentBatchAsProcessed()
+ {
+ //reset internal state: only write to the bookmark file if we move forward.
+ //otherwise, there is a risk of rereading the current (first) buffer file again
+ if(_futureBookmark != null)
+ _bookmarkProvider.UpdateBookmark(_futureBookmark);
+
+ //we can move the marker to what's in "future" (next expected position)
+ _currentBookmark = _futureBookmark;
+ _currentBatchOfEventsToProcess = null;
+ }
+
+ public void MoveBookmarkForward()
+ {
+            //current batch is empty, so we should clear it out so that the next read cycle will refresh it correctly
+ _currentBatchOfEventsToProcess = null;
+
+ // Only advance the bookmark if no other process has the
+ // current file locked, and its length is as we found it.
+ // NOTE: we will typically enter this method after any buffer file is finished
+            // (no events read from previous file). This is the opportunity to clear out files
+            // especially the previously read file
+
+ var fileSet = GetEventBufferFileSet();
+
+ try
+ {
+                //if we only have two files, move to the next one immediately, unless a locking situation
+                // impedes us from doing so
+ if (fileSet.Length == 2
+ && fileSet.First() == _currentBookmark.File
+ && IsUnlockedAtLength(_currentBookmark.File, _currentBookmark.NextLineStart))
+ {
+ //move to next file
+ _currentBookmark = new FileSetPosition(0, fileSet[1]);
+ //we can also delete the previously read file since we no longer need it
+ _fileSystemAdapter.DeleteFile(fileSet[0]);
+ }
+
+ if (fileSet.Length > 2)
+ {
+ //when we have more files, we want to delete older ones, but this depends on the
+ // limit retention policy. If no limit retention policy is in place, the intent is to
+ // send all messages, no matter how old. In this case, we should only delete the current
+ // file (since we are finished with it) and start at the next one. If we do have some
+ // retention policy in place, then delete anything older then the limit and the next
+ // message read should be at the start of the policy limit
+ if (_retainedFileCountLimit.HasValue)
+ {
+                    //move to first file within retention limit
+ _currentBookmark = _retainedFileCountLimit.Value >= fileSet.Length
+ ? new FileSetPosition(0, fileSet[0])
+ : new FileSetPosition(0, fileSet[fileSet.Length - _retainedFileCountLimit.Value]);
+
+ //delete all the old files
+ foreach (var oldFile in fileSet.Take(fileSet.Length - _retainedFileCountLimit.Value))
+ {
+ _fileSystemAdapter.DeleteFile(oldFile);
+ }
+ }
+ else
+ {
+ //move to the next file and delete the current one
+ _currentBookmark = new FileSetPosition(0, fileSet[1]);
+ _fileSystemAdapter.DeleteFile(fileSet[0]);
+ }
+ }
+ }
+ catch (Exception ex)
+ {
+ SelfLog.WriteLine("An error occured while deleteing the files...{0}", ex.Message);
+ }
+ finally
+ {
+            //even if reading / deleting files fails, we can / should update the bookmark file
+ //it is important that the file have the reset position, otherwise we risk failing to
+ // move forward in the next read cycle
+ _bookmarkProvider.UpdateBookmark(_currentBookmark);
+ }
+ }
+
+ bool IsUnlockedAtLength(string file, long maxLen)
+ {
+ try
+ {
+ using (var fileStream = _fileSystemAdapter.Open(file, FileMode.OpenOrCreate, FileAccess.ReadWrite, FileShare.Read))
+ {
+ return fileStream.Length <= maxLen;
+ }
+ }
+#if HRESULTS
+ catch (IOException ex)
+ {
+ //NOTE: this seems to be a way to check for file lock validations as in :
+ // https://stackoverflow.com/questions/16642858/files-how-to-distinguish-file-lock-and-permission-denied-cases
+ //sharing violation and LockViolation are expected, and we can follow trough if they occur
+
+ var errorCode = Marshal.GetHRForException(ex) & ((1 << 16) - 1);
+ if (errorCode != ErrorSharingViolation && errorCode != ErrorLockViolation )
+ {
+ SelfLog.WriteLine("Unexpected I/O exception while testing locked status of {0}: {1}", file, ex);
+ }
+ }
+#else
+ catch (IOException ex)
+ {
+ // Where no HRESULT is available, assume IOExceptions indicate a locked file
+ SelfLog.WriteLine("Unexpected IOException while testing locked status of {0}: {1}", file, ex);
+ }
+#endif
+ catch (Exception ex)
+ {
+ SelfLog.WriteLine("Unexpected exception while testing locked status of {0}: {1}", file, ex);
+ }
+
+ return false;
+ }
+
+ void RefreshCurrentListOfEvents()
+ {
+ var events = new List();
+ var count = 0;
+ var positionTracker = _currentBookmark.NextLineStart;
+
+ using (var currentBufferStream = _fileSystemAdapter.Open(_currentBookmark.File, FileMode.Open, FileAccess.Read, FileShare.ReadWrite))
+ {
+ while (count < _batchPostingLimit && TryReadLine(currentBufferStream, ref positionTracker, out string readLine))
+ {
+ // Count is the indicator that work was done, so advances even in the (rare) case an
+ // oversized event is dropped.
+ ++count;
+
+ if (_eventBodyLimitBytes.HasValue
+ && readLine != null
+ && _encoding.GetByteCount(readLine) > _eventBodyLimitBytes.Value)
+ {
+ SelfLog.WriteLine(
+ "Event JSON representation exceeds the byte size limit of {0} and will be dropped; data: {1}",
+ _eventBodyLimitBytes, readLine);
+ }
+
+ if (!readLine.StartsWith("{"))
+ {
+ //in some instances this can happen. TryReadLine no longer assumes a BOM if reading from the file start,
+ //but there may be (unobserved yet) situations where the line read is still not complete and valid
+ // Json. This and the try catch that follows are, therefore, attempts to preserve the
+ //logging functionality active, though some events may be dropped in the process.
+ SelfLog.WriteLine(
+ "Event JSON representation does not start with the expected '{{' character. " +
+ "This may be related to a BOM issue in the buffer file. Event will be dropped; data: {0}",
+ readLine);
+ }
+ else
+ {
+ try
+ {
+ events.Add(DeserializeEvent(readLine));
+ }
+ catch (Exception ex)
+ {
+ SelfLog.WriteLine(
+ "Unable to deserialize the json event; Event will be dropped; exception: {0}; data: {1}",
+ ex.Message, readLine);
+ }
+ }
+ }
+ }
+
+ _futureBookmark = new FileSetPosition(positionTracker, _currentBookmark.File);
+ _currentBatchOfEventsToProcess = events;
+ }
+
+ // It would be ideal to chomp whitespace here, but not required.
+ bool TryReadLine(Stream current, ref long nextStart, out string readLine)
+ {
+ // determine if we are reading the first line in the file. This will help with
+ // solving the BOM marker issue ahead
+ var firstline = nextStart == 0;
+
+ if (current.Length <= nextStart)
+ {
+ readLine = null;
+ return false;
+ }
+
+ // Important not to dispose this StreamReader as the stream must remain open.
+ using (var reader = new StreamReader(current, _encoding, false, 128, true))
+ {
+ // ByteOrder marker may still be a problem if we a reading the first line. We can test for it
+ // directly from the stream. This should only affect the first readline op, anyways. Since we
+ // If it's there, we need to move the start index by 3 bytes, so position will be correct throughout
+ if (firstline && StreamContainsBomMarker(current))
+ {
+ nextStart += 3;
+ }
+
+                //readline moves the marker forward farther than the line length, so it needs to be placed
+ // at the right position. This makes sure we try to read a line from the right starting point
+ current.Position = nextStart;
+ readLine = reader.ReadLine();
+
+ if (readLine == null)
+ return false;
+
+ //If we have read the line, advance the count by the number of bytes + newline bytes to
+ //mark the start of the next line
+ nextStart += _encoding.GetByteCount(readLine) + _encoding.GetByteCount(Environment.NewLine);
+
+ return true;
+ }
+ }
+
+ static bool StreamContainsBomMarker(Stream current)
+ {
+ bool isBom = false;
+ long currentPosition = current.Position; //save to reset after BOM check
+
+ byte[] potentialBomMarker = new byte[3];
+ current.Position = 0;
+ current.Read(potentialBomMarker, 0, 3);
+ //BOM is "ef bb bf" => 239 187 191
+ if (potentialBomMarker[0] == 239
+ && potentialBomMarker[1] == 187
+ && potentialBomMarker[2] == 191)
+ {
+ isBom = true;
+ }
+
+ current.Position = currentPosition; //put position back where it was
+ return isBom;
+ }
+
+ LogglyEvent DeserializeEvent(string eventLine)
+ {
+ return _serializer.Deserialize(new JsonTextReader(new StringReader(eventLine)));
+ }
+
+ FileSetPosition TryGetValidBookmark()
+ {
+ //get from the bookmark file first;
+ FileSetPosition newBookmark = _bookmarkProvider.GetCurrentBookmarkPosition();
+
+ if (!IsValidBookmark(newBookmark))
+ {
+ newBookmark = CreateFreshBookmarkBasedOnBufferFiles();
+ }
+
+ return newBookmark;
+ }
+
+ FileSetPosition CreateFreshBookmarkBasedOnBufferFiles()
+ {
+ var fileSet = GetEventBufferFileSet();
+
+ //the new bookmark should consider file retention rules, if any
+ // if no retention rule is in place (send all data to loggly, no matter how old)
+ // then take the first file and make a FileSetPosition out of it,
+ // otherwise, make the position marker relative to the oldest file as in the rule
+ //NOTE: this only happens when the previous bookmark is invalid (that's how we
+            // entered this method) so, if the previous bookmark points to a valid file
+ // that will continue to be read till the end.
+ if (_retainedFileCountLimit.HasValue
+ && fileSet.Length > _retainedFileCountLimit.Value)
+ {
+                //we have more files than our rule requires (older than needed)
+ // so point to the oldest allowed by our rule
+ return new FileSetPosition(0, fileSet.Skip(fileSet.Length - _retainedFileCountLimit.Value).First());
+ }
+
+ return fileSet.Any() ? new FileSetPosition(0, fileSet.First()) : null;
+ }
+
+ bool IsValidBookmark(FileSetPosition bookmark)
+ {
+ return bookmark?.File != null
+ && _fileSystemAdapter.Exists(bookmark.File);
+ }
+
+ string[] GetEventBufferFileSet()
+ {
+ return _fileSystemAdapter.GetFiles(_logFolder, _candidateSearchPath)
+ .OrderBy(name => name)
+ .ToArray();
+ }
+ }
+}
\ No newline at end of file
diff --git a/src/Serilog.Sinks.Loggly/Sinks/Loggly/FileSystemAdapter.cs b/src/Serilog.Sinks.Loggly/Sinks/Loggly/FileSystemAdapter.cs
new file mode 100644
index 0000000..7244333
--- /dev/null
+++ b/src/Serilog.Sinks.Loggly/Sinks/Loggly/FileSystemAdapter.cs
@@ -0,0 +1,36 @@
+using System.IO;
+
+namespace Serilog.Sinks.Loggly
+{
+ ///
+ /// adapter to abstract away filesystem specific / coupled calls, especially using File and Directory
+ ///
+ class FileSystemAdapter : IFileSystemAdapter
+ {
+ //TODO: can we use Physical
+ public bool Exists(string filePath)
+ {
+ return System.IO.File.Exists(filePath);
+ }
+
+ public void DeleteFile(string filePath)
+ {
+ System.IO.File.Delete(filePath);
+ }
+
+ public Stream Open(string filePath, FileMode mode, FileAccess access, FileShare share)
+ {
+ return System.IO.File.Open(filePath, mode, access, share);
+ }
+
+ public void WriteAllBytes(string filePath, byte[] bytesToWrite)
+ {
+ System.IO.File.WriteAllBytes(filePath, bytesToWrite);
+ }
+
+ public string[] GetFiles(string path, string searchPattern)
+ {
+ return Directory.GetFiles(path, searchPattern);
+ }
+ }
+}
\ No newline at end of file
diff --git a/src/Serilog.Sinks.Loggly/Sinks/Loggly/HttpLogShipper.cs b/src/Serilog.Sinks.Loggly/Sinks/Loggly/HttpLogShipper.cs
index adff21e..ef00ffe 100644
--- a/src/Serilog.Sinks.Loggly/Sinks/Loggly/HttpLogShipper.cs
+++ b/src/Serilog.Sinks.Loggly/Sinks/Loggly/HttpLogShipper.cs
@@ -19,11 +19,8 @@
using Serilog.Core;
using Serilog.Debugging;
using Serilog.Events;
-using IOFile = System.IO.File;
using System.Threading.Tasks;
-using System.Collections.Generic;
using Loggly;
-using Newtonsoft.Json;
#if HRESULTS
using System.Runtime.InteropServices;
@@ -33,16 +30,9 @@ namespace Serilog.Sinks.Loggly
{
class HttpLogShipper : IDisposable
{
- readonly JsonSerializer _serializer = JsonSerializer.Create();
-
readonly int _batchPostingLimit;
- readonly long? _eventBodyLimitBytes;
- readonly string _bookmarkFilename;
- readonly string _logFolder;
- readonly string _candidateSearchPath;
+ private readonly int? _retainedFileCountLimit;
readonly ExponentialBackoffConnectionSchedule _connectionSchedule;
- readonly long? _retainedInvalidPayloadsLimitBytes;
- readonly Encoding _encoding;
readonly object _stateLock = new object();
readonly PortableTimer _timer;
@@ -50,28 +40,37 @@ class HttpLogShipper : IDisposable
volatile bool _unloading;
readonly LogglyClient _logglyClient;
-
+ readonly IFileSystemAdapter _fileSystemAdapter = new FileSystemAdapter();
+ readonly FileBufferDataProvider _bufferDataProvider;
+ readonly InvalidPayloadLogger _invalidPayloadLogger;
+
public HttpLogShipper(
- string bufferBaseFilename,
- int batchPostingLimit,
- TimeSpan period,
- long? eventBodyLimitBytes,
- LoggingLevelSwitch levelControlSwitch,
- long? retainedInvalidPayloadsLimitBytes,
- Encoding encoding)
+ string bufferBaseFilename,
+ int batchPostingLimit,
+ TimeSpan period, long?
+ eventBodyLimitBytes,
+ LoggingLevelSwitch levelControlSwitch,
+ long? retainedInvalidPayloadsLimitBytes,
+ Encoding encoding,
+ int? retainedFileCountLimit)
{
_batchPostingLimit = batchPostingLimit;
- _eventBodyLimitBytes = eventBodyLimitBytes;
+ _retainedFileCountLimit = retainedFileCountLimit;
+
_controlledSwitch = new ControlledLevelSwitch(levelControlSwitch);
_connectionSchedule = new ExponentialBackoffConnectionSchedule(period);
- _retainedInvalidPayloadsLimitBytes = retainedInvalidPayloadsLimitBytes;
- _encoding = encoding;
_logglyClient = new LogglyClient(); //we'll use the loggly client instead of HTTP directly
- _bookmarkFilename = Path.GetFullPath(bufferBaseFilename + ".bookmark");
- _logFolder = Path.GetDirectoryName(_bookmarkFilename);
- _candidateSearchPath = Path.GetFileName(bufferBaseFilename) + "*.json";
+ //create necessary path elements
+ var candidateSearchPath = Path.GetFileName(bufferBaseFilename) + "*.json";
+ var logFolder = Path.GetDirectoryName(candidateSearchPath);
+
+ //Filebase is currently the only option available so we will stick with it directly (for now)
+ var encodingToUse = encoding;
+ var bookmarkProvider = new FileBasedBookmarkProvider(bufferBaseFilename, _fileSystemAdapter, encoding);
+ _bufferDataProvider = new FileBufferDataProvider(bufferBaseFilename, _fileSystemAdapter, bookmarkProvider, encodingToUse, batchPostingLimit, eventBodyLimitBytes, retainedFileCountLimit);
+ _invalidPayloadLogger = new InvalidPayloadLogger(logFolder, encodingToUse, _fileSystemAdapter, retainedInvalidPayloadsLimitBytes);
_timer = new PortableTimer(c => OnTick());
SetTimer();
@@ -115,91 +114,55 @@ async Task OnTick()
try
{
- // Locking the bookmark ensures that though there may be multiple instances of this
- // class running, only one will ship logs at a time.
- using (var bookmark = IOFile.Open(_bookmarkFilename, FileMode.OpenOrCreate, FileAccess.ReadWrite, FileShare.Read))
+ //we'll use this to control the number of events read per cycle. If the batch limit is reached,
+ //then there is probably more events queued and we should continue to read them. Otherwise,
+ // we can wait for the next timer tick moment to see if anything new is available.
+ int numberOfEventsRead;
+ do
{
- using (var bookmarkStreamReader = new StreamReader(bookmark, _encoding, false, 128))
- {
- using (var bookmarkStreamWriter = new StreamWriter(bookmark))
- {
- int count;
- do
- {
- count = 0;
-
- long nextLineBeginsAtOffset;
- string currentFile;
-
- TryReadBookmark(bookmark, bookmarkStreamReader, out nextLineBeginsAtOffset, out currentFile);
-
- var fileSet = GetFileSet();
+ //this should consistently return the same batch of events until
+ //a MarkAsProcessed message is sent to the provider. Never return a null, please...
+ var payload = _bufferDataProvider.GetNextBatchOfEvents();
+ numberOfEventsRead = payload.Count();
- if (currentFile == null || !IOFile.Exists(currentFile))
- {
- nextLineBeginsAtOffset = 0;
- currentFile = fileSet.FirstOrDefault();
- }
-
- if (currentFile == null)
- continue;
-
- //grab the list of pending LogglyEvents from the file
- var payload = GetListOfEvents(currentFile, ref nextLineBeginsAtOffset, ref count);
-
- if (count > 0)
- {
- //send the loggly events through the bulk API
- var result = await _logglyClient.Log(payload).ConfigureAwait(false);
- if (result.Code == ResponseCode.Success)
- {
- _connectionSchedule.MarkSuccess();
- WriteBookmark(bookmarkStreamWriter, nextLineBeginsAtOffset, currentFile);
- }
- else if (result.Code == ResponseCode.Error)
- {
- // The connection attempt was successful - the payload we sent was the problem.
- _connectionSchedule.MarkSuccess();
-
- DumpInvalidPayload(result, payload);
- WriteBookmark(bookmarkStreamWriter, nextLineBeginsAtOffset, currentFile);
- }
- else
- {
- _connectionSchedule.MarkFailure();
- SelfLog.WriteLine("Received failed HTTP shipping result {0}: {1}", result.Code,
- result.Message);
-
- break;
- }
- }
- else
- {
- // For whatever reason, there's nothing waiting to send. This means we should try connecting again at the
- // regular interval, so mark the attempt as successful.
- _connectionSchedule.MarkSuccess();
-
- // Only advance the bookmark if no other process has the
- // current file locked, and its length is as we found it.
- if (fileSet.Length == 2 && fileSet.First() == currentFile &&
- IsUnlockedAtLength(currentFile, nextLineBeginsAtOffset))
- {
- WriteBookmark(bookmarkStreamWriter, 0, fileSet[1]);
- }
+ if (numberOfEventsRead > 0)
+ {
+ //send the loggly events through the bulk API
+ var result = await _logglyClient.Log(payload).ConfigureAwait(false);
- if (fileSet.Length > 2)
- {
- // Once there's a third file waiting to ship, we do our
- // best to move on, though a lock on the current file
- // will delay this.
+ if (result.Code == ResponseCode.Success)
+ {
+ _connectionSchedule.MarkSuccess();
+ _bufferDataProvider.MarkCurrentBatchAsProcessed();
+ }
+ else if (result.Code == ResponseCode.Error)
+ {
+ // The connection attempt was successful - the payload we sent was the problem.
+ _connectionSchedule.MarkSuccess();
+                        _bufferDataProvider.MarkCurrentBatchAsProcessed(); //move forward
- IOFile.Delete(fileSet[0]);
- }
- }
- } while (count == _batchPostingLimit);
+ _invalidPayloadLogger.DumpInvalidPayload(result, payload);
}
- }
- }
+ else
+ {
+ _connectionSchedule.MarkFailure();
+ SelfLog.WriteLine("Received failed HTTP shipping result {0}: {1}", result.Code,
+ result.Message);
+ break;
+ }
+ }
+ else
+ {
+ // For whatever reason, there's nothing waiting to send. This means we should try connecting again at the
+ // regular interval, so mark the attempt as successful.
+ _connectionSchedule.MarkSuccess();
+
+ // not getting any batch may mean our marker is off, or at the end of the current, old file.
+                    // Try to move forward and cleanup
+ _bufferDataProvider.MoveBookmarkForward();
+ }
+ } while (numberOfEventsRead == _batchPostingLimit);
+ //keep sending as long as we can retrieve a full batch. If not, wait for next tick
}
catch (Exception ex)
{
@@ -217,244 +180,8 @@ async Task OnTick()
}
}
}
-
- const string InvalidPayloadFilePrefix = "invalid-";
- void DumpInvalidPayload(LogResponse result, IEnumerable payload)
- {
- var invalidPayloadFilename = $"{InvalidPayloadFilePrefix}{result.Code}-{Guid.NewGuid():n}.json";
- var invalidPayloadFile = Path.Combine(_logFolder, invalidPayloadFilename);
- SelfLog.WriteLine("HTTP shipping failed with {0}: {1}; dumping payload to {2}", result.Code, result.Message, invalidPayloadFile);
-
- byte[] bytesToWrite;
- using (StringWriter writer = new StringWriter())
- {
- SerializeLogglyEventsToWriter(payload, writer);
- bytesToWrite = _encoding.GetBytes(writer.ToString());
- }
-
- if (_retainedInvalidPayloadsLimitBytes.HasValue)
- {
- CleanUpInvalidPayloadFiles(_retainedInvalidPayloadsLimitBytes.Value - bytesToWrite.Length, _logFolder);
- }
- IOFile.WriteAllBytes(invalidPayloadFile, bytesToWrite);
- }
-
- static void CleanUpInvalidPayloadFiles(long maxNumberOfBytesToRetain, string logFolder)
- {
- try
- {
- var candiateFiles = Directory.EnumerateFiles(logFolder, $"{InvalidPayloadFilePrefix}*.json");
- DeleteOldFiles(maxNumberOfBytesToRetain, candiateFiles);
- }
- catch (Exception ex)
- {
- SelfLog.WriteLine("Exception thrown while trying to clean up invalid payload files: {0}", ex);
- }
- }
-
- static IEnumerable WhereCumulativeSizeGreaterThan(IEnumerable files, long maxCumulativeSize)
- {
- long cumulative = 0;
- foreach (var file in files)
- {
- cumulative += file.Length;
- if (cumulative > maxCumulativeSize)
- {
- yield return file;
- }
- }
- }
-
- ///
- /// Deletes oldest files in the group of invalid-* files.
- /// Existing files are ordered (from most recent to oldest) and file size is acumulated. All files
- /// who's cumulative byte count passes the defined limit are removed. Limit is therefore bytes
- /// and not number of files
- ///
- ///
- ///
- static void DeleteOldFiles(long maxNumberOfBytesToRetain, IEnumerable files)
- {
- var orderedFileInfos = from candidateFile in files
- let candidateFileInfo = new FileInfo(candidateFile)
- orderby candidateFileInfo.LastAccessTimeUtc descending
- select candidateFileInfo;
-
- var invalidPayloadFilesToDelete = WhereCumulativeSizeGreaterThan(orderedFileInfos, maxNumberOfBytesToRetain);
-
- foreach (var fileToDelete in invalidPayloadFilesToDelete)
- {
- try
- {
- fileToDelete.Delete();
- }
- catch (Exception ex)
- {
- SelfLog.WriteLine("Exception '{0}' thrown while trying to delete file {1}", ex.Message, fileToDelete.FullName);
- }
- }
- }
-
- List GetListOfEvents(string currentFile, ref long nextLineBeginsAtOffset, ref int count)
- {
- var events = new List();
-
- using (var current = IOFile.Open(currentFile, FileMode.Open, FileAccess.Read, FileShare.ReadWrite))
- {
- current.Position = nextLineBeginsAtOffset;
-
- string nextLine;
- while (count < _batchPostingLimit &&
- TryReadLine(current, ref nextLineBeginsAtOffset, out nextLine))
- {
- // Count is the indicator that work was done, so advances even in the (rare) case an
- // oversized event is dropped.
- ++count;
-
- if (_eventBodyLimitBytes.HasValue && _encoding.GetByteCount(nextLine) > _eventBodyLimitBytes.Value)
- {
- SelfLog.WriteLine(
- "Event JSON representation exceeds the byte size limit of {0} and will be dropped; data: {1}",
- _eventBodyLimitBytes, nextLine);
- }
- if (!nextLine.StartsWith("{"))
- {
- //in some instances this can happen. TryReadLine assumes a BOM if reading from the file start,
- //though we have captured instances in which the rolling file does not have it. This and the try catch
- //that follows are, therefore, attempts to preserve the logging functionality active, though some
- // events may be dropped in the process.
- SelfLog.WriteLine(
- "Event JSON representation does not start with the expected '{{' character. "+
- "This may be related to a BOM issue in the buffer file. Event will be dropped; data: {0}",
- nextLine);
- }
- else
- {
- try
- {
- events.Add(DeserializeEvent(nextLine));
- }
- catch (Exception ex)
- {
- SelfLog.WriteLine(
- "Unable to deserialize the json event; Event will be dropped; exception: {0}; data: {1}",
- ex.Message, nextLine);
- }
- }
- }
- }
-
- return events;
- }
-
- LogglyEvent DeserializeEvent(string eventLine)
- {
- return _serializer.Deserialize(new JsonTextReader(new StringReader(eventLine)));
- }
-
- void SerializeLogglyEventsToWriter(IEnumerable events, TextWriter writer)
- {
- foreach (var logglyEvent in events)
- {
- _serializer.Serialize(writer, logglyEvent);
- writer.WriteLine();
- }
- }
-
- static bool IsUnlockedAtLength(string file, long maxLen)
- {
- try
- {
- using (var fileStream = IOFile.Open(file, FileMode.OpenOrCreate, FileAccess.ReadWrite, FileShare.Read))
- {
- return fileStream.Length <= maxLen;
- }
- }
-#if HRESULTS
- catch (IOException ex)
- {
- var errorCode = Marshal.GetHRForException(ex) & ((1 << 16) - 1);
- if (errorCode != 32 && errorCode != 33)
- {
- SelfLog.WriteLine("Unexpected I/O exception while testing locked status of {0}: {1}", file, ex);
- }
- }
-#else
- catch (IOException)
- {
- // Where no HRESULT is available, assume IOExceptions indicate a locked file
- }
-#endif
- catch (Exception ex)
- {
- SelfLog.WriteLine("Unexpected exception while testing locked status of {0}: {1}", file, ex);
- }
-
- return false;
- }
-
- static void WriteBookmark(StreamWriter bookmarkStreamWriter, long nextLineBeginsAtOffset, string currentFile)
- {
- bookmarkStreamWriter.WriteLine("{0}:::{1}", nextLineBeginsAtOffset, currentFile);
- bookmarkStreamWriter.Flush();
- }
-
- // It would be ideal to chomp whitespace here, but not required.
- bool TryReadLine(Stream current, ref long nextStart, out string nextLine)
- {
- var includesBom = nextStart == 0;
-
- if (current.Length <= nextStart)
- {
- nextLine = null;
- return false;
- }
-
- current.Position = nextStart;
-
- // Important not to dispose this StreamReader as the stream must remain open.
- var reader = new StreamReader(current, _encoding, false, 128);
- nextLine = reader.ReadLine();
-
- if (nextLine == null)
- return false;
-
- nextStart += _encoding.GetByteCount(nextLine) + _encoding.GetByteCount(Environment.NewLine);
- if (includesBom)
- nextStart += 3;
-
- return true;
- }
-
- static void TryReadBookmark(Stream bookmark, StreamReader bookmarkStreamReader, out long nextLineBeginsAtOffset, out string currentFile)
- {
- nextLineBeginsAtOffset = 0;
- currentFile = null;
-
- if (bookmark.Length != 0)
- {
- bookmarkStreamReader.BaseStream.Position = 0;
- var current = bookmarkStreamReader.ReadLine();
-
- if (current != null)
- {
- bookmark.Position = 0;
- var parts = current.Split(new[] { ":::" }, StringSplitOptions.RemoveEmptyEntries);
- if (parts.Length == 2)
- {
- nextLineBeginsAtOffset = long.Parse(parts[0]);
- currentFile = parts[1];
- }
- }
- }
- }
-
- string[] GetFileSet()
- {
- return Directory.GetFiles(_logFolder, _candidateSearchPath)
- .OrderBy(n => n)
- .ToArray();
- }
}
}
+
+
diff --git a/src/Serilog.Sinks.Loggly/Sinks/Loggly/IBookmarkProvider.cs b/src/Serilog.Sinks.Loggly/Sinks/Loggly/IBookmarkProvider.cs
new file mode 100644
index 0000000..23c20f0
--- /dev/null
+++ b/src/Serilog.Sinks.Loggly/Sinks/Loggly/IBookmarkProvider.cs
@@ -0,0 +1,12 @@
+using System;
+using Serilog.Sinks.Loggly.Durable;
+
+namespace Serilog.Sinks.Loggly
+{
+ interface IBookmarkProvider : IDisposable
+ {
+ FileSetPosition GetCurrentBookmarkPosition();
+
+ void UpdateBookmark(FileSetPosition newBookmark);
+ }
+}
\ No newline at end of file
diff --git a/src/Serilog.Sinks.Loggly/Sinks/Loggly/IFileSystemAdapter.cs b/src/Serilog.Sinks.Loggly/Sinks/Loggly/IFileSystemAdapter.cs
new file mode 100644
index 0000000..9ec8865
--- /dev/null
+++ b/src/Serilog.Sinks.Loggly/Sinks/Loggly/IFileSystemAdapter.cs
@@ -0,0 +1,17 @@
+using System.IO;
+
+namespace Serilog.Sinks.Loggly
+{
+ interface IFileSystemAdapter
+ {
+ //file ops
+ bool Exists(string filePath);
+ void DeleteFile(string filePath);
+ Stream Open(string bookmarkFilename, FileMode openOrCreate, FileAccess readWrite, FileShare read);
+ void WriteAllBytes(string filePath, byte[] bytesToWrite);
+
+ //directory ops
+ string[] GetFiles(string folder, string searchTerms);
+
+ }
+}
\ No newline at end of file
diff --git a/src/Serilog.Sinks.Loggly/Sinks/Loggly/InvalidPayloadLogger.cs b/src/Serilog.Sinks.Loggly/Sinks/Loggly/InvalidPayloadLogger.cs
new file mode 100644
index 0000000..bd80f4e
--- /dev/null
+++ b/src/Serilog.Sinks.Loggly/Sinks/Loggly/InvalidPayloadLogger.cs
@@ -0,0 +1,141 @@
+// Serilog.Sinks.Seq Copyright 2016 Serilog Contributors
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+using System;
+using System.IO;
+using System.Linq;
+using System.Text;
+using Serilog.Debugging;
+using System.Collections.Generic;
+using Loggly;
+using Newtonsoft.Json;
+
+namespace Serilog.Sinks.Loggly
+{
+ class InvalidPayloadLogger
+ {
+ const string InvalidPayloadFilePrefix = "invalid-";
+ readonly string _logFolder;
+ readonly long? _retainedInvalidPayloadsLimitBytes;
+ readonly Encoding _encoding;
+ readonly IFileSystemAdapter _fileSystemAdapter;
+ readonly JsonSerializer _serializer = JsonSerializer.Create();
+
+
+ public InvalidPayloadLogger(string logFolder, Encoding encoding, IFileSystemAdapter fileSystemAdapter, long? retainedInvalidPayloadsLimitBytes = null)
+ {
+ _logFolder = logFolder;
+ _encoding = encoding;
+ _fileSystemAdapter = fileSystemAdapter;
+ _retainedInvalidPayloadsLimitBytes = retainedInvalidPayloadsLimitBytes;
+ }
+
+ public void DumpInvalidPayload(LogResponse result, IEnumerable payload)
+ {
+ var invalidPayloadFilename = $"{InvalidPayloadFilePrefix}{DateTime.UtcNow.ToString("yyyyMMddHHmmss")}-{result.Code}-{Guid.NewGuid():n}.json";
+ var invalidPayloadFile = Path.Combine(_logFolder, invalidPayloadFilename);
+ SelfLog.WriteLine("HTTP shipping failed with {0}: {1}; dumping payload to {2}", result.Code, result.Message, invalidPayloadFile);
+
+ byte[] bytesToWrite = SerializeLogglyEventsToBytes(payload);
+
+ if (_retainedInvalidPayloadsLimitBytes.HasValue)
+ {
+ CleanUpInvalidPayloadFiles(_retainedInvalidPayloadsLimitBytes.Value - bytesToWrite.Length, _logFolder);
+ }
+
+            //Adding this to persist WHY the invalid payload existed
+ // the library is not using these files to resend data, so format is not important.
+ var errorBytes = _encoding.GetBytes(string.Format(@"Error info: HTTP shipping failed with {0}: {1}", result.Code, result.Message));
+ _fileSystemAdapter.WriteAllBytes(invalidPayloadFile, bytesToWrite.Concat(errorBytes).ToArray());
+ }
+
+ byte[] SerializeLogglyEventsToBytes(IEnumerable events)
+ {
+ SelfLog.WriteLine("Newline to use: {0}", Environment.NewLine.Length == 2 ? "rn":"n");
+ using (StringWriter writer = new StringWriter() { NewLine = Environment.NewLine })
+ {
+ foreach (var logglyEvent in events)
+ {
+ _serializer.Serialize(writer, logglyEvent);
+ writer.Write(Environment.NewLine);
+ }
+
+ SelfLog.WriteLine("serialized events: {0}", writer.ToString());
+
+ byte[] bytes = _encoding.GetBytes(writer.ToString());
+ SelfLog.WriteLine("encoded events ending: {0} {1}", bytes[bytes.Length-2], bytes[bytes.Length-1]);
+ return _encoding.GetBytes(writer.ToString());
+ }
+ }
+
+ static void CleanUpInvalidPayloadFiles(long maxNumberOfBytesToRetain, string logFolder)
+ {
+ try
+ {
+ var candiateFiles = Directory.EnumerateFiles(logFolder, $"{InvalidPayloadFilePrefix}*.json");
+ DeleteOldFiles(maxNumberOfBytesToRetain, candiateFiles);
+ }
+ catch (Exception ex)
+ {
+ SelfLog.WriteLine("Exception thrown while trying to clean up invalid payload files: {0}", ex);
+ }
+ }
+
+
+
+ ///
+ /// Deletes oldest files in the group of invalid-* files.
+    /// Existing files are ordered (from most recent to oldest) and file size is accumulated. All files
+    /// whose cumulative byte count passes the defined limit are removed. Limit is therefore bytes
+ /// and not number of files
+ ///
+ ///
+ ///
+ static void DeleteOldFiles(long maxNumberOfBytesToRetain, IEnumerable files)
+ {
+ var orderedFileInfos = from candidateFile in files
+ let candidateFileInfo = new FileInfo(candidateFile)
+ orderby candidateFileInfo.LastAccessTimeUtc descending
+ select candidateFileInfo;
+
+ var invalidPayloadFilesToDelete = WhereCumulativeSizeGreaterThan(orderedFileInfos, maxNumberOfBytesToRetain);
+
+ foreach (var fileToDelete in invalidPayloadFilesToDelete)
+ {
+ try
+ {
+ fileToDelete.Delete();
+ }
+ catch (Exception ex)
+ {
+ SelfLog.WriteLine("Exception '{0}' thrown while trying to delete file {1}", ex.Message, fileToDelete.FullName);
+ }
+ }
+ }
+
+ static IEnumerable WhereCumulativeSizeGreaterThan(IEnumerable files, long maxCumulativeSize)
+ {
+ long cumulative = 0;
+ foreach (var file in files)
+ {
+ cumulative += file.Length;
+ if (cumulative > maxCumulativeSize)
+ {
+ yield return file;
+ }
+ }
+ }
+ }
+}
+
diff --git a/src/Serilog.Sinks.Loggly/Sinks/Loggly/LogEventConverter.cs b/src/Serilog.Sinks.Loggly/Sinks/Loggly/LogEventConverter.cs
index 1bdf406..1fe2ea2 100644
--- a/src/Serilog.Sinks.Loggly/Sinks/Loggly/LogEventConverter.cs
+++ b/src/Serilog.Sinks.Loggly/Sinks/Loggly/LogEventConverter.cs
@@ -1,5 +1,4 @@
using System;
-using System.Collections;
using System.Collections.Generic;
using System.Linq;
using Loggly;
diff --git a/test/Serilog.Sinks.Loggly.Tests/ExceptionSerialization.cs b/test/Serilog.Sinks.Loggly.Tests/ExceptionSerialization.cs
index d65c432..7e20b6a 100644
--- a/test/Serilog.Sinks.Loggly.Tests/ExceptionSerialization.cs
+++ b/test/Serilog.Sinks.Loggly.Tests/ExceptionSerialization.cs
@@ -1,10 +1,7 @@
using System;
-using System.Collections.Generic;
using System.IO;
using System.Linq;
-using System.Text;
using System.Threading;
-using System.Threading.Tasks;
using Loggly;
using Loggly.Config;
using Loggly.Transports.Syslog;
diff --git a/test/Serilog.Sinks.Loggly.Tests/Serilog.Sinks.Loggly.Tests.csproj b/test/Serilog.Sinks.Loggly.Tests/Serilog.Sinks.Loggly.Tests.csproj
index 1973172..1b67e3f 100644
--- a/test/Serilog.Sinks.Loggly.Tests/Serilog.Sinks.Loggly.Tests.csproj
+++ b/test/Serilog.Sinks.Loggly.Tests/Serilog.Sinks.Loggly.Tests.csproj
@@ -1,7 +1,7 @@
- net451;netcoreapp1.0
+ net452;netcoreapp1.0
Serilog.Sinks.Loggly.Tests
../../assets/Serilog.snk
true
@@ -12,14 +12,30 @@
1.0.4
+
+
+
+
+
+
+
+
+
+
+
+
+
+
-
+
+
+
-
+
@@ -27,4 +43,14 @@
+
+
+ ..\..\..\..\Program Files (x86)\Reference Assemblies\Microsoft\Framework\.NETFramework\v4.5\System.Activities.dll
+
+
+
+
+
+
+
diff --git a/test/Serilog.Sinks.Loggly.Tests/Sinks/Loggly/Durable/FileSetPositionTests.cs b/test/Serilog.Sinks.Loggly.Tests/Sinks/Loggly/Durable/FileSetPositionTests.cs
new file mode 100644
index 0000000..ba37696
--- /dev/null
+++ b/test/Serilog.Sinks.Loggly.Tests/Sinks/Loggly/Durable/FileSetPositionTests.cs
@@ -0,0 +1,22 @@
+using Serilog.Sinks.Loggly.Durable;
+using Xunit;
+
+namespace Serilog.Sinks.Loggly.Tests.Sinks.Loggly.Durable
+{
+ public class FileSetPositionTests
+ {
+ [Fact]
+ public void CanCreateBookmarkInstance()
+ {
+ var marker = new FileSetPosition(0, @"C:\test");
+ Assert.NotNull(marker);
+ }
+
+ [Fact]
+ public void CanCreateEmptyBookmark()
+ {
+ var marker = FileSetPosition.None;
+ Assert.Null(marker);
+ }
+ }
+}
diff --git a/test/Serilog.Sinks.Loggly.Tests/Sinks/Loggly/Expectations/expectedInvalidPayloadFileN.json b/test/Serilog.Sinks.Loggly.Tests/Sinks/Loggly/Expectations/expectedInvalidPayloadFileN.json
new file mode 100644
index 0000000..c4aec5e
--- /dev/null
+++ b/test/Serilog.Sinks.Loggly.Tests/Sinks/Loggly/Expectations/expectedInvalidPayloadFileN.json
@@ -0,0 +1,2 @@
+{"Timestamp":"2017-09-27T00:00:00+00:00","Syslog":{"MessageId":0,"Level":6},"Data":{},"Options":{"Tags":[]}}
+Error info: HTTP shipping failed with Error: 502 Bad Request
\ No newline at end of file
diff --git a/test/Serilog.Sinks.Loggly.Tests/Sinks/Loggly/Expectations/expectedInvalidPayloadFileRN.json b/test/Serilog.Sinks.Loggly.Tests/Sinks/Loggly/Expectations/expectedInvalidPayloadFileRN.json
new file mode 100644
index 0000000..c4aec5e
--- /dev/null
+++ b/test/Serilog.Sinks.Loggly.Tests/Sinks/Loggly/Expectations/expectedInvalidPayloadFileRN.json
@@ -0,0 +1,2 @@
+{"Timestamp":"2017-09-27T00:00:00+00:00","Syslog":{"MessageId":0,"Level":6},"Data":{},"Options":{"Tags":[]}}
+Error info: HTTP shipping failed with Error: 502 Bad Request
\ No newline at end of file
diff --git a/test/Serilog.Sinks.Loggly.Tests/Sinks/Loggly/FileBasedBookmarkProviderTests.cs b/test/Serilog.Sinks.Loggly.Tests/Sinks/Loggly/FileBasedBookmarkProviderTests.cs
new file mode 100644
index 0000000..5af8ed7
--- /dev/null
+++ b/test/Serilog.Sinks.Loggly.Tests/Sinks/Loggly/FileBasedBookmarkProviderTests.cs
@@ -0,0 +1,193 @@
+using System.IO;
+using System.Text;
+using NSubstitute;
+using Serilog.Sinks.Loggly.Durable;
+using Xunit;
+
+namespace Serilog.Sinks.Loggly.Tests.Sinks.Loggly
+{
+ public class FileBasedBookmarkProviderTests
+ {
+ static Encoding Encoder = new UTF8Encoding(false);
+ static string BaseBufferFileName = @"c:\test\buffer";
+ const string ExpectedBufferFilePath = @"C:\test\buffer-20170926.json";
+ const long ExpectedBytePosition = 123;
+
+ public class InstanceTests
+ {
+ readonly IBookmarkProvider _sut = new FileBasedBookmarkProvider(BaseBufferFileName, Substitute.For(), Encoder);
+
+ [Fact]
+ public void InstanceIsValid() => Assert.NotNull(_sut);
+ }
+
+ public class ReadBookmarkTests
+ {
+ public class ValidBookmarkFileOnDisk
+ {
+ readonly FileSetPosition _sut;
+ readonly FileSetPosition _reread;
+
+ public ValidBookmarkFileOnDisk()
+ {
+ var fileSystemAdapter = Substitute.For();
+ fileSystemAdapter
+ .Open(Arg.Any(), Arg.Any(), Arg.Any(), Arg.Any())
+ .Returns(new MemoryStream(
+ Encoding.UTF8.GetBytes($"{ExpectedBytePosition}:::{ExpectedBufferFilePath}\r\n")));
+
+ var provider = new FileBasedBookmarkProvider(BaseBufferFileName, fileSystemAdapter, Encoder);
+
+ _sut = provider.GetCurrentBookmarkPosition();
+ _reread = provider.GetCurrentBookmarkPosition();
+ }
+
+ [Fact]
+ public void ShouldHaveValidBookmark() => Assert.NotNull(_sut);
+
+ [Fact]
+ public void BookmarkPostionShouldBeCorrect() => Assert.Equal(ExpectedBytePosition, _sut.NextLineStart);
+
+ [Fact]
+ public void BookmarkBufferFilePathShouldBeCorrect() =>
+ Assert.Equal(ExpectedBufferFilePath, _sut.File);
+
+ [Fact]
+ public void RereadingtheBookmarkGivesSameValue() => Assert.Equal(_sut.NextLineStart, _reread.NextLineStart);
+
+ }
+
+ ///
+ /// This case represents some observed behaviour in the bookmark file. Since writes are normally done from the start of the file,
+ /// some garbage may remain in the file if the new written bytes are shorter than what existed.
+ ///
+ public class StrangeBookmarkFileOnDisk
+ {
+ readonly FileSetPosition _sut;
+
+ public StrangeBookmarkFileOnDisk()
+ {
+ var fileSystemAdapter = Substitute.For();
+ fileSystemAdapter
+ .Open(Arg.Any(), Arg.Any(), Arg.Any(), Arg.Any())
+ .Returns(new MemoryStream(Encoding.UTF8.GetBytes(
+ $"{ExpectedBytePosition}:::{ExpectedBufferFilePath}\r\nsome invalid stuff in the file\n\n\n")));
+
+ var provider = new FileBasedBookmarkProvider(BaseBufferFileName, fileSystemAdapter, Encoder);
+
+ _sut = provider.GetCurrentBookmarkPosition();
+ }
+
+ [Fact]
+ public void ShouldHaveValidBookmark() => Assert.NotNull(_sut);
+
+ [Fact]
+ public void BookmarkPostionShouldBeCorrect() => Assert.Equal(ExpectedBytePosition, _sut.NextLineStart);
+
+ [Fact]
+ public void BookmarkBufferFilePathShouldBeCorrect() =>
+ Assert.Equal(ExpectedBufferFilePath, _sut.File);
+
+ }
+
+ ///
+        /// A nonexistent bookmark file will create a new, empty one, and the returned stream will be empty when trying to read
+ ///
+ public class InexistentBookmarkFileOnDisk
+ {
+ FileSetPosition _sut;
+
+ public InexistentBookmarkFileOnDisk()
+ {
+ var fileSystemAdapter = Substitute.For();
+ fileSystemAdapter
+ .Open(Arg.Any(), Arg.Any(), Arg.Any(), Arg.Any())
+ .Returns(new MemoryStream(new byte[] { }));
+
+ var provider = new FileBasedBookmarkProvider(BaseBufferFileName, fileSystemAdapter, Encoding.UTF8);
+
+ _sut = provider.GetCurrentBookmarkPosition();
+ }
+
+ [Fact]
+ public void BookmarkShouldBeNull() => Assert.Null(_sut);
+ }
+ }
+
+ public class WriteBookmarkTests
+ {
+ public class WriteToAnEmptyBookmarkStream
+ {
+ readonly MemoryStream _sut = new MemoryStream(new byte[128]); //make it big enough to take in new content, as a file stream would
+ readonly string _expectedFileContent = $"{ExpectedBytePosition}:::{ExpectedBufferFilePath}\r\n".PadRight(128, '\0');
+ readonly byte[] _expectedBytes;
+ readonly byte[] _actualBytes;
+
+ public WriteToAnEmptyBookmarkStream()
+ {
+ var fileSystemAdapter = Substitute.For();
+ fileSystemAdapter
+ .Open(Arg.Any(), Arg.Any(), Arg.Any(), Arg.Any())
+ .Returns(_sut);
+
+ var provider = new FileBasedBookmarkProvider(BaseBufferFileName, fileSystemAdapter, Encoder);
+ provider.UpdateBookmark(new FileSetPosition(ExpectedBytePosition, ExpectedBufferFilePath));
+
+ _expectedBytes = Encoder.GetBytes(_expectedFileContent);
+ _actualBytes = _sut.ToArray();
+ }
+
+ //compare on bytes and string - if Encoding.UTF8 is used, a BOM set of bytes may be added.
+ //it is therefore useful to use an encoder created by the UTF8Encoding constructor as in the container class
+
+ [Fact]
+ public void StreamShouldHaveBookmarkWritten() => Assert.Equal(_expectedBytes, _actualBytes);
+
+ [Fact]
+ public void StreamContentShouldConvertToExpectedText() => Assert.Equal(_expectedFileContent, Encoder.GetString(_sut.ToArray()));
+
+ }
+
+ public class WriteToAnBookmarkStreamWithExistingContent
+ {
+ readonly MemoryStream _sut = new MemoryStream(new byte[128]); //make it big enough to take in new content, as a file stream would
+
+            //contrary to the empty files, there will be some "garbage" in the expected stream, since we are not clearing it
+            //but just writing on top of it. Line endings determine the truly significant info in the file
+ //the following reflects this:
+ readonly string _expectedFileContent = $"{ExpectedBytePosition}:::{ExpectedBufferFilePath}\r\nn{ExpectedBufferFilePath}\r\n".PadRight(128, '\0');
+ readonly byte[] _expectedBytes;
+ readonly byte[] _actualBytes;
+
+ public WriteToAnBookmarkStreamWithExistingContent()
+ {
+ //simulate a stream with existing content that may be larger or shorter than what is written.
+                // newline detection ends up being important in this case
+ var initialContent = Encoder.GetBytes($"100000:::{ExpectedBufferFilePath}{ExpectedBufferFilePath}\r\n");
+ _sut.Write(initialContent, 0, initialContent.Length);
+
+ var fileSystemAdapter = Substitute.For();
+ fileSystemAdapter
+ .Open(Arg.Any(), Arg.Any(), Arg.Any(), Arg.Any())
+ .Returns(_sut);
+
+ var provider = new FileBasedBookmarkProvider(BaseBufferFileName, fileSystemAdapter, Encoder);
+ provider.UpdateBookmark(new FileSetPosition(ExpectedBytePosition, ExpectedBufferFilePath));
+
+ _expectedBytes = Encoder.GetBytes(_expectedFileContent);
+ _actualBytes = _sut.ToArray();
+ }
+
+ //compare on bytes and string - if Encoding.UTF8 is used, a BOM set of bytes may be added.
+ //it is therefore useful to use an encoder created by the UTF8Encoding constructor as in the container class
+
+ [Fact]
+ public void StreamShouldHaveBookmarkWritten() => Assert.Equal(_expectedBytes, _actualBytes);
+
+ [Fact]
+ public void StreamContentShouldConvertToExpectedText() => Assert.Equal(_expectedFileContent, Encoder.GetString(_sut.ToArray()));
+
+ }
+ }
+ }
+}
\ No newline at end of file
diff --git a/test/Serilog.Sinks.Loggly.Tests/Sinks/Loggly/FileBufferDataProviderTests.cs b/test/Serilog.Sinks.Loggly.Tests/Sinks/Loggly/FileBufferDataProviderTests.cs
new file mode 100644
index 0000000..60f1e45
--- /dev/null
+++ b/test/Serilog.Sinks.Loggly.Tests/Sinks/Loggly/FileBufferDataProviderTests.cs
@@ -0,0 +1,733 @@
+using System.Collections.Generic;
+using System.IO;
+using System.Linq;
+using System.Reflection;
+using System.Text;
+using Loggly;
+using Xunit;
+using NSubstitute;
+using System;
+using Serilog.Sinks.Loggly.Durable;
+
+namespace Serilog.Sinks.Loggly.Tests.Sinks.Loggly
+{
+ public class FileBufferDataProviderTests
+ {
+ static readonly string ResourceNamespace = $"Serilog.Sinks.Loggly.Tests.Sinks.Loggly";
+ static readonly string BaseBufferFileName = @"c:\test\buffer";
+ static readonly Encoding Utf8Encoder = new UTF8Encoding(true);
+ static readonly string Bufferfile = @"C:\test\buffer001.json"; //any valid name here will suffice
+ static readonly int BatchLimit = 10;
+ static readonly int EventSizeLimit = 1024 * 1024;
+
+ public class InstanceCreationTests
+ {
+ [Fact]
+ public void CanCreateInstanceOfFileBufferDataProvider()
+ {
+ var mockFileSystemAdapter = Substitute.For();
+ var bookmarkProvider = Substitute.For();
+
+ var instance = new FileBufferDataProvider(BaseBufferFileName, mockFileSystemAdapter, bookmarkProvider, Utf8Encoder, 10, 1024*1024, null);
+
+ Assert.NotNull(instance);
+ }
+ }
+
+ ///
+ /// In this scenario, there is neither a bufferX.json file nor a bookmark.
+ ///
+ public class EmptyBufferAndBookmarkScenario
+ {
+ readonly IEnumerable _sut;
+
+ public EmptyBufferAndBookmarkScenario()
+ {
+ var bookmarkProvider = Substitute.For();
+ bookmarkProvider.GetCurrentBookmarkPosition().Returns(null as FileSetPosition);
+
+ var mockFileSystem = Substitute.For();
+ mockFileSystem.GetFiles(Arg.Any(), Arg.Any()).Returns(new string[] { });
+
+ var provider = new FileBufferDataProvider(BaseBufferFileName, mockFileSystem, bookmarkProvider, Utf8Encoder, 10, 1024 * 1024, null);
+ _sut = provider.GetNextBatchOfEvents();
+ }
+
+ [Fact]
+ public void EventListShouldBeEmpty() => Assert.Empty(_sut);
+ }
+
+ ///
+ /// In this scenario, there is no bufferX.json file but there is a bookmark file. The bookmark, though,
+ /// points to a file buffer file that no longer exists
+ ///
+ public class EmptyBufferAndOutdatedBookmarkScenario
+ {
+ readonly IEnumerable _sut;
+
+ public EmptyBufferAndOutdatedBookmarkScenario()
+ {
+ var bookmarkProvider = Substitute.For();
+ bookmarkProvider.GetCurrentBookmarkPosition().Returns(new FileSetPosition(0, @"C:\test\existent.json"));
+
+ var mockFileSystem = Substitute.For();
+ mockFileSystem.GetFiles(Arg.Any(), Arg.Any()).Returns(new string[] { });
+
+ var provider = new FileBufferDataProvider(BaseBufferFileName, mockFileSystem, bookmarkProvider, Utf8Encoder, 10, 1024 * 1024, null);
+ _sut = provider.GetNextBatchOfEvents();
+ }
+
+ [Fact]
+ public void EventListShouldBeEmpty() => Assert.Empty(_sut);
+ }
+
+ ///
+ /// In this scenario, there is a single Buffer.json file but no bookmark file.
+ /// Results are the same as the SingleBufferFileAndSyncedBookmarkScenario as the
+ /// buffer will be initialized to the first buffer file
+ ///
+ public class SingleBufferFileAndNoBookmarkScenario
+ {
+ IEnumerable _sut;
+ IEnumerable _reRequestBatch;
+
+ public SingleBufferFileAndNoBookmarkScenario()
+ {
+ var bookmarkProvider = Substitute.For();
+ bookmarkProvider.GetCurrentBookmarkPosition().Returns(null as FileSetPosition);
+ IFileSystemAdapter fsAdapter = CreateFileSystemAdapter(Bufferfile);
+
+ var provider = new FileBufferDataProvider(
+ BaseBufferFileName,
+ fsAdapter,
+ bookmarkProvider,
+ Utf8Encoder,
+ BatchLimit,
+ EventSizeLimit,
+ null);
+ _sut = provider.GetNextBatchOfEvents();
+
+ _reRequestBatch = provider.GetNextBatchOfEvents();
+ }
+
+ IFileSystemAdapter CreateFileSystemAdapter(string bufferfile)
+ {
+ var fileSystemAdapter = Substitute.For();
+
+ //get files should return the single buffer file path in this scenario
+ fileSystemAdapter.GetFiles(Arg.Any(), Arg.Any())
+ .Returns(new[] { bufferfile });
+
+ //when we ask for the buffer file, simulate that it exists
+ fileSystemAdapter.Exists(bufferfile).Returns(true);
+
+ //Open() should open a stream that can return two events
+ fileSystemAdapter.Open(bufferfile, Arg.Any(), Arg.Any(),
+ Arg.Any())
+ .Returns(GetSingleEventLineStreamFromResources());
+
+ return fileSystemAdapter;
+ }
+
+ [Fact]
+ public void EventListShouldBeNotBeEmpty() => Assert.NotEmpty(_sut);
+
+ [Fact]
+ public void ShouldReadBatchOfEvents() => Assert.Single(_sut);
+
+ [Fact]
+ public void ReRequestingABatchShouldReturnSameUnprocessedEventsInQueue() =>
+ Assert.Equal(_sut, _reRequestBatch);
+ }
+
+
+ ///
+ /// In this scenario, there is a single Buffer.json file but a bookmark file pointing
+ /// to the start of the buffer.
+ ///
+ public class SingleBufferFileAndSyncedBookmarkScenario
+ {
+ readonly IEnumerable _sut;
+ readonly IEnumerable _reRequestBatch;
+
+ public SingleBufferFileAndSyncedBookmarkScenario()
+ {
+ var bookmarkProvider = Substitute.For();
+ bookmarkProvider.GetCurrentBookmarkPosition().Returns(new FileSetPosition(0, Bufferfile));
+ IFileSystemAdapter fsAdapter = CreateFileSystemAdapter(Bufferfile);
+
+ var provider = new FileBufferDataProvider(
+ BaseBufferFileName,
+ fsAdapter,
+ bookmarkProvider,
+ Utf8Encoder,
+ BatchLimit,
+ EventSizeLimit,
+ null);
+ _sut = provider.GetNextBatchOfEvents();
+
+ _reRequestBatch = provider.GetNextBatchOfEvents();
+ }
+
+ IFileSystemAdapter CreateFileSystemAdapter(string bufferfile)
+ {
+ var fileSystemAdapter = Substitute.For();
+
+ //get files should return the single buffer file path in this scenario
+ fileSystemAdapter.GetFiles(Arg.Any(), Arg.Any())
+ .Returns(new string[] {bufferfile});
+
+ //when we ask for the buffer file, simulate that it exists
+ fileSystemAdapter.Exists(bufferfile).Returns(true);
+
+ //Open() should open a stream that can return two events
+ fileSystemAdapter.Open(bufferfile, Arg.Any(), Arg.Any(),
+ Arg.Any())
+ .Returns(GetSingleEventLineStreamFromResources());
+
+ return fileSystemAdapter;
+ }
+
+
+
+ [Fact]
+ public void EventListShouldBeNotBeEmpty() => Assert.NotEmpty(_sut);
+
+ [Fact]
+ public void ShouldReadBatchOfEvents() => Assert.Single(_sut);
+
+ [Fact]
+ public void ReRequestingABatchShouldReturnSameUnprocessedEventsInQueue() =>
+ Assert.Equal(_sut, _reRequestBatch);
+ }
+
+ ///
+ /// buffer file contains more events than a single batch. Rereading a batch should not progress without
+ /// marking the batch as processed
+ ///
+ public class LongerBufferFileAndSyncedBookmarkScenario
+ {
+ readonly IEnumerable _sut;
+ readonly IEnumerable _reRequestBatch;
+
+ public LongerBufferFileAndSyncedBookmarkScenario()
+ {
+ var bookmarkProvider = Substitute.For();
+ bookmarkProvider.GetCurrentBookmarkPosition().Returns(new FileSetPosition(0, Bufferfile));
+ IFileSystemAdapter fsAdapter = CreateFileSystemAdapter(Bufferfile);
+
+ var provider = new FileBufferDataProvider(
+ BaseBufferFileName,
+ fsAdapter,
+ bookmarkProvider,
+ Utf8Encoder,
+ BatchLimit,
+ EventSizeLimit,
+ null);
+ _sut = provider.GetNextBatchOfEvents();
+
+ _reRequestBatch = provider.GetNextBatchOfEvents();
+ }
+
+ private IFileSystemAdapter CreateFileSystemAdapter(string bufferfile)
+ {
+ var fileSystemAdapter = Substitute.For();
+
+ //get files should return the single buffer file path in this scenario
+ fileSystemAdapter.GetFiles(Arg.Any(), Arg.Any())
+ .Returns(new string[] { bufferfile });
+
+ //when we ask for the buffer file, simulate that it exists
+ fileSystemAdapter.Exists(bufferfile).Returns(true);
+
+ //Open() should open a stream that can return two events
+ fileSystemAdapter.Open(bufferfile, Arg.Any(), Arg.Any(),
+ Arg.Any())
+ .Returns(Get20LineStreamFromResources());
+
+ return fileSystemAdapter;
+ }
+
+ [Fact]
+ public void EventListShouldBeNotBeEmpty() => Assert.NotEmpty(_sut);
+
+ [Fact]
+ public void ShouldReadBatchOfEventsLimitedToBatchCount() => Assert.Equal(10, _sut.Count());
+
+ [Fact]
+ public void ReRequestingABatchShouldReturnSameUnprocessedEventsInQueue() =>
+ Assert.Equal(_sut, _reRequestBatch);
+ }
+
+ /// <summary>
+ /// Gets two batches, having marked the first as processed to move through the buffer.
+ /// After reading the second batch, there should no longer be anything in the buffer, so
+ /// the last batch should be empty.
+ /// </summary>
+ public class AdvanceThroughBufferScenario
+ {
+ readonly IEnumerable<LogglyEvent> _firstBatchRead;
+ readonly IEnumerable<LogglyEvent> _reRequestBatch;
+ readonly IEnumerable<LogglyEvent> _lastBatch;
+
+ public AdvanceThroughBufferScenario()
+ {
+ var bookmarkProvider = Substitute.For<IBookmarkProvider>();
+ bookmarkProvider.GetCurrentBookmarkPosition().Returns(new FileSetPosition(0, Bufferfile));
+ IFileSystemAdapter fsAdapter = CreateFileSystemAdapter(Bufferfile);
+
+ var provider = new FileBufferDataProvider(
+ BaseBufferFileName,
+ fsAdapter,
+ bookmarkProvider,
+ Utf8Encoder,
+ BatchLimit,
+ EventSizeLimit,
+ null);
+
+ _firstBatchRead = provider.GetNextBatchOfEvents();
+ //after getting first batch, simulate moving forward
+ provider.MarkCurrentBatchAsProcessed();
+ //request next batch
+ _reRequestBatch = provider.GetNextBatchOfEvents();
+ //after getting second batch, simulate moving forward
+ provider.MarkCurrentBatchAsProcessed();
+ //should have no events available to read
+ _lastBatch = provider.GetNextBatchOfEvents();
+
+ }
+
+ IFileSystemAdapter CreateFileSystemAdapter(string bufferfile)
+ {
+ var fileSystemAdapter = Substitute.For<IFileSystemAdapter>();
+
+ //get files should return the single buffer file path in this scenario
+ fileSystemAdapter.GetFiles(Arg.Any<string>(), Arg.Any<string>())
+ .Returns(new[] { bufferfile });
+
+ //when we ask for the buffer file, simulate that it exists
+ fileSystemAdapter.Exists(bufferfile).Returns(true);
+
+ //Open() should open a stream that can return twenty events
+ fileSystemAdapter.Open(bufferfile, Arg.Any<FileMode>(), Arg.Any<FileAccess>(),
+ Arg.Any<FileShare>())
+ .Returns(x => Get20LineStreamFromResources()); //use this form to re-execute the get stream for a new stream
+
+ return fileSystemAdapter;
+ }
+
+ [Fact]
+ public void EventListShouldNotBeEmpty() => Assert.NotEmpty(_firstBatchRead);
+
+ [Fact]
+ public void ShouldReadBatchOfEventsLimitedToBatchCount() => Assert.Equal(10, _firstBatchRead.Count());
+
+ [Fact]
+ public void ReRequestingABatchShouldReturnSameUnprocessedEventsInQueue() =>
+ Assert.NotEqual(_firstBatchRead, _reRequestBatch);
+
+ [Fact]
+ public void LastBatchShouldBeEmpty() => Assert.Empty(_lastBatch);
+ }
+
+ /// <summary>
+ /// In this scenario, the app may have been offline / disconnected for a few days (desktop clients, for instance)
+ /// and multiple files may have accumulated. We may or may not want data from all the days offline,
+ /// depending on retainedFileCountLimit's value, and should move the bookmark accordingly.
+ /// </summary>
+ public class MultipleBufferFilesScenario
+ {
+ /// <summary>
+ /// When there are fewer files than the limit, the bookmark should point to the initial file if the current bookmark is invalid.
+ /// </summary>
+ public class LessThanLimitNumberOfBufferFiles
+ {
+ const int NumberOfFilesToRetain = 5;
+ FileSetPosition _sut;
+
+ public LessThanLimitNumberOfBufferFiles()
+ {
+ var bookmarkProvider = Substitute.For<IBookmarkProvider>();
+ bookmarkProvider
+ .GetCurrentBookmarkPosition()
+ .Returns(new FileSetPosition(0, @"c:\unknown.json")); //should force fileset analysis
+ bookmarkProvider
+ .When(x => x.UpdateBookmark(Arg.Any<FileSetPosition>()))
+ .Do(x => _sut = x.ArgAt<FileSetPosition>(0));
+
+ IFileSystemAdapter fsAdapter = CreateFileSystemAdapter(Bufferfile);
+
+ var provider = new FileBufferDataProvider(
+ BaseBufferFileName,
+ fsAdapter,
+ bookmarkProvider,
+ Utf8Encoder,
+ BatchLimit,
+ EventSizeLimit,
+ NumberOfFilesToRetain);
+
+ provider.GetNextBatchOfEvents();
+ provider.MarkCurrentBatchAsProcessed();
+ }
+
+ IFileSystemAdapter CreateFileSystemAdapter(string bufferfile)
+ {
+ var fileSystemAdapter = Substitute.For<IFileSystemAdapter>();
+
+ //get files should return the single buffer file path in this scenario
+ fileSystemAdapter.GetFiles(Arg.Any<string>(), Arg.Any<string>())
+ .Returns(new[] {bufferfile});
+
+ //when we ask for the buffer file (and only that file), simulate that it exists; for others return false
+ fileSystemAdapter.Exists(Arg.Any<string>()).Returns(false);
+ fileSystemAdapter.Exists(bufferfile).Returns(true);
+
+ //Open() should open a stream that can return a single event
+ fileSystemAdapter.Open(bufferfile, Arg.Any<FileMode>(), Arg.Any<FileAccess>(),
+ Arg.Any<FileShare>())
+ .Returns(x =>
+ GetSingleEventLineStreamFromResources()); //use this form to re-execute the get stream for a new stream
+
+ return fileSystemAdapter;
+ }
+
+ /// <summary>
+ /// If we have an event, then the bookmark moved to the correct file. The bookmark is private to the provider,
+ /// and doesn't get updated in the file.
+ /// </summary>
+ [Fact]
+ public void ShouldReadFromFirstFileInSetOfExistingFiles() => Assert.Equal(Bufferfile, _sut.File);
+ }
+
+ public class EqualToLimitNumberOfBufferFiles
+ {
+ const int NumberOfFilesToRetain = 1;
+ FileSetPosition _sut;
+
+ public EqualToLimitNumberOfBufferFiles()
+ {
+ var bookmarkProvider = Substitute.For<IBookmarkProvider>();
+ bookmarkProvider
+ .GetCurrentBookmarkPosition()
+ .Returns(new FileSetPosition(0, @"c:\unknown.json")); //should force fileset analysis
+ bookmarkProvider
+ .When(x => x.UpdateBookmark(Arg.Any<FileSetPosition>()))
+ .Do(x => _sut = x.ArgAt<FileSetPosition>(0));
+
+ IFileSystemAdapter fsAdapter = CreateFileSystemAdapter(Bufferfile);
+
+ var provider = new FileBufferDataProvider(
+ BaseBufferFileName,
+ fsAdapter,
+ bookmarkProvider,
+ Utf8Encoder,
+ BatchLimit,
+ EventSizeLimit,
+ NumberOfFilesToRetain);
+
+ provider.GetNextBatchOfEvents();
+ provider.MarkCurrentBatchAsProcessed();
+ }
+
+ IFileSystemAdapter CreateFileSystemAdapter(string bufferfile)
+ {
+ var fileSystemAdapter = Substitute.For<IFileSystemAdapter>();
+
+ //get files should return the single buffer file path in this scenario
+ fileSystemAdapter.GetFiles(Arg.Any<string>(), Arg.Any<string>())
+ .Returns(new[] { bufferfile });
+
+ //when we ask for the buffer file (and only that file), simulate that it exists; for others return false
+ fileSystemAdapter.Exists(Arg.Any<string>()).Returns(false);
+ fileSystemAdapter.Exists(bufferfile).Returns(true);
+
+ //Open() should open a stream that can return a single event
+ fileSystemAdapter.Open(bufferfile, Arg.Any<FileMode>(), Arg.Any<FileAccess>(),
+ Arg.Any<FileShare>())
+ .Returns(x =>
+ GetSingleEventLineStreamFromResources()); //use this form to re-execute the get stream for a new stream
+
+ return fileSystemAdapter;
+ }
+
+ /// <summary>
+ /// If we have an event, then the bookmark moved to the correct file. The bookmark is private to the provider,
+ /// and doesn't get updated in the file.
+ /// </summary>
+ [Fact]
+ public void ShouldReadFromFirstFileInSetOfExistingFiles() => Assert.Equal(Bufferfile, _sut.File);
+ }
+
+ public class MoreThanTheLimitNumberOfBufferFiles
+ {
+ const string UnknownJsonFileName = @"c:\a\unknown.json"; // \a\ to guarantee ordering
+ const int NumberOfFilesToRetain = 1;
+ FileSetPosition _sut;
+
+ public MoreThanTheLimitNumberOfBufferFiles()
+ {
+ var bookmarkProvider = Substitute.For<IBookmarkProvider>();
+ bookmarkProvider
+ .GetCurrentBookmarkPosition()
+ .Returns(new FileSetPosition(0, UnknownJsonFileName)); //should force fileset analysis
+ bookmarkProvider
+ .When(x => x.UpdateBookmark(Arg.Any<FileSetPosition>()))
+ .Do(x => _sut = x.ArgAt<FileSetPosition>(0));
+
+ IFileSystemAdapter fsAdapter = CreateFileSystemAdapter(Bufferfile);
+
+ var provider = new FileBufferDataProvider(
+ BaseBufferFileName,
+ fsAdapter,
+ bookmarkProvider,
+ Utf8Encoder,
+ BatchLimit,
+ EventSizeLimit,
+ NumberOfFilesToRetain);
+
+ provider.GetNextBatchOfEvents();
+ provider.MarkCurrentBatchAsProcessed();
+
+
+ }
+
+ IFileSystemAdapter CreateFileSystemAdapter(string bufferfile)
+ {
+ var fileSystemAdapter = Substitute.For<IFileSystemAdapter>();
+
+ //get files should return the single buffer file path in this scenario at the end,
+ // and equal to the number of retained files; unknowns should be ignored
+ fileSystemAdapter.GetFiles(Arg.Any<string>(), Arg.Any<string>())
+ .Returns(new[] { UnknownJsonFileName, UnknownJsonFileName, bufferfile });
+
+ //when we ask for the buffer file (and only that file), simulate that it exists; for others return false
+ fileSystemAdapter.Exists(Arg.Any<string>()).Returns(false);
+ fileSystemAdapter.Exists(bufferfile).Returns(true);
+
+ //Open() should open a stream that can return a single event
+ fileSystemAdapter.Open(bufferfile, Arg.Any<FileMode>(), Arg.Any<FileAccess>(),
+ Arg.Any<FileShare>())
+ .Returns(x =>
+ GetSingleEventLineStreamFromResources()); //use this form to re-execute the get stream for a new stream
+
+ return fileSystemAdapter;
+ }
+
+ /// <summary>
+ /// If we have an event, then the bookmark moved to the correct file. The bookmark is private to the provider,
+ /// and doesn't get updated in the file.
+ /// </summary>
+ [Fact]
+ public void ShouldReadFromFirstFileInSetOfExistingFiles() => Assert.Equal(Bufferfile, _sut.File);
+ }
+ }
+
+ /// <summary>
+ /// This is typically called upon when we reach the end of a buffer file
+ /// and need to force the marker to move forward to the next file.
+ /// </summary>
+ public class MoveBookmarkForwardCorrectly
+ {
+ public class NoRetentionLimitOnBuffer
+ {
+ readonly List<string> _deletedFiles = new List<string>();
+ FileSetPosition _sut;
+
+ public NoRetentionLimitOnBuffer()
+ {
+ var bookmarkProvider = Substitute.For<IBookmarkProvider>();
+ bookmarkProvider
+ .When(x => x.UpdateBookmark(Arg.Any<FileSetPosition>()))
+ .Do(x => _sut = x.ArgAt<FileSetPosition>(0));
+
+ IFileSystemAdapter fsAdapter = CreateFileSystemAdapter();
+
+ var provider = new FileBufferDataProvider(
+ BaseBufferFileName,
+ fsAdapter,
+ bookmarkProvider,
+ Utf8Encoder,
+ BatchLimit,
+ EventSizeLimit,
+ null);
+
+ provider.MoveBookmarkForward();
+ }
+
+ IFileSystemAdapter CreateFileSystemAdapter()
+ {
+ var fileSystemAdapter = Substitute.For<IFileSystemAdapter>();
+
+ //get files should return the full buffer file set in this scenario
+ fileSystemAdapter.GetFiles(Arg.Any<string>(), Arg.Any<string>())
+ .Returns(new[] {@"c:\a\file001.json", @"c:\a\file002.json", @"c:\a\file003.json"});
+
+ //files exist
+ fileSystemAdapter.Exists(Arg.Any<string>()).Returns(true);
+ fileSystemAdapter
+ .When(x => x.DeleteFile(Arg.Any<string>()))
+ .Do(x => _deletedFiles.Add(x.ArgAt<string>(0)));
+
+ return fileSystemAdapter;
+ }
+
+ [Fact]
+ public void BookmarkShouldBeAtStartOfNextFile() => Assert.Equal(0, _sut.NextLineStart);
+
+ [Fact]
+ public void BookmarkShouldBeAtNextFile() => Assert.Equal(@"c:\a\file002.json", _sut.File);
+
+ [Fact]
+ public void PreviousFileShouldHaveBeenDeleted() =>
+ Assert.Equal(@"c:\a\file001.json", _deletedFiles.First());
+
+ [Fact]
+ public void SingleFileShouldHaveBeenDeleted() =>
+ Assert.Equal(1, _deletedFiles.Count);
+ }
+
+ public class RetentionLimitLessThanNumberOfBufferFiles
+ {
+ const int Limit = 2;
+ readonly List<string> _deletedFiles = new List<string>();
+ FileSetPosition _sut;
+
+ public RetentionLimitLessThanNumberOfBufferFiles()
+ {
+ var bookmarkProvider = Substitute.For<IBookmarkProvider>();
+ bookmarkProvider
+ .When(x => x.UpdateBookmark(Arg.Any<FileSetPosition>()))
+ .Do(x => _sut = x.ArgAt<FileSetPosition>(0));
+
+ IFileSystemAdapter fsAdapter = CreateFileSystemAdapter();
+
+ var provider = new FileBufferDataProvider(
+ BaseBufferFileName,
+ fsAdapter,
+ bookmarkProvider,
+ Utf8Encoder,
+ BatchLimit,
+ EventSizeLimit,
+ Limit);
+
+ provider.MoveBookmarkForward();
+ }
+
+ IFileSystemAdapter CreateFileSystemAdapter()
+ {
+ var fileSystemAdapter = Substitute.For<IFileSystemAdapter>();
+
+ //get files should return the full buffer file set in this scenario
+ fileSystemAdapter.GetFiles(Arg.Any<string>(), Arg.Any<string>())
+ .Returns(new[] { @"c:\a\file001.json", @"c:\a\file002.json", @"c:\a\file003.json", @"c:\a\file004.json" });
+
+ //files exist
+ fileSystemAdapter.Exists(Arg.Any<string>()).Returns(true);
+ fileSystemAdapter
+ .When(x => x.DeleteFile(Arg.Any<string>()))
+ .Do(x => _deletedFiles.Add(x.ArgAt<string>(0)));
+
+ return fileSystemAdapter;
+ }
+
+ [Fact]
+ public void BookmarkShouldBeAtStartOfNextFile() => Assert.Equal(0, _sut.NextLineStart);
+
+ [Fact]
+ public void BookmarkShouldBeAtNextFile() => Assert.Equal(@"c:\a\file003.json", _sut.File);
+
+ [Fact]
+ public void PreviousFilesShouldHaveBeenDeleted()
+ {
+ Assert.Equal(@"c:\a\file001.json", _deletedFiles[0]);
+ Assert.Equal(@"c:\a\file002.json", _deletedFiles[1]);
+ }
+
+ [Fact]
+ public void TwoFilesShouldHaveBeenDeleted() =>
+ Assert.Equal(2, _deletedFiles.Count);
+ }
+
+ public class RetentionLimitMoreThanNumberOfBufferFiles
+ {
+ const int Limit = 10;
+ readonly List<string> _deletedFiles = new List<string>();
+ FileSetPosition _sut;
+
+ public RetentionLimitMoreThanNumberOfBufferFiles()
+ {
+ var bookmarkProvider = Substitute.For<IBookmarkProvider>();
+ bookmarkProvider
+ .When(x => x.UpdateBookmark(Arg.Any<FileSetPosition>()))
+ .Do(x => _sut = x.ArgAt<FileSetPosition>(0));
+
+ IFileSystemAdapter fsAdapter = CreateFileSystemAdapter();
+
+ var provider = new FileBufferDataProvider(
+ BaseBufferFileName,
+ fsAdapter,
+ bookmarkProvider,
+ Utf8Encoder,
+ BatchLimit,
+ EventSizeLimit,
+ Limit);
+
+ provider.MoveBookmarkForward();
+ }
+
+ IFileSystemAdapter CreateFileSystemAdapter()
+ {
+ var fileSystemAdapter = Substitute.For<IFileSystemAdapter>();
+
+ //get files should return the full buffer file set in this scenario
+ fileSystemAdapter.GetFiles(Arg.Any<string>(), Arg.Any<string>())
+ .Returns(new[] { @"c:\a\file001.json", @"c:\a\file002.json", @"c:\a\file003.json", @"c:\a\file004.json" });
+
+ //files exist
+ fileSystemAdapter.Exists(Arg.Any<string>()).Returns(true);
+ fileSystemAdapter
+ .When(x => x.DeleteFile(Arg.Any<string>()))
+ .Do(x => _deletedFiles.Add(x.ArgAt<string>(0)));
+
+ return fileSystemAdapter;
+ }
+
+ [Fact]
+ public void BookmarkShouldBeAtStartOfNextFile() => Assert.Equal(0, _sut.NextLineStart);
+
+ [Fact]
+ public void BookmarkShouldBeAtNextFile() => Assert.Equal(@"c:\a\file001.json", _sut.File);
+
+ [Fact]
+ public void NoFilesShouldHaveBeenDeleted() => Assert.Empty(_deletedFiles);
+ }
+
+ }
+
+
+
+ static Stream Get20LineStreamFromResources()
+ {
+ var resourceNameSuffix = Environment.NewLine.Length == 2 ? "RN" : "N";
+ var resourceName = $"{ResourceNamespace}.SampleBuffers.20Events{resourceNameSuffix}.json";
+ return GetStreamFromResources(resourceName);
+ }
+
+ static Stream GetSingleEventLineStreamFromResources()
+ {
+ var resourceName = $"{ResourceNamespace}.SampleBuffers.singleEvent.json";
+ return GetStreamFromResources(resourceName);
+ }
+
+ static Stream GetStreamFromResources(string resourceName)
+ {
+ MemoryStream ms = new MemoryStream();
+ typeof(FileBufferDataProviderTests)
+ .GetTypeInfo()
+ .Assembly
+ .GetManifestResourceStream(resourceName)
+ ?.CopyTo(ms);
+ return ms;
+ }
+ }
+}
diff --git a/test/Serilog.Sinks.Loggly.Tests/Sinks/Loggly/InvalidPayloadLoggerTests.cs b/test/Serilog.Sinks.Loggly.Tests/Sinks/Loggly/InvalidPayloadLoggerTests.cs
new file mode 100644
index 0000000..e7e135a
--- /dev/null
+++ b/test/Serilog.Sinks.Loggly.Tests/Sinks/Loggly/InvalidPayloadLoggerTests.cs
@@ -0,0 +1,94 @@
+using Loggly;
+using System;
+using System.Collections.Generic;
+using System.IO;
+using System.Reflection;
+using System.Text;
+using System.Text.RegularExpressions;
+using NSubstitute;
+using Xunit;
+using Serilog.Debugging;
+
+namespace Serilog.Sinks.Loggly.Tests.Sinks.Loggly
+{
+ public class InvalidPayloadLoggerTests
+ {
+ const string LogFolder = @"C:\tests"; //any path here will do.
+ static readonly Encoding _utf8Encoder = new UTF8Encoding(true);
+
+ [Fact]
+ public void CanCreateInvalidShipmentLoggerInstance()
+ {
+ var instance = new InvalidPayloadLogger(LogFolder, _utf8Encoder, Substitute.For<IFileSystemAdapter>());
+ Assert.NotNull(instance);
+ }
+
+ public class InvalidPayloadPersistenceTests
+ {
+ string _writtenData;
+ string _generatedFilename;
+
+ public InvalidPayloadPersistenceTests()
+ {
+ var fsAdapter = Substitute.For<IFileSystemAdapter>();
+ fsAdapter.When(x => x.WriteAllBytes(Arg.Any<string>(), Arg.Any<byte[]>()))
+ .Do(x =>
+ {
+ _generatedFilename = x.ArgAt<string>(0);
+ _writtenData = _utf8Encoder.GetString(x.ArgAt<byte[]>(1));
+ });
+
+
+ //simulate the Post to Loggly failure with an error response and fixed payload.
+ var response = new LogResponse { Code = ResponseCode.Error, Message = "502 Bad Request" };
+ //just need an empty event for testing
+ var payload = new List<LogglyEvent>
+ {
+ new LogglyEvent
+ {
+ Data = new MessageData(),
+ Options = new EventOptions(),
+ Syslog = new SyslogHeader() {MessageId = 0},
+ Timestamp = DateTimeOffset.Parse("2017-09-27T00:00:00+00:00") //fixed date for comparison
+ }
+ };
+
+ var instance = new InvalidPayloadLogger(LogFolder, _utf8Encoder, fsAdapter);
+ //exercise the method
+ instance.DumpInvalidPayload(response, payload);
+ }
+
+ [Fact]
+ public void GeneratedFileHasEventsAndErrorInfoInContent()
+ {
+ using (var expectedFileTextStream = GetExpectedFileTextStream())
+ {
+#pragma warning disable SG0018 // Path traversal
+ using (var reader = new StreamReader(expectedFileTextStream, _utf8Encoder, true))
+#pragma warning restore SG0018 // Path traversal
+ {
+ var expectedFileTestString = reader.ReadToEnd();
+ Assert.Equal(expectedFileTestString, _writtenData);
+ }
+ }
+ }
+
+ [Fact]
+ public void GeneratedFileHasExpectedName()
+ {
+ var expectedFileNameRegex = new Regex(@"invalid-\d{14}-Error-[a-fA-F0-9]{32}.json$");
+ Assert.Matches(expectedFileNameRegex, _generatedFilename);
+ }
+ }
+
+ static Stream GetExpectedFileTextStream()
+ {
+ var resourceNameSuffix = Environment.NewLine.Length == 2 ? "RN" : "N";
+ var resourceName = $"Serilog.Sinks.Loggly.Tests.Sinks.Loggly.Expectations.expectedInvalidPayloadFile{resourceNameSuffix}.json";
+ return typeof(InvalidPayloadLoggerTests)
+ .GetTypeInfo()
+ .Assembly
+ .GetManifestResourceStream(resourceName);
+ }
+ }
+}
diff --git a/test/Serilog.Sinks.Loggly.Tests/Sinks/Loggly/SampleBuffers/20EventsN.json b/test/Serilog.Sinks.Loggly.Tests/Sinks/Loggly/SampleBuffers/20EventsN.json
new file mode 100644
index 0000000..f2c6075
--- /dev/null
+++ b/test/Serilog.Sinks.Loggly.Tests/Sinks/Loggly/SampleBuffers/20EventsN.json
@@ -0,0 +1,20 @@
+{"Timestamp": "2017-09-24T22:30:09.5025793+00:00","Syslog": {"MessageId": 0,"Level": 6},"Data": {"Message": "Finished handling request...","MachineName": "test-A1","Level": "Information"},"Options": {"Tags": []}}
+{"Timestamp": "2017-09-24T22:30:09.5025793+00:00","Syslog": {"MessageId": 0,"Level": 6},"Data": {"Message": "Finished handling request...","MachineName": "test-A1","Level": "Information"},"Options": {"Tags": []}}
+{"Timestamp": "2017-09-24T22:30:09.5025793+00:00","Syslog": {"MessageId": 0,"Level": 6},"Data": {"Message": "Finished handling request...","MachineName": "test-A1","Level": "Information"},"Options": {"Tags": []}}
+{"Timestamp": "2017-09-24T22:30:09.5025793+00:00","Syslog": {"MessageId": 0,"Level": 6},"Data": {"Message": "Finished handling request...","MachineName": "test-A1","Level": "Information"},"Options": {"Tags": []}}
+{"Timestamp": "2017-09-24T22:30:09.5025793+00:00","Syslog": {"MessageId": 0,"Level": 6},"Data": {"Message": "Finished handling request...","MachineName": "test-A1","Level": "Information"},"Options": {"Tags": []}}
+{"Timestamp": "2017-09-24T22:30:09.5025793+00:00","Syslog": {"MessageId": 0,"Level": 6},"Data": {"Message": "Finished handling request...","MachineName": "test-A1","Level": "Information"},"Options": {"Tags": []}}
+{"Timestamp": "2017-09-24T22:30:09.5025793+00:00","Syslog": {"MessageId": 0,"Level": 6},"Data": {"Message": "Finished handling request...","MachineName": "test-A1","Level": "Information"},"Options": {"Tags": []}}
+{"Timestamp": "2017-09-24T22:30:09.5025793+00:00","Syslog": {"MessageId": 0,"Level": 6},"Data": {"Message": "Finished handling request...","MachineName": "test-A1","Level": "Information"},"Options": {"Tags": []}}
+{"Timestamp": "2017-09-24T22:30:09.5025793+00:00","Syslog": {"MessageId": 0,"Level": 6},"Data": {"Message": "Finished handling request...","MachineName": "test-A1","Level": "Information"},"Options": {"Tags": []}}
+{"Timestamp": "2017-09-24T22:30:09.5025793+00:00","Syslog": {"MessageId": 0,"Level": 6},"Data": {"Message": "Finished handling request...","MachineName": "test-A1","Level": "Information"},"Options": {"Tags": []}}
+{"Timestamp": "2017-09-24T22:30:09.5025793+00:00","Syslog": {"MessageId": 0,"Level": 6},"Data": {"Message": "Finished handling request...","MachineName": "test-A1","Level": "Information"},"Options": {"Tags": []}}
+{"Timestamp": "2017-09-24T22:30:09.5025793+00:00","Syslog": {"MessageId": 0,"Level": 6},"Data": {"Message": "Finished handling request...","MachineName": "test-A1","Level": "Information"},"Options": {"Tags": []}}
+{"Timestamp": "2017-09-24T22:30:09.5025793+00:00","Syslog": {"MessageId": 0,"Level": 6},"Data": {"Message": "Finished handling request...","MachineName": "test-A1","Level": "Information"},"Options": {"Tags": []}}
+{"Timestamp": "2017-09-24T22:30:09.5025793+00:00","Syslog": {"MessageId": 0,"Level": 6},"Data": {"Message": "Finished handling request...","MachineName": "test-A1","Level": "Information"},"Options": {"Tags": []}}
+{"Timestamp": "2017-09-24T22:30:09.5025793+00:00","Syslog": {"MessageId": 0,"Level": 6},"Data": {"Message": "Finished handling request...","MachineName": "test-A1","Level": "Information"},"Options": {"Tags": []}}
+{"Timestamp": "2017-09-24T22:30:09.5025793+00:00","Syslog": {"MessageId": 0,"Level": 6},"Data": {"Message": "Finished handling request...","MachineName": "test-A1","Level": "Information"},"Options": {"Tags": []}}
+{"Timestamp": "2017-09-24T22:30:09.5025793+00:00","Syslog": {"MessageId": 0,"Level": 6},"Data": {"Message": "Finished handling request...","MachineName": "test-A1","Level": "Information"},"Options": {"Tags": []}}
+{"Timestamp": "2017-09-24T22:30:09.5025793+00:00","Syslog": {"MessageId": 0,"Level": 6},"Data": {"Message": "Finished handling request...","MachineName": "test-A1","Level": "Information"},"Options": {"Tags": []}}
+{"Timestamp": "2017-09-24T22:30:09.5025793+00:00","Syslog": {"MessageId": 0,"Level": 6},"Data": {"Message": "Finished handling request...","MachineName": "test-A1","Level": "Information"},"Options": {"Tags": []}}
+{"Timestamp": "2017-09-24T22:30:09.5025793+00:00","Syslog": {"MessageId": 0,"Level": 6},"Data": {"Message": "Finished handling request...","MachineName": "test-A1","Level": "Information"},"Options": {"Tags": []}}
\ No newline at end of file
diff --git a/test/Serilog.Sinks.Loggly.Tests/Sinks/Loggly/SampleBuffers/20EventsRN.json b/test/Serilog.Sinks.Loggly.Tests/Sinks/Loggly/SampleBuffers/20EventsRN.json
new file mode 100644
index 0000000..f2c6075
--- /dev/null
+++ b/test/Serilog.Sinks.Loggly.Tests/Sinks/Loggly/SampleBuffers/20EventsRN.json
@@ -0,0 +1,20 @@
+{"Timestamp": "2017-09-24T22:30:09.5025793+00:00","Syslog": {"MessageId": 0,"Level": 6},"Data": {"Message": "Finished handling request...","MachineName": "test-A1","Level": "Information"},"Options": {"Tags": []}}
+{"Timestamp": "2017-09-24T22:30:09.5025793+00:00","Syslog": {"MessageId": 0,"Level": 6},"Data": {"Message": "Finished handling request...","MachineName": "test-A1","Level": "Information"},"Options": {"Tags": []}}
+{"Timestamp": "2017-09-24T22:30:09.5025793+00:00","Syslog": {"MessageId": 0,"Level": 6},"Data": {"Message": "Finished handling request...","MachineName": "test-A1","Level": "Information"},"Options": {"Tags": []}}
+{"Timestamp": "2017-09-24T22:30:09.5025793+00:00","Syslog": {"MessageId": 0,"Level": 6},"Data": {"Message": "Finished handling request...","MachineName": "test-A1","Level": "Information"},"Options": {"Tags": []}}
+{"Timestamp": "2017-09-24T22:30:09.5025793+00:00","Syslog": {"MessageId": 0,"Level": 6},"Data": {"Message": "Finished handling request...","MachineName": "test-A1","Level": "Information"},"Options": {"Tags": []}}
+{"Timestamp": "2017-09-24T22:30:09.5025793+00:00","Syslog": {"MessageId": 0,"Level": 6},"Data": {"Message": "Finished handling request...","MachineName": "test-A1","Level": "Information"},"Options": {"Tags": []}}
+{"Timestamp": "2017-09-24T22:30:09.5025793+00:00","Syslog": {"MessageId": 0,"Level": 6},"Data": {"Message": "Finished handling request...","MachineName": "test-A1","Level": "Information"},"Options": {"Tags": []}}
+{"Timestamp": "2017-09-24T22:30:09.5025793+00:00","Syslog": {"MessageId": 0,"Level": 6},"Data": {"Message": "Finished handling request...","MachineName": "test-A1","Level": "Information"},"Options": {"Tags": []}}
+{"Timestamp": "2017-09-24T22:30:09.5025793+00:00","Syslog": {"MessageId": 0,"Level": 6},"Data": {"Message": "Finished handling request...","MachineName": "test-A1","Level": "Information"},"Options": {"Tags": []}}
+{"Timestamp": "2017-09-24T22:30:09.5025793+00:00","Syslog": {"MessageId": 0,"Level": 6},"Data": {"Message": "Finished handling request...","MachineName": "test-A1","Level": "Information"},"Options": {"Tags": []}}
+{"Timestamp": "2017-09-24T22:30:09.5025793+00:00","Syslog": {"MessageId": 0,"Level": 6},"Data": {"Message": "Finished handling request...","MachineName": "test-A1","Level": "Information"},"Options": {"Tags": []}}
+{"Timestamp": "2017-09-24T22:30:09.5025793+00:00","Syslog": {"MessageId": 0,"Level": 6},"Data": {"Message": "Finished handling request...","MachineName": "test-A1","Level": "Information"},"Options": {"Tags": []}}
+{"Timestamp": "2017-09-24T22:30:09.5025793+00:00","Syslog": {"MessageId": 0,"Level": 6},"Data": {"Message": "Finished handling request...","MachineName": "test-A1","Level": "Information"},"Options": {"Tags": []}}
+{"Timestamp": "2017-09-24T22:30:09.5025793+00:00","Syslog": {"MessageId": 0,"Level": 6},"Data": {"Message": "Finished handling request...","MachineName": "test-A1","Level": "Information"},"Options": {"Tags": []}}
+{"Timestamp": "2017-09-24T22:30:09.5025793+00:00","Syslog": {"MessageId": 0,"Level": 6},"Data": {"Message": "Finished handling request...","MachineName": "test-A1","Level": "Information"},"Options": {"Tags": []}}
+{"Timestamp": "2017-09-24T22:30:09.5025793+00:00","Syslog": {"MessageId": 0,"Level": 6},"Data": {"Message": "Finished handling request...","MachineName": "test-A1","Level": "Information"},"Options": {"Tags": []}}
+{"Timestamp": "2017-09-24T22:30:09.5025793+00:00","Syslog": {"MessageId": 0,"Level": 6},"Data": {"Message": "Finished handling request...","MachineName": "test-A1","Level": "Information"},"Options": {"Tags": []}}
+{"Timestamp": "2017-09-24T22:30:09.5025793+00:00","Syslog": {"MessageId": 0,"Level": 6},"Data": {"Message": "Finished handling request...","MachineName": "test-A1","Level": "Information"},"Options": {"Tags": []}}
+{"Timestamp": "2017-09-24T22:30:09.5025793+00:00","Syslog": {"MessageId": 0,"Level": 6},"Data": {"Message": "Finished handling request...","MachineName": "test-A1","Level": "Information"},"Options": {"Tags": []}}
+{"Timestamp": "2017-09-24T22:30:09.5025793+00:00","Syslog": {"MessageId": 0,"Level": 6},"Data": {"Message": "Finished handling request...","MachineName": "test-A1","Level": "Information"},"Options": {"Tags": []}}
\ No newline at end of file
diff --git a/test/Serilog.Sinks.Loggly.Tests/Sinks/Loggly/SampleBuffers/singleEvent.json b/test/Serilog.Sinks.Loggly.Tests/Sinks/Loggly/SampleBuffers/singleEvent.json
new file mode 100644
index 0000000..fcefa30
--- /dev/null
+++ b/test/Serilog.Sinks.Loggly.Tests/Sinks/Loggly/SampleBuffers/singleEvent.json
@@ -0,0 +1 @@
+{"Timestamp": "2017-09-24T22:30:09.5025793+00:00","Syslog": {"MessageId": 0,"Level": 6},"Data": {"Message": "Finished handling request...","MachineName": "test-A1","Level": "Information"},"Options": {"Tags": []}}
\ No newline at end of file